defmodule Vex do
@moduledoc """
Data Validation for Elixir.
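A quick usage sketch (assuming the built-in `presence` validator and
keyword-list data):

```elixir
Vex.valid?([name: "Foo"], name: [presence: true])
#=> true
```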
"""
alias Vex.{
Extract,
InvalidValidatorError,
Validator,
Validator.Source
}
def valid?(data) do
valid?(data, Extract.settings(data))
end
def valid?(data, settings) do
errors(data, settings) == []
end
def validate(data) do
validate(data, Extract.settings(data))
end
def validate(data, settings) do
case errors(data, settings) do
errors when errors != [] -> {:error, errors}
_ -> {:ok, data}
end
end
def errors(data) do
errors(data, Extract.settings(data))
end
def errors(data, settings) do
Enum.filter(results(data, settings), &match?({:error, _, _, _}, &1))
end
def results(data) do
results(data, Extract.settings(data))
end
def results(data, settings) do
settings
|> Enum.map(fn {attribute, validations} ->
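# a bare function is treated as shorthand for a `:by` validation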
validations =
case is_function(validations) do
true -> [by: validations]
false -> validations
end
Enum.map(validations, fn {name, options} ->
result(data, attribute, name, options)
end)
end)
|> List.flatten()
end
defp result(data, attribute, name, options) do
v = validator(name)
if Validator.validate?(data, options) do
result = data |> extract(attribute, name) |> v.validate(data, options)
case result do
{:error, message} -> {:error, attribute, name, message}
:ok -> {:ok, attribute, name}
_ -> raise "'#{name}' validator should return :ok or {:error, message}"
end
else
{:not_applicable, attribute, name}
end
end
@doc """
Lookup a validator from configured sources
## Examples
iex> Vex.validator(:presence)
Vex.Validators.Presence
iex> Vex.validator(:exclusion)
Vex.Validators.Exclusion
"""
def validator(name) do
case validator(name, sources()) do
nil -> raise InvalidValidatorError, validator: name, sources: sources()
found -> found
end
end
@doc """
Lookup a validator from given sources
## Examples
iex> Vex.validator(:presence, [[presence: :presence_stub]])
:presence_stub
iex> Vex.validator(:exclusion, [Vex.Validators])
Vex.Validators.Exclusion
iex> Vex.validator(:presence, [Vex.Validators, [presence: :presence_stub]])
Vex.Validators.Presence
iex> Vex.validator(:presence, [[presence: :presence_stub], Vex.Validators])
:presence_stub
"""
def validator(name, sources) do
Enum.find_value(sources, fn source ->
Source.lookup(source, name)
end)
end
defp sources do
case Application.get_env(:vex, :sources) do
nil -> [Vex.Validators]
sources -> sources
end
end
defp extract(data, attribute, :confirmation) do
[attribute, String.to_atom("#{attribute}_confirmation")]
|> Enum.map(fn attr -> Extract.attribute(data, attr) end)
end
defp extract(data, attribute, _name) do
Extract.attribute(data, attribute)
end
end
# source: lib/vex.ex
defmodule Stripe.PaymentIntent do
@moduledoc """
Work with [Stripe `payment_intent` objects](https://stripe.com/docs/api/payment_intents).
You can:
- [Create a payment_intent](https://stripe.com/docs/api/payment_intents/create)
- [Retrieve a payment_intent](https://stripe.com/docs/api/payment_intents/retrieve)
- [Update a payment_intent](https://stripe.com/docs/api/payment_intents/update)
- [Confirm a payment_intent](https://stripe.com/docs/api/payment_intents/confirm)
- [Capture a payment_intent](https://stripe.com/docs/api/payment_intents/capture)
- [Cancel a payment_intent](https://stripe.com/docs/api/payment_intents/cancel)
- [List all payment_intent](https://stripe.com/docs/api/payment_intents/list)
"""
use Stripe.Entity
import Stripe.Request
require Stripe.Util
@type last_payment_error :: %{
type: String.t(),
charge: String.t(),
code: String.t(),
decline_code: String.t(),
doc_url: String.t(),
message: String.t(),
param: String.t(),
payment_intent: Stripe.PaymentIntent.t() | map,
source: Stripe.Card.t() | map
}
@type next_action :: %{
redirect_to_url: redirect_to_url | nil,
type: String.t(),
use_stripe_sdk: map | nil
}
@type redirect_to_url :: %{
return_url: String.t(),
url: String.t()
}
@type transfer_data :: %{
:destination => String.t()
}
@type t :: %__MODULE__{
id: Stripe.id(),
object: String.t(),
amount: non_neg_integer,
amount_capturable: non_neg_integer,
amount_received: non_neg_integer,
application: Stripe.id() | nil,
application_fee_amount: non_neg_integer | nil,
canceled_at: Stripe.timestamp() | nil,
cancellation_reason: String.t() | nil,
capture_method: String.t(),
charges: Stripe.List.t(Stripe.Charge.t()),
client_secret: String.t(),
confirmation_method: String.t(),
created: Stripe.timestamp(),
currency: String.t(),
customer: Stripe.id() | Stripe.Customer.t() | nil,
description: String.t() | nil,
invoice: Stripe.id() | Stripe.Invoice.t() | nil,
last_payment_error: last_payment_error | nil,
livemode: boolean,
metadata: Stripe.Types.metadata(),
next_action: next_action | nil,
on_behalf_of: Stripe.id() | Stripe.Account.t() | nil,
payment_method: Stripe.id() | Stripe.PaymentMethod.t() | nil,
payment_method_options: map,
payment_method_types: list(String.t()),
receipt_email: String.t() | nil,
review: Stripe.id() | Stripe.Review.t() | nil,
shipping: Stripe.Types.shipping() | nil,
source: Stripe.Card.t() | map,
statement_descriptor: String.t() | nil,
statement_descriptor_suffix: String.t() | nil,
status: String.t(),
setup_future_usage: String.t() | nil,
transfer_data: transfer_data | nil,
transfer_group: String.t() | nil
}
defstruct [
:id,
:object,
:amount,
:amount_capturable,
:amount_received,
:application,
:application_fee_amount,
:canceled_at,
:cancellation_reason,
:capture_method,
:charges,
:client_secret,
:confirmation_method,
:created,
:currency,
:customer,
:description,
:invoice,
:last_payment_error,
:livemode,
:metadata,
:next_action,
:on_behalf_of,
:payment_method,
:payment_method_options,
:payment_method_types,
:receipt_email,
:review,
:shipping,
:source,
:statement_descriptor,
:statement_descriptor_suffix,
:setup_future_usage,
:status,
:transfer_data,
:transfer_group
]
@plural_endpoint "payment_intents"
@doc """
Create a payment intent.
See the [Stripe docs](https://stripe.com/docs/api/payment_intents/create).
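
## Example

A sketch (not verified against a live key); `amount` is in the currency's
smallest unit, e.g. cents for USD:

```elixir
{:ok, intent} =
  Stripe.PaymentIntent.create(%{
    amount: 2_000,
    currency: "usd",
    payment_method_types: ["card"]
  })
```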
"""
@spec create(params, Stripe.options()) :: {:ok, t} | {:error, Stripe.Error.t()}
when params:
%{
:amount => pos_integer,
:currency => String.t(),
:payment_method_types => [String.t()],
optional(:application_fee_amount) => non_neg_integer,
optional(:capture_method) => String.t(),
optional(:confirm) => boolean,
optional(:customer) => Stripe.id() | Stripe.Customer.t(),
optional(:description) => String.t(),
optional(:metadata) => map,
optional(:off_session) => boolean,
optional(:on_behalf_of) => Stripe.id() | Stripe.Account.t(),
optional(:payment_method) => String.t(),
optional(:payment_method_options) => map,
optional(:receipt_email) => String.t(),
optional(:return_url) => String.t(),
optional(:save_payment_method) => boolean,
optional(:setup_future_usage) => String.t(),
optional(:shipping) => Stripe.Types.shipping(),
optional(:source) => Stripe.id() | Stripe.Card.t(),
optional(:statement_descriptor) => String.t(),
optional(:statement_descriptor_suffix) => String.t(),
optional(:transfer_data) => transfer_data,
optional(:transfer_group) => String.t()
}
| %{}
def create(params, opts \\ []) do
new_request(opts)
|> put_endpoint(@plural_endpoint)
|> put_params(params)
|> put_method(:post)
|> cast_to_id([:on_behalf_of, :customer, :source])
|> make_request()
end
@doc """
Retrieves the details of a PaymentIntent that has previously been created.
Client-side retrieval using a publishable key is allowed when the client_secret is provided in the query string.
When retrieved with a publishable key, only a subset of properties will be returned. Please refer to the payment intent object reference for more details.
See the [Stripe docs](https://stripe.com/docs/api/payment_intents/retrieve).
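
## Example

A sketch using a hypothetical intent id:

```elixir
{:ok, intent} = Stripe.PaymentIntent.retrieve("pi_123", %{})
```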
"""
@spec retrieve(Stripe.id() | t, params, Stripe.options()) ::
{:ok, t} | {:error, Stripe.Error.t()}
when params:
%{
optional(:client_secret) => String.t()
}
| %{}
def retrieve(id, params, opts \\ []) do
new_request(opts)
|> put_endpoint(@plural_endpoint <> "/#{get_id!(id)}")
|> put_params(params)
|> put_method(:get)
|> make_request()
end
@doc """
Updates a PaymentIntent object.
See the [Stripe docs](https://stripe.com/docs/api/payment_intents/update).
"""
@spec update(Stripe.id() | t, params, Stripe.options()) :: {:ok, t} | {:error, Stripe.Error.t()}
when params:
%{
optional(:amount) => non_neg_integer,
optional(:application_fee_amount) => non_neg_integer,
optional(:currency) => String.t(),
optional(:customer) => Stripe.id() | Stripe.Customer.t(),
optional(:description) => String.t(),
optional(:metadata) => map,
optional(:payment_method) => String.t(),
optional(:payment_method_types) => [Stripe.id()],
optional(:receipt_email) => String.t(),
optional(:save_payment_method) => boolean,
optional(:setup_future_usage) => String.t(),
optional(:shipping) => Stripe.Types.shipping(),
optional(:source) => Stripe.id() | Stripe.Card.t(),
optional(:statement_descriptor_suffix) => String.t(),
optional(:transfer_group) => String.t()
}
| %{}
def update(id, params, opts \\ []) do
new_request(opts)
|> put_endpoint(@plural_endpoint <> "/#{get_id!(id)}")
|> put_method(:post)
|> put_params(params)
|> make_request()
end
@doc """
Confirm that your customer intends to pay with current or provided source. Upon confirmation,
the PaymentIntent will attempt to initiate a payment.
If the selected source requires additional authentication steps, the PaymentIntent will transition to
the requires_action status and suggest additional actions via next_source_action.
If payment fails, the PaymentIntent will transition to the requires_payment_method status.
If payment succeeds, the PaymentIntent will transition to the succeeded status (or requires_capture,
if capture_method is set to manual). Read the expanded documentation to learn more about server-side confirmation.
See the [Stripe docs](https://stripe.com/docs/api/payment_intents/confirm).
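
## Example

A sketch using a hypothetical intent id:

```elixir
{:ok, intent} = Stripe.PaymentIntent.confirm("pi_123", %{})
```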
"""
@spec confirm(Stripe.id() | t, params, Stripe.options()) ::
{:ok, t} | {:error, Stripe.Error.t()}
when params:
%{
optional(:client_secret) => String.t(),
optional(:receipt_email) => String.t(),
optional(:return_url) => String.t(),
optional(:save_payment_method) => boolean,
optional(:shipping) => Stripe.Types.shipping(),
optional(:source) => Stripe.id() | Stripe.Card.t()
}
| %{}
def confirm(id, params, opts \\ []) do
new_request(opts)
|> put_endpoint(@plural_endpoint <> "/#{get_id!(id)}" <> "/confirm")
|> put_method(:post)
|> put_params(params)
|> make_request()
end
@doc """
Capture the funds of an existing uncaptured PaymentIntent where status="requires_capture".
Uncaptured PaymentIntents will be canceled exactly seven days after they are created.
See the [Stripe docs](https://stripe.com/docs/api/payment_intents/capture).
"""
@spec capture(Stripe.id() | t, params, Stripe.options()) ::
{:ok, t} | {:error, Stripe.Error.t()}
when params:
%{
optional(:amount_to_capture) => non_neg_integer,
optional(:application_fee_amount) => non_neg_integer
}
| %{}
def capture(id, params, opts \\ []) do
new_request(opts)
|> put_endpoint(@plural_endpoint <> "/#{get_id!(id)}/capture")
|> put_params(params)
|> put_method(:post)
|> make_request()
end
@doc """
A PaymentIntent object can be canceled when it is in one of these statuses: requires_payment_method,
requires_capture, requires_confirmation, requires_action.
Once canceled, no additional charges will be made by the PaymentIntent and any operations on the PaymentIntent will fail with an error.
For PaymentIntents with status='requires_capture', the remaining amount_capturable will automatically be refunded.
See the [Stripe docs](https://stripe.com/docs/api/payment_intents/cancel).
"""
@spec cancel(Stripe.id() | t, params, Stripe.options()) :: {:ok, t} | {:error, Stripe.Error.t()}
when params:
%{
optional(:cancellation_reason) => String.t()
}
| %{}
def cancel(id, params, opts \\ []) do
new_request(opts)
|> put_endpoint(@plural_endpoint <> "/#{get_id!(id)}" <> "/cancel")
|> put_method(:post)
|> put_params(params)
|> make_request()
end
@doc """
Returns a list of PaymentIntents.
See the [Stripe docs](https://stripe.com/docs/api/payment_intents/list).
"""
@spec list(params, Stripe.options()) :: {:ok, Stripe.List.t(t)} | {:error, Stripe.Error.t()}
when params: %{
optional(:customer) => Stripe.id() | Stripe.Customer.t(),
optional(:created) => Stripe.date_query(),
optional(:ending_before) => t | Stripe.id(),
optional(:limit) => 1..100,
optional(:starting_after) => t | Stripe.id()
}
def list(params \\ %{}, opts \\ []) do
new_request(opts)
|> prefix_expansions()
|> put_endpoint(@plural_endpoint)
|> put_method(:get)
|> put_params(params)
|> cast_to_id([:ending_before, :starting_after, :customer])
|> make_request()
end
end
# source: lib/stripe/core_resources/payment_intent.ex
defmodule Timber.Formatter do
@moduledoc """
Provides utilities for formatting log lines as text
This formatter is designed for use with the default `:console` backend provided by
Elixir Logger. To use this, you'll need to configure the console backend to call
the `Timber.Formatter.format/4` function instead of its default formatting function.
This is done with a simple configuration change. You'll also need to let `:console`
know that `:all` metadata keys should be passed to the formatter.
The result of the configuration looks like:
```elixir
config :logger, backends: [:console]
config :logger, :console,
format: {Timber.Formatter, :format},
metadata: :all
```
Further configuration options available on this module are documented below.
## Configuration Recommendations: Development vs. Production
In a standard Elixir project, you will probably have different configuration files
for your development and production setups. These configuration files typically
take the form of `config/dev.exs` and `config/prod.exs` which override defaults set
in `config/config.exs`.
Timber's defaults are production ready, but the production settings also assume that
you'll be viewing the logs through the Timber console, so they forego some niceties
that help when developing locally. Therefore, to help with local development, we
recommend this configuration for your `:dev` environment:
```
# config/dev.exs
config :timber, Timber.Formatter,
colorize: true,
format: :logfmt,
print_timestamps: true,
print_log_level: true
```
This will configure Timber to output logs in logfmt instead of JSON, print the log
level and timestamps, and colorize the logs.
## Transport Configuration Options
The following options are available when configuring the formatter:
#### `colorize`
When `true`, the log level will be printed in a corresponding color using
ANSI console control characters to help identify it.
When `false`, the log level will be printed out as standard text.
_Defaults to `true`._
#### `escape_new_lines`
When `true`, newline characters are escaped as `\\n`.
When `false`, newline characters are left alone.
This circumvents issues with output devices (like Heroku Logplex) that will transform
line breaks into multiple log lines.
The default depends on the environment variable `HEROKU`. If the environment variable
is present, this will be set to `true`. Otherwise, this defaults to `false`. Setting the
value in your application configuration will always override the initialized setting.
#### `format`
Determines the output format to use. Even though the Timber service is designed
to receive log metadata in JSON format, it's not the prettiest format to look at when
you're developing locally. Therefore, we let you print the metadata in logfmt locally
to make it easier on the eyes.
Valid values:
- `:json`
- `:logfmt` (not supported in production)
_Defaults to `:json`._
#### `print_log_level`
When `true`, the log level is printed in brackets as part of your log message.
When `false`, the log level is not printed.
Regardless of the setting used, the log level will be recorded as part of Timber's
metadata. Setting this to `false` is recommended for production usage if you only
use Timber for viewing logs.
_Defaults to `false`._
#### `print_metadata`
The Timber metadata contains additional information about your log lines, but this
can become unwieldy in local development scenarios.
When `true`, the Timber metadata is printed out at the end of the log line (starting
with the indicator "@metadata").
When `false`, the Timber metadata is not printed.
Note: This should _always_ be `true` in production.
_Defaults to `true`._
#### `print_timestamps`
When `true`, the timestamp for the log will be output at the front
of the statement.
When `false`, the timestamp will be suppressed. This is only useful in situations
where the log will be written to an evented IO service that automatically adds
timestamps for incoming data, like Heroku Logplex.
Regardless of the setting used, the timestamp will be recorded as part of Timber's
metadata. Setting this to `false` is recommended for production usage if you only
use Timber for viewing logs.
_Defaults to `false`._
"""
alias Timber.LoggerBackends.HTTP, as: LoggerBackend
@default_colorize true
@default_escape_new_lines false
@default_format :json
@default_print_log_level false
@default_print_metadata true
@default_print_timestamps false
@metadata_delimiter " @metadata "
alias Timber.LogEntry
@type configuration :: %{
required(:colorize) => boolean,
required(:escape_new_lines) => boolean,
required(:format) => :json | :logfmt,
required(:print_log_level) => boolean,
required(:print_metadata) => boolean,
required(:print_timestamps) => boolean
}
@doc """
Handles formatting a log for the `Logger` application
This function allows you to integrate Timber with the default `:console` backend
distributed with the Elixir `Logger` application. By default, metadata will be
output as a JSON document after the `@metadata` keyword on the line. You can also
opt for the output to be in logfmt by setting the appropriate configuration key.
"""
def format(level, message, ts, metadata) do
configuration = get_configuration()
log_entry = LogEntry.new(ts, level, message, metadata)
level_b = colorize_log_level(log_entry.level, configuration.colorize)
metadata =
if configuration.print_metadata do
log_entry
|> LogEntry.encode_to_iodata!(configuration.format, except: [:message])
|> wrap_metadata()
else
[]
end
line_output =
[message, metadata]
|> add_log_level(level_b, configuration.print_log_level)
|> add_timestamp(log_entry.dt, configuration.print_timestamps)
|> escape_new_lines(configuration.escape_new_lines)
# Prevents the final new line from being escaped
[line_output, ?\n]
end
@spec get_configuration() :: configuration
defp get_configuration() do
options = Application.get_env(:timber, __MODULE__, [])
colorize = Keyword.get(options, :colorize, @default_colorize)
escape_new_lines = Keyword.get(options, :escape_new_lines, @default_escape_new_lines)
format = Keyword.get(options, :format, @default_format)
print_log_level = Keyword.get(options, :print_log_level, @default_print_log_level)
print_metadata = Keyword.get(options, :print_metadata, @default_print_metadata)
print_timestamps = Keyword.get(options, :print_timestamps, @default_print_timestamps)
%{
colorize: colorize,
escape_new_lines: escape_new_lines,
format: format,
print_log_level: print_log_level,
print_metadata: print_metadata,
print_timestamps: print_timestamps
}
end
@spec add_timestamp(IO.chardata(), IO.chardata(), boolean) :: IO.chardata()
defp add_timestamp(message, _, false), do: message
defp add_timestamp(message, timestamp, true) do
[timestamp, " " | message]
end
@spec wrap_metadata(IO.chardata()) :: IO.chardata()
defp wrap_metadata(metadata) do
[@metadata_delimiter, metadata]
end
@spec add_log_level(IO.chardata(), IO.chardata(), boolean) :: IO.chardata()
defp add_log_level(message, _, false), do: message
defp add_log_level(message, log_level, true) do
["[", log_level, "] " | message]
end
@spec colorize_log_level(LoggerBackend.level(), boolean) :: IO.chardata()
defp colorize_log_level(level_a, false), do: Atom.to_string(level_a)
defp colorize_log_level(level_a, true) do
color = log_level_color(level_a)
level_b = Atom.to_string(level_a)
[color, level_b]
|> IO.ANSI.format(true)
end
@spec log_level_color(LoggerBackend.level()) :: atom
defp log_level_color(:debug), do: :cyan
defp log_level_color(:warn), do: :yellow
defp log_level_color(:error), do: :red
defp log_level_color(_), do: :normal
@spec escape_new_lines(IO.chardata(), boolean) :: IO.chardata()
defp escape_new_lines(message, false),
do: message
defp escape_new_lines(message, true) do
message
|> to_string()
|> String.replace("\n", "\\n")
end
end
# source: lib/timber/formatter.ex
defmodule Bump do
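@moduledoc """
Encodes a canvas as an uncompressed 24-bit BMP ("BM" magic bytes, 40-byte
BITMAPINFOHEADER) and reads raw pixel data back out of a BMP file.
"""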
def to_iodata(%{size: %{height: height, width: width}} = canvas) do
resolution = 2835
info_header_size = 40
offset = 14 + info_header_size
padding_size = rem(width, 4)
padding = List.duplicate(0, padding_size)
pixel_data =
canvas
|> Canvas.pixel_data()
|> Stream.chunk_every(width)
|> Stream.map(&(&1 ++ padding))
|> Enum.to_list()
|> :binary.list_to_bin()
size_of_file = byte_size(pixel_data) + offset
header = <<
0x42,
0x4D,
size_of_file::unsigned-little-integer-size(32),
0x0::size(32),
offset::unsigned-little-integer-size(32),
info_header_size::unsigned-little-integer-size(32),
width::unsigned-little-integer-size(32),
# negative height signals that rows are top to bottom
-height::unsigned-little-integer-size(32),
# color plane
1::unsigned-little-integer-size(16),
# bits per pixel (color depth)
24::unsigned-little-integer-size(16),
# disable compression
0x0::unsigned-little-integer-size(32),
# size of image
byte_size(pixel_data)::unsigned-little-integer-size(32),
# horizontal resolution
resolution::unsigned-little-integer-size(32),
# vertical resolution
resolution::unsigned-little-integer-size(32),
# colors
0x0::unsigned-little-integer-size(32),
# important colors
0x0::unsigned-little-integer-size(32)
>>
header <> pixel_data
end
def pixel_data(filename) do
{:ok, filedata} = File.read(filename)
<<0x42, 0x4D, _size_of_file::unsigned-little-integer-size(32), _unused::size(4)-binary,
offset::unsigned-little-integer-size(32),
_info_header_size::unsigned-little-integer-size(32),
# BITMAPINFOHEADER stores width before height
width::unsigned-little-integer-size(32), _height::unsigned-little-integer-size(32),
_unused2::binary>> = filedata
<<_header::size(offset)-binary, data::binary>> = filedata
Stream.chunk_every(:binary.bin_to_list(data), 8)
|> Stream.map(fn row -> Enum.slice(row, 0..(width * 3 - 1)) |> Enum.chunk_every(3) end)
|> Enum.to_list()
end
end
# source: lib/bump.ex
defmodule Ockam.Vault do
@moduledoc false
## NIF functions always infer as any()
## The types are useful for readability
@dialyzer [:no_contracts]
@default_secret_attributes [type: :curve25519, persistence: :ephemeral, length: 32]
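# Every function below dispatches on the vault argument's struct module
# (`%vault_module{id: vault_id}`), so any module implementing these
# functions can serve as a vault backend.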
@doc """
Computes a SHA-256 hash based on input data.
"""
@spec sha256(Ockam.Vault, binary | String.t()) :: {:ok, binary} | :error
def sha256(%vault_module{id: vault_id}, input) do
vault_module.sha256(vault_id, input)
end
@doc """
Fills output_buffer with randomly generated bytes.
"""
@spec random_bytes(Ockam.Vault, binary) :: :error
def random_bytes(%vault_module{id: vault_id}, output_buffer) do
vault_module.random_bytes(vault_id, output_buffer)
end
@doc """
Generates an ockam secret. The attributes keyword list must specify the
configuration for the type of secret to generate.
"""
@spec secret_generate(Ockam.Vault, keyword()) :: {:ok, reference()} | :error
def secret_generate(%vault_module{id: vault_id}, attributes) when is_list(attributes) do
attributes = @default_secret_attributes |> Keyword.merge(attributes) |> Map.new()
vault_module.secret_generate(vault_id, attributes)
end
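# A usage sketch, given some vault implementation bound to `vault`:
#
#   {:ok, secret} = Ockam.Vault.secret_generate(vault, type: :curve25519)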
@doc """
Imports the specified data into the supplied ockam vault secret.
"""
@spec secret_import(Ockam.Vault, keyword(), binary) :: {:ok, reference()} | :error
def secret_import(%vault_module{id: vault_id}, attributes, input) when is_list(attributes) do
attributes = @default_secret_attributes |> Keyword.merge(attributes) |> Map.new()
vault_module.secret_import(vault_id, attributes, input)
end
@doc """
Exports data from an ockam vault secret into the supplied output buffer.
"""
@spec secret_export(Ockam.Vault, reference()) :: {:ok, binary} | :error
def secret_export(%vault_module{id: vault_id}, secret_handle) do
vault_module.secret_export(vault_id, secret_handle)
end
@doc """
Retrieves the public key from an ockam vault secret.
"""
@spec secret_publickey_get(Ockam.Vault, reference()) :: {:ok, reference()} | :error
def secret_publickey_get(%vault_module{id: vault_id}, secret_handle) do
vault_module.secret_publickey_get(vault_id, secret_handle)
end
@doc """
Retrieves the attributes for a specified secret
"""
@spec secret_attributes_get(Ockam.Vault, reference()) :: {:ok, keyword()} | :error
def secret_attributes_get(%vault_module{id: vault_id}, secret_handle) do
with {:ok, attributes} <- vault_module.secret_attributes_get(vault_id, secret_handle) do
{:ok, Map.to_list(attributes)}
end
end
@doc """
Deletes an ockam vault secret.
"""
@spec secret_destroy(Ockam.Vault, reference()) :: :ok | :error
def secret_destroy(%vault_module{id: vault_id}, secret_handle) do
vault_module.secret_destroy(vault_id, secret_handle)
end
@doc """
Performs an ECDH operation on the supplied ockam vault secret and peer_publickey.
The result is another ockam vault secret of type unknown.
"""
@spec ecdh(Ockam.Vault, reference(), binary) :: {:ok, reference()} | :error
def ecdh(%vault_module{id: vault_id}, secret_handle, peer_public_key) do
vault_module.ecdh(vault_id, secret_handle, peer_public_key)
end
@doc """
Performs an HMAC-SHA256 based key derivation function on the supplied salt and input
key material.
Returns handle to derived_output.
"""
@spec hkdf_sha256(Ockam.Vault, reference(), reference(), non_neg_integer()) ::
{:ok, reference()} | :error
def hkdf_sha256(%vault_module{id: vault_id}, salt_handle, ikm_handle, derived_outputs_count) do
vault_module.hkdf_sha256(vault_id, salt_handle, ikm_handle, derived_outputs_count)
end
@doc """
Performs an HMAC-SHA256 based key derivation function on the supplied salt and input key
material.
Returns handle to derived_output.
"""
@spec hkdf_sha256(Ockam.Vault, reference(), reference()) :: {:ok, reference()} | :error
def hkdf_sha256(%vault_module{id: vault_id}, salt_handle, ikm_handle) do
vault_module.hkdf_sha256(vault_id, salt_handle, ikm_handle)
end
@doc """
Encrypts a payload using AES-GCM.
Returns cipher_text after an encryption.
"""
@spec aead_aes_gcm_encrypt(
Ockam.Vault,
reference(),
non_neg_integer(),
String.t() | binary,
binary | String.t()
) :: {:ok, binary} | :error
def aead_aes_gcm_encrypt(%vault_module{id: vault_id}, key_handle, nonce, ad, plain_text) do
vault_module.aead_aes_gcm_encrypt(vault_id, key_handle, nonce, ad, plain_text)
end
@doc """
Decrypts a payload using AES-GCM.
Returns decrypted payload.
"""
@spec aead_aes_gcm_decrypt(
Ockam.Vault,
reference(),
non_neg_integer(),
binary | String.t(),
binary
) :: {:ok, binary | String.t()} | :error
def aead_aes_gcm_decrypt(%vault_module{id: vault_id}, key_handle, nonce, ad, cipher_text) do
vault_module.aead_aes_gcm_decrypt(vault_id, key_handle, nonce, ad, cipher_text)
end
@doc """
Deinitializes the specified ockam vault object.
"""
@spec deinit(Ockam.Vault) :: :ok | :error
def deinit(%vault_module{id: vault_id}) do
vault_module.deinit(vault_id)
end
end
# source: implementations/elixir/ockam/ockam/lib/ockam/vault.ex
defmodule State.Alert.Filter do
@moduledoc """
Documented in State.Alert.filter_by/1.
"""
alias State.Alert
alias State.Alert.{ActivePeriod, InformedEntity, InformedEntityActivity}
@doc false
@spec filter_by(Alert.filter_opts()) :: [Model.Alert.t()]
def filter_by(filter_opts) do
filter_opts
|> filter_to_list_of_ids
|> filter_by_ids(filter_opts)
|> filter_by_informed_entity_activity(filter_opts)
|> filter_by_active_period(filter_opts)
|> Alert.by_ids()
|> filter_alerts_by_banner(filter_opts)
|> filter_alerts_by_lifecycles(filter_opts)
|> filter_alerts_by_severity(filter_opts)
end
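# A usage sketch; the option shapes are assumptions read off the matcher
# clauses below:
#
#   State.Alert.Filter.filter_by(%{
#     routes: ["Red"],
#     severity: [7],
#     datetime: DateTime.utc_now()
#   })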
defp filter_to_list_of_ids(filter_opts) when filter_opts == %{} do
Alert.all_keys()
end
defp filter_to_list_of_ids(filter_opts) do
filter_opts
|> build_matchers()
|> InformedEntity.match()
end
defp build_matchers(filter_opts) do
filter_opts
|> Enum.reduce([%{}], &do_build_matcher/2)
|> reject_empty_matchers
|> Enum.uniq()
end
defp do_build_matcher({:ids, values}, _acc) when is_list(values) do
MapSet.new(values, fn id -> %{id: id} end)
end
defp do_build_matcher({:facilities, values}, acc) when is_list(values) do
matchers_for_values(acc, :facility, values)
end
defp do_build_matcher({:stops, values}, acc) when is_list(values) do
route_matchers =
for route_id <- State.RoutesPatternsAtStop.routes_by_family_stops(values),
stop_id <- [nil | values] do
%{route: route_id, stop: stop_id}
end
stop_matchers =
for stop_id <- [nil | values] do
%{stop: stop_id}
end
for matcher_list <- [route_matchers, stop_matchers],
merge <- matcher_list,
matcher <- acc do
Map.merge(matcher, merge)
end
end
defp do_build_matcher({:routes, values}, acc) when is_list(values) do
for route_id <- values,
for_route <- matchers_for_route_id(route_id),
matcher <- acc do
Map.merge(matcher, for_route)
end
end
defp do_build_matcher({:route_types, values}, acc) when is_list(values) do
matchers_for_values(acc, :route_type, values)
end
defp do_build_matcher({:direction_id, value}, acc) when value in [0, 1] do
matchers_for_values(acc, :direction_id, [value])
end
defp do_build_matcher({:activities, values}, acc) when is_list(values) do
# these are matched later
acc
end
defp do_build_matcher({:trips, values}, acc) when is_list(values) do
# we expand the match for trips, to include the route type, route, and
# direction ID
for trip_id <- values,
for_trip <- matchers_for_trip_id(trip_id),
matcher <- acc do
Map.merge(matcher, for_trip)
end
end
defp do_build_matcher({:banner, value}, acc) when is_boolean(value) do
# these are filtered later
acc
end
defp do_build_matcher({:severity, _}, acc) do
# these are filtered later
acc
end
defp do_build_matcher({:datetime, %DateTime{}}, acc) do
# filtered later
acc
end
defp do_build_matcher({:lifecycles, value}, acc) when is_list(value) do
# filtered later
acc
end
defp do_build_matcher({key, values}, _acc) do
raise ArgumentError, "unknown filter option #{key}, values #{inspect(values)}"
end
defp matchers_for_values(acc, key, values) when is_list(values) do
for value <- values,
matcher <- acc do
Map.put(matcher, key, value)
end
end
defp matchers_for_trip_id(nil) do
[%{trip: nil}]
end
defp matchers_for_trip_id(trip_id) do
with %Model.Trip{} = trip <- State.Trip.by_primary_id(trip_id),
%Model.Route{} = route <- State.Route.by_id(trip.route_id) do
[
%{
route_type: route.type,
route: trip.route_id,
direction_id: trip.direction_id,
trip: trip_id
}
]
else
_ ->
[%{trip: trip_id}]
end
end
defp matchers_for_route_id(nil) do
[%{route: nil}]
end
defp matchers_for_route_id(route_id) do
case State.Route.by_id(route_id) do
%Model.Route{} = route ->
[
%{
route_type: route.type,
route: route_id
}
]
_ ->
[%{route: route_id}]
end
end
# we don't want to include matchers with all nil values unless it's the
# only matcher
defp reject_empty_matchers([_] = matchers) do
matchers
end
defp reject_empty_matchers(matchers) do
Enum.reject(matchers, &empty_matcher?/1)
end
defp empty_matcher?(matcher) do
Enum.all?(matcher, &is_nil(elem(&1, 1)))
end
defp filter_by_informed_entity_activity(alert_ids, filter_opts) do
activities = Map.get(filter_opts, :activities, [])
InformedEntityActivity.filter(alert_ids, activities)
end
defp filter_by_active_period(alert_ids, %{datetime: dt}) do
ActivePeriod.filter(alert_ids, dt)
end
defp filter_by_active_period(alert_ids, _) do
alert_ids
end
defp filter_by_ids([] = ids, _) do
ids
end
defp filter_by_ids(ids, %{ids: ids_to_filter_by}) do
Enum.filter(ids, &(&1 in ids_to_filter_by))
end
defp filter_by_ids(ids, _) do
ids
end
defp filter_alerts_by_severity(alerts, %{severity: nil}) do
alerts
end
defp filter_alerts_by_severity(alerts, %{severity: severities}) when is_list(severities) do
severities = MapSet.new(severities)
Enum.filter(alerts, &MapSet.member?(severities, &1.severity))
end
defp filter_alerts_by_severity(alerts, _) do
# doesn't filter by severity if severity filter missing
alerts
end
defp filter_alerts_by_banner(alerts, %{banner: banner?}) do
Enum.reject(alerts, &(is_nil(&1.banner) == banner?))
end
defp filter_alerts_by_banner(alerts, _) do
# doesn't filter by banner if banner filter missing
alerts
end
defp filter_alerts_by_lifecycles(alerts, %{lifecycles: lifecycles}) do
lifecycles = MapSet.new(lifecycles)
Enum.filter(alerts, &MapSet.member?(lifecycles, &1.lifecycle))
end
defp filter_alerts_by_lifecycles(alerts, _) do
alerts
end
end
# source: apps/state/lib/state/alert/filter.ex
defmodule RePG2 do
@moduledoc """
The RePG2 interface.
From the [Erlang pg2 docs](http://erlang.org/doc/man/pg2.html):
> This module implements process groups. Each message may be sent to one,
> some, or all members of the group.
>
> A group of processes can be accessed by a common name. For example, if
> there is a group named foobar, there can be a set of processes (which
> can be located on different nodes) which are all members of the group
> foobar. There are no special functions for sending a message to the group.
> Instead, client functions should be written with the functions
> get_members/1 and get_local_members/1 to find out which processes are
> members of the group. Then the message can be sent to one or more members
> of the group.
>
> If a member terminates, it is automatically removed from the group.
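
A minimal usage sketch:

```elixir
:ok = RePG2.create(:my_group)
:ok = RePG2.join(:my_group, self())
members = RePG2.get_members(:my_group)
```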
"""
alias RePG2.{Impl, Worker}
@typedoc "A process group name."
@type name :: term
@doc """
Create a process group with given `name`.
From the [Erlang pg2 docs](http://erlang.org/doc/man/pg2.html):
> Creates a new, empty process group. The group is globally visible on all
> nodes. If the group exists, nothing happens.
"""
@spec create(name) :: :ok
def create(name) do
unless Impl.group_exists?(name) do
Worker.globally_locked_multi_call(name, {:create, name})
end
:ok
end
@doc """
Delete the process group with given `name`.
From the [Erlang pg2 docs](http://erlang.org/doc/man/pg2.html):
> Deletes a process group.
"""
@spec delete(name) :: :ok
def delete(name) do
Worker.globally_locked_multi_call(name, {:delete, name})
:ok
end
@doc """
Join `pid` to the process group with given `name`.
From the [Erlang pg2 docs](http://erlang.org/doc/man/pg2.html):
> Joins the process Pid to the group Name. A process can join a group several
> times; it must then leave the group the same number of times.
"""
@spec join(name, pid) :: :ok | {:error, {:no_such_group, name}}
def join(name, pid) do
if Impl.group_exists?(name) do
Worker.globally_locked_multi_call(name, {:join, name, pid})
:ok
else
{:error, {:no_such_group, name}}
end
end
@doc """
Make `pid` leave the process group with given `name`.
From the [Erlang pg2 docs](http://erlang.org/doc/man/pg2.html):
> Makes the process Pid leave the group Name. If the process is not a member
> of the group, ok is returned.
"""
@spec leave(name, pid) :: :ok | {:error, {:no_such_group, name}}
def leave(name, pid) do
if Impl.group_exists?(name) do
Worker.globally_locked_multi_call(name, {:leave, name, pid})
:ok
else
{:error, {:no_such_group, name}}
end
end
@doc """
Get all members of the process group with given `name`.
From the [Erlang pg2 docs](http://erlang.org/doc/man/pg2.html):
> Returns all processes in the group Name. This function should be used from
> within a client function that accesses the group. It is therefore optimized
> for speed.
"""
@spec get_members(name) :: [pid] | {:error, {:no_such_group, name}}
def get_members(name) do
if Impl.group_exists?(name) do
Impl.group_members(name)
else
{:error, {:no_such_group, name}}
end
end
@doc """
Get all members of the process group with given `name` on the local node.
From the [Erlang pg2 docs](http://erlang.org/doc/man/pg2.html):
> Returns all processes running on the local node in the group Name. This
> function should be used from within a client function that accesses the
> group. It is therefore optimized for speed.
"""
@spec get_local_members(name) :: [pid] | {:error, {:no_such_group, name}}
def get_local_members(name) do
if Impl.group_exists?(name) do
Impl.local_group_members(name)
else
{:error, {:no_such_group, name}}
end
end
@doc """
Get a random member of the process group with given `name` on the local node.
From the [Erlang pg2 docs](http://erlang.org/doc/man/pg2.html):
> This is a useful dispatch function which can be used from client functions.
> It returns a process on the local node, if such a process exists. Otherwise,
> it chooses one randomly.
"""
@spec get_closest_pid(name) ::
pid | {:error, {:no_such_group, name} | {:no_process, name}}
def get_closest_pid(name) do
case get_local_members(name) do
[pid] ->
pid
[] ->
case get_members(name) do
[] ->
{:error, {:no_process, name}}
members ->
Enum.random(members)
end
members when is_list(members) ->
Enum.random(members)
other ->
other
end
end
@doc """
Get a list of all known groups.
From the [Erlang pg2 docs](http://erlang.org/doc/man/pg2.html):
> Returns a list of all known groups.
"""
@spec which_groups() :: [name]
def which_groups, do: Impl.all_groups()
end
# source: lib/repg2.ex
defmodule Ash.Filter do
@moduledoc """
The representation of a filter in Ash.
Ash filters are stored as nested `%Ash.Query.Expression{}` and `%Ash.Query.Not{}` structs,
terminating in an operator or a function struct. An expression is simply a boolean operator
and the left and right hand side of that operator.
## Filter Templates
Filter templates are simplified filter statements (they only support atom keys) that have substitutions in them.
Currently, the substitutions are `{:_actor, :field}` and `{:_actor, :_primary_key}`.
You can pass a filter template to `build_filter_from_template/2` with an actor, and it will return the filled-in filter.
Additionally, you can ask if the filter template contains an actor reference via `template_references_actor?/1`
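For example (a sketch, assuming the actor has an `:id` field):

```elixir
template = [owner_id: {:_actor, :id}]
Ash.Filter.build_filter_from_template(template, actor)
#=> [owner_id: actor.id]
```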
## Writing a filter:
A filter is a nested keyword list (with some exceptions, like `true` for everything and `false` for nothing).
The key is the "predicate" (A.K.A condition) and the value is the parameter. You can use `and` and `or` to create
nested filters. Datalayers can expose custom predicates. Eventually, you will be able to define your own custom
predicates, which will be a mechanism for you to attach complex filters supported by the data layer to your queries.
** Important **
In a given keyword list, all predicates are considered to be "ands". So `[or: [first_name: "Tom", last_name: "Bombadil"]]` doesn't
mean 'First name == "tom" or last_name == "bombadil"'. To say that, you want to provide a list of filters,
like so: `[or: [[first_name: "Tom"], [last_name: "Bombadil"]]]`
The builtin predicates are:
* eq - shorthand for equals
* equals
* in
* lt - shorthand for less_than
* gt - shorthand for greater_than
* lte - shorthand for less_than_or_equal
* gte - shorthand for greater_than_or_equal
* less_than
* greater_than
* less_than_or_equal
* greater_than_or_equal
* is_nil
Some example filters:
```elixir
[name: "Zardoz"]
[first_name: "Zar", last_name: "Doz"]
[first_name: "Zar", last_name: [in: ["Doz", "Daz"]], high_score: [greater_than: 10]]
[first_name: "Zar", last_name: [in: ["Doz", "Daz"]], high_score: [greater_than: 10]]
[or: [
[first_name: "Zar"],
[last_name: "Doz"],
[or: [
[high_score: [greater_than: 10]]],
[high_score: [less_than: -10]]
]
]]
```
"""
alias Ash.Actions.SideLoad
alias Ash.Engine.Request
alias Ash.Error.Query.{
AggregatesNotSupported,
InvalidFilterValue,
NoSuchAttributeOrRelationship,
NoSuchFilterPredicate,
ReadActionRequired
}
alias Ash.Query.Function.IsNil
alias Ash.Query.Operator.{
Eq,
GreaterThan,
GreaterThanOrEqual,
In,
LessThan,
LessThanOrEqual
}
alias Ash.Query.{Expression, Not, Ref}
alias Ash.Query.{Aggregate, Function, Operator}
@functions [
IsNil
]
@operators [
Ash.Query.Operator.IsNil,
Eq,
In,
LessThan,
GreaterThan,
LessThanOrEqual,
GreaterThanOrEqual
]
@builtins @functions ++ @operators
@operator_aliases [
eq: Eq,
equals: Eq,
gt: GreaterThan,
greater_than: GreaterThan,
lt: LessThan,
less_than: LessThan,
gte: GreaterThanOrEqual,
greater_than_or_equal: GreaterThanOrEqual,
lte: LessThanOrEqual,
less_than_or_equal: LessThanOrEqual
]
@builtin_operators Enum.map(@operators, &{&1.operator(), &1}) ++ @operator_aliases
@builtin_functions Enum.map(@functions, &{&1.name(), &1})
@string_builtin_operators Enum.into(@builtin_operators, %{}, fn {key, value} ->
{to_string(key), value}
end)
@string_builtin_functions Enum.into(@builtin_functions, %{}, fn {key, value} ->
{to_string(key), value}
end)
defstruct [:resource, :expression]
@type t :: %__MODULE__{}
def builtins, do: @builtins
def builtin_functions, do: @functions
def builtin_operators, do: @operators
defmodule Simple do
@moduledoc "Represents a simplified filter, with a simple list of predicates"
defstruct [:resource, :predicates]
defmodule Not do
@moduledoc "A negated predicate"
defstruct [:predicate]
end
end
def parse!(resource, statement, aggregates \\ %{}) do
case parse(resource, statement, aggregates) do
{:ok, filter} ->
filter
{:error, error} ->
raise error
end
end
def parse(resource, statement, aggregates \\ %{}) do
context = %{
resource: resource,
relationship_path: [],
aggregates: aggregates
}
case parse_expression(statement, context) do
{:ok, expression} ->
{:ok, %__MODULE__{expression: expression, resource: resource}}
{:error, error} ->
{:error, error}
end
end
@doc "transform an expression based filter to a simple filter, which is just a list of predicates"
def to_simple_filter(%{resource: resource, expression: expression}) do
predicates = get_predicates(expression)
%Simple{resource: resource, predicates: predicates}
end
@doc "Replace any actor value references in a template with the values from a given actor"
def build_filter_from_template(template, actor) do
walk_filter_template(template, fn
{:_actor, :_primary_key} ->
if actor do
Map.take(actor, Ash.Resource.primary_key(actor.__struct__))
else
false
end
{:_actor, field} ->
Map.get(actor || %{}, field)
other ->
other
end)
end
@doc "Whether or not a given template contains an actor reference"
def template_references_actor?({:_actor, _}), do: true
def template_references_actor?(filter) when is_list(filter) do
Enum.any?(filter, &template_references_actor?/1)
end
def template_references_actor?(filter) when is_map(filter) do
Enum.any?(filter, fn {key, value} ->
template_references_actor?(key) || template_references_actor?(value)
end)
end
def template_references_actor?(tuple) when is_tuple(tuple) do
Enum.any?(Tuple.to_list(tuple), &template_references_actor?/1)
end
def template_references_actor?(_), do: false
defp walk_filter_template(filter, mapper) when is_list(filter) do
case mapper.(filter) do
^filter ->
Enum.map(filter, &walk_filter_template(&1, mapper))
other ->
walk_filter_template(other, mapper)
end
end
defp walk_filter_template(filter, mapper) when is_map(filter) do
case mapper.(filter) do
^filter ->
Enum.into(filter, %{}, &walk_filter_template(&1, mapper))
other ->
walk_filter_template(other, mapper)
end
end
defp walk_filter_template(tuple, mapper) when is_tuple(tuple) do
case mapper.(tuple) do
^tuple ->
tuple
|> Tuple.to_list()
|> Enum.map(&walk_filter_template(&1, mapper))
|> List.to_tuple()
other ->
walk_filter_template(other, mapper)
end
end
defp walk_filter_template(value, mapper), do: mapper.(value)
defp get_predicates(expr, acc \\ [])
defp get_predicates(true, acc), do: acc
defp get_predicates(false, _), do: false
defp get_predicates(_, false), do: false
defp get_predicates(%Expression{op: :and, left: left, right: right}, acc) do
acc = get_predicates(left, acc)
get_predicates(right, acc)
end
defp get_predicates(%Not{expression: expression}, acc) do
expression
|> get_predicates()
|> Enum.reduce(acc, fn predicate, acc ->
[%Simple.Not{predicate: predicate} | acc]
end)
end
defp get_predicates(%{__predicate__?: true} = predicate, acc), do: [predicate | acc]
def used_aggregates(filter) do
filter
|> list_predicates()
|> Enum.flat_map(fn
%{__operator__?: true, left: left, right: right} ->
[left, right]
|> Enum.filter(fn
%Ref{attribute: %Aggregate{}} ->
true
_ ->
false
end)
|> Enum.map(& &1.attribute)
%{__function__?: true, arguments: arguments} ->
arguments
|> Enum.filter(fn
%Ash.Query.Ref{attribute: %Aggregate{}} ->
true
_ ->
false
end)
|> Enum.map(& &1.attribute)
end)
end
def run_other_data_layer_filters(api, resource, %{expression: expression} = filter) do
case do_run_other_data_layer_filters(expression, api, resource) do
{:ok, new_expression} -> {:ok, %{filter | expression: new_expression}}
{:error, error} -> {:error, error}
end
end
def run_other_data_layer_filters(_, _, filter) when filter in [nil, true, false],
do: {:ok, filter}
defp do_run_other_data_layer_filters(
%Expression{op: :or, left: left, right: right},
api,
resource
) do
with {:ok, left} <- do_run_other_data_layer_filters(left, api, resource),
{:ok, right} <- do_run_other_data_layer_filters(right, api, resource) do
{:ok, Expression.optimized_new(:or, left, right)}
end
end
defp do_run_other_data_layer_filters(%Expression{op: :and} = expression, api, resource) do
expression
|> relationship_paths(:ands_only)
|> filter_paths_that_change_data_layers(resource)
|> case do
[] ->
{:ok, expression}
paths ->
paths
|> do_run_other_data_layer_filter_paths(expression, resource, api)
|> case do
{:ok, result} -> do_run_other_data_layer_filters(result, api, resource)
{:error, error} -> {:error, error}
end
end
|> case do
{:ok, %Expression{op: :and, left: left, right: right}} ->
with {:ok, new_left} <- do_run_other_data_layer_filters(left, api, resource),
{:ok, new_right} <- do_run_other_data_layer_filters(right, api, resource) do
{:ok, Expression.optimized_new(:and, new_left, new_right)}
end
end
end
defp do_run_other_data_layer_filters(%Not{expression: expression}, api, resource) do
case do_run_other_data_layer_filters(expression, api, resource) do
{:ok, expr} -> {:ok, Not.new(expr)}
{:error, error} -> {:error, error}
end
end
defp do_run_other_data_layer_filters(%{__predicate__?: true} = predicate, api, resource) do
predicate
|> relationship_paths(:ands_only)
|> filter_paths_that_change_data_layers(resource)
|> Enum.find_value(fn path ->
case split_expression_by_relationship_path(predicate, path) do
{nil, _} ->
nil
{for_path, nil} ->
{path, for_path}
end
end)
|> case do
nil ->
{:ok, predicate}
{path, new_predicate} ->
relationship = Ash.Resource.relationship(resource, path)
fetch_related_data(resource, path, new_predicate, api, relationship)
end
end
defp do_run_other_data_layer_filters(other, _api, _resource), do: {:ok, other}
defp do_run_other_data_layer_filter_paths(paths, expression, resource, api) do
Enum.reduce_while(paths, {:ok, expression}, fn path, {:ok, expression} ->
{for_path, without_path} = split_expression_by_relationship_path(expression, path)
relationship = Ash.Resource.relationship(resource, path)
query =
relationship.destination
|> Ash.Query.new(api)
|> Map.put(:filter, %__MODULE__{
expression: for_path,
resource: relationship.destination
})
case filter_related_in(query, relationship, :lists.droplast(path)) do
{:ok, new_predicate} ->
{:cont, {:ok, Expression.optimized_new(:and, without_path, new_predicate)}}
{:error, error} ->
{:halt, {:error, error}}
end
end)
end
defp fetch_related_data(
resource,
path,
new_predicate,
api,
%{type: :many_to_many, join_relationship: join_relationship, through: through} =
relationship
) do
if Ash.Resource.data_layer(through) == Ash.Resource.data_layer(resource) &&
Ash.Resource.data_layer_can?(resource, {:join, through}) do
filter = %__MODULE__{
resource: relationship.destination,
expression: new_predicate
}
relationship.destination
|> Ash.Query.new(api)
|> Ash.Query.filter(filter)
|> filter_related_in(
relationship,
:lists.droplast(path) ++ [join_relationship]
)
else
filter = %__MODULE__{
resource: through,
expression: new_predicate
}
relationship.destination
|> Ash.Query.new(api)
|> Ash.Query.filter(filter)
|> api.read()
|> case do
{:ok, results} ->
relationship.through
|> Ash.Query.new(api)
|> Ash.Query.filter([
{relationship.destination_field_on_join_table,
in: Enum.map(results, &Map.get(&1, relationship.destination_field))}
])
|> filter_related_in(
Ash.Resource.relationship(resource, join_relationship),
:lists.droplast(path)
)
{:error, error} ->
{:error, error}
end
end
end
defp fetch_related_data(
_resource,
path,
new_predicate,
api,
relationship
) do
filter = %__MODULE__{
resource: relationship.destination,
expression: new_predicate
}
relationship.destination
|> Ash.Query.new(api)
|> Ash.Query.filter(filter)
|> filter_related_in(relationship, :lists.droplast(path))
end
defp filter_related_in(query, relationship, path) do
case query.api.read(query) do
{:error, error} ->
{:error, error}
{:ok, records} ->
records_to_expression(
records,
relationship,
path
)
end
end
defp records_to_expression([], _, _), do: {:ok, false}
defp records_to_expression([single_record], relationship, path) do
Ash.Query.Operator.new(
Eq,
%Ref{
relationship_path: path,
resource: relationship.source,
attribute: Ash.Resource.attribute(relationship.source, relationship.source_field)
},
Map.get(single_record, relationship.destination_field)
)
end
defp records_to_expression(records, relationship, path) do
Enum.reduce_while(records, {:ok, nil}, fn record, {:ok, expression} ->
case records_to_expression([record], relationship, path) do
{:ok, operator} ->
{:cont, {:ok, Expression.optimized_new(:and, expression, operator)}}
{:error, error} ->
{:halt, {:error, error}}
end
end)
end
defp filter_paths_that_change_data_layers(paths, resource, acc \\ [])
defp filter_paths_that_change_data_layers([], _resource, acc), do: acc
defp filter_paths_that_change_data_layers([path | rest], resource, acc) do
case shortest_path_to_changed_data_layer(resource, path) do
{:ok, path} ->
new_rest = Enum.reject(rest, &List.starts_with?(&1, path))
filter_paths_that_change_data_layers(new_rest, resource, [path | acc])
:error ->
filter_paths_that_change_data_layers(rest, resource, acc)
end
end
defp shortest_path_to_changed_data_layer(resource, path, acc \\ [])
defp shortest_path_to_changed_data_layer(_resource, [], _acc), do: :error
defp shortest_path_to_changed_data_layer(resource, [relationship | rest], acc) do
relationship = Ash.Resource.relationship(resource, relationship)
if relationship.type == :many_to_many do
if Ash.Resource.data_layer_can?(resource, {:join, relationship.through}) do
shortest_path_to_changed_data_layer(relationship.destination, rest, [
relationship.name | acc
])
else
{:ok, Enum.reverse([relationship.name | acc])}
end
else
if Ash.Resource.data_layer_can?(resource, {:join, relationship.destination}) do
shortest_path_to_changed_data_layer(relationship.destination, rest, [
relationship.name | acc
])
else
{:ok, Enum.reverse([relationship.name | acc])}
end
end
end
def put_at_path(value, []), do: value
def put_at_path(value, [key | rest]), do: [{key, put_at_path(value, rest)}]
def relationship_paths(filter_or_expression, kind \\ :all)
def relationship_paths(nil, _), do: []
def relationship_paths(%{expression: nil}, _), do: []
def relationship_paths(%__MODULE__{expression: expression}, kind),
do: relationship_paths(expression, kind)
def relationship_paths(expression, kind) do
expression
|> do_relationship_paths(kind)
|> List.wrap()
|> List.flatten()
|> Enum.uniq()
|> Enum.map(fn {path} -> path end)
end
def add_to_filter!(base, addition, op \\ :and, aggregates \\ %{}) do
case add_to_filter(base, addition, op, aggregates) do
{:ok, value} ->
value
{:error, error} ->
raise Ash.Error.to_ash_error(error)
end
end
def add_to_filter(base, addition, op \\ :and, aggregates \\ %{})
def add_to_filter(nil, %__MODULE__{} = addition, _, _), do: {:ok, addition}
def add_to_filter(
%__MODULE__{} = base,
%__MODULE__{} = addition,
op,
_
) do
{:ok,
%{
base
| expression: Expression.optimized_new(op, base.expression, addition.expression)
}}
end
def add_to_filter(%__MODULE__{} = base, statement, op, aggregates) do
case parse(base.resource, statement, aggregates) do
{:ok, filter} -> add_to_filter(base, filter, op, aggregates)
{:error, error} -> {:error, error}
end
end
@doc """
Returns true if the second argument is a strict subset (always returns the same or less data) of the first
"""
def strict_subset_of(nil, _), do: true
def strict_subset_of(_, nil), do: false
def strict_subset_of(%{resource: resource}, %{resource: other_resource})
when resource != other_resource,
do: false
def strict_subset_of(filter, candidate) do
Ash.SatSolver.strict_filter_subset(filter, candidate)
end
def strict_subset_of?(filter, candidate) do
strict_subset_of(filter, candidate) == true
end
def relationship_filter_request_paths(filter) do
filter
|> relationship_paths()
|> Enum.map(&[:filter, &1])
end
def read_requests(_, nil), do: {:ok, []}
def read_requests(api, filter) do
filter
|> Ash.Filter.relationship_paths()
|> Enum.map(fn path ->
{path, filter_expression_by_relationship_path(filter, path, true)}
end)
|> Enum.reduce_while({:ok, []}, fn {path, scoped_filter}, {:ok, requests} ->
%{resource: resource} = scoped_filter
with %{errors: []} = query <- Ash.Query.new(resource, api),
%{errors: []} = query <- Ash.Query.filter(query, scoped_filter),
{:action, action} when not is_nil(action) <-
{:action, Ash.Resource.primary_action(resource, :read)} do
request =
Request.new(
resource: resource,
api: api,
query:
Request.resolve(
[[:data, :authorization_filter]],
fn %{
data: %{
authorization_filter: authorization_filter
}
} ->
if authorization_filter do
relationship =
Ash.Resource.relationship(
resource,
List.first(path)
)
case SideLoad.reverse_relationship_path(
relationship,
tl(path)
) do
:error ->
{:ok, query}
{:ok, reverse_relationship} ->
filter = put_at_path(authorization_filter, reverse_relationship)
{:ok, Ash.Query.filter(query, filter)}
end
else
{:ok, query}
end
end
),
async?: false,
path: [:filter, path],
strict_check_only?: true,
action: action,
name: "authorize filter #{Enum.join(path, ".")}",
data: []
)
{:cont, {:ok, [request | requests]}}
else
{:error, error} -> {:halt, {:error, error}}
%{errors: errors} -> {:halt, {:error, errors}}
{:action, nil} -> {:halt, {:error, ReadActionRequired.exception(resource: resource)}}
end
end)
end
def map(%__MODULE__{expression: nil} = filter, _) do
filter
end
def map(%__MODULE__{expression: expression} = filter, func) do
%{filter | expression: do_map(func.(expression), func)}
end
def map(expression, func) do
do_map(func.(expression), func)
end
def do_map(expression, func) do
case expression do
{:halt, expr} ->
expr
%Expression{left: left, right: right} = expr ->
%{expr | left: do_map(left, func), right: do_map(right, func)}
%Not{expression: not_expr} = expr ->
%{expr | expression: do_map(not_expr, func)}
%{__operator__?: true, left: left, right: right} = op ->
%{op | left: do_map(left, func), right: do_map(right, func)}
# bind to `function` to avoid shadowing the `func` mapper
%{__function__?: true, arguments: arguments} = function ->
%{function | arguments: Enum.map(arguments, &do_map(&1, func))}
other ->
func.(other)
end
end
def list_predicates(%__MODULE__{expression: expression}) do
list_predicates(expression)
end
def list_predicates(expression) do
case expression do
%Expression{left: left, right: right} ->
list_predicates(left) ++ list_predicates(right)
%Not{expression: not_expr} ->
list_predicates(not_expr)
%{__predicate__?: true} = pred ->
[pred]
_ ->
[]
end
end
def filter_expression_by_relationship_path(filter, path, scope? \\ false) do
%__MODULE__{
resource: Ash.Resource.related(filter.resource, path),
expression: do_filter_expression_by_relationship_path(filter.expression, path, scope?)
}
end
defp split_expression_by_relationship_path(%{expression: expression}, path) do
split_expression_by_relationship_path(expression, path)
end
defp split_expression_by_relationship_path(
%Expression{op: op, left: left, right: right},
path
) do
{new_for_path_left, new_without_path_left} = split_expression_by_relationship_path(left, path)
{new_for_path_right, new_without_path_right} =
split_expression_by_relationship_path(right, path)
{Expression.optimized_new(op, new_for_path_left, new_for_path_right),
Expression.optimized_new(op, new_without_path_left, new_without_path_right)}
end
defp split_expression_by_relationship_path(%Not{expression: expression}, path) do
{new_for_path, new_without_path} = split_expression_by_relationship_path(expression, path)
{Not.new(new_for_path), Not.new(new_without_path)}
end
defp split_expression_by_relationship_path(
%{
__operator__?: true,
left: %Ref{relationship_path: predicate_path} = left,
right: %Ref{relationship_path: predicate_path}
} = predicate,
path
) do
if List.starts_with?(predicate_path, path) do
new_path = Enum.drop(predicate_path, length(path))
{%{
predicate
| left: %{
left
| relationship_path: new_path
}
}, nil}
else
{nil, predicate}
end
end
defp split_expression_by_relationship_path(
%{__operator__?: true, right: %Ref{}},
_path
) do
raise "Refs not currently supported on the right side of operators with different relationship paths"
end
defp split_expression_by_relationship_path(
%{__operator__?: true, left: %Ref{relationship_path: predicate_path} = ref} = predicate,
path
) do
if List.starts_with?(predicate_path, path) do
new_path = Enum.drop(predicate_path, length(path))
{%{predicate | left: %{ref | relationship_path: new_path}}, nil}
else
{nil, predicate}
end
end
defp split_expression_by_relationship_path(
%{__function__?: true, arguments: arguments} = func,
path
) do
arguments
|> Enum.filter(&match?(%Ref{}, &1))
|> Enum.map(& &1.relationship_path)
|> Enum.uniq()
|> case do
[] ->
{func, func}
[predicate_path] ->
if List.starts_with?(predicate_path, path) do
new_args =
Enum.map(arguments, fn
%Ref{relationship_path: predicate_path} = ref ->
%{ref | relationship_path: Enum.drop(predicate_path, length(path))}
arg ->
arg
end)
{%{func | arguments: new_args}, nil}
else
{nil, func}
end
_ ->
raise "Refs for multiple relationship paths not supported in a single function call"
end
end
defp do_filter_expression_by_relationship_path(
%Expression{op: op, left: left, right: right},
path,
scope?
) do
new_left = do_filter_expression_by_relationship_path(left, path, scope?)
new_right = do_filter_expression_by_relationship_path(right, path, scope?)
Expression.optimized_new(op, new_left, new_right)
end
defp do_filter_expression_by_relationship_path(%Not{expression: expression}, path, scope?) do
new_expression = do_filter_expression_by_relationship_path(expression, path, scope?)
Not.new(new_expression)
end
defp do_filter_expression_by_relationship_path(
%{__operator__?: true, left: left, right: right} = op,
path,
scope?
) do
if scope? do
%{op | left: scope_ref(left, path), right: scope_ref(right, path)}
else
[left, right]
|> Enum.filter(&match?(%Ref{}, &1))
|> Enum.any?(&List.starts_with?(&1.relationship_path, path))
|> case do
true ->
nil
false ->
op
end
end
end
defp do_filter_expression_by_relationship_path(
%{__function__?: true, arguments: arguments} = func,
path,
scope?
) do
if scope? do
%{func | arguments: Enum.map(arguments, &scope_ref(&1, path))}
else
arguments
|> Enum.filter(&match?(%Ref{}, &1))
|> Enum.any?(&List.starts_with?(&1.relationship_path, path))
|> case do
true ->
nil
false ->
func
end
end
end
defp do_filter_expression_by_relationship_path(other, _path, _scope) do
other
end
defp scope_ref(%Ref{} = ref, path) do
if List.starts_with?(ref.relationship_path, path) do
%{ref | relationship_path: Enum.drop(ref.relationship_path, Enum.count(path))}
else
ref
end
end
defp scope_ref(other, _), do: other
defp do_relationship_paths(%Ref{relationship_path: path}, _) do
{path}
end
defp do_relationship_paths(%Expression{op: :or}, :ands_only) do
[]
end
defp do_relationship_paths(%Expression{left: left, right: right}, kind) do
[do_relationship_paths(left, kind), do_relationship_paths(right, kind)]
end
defp do_relationship_paths(%Not{expression: expression}, kind) do
do_relationship_paths(expression, kind)
end
defp do_relationship_paths(%{__operator__?: true, left: left, right: right}, kind) do
[do_relationship_paths(left, kind), do_relationship_paths(right, kind)]
end
  defp do_relationship_paths(%{__function__?: true, arguments: arguments}, kind) do
Enum.map(arguments, &do_relationship_paths(&1, kind))
end
defp do_relationship_paths(_, _), do: []
defp parse_expression(%__MODULE__{expression: expression}, context),
do: {:ok, add_to_predicate_path(expression, context)}
defp parse_expression(statement, context) when is_map(statement) or is_list(statement) do
Enum.reduce_while(statement, {:ok, nil}, fn expression_part, {:ok, expression} ->
case add_expression_part(expression_part, context, expression) do
{:ok, new_expression} ->
{:cont, {:ok, new_expression}}
{:error, error} ->
{:halt, {:error, error}}
end
end)
end
defp parse_expression(statement, context) do
parse_expression([statement], context)
end
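  # Each clause below folds one statement (boolean, filter struct, record,
  # keyword pair, map, or list) into the accumulated expression with an
  # implicit :and.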
defp add_expression_part(boolean, _context, expression) when is_boolean(boolean),
do: {:ok, Expression.optimized_new(:and, expression, boolean)}
defp add_expression_part(%__MODULE__{expression: adding_expression}, context, expression) do
{:ok,
Expression.optimized_new(
:and,
expression,
add_to_predicate_path(adding_expression, context)
)}
end
defp add_expression_part(%resource{} = record, context, expression) do
if resource == context.resource do
pkey_filter = record |> Map.take(Ash.Resource.primary_key(resource)) |> Map.to_list()
add_expression_part(pkey_filter, context, expression)
else
{:error,
InvalidFilterValue.exception(
value: record,
message: "Records must match the resource being filtered"
)}
end
end
defp add_expression_part({not_key, nested_statement}, context, expression)
when not_key in [:not, "not"] do
case parse_expression(nested_statement, context) do
{:ok, nested_expression} ->
{:ok, Expression.optimized_new(:and, expression, Not.new(nested_expression))}
{:error, error} ->
{:error, error}
end
end
defp add_expression_part({or_key, nested_statements}, context, expression)
when or_key in [:or, "or"] do
with {:ok, nested_expression} <- parse_and_join(nested_statements, :or, context),
:ok <- validate_data_layers_support_boolean_filters(nested_expression) do
{:ok, Expression.optimized_new(:and, expression, nested_expression)}
end
end
defp add_expression_part({and_key, nested_statements}, context, expression)
when and_key in [:and, "and"] do
case parse_and_join(nested_statements, :and, context) do
{:ok, nested_expression} ->
{:ok, Expression.optimized_new(:and, expression, nested_expression)}
{:error, error} ->
{:error, error}
end
end
defp add_expression_part({%Ref{} = ref, nested_statement}, context, expression) do
new_context = %{
relationship_path: ref.relationship_path,
resource: Ash.Resource.related(context.resource, ref.relationship_path),
aggregates: context.aggregates
}
add_expression_part({ref.attribute.name, nested_statement}, new_context, expression)
end
defp add_expression_part({field, nested_statement}, context, expression)
when is_atom(field) or is_binary(field) do
aggregates =
Enum.flat_map(context.aggregates, fn {key, _} ->
[key, to_string(key)]
end)
cond do
function_module = get_function(field, Ash.Resource.data_layer_functions(context.resource)) ->
case Function.new(function_module, List.wrap(nested_statement), %Ref{
relationship_path: context.relationship_path,
resource: context.resource
}) do
{:ok, function} ->
{:ok, Expression.optimized_new(:and, expression, function)}
{:error, error} ->
{:error, error}
end
attr = Ash.Resource.attribute(context.resource, field) ->
case parse_predicates(nested_statement, attr, context) do
{:ok, nested_statement} ->
{:ok, Expression.optimized_new(:and, expression, nested_statement)}
{:error, error} ->
{:error, error}
end
rel = Ash.Resource.relationship(context.resource, field) ->
context =
context
|> Map.update!(:relationship_path, fn path -> path ++ [rel.name] end)
|> Map.put(:resource, rel.destination)
if is_list(nested_statement) || is_map(nested_statement) do
case parse_expression(nested_statement, context) do
{:ok, nested_expression} ->
{:ok, Expression.optimized_new(:and, expression, nested_expression)}
{:error, error} ->
{:error, error}
end
else
with [field] <- Ash.Resource.primary_key(context.resource),
attribute <- Ash.Resource.attribute(context.resource, field),
{:ok, casted} <-
Ash.Type.cast_input(attribute.type, nested_statement) do
add_expression_part({field, casted}, context, expression)
else
_other ->
{:error,
InvalidFilterValue.exception(
value: inspect(nested_statement),
message:
"A single value must be castable to the primary key of the resource: #{
inspect(context.resource)
}"
)}
end
end
field in aggregates ->
field =
if is_binary(field) do
String.to_existing_atom(field)
else
field
end
add_aggregate_expression(context, nested_statement, field, expression)
true ->
{:error,
NoSuchAttributeOrRelationship.exception(
attribute_or_relationship: field,
resource: context.resource
)}
end
end
defp add_expression_part(value, context, expression) when is_map(value) do
# Can't call `parse_expression/2` here because it will loop
value
|> Map.to_list()
|> Enum.reduce_while({:ok, nil}, fn {key, value}, {:ok, expression} ->
case add_expression_part({key, value}, context, expression) do
{:ok, new_expression} ->
{:cont, {:ok, new_expression}}
{:error, error} ->
{:halt, {:error, error}}
end
end)
|> case do
{:ok, new_expression} ->
{:ok, Expression.optimized_new(:and, expression, new_expression)}
{:error, error} ->
{:error, error}
end
end
defp add_expression_part(value, context, expression) when is_list(value) do
Enum.reduce_while(value, {:ok, expression}, fn value, {:ok, expression} ->
case add_expression_part(value, context, expression) do
{:ok, expression} -> {:cont, {:ok, expression}}
{:error, error} -> {:halt, {:error, error}}
end
end)
end
defp add_expression_part(value, _, _) do
{:error, InvalidFilterValue.exception(value: value)}
end
defp add_aggregate_expression(context, nested_statement, field, expression) do
if Ash.Resource.data_layer_can?(context.resource, :aggregate_filter) do
case parse_predicates(nested_statement, Map.get(context.aggregates, field), context) do
{:ok, nested_statement} ->
{:ok, Expression.optimized_new(:and, expression, nested_statement)}
{:error, error} ->
{:error, error}
end
else
{:error, AggregatesNotSupported.exception(resource: context.resource, feature: "filtering")}
end
end
defp validate_data_layers_support_boolean_filters(%Expression{
op: :or,
left: left,
right: right
}) do
left_resources =
left
|> map(fn
%Ref{} = ref ->
[ref.resource]
_ ->
[]
end)
|> List.flatten()
|> Enum.uniq()
right_resources =
right
|> map(fn
%Ref{} = ref ->
[ref.resource]
_ ->
[]
end)
|> List.flatten()
|> Enum.uniq()
left_resources
|> Enum.filter(&(&1 in right_resources))
|> Enum.reduce_while(:ok, fn resource, :ok ->
if Ash.Resource.data_layer_can?(resource, :boolean_filter) do
{:cont, :ok}
else
{:halt, {:error, "Data layer for #{resource} does not support boolean filters"}}
end
end)
end
defp validate_data_layers_support_boolean_filters(_), do: :ok
defp add_to_predicate_path(expression, context) do
case expression do
%Not{expression: expression} = not_expr ->
%{not_expr | expression: add_to_predicate_path(expression, context)}
%Expression{left: left, right: right} = expression ->
%{
expression
| left: add_to_predicate_path(left, context),
right: add_to_predicate_path(right, context)
}
%{__operator__?: true, left: left, right: right} = op ->
left = add_to_ref_path(left, context.relationship_path)
right = add_to_ref_path(right, context.relationship_path)
%{op | left: left, right: right}
%{__function__?: true, arguments: args} = func ->
%{func | arguments: Enum.map(args, &add_to_ref_path(&1, context.relationship_path))}
other ->
other
end
end
defp add_to_ref_path(%Ref{relationship_path: relationship_path} = ref, to_add) do
%{ref | relationship_path: to_add ++ relationship_path}
end
defp add_to_ref_path(other, _), do: other
defp parse_and_join(statements, op, context) do
Enum.reduce_while(statements, {:ok, nil}, fn statement, {:ok, expression} ->
case parse_expression(statement, context) do
{:ok, nested_expression} ->
{:cont, {:ok, Expression.optimized_new(op, expression, nested_expression)}}
{:error, error} ->
{:halt, {:error, error}}
end
end)
end
defp parse_predicates(value, field, context) when not is_list(value) and not is_map(value) do
parse_predicates([eq: value], field, context)
end
defp parse_predicates(values, attr, context) do
if is_map(values) || Keyword.keyword?(values) do
Enum.reduce_while(values, {:ok, nil}, fn {key, value}, {:ok, expression} ->
case get_operator(key, Ash.Resource.data_layer_operators(context.resource)) do
nil ->
error = NoSuchFilterPredicate.exception(key: key, resource: context.resource)
{:halt, {:error, error}}
operator_module ->
left = %Ref{
attribute: attr,
relationship_path: context.relationship_path,
resource: context.resource
}
case Operator.new(operator_module, left, value) do
{:ok, boolean} when is_boolean(boolean) ->
{:cont, {:ok, boolean}}
{:ok, operator} ->
if Ash.Resource.data_layer_can?(context.resource, {:filter_operator, operator}) do
{:cont, {:ok, Expression.optimized_new(:and, expression, operator)}}
else
{:halt,
{:error, "data layer does not support the operator #{inspect(operator)}"}}
end
{:error, error} ->
{:halt, {:error, error}}
end
end
end)
else
error = InvalidFilterValue.exception(value: values)
      {:error, error}
end
end
defp get_function(key, data_layer_functions) when is_atom(key) do
@builtin_functions[key] || Enum.find(data_layer_functions, &(&1.name() == key))
end
defp get_function(key, data_layer_functions) when is_binary(key) do
Map.get(@string_builtin_functions, key) ||
      Enum.find(data_layer_functions, &(to_string(&1.name()) == key))
end
defp get_operator(key, data_layer_operators) when is_atom(key) do
@builtin_operators[key] || Enum.find(data_layer_operators, &(&1.operator() == key))
end
defp get_operator(key, data_layer_operators) when is_binary(key) do
Map.get(@string_builtin_operators, key) ||
      Enum.find(data_layer_operators, &(to_string(&1.operator()) == key))
end
defp get_operator(_, _), do: nil
defimpl Inspect do
import Inspect.Algebra
@custom_colors [
number: :cyan
]
def inspect(
%{expression: expression},
opts
) do
opts = %{opts | syntax_colors: Keyword.merge(opts.syntax_colors, @custom_colors)}
concat(["#Ash.Filter<", to_doc(expression, opts), ">"])
end
end
end
|
lib/ash/filter/filter.ex
| 0.92391 | 0.903635 |
filter.ex
|
starcoder
|
defmodule Ockam.Examples.Stream.BiDirectional.SecureChannel do
@moduledoc """
Ping-pong example for bi-directional stream communication
Use-case: integrate ockam nodes which implement stream protocol consumer and publisher
Pre-requisites:
Ockam hub running with stream service and TCP listener
Two ockam nodes "ping" and "pong"
Expected behaviour:
Two nodes "ping" and "pong" send messages to each other using two streams:
"sc_listener_topic" to send messages to "pong" node
"sc_initiator_topic" to send messages to "ping" node
Implementation:
Stream service is running on the hub node
Ping and pong nodes create local consumers and publishers to exchange messages
Ping establishes an ordered channel to pong over the stream publisher
Ping creates a secure channel over the ordered channel
  Ping exchanges messages with pong using the secure channel
"""
alias Ockam.SecureChannel
alias Ockam.Vault
alias Ockam.Vault.Software, as: SoftwareVault
alias Ockam.Examples.Ping
alias Ockam.Examples.Pong
alias Ockam.Stream.Client.BiDirectional
alias Ockam.Stream.Client.BiDirectional.PublisherRegistry
alias Ockam.Messaging.PipeChannel
alias Ockam.Messaging.Ordering.Strict.IndexPipe
alias Ockam.Transport.TCP
require Logger
## Ignore no local return for secure channel
@dialyzer :no_return
def simple_ping_pong() do
{:ok, "pong"} = Pong.create(address: "pong")
{:ok, "ping"} = Ping.create(address: "ping")
send_message(["pong"], ["ping"], "0")
end
def outline() do
## On one node:
Ockam.Examples.Stream.BiDirectional.SecureChannel.init_pong()
## On another node:
Ockam.Examples.Stream.BiDirectional.SecureChannel.run()
end
def config() do
%{
hub_ip: "127.0.0.1",
hub_port: 4000,
hub_port_udp: 7000,
service_address: "stream_kafka",
index_address: "stream_kafka_index",
ping_stream: "ping_stream",
pong_stream: "pong_stream"
}
end
def init_pong() do
TCP.start()
## PONG worker
{:ok, "pong"} = Pong.create(address: "pong")
## Create secure channel listener
create_secure_channel_listener()
## Create ordered channel spawner
{:ok, "ord_channel_spawner"} =
PipeChannel.Spawner.create(
responder_options: [pipe_mods: IndexPipe],
address: "ord_channel_spawner"
)
config = config()
## Create a local subscription to forward pong_topic messages to local node
subscribe(config.pong_stream, "pong", :tcp)
end
def run() do
TCP.start()
## PING worker
{:ok, "ping"} = Ping.create(address: "ping")
config = config()
## Subscribe to response topic
subscribe(config.ping_stream, "ping", :tcp)
## Create local publisher worker to forward to pong_topic and add metadata to
## messages to send responses to ping_topic
{:ok, publisher} = init_publisher(config.pong_stream, config.ping_stream, "ping", :tcp)
## Create an ordered channel over the stream communication
## Strictly ordered channel would de-duplicate messages
{:ok, ord_channel} =
PipeChannel.Initiator.create(
pipe_mods: IndexPipe,
spawner_route: [publisher, "ord_channel_spawner"]
)
## Create a secure channel over the ordered channel
{:ok, channel} = create_secure_channel([ord_channel, "SC_listener"])
## Send a message THROUGH the channel to the remote worker
send_message([channel, "pong"], ["ping"], "0")
end
def init_publisher(publisher_stream, consumer_stream, subscription_id, protocol \\ :tcp) do
BiDirectional.ensure_publisher(
consumer_stream,
publisher_stream,
subscription_id,
stream_options(protocol)
)
end
def subscribe(stream, subscription_id, protocol \\ :tcp) do
## Local subscribe
## Create bidirectional subscription on local node
## using stream service configuration from stream_options
{:ok, consumer} = BiDirectional.subscribe(stream, subscription_id, stream_options(protocol))
wait(fn ->
# Logger.info("Consumer: #{consumer} ready?")
ready = Ockam.Stream.Client.Consumer.ready?(consumer)
# Logger.info("#{ready}")
ready
end)
## This is necessary to make sure we don't spawn publisher for each message
PublisherRegistry.start_link([])
end
defp create_secure_channel_listener() do
{:ok, vault} = SoftwareVault.init()
{:ok, identity} = Vault.secret_generate(vault, type: :curve25519)
SecureChannel.create_listener(
vault: vault,
identity_keypair: identity,
address: "SC_listener"
)
end
defp create_secure_channel(route_to_listener) do
{:ok, vault} = SoftwareVault.init()
{:ok, identity} = Vault.secret_generate(vault, type: :curve25519)
{:ok, c} =
SecureChannel.create(route: route_to_listener, vault: vault, identity_keypair: identity)
wait(fn -> SecureChannel.established?(c) end)
{:ok, c}
end
defp wait(fun) do
case fun.() do
true ->
:ok
false ->
:timer.sleep(100)
wait(fun)
end
end
def send_message(onward_route, return_route, payload) do
msg = %{
onward_route: onward_route,
return_route: return_route,
payload: payload
}
Ockam.Router.route(msg)
end
def ensure_udp(port) do
Ockam.Transport.UDP.start(port: port)
end
def stream_options(protocol) do
config = config()
{:ok, hub_ip_n} = :inet.parse_address(to_charlist(config.hub_ip))
tcp_address = Ockam.Transport.TCPAddress.new(hub_ip_n, config.hub_port)
udp_address = Ockam.Transport.UDPAddress.new(hub_ip_n, config.hub_port_udp)
hub_address =
case protocol do
:tcp -> tcp_address
:udp -> udp_address
end
[
service_route: [hub_address, config.service_address],
index_route: [hub_address, config.index_address],
partitions: 1
]
end
end
|
implementations/elixir/ockam/ockam/lib/ockam/examples/stream/bi_directional/secure_channel.ex
| 0.844104 | 0.525004 |
secure_channel.ex
|
starcoder
|
defmodule CircuitRunner do
use Bitwise
require CircuitParser
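  # Expected input: one gate definition per line, in the Advent of Code
  # day 7 style that this module's get_func/1 supports, e.g.
  #
  #   123 -> x
  #   x AND y -> d
  #   NOT x -> h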
def main([filename, output|_]) do
circuit = parse_file(filename)
    {_circuit, value} = get_gate(circuit, String.to_atom(output))
IO.inspect(value)
end
def main([filename|_]) do
IO.puts("No output gate. Defaulting to 'a'")
main([filename, "a"])
end
def main([]) do
IO.puts("No arguments. Defaulting to day7.txt and gate 'a'")
main(["day7.txt", "a"])
end
defp parse_file(filename), do: parse_file(File.open!(filename, [:read]), Map.new)
defp parse_file(file, acc) do
row = IO.read(file, :line)
if (row != :eof) do
      parsed_row = CircuitParser.parse(to_charlist(String.trim(row)))
parse_file(file, parse_row(parsed_row, acc))
else
acc
end
end
  defp parse_row({gate, operation}, acc) do
Map.put(acc, gate, operation)
end
defp update(circuit, gate, _value) when is_integer(gate), do: circuit
defp update(circuit, gate, value) do
Map.update!(circuit, gate, fn _ -> value end)
end
defp get_operand_values(circuit, operands) do
Enum.reduce(Enum.reverse(operands), {circuit, []},
fn (operand, {circuit, operand_values}) ->
{circuit, value} = get_gate(circuit, operand)
circuit = update(circuit, operand, value) # Memoize operand value
{circuit, [value | operand_values]}
end)
end
defp get_func(opcode) do
case opcode do
:AND -> &band/2
:OR -> &bor/2
:RSHIFT -> &bsr/2
:LSHIFT -> &bsl/2
:'->' -> &(&1)
:NOT -> &bnot/1
end
end
# Returns {circuit, value} with the memoized values for each operand gate and the
# value of the current gate
defp get_gate(circuit, gate) when is_atom(gate) do
gate_value = Map.get(circuit, gate)
if is_integer(gate_value) do
{circuit, gate_value} # Got a memoized value
else
[opcode | operands] = Tuple.to_list(gate_value)
{circuit, operand_values} = get_operand_values(circuit, operands)
value = apply(get_func(opcode), operand_values)
{circuit, value}
end
end
defp get_gate(circuit, gate) when is_integer(gate), do: {circuit, gate}
end
|
advent_umbrella_2016/apps/day7/lib/circuit_runner.ex
| 0.696991 | 0.449513 |
circuit_runner.ex
|
starcoder
|
defmodule SMPPEX.Pdu.PP do
@moduledoc """
Module for colored pretty printing Pdu structs.
"""
alias IO.ANSI, as: C
alias SMPPEX.Pdu
alias SMPPEX.Protocol.TlvFormat
@pad ""
@indent " "
@field_inspect_limit 999999
@spec format(pdu :: Pdu.t, indent :: String.t, pad :: String.t) :: iolist
@doc """
Forms an iolist containing colored Pdu dump.
`indent` is the string prepended to each line of the dump ("#{@indent}" by default).
`pad` is the string prepended to nested lines of the dump together with `indent`.
The default is "#{@pad}".
## Example
iex> pdu = SMPPEX.Pdu.Factory.submit_sm({"from", 1, 1}, {"to", 1, 1}, "hello")
Then `pdu |> SMPPEX.Pdu.PP.format |> IO.puts` will print:
```
pdu: submit_sm
command_id: 4
command_status: 0 (ok)
sequence_number: 0
mandatory fields:
dest_addr_npi: 1
dest_addr_ton: 1
destination_addr: "to"
registered_delivery: 0
short_message: "hello"
source_addr: "from"
source_addr_npi: 1
source_addr_ton: 1
optional fields: []
```
"""
def format(pdu, indent \\ @indent, pad \\ @pad) do
["\n", pdu |> pdu_lines |> Enum.map(fn([section_head | section_lines]) ->
[pad, section_head, "\n", section_lines |> Enum.map(fn(line) ->
[pad, indent, line, "\n"]
end) ]
end) ]
end
defp pdu_lines(pdu) do
[
header(pdu),
mandatory_fields(pdu),
optional_fields(pdu)
]
end
defp header(pdu) do
[
name(pdu),
[pp_field_name("command_id"), ": ", pdu |> Pdu.command_id |> inspect |> pp_val],
[pp_field_name("command_status"), ": ", pdu |> Pdu.command_status |> pp_command_status],
[pp_field_name("sequence_number"), ": ", pdu |> Pdu.sequence_number |> inspect |> pp_val]
]
end
defp name(pdu) do
["pdu: ", pp_command_name(pdu)]
end
defp mandatory_fields(pdu) do
[["mandatory fields:", pdu |> Pdu.mandatory_fields |> pp_empty_list]] ++
(pdu |> Pdu.mandatory_fields |> Map.to_list |> pp_fields)
end
defp optional_fields(pdu) do
[["optional fields:", pdu |> Pdu.optional_fields |> pp_empty_list]] ++
(pdu |> Pdu.optional_fields |> Map.to_list |> name_known_tlvs |> pp_fields)
end
defp name_known_tlvs(_, res \\ [])
defp name_known_tlvs([], res), do: Enum.reverse(res)
defp name_known_tlvs([{k, v} | left], res) do
case TlvFormat.name_by_id(k) do
{:ok, name} -> name_known_tlvs(left, [{name, v} | res])
:unknown -> name_known_tlvs(left, [{k, v} | res])
end
end
defp pp_empty_list(map) when map == %{}, do: " []"
defp pp_empty_list(_), do: ""
defp pp_command_status(status) do
case status do
0 -> [C.green, C.bright, "0 (ok)", C.reset]
_ -> [C.red, C.bright, "#{status} (error)", C.reset]
end
end
defp pp_field_name(field_name) do
[C.green, field_name, C.reset]
end
defp pp_val(str) do
[C.yellow, str, C.reset]
end
defp pp_fields(fields) do
fields |> Enum.sort |> Enum.map(fn({key, val}) ->
[key |> to_string |> pp_field_name , ": ", val |> inspect(limit: @field_inspect_limit) |> pp_val]
end)
end
defp pp_command_name(pdu) do
name = pdu |> Pdu.command_name |> to_string
[C.cyan, C.bright, name, C.reset]
end
end
|
lib/smppex/pdu/pp.ex
| 0.805058 | 0.677904 |
pp.ex
|
starcoder
|
defmodule Quantum.Storage do
@moduledoc """
Behaviour to be implemented by all Storage Adapters.
The calls to the storage are blocking, make sure they're fast to not block the job execution.
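
  A minimal no-op adapter sketch (module name and bodies are illustrative,
  not part of Quantum):

  ```elixir
  defmodule MyApp.NoopStorage do
    @behaviour Quantum.Storage

    # No process is needed, so the start function simply returns :ignore.
    def start_link(_arg), do: :ignore

    @impl true
    def child_spec(init_arg),
      do: %{id: __MODULE__, start: {__MODULE__, :start_link, [init_arg]}}

    @impl true
    def jobs(_storage_pid), do: :not_applicable

    @impl true
    def add_job(_storage_pid, _job), do: :ok

    @impl true
    def delete_job(_storage_pid, _job_name), do: :ok

    @impl true
    def update_job_state(_storage_pid, _job_name, _state), do: :ok

    @impl true
    def last_execution_date(_storage_pid), do: :unknown

    @impl true
    def update_last_execution_date(_storage_pid, _date), do: :ok

    @impl true
    def purge(_storage_pid), do: :ok
  end
  ```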
"""
alias Quantum.Job
@typedoc """
The location of the `server`.
### Values
* `nil` if the storage was not started
* `server()` if the storage was started
"""
@type storage_pid :: nil | GenServer.server()
@doc """
Storage child spec
If the storage does not need a process, specify a function that returns `:ignore`.
### Values
* `:scheduler` - The Scheduler
"""
@callback child_spec(init_arg :: Keyword.t()) :: Supervisor.child_spec()
@doc """
Load saved jobs from storage.
Returns `:not_applicable` if the storage has never received an `add_job` call or after it has been purged.
In this case the jobs from the configuration will be loaded.
"""
@callback jobs(storage_pid :: storage_pid) ::
:not_applicable | [Job.t()]
@doc """
Save new job in storage.
"""
@callback add_job(storage_pid :: storage_pid, job :: Job.t()) ::
:ok
@doc """
Delete new job in storage.
"""
@callback delete_job(storage_pid :: storage_pid, job :: Job.name()) :: :ok
@doc """
Change Job State from given job.
"""
@callback update_job_state(storage_pid :: storage_pid, job :: Job.name(), state :: Job.state()) ::
:ok
@doc """
Load last execution time from storage.
Returns `:unknown` if the storage does not know the last execution time.
In this case all jobs will be run at the next applicable date.
"""
@callback last_execution_date(storage_pid :: storage_pid) :: :unknown | NaiveDateTime.t()
@doc """
Update last execution time to given date.
"""
@callback update_last_execution_date(
storage_pid :: storage_pid,
last_execution_date :: NaiveDateTime.t()
) :: :ok
@doc """
  Purge all data from storage and go back to the initial state.
"""
@callback purge(storage_pid :: storage_pid) :: :ok
@doc """
Updates existing job in storage.
This callback is optional. If not implemented then the `c:delete_job/2`
and then the `c:add_job/2` callbacks will be called instead.
"""
@callback update_job(storage_pid :: storage_pid, job :: Job.t()) :: :ok
@optional_callbacks update_job: 2
end
|
lib/quantum/storage.ex
| 0.919971 | 0.502258 |
storage.ex
|
starcoder
|
defmodule Sneex.AddressMode do
@moduledoc """
This module contains the logic for converting an address offset into a full address
using the current state of the CPU and the logic for each addressing mode.
"""
alias Sneex.Address.Helper
alias Sneex.{BasicTypes, Cpu}
use Bitwise
@typep word :: BasicTypes.word()
@typep long :: BasicTypes.long()
@spec absolute_indexed_indirect(Cpu.t()) :: long()
def absolute_indexed_indirect(cpu = %Cpu{}) do
pbr = cpu |> Cpu.program_bank()
operand = cpu |> Cpu.read_operand(2)
addr =
pbr
|> Helper.absolute_offset(operand)
|> Helper.indexed(cpu, :x)
|> Helper.read_indirect(cpu, 2)
pbr |> Helper.absolute_offset(addr)
end
@spec absolute_indirect(Cpu.t()) :: long()
def absolute_indirect(cpu = %Cpu{}) do
addr = cpu |> Cpu.read_operand(2) |> Helper.read_indirect(cpu, 2)
cpu |> Cpu.program_bank() |> Helper.absolute_offset(addr)
end
@spec absolute_indirect_long(Cpu.t()) :: long()
def absolute_indirect_long(cpu = %Cpu{}),
do: cpu |> Cpu.read_operand(2) |> Helper.read_indirect(cpu, 3)
@spec absolute_long(Cpu.t()) :: long()
def absolute_long(cpu = %Cpu{}), do: cpu |> Cpu.read_operand(3)
@spec absolute_long_indexed_x(Cpu.t()) :: long()
def absolute_long_indexed_x(cpu = %Cpu{}),
do: cpu |> Cpu.read_operand(3) |> Helper.indexed(cpu, :x)
@spec block_move(Cpu.t()) :: {long(), long(), long()}
def block_move(cpu = %Cpu{}) do
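    # The 2-byte operand packs the source bank in its high byte and the
    # destination bank in its low byte; X and Y index within those banks.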
operand = cpu |> Cpu.read_operand(2)
src_bank = operand |> band(0xFF00) |> bsl(8)
src_addr = src_bank + Cpu.x(cpu)
dst_bank = operand |> band(0x00FF) |> bsl(16)
dst_addr = dst_bank + Cpu.y(cpu)
{src_addr, dst_addr, Cpu.c(cpu) + 1}
end
@spec direct_page(Cpu.t(), word()) :: long()
def direct_page(cpu = %Cpu{}, address_offset),
do: cpu |> Cpu.direct_page() |> Helper.calc_offset(address_offset)
@spec direct_page_indexed_x(Cpu.t(), word()) :: long()
def direct_page_indexed_x(cpu = %Cpu{}, address_offset),
do: direct_page(cpu, address_offset + Cpu.x(cpu))
@spec direct_page_indexed_y(Cpu.t(), word()) :: long()
def direct_page_indexed_y(cpu = %Cpu{}, address_offset),
do: direct_page(cpu, address_offset + Cpu.y(cpu))
@spec direct_page_indexed_indirect(Cpu.t()) :: long()
def direct_page_indexed_indirect(cpu = %Cpu{}) do
temp_addr = Cpu.x(cpu) + Cpu.read_operand(cpu, 1)
addr = cpu |> direct_page(temp_addr) |> Helper.read_indirect(cpu, 2)
cpu |> Cpu.data_bank() |> Helper.absolute_offset(addr)
end
@spec direct_page_indirect(Cpu.t()) :: long()
def direct_page_indirect(cpu = %Cpu{}) do
operand = cpu |> Cpu.read_operand(1)
addr = cpu |> direct_page(operand) |> Helper.read_indirect(cpu, 2)
cpu |> Cpu.data_bank() |> Helper.absolute_offset(addr)
end
@spec direct_page_indirect_long(Cpu.t()) :: long()
def direct_page_indirect_long(cpu = %Cpu{}) do
operand = cpu |> Cpu.read_operand(1)
cpu |> direct_page(operand) |> Helper.read_indirect(cpu, 3)
end
@spec direct_page_indirect_indexed_y(Cpu.t()) :: long()
def direct_page_indirect_indexed_y(cpu = %Cpu{}) do
operand = cpu |> Cpu.read_operand(1)
base_addr = cpu |> direct_page(operand) |> Helper.read_indirect(cpu, 2)
cpu |> Cpu.data_bank() |> Helper.absolute_offset(base_addr) |> Helper.indexed(cpu, :y)
end
@spec direct_page_indirect_long_indexed_y(Cpu.t()) :: long()
def direct_page_indirect_long_indexed_y(cpu = %Cpu{}) do
operand = cpu |> Cpu.read_operand(1)
cpu |> direct_page(operand) |> Helper.read_indirect(cpu, 3) |> Helper.indexed(cpu, :y)
end
defp program_counter(cpu = %Cpu{}, offset) do
pc_with_offset = cpu |> Cpu.pc() |> Helper.calc_offset(offset)
cpu |> Cpu.program_bank() |> Helper.absolute_offset(pc_with_offset)
end
@spec program_counter_relative(Cpu.t()) :: long()
def program_counter_relative(cpu = %Cpu{}) do
operand = cpu |> Cpu.read_operand(1) |> BasicTypes.signed_byte()
cpu |> program_counter(operand + 2)
end
@spec program_counter_relative_long(Cpu.t()) :: long()
def program_counter_relative_long(cpu = %Cpu{}) do
operand = cpu |> Cpu.read_operand(2) |> BasicTypes.signed_word()
cpu |> program_counter(operand + 3)
end
@spec stack_relative(Cpu.t()) :: long()
def stack_relative(cpu = %Cpu{}) do
operand = cpu |> Cpu.read_operand(1)
cpu |> Cpu.stack_ptr() |> Helper.calc_offset(operand)
end
@spec stack_relative_indirect_indexed_y(Cpu.t()) :: long()
def stack_relative_indirect_indexed_y(cpu = %Cpu{}) do
offset = cpu |> stack_relative() |> Helper.read_indirect(cpu, 2)
cpu |> Cpu.data_bank() |> Helper.absolute_offset(offset) |> Helper.indexed(cpu, :y)
end
end
|
lib/sneex/address_mode.ex
| 0.728265 | 0.448487 |
address_mode.ex
|
starcoder
|
defmodule Grakn.Protocol do
@moduledoc """
This is the DBConnection behaviour implementation for Grakn database
"""
use DBConnection
defstruct [:session, :transaction]
defguardp transaction_open?(tx) when not is_nil(tx)
def checkin(state) do
# empty - process is independent from state
{:ok, state}
end
def checkout(state) do
# empty - process is independent from state
{:ok, state}
end
def connect(opts) do
case Grakn.Session.new(connection_uri(opts)) do
{:ok, session} -> {:ok, %__MODULE__{session: session}}
end
end
def disconnect(_error, %{session: session}) do
Grakn.Session.close(session)
end
  def handle_begin(_opts, %{transaction: tx} = state) when transaction_open?(tx) do
{:error, Grakn.Error.exception("Transaction already opened on this connection"), state}
end
def handle_begin(opts, %{session: session} = state) do
case Grakn.Session.transaction(session) do
{:ok, tx} ->
{:ok, tx} = Grakn.Transaction.open(tx, opts[:keyspace] || "grakn", opts[:type] || Grakn.Transaction.Type.read())
{:ok, nil, %{state | transaction: tx}}
{:error, reason} ->
{:disconnect, Grakn.Error.exception("Failed to create transaction", reason), state}
end
end
def handle_commit(_opts, %{transaction: tx} = state) when transaction_open?(tx) do
:ok = Grakn.Transaction.commit(tx)
{:ok, nil, %{state | transaction: nil}}
end
def handle_commit(_opts, state) do
{:error, Grakn.Error.exception("Cannot commit if transaction is not open"), state}
end
def handle_execute(%{graql: graql}, _params, opts, %{transaction: tx} = state)
when transaction_open?(tx) do
case Grakn.Transaction.query(tx, graql, Keyword.get(opts, :include_inferences)) do
{:ok, iterator} ->
{:ok, iterator, state}
{:error, reason} ->
{:error,
Grakn.Error.exception(
"Failed to execute #{inspect(graql)}. Reason: #{Map.get(reason, :message, "unknown")}",
reason),
state
}
end
end
def handle_execute(%Grakn.Query{}, _, _, state) do
{:error, Grakn.Error.exception("Cannot execute a query before starting a tranaction"), state}
end
def handle_execute(%Grakn.Command{command: command, params: params}, _, _, %{session: session} = state) do
session
|> Grakn.Session.command(command, params)
|> Tuple.append(state)
end
def handle_rollback(_opts, %{transaction: tx} = state) do
:ok = Grakn.Transaction.cancel(tx)
{:ok, nil, %{state | transaction: nil}}
end
defp connection_uri(opts) do
"#{Keyword.fetch!(opts, :hostname)}:#{Keyword.get(opts, :port, 48555)}"
end
end
|
lib/grakn/protocol.ex
| 0.757705 | 0.401658 |
protocol.ex
|
starcoder
|
defmodule EWalletDB.SoftDelete do
@moduledoc """
Allows soft delete of Ecto records.
Requires a `:deleted_at` column with type `:naive_datetime_usec` on the schema.
The type `:naive_datetime_usec` is used so that it aligns with `Ecto.Migration.timestamps/2`.
See https://elixirforum.com/t/10129 and https://elixirforum.com/t/9910.
# Usage
First, create a new migration that adds `:deleted_at` column:
```
defmodule EWalletDB.Repo.Migrations.AddDeletedAtToSomeSchema do
use Ecto.Migration
def change do
alter table(:some_schema) do
add :deleted_at, :naive_datetime_usec
end
create index(:some_schema, [:deleted_at])
end
end
```
Then, implement soft delete in the schema.
  To avoid conflicts with any `delete/2` and/or `restore/2` that may vary between schemas,
  those two functions are not automatically injected with `use`. In order to use them,
  implement your own `delete/2` and `restore/2` that call this module instead.
```
defmodule SomeSchema do
# ...
use EWalletDB.SoftDelete
schema "some_schema" do
# field :id, ...
soft_delete()
end
    def delete(struct, originator), do: SoftDelete.delete(struct, originator)
    def restore(struct, originator), do: SoftDelete.restore(struct, originator)
end
```
Instead of implementing functions that directly call this module,
you may use `defdelegate` to delegate the functions to this module:
```
defmodule SomeSchema do
use EWalletDB.SoftDelete
# ...
    defdelegate delete(struct, originator), to: SoftDelete
    defdelegate restore(struct, originator), to: SoftDelete
end
```
"""
use ActivityLogger.ActivityLogging
import Ecto.Query
alias EWalletDB.Repo
@doc false
defmacro __using__(_) do
quote do
# Force `delete/1` and `restore/1` to be imported separately if needed,
# to avoid confusion with the schema's own `delete/1` or `restore/1` implementation.
      import EWalletDB.SoftDelete, except: [deleted?: 1, delete: 2, restore: 2]
alias EWalletDB.SoftDelete
end
end
@doc """
A macro that adds `:deleted_at` field to a schema.
Use this on a schema declaration so that it recognizes the soft delete field.
"""
defmacro soft_delete do
quote do
field(:deleted_at, :naive_datetime_usec)
end
end
defp soft_delete_changeset(record, attrs) do
cast_and_validate_required_for_activity_log(
record,
attrs,
cast: [:deleted_at]
)
end
@doc """
Scopes a query down to only records that are not deleted.
"""
@spec exclude_deleted(Ecto.Queryable.t()) :: Ecto.Queryable.t()
def exclude_deleted(queryable) do
where(queryable, [q], is_nil(q.deleted_at))
end
@doc """
Returns whether the given struct is soft-deleted or not.
"""
@spec deleted?(struct()) :: boolean()
def deleted?(struct) do
!is_nil(struct.deleted_at)
end
@doc """
Soft-deletes the given struct.
"""
@spec delete(struct(), map()) :: any()
def delete(struct, originator) do
struct
|> soft_delete_changeset(%{
deleted_at: NaiveDateTime.utc_now(),
originator: originator
})
|> Repo.update_record_with_activity_log()
end
@doc """
Restores the given struct from soft-delete.
"""
@spec restore(struct(), map()) :: any()
def restore(struct, originator) do
struct
|> soft_delete_changeset(%{
deleted_at: nil,
originator: originator
})
|> Repo.update_record_with_activity_log()
end
end
|
apps/ewallet_db/lib/ewallet_db/soft_delete.ex
| 0.834609 | 0.903166 |
soft_delete.ex
|
starcoder
|
defmodule Lexin.Dictionary.XMLConverter do
@moduledoc """
In order to get quick lookups for the words in the dictionary files, we want to convert original
XML files into similar SQLite counterparts with simple structure.
Every word definition might have referential `Index`-es – the words that can point to the main
one. We should consider these variants when users search for the words.
The same spelling of the word can be references in multiple definitions (check "a", for example),
so we need a two-tables structure in our SQLite dictionary files; here is its basic structure:
      | definitions      |          | vocabulary            |
      |------------------|          |-----------------------|
      | id INTEGER       | <------- | id INTEGER            |
      | word TEXT        |          | definition_id INTEGER |
      | definition TEXT  |          | word TEXT             |
                                    | type TEXT             |
In the `definition` field we store the original XML snippet from the input XML file. In the
  `word` field of the `definitions` table we store the translation of the word (needed so users
  can type their queries in either language – Swedish or the target language).
Note: `Floki.raw_html/1` that we use in the code makes all original XML tag names downcased.
The `vocabulary` table contains Swedish variants of the words and helps us to do fast lookups
(10-15ms for the set of ~100k words in the table) and find corresponding definitions, which we
  later might render to the user.
Here is an example of SQL-query we can use to get definitions:
```sql
SELECT DISTINCT definition FROM definitions
JOIN vocabulary ON definitions.id = vocabulary.definition_id
  WHERE vocabulary.word LIKE 'fordon' OR definitions.word LIKE 'fordon'
```
In addition, these tables can also be used to generate suggestions to the input field while user
is typing a search query.
Here is an example of SQL-query we can use to get Swedish suggestions (similar to target lang):
```sql
SELECT DISTINCT word FROM vocabulary
```
Here is the way to prepare these `.sqlite` files which can be consumed later by the Lexin application:
```console
mix run scripts/converter.exs --input swe_rus.xml --output russian.sqlite
```
P.S. We need to check and be careful of words that spell the same way in both languages – should
we show all definitions then? Maybe yes, maybe not.
"""
@doc """
Parse input XML, find all words' definitions and indexable references (variants of the word to
lookup for), prepare database and insert converted data.
"""
def convert(input_filename, output_filename) do
IO.puts("Resetting database...")
conn = reset_db!(output_filename)
IO.puts("Parsing input XML...")
all_words =
input_filename
|> parse_xml()
IO.puts("Inserting into SQLite...")
{_conn, _total, _processed} =
all_words
|> Enum.reduce({conn, Enum.count(all_words), 0}, &insert/2)
IO.puts("\nDone!")
end
defp reset_db!(output_filename) do
File.rm(output_filename)
{:ok, conn} = Exqlite.Sqlite3.open(output_filename)
vocabulary_table = """
CREATE TABLE "vocabulary" ("id" INTEGER PRIMARY KEY, "definition_id" INTEGER, "word" TEXT, "type" TEXT);
"""
:ok = Exqlite.Sqlite3.execute(conn, vocabulary_table)
definitions_table = """
CREATE TABLE "definitions" ("id" INTEGER PRIMARY KEY, "word" TEXT, "definition" TEXT);
"""
:ok = Exqlite.Sqlite3.execute(conn, definitions_table)
conn
end
defp parse_xml(input_filename) do
input_filename
|> File.read!()
|> Floki.parse_document!()
|> Floki.find("word")
|> Enum.map(&parse_word/1)
end
defp parse_word(word_block) do
id =
word_block
|> Floki.attribute("variantid")
|> List.first()
|> String.to_integer()
variants =
for index <- Floki.find(word_block, "index") do
{
Floki.attribute(index, "value") |> List.first(),
Floki.attribute(index, "type") |> List.first()
}
end
word = Floki.find(word_block, "translation") |> Floki.text()
definition = Floki.raw_html(word_block)
{id, variants, word, definition}
end
defp insert({id, variants, word, definition}, {conn, total, processed}) do
Enum.each(variants, fn {word, type} ->
word_sql = "INSERT INTO vocabulary (definition_id, word, type) VALUES (?1, ?2, ?3)"
{:ok, statement} = Exqlite.Sqlite3.prepare(conn, word_sql)
:ok = Exqlite.Sqlite3.bind(conn, statement, [id, word, type])
:done = Exqlite.Sqlite3.step(conn, statement)
end)
definition_sql = "INSERT INTO definitions (id, word, definition) VALUES (?1, ?2, ?3)"
{:ok, statement} = Exqlite.Sqlite3.prepare(conn, definition_sql)
:ok = Exqlite.Sqlite3.bind(conn, statement, [id, word, definition])
:done = Exqlite.Sqlite3.step(conn, statement)
# a simple "progress bar"
IO.write("#{processed + 1} / #{total}\r")
{conn, total, processed + 1}
end
end
|
lib/lexin/dictionary/xml_converter.ex
| 0.873485 | 0.943243 |
xml_converter.ex
|
starcoder
|
defmodule Ello.V3.Schema.AssetTypes do
import Ello.V3.Schema.Helpers
use Absinthe.Schema.Notation
object :asset do
field :id, :id
field :attachment, :responsive_image_versions, resolve: fn(_args, %{source: post}) ->
{:ok, post.attachment_struct}
end
end
object :tshirt_image_versions do
field :small, :image, resolve: &resolve_image/2
field :regular, :image, resolve: &resolve_image/2
field :large, :image, resolve: &resolve_image/2
field :original, :image, resolve: &resolve_image/2
end
object :responsive_image_versions do
field :hdpi, :image, resolve: &resolve_image/2
field :ldpi, :image, resolve: &resolve_image/2
field :mdpi, :image, resolve: &resolve_image/2
field :xhdpi, :image, resolve: &resolve_image/2
field :original, :image, resolve: &resolve_image/2
field :optimized, :image, resolve: &resolve_image/2
field :video, :image, resolve: &resolve_image/2
end
object :image do
field :metadata, :metadata, resolve: fn(_args, %{source: %{version: version}}) ->
{:ok, version}
end
field :url, :string, resolve: &url_from_version/2
end
object :metadata do
field :width, :integer
field :height, :integer
field :size, :integer
field :type, :integer
end
# content nsfw + no nsfw = pixellated
defp filename(version,
%{user: %{settings: %{posts_adult_content: true}}},
%{assigns: %{allow_nsfw: false}}), do: version.pixellated_filename
# content nudity + no nudity = pixellated
defp filename(version,
%{user: %{settings: %{posts_nudity: true}}},
%{assigns: %{allow_nudity: false}}), do: version.pixellated_filename
# _ + _ = normal
defp filename(version, _, _), do: version.filename
defp url_from_version(_, %{source: %{version: nil}}),
do: {:ok, nil}
defp url_from_version(_, %{source: %{version: version, image: image}, context: context}),
do: {:ok, image_url(image.path, filename(version, image, context))}
end
|
apps/ello_v3/lib/ello_v3/schema/asset_types.ex
| 0.572723 | 0.401101 |
asset_types.ex
|
starcoder
|
defmodule CommonParser.Expr do
@moduledoc """
Documentation for Parser.
"""
import NimbleParsec
import CommonParser.Helper
# tag := ascii_tag_with_space([?a..?z])
# single_value := string_with_quote | integer | atom_with_space
# list_value := [ single_value | single_value , single_value ]
# value := single_value | list_value
# op1 := = | in | not in
# op2 := < | <= | > | >=
# op3 := and | or
# cond1 := ( tag op1 value )
# cond2 := ( tag op2 integer )
# sub_expr := ( cond1 | cond2 )
# expr := sub_expr op3 expr | sub_expr
tag = parse_tag() |> reduce({:parser_result_to_atom, []})
single_value = choice([parse_string(), parse_integer(), parse_atom()])
defcombinatorp :list_entries,
choice([
single_value
|> concat(ignore_space())
|> concat(ignore_sep(","))
|> concat(ignore_space())
|> concat(parsec(:list_entries)),
single_value
])
list_value =
ignore(string("["))
|> concat(ignore_space())
|> parsec(:list_entries)
|> concat(ignore_space())
|> ignore(string("]"))
|> reduce({Enum, :uniq, []})
value = choice([single_value, list_value]) |> unwrap_and_tag(:v)
op1 = parse_ops(["=", "!=", "in", "not in"]) |> reduce({:parser_result_to_atom, []})
op2 = parse_ops(["<", "<=", ">", ">="]) |> reduce({:parser_result_to_atom, []})
op3 = parse_ops(["and", "or"]) |> reduce({:parser_result_to_atom, []})
cond1 = tag |> concat(ignore_space()) |> concat(op1) |> concat(ignore_space()) |> concat(value)
cond2 =
tag
|> concat(ignore_space())
|> concat(op2)
|> concat(ignore_space())
|> concat(parse_integer())
sub_expr = ignore_bracket(?\(, choice([cond1, cond2]), ?\))
defcombinatorp :expr,
choice([
sub_expr
|> concat(ignore_space())
|> concat(op3)
|> concat(ignore_space())
|> concat(ignore_bracket(?\(, parsec(:expr), ?\)))
|> tag(:expr),
sub_expr
])
@doc """
Parse to an atom. For testing purpose. Please use `parse/2` instead.
iex> CommonParser.Expr.parse_atom(":h")
{:ok, [:h], "", %{}, {1, 0}, 2}
iex> CommonParser.Expr.parse_atom(":hello_world")
{:ok, [:hello_world], "", %{}, {1, 0}, 12}
iex> CommonParser.Expr.parse_atom(":he2llo_world1")
{:ok, [:he], "2llo_world1", %{}, {1, 0}, 3}
"""
defparsec :parse_atom, parse_atom()
@doc ~S"""
Parse to a string. For testing purpose. Please use `parse/2` instead.
iex> CommonParser.Expr.parse_quoted_string(~S("hello world"))
{:ok, ["hello world"], "", %{}, {1, 0}, 13}
iex> CommonParser.Expr.parse_quoted_string(~S(hello world))
{:error, "expected byte equal to ?\"", "hello world", %{}, {1, 0}, 0}
iex> CommonParser.Expr.parse_quoted_string(~S("hello \"world\""))
{:ok, ["hello \"world\""], "", %{}, {1, 0}, 17}
"""
defparsec :parse_quoted_string, parse_string()
@doc """
Parse a value. For testing purpose. Please use `parse/2` instead.
iex> CommonParser.Expr.parse_value("10")
{:ok, [v: 10], "", %{}, {1, 0}, 2}
iex> CommonParser.Expr.parse_value(~S(["a", :a, 1]))
{:ok, [v: ["a", :a, 1]], "", %{}, {1, 0}, 12}
"""
defparsec :parse_value, value
@doc """
Parse a sub expr. For testing purpose. Please use `parse/2` instead.
iex> CommonParser.Expr.parse_expr("a != 1")
{:ok, [:a, :!=, {:v, 1}], "", %{}, {1, 0}, 6}
iex> CommonParser.Expr.parse_expr(~S(a in ["hello", :world, 2]))
{:ok, [:a, :in, {:v, ["hello", :world, 2]}], "", %{}, {1, 0}, 25}
"""
defparsec :parse_expr, sub_expr
@doc ~S"""
Parse an expression.
iex> CommonParser.Expr.parse("a=1 and b = 2")
{:ok, [expr: [:a, :==, {:v, 1}, :and, :b, :==, {:v, 2}]], "", %{}, {1, 0}, 13}
iex> CommonParser.Expr.parse("a=1 and b in [\"abc\", :abc, 123]")
{:ok, [expr: [:a, :==, {:v, 1}, :and, :b, :in, {:v, ["abc", :abc, 123]}]], "", %{}, {1, 0}, 31}
iex> CommonParser.Expr.parse("a=1 and (b in [\"abc\", :abc, 123] or c != [1,2,3])")
{:ok, [expr: [:a, :==, {:v, 1}, :and, {:expr, [:b, :in, {:v, ["abc", :abc, 123]}, :or, :c, :!=, {:v, [1, 2, 3]}]}]], "", %{}, {1, 0}, 49}
"""
defparsec :parse, parsec(:expr)
end
|
lib/expr.ex
| 0.51879 | 0.543409 |
expr.ex
|
starcoder
|
defmodule AdventOfCode.Day8 do
@input_example """
rect 3x2
rotate column x=1 by 1
rotate row y=0 by 4
rotate column x=1 by 1
"""
defmodule Field do
    # `marks` is the list of lit cells as {x, y} tuples
    defstruct height: 6, width: 50, marks: []
def apply_ops(%Field{} = field, []), do: field
    def apply_ops(%Field{} = field, [op | ops]) do
      op
      |> case do
        {:rect, x, y} ->
          # Light every cell in the x-by-y rectangle anchored at {0, 0}.
          new_marks = for mx <- 0..(x - 1), my <- 0..(y - 1), do: {mx, my}
          %Field{field | marks: unique_marks(new_marks ++ field.marks)}

        {:rotate_column, index, distance} ->
          # Shift every lit cell in column `index` down by `distance`, wrapping.
          marks =
            Enum.map(field.marks, fn
              {^index, y} -> {index, rem(y + distance, field.height)}
              mark -> mark
            end)

          %Field{field | marks: marks}

        {:rotate_row, index, distance} ->
          # Shift every lit cell in row `index` right by `distance`, wrapping.
          marks =
            Enum.map(field.marks, fn
              {x, ^index} -> {rem(x + distance, field.width), index}
              mark -> mark
            end)

          %Field{field | marks: marks}
      end
      |> apply_ops(ops)
    end
def count_marks(%Field{marks: marks}) do
length(marks)
end
defp unique_marks(marks) do
marks
|> Enum.sort
|> Enum.uniq
end
    # Expands the sparse mark list into an h-by-w boolean grid
    # (rows indexed by y, columns by x); currently only useful for
    # debugging/rendering the screen.
    defp marks_to_columns(%Field{height: h, width: w, marks: marks}) do
      grid =
        false
        |> List.duplicate(w)
        |> List.duplicate(h)

      Enum.reduce(marks, grid, fn {x, y}, acc ->
        List.update_at(acc, y, &List.replace_at(&1, x, true))
      end)
    end
end
  def solve() do
    ops =
      @input_example
      |> String.split("\n", trim: true)
      |> Enum.map(&to_op/1)

    %Field{}
    |> Field.apply_ops(ops)
    |> Field.count_marks()
  end
defp to_op(<<"rect ", coords::bitstring>>) do
[x, y] = coords
|> String.split("x", trim: true)
|> Enum.map(&String.to_integer/1)
{:rect, x, y}
end
defp to_op(<<"rotate column x=", coords::bitstring>>) do
[index, distance] = coords
|> String.split(" by ", trim: true)
|> Enum.map(&String.to_integer/1)
{:rotate_column, index, distance}
end
defp to_op(<<"rotate row y=", coords::bitstring>>) do
[index, distance] = coords
|> String.split(" by ", trim: true)
|> Enum.map(&String.to_integer/1)
{:rotate_row, index, distance}
end
end
|
lib/advent_of_code/day8.ex
| 0.600305 | 0.528716 |
day8.ex
|
starcoder
|
defmodule PhxIzitoast do
@moduledoc """
Documentation for `PhxIzitoast` - Phoenix Notification Package.

## Configuration
Add the below config to `config/config.exs`. This includes the default configurations(optional).
```elixir
config :phx_izitoast, :opts, # bottomRight, bottomLeft, topRight, topLeft, topCenter,
position: "topRight", # dark,
theme: "light",
timeout: 5000,
close: true,
titleSize: 18,
messageSize: 18,
progressBar: true
```
Adding the JS Files to Layout and Template. First import the Izitoast to your `layout_view.ex`
```elixir
import PhxIzitoast
```
Add the below function to the bottom of your `app.html.eex` just efore the closing `</body>` tag . This will import the needed `css` and `js` files.
```elixir
<body>
...............
<%= izi_toast(@conn) %>
.................
</body>
```
Add the below code to your `app_web/endpoint.ex` file just below the existing `plug Plug.Static` configuration.
```elixir
plug Plug.Static,
at: "/",
from: {:phx_izitoast, "priv/static"},
gzip: false,
only: ~w(css js )
```
This adds the necessary js and css for iziToast
## Usage
Quickest way to use PhxIzitoast
```elixir
conn
|> PhxIzitoast.message("message")
```
or
```elixir
conn
|> PhxIzitoast.success("title", "message", opts \\ [])
|> PhxIzitoast.error("", "This is an Error message", [position: "center", timeout: 10000])
```
  The title can be left as `""` to omit the toast title
Usage in code would be like:
```elixir
def create(conn, %{"category" => category_params}) do
slug = slugified_title(category_params["name"])
category_params = Map.put(category_params, "slug", slug)
case Categories.create_category(category_params) do
{:ok, _category} ->
conn
|> PhxIzitoast.success("Category", " created successfully")
|> redirect(to: Routes.category_path(conn, :index))
{:error, %Ecto.Changeset{} = changeset} ->
conn
|> PhxIzitoast.error("Category", "A Validation Error !!!")
|> render("new.html", changeset: changeset)
end
end
```
  With this you can remove the default notification alerts in `app.html.eex` and replace all your `put_flash/2` with `PhxIzitoast`.
### More functions include:
"""
import Phoenix.HTML.Tag
import Phoenix.HTML
import Phoenix.Controller
@defaults [
position: "topRight",
theme: "light",
timeout: 5000,
close: true,
titleSize: 18,
messageSize: 18,
progressBar: true
]
@doc """
  Inserts the CSS and JS files; takes in the `@conn`. It is added in `app.html.eex` just before `</body>`
"""
def izi_toast(conn) do
toasts = get_flash(conn, :izitoast)
# toasts = conn.assigns[:izitoast]
# conn |> fetch_session |> delete_session(:izitoast)
# delete_session(conn, :izitoast)
if toasts do
[toast_required_tags(), create_toast_tag(toasts)]
end
end
@doc false
def flash(conn, opts) do
toasts = get_flash(conn, :izitoast)
# toasts = conn.assigns[:izitoast]
if(toasts) do
# # delete_session(conn, :izitoast)
opts = toasts ++ [opts]
conn = put_flash(conn, :izitoast, opts)
conn
# # assign(conn, :izitoast, opts)
else
# # delete_session(conn, :izitoast)
# # assign(conn, :izitoast, [opts])
conn = put_flash(conn, :izitoast, [opts])
conn
end
# assign(conn, :izitoast, opts)
# put_flash(conn, :message, "new stuff we just set in the session")
end
@doc false
def make_toast(conn, title, message, color, opts) do
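    # Option precedence: per-call opts override the app config, which
    # overrides the module @defaults.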
opts = Keyword.merge(Application.get_env(:phx_izitoast, :opts) || [], opts)
merged_opts = Keyword.merge(@defaults, opts)
final_opts = merged_opts ++ [title: title, message: message, color: color]
flash(conn, final_opts)
end
@doc false
def make_toast(conn, title, message, color),
do: make_toast(conn, title, message, color, [])
@doc """
```elixir
conn
|> PhxIzitoast.message("awesome things only")
```
"""
def message(conn, message),
do: make_toast(conn, " ", message, "blue", [])
@doc """
```elixir
conn
|> PhxIzitoast.success("title", "awesome", position: "bottomRight")
```
"""
def success(conn, title, message, opts),
do: make_toast(conn, title, message, "green", opts)
@doc """
```elixir
conn
|> PhxIzitoast.success("title", "awesome")
```
"""
def success(conn, title, message),
do: make_toast(conn, title, message, "green", [])
@doc """
```elixir
conn
|> PhxIzitoast.info("Success", "awesome", position: "topRight")
```
"""
def info(conn, title, message, opts),
do: make_toast(conn, title, message, "blue", opts)
@doc """
```elixir
conn
|> PhxIzitoast.info("Hey", "This is Info")
```
"""
def info(conn, title, message),
do: make_toast(conn, title, message, "blue", [])
@doc """
```elixir
conn
|> PhxIzitoast.warning("title", "awesome", timeout: 1000)
```
"""
def warning(conn, title, message, opts),
do: make_toast(conn, title, message, "yellow", opts)
@doc """
```elixir
conn
|> PhxIzitoast.warning("title", "not very awesome")
```
"""
def warning(conn, title, message),
do: make_toast(conn, title, message, "yellow", [])
@doc """
```elixir
conn
|> PhxIzitoast.error("Arrow", "You've Failed this city", position: "bottomLeft")
```
"""
def error(conn, title, message, opts),
do: make_toast(conn, title, message, "red", opts)
@doc """
```elixir
conn
|> PhxIzitoast.info("Error 500", "Error Occured !!!")
```
"""
def error(conn, title, message),
do: make_toast(conn, title, message, "red", [])
@doc false
def create_toast_tag(toasts) do
for toast <- toasts do
content_tag(:script, raw("
var options = {
title: '#{toast[:title]}',
message: '#{toast[:message]}',
color: '#{toast[:color]}', // blue, red, green, yellow
position: '#{toast[:position]}', // bottomRight, bottomLeft, topRight, topLeft, topCenter, bottomCenter, center
theme: '#{toast[:theme]}', // dark
timeout: #{toast[:timeout]},
close: #{toast[:close]},
titleSize: '#{toast[:titleSize]}',
messageSize: '#{toast[:messageSize]}',
progressBar: '#{toast[:progressBar]}'
};
var color = '#{toast[:color]}';
if (color === 'blue'){
iziToast.info(options);
}
else if (color === 'green'){
iziToast.success(options);
}
else if (color === 'yellow'){
iziToast.warning(options);
}
else if (color === 'red'){
iziToast.error(options);
} else {
iziToast.show(options);
}
// console.log('here')
"), type: 'text/javascript')
end
end
@doc false
def toast_required_tags do
~E(<link href="/css/iziToast.css" rel="stylesheet" />
<script src="/js/iziToast.js"></script>)
end
@doc """
```elixir
conn
|> PhxIzitoast.clear_toast()
```
"""
def clear_toast(conn) do
conn |> clear_flash()
end
end
|
lib/phx_izitoast.ex
| 0.731251 | 0.836988 |
phx_izitoast.ex
|
starcoder
|
require Utils
defmodule D4 do
@moduledoc """
--- Day 4: Secure Container ---
You arrive at the Venus fuel depot only to discover it's protected by a password. The Elves had written the password on a sticky note, but someone threw it out.
However, they do remember a few key facts about the password:
It is a six-digit number.
The value is within the range given in your puzzle input.
Two adjacent digits are the same (like 22 in 122345).
Going from left to right, the digits never decrease; they only ever increase or stay the same (like 111123 or 135679).
How many different passwords within the range given in your puzzle input meet these criteria?
--- Part Two ---
An Elf just remembered one more important detail: the two adjacent matching digits are not part of a larger group of matching digits.
How many different passwords within the range given in your puzzle input meet all of the criteria?
"""
@behaviour Day
def ascending?(x) when x < 10, do: true
def ascending?(x) do
next = div(x, 10)
right = rem(x, 10)
left = rem(next, 10)
right >= left and ascending?(next)
end
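  # Jumps to the next number whose digits never decrease: after the first
  # decreasing digit, every later digit is pinned to the one before it
  # (e.g. next_ascending(1299) skips 1300..1332 and returns 1333).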
def next_ascending(x) do
x = x + 1
if ascending?(x) do
x
else
x
|> Integer.digits()
|> Enum.scan(fn x, last ->
if last == 0 or x < last, do: 0, else: x
end)
|> Enum.scan(fn x, last ->
if x == 0, do: last, else: x
end)
|> Integer.undigits()
end
end
def double?(x) when x < 10, do: false
def double?(x) do
last_two = rem(x, 100)
rem(last_two, 11) == 0 or double?(div(x, 10))
end
def explicit_double?(0, _, count), do: count == 2
def explicit_double?(x, current, count) do
new_current = rem(x, 10)
cond do
new_current != current and count == 2 -> true
new_current == current -> explicit_double?(div(x, 10), current, count + 1)
true -> explicit_double?(div(x, 10), new_current, 1)
end
end
def explicit_double?(x), do: explicit_double?(div(x, 10), rem(x, 10), 1)
def solve(input) do
[minimum, maximum] = input |> hd |> String.split("-") |> Utils.to_ints()
ascending =
Stream.iterate(minimum, &next_ascending/1) |> Enum.take_while(fn x -> x <= maximum end)
part_1 = ascending |> Enum.filter(&double?/1)
part_2 = part_1 |> Enum.filter(&explicit_double?/1)
{
length(part_1),
length(part_2)
}
end
end
|
lib/days/04.ex
| 0.706697 | 0.730915 |
04.ex
|
starcoder
|
defmodule Mexpanel.EngageRequest do
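@moduledoc """
Builds engage requests as a pipeline of setter functions.

Illustrative usage (the token and distinct id are placeholders):

    Mexpanel.EngageRequest.new("project-token", "distinct-id-123")
    |> Mexpanel.EngageRequest.ip("127.0.0.1")
    |> Mexpanel.EngageRequest.set(%{"name" => "Jane"})
"""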
@enforce_keys [:token, :distinct_id]
defstruct [
:token,
:distinct_id,
:time,
:ip,
:ignore_time,
:operation,
:properties
]
@type properties :: map() | list() | nil
@type operation ::
:set
| :set_once
| :add
| :append
| :union
| :remove
| :unset
| :delete
@type t :: %__MODULE__{
token: String.t(),
distinct_id: String.t(),
time: DateTime.t(),
ip: String.t(),
ignore_time: boolean,
operation: operation,
properties: properties
}
@spec new(String.t(), String.t()) :: t
def new(token, distinct_id) do
%__MODULE__{
token: token,
distinct_id: distinct_id
}
end
@spec time(t, DateTime.t()) :: t
def time(request, time) do
%{request | time: time}
end
@spec ip(t, String.t()) :: t
def ip(request, ip) do
%{request | ip: ip}
end
@spec ignore_time(t) :: t
def ignore_time(request) do
%{request | ignore_time: true}
end
### Operations
@spec set(t, properties) :: t
def set(request, properties) do
%{request | operation: :set, properties: properties}
end
@spec set_once(t, properties) :: t
def set_once(request, properties) do
%{request | operation: :set_once, properties: properties}
end
@spec add(t, properties) :: t
def add(request, properties) do
%{request | operation: :add, properties: properties}
end
@spec append(t, properties) :: t
def append(request, properties) do
%{request | operation: :append, properties: properties}
end
@spec union(t, properties) :: t
def union(request, properties) do
%{request | operation: :union, properties: properties}
end
@spec remove(t, properties) :: t
def remove(request, properties) do
%{request | operation: :remove, properties: properties}
end
@spec unset(t, properties) :: t
def unset(request, properties) do
%{request | operation: :unset, properties: properties}
end
@spec delete(t) :: t
def delete(request) do
%{request | operation: :delete, properties: nil}
end
end
|
lib/mexpanel/engage_request.ex
| 0.79858 | 0.459561 |
engage_request.ex
|
starcoder
|
defmodule Comeonin.Bcrypt.Base64 do
@moduledoc """
Module that provides base64 encoding for bcrypt.
Bcrypt uses an adapted base64 alphabet (using `.` instead of `+`,
starting with `./` and with no padding).
"""
use Bitwise
@decode_map {:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:ws,:ws,:bad,:bad,:ws,:bad,:bad,
:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,
:ws,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,0,1,
54,55,56,57,58,59,60,61,62,63,:bad,:bad,:bad,:eq,:bad,:bad,
:bad,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,
17,18,19,20,21,22,23,24,25,26,27,:bad,:bad,:bad,:bad,:bad,
:bad,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,
43,44,45,46,47,48,49,50,51,52,53,:bad,:bad,:bad,:bad,:bad,
:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,
:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,
:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,
:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,
:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,
:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,
:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,
:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad,:bad}
@doc """
Encode using the adapted Bcrypt alphabet.
## Examples
iex> Comeonin.Bcrypt.Base64.encode 'spamandeggs'
'a1/fZUDsXETlX1K'
"""
def encode(words), do: encode_l(words)
@doc """
Decode using the adapted Bcrypt alphabet.
## Examples
iex> Comeonin.Bcrypt.Base64.decode 'a1/fZUDsXETlX1K'
'spamandeggs'
"""
def decode(words), do: decode_l(words, [])
defp b64e(val) do
elem({?., ?/, ?A, ?B, ?C, ?D, ?E, ?F, ?G, ?H, ?I, ?J, ?K, ?L,
?M, ?N, ?O, ?P, ?Q, ?R, ?S, ?T, ?U, ?V, ?W, ?X,
?Y, ?Z, ?a, ?b, ?c, ?d, ?e, ?f, ?g, ?h, ?i, ?j, ?k, ?l,
?m, ?n, ?o, ?p, ?q, ?r, ?s, ?t, ?u, ?v, ?w, ?x,
?y, ?z, ?0, ?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9}, val)
end
defp encode_l([]), do: []
defp encode_l([a]) do
[b64e(a >>> 2),
b64e((a &&& 3) <<< 4)]
end
defp encode_l([a,b]) do
[b64e(a >>> 2),
b64e(((a &&& 3) <<< 4) ||| (b >>> 4)),
b64e((b &&& 15) <<< 2)]
end
defp encode_l([a,b,c|ls]) do
bb = (a <<< 16) ||| (b <<< 8) ||| c
[b64e(bb >>> 18),
b64e((bb >>> 12) &&& 63),
b64e((bb >>> 6) &&& 63),
b64e(bb &&& 63) | encode_l(ls)]
end
defp decode_l([], a), do: a
defp decode_l([c1,c2], a) do
bits2x6 = (b64d(c1) <<< 18) ||| (b64d(c2) <<< 12)
octet1 = bits2x6 >>> 16
a ++ [octet1]
end
defp decode_l([c1,c2,c3], a) do
bits3x6 = (b64d(c1) <<< 18) ||| (b64d(c2) <<< 12) ||| (b64d(c3) <<< 6)
octet1 = bits3x6 >>> 16
octet2 = (bits3x6 >>> 8) &&& 0xff
a ++ [octet1,octet2]
end
defp decode_l([c1,c2,c3,c4| cs], a) do
bits4x6 = (b64d(c1) <<< 18) ||| (b64d(c2) <<< 12) ||| (b64d(c3) <<< 6) ||| b64d(c4)
octet1 = bits4x6 >>> 16
octet2 = (bits4x6 >>> 8) &&& 0xff
octet3 = bits4x6 &&& 0xff
decode_l(cs, a ++ [octet1,octet2,octet3])
end
defp b64d(val) do
b64d_ok(elem(@decode_map, val))
end
defp b64d_ok(val) when is_integer(val), do: val
defp b64d_ok(val) do
raise ArgumentError, "Invalid character: #{val}"
end
end
|
deps/comeonin/lib/comeonin/bcrypt/base64.ex
| 0.790004 | 0.74001 |
base64.ex
|
starcoder
|
defmodule OnFlow.Credentials do
@moduledoc """
Defines a struct that contains an address, a public key, and a private key.
`:address` might be `nil`, but the key will be present in the struct.
Do not initialize this directly. Instead, call `Credentials.new/1` or
`Credentials.new!/1`.
"""
@enforce_keys [:address, :public_key, :private_key]
defstruct [:address, :public_key, :private_key]
@typep hex_string() :: String.t()
@type keys_with_address() :: %{
optional(:address) => nil | String.t(),
required(:public_key) => String.t(),
required(:private_key) => String.t()
}
@type t() :: %__MODULE__{
address: nil | hex_string(),
public_key: hex_string(),
private_key: hex_string()
}
@doc """
Initializes a `%Credentials{}` struct. Returns `{:ok, credentials}` on
success. The params _must_ contain a `:private_key` and `:public_key`, or
`{:error, :missing_keys}` will be returned.
`:public_key` and `:private_key` must be hex-encoded strings.
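For example (hex keys shortened for readability):

    {:ok, credentials} =
      OnFlow.Credentials.new(%{public_key: "04ab...", private_key: "c1d2..."})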
"""
@spec new(keys_with_address()) :: {:ok, t()} | {:error, :missing_keys}
def new(attrs) do
address =
case Map.get(attrs, :address) do
address when is_binary(address) or is_nil(address) -> address
_ -> nil
end
with public_key when is_binary(public_key) <- Map.get(attrs, :public_key),
private_key when is_binary(private_key) <- Map.get(attrs, :private_key) do
credentials = %__MODULE__{
address: address,
public_key: public_key,
private_key: private_key
}
{:ok, credentials}
else
_ -> {:error, :missing_keys}
end
end
@doc """
Initializes a `%Credentials{}` struct. Raises on error. See the documentation
for `new/1`.
"""
@spec new!(map()) :: t() | none()
def new!(attrs) do
case new(attrs) do
{:ok, credentials} -> credentials
_ -> raise OnFlow.MissingKeysError, attrs
end
end
@doc """
Returns a `%Credentials{}` struct with the keypairs generated.
"""
@spec generate_keys() :: t()
def generate_keys do
new!(OnFlow.Crypto.generate_keys())
end
end
|
lib/on_flow/credentials.ex
| 0.851042 | 0.560102 |
credentials.ex
|
starcoder
|
defmodule ConnectFour do
use Servus.Game, features: [:hiscore]
require Logger
alias Servus.Serverutils
def init(players) do
Logger.debug("Initializing game state machine")
[player2, player1] = players
{:ok, field_pid} = Gamefield.start_link()
fsm_state = %{player1: player1, player2: player2, field: field_pid}
_msg_to_send = %LoadedProtobuf.ServusMessage{gameID: "test", functionID: {:cffunc, :START_CF}}
# Serverutils.send(player1.socket, "start", player2.name)
# Serverutils.send(player2.socket, "start", player1.name)
# Serverutils.send(player2.socket, "turn", nil)
{:ok, :p2, fsm_state}
end
@doc """
When one of the players loses the connection, this callback will
be invoked. Send the remaining player a message and let the client
decide what to do.
"""
def abort(player, state) do
Logger.warn("Player #{player.name} has aborted the game")
cond do
player.id == state.player1.id ->
# Serverutils.send(state.player2.socket, "abort", state.player1.id)
{:stop, :shutdown, state}
player.id == state.player2.id ->
# Serverutils.send(state.player1.socket, "abort", state.player2.id)
{:stop, :shutdown, state}
end
end
@doc """
FSM is in state `p2`. Player 2 puts.
Outcome: p1 state
"""
def p2({id, "put", slot}, state) do
cond do
id != state.player2.id ->
Logger.warn("Not your turn")
{:next_state, :p2, state}
slot in 0..7 ->
Gamefield.update_field(state.field, slot, :p2)
if Gamefield.check_win_condition(state.field) do
# Send the final move to the losing player
# Serverutils.send(state.player1.socket, "set", slot)
# Serverutils.send(state.player2.socket, "win", nil)
# Serverutils.send(state.player1.socket, "loose", nil)
# Game over
Logger.info("Player #{state.player2.id} achieved score 1")
# Servus.Serverutils.callp("hiscore", "put", %{module: "ConnectFour", player: state.player2.id, score: 1})
# Servus.Serverutils.send(state.player2.socket, ["hiscore", "achieved"], 1)
{:next_state, :win, state}
else
# No win yet
# Notify the other player about the put...
# Serverutils.send(state.player1.socket, "set", slot)
# ...and give him the next turn.
# Serverutils.send(state.player1.socket, "turn", nil)
{:next_state, :p1, state}
end
true ->
Logger.warn("Invalid slot: #{slot}")
{:next_state, :p2, state}
end
end
def p2({_, "restart", _}, state) do
Logger.warn("Restart not allowed while game is ongoing")
{:next_state, :p2, state}
end
def hiscore_achieve(_player, _score, _state) do
end
@doc """
FSM is in state `p1`. Player 1 puts.
Outcome: p2 state
"""
def p1({id, "put", slot}, state) do
cond do
id != state.player1.id ->
Logger.warn("Not your turn")
{:next_state, :p1, state}
slot in 0..7 ->
Gamefield.update_field(state.field, slot, :p1)
if Gamefield.check_win_condition(state.field) do
# Send the final move to the losing player
# Serverutils.send(state.player2.socket, "set", slot)
# Serverutils.send(state.player1.socket, "win", nil)
# Serverutils.send(state.player2.socket, "loose", nil)
# Game over
Logger.info("Player #{state.player1.id} achieved score 1")
# Servus.Serverutils.callp("hiscore", "put", %{module: "ConnectFour", player: state.player1.id, score: 1})
# Servus.Serverutils.send(state.player1.socket, ["hiscore", "achieved"], 1)
{:next_state, :win, state}
else
# No win yet
# Notify the other player about the put...
# Serverutils.send(state.player2.socket, "set", slot)
# ...and give him the next turn.
# Serverutils.send(state.player2.socket, "turn", nil)
{:next_state, :p2, state}
end
true ->
Logger.warn("Invalid slot: #{slot}")
{:next_state, :p1, state}
end
end
def p1({_, "restart", _}, state) do
Logger.warn("Restart not allowed while game is ongoing")
{:next_state, :p1, state}
end
def win({_, "restart", _}, state) do
Gamefield.reset_game(state.field)
# Serverutils.send(state.player1.socket, "reset", nil)
# Serverutils.send(state.player2.socket, "reset", nil)
# Serverutils.send(state.player2.socket, "turn", nil)
{:next_state, :p2, state}
end
end
|
lib/Backends/connect_four/connectfour.ex
| 0.599016 | 0.400251 |
connectfour.ex
|
starcoder
|
defmodule Day03 do
@moduledoc """
AoC 2019, Day 3 - Crossed Wires
"""
@doc """
Find the distance to the closes intersection point
"""
def part1 do
eval(&intersection_distance/1)
end
@doc """
Find the distance to the shortest path intersection
"""
def part2 do
eval(&intersection_shortest/1)
end
defp eval(f) do
Util.priv_file(:day03, "day3_input.txt")
|> File.read!()
|> parse_input()
|> f.()
end
@doc """
Parse a string containing two wires
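
## Example (illustrative)

    parse_input("R8,U5\nU7,R6")
    #=> {[{"R", 8}, {"U", 5}], [{"U", 7}, {"R", 6}]}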
"""
def parse_input(s) do
[s1, s2] = String.split(s, "\n", trim: true)
w1 = parse_wire(s1)
w2 = parse_wire(s2)
{w1, w2}
end
defp parse_wire(w) do
String.split(w, ",", trim: true)
|> Enum.map(&parse_point/1)
end
defp parse_point(p) do
{a, b} = String.split_at(p, 1)
{a, String.to_integer(b)}
end
@doc """
Compute the ordered list of points that makes up a wire
"""
def wire_points(w) do
wire_points({0,0}, w, [])
end
defp wire_points(_loc, [], acc), do: List.flatten(Enum.reverse(acc))
defp wire_points(loc, [inst|rest], acc) do
pts = segment_points(loc, inst)
new_loc = update_loc(loc, inst)
wire_points(new_loc, rest, [pts | acc])
end
defp segment_points(loc, inst={_dir, dist}) do
point_acc(dist+1, loc, dir_mod(inst), [])
end
defp dir_mod({"U", _d}), do: {0, 1}
defp dir_mod({"D", _d}), do: {0, -1}
defp dir_mod({"L", _d}), do: {-1, 0}
defp dir_mod({"R", _d}), do: {1, 0}
defp point_acc(0, _loc, _mods, acc), do: tl(Enum.reverse(acc))
defp point_acc(cnt, {x, y}, mods = {xmod, ymod}, acc) do
point_acc(cnt-1, {x+xmod, y+ymod}, mods, [{x,y} | acc])
end
defp update_loc({x, y}, {"U", dist}), do: {x, y+dist}
defp update_loc({x, y}, {"D", dist}), do: {x, y-dist}
defp update_loc({x, y}, {"L", dist}), do: {x-dist, y}
defp update_loc({x, y}, {"R", dist}), do: {x+dist, y}
@doc """
Find distance to the closest point of intersection of two wires
"""
def intersection_distance({w1, w2}) do
p1 = MapSet.new(wire_points(w1))
p2 = MapSet.new(wire_points(w2))
MapSet.intersection(p1, p2)
|> Enum.map(&manhattan/1)
|> Enum.sort()
|> hd()
end
defp manhattan({x,y}), do: abs(x) + abs(y)
@doc """
Find the shortest combined path length to an intersection of two wires
"""
def intersection_shortest({w1, w2}) do
l1 = wire_points(w1)
l2 = wire_points(w2)
p1 = MapSet.new(l1)
p2 = MapSet.new(l2)
MapSet.intersection(p1, p2)
|> Enum.map(fn loc -> combined_length(loc, l1, l2) end)
|> Enum.sort()
|> hd()
end
defp combined_length(loc, w1, w2) do
path_length(loc, w1, 0) + path_length(loc, w2, 0)
end
defp path_length({x, y}, [{x, y} | _rest], acc) do
acc + 1
end
defp path_length(loc, [_hd | rest], acc) do
path_length(loc, rest, acc+1)
end
end
|
apps/day03/lib/day03.ex
| 0.793266 | 0.691243 |
day03.ex
|
starcoder
|
defmodule Periodic do
@moduledoc """
Periodic job execution.
This module can be used when you need to periodically run some code in a
separate process.
To setup the job execution, you can include the child_spec in your supervision
tree. The childspec has the following shape:
```
{Periodic, run: mfa_or_zero_arity_lambda, every: interval}
```
For example:
```
Supervisor.start_link(
[{Periodic, run: {IO, :puts, ["Hello, World!"]}, every: :timer.seconds(1)}],
strategy: :one_for_one
)
Hello, World! # after one second
Hello, World! # after two seconds
...
```
## Multiple children under the same supervisor
You can start multiple periodic tasks under the same supervisor. However,
in this case you need to provide a unique id for each task, which is used as
the supervisor child id:
```
Supervisor.start_link(
[
{Periodic, id: :job1, run: {IO, :puts, ["Hi!"]}, every: :timer.seconds(1)},
{Periodic, id: :job2, run: {IO, :puts, ["Hello!"]}, every: :timer.seconds(2)}
],
strategy: :one_for_one
)
Hi!
Hello!
Hi!
Hi!
Hello!
...
```
## Overlapped execution
By default, jobs run overlapped. This means that a new job
instance will be started even if the previous one has not finished. If you want
to change that, you can pass the `overlap?: false` option.
## Disabling execution
If you pass `:infinity` as the `:every` value, the job will not be executed.
This can be useful to disable the job in some environments (e.g. in `:test`).
## Logging
By default, nothing is logged. You can, however, turn logging on with the `:log_level` and `:log_meta` options.
See the timeout example for usage.
## Timeout
You can also pass the :timeout option:
```
Supervisor.start_link(
[
{Periodic,
run: {Process, :sleep, [:infinity]}, every: :timer.seconds(1),
overlap?: false,
timeout: :timer.seconds(2),
log_level: :debug,
log_meta: [job_id: :my_job]
}
],
strategy: :one_for_one
)
job_id=my_job [debug] starting the job
job_id=my_job [debug] previous job still running, not starting another instance
job_id=my_job [debug] job failed with the reason `:timeout`
job_id=my_job [debug] starting the job
job_id=my_job [debug] previous job still running, not starting another instance
job_id=my_job [debug] job failed with the reason `:timeout`
...
```
## Shutdown
Since periodic executor is a plain supervisor child, shutting down is not
explicitly supported. If you want to stop the job, just take it down via its
supervisor, or shut down either of its ancestors.
"""
use Parent.GenServer
require Logger
@type opts :: [
every: duration,
run: job_spec,
overlap?: boolean,
timeout: duration,
log_level: nil | Logger.level(),
log_meta: Keyword.t()
]
@type duration :: pos_integer | :infinity
@type job_spec :: (() -> term) | {module, atom, [term]}
@doc "Starts the periodic executor."
@spec start_link(opts) :: GenServer.on_start()
def start_link(opts), do: Parent.GenServer.start_link(__MODULE__, Map.new(opts))
@doc "Builds a child specification for starting the periodic executor."
@spec child_spec(opts) :: Supervisor.child_spec()
def child_spec(opts) do
opts
|> super()
|> Supervisor.child_spec(id: Keyword.get(opts, :id, __MODULE__))
end
@impl GenServer
def init(opts) do
state = defaults() |> Map.merge(opts) |> Map.put(:timer, nil)
enqueue_next(state)
{:ok, state}
end
@impl GenServer
def handle_info(:run_job, state) do
maybe_start_job(state)
enqueue_next(state)
{:noreply, state}
end
@impl Parent.GenServer
def handle_child_terminated(_id, _meta, _pid, reason, state) do
case reason do
:normal -> log(state, "job finished")
_other -> log(state, "job failed with the reason `#{inspect(reason)}`")
end
{:noreply, state}
end
defp defaults(), do: %{overlap?: true, timeout: :infinity, log_level: nil, log_meta: []}
defp maybe_start_job(state) do
if state.overlap? == true or not job_running?() do
start_job(state)
else
log(state, "previous job still running, not starting another instance")
end
end
defp job_running?(), do: Parent.GenServer.child?(:job)
defp start_job(state) do
log(state, "starting the job")
id = if state.overlap?, do: make_ref(), else: :job
job = state.run
Parent.GenServer.start_child(%{
id: id,
start: {Task, :start_link, [fn -> invoke_job(job) end]},
timeout: state.timeout,
shutdown: :brutal_kill
})
end
defp invoke_job({mod, fun, args}), do: apply(mod, fun, args)
defp invoke_job(fun) when is_function(fun, 0), do: fun.()
defp enqueue_next(%{every: :infinity}), do: :ok
defp enqueue_next(state), do: Process.send_after(self(), :run_job, state.every)
defp log(state, message) do
if not is_nil(state.log_level), do: Logger.log(state.log_level, message, state.log_meta)
end
end
|
lib/periodic.ex
| 0.879768 | 0.884639 |
periodic.ex
|
starcoder
|
defmodule Oli.Utils.Database do
alias Oli.Repo
require Logger
@doc """
Explains the query plan for a given raw, string-based query. Options allow either logging
the result inline and returning the query, or just returning the analyzed result. Results can be in either
json, text, or yaml format. Default is to log output as JSON.
"""
def explain_sql(query, params, opts \\ []) when is_binary(query) do
opts = put_defaults(opts)
sql = "EXPLAIN (#{analyze_to_sql(opts[:analyze])}, #{format_to_sql(opts[:format])}) #{query}"
explain =
Repo
|> Ecto.Adapters.SQL.query!(sql, params)
if opts[:log_output] do
log_output(explain, opts[:format])
query
else
explain
end
end
@doc """
Explains the query plan for a given Ecto-based query. Options allow either logging
the result inline and returning the query, or just returning the analyzed result. Results can be in either
json, text, or yaml format. Default is to log output as JSON.
Use this inline for development on a query by query basis like:
```
from(s in Section,
join: spp in SectionsProjectsPublications,
on: s.id == spp.section_id,
join: pr in PublishedResource,
on: pr.publication_id == spp.publication_id,
join: rev in Revision,
on: rev.id == pr.revision_id,
where:
(rev.resource_type_id == ^page_id or rev.resource_type_id == ^container_id) and
s.slug == ^section_slug,
select: rev
)
|> Oli.Utils.Database.explain()
|> Repo.all()
```
"""
def explain(query, opts \\ []) do
opts = put_defaults(opts)
{sql, params} = Ecto.Adapters.SQL.to_sql(opts[:op], Repo, query)
sql = "EXPLAIN (#{analyze_to_sql(opts[:analyze])}, #{format_to_sql(opts[:format])}) #{sql}"
explain =
Repo
|> Ecto.Adapters.SQL.query!(sql, params)
if opts[:log_output] do
log_output(explain, opts[:format])
query
else
explain
end
end
@doc """
Logs as a warning the query and stacktrace of the caller if the query contains a sequential
table scan or if the query equals or exceeds a cost threshold.
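
For example, to warn on any query whose total plan cost is 1000 or more (the
threshold here is arbitrary):

    query |> Oli.Utils.Database.flag_problem_queries(1000)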
"""
def flag_problem_queries(query, cost_threshold) do
result = explain(query, log_output: false)
explanation =
Map.get(result, :rows)
|> List.first()
count = count_sequential(explanation)
cost = explanation |> hd |> hd |> Map.get("Plan") |> Map.get("Total Cost")
if count > 0 do
output_problematic(result, "A query with #{count} sequential scans was detected")
end
if cost >= cost_threshold do
output_problematic(result, "A query with #{cost} total compute cost was detected")
end
end
defp output_problematic(result, reason) do
trace =
try do
raise "Problematic Query"
rescue
_ -> Exception.format_stacktrace()
end
Logger.warn(reason)
Logger.warn(trace)
log_output(result, :json)
end
defp count_sequential(explanation) do
count_sequential(explanation, 0)
end
defp count_sequential(list, count) when is_list(list) do
Enum.reduce(list, count, fn item, c -> count_sequential(item, c) end)
end
defp count_sequential(%{"Plan" => %{"Plans" => plans}}, count) do
count_sequential(plans, count)
end
defp count_sequential(%{"Plan" => plan}, count) do
count_sequential(plan, count)
end
defp count_sequential(%{"Node Type" => "Seq Scan"}, count) do
count + 1
end
defp count_sequential(_, count) do
count
end
defp put_defaults(opts) do
opts
|> Keyword.put_new(:op, :all)
|> Keyword.put_new(:format, :json)
|> Keyword.put_new(:analyze, false)
|> Keyword.put_new(:log_output, true)
end
defp log_output(results, :text) do
results
|> Map.get(:rows)
|> Enum.join("\n")
|> Logger.warn()
end
defp log_output(results, :json) do
results
|> Map.get(:rows)
|> List.first()
|> Jason.encode!(pretty: true)
|> Logger.warn()
end
defp log_output(results, :yaml) do
results
|> Map.get(:rows)
|> List.first()
|> Logger.warn()
end
defp format_to_sql(:text), do: "FORMAT TEXT"
defp format_to_sql(:json), do: "FORMAT JSON"
defp format_to_sql(:yaml), do: "FORMAT YAML"
defp analyze_to_sql(true), do: "ANALYZE true"
defp analyze_to_sql(false), do: "ANALYZE false"
end
|
lib/oli/utils/database.ex
| 0.74512 | 0.755614 |
database.ex
|
starcoder
|
defmodule CTE.InMemory do
@moduledoc """
CT implementation using the memory adapter.
The good ol' friends Rolie, Olie and Polie, debating the usefulness of this implementation :)
You can watch them in action on: [youtube](https://www.youtube.com/watch?v=LTkmaE_QWMQ)
After seeding the data, we'll have this graph:
(1) Is Closure Table better than the Nested Sets?
├── (2) It depends. Do you need referential integrity?
│ └── (3) Yeah.
│ └── (7) Closure Table *has* referential integrity?
└── (4) Querying the data it's easier.
├── (5) What about inserting nodes?
└── (6) Everything is easier, than with the Nested Sets.
├── (8) I'm sold! And I'll use its Elixir implementation! <3
└── (9) w⦿‿⦿t!
"""
# %{comment_id => comment}
@comments %{
1 => %{id: 1, author: "Olie", comment: "Is Closure Table better than the Nested Sets?"},
2 => %{id: 2, author: "Rolie", comment: "It depends. Do you need referential integrity?"},
3 => %{id: 3, author: "Olie", comment: "Yeah."},
7 => %{id: 7, author: "Rolie", comment: "Closure Table *has* referential integrity?"},
4 => %{id: 4, author: "Polie", comment: "Querying the data it's easier."},
5 => %{id: 5, author: "Olie", comment: "What about inserting nodes?"},
6 => %{id: 6, author: "Rolie", comment: "Everything is easier, than with the Nested Sets."},
8 => %{
id: 8,
author: "Olie",
comment: "I'm sold! And I'll use its Elixir implementation! <3"
},
9 => %{id: 9, author: "Polie", comment: "w⦿‿⦿t!"},
281 => %{author: "Polie", comment: "Rolie is right!", id: 281}
}
# [[ancestor, descendant], [..., ...], ...]
@tree_paths []
@insert_list [
[1, 1],
[1, 2],
[2, 3],
[3, 7],
[1, 4],
[4, 5],
[4, 6],
[6, 8],
[6, 9]
]
# -1
# --2
# ---3
# ----7
# --4
# ---5
# ---6
# ----8
# ----9
use CTE,
otp_app: :closure_table,
adapter: CTE.Adapter.Memory,
nodes: @comments,
paths: @tree_paths
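@doc """
Inserts the `@insert_list` ancestor/leaf pairs so that the discussion tree
shown in the moduledoc exists, e.g. `CTE.InMemory.seed()`.
"""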
def seed do
@insert_list
|> Enum.each(fn [ancestor, leaf] -> insert(leaf, ancestor) end)
end
end
|
test/support/in_memory.ex
| 0.610453 | 0.587322 |
in_memory.ex
|
starcoder
|
defmodule Grizzly.DSK do
@moduledoc """
Module for working with the SmartStart and S2 DSKs
"""
@typedoc """
The DSK string is the string version of the DSK
The general format is `XXXXX-XXXXX-XXXXX-XXXXX-XXXXX-XXXXX-XXXXX-XXXXX`
That is 8 blocks of 16 bit integers separated by a dash.
An example of this would be `50285-18819-09924-30691-15973-33711-04005-03623`
"""
@type dsk_string :: String.t()
@typedoc """
The DSK binary is the elixir binary string form of the DSK
The format is `<<b1, b2, b3, ... b16>>`
That is 16 bytes.
An example of this would be:
```elixir
<<196, 109, 73, 131, 38, 196, 119, 227, 62, 101, 131, 175, 15, 165, 14, 39>>
```
"""
@type dsk_binary :: binary()
@doc """
Take a string representation of the DSK and change it into the
binary representation
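
## Example

Values taken from the type docs above:

    iex> Grizzly.DSK.string_to_binary("50285-18819-09924-30691-15973-33711-04005-03623")
    {:ok, <<196, 109, 73, 131, 38, 196, 119, 227, 62, 101, 131, 175, 15, 165, 14, 39>>}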
"""
@spec string_to_binary(dsk_string()) ::
{:ok, dsk_binary()} | {:error, :dsk_too_short | :dsk_too_long}
def string_to_binary(dsk_string) when byte_size(dsk_string) > 47, do: {:error, :dsk_too_long}
def string_to_binary(dsk_string) when byte_size(dsk_string) < 47, do: {:error, :dsk_too_short}
def string_to_binary(dsk_string) do
dsk_binary =
dsk_string
|> String.split("-")
|> Enum.map(&String.to_integer/1)
|> Enum.reduce(<<>>, fn dsk_number, binary ->
binary <> <<dsk_number::size(16)>>
end)
{:ok, dsk_binary}
end
@doc """
Take a binary representation of the DSK and change it into the
string representation
"""
@spec binary_to_string(dsk_binary()) ::
{:ok, dsk_string()} | {:error, :dsk_too_short | :dsk_too_long}
def binary_to_string(dsk_binary) when byte_size(dsk_binary) > 16, do: {:error, :dsk_too_long}
def binary_to_string(dsk_binary) when byte_size(dsk_binary) < 16, do: {:error, :dsk_too_short}
def binary_to_string(dsk_binary) do
dsk_string =
for(<<b::16 <- dsk_binary>>, do: b)
|> Enum.map(fn b -> String.slice("00000" <> "#{b}", -5, 5) end)
|> Enum.join("-")
{:ok, dsk_string}
end
end
|
lib/grizzly/dsk.ex
| 0.904251 | 0.822724 |
dsk.ex
|
starcoder
|
defmodule Commanded.Assertions.EventAssertions do
@moduledoc """
Provides test assertion and wait for event functions to help test applications
built using Commanded.
The default assert and refute receive timeouts are one second.
You can override the default timeout in config (e.g. `config/test.exs`):
config :commanded,
assert_receive_event_timeout: 1_000,
refute_receive_event_timeout: 1_000
"""
import ExUnit.Assertions
alias Commanded.EventStore
alias Commanded.EventStore.RecordedEvent
@doc """
Assert that events matching their respective predicates have a matching
correlation id.
Useful when there is a chain of events that is connected through event handlers.
## Example
assert_correlated(
BankApp,
BankAccountOpened, fn opened -> opened.id == 1 end,
InitialAmountDeposited, fn deposited -> deposited.id == 2 end
)
"""
def assert_correlated(application, event_type_a, predicate_a, event_type_b, predicate_b) do
assert_receive_event(application, event_type_a, predicate_a, fn _event_a, metadata_a ->
assert_receive_event(application, event_type_b, predicate_b, fn _event_b, metadata_b ->
assert metadata_a.correlation_id == metadata_b.correlation_id
end)
end)
end
@doc """
Assert that an event of the given event type is published.
Verify that event using the assertion function.
## Example
assert_receive_event(BankApp, BankAccountOpened, fn opened ->
assert opened.account_number == "ACC123"
end)
"""
def assert_receive_event(application, event_type, assertion_fn) do
assert_receive_event(application, event_type, fn _event -> true end, assertion_fn)
end
@doc """
Assert that an event of the given event type, matching the predicate, is
published. Verify that event using the assertion function.
## Example
assert_receive_event(BankApp, BankAccountOpened,
fn opened -> opened.account_number == "ACC123" end,
fn opened ->
assert opened.balance == 1_000
end)
"""
def assert_receive_event(application, event_type, predicate_fn, assertion_fn) do
unless Code.ensure_compiled?(event_type) do
raise ExUnit.AssertionError, "Event #{inspect(event_type)} not found"
end
with_subscription(application, fn subscription ->
do_assert_receive(application, subscription, event_type, predicate_fn, assertion_fn)
end)
end
@doc """
Refute that an event of the given type has been received.
An optional predicate may be provided to filter events matching the refuted
type.
## Examples
Refute that `ExampleEvent` is created by `some_func/0` function:
refute_receive_event(ExampleApp, ExampleEvent) do
some_func()
end
Refute that `ExampleEvent` matching given predicate is created by
`some_func/0` function:
refute_receive_event(ExampleApp, ExampleEvent,
predicate: fn event -> event.foo == :foo end) do
some_func()
end
"""
defmacro refute_receive_event(application, event_type, opts \\ [], do: block) do
predicate = Keyword.get(opts, :predicate)
timeout = Keyword.get(opts, :timeout, default_refute_receive_timeout())
quote do
task =
Task.async(fn ->
with_subscription(unquote(application), fn subscription ->
predicate = unquote(predicate) || fn _event -> true end
do_refute_receive_event(
unquote(application),
subscription,
unquote(event_type),
predicate
)
end)
end)
unquote(block)
case Task.yield(task, unquote(timeout)) || Task.shutdown(task) do
{:ok, :ok} -> :ok
{:ok, {:error, event}} -> flunk("Unexpectedly received event: " <> inspect(event))
{:error, error} -> flunk("Encountered an error: " <> inspect(error))
{:exit, error} -> flunk("Encountered an error: " <> inspect(error))
nil -> :ok
end
end
end
@doc """
Wait for an event of the given event type to be published.
## Examples
wait_for_event(BankApp, BankAccountOpened)
"""
def wait_for_event(application, event_type) do
wait_for_event(application, event_type, fn _event -> true end)
end
@doc """
Wait for an event of the given event type, matching the predicate, to be
published.
## Examples
wait_for_event(BankApp, BankAccountOpened, fn opened ->
opened.account_number == "ACC123"
end)
"""
def wait_for_event(application, event_type, predicate_fn) when is_function(predicate_fn) do
with_subscription(application, fn subscription ->
do_wait_for_event(application, subscription, event_type, predicate_fn)
end)
end
@doc false
def with_subscription(application, callback_fun) when is_function(callback_fun, 1) do
subscription_name = UUID.uuid4()
{:ok, subscription} =
EventStore.subscribe_to(application, :all, subscription_name, self(), :origin)
assert_receive {:subscribed, ^subscription}, default_receive_timeout()
try do
apply(callback_fun, [subscription])
after
:ok = EventStore.unsubscribe(application, subscription)
:ok = EventStore.delete_subscription(application, :all, subscription_name)
end
end
defp do_assert_receive(application, subscription, event_type, predicate_fn, assertion_fn) do
assert_receive {:events, received_events}, default_receive_timeout()
case find_expected_event(received_events, event_type, predicate_fn) do
%RecordedEvent{data: data} = expected_event ->
case assertion_fn do
assertion_fn when is_function(assertion_fn, 1) ->
apply(assertion_fn, [data])
assertion_fn when is_function(assertion_fn, 2) ->
apply(assertion_fn, [data, expected_event])
end
nil ->
:ok = ack_events(application, subscription, received_events)
do_assert_receive(application, subscription, event_type, predicate_fn, assertion_fn)
end
end
def do_refute_receive_event(application, subscription, event_type, predicate_fn) do
receive do
{:events, events} ->
case find_expected_event(events, event_type, predicate_fn) do
%RecordedEvent{data: data} ->
{:error, data}
nil ->
:ok = ack_events(application, subscription, events)
do_refute_receive_event(application, subscription, event_type, predicate_fn)
end
end
end
defp do_wait_for_event(application, subscription, event_type, predicate_fn) do
assert_receive {:events, received_events}, default_receive_timeout()
case find_expected_event(received_events, event_type, predicate_fn) do
%RecordedEvent{} = expected_event ->
expected_event
nil ->
:ok = ack_events(application, subscription, received_events)
do_wait_for_event(application, subscription, event_type, predicate_fn)
end
end
defp find_expected_event(received_events, event_type, predicate_fn) do
Enum.find(received_events, fn
%RecordedEvent{data: %{__struct__: ^event_type} = data}
when is_function(predicate_fn, 1) ->
apply(predicate_fn, [data])
%RecordedEvent{data: %{__struct__: ^event_type} = data} = received_event
when is_function(predicate_fn, 2) ->
apply(predicate_fn, [data, received_event])
%RecordedEvent{} ->
false
end)
end
defp ack_events(_application, _subscription, []), do: :ok
defp ack_events(application, subscription, [event]),
do: EventStore.ack_event(application, subscription, event)
defp ack_events(application, subscription, [_event | events]),
do: ack_events(application, subscription, events)
defp default_receive_timeout,
do: Application.get_env(:commanded, :assert_receive_event_timeout, 1_000)
defp default_refute_receive_timeout,
do: Application.get_env(:commanded, :refute_receive_event_timeout, 1_000)
end
|
lib/commanded/assertions/event_assertions.ex
| 0.910253 | 0.789741 |
event_assertions.ex
|
starcoder
|
defmodule InfluxDB do
@moduledoc """
Main interface to query and insert data into InfluxDB.
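
Illustrative usage (the config is built per `InfluxDB.Config`; the measurement
and tag names here are made up):

    :ok = InfluxDB.write(config, [{"cpu", %{host: "web-1"}, %{load: 0.92}}])
    {:ok, results} = InfluxDB.query(config, "SELECT * FROM cpu")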
"""
alias InfluxDB.Config
@type config :: Config.t
@type time_unit :: :hour | :minute | :second | :millisecond | :microsecond | :nanosecond
@doc """
Send a query to InfluxDB and return the result.
In case of success, it will return either:
* `:ok`, when the query doesn't have any result set
* `{:ok, [result]}`, where result is a list of series, and a series is a
map containing the `:name`, `:columns`, `:rows` and `:tags` keys.
In case of error, it will return either:
* `{:error, {:not_found, description}}}`, when the series being queried could not be found
* `{:error, {:server_error, description}}}`, when a server error occurs
"""
@spec query(config, query, query_parameters, query_options) ::
:ok
| {:ok, [result]}
| {:error, {:not_found, charlist}}
| {:error, {:server_error, charlist}}
@type query :: iodata
@type query_parameters :: %{
optional(atom) => atom | String.t | number
}
@type query_options :: %{
optional(:timeout) => timeout,
optional(:precision) => time_unit,
optional(:retention_policy) => String.t
}
@type result :: [series]
@type series :: %{
required(:name) => String.t,
required(:columns) => [String.t],
required(:rows) => [tuple],
optional(:tags) => %{
optional(String.t) => String.t
}
}
defdelegate query(config, query, params \\ %{}, options \\ %{}), to: :influxdb
@doc """
Write one or more points to InfluxDB.
In case of success, it will return either:
* `:ok`, when the write completes successfully
In case of error, it will return either:
* `{:error, {:not_found, description}}}`, when the database could not be found
* `{:error, {:server_error, description}}}`, when a server error occurs
"""
@spec write(config, points, write_options) ::
:ok
| {:error, {:not_found, charlist}}
| {:error, {:server_error, charlist}}
@type points :: [point]
@type point :: {measurement, tags, fields, timestamp} | {measurement, tags, fields}
@type measurement :: key
@type tags :: %{ optional(key) => iodata | atom }
@type fields :: %{ optional(key) => number | boolean | iodata | atom }
@type key :: iodata | atom
@type timestamp :: integer
@type write_options :: %{
optional(:timeout) => timeout,
optional(:precision) => time_unit,
optional(:retention_policy) => String.t
}
defdelegate write(config, points, options \\ %{}), to: :influxdb
end
|
lib/influxdb.ex
| 0.859649 | 0.605041 |
influxdb.ex
|
starcoder
|
defmodule ARP.Account.Promise do
@moduledoc false
alias ARP.{Config, Crypto, Utils}
use GenServer
defstruct [:cid, :from, :to, :amount, :sign, :paid]
def create(private_key, cid, from, to, amount, paid \\ 0) do
decoded_from = from |> String.slice(2..-1) |> Base.decode16!(case: :mixed)
decoded_to = to |> String.slice(2..-1) |> Base.decode16!(case: :mixed)
data =
<<cid::size(256), decoded_from::binary-size(20), decoded_to::binary-size(20),
amount::size(256)>>
sign = Crypto.eth_sign(data, private_key)
%__MODULE__{
cid: cid,
from: from,
to: to,
amount: amount,
sign: sign,
paid: paid
}
end
def verify(%__MODULE__{cid: cid} = p, from, to) do
cid = if(is_binary(cid), do: Utils.decode_hex(cid), else: cid)
verify(p, cid, from, to)
end
def verify(%__MODULE__{cid: c} = p, cid, from, to) when is_binary(c) do
verify(decode(p), cid, from, to)
end
def verify(%__MODULE__{cid: c} = p, cid, from, to) when is_integer(c) do
if p.cid && p.from && p.to && p.amount && p.sign && p.cid == cid && p.from == from &&
p.to == to do
from_binary = p.from |> String.slice(2..-1) |> Base.decode16!(case: :mixed)
to_binary = p.to |> String.slice(2..-1) |> Base.decode16!(case: :mixed)
encode = <<p.cid::size(256), from_binary::binary, to_binary::binary, p.amount::size(256)>>
with {:ok, recover_addr} <- Crypto.eth_recover(encode, p.sign) do
recover_addr == from
else
_ -> false
end
else
false
end
rescue
_ ->
false
end
def encode(%__MODULE__{} = promise) do
%__MODULE__{
cid: promise.cid |> Utils.encode_integer(),
from: promise.from,
to: promise.to,
amount: promise.amount |> Utils.encode_integer(),
sign: promise.sign,
paid: (promise.paid || 0) |> Utils.encode_integer()
}
end
def decode(%__MODULE__{} = promise) do
%__MODULE__{
cid: promise.cid |> Utils.decode_hex(),
from: promise.from,
to: promise.to,
amount: promise.amount |> Utils.decode_hex(),
sign: promise.sign,
paid: (promise.paid || "0x0") |> Utils.decode_hex()
}
end
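# Round-trip sketch (illustrative): a promise is hex-encoded before being
# persisted or sent over the wire, and decoded back before any arithmetic:
#   promise |> ARP.Account.Promise.encode() |> ARP.Account.Promise.decode()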
@doc """
{{:device, address}, promise}
{{:dapp, address}, promise}
"""
def start_link(_opts) do
GenServer.start_link(__MODULE__, [], name: __MODULE__)
end
def get_device(address) do
get(:device, address)
end
def get_dapp(address) do
get(:dapp, address)
end
def get(type, address) do
case :ets.lookup(__MODULE__, {type, address}) do
[{_, value}] -> value
[] -> nil
end
end
def get_all_device do
get_all(:device)
end
def get_all_dapp do
get_all(:dapp)
end
def get_all(type) do
# :ets.fun2ms(fn {{type, address}, promise} when type == t -> {address, promise} end)
match = [{{{:"$1", :"$2"}, :"$3"}, [{:==, :"$1", {:const, type}}], [{{:"$2", :"$3"}}]}]
:ets.select(__MODULE__, match)
end
def set_device(address, promise) do
set(:device, address, promise)
end
def set_dapp(address, promise) do
set(:dapp, address, promise)
end
def set(type, address, promise) do
:ets.insert(__MODULE__, {{type, address}, promise})
GenServer.cast(__MODULE__, :write)
end
def delete_device(address) do
delete(:device, address)
end
def delete_dapp(address) do
delete(:dapp, address)
end
def delete(type, address) do
:ets.delete(__MODULE__, {type, address})
GenServer.cast(__MODULE__, :write)
end
# Callbacks
def init(_opts) do
with {:error, _} <- :ets.file2tab(file_path(), verify: true) do
:ets.new(__MODULE__, [
:named_table,
:public,
read_concurrency: true,
write_concurrency: true
])
GenServer.cast(__MODULE__, :write)
end
{:ok, %{}}
end
def handle_cast(:write, state) do
:ets.tab2file(__MODULE__, file_path(), extended_info: [:md5sum], sync: true)
{:noreply, state}
end
defp file_path do
Config.get(:data_path)
|> Path.join("promise")
|> String.to_charlist()
end
end
|
lib/arp_server/account/promise.ex
| 0.702428 | 0.400075 |
promise.ex
|
starcoder
|
defmodule AWS.Textract do
@moduledoc """
Amazon Textract detects and analyzes text in documents and converts it into
machine-readable text.
This is the API reference documentation for Amazon Textract.
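
Illustrative call (assumes the `aws-elixir` client helpers; the bucket and
file names are placeholders):

    client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")
    input = %{"Document" => %{"S3Object" => %{"Bucket" => "my-bucket", "Name" => "doc.png"}}}
    {:ok, result, _response} = AWS.Textract.detect_document_text(client, input)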
"""
@doc """
Analyzes an input document for relationships between detected items.
The types of information returned are as follows:
* Form data (key-value pairs). The related information is returned
in two `Block` objects, each of type `KEY_VALUE_SET`: a KEY `Block` object and a
VALUE `Block` object. For example, *Name: <NAME>* contains a key and
value. *Name:* is the key. *<NAME>* is the value.
* Table and table cell data. A TABLE `Block` object contains
information about a detected table. A CELL `Block` object is returned for each
cell in a table.
* Lines and words of text. A LINE `Block` object contains one or
more WORD `Block` objects. All lines and words that are detected in the document
are returned (including text that doesn't have a relationship with the value of
`FeatureTypes`).
Selection elements such as check boxes and option buttons (radio buttons) can be
detected in form data and in tables. A SELECTION_ELEMENT `Block` object contains
information about a selection element, including the selection status.
You can choose which type of analysis to perform by specifying the
`FeatureTypes` list.
The output is returned in a list of `Block` objects.
`AnalyzeDocument` is a synchronous operation. To analyze documents
asynchronously, use `StartDocumentAnalysis`.
For more information, see [Document Text Analysis](https://docs.aws.amazon.com/textract/latest/dg/how-it-works-analyzing.html).
"""
def analyze_document(client, input, options \\ []) do
request(client, "AnalyzeDocument", input, options)
end
@doc """
Detects text in the input document.
Amazon Textract can detect lines of text and the words that make up a line of
text. The input document must be an image in JPEG or PNG format.
`DetectDocumentText` returns the detected text in an array of `Block` objects.
Each document page has as an associated `Block` of type PAGE. Each PAGE `Block`
object is the parent of LINE `Block` objects that represent the lines of
detected text on a page. A LINE `Block` object is a parent for each word that
makes up the line. Words are represented by `Block` objects of type WORD.
`DetectDocumentText` is a synchronous operation. To analyze documents
asynchronously, use `StartDocumentTextDetection`.
For more information, see [Document Text Detection](https://docs.aws.amazon.com/textract/latest/dg/how-it-works-detecting.html).
"""
def detect_document_text(client, input, options \\ []) do
request(client, "DetectDocumentText", input, options)
end
@doc """
Gets the results for an Amazon Textract asynchronous operation that analyzes
text in a document.
You start asynchronous text analysis by calling `StartDocumentAnalysis`, which
returns a job identifier (`JobId`). When the text analysis operation finishes,
Amazon Textract publishes a completion status to the Amazon Simple Notification
Service (Amazon SNS) topic that's registered in the initial call to
`StartDocumentAnalysis`. To get the results of the text-detection operation,
first check that the status value published to the Amazon SNS topic is
`SUCCEEDED`. If so, call `GetDocumentAnalysis`, and pass the job identifier
(`JobId`) from the initial call to `StartDocumentAnalysis`.
`GetDocumentAnalysis` returns an array of `Block` objects. The following types
of information are returned:
* Form data (key-value pairs). The related information is returned
in two `Block` objects, each of type `KEY_VALUE_SET`: a KEY `Block` object and a
VALUE `Block` object. For example, *Name: <NAME>* contains a key and
value. *Name:* is the key. *<NAME>* is the value.
* Table and table cell data. A TABLE `Block` object contains
information about a detected table. A CELL `Block` object is returned for each
cell in a table.
* Lines and words of text. A LINE `Block` object contains one or
more WORD `Block` objects. All lines and words that are detected in the document
are returned (including text that doesn't have a relationship with the value of
the `StartDocumentAnalysis` `FeatureTypes` input parameter).
Selection elements such as check boxes and option buttons (radio buttons) can be
detected in form data and in tables. A SELECTION_ELEMENT `Block` object contains
information about a selection element, including the selection status.
Use the `MaxResults` parameter to limit the number of blocks that are returned.
If there are more results than specified in `MaxResults`, the value of
`NextToken` in the operation response contains a pagination token for getting
the next set of results. To get the next page of results, call
`GetDocumentAnalysis`, and populate the `NextToken` request parameter with the
token value that's returned from the previous call to `GetDocumentAnalysis`.
For more information, see [Document Text Analysis](https://docs.aws.amazon.com/textract/latest/dg/how-it-works-analyzing.html).
"""
def get_document_analysis(client, input, options \\ []) do
request(client, "GetDocumentAnalysis", input, options)
end
@doc """
Gets the results for an Amazon Textract asynchronous operation that detects text
in a document.
Amazon Textract can detect lines of text and the words that make up a line of
text.
You start asynchronous text detection by calling `StartDocumentTextDetection`,
which returns a job identifier (`JobId`). When the text detection operation
finishes, Amazon Textract publishes a completion status to the Amazon Simple
Notification Service (Amazon SNS) topic that's registered in the initial call to
`StartDocumentTextDetection`. To get the results of the text-detection
operation, first check that the status value published to the Amazon SNS topic
is `SUCCEEDED`. If so, call `GetDocumentTextDetection`, and pass the job
identifier (`JobId`) from the initial call to `StartDocumentTextDetection`.
`GetDocumentTextDetection` returns an array of `Block` objects.
Each document page has as an associated `Block` of type PAGE. Each PAGE `Block`
object is the parent of LINE `Block` objects that represent the lines of
detected text on a page. A LINE `Block` object is a parent for each word that
makes up the line. Words are represented by `Block` objects of type WORD.
Use the MaxResults parameter to limit the number of blocks that are returned. If
there are more results than specified in `MaxResults`, the value of `NextToken`
in the operation response contains a pagination token for getting the next set
of results. To get the next page of results, call `GetDocumentTextDetection`,
and populate the `NextToken` request parameter with the token value that's
returned from the previous call to `GetDocumentTextDetection`.
For more information, see [Document Text Detection](https://docs.aws.amazon.com/textract/latest/dg/how-it-works-detecting.html).
"""
def get_document_text_detection(client, input, options \\ []) do
request(client, "GetDocumentTextDetection", input, options)
end
@doc """
Starts the asynchronous analysis of an input document for relationships between
detected items such as key-value pairs, tables, and selection elements.
`StartDocumentAnalysis` can analyze text in documents that are in JPEG, PNG, and
PDF format. The documents are stored in an Amazon S3 bucket. Use
`DocumentLocation` to specify the bucket name and file name of the document.
`StartDocumentAnalysis` returns a job identifier (`JobId`) that you use to get
the results of the operation. When text analysis is finished, Amazon Textract
publishes a completion status to the Amazon Simple Notification Service (Amazon
SNS) topic that you specify in `NotificationChannel`. To get the results of the
text analysis operation, first check that the status value published to the
Amazon SNS topic is `SUCCEEDED`. If so, call `GetDocumentAnalysis`, and pass the
job identifier (`JobId`) from the initial call to `StartDocumentAnalysis`.
For more information, see [Document Text Analysis](https://docs.aws.amazon.com/textract/latest/dg/how-it-works-analyzing.html).
"""
def start_document_analysis(client, input, options \\ []) do
request(client, "StartDocumentAnalysis", input, options)
end
@doc """
Starts the asynchronous detection of text in a document.
Amazon Textract can detect lines of text and the words that make up a line of
text.
`StartDocumentTextDetection` can analyze text in documents that are in JPEG,
PNG, and PDF format. The documents are stored in an Amazon S3 bucket. Use
`DocumentLocation` to specify the bucket name and file name of the document.
`StartTextDetection` returns a job identifier (`JobId`) that you use to get the
results of the operation. When text detection is finished, Amazon Textract
publishes a completion status to the Amazon Simple Notification Service (Amazon
SNS) topic that you specify in `NotificationChannel`. To get the results of the
text detection operation, first check that the status value published to the
Amazon SNS topic is `SUCCEEDED`. If so, call `GetDocumentTextDetection`, and
pass the job identifier (`JobId`) from the initial call to
`StartDocumentTextDetection`.
For more information, see [Document Text Detection](https://docs.aws.amazon.com/textract/latest/dg/how-it-works-detecting.html).
"""
def start_document_text_detection(client, input, options \\ []) do
request(client, "StartDocumentTextDetection", input, options)
end
@spec request(AWS.Client.t(), binary(), map(), list()) ::
{:ok, map() | nil, map()}
| {:error, term()}
defp request(client, action, input, options) do
client = %{client | service: "textract"}
host = build_host("textract", client)
url = build_url(host, client)
headers = [
{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "Textract.#{action}"}
]
payload = encode!(client, input)
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
post(client, url, payload, headers, options)
end
defp post(client, url, payload, headers, options) do
case AWS.Client.request(client, :post, url, payload, headers, options) do
{:ok, %{status_code: 200, body: body} = response} ->
body = if body != "", do: decode!(client, body)
{:ok, body, response}
{:ok, response} ->
{:error, {:unexpected_response, response}}
error = {:error, _reason} -> error
end
end
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
endpoint
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
defp encode!(client, payload) do
AWS.Client.encode!(client, payload, :json)
end
defp decode!(client, payload) do
AWS.Client.decode!(client, payload, :json)
end
end
|
lib/aws/generated/textract.ex
| 0.920057 | 0.818809 |
textract.ex
|
starcoder
|
defmodule Map do
@moduledoc """
A Dict implementation that works on maps.
Maps are key-value stores where keys are compared using
the match operator (`===`). Maps can be created with
the `%{}` special form defined in the `Kernel.SpecialForms`
module.
For more information about the functions in this module and
their APIs, please consult the `Dict` module.
"""
use Dict.Behaviour
defdelegate [keys(map), values(map), size(map), merge(map1, map2)], to: :maps
@doc """
Returns a new empty map.
"""
def new, do: %{}
@doc """
Creates a new map from the given pairs.
## Examples
iex> Map.new [{:b, 1}, {:a, 2}]
%{a: 2, b: 1}
"""
def new(pairs) do
:maps.from_list pairs
end
@doc """
Creates a new map from the given pairs
via the given transformation function.
## Examples
iex> Map.new ["a", "b"], fn x -> {x, x} end
%{"a" => "a", "b" => "b"}
"""
def new(list, transform) when is_function(transform) do
Enum.map(list, transform) |> :maps.from_list
end
def has_key?(map, key), do: :maps.is_key(key, map)
def fetch(map, key), do: :maps.find(key, map)
def pop(map, key, default \\ nil) do
{ get(map, key, default), delete(map, key) }
end
def put(map, key, val) do
:maps.put(key, val, map)
end
def put_new(map, key, val) do
case has_key?(map, key) do
true -> map
false -> :maps.put(key, val, map)
end
end
def delete(map, key), do: :maps.remove(key, map)
def merge(map1, map2, callback) do
:maps.fold fn k, v2, acc ->
update(acc, k, v2, fn(v1) -> callback.(k, v1, v2) end)
end, map1, map2
end
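@doc """
Splits the map into two maps, the first containing the given keys and the
second containing the remaining keys.

## Examples

    iex> Map.split(%{a: 1, b: 2, c: 3}, [:a, :c])
    { %{a: 1, c: 3}, %{b: 2} }

"""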
def split(map, keys) do
acc = { %{}, %{} }
:maps.fold fn k, v, { take, drop } ->
if k in keys do
{ put(take, k, v), drop }
else
{ take, put(drop, k, v) }
end
end, acc, map
end
def update!(map, key, fun) do
case :maps.find(key, map) do
:error ->
raise(KeyError, key: key, term: map)
{ :ok, val } ->
:maps.put(key, fun.(val), map)
end
end
def update(map, key, initial, fun) do
case :maps.find(key, map) do
:error ->
:maps.put(key, initial, map)
{ :ok, val } ->
:maps.put(key, fun.(val), map)
end
end
def empty(_) do
IO.write :stderr, "Map.empty/1 is deprecated, please use Collectable.empty/1 instead\n#{Exception.format_stacktrace}"
%{}
end
def equal?(map, map), do: true
def equal?(_, _), do: false
def to_list(map), do: :maps.to_list map
end
|
lib/elixir/lib/map.ex
| 0.821438 | 0.674446 |
map.ex
|
starcoder
|
defmodule Pie.Pipeline do
@moduledoc """
Pipeline handling.
A pipeline consists of a state and several steps. When executed, the updated
state will be passed to each step, in order, until all of them are executed.
At the end, the final version of the state will be evaluated and returned.
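
Illustrative usage (the step function's exact signature is defined by
`Pie.Pipeline.Step`, which is not shown here):

    pipeline =
      Pie.Pipeline.new(input, track_steps: true)
      |> Pie.Pipeline.step(step_fun)

    case Pie.Pipeline.run(pipeline) do
      {:ok, result} -> result
      {:error, failed_pipeline} -> failed_pipeline
    end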
"""
defstruct step_queue: :queue.new(),
executed_steps: [],
state: nil,
executed?: false,
track_steps?: false,
capture_errors?: false
alias Pie.State
alias Pie.Pipeline.Step
@typedoc """
A struct to hold information about the pipeline
"""
@type t :: %__MODULE__{
step_queue: :queue.queue(Step.t()),
executed_steps: [Step.t()],
state: State.t(),
executed?: boolean(),
track_steps?: boolean(),
capture_errors?: boolean()
}
@typedoc """
The result of a pipeline execution
"""
@type result :: {:ok, any()} | {:error, t()}
@doc """
Creates a new pipeline with an empty state
"""
@spec new(input :: any(), options :: Keyword.t()) :: t()
def new(input, options \\ []) do
%__MODULE__{
state: State.new(input, options),
step_queue: :queue.new(),
track_steps?: options[:track_steps] == true,
capture_errors?: options[:capture_errors] == true
}
end
@doc """
Adds a step into the pipeline queue.
"""
@spec step(pipeline :: t(), step_fun :: fun(), context :: any(), options :: Keyword.t()) :: t()
def step(pipeline = %__MODULE__{}, step_fun, context \\ nil, options \\ [])
when is_function(step_fun) do
steps =
step_fun
|> Step.new(context, options)
|> :queue.in(pipeline.step_queue)
%__MODULE__{pipeline | step_queue: steps}
end
@doc """
Executes a pipeline and evaluates its state after all steps were applied
"""
@spec run(t()) :: State.result()
def run(pipeline = %__MODULE__{}) do
pipeline = execute_steps(pipeline)
case State.eval(pipeline.state) do
result = {:ok, _result} ->
result
_error ->
{:error, pipeline}
end
end
defp execute_steps(pipeline) do
case :queue.out(pipeline.step_queue) do
{{:value, step}, queue} ->
{updated_step, updated_state} =
Step.execute(step, pipeline.state, pipeline.capture_errors?)
steps = if pipeline.track_steps?, do: [updated_step | pipeline.executed_steps], else: []
updated_pipeline = %__MODULE__{
pipeline
| step_queue: queue,
executed_steps: steps,
state: updated_state
}
execute_steps(updated_pipeline)
{:empty, _queue} ->
%__MODULE__{pipeline | executed?: true}
end
end
end
|
lib/pie/pipeline.ex
| 0.890844 | 0.697686 |
pipeline.ex
|
starcoder
|
defmodule GGity.Plot do
@moduledoc """
Configures and generates an iolist representing an SVG plot.
The Plot module is GGity's public interface. A Plot struct is created
with `new/3`, specifying the data and aesthetic mappings to be used,
along with options associated with the plot's general appearance.
Data must be provided as a list of maps, where each map in the list
represents an observation, and the map's keys represent variable names.
**GGity does not perform any validation of the data**; data is assumed to be
clean and not to have missing values.
```
Examples.mtcars()
|> Plot.new(%{x: :wt, y: :mpg})
```
Mappings are specified using maps, where the map's keys are the names
of supported aesthetics, and the values are the names of variables in
the data.
```
Examples.mtcars()
|> Plot.new(%{x: :wt, y: :mpg})
|> Plot.geom_point()
```
A plot layer (represented as a struct that implements the `GGity.Geom` protocol)
is added to the plot using functions such as `geom_point/3` or `geom_line/3`.
As layers are assembled into a plot, the scales for each aesthetic are calculated
using the data assigned to each aesthetic in each layer. Scales generate functions
that transform data into an aesthetic value (e.g, an x coordinate or a color) and
functions that transform an aesthetic value back into an observation (for the
purpose of drawing axes or legends).
The plot will assign default scales based on the type of data assigned to each
aesthetic in each layer (by examining the value in the first row of the data),
typically mapping numerical data to a continuous scale (if available) and binary
data to a discrete scale. These assignments can be overridden by passing the Plot
struct to a scale-setting function, e.g. `scale_[scale_type]/2`. For `x` values
only, GGity will assign a date/datetime scale if the data mapped to the `:x`
aesthetic is a `Date`, `DateTime` or `NaiveDateTime` struct.
```
Examples.mtcars()
|> Plot.new(%{x: :wt, y: :mpg})
|> Plot.geom_point()
|> Plot.plot()
```
`plot/1` generates an iolist that represents the plot. None of the data
is sanitized, so users will need to be mindful of the risks of generating
plots using user-supplied data or parameters.
"""
alias __MODULE__
alias GGity.{Axis, Draw, Geom, Layer, Legend, Scale, Stat, Theme}
@type t() :: %__MODULE__{}
@type column() :: list()
@type name() :: binary() | atom()
@type record() :: map()
@type mapping() :: map()
@type options() :: keyword()
@continuous_scales [
Scale.Alpha.Continuous,
Scale.Size.Continuous,
Scale.X.Continuous,
Scale.X.Date,
Scale.X.DateTime,
Scale.Y.Continuous
]
defstruct data: [],
mapping: %{},
aspect_ratio: 1.5,
title_margin: 15,
layers: [%Geom.Blank{}],
scales: %{},
limits: %{x: {nil, nil}, y: {nil, nil}},
labels: %{title: nil, x: nil, y: nil},
y_label_padding: 20,
breaks: 5,
area_padding: 10,
theme: %Theme{},
margins: %{left: 30, top: 5, right: 0, bottom: 0},
width: 200,
combined_layers: []
@doc """
Generates a Plot struct with provided data and aesthetic mappings.
`data` must be passed in the form of a list of maps, where each map represents
an observation or record, the map keys are variable names, and the map values
represent the measurement of that variable for that observation.
Mappings tie variables to aesthetics, i.e. visual characteristics of the plot.
A mapping is specified using a map, with key-value pairs representing the assignment of
variables to available aesthetics. Mappings passed to `new/3` must include key-value
pairs for the `:x` aesthetic and the `:y` aesthetic.
`new/3` also supports several options that shortcut plot creation or alter the
appearance of the plot. All graphical size units are in pixels.
* `:area_padding` - amount of blank space before the first tick and after the last
tick on each axis (same value applied to both axes) defaults to `10`.
* `:aspect_ratio` - the ratio of the plot area height to `:width`. Defaults to `1.5.`
* `:breaks` - the number of tick intervals on the x- and y axis (same value applied
to both axes). This may be adjusted by the scale function based on the data. Defaults to `5`.
* `:labels` - a map specifying the titles of the plot (`:title`), x and y-axes
(`:x` and `:y`) or legend title for another aesthetic (e.g. `:color`). A `nil` value indicates
no label. Defaults to `%{title: nil, x: nil, y: nil}`.
* `:margins` - a map with keys `:left`, `:top`, `:right` and `:bottom`, specifying the
plot margins. Default is `%{left: 30, top: 5, right: 0, bottom: 0}`.
* `:panel_background_color` - a string value (hex or CSS color name) for the panel background.
Defaults to grey (`#eeeeee`)
* `:y_label_padding` - vertical distance between the y axis and its label. Defaults to `20`.
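For example (the option values below are arbitrary):

```
Examples.mtcars()
|> Plot.new(%{x: :wt, y: :mpg}, width: 300, aspect_ratio: 1.2, breaks: 4)
```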
"""
@spec new(list(record()), mapping(), keyword()) :: Plot.t()
def new([first_row | _rest] = data, mapping \\ %{}, options \\ []) do
scales = assign_scales(mapping, first_row)
Plot
|> struct(options)
|> struct(data: data, mapping: mapping, scales: scales)
end
defp assign_scales(mapping, record) do
mapping
|> Map.keys()
|> Enum.reduce(%{}, fn aesthetic, scale_map ->
Map.put(scale_map, aesthetic, assign_scale(aesthetic, record[mapping[aesthetic]]))
end)
end
defp assign_scale(:alpha, value) when is_number(value) do
Scale.Alpha.Continuous.new()
end
defp assign_scale(:alpha, _value), do: Scale.Alpha.Discrete.new()
defp assign_scale(:color, _value), do: Scale.Color.Viridis.new()
defp assign_scale(:fill, _value), do: Scale.Fill.Viridis.new()
defp assign_scale(:linetype, _value), do: Scale.Linetype.Discrete.new()
defp assign_scale(:shape, _value), do: Scale.Shape.new()
defp assign_scale(:size, value) when is_number(value) do
Scale.Size.Continuous.new()
end
defp assign_scale(:size, _value), do: Scale.Size.Discrete.new()
defp assign_scale(:x, %Date{}), do: Scale.X.Date.new()
defp assign_scale(:x, %DateTime{}), do: Scale.X.DateTime.new()
defp assign_scale(:x, value) when is_number(value) do
Scale.X.Continuous.new()
end
defp assign_scale(:x, _value), do: Scale.X.Discrete.new()
defp assign_scale(:y, _value), do: Scale.Y.Continuous.new()
defp assign_scale(:y_max, _value), do: Scale.Y.Continuous.new()
defp assign_scale(other, _value), do: Scale.Identity.new(other)
@doc """
Generates an iolist of SVG markup representing a `Plot`.
The data is not sanitized; users should be mindful of the risks of generating a plot
with user-defined data and parameters.
The `Plot` struct's `:width` and `:aspect_ratio` values are used to set the height
and width properties of the SVG. The viewBox property is set by the plot's `:width` and
`:aspect_ratio` values.
"""
@spec plot(Plot.t()) :: iolist()
def plot(%Plot{} = plot) do
plot
|> map_aesthetics()
|> apply_stats()
|> provide_default_axis_labels()
|> train_scales()
|> render()
end
defp map_aesthetics(%Plot{} = plot) do
layers =
Enum.map(plot.layers, fn layer ->
struct(layer, mapping: Map.merge(plot.mapping, layer.mapping || %{}))
end)
struct(plot, combined_layers: layers)
end
defp apply_stats(%Plot{} = plot) do
layers =
Enum.map(plot.combined_layers, fn layer ->
{data, mapping} = apply(Stat, layer.stat, [layer.data || plot.data, layer.mapping])
struct(layer, data: data, mapping: mapping)
end)
struct(plot, combined_layers: layers)
end
defp provide_default_axis_labels(%Plot{} = plot) do
[x_label | y_labels] =
plot.combined_layers
|> hd()
|> Map.get(:mapping)
|> Map.take([:x, :y, :y_max])
|> Map.values()
labels =
Map.merge(plot.labels, %{x: plot.labels.x || x_label, y: plot.labels.y || hd(y_labels)})
struct(plot, labels: labels)
end
defp train_scales(%Plot{} = plot) do
plot
|> all_mapped_aesthetics()
|> train_scales(plot)
end
defp all_mapped_aesthetics(%Plot{} = plot) do
plot.combined_layers
|> Enum.flat_map(fn layer -> Map.keys(layer.mapping) end)
|> Enum.uniq()
end
defp train_scales(aesthetics, %Plot{} = plot) do
trained_scales =
Enum.reduce(aesthetics, %{}, fn aesthetic, scales_map ->
Map.put(scales_map, aesthetic, train_scale(aesthetic, plot))
end)
scales =
if :y_max in aesthetics do
trained_scales
|> Map.put(:y, trained_scales.y_max)
|> Map.delete(:y_max)
else
trained_scales
end
struct(plot, scales: scales)
end
defp train_scale(:y_max, plot) do
sample_layer =
plot.combined_layers
|> Enum.filter(fn layer -> layer.mapping[:y_max] end)
|> hd()
sample_value = hd(sample_layer.data)[sample_layer.mapping[:y_max]]
scale = plot.scales[:y_max] || assign_scale(:y_max, sample_value)
y_max_global_min_max = global_min_max(:y_max, plot, scale)
global_min_max =
if :y_min in all_mapped_aesthetics(plot) do
y_min_global_min_max = global_min_max(:y_min, plot, scale)
{elem(y_min_global_min_max, 0), elem(y_max_global_min_max, 1)}
else
y_max_global_min_max
end
Scale.train(scale, global_min_max)
end
defp train_scale(aesthetic, plot) do
sample_layer =
plot.combined_layers
|> Enum.filter(fn layer -> layer.mapping[aesthetic] end)
|> hd()
sample_value = hd(sample_layer.data)[sample_layer.mapping[aesthetic]]
scale = plot.scales[aesthetic] || assign_scale(aesthetic, sample_value)
global_min_max = global_min_max(aesthetic, plot, scale)
Scale.train(scale, global_min_max)
end
defp global_min_max(aesthetic, plot, %scale_type{}) when scale_type in @continuous_scales do
{fixed_min, fixed_max} = plot.limits[aesthetic] || {nil, nil}
plot.combined_layers
|> Enum.filter(fn layer -> layer.mapping[aesthetic] end)
|> Enum.map(fn layer -> layer_min_max(aesthetic, layer) end)
|> Enum.reduce({fixed_min, fixed_max}, fn {layer_min, layer_max}, {global_min, global_max} ->
{safe_min(fixed_min || layer_min, global_min || layer_min),
safe_max(fixed_max || layer_max, global_max || layer_max)}
end)
end
defp global_min_max(aesthetic, plot, _sample_value) do
plot.combined_layers
|> Enum.filter(fn layer -> layer.mapping[aesthetic] end)
|> Enum.reduce(MapSet.new(), fn layer, levels ->
MapSet.union(levels, layer_value_set(aesthetic, layer))
end)
|> Enum.sort()
end
defp layer_min_max(aesthetic, layer) do
layer.data
|> Enum.map(fn row -> row[layer.mapping[aesthetic]] end)
|> min_max()
end
defp layer_value_set(aesthetic, layer) do
layer.data
|> Enum.map(fn row -> row[layer.mapping[aesthetic]] end)
|> Enum.map(&Kernel.to_string/1)
|> MapSet.new()
end
defp safe_min(%date_type{} = first, second)
when date_type in [Date, DateTime, NaiveDateTime] do
[first, second]
|> min_max()
|> elem(0)
end
defp safe_min(first, second), do: min(first, second)
defp safe_max(%date_type{} = first, second)
when date_type in [Date, DateTime, NaiveDateTime] do
[first, second]
|> min_max()
|> elem(1)
end
defp safe_max(first, second), do: max(first, second)
defp min_max([]), do: raise(Enum.EmptyError)
defp min_max([single_value]), do: {single_value, single_value}
defp min_max([%date_type{} | _rest] = dates)
when date_type in [Date, DateTime, NaiveDateTime] do
{Enum.min_by(dates, & &1, date_type, fn -> raise(Enum.EmptyError) end),
Enum.max_by(dates, & &1, date_type, fn -> raise(Enum.EmptyError) end)}
end
defp min_max(list), do: Enum.min_max(list)
defp render(%Plot{} = plot) do
legend = draw_legend_group(plot)
# This is an arbitrary number that works for all the visual tests,
# but user can make more room by increasing right margin
legend_margin = if legend == [], do: 0, else: 80
viewbox_width =
plot.width + plot.y_label_padding + plot.area_padding * 2 + plot.margins.left +
plot.margins.right + legend_margin
# MAGIC NUMBERS
# 45 is a magic number - think it is the height of x-axis label text plus
# padding/margin that is hard-coded somewhere
height_adjustment =
title_margin(plot) + plot.margins.top + 45 + plot.area_padding * 2 +
plot.theme.axis_text_x.angle / 90 * 20
viewbox_height = plot.width / plot.aspect_ratio + height_adjustment
id = "gg-#{System.unique_integer([:positive])}"
[
Theme.to_stylesheet(plot.theme, id),
draw_plot_background(),
draw_panel(plot),
draw_title(plot),
legend
]
|> Draw.svg(
id: id,
viewBox: "0 0 #{viewbox_width} #{viewbox_height}"
# viewBox: "0 0 #{viewbox_width} #{viewbox_width / plot.aspect_ratio}"
)
end
defp draw_plot_background do
["<rect class=\"gg-plot-background\" width=\"100%\" height=\"100%\"></rect>"]
end
defp draw_panel(plot) do
translate_for_title_and_y_axis(
[
draw_panel_background(plot),
Axis.draw_x_axis(plot),
Axis.draw_y_axis(plot),
draw_layers(plot)
],
plot
)
end
defp draw_panel_background(%Plot{} = plot) do
Draw.rect(
x: "0",
y: "0",
height: to_string(plot.width / plot.aspect_ratio + plot.area_padding * 2),
width: to_string(plot.width + plot.area_padding * 2),
class: "gg-panel-background gg-panel-border"
)
end
defp title_margin(%Plot{labels: %{title: title}} = plot) when is_binary(title),
do: plot.title_margin
defp title_margin(%Plot{}), do: 0
defp draw_layers(%Plot{} = plot) do
plot.combined_layers
|> Enum.reverse()
|> Enum.map(fn layer -> Layer.draw(layer, layer.data, plot) end)
end
defp draw_title(%Plot{labels: %{title: title}}) when not is_binary(title), do: ""
defp draw_title(%Plot{margins: margins} = plot) do
left_shift = margins.left + plot.y_label_padding
plot.labels.title
|> Draw.text(
x: "0",
y: "#{margins.top}",
dy: "0.71em",
dx: "0",
class: "gg-text gg-plot-title"
)
|> Draw.g(transform: "translate(#{left_shift}, 0)")
end
defp draw_legend_group(plot) do
{legend_group, legend_group_height} =
Enum.reduce(
[:color, :fill, :linetype, :shape, :size, :alpha],
{[], 0},
fn aesthetic, {legends, offset_acc} ->
{[draw_legend(plot, aesthetic, offset_acc) | legends],
offset_acc + legend_height(plot, Map.get(plot.scales, aesthetic))}
end
)
left_shift = plot.margins.left + plot.y_label_padding + plot.width + 25
top_shift =
plot.margins.top + title_margin(plot) + plot.width / plot.aspect_ratio / 2 + 10 -
legend_group_height / 2 + 10
case legend_group do
[[], [], [], [], [], []] ->
[]
legend_group ->
Draw.g(legend_group, transform: "translate(#{left_shift}, #{top_shift})")
end
end
defp draw_legend(%Plot{} = plot, aesthetic, offset) do
scale = Map.get(plot.scales, aesthetic)
if display_legend?(plot, scale) do
label = plot.labels[aesthetic]
key_glyph = key_glyph(plot, aesthetic)
scale
|> Legend.draw_legend(label, key_glyph, plot.theme.legend_key.height)
|> Draw.g(transform: "translate(0, #{offset})")
else
[]
end
end
defp display_legend?(plot, scale), do: legend_height(plot, scale) > 0
defp legend_height(_plot, %scale_type{}) when scale_type in @continuous_scales do
0
end
defp legend_height(_plot, %{guide: :none}), do: 0
defp legend_height(_plot, %{levels: []}), do: 0
defp legend_height(_plot, %{levels: [_]}), do: 0
defp legend_height(plot, %{} = scale) do
20 + plot.theme.legend_key.height * length(scale.levels)
end
defp legend_height(_plot, _nil_or_other), do: 0
defp translate_for_title_and_y_axis(element, %Plot{margins: margins} = plot) do
left_shift = margins.left + plot.y_label_padding
top_shift = margins.top + title_margin(plot)
Draw.g(element, transform: "translate(#{left_shift}, #{top_shift})")
end
defp key_glyph(plot, aesthetic) do
cond do
mapped_to_layer?(plot, aesthetic) ->
plot.layers
|> Enum.filter(fn layer -> aesthetic in Map.keys(layer.mapping || %{}) end)
|> hd()
|> Map.get(:key_glyph)
part_of_layer_geom?(plot, aesthetic) ->
plot.layers
|> Enum.filter(fn layer -> aesthetic in Map.keys(layer) end)
|> hd()
|> Map.get(:key_glyph)
true ->
hd(plot.layers).key_glyph
end
end
defp mapped_to_layer?(plot, aesthetic) do
not (plot.layers
|> Enum.filter(fn layer -> aesthetic in Map.keys(layer.mapping || %{}) end)
|> Enum.empty?())
end
defp part_of_layer_geom?(plot, aesthetic) do
not (plot.layers
|> Enum.filter(fn layer -> aesthetic in Map.keys(layer) end)
|> Enum.empty?())
end
@doc """
Adds a ribbon geom to the plot with the `position: :stack` option set.
`geom_area/3` is a convenience alias for `geom_ribbon/3` that sets the
`:position` option to `:stack` in order to create stacked area chart.
See `geom_ribbon/3` for available aesthetics and options.
Note that stacked ribbon charts are not yet supported - mappings to the
`:y_min` aesthetic will be ignored.
"""
@spec geom_area(Plot.t(), map() | keyword(), keyword()) :: Plot.t()
def geom_area(plot, mapping \\ [], options \\ [])
def geom_area(%Plot{} = plot, [], []) do
geom_ribbon(plot, position: :stack)
end
def geom_area(%Plot{} = plot, mapping_or_options, []) when is_list(mapping_or_options) do
options = Keyword.merge(mapping_or_options, position: :stack)
geom_ribbon(plot, options)
end
def geom_area(%Plot{} = plot, mapping, options) do
options = Keyword.merge(options, position: :stack)
geom_ribbon(plot, mapping, options)
end
@doc """
Adds a bar geom to the plot.
Accepts an alternative dataset to be used; if one is not provided defaults to
the plot dataset.
Accepts a mapping and/or additional options to be used. The provided mapping
is merged with the plot mapping for purposes of the geom - there is no need
to re-specify the `:x` mapping.
Bar geoms support mapping data to the following aesthetics, which use the
noted default scales:
* `:x` (required)
* `:y` (required to draw the geom, but not typically specified for bar geoms - see below)
* `:alpha`
* `:fill`
Bar geoms also support providing fixed values (specified as options, e.g. `color: "blue"`)
for the optional aesthetics above. A fixed value is assigned to the aesthetic
for all observations.
`geom_bar/3` uses the `:count` stat, which counts the number of
observations in the data for each combination of mapped aesthetics and assigns
that value to the `:y` aesthetic. To create a bar chart with bar heights tied to the
values of a specific variable, specify `stat: :identity` or use `geom_col/3`,
which is identical to calling `geom_bar/3` with the `stat: :identity`
option. In either case, if `stat: :identity` is called, a variable in the data
must be mapped to the `:y` aesthetic.
Other supported options:
* `:key_glyph` - Type of glyph to use in the legend key. Available values are
`:a`, `:point`, `:path`, `:rect` and `:timeseries`. Defaults to `:rect`.
* `:position` - Available values are:
* `:identity` (bars sit on top of each other; not recommended),
* `:stack` (one bar per `:x` value)
* `:dodge` (one bar per unique `:x`/`:fill` value pair).
Defaults to `:stack`.
* `:stat` - an atom referring to a statistical transformation function in the
`GGity.Stat` module that is to be applied to the data. Defaults to `:count` (see above).
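For example, a dodged bar chart (the `:class` and `:drv` variable names are
hypothetical - substitute variables from your own data):

```
data
|> Plot.new(%{x: :class})
|> Plot.geom_bar(%{fill: :drv}, position: :dodge)
|> Plot.plot()
```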
"""
@spec geom_bar(Plot.t(), map() | keyword(), keyword()) :: Plot.t()
def geom_bar(plot, mapping \\ [], options \\ [])
def geom_bar(%Plot{} = plot, [], []) do
updated_plot = add_geom(plot, Geom.Bar)
bar_geom = hd(updated_plot.layers)
scale_adjustment =
case bar_geom.position do
:stack -> {min(0, elem(plot.limits.y, 0) || 0), elem(plot.limits.y, 1)}
_other_positions -> {min(0, elem(plot.limits.y, 0) || 0), elem(plot.limits.y, 1)}
end
struct(updated_plot, limits: %{y: scale_adjustment})
end
def geom_bar(%Plot{} = plot, mapping_or_options, []) do
updated_plot = add_geom(plot, Geom.Bar, mapping_or_options)
bar_geom = hd(updated_plot.layers)
{data, mapping} = apply(Stat, bar_geom.stat, [updated_plot.data, updated_plot.mapping])
fixed_max = stacked_y_axis_max(data, mapping, :y)
scale_adjustment = position_adjusted_scale_min_max(bar_geom, plot, fixed_max)
struct(updated_plot, limits: %{y: scale_adjustment})
end
def geom_bar(%Plot{} = plot, mapping, options) do
updated_plot = add_geom(plot, Geom.Bar, mapping, options)
bar_geom = hd(updated_plot.layers)
{data, mapping} =
apply(Stat, bar_geom.stat, [updated_plot.data, Map.merge(updated_plot.mapping, mapping)])
fixed_max = stacked_y_axis_max(data, mapping, :y)
scale_adjustment = position_adjusted_scale_min_max(bar_geom, plot, fixed_max)
struct(updated_plot, limits: %{y: scale_adjustment})
end
@doc """
Shorthand for `geom_bar(plot, stat: :identity)`.
Produces a bar chart similar to `geom_bar/3`, but uses the values of
observations mapped to the `:y` aesthetic (instead of observation counts) to
calculate the height of the bars. See `geom_bar/3` for supported options.
"""
@spec geom_col(Plot.t(), map() | keyword(), keyword()) :: Plot.t()
def geom_col(plot, mapping \\ [], options \\ [])
def geom_col(%Plot{} = plot, [], []) do
geom_bar(plot, stat: :identity)
end
def geom_col(%Plot{} = plot, mapping_or_options, []) when is_list(mapping_or_options) do
options = Keyword.merge(mapping_or_options, stat: :identity, limits: %{y: {0, nil}})
geom_bar(plot, options)
end
def geom_col(%Plot{} = plot, mapping, options) do
options = Keyword.merge(options, stat: :identity, limits: %{y: {0, nil}})
geom_bar(plot, mapping, options)
end
@doc """
Adds a line geom to the plot.
Accepts an alternative dataset to be used; if one is not provided defaults to
the plot dataset.
Accepts a mapping and/or additional options to be used. The provided mapping
is merged with the plot mapping for purposes of the geom - there is no need
to re-specify the `:x` or `:y` mappings.
Note that the line geom sorts the data by the values for the variable mapped
to the `:x` aesthetic using Erlang default term ordering.
Line geoms support mapping data to the following aesthetics, which use the
noted default scales:
* `:x` (required)
* `:y` (required)
* `:alpha`
* `:color`
* `:linetype`
* `:size`
Line geoms also support providing fixed values (specified as options, e.g. `color: "blue"`)
for the optional aesthetics above. A fixed value is assigned to the aesthetic
for all observations.
Other supported options:
* `:key_glyph` - Type of glyph to use in the legend key. Available values are
`:path` and `:timeseries`. By default this value is assigned based on the type
of the value in the first row of the data for the variable mapped to the `:x`
aesthetic.
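For example (a dataset with `:date` and `:unemployment` columns is assumed):

```
data
|> Plot.new(%{x: :date, y: :unemployment})
|> Plot.geom_line()
|> Plot.plot()
```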
"""
@spec geom_line(Plot.t(), map() | keyword(), keyword()) :: Plot.t()
def geom_line(plot, mapping \\ [], options \\ [])
def geom_line(%Plot{} = plot, [], []) do
add_geom(plot, Geom.Line, key_glyph: line_key_glyph(plot))
end
def geom_line(%Plot{} = plot, mapping, []) when is_map(mapping) do
add_geom(plot, Geom.Line, mapping, key_glyph: line_key_glyph(plot, mapping))
end
def geom_line(%Plot{} = plot, options, []) when is_list(options) do
key_glyph = options[:key_glyph] || line_key_glyph(plot, options)
add_geom(plot, Geom.Line, [{:key_glyph, key_glyph} | options])
end
def geom_line(%Plot{} = plot, mapping, options) do
key_glyph = options[:key_glyph] || line_key_glyph(plot, mapping, options)
add_geom(plot, Geom.Line, mapping, [{:key_glyph, key_glyph} | options])
end
defp line_key_glyph(%Plot{scales: %{x: %Scale.X.Date{}}}), do: :timeseries
defp line_key_glyph(%Plot{scales: %{x: %Scale.X.DateTime{}}}), do: :timeseries
defp line_key_glyph(_plot), do: :path
defp line_key_glyph(%Plot{} = plot, mapping) when is_map(mapping) do
mapping = Map.merge(plot.mapping, mapping)
case hd(plot.data)[mapping[:x]] do
%type{} when type in [Date, DateTime] -> :timeseries
_type -> :path
end
end
defp line_key_glyph(%Plot{} = plot, options) when is_list(options) do
case hd(options[:data] || plot.data)[plot.mapping[:x]] do
%type{} when type in [Date, DateTime] -> :timeseries
_type -> :path
end
end
defp line_key_glyph(%Plot{} = plot, mapping, options) do
mapping = Map.merge(plot.mapping, mapping)
case hd(options[:data] || plot.data)[mapping[:x]] do
%type{} when type in [Date, DateTime] -> :timeseries
_type -> :path
end
end
@doc """
Adds a layer with a point geom to the plot.
Accepts an alternative dataset to be used; if one is not provided defaults to
the plot dataset.
Accepts a mapping and/or additional options to be used. The provided mapping
is merged with the plot mapping for purposes of the geom - there is no need
to re-specify the `:x` or `:y` mappings.
Point geoms support mapping data to the following aesthetics, which use the noted
default scales:
* `:x` (required)
* `:y` (required)
* `:alpha`
* `:color`
* `:shape`
* `:size`
Point geoms also support providing fixed values (specified as options, e.g. `color: "blue"`)
for the optional aesthetics above. A fixed value is assigned to the aesthetic for
all observations.
Other supported options:
* `:key_glyph` - Type of glyph to use in the legend key. Available values are
`:point`, `:path` and `:timeseries`; defaults to `:point`.
* `:stat` - an atom referring to a statistical transformation function in the
`GGity.Stat` module that is to be applied to the data. Defaults to `:identity`
(i.e., no transformation).
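For example (mapping `:cyl` to color is illustrative - any variable in the
data can be mapped):

```
Examples.mtcars()
|> Plot.new(%{x: :wt, y: :mpg})
|> Plot.geom_point(%{color: :cyl}, size: 4)
|> Plot.plot()
```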
"""
@spec geom_point(Plot.t(), map() | keyword(), keyword()) :: Plot.t()
def geom_point(plot, mapping \\ [], options \\ [])
def geom_point(%Plot{} = plot, [], []) do
add_geom(plot, Geom.Point)
end
def geom_point(%Plot{} = plot, mapping_or_options, []) do
add_geom(plot, Geom.Point, mapping_or_options)
end
def geom_point(%Plot{} = plot, mapping, options) do
add_geom(plot, Geom.Point, mapping, options)
end
@doc """
Adds a ribbon geom to the plot.
Accepts an alternative dataset to be used; if one is not provided defaults to
the plot dataset.
Accepts a mapping and/or additional options to be used. The provided mapping
is merged with the plot mapping for purposes of the geom - there is no need
to re-specify the `:x` mapping.
Ribbon geoms support mapping data to the following aesthetics, which use the
noted default scales:
* `:x` (required)
* `:y_max` (required) - defines the top boundary of the ribbon
* `:y_min` - defines the bottom boundary of the ribbon; defaults to zero
* `:alpha`
* `:fill`
A ribbon geom with no `:y_min` specified is essentially an area chart. To draw
a stacked area chart, set the `:position` option to `:stack`, or use the `geom_area/3`
convenience function.
Ribbon geoms also support providing fixed values (specified as options, e.g. `fill: "blue"`)
for the `:alpha` and `:fill` aesthetics above. A fixed value is assigned to the aesthetic
for all observations. Fixed values can also be specified for:
* `:color` - ribbon border color
* `:size` - ribbon border width
Other supported options:
* `:key_glyph` - Type of glyph to use in the legend key. Available values are
`:a`, `:point`, `:path`, `:rect` and `:timeseries`. Defaults to `:rect`.
* `:position` - Available values are:
* `:identity` (ribbons overlay one another),
* `:stack` (ribbons stacked on the y-axis)
Defaults to `:identity`.
Note that stacked ribbon charts are not yet supported - mappings to the
`:y_min` aesthetic will be ignored if `:position` is set to `:stack`.
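For example (the `:month`, `:high` and `:low` variable names are hypothetical):

```
data
|> Plot.new(%{x: :month, y_max: :high, y_min: :low})
|> Plot.geom_ribbon(fill: "steelblue")
|> Plot.plot()
```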
"""
@spec geom_ribbon(Plot.t(), map() | keyword(), keyword()) :: Plot.t()
def geom_ribbon(plot, mapping \\ [], options \\ [])
def geom_ribbon(%Plot{} = plot, [], []) do
plot = add_geom(plot, Geom.Ribbon)
ribbon_geom = hd(plot.layers)
scale_adjustment =
case ribbon_geom.position do
:stack -> {min(0, elem(plot.limits.y, 0) || 0), elem(plot.limits.y, 1)}
_other_positions -> {min(0, elem(plot.limits.y, 0) || 0), elem(plot.limits.y, 1)}
end
struct(plot, limits: %{y_max: scale_adjustment})
end
def geom_ribbon(%Plot{} = plot, mapping_or_options, []) do
plot = add_geom(plot, Geom.Ribbon, mapping_or_options)
ribbon_geom = hd(plot.layers)
fixed_max = stacked_y_axis_max(plot.data, plot.mapping, :y_max)
scale_adjustment = position_adjusted_scale_min_max(ribbon_geom, plot, fixed_max)
struct(plot, limits: %{y_max: scale_adjustment})
end
def geom_ribbon(%Plot{} = plot, mapping, options) do
plot = add_geom(plot, Geom.Ribbon, mapping, options)
ribbon_geom = hd(plot.layers)
fixed_max = stacked_y_axis_max(plot.data, plot.mapping, :y_max)
scale_adjustment = position_adjusted_scale_min_max(ribbon_geom, plot, fixed_max)
struct(plot, limits: %{y_max: scale_adjustment})
end
@doc """
Adds a layer with a text geom to the plot.
Accepts an alternative dataset to be used; if one is not provided defaults to
the plot dataset.
A common use for text geoms is labelling of bar or point geoms. For bar chart
labels in particular, it is important to specify the same stat and position
adjustment for the text geom as that specified for the bar chart.
Accepts a mapping and/or additional options to be used. The provided mapping
is merged with the plot mapping for purposes of the geom - there is no need
to re-specify the `:x` or `:y` mappings.
Text geoms support mapping data to the following aesthetics, which use the
noted default scales:
* `:x` (required)
* `:y` (required)
* `:label` (required - the text to be displayed)
* `:group`
* `:alpha`
* `:color`
* `:size`
The `:group` aesthetic is generally needed for bar chart labelling, where the
`:fill` or `:alpha` aesthetic is mapped to a value in the data. In those scenarios,
the text geom's position adjustment must match the bar's, and the `:group` aesthetic for
the text geom should be mapped to the variable mapped to `:fill` or `:alpha` on the
bar chart layer. See the visual examples code for examples.
Text geoms also support providing fixed values (specified as options, e.g. `color: "blue"`)
for the optional aesthetics above. A fixed value is assigned to the aesthetic for
all observations.
Other supported options:
* `:family` - The font family used to display the text; equivalent to the
SVG `font-family` attribute. Defaults to `"Helvetica, Arial, sans-serif"`.
* `:fontface` - Equivalent to SVG `font-weight` attribute. Defaults to `:normal`.
* `:hjust` - Horizontal alignment of the text relative to the element's `:x` value.
Valid values are `:left`, `:center` and `:right`. Defaults to `:center`.
* `:key_glyph` - Type of glyph to use in the legend key. Available values are
`:a`, `:point`, `:path` and `:timeseries`. Defaults to `:a`.
* `:nudge_x`, `:nudge_y` - Adjust the x- or y-position value by the specified number
of pixels. Both default to `0`.
* `:position` - Available values are `:identity` (no adjustment), `:stack` (`:y` value
represents cumulative value for a given `:x` value) or `:dodge` (one text element per
unique pair of `:x` and other non-`:y` mapped aesthetics). Defaults to `:identity`.
* `:position_vjust` - Adjust `:y` position vertically; expressed as a percentage of
the calculated `:y` value after taking into account the specified position adjustment.
Defaults to `1`.
* `:stat` - an atom referring to a statistical transformation function in the `GGity.Stat`
module that is to be applied to the data. Defaults to `:identity` (i.e., no transformation).
Supported values are `:count` and `:identity`. Where a text geom is intended to serve as a
label for another layer with the `:count` stat, the stat for the text layer should
also be set to `:count`.
* `:vjust` - Baseline of the text relative to the element's `:y` value. Valid values are
`:top`, `:middle` and `:bottom`. Defaults to `:middle`.
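For example, labelling points with a hypothetical `:model` variable:

```
data
|> Plot.new(%{x: :wt, y: :mpg})
|> Plot.geom_point()
|> Plot.geom_text(%{label: :model}, nudge_y: 5, size: 6)
|> Plot.plot()
```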
"""
@spec geom_text(Plot.t(), map() | keyword(), keyword()) :: Plot.t()
def geom_text(plot, mapping \\ [], options \\ [])
def geom_text(%Plot{} = plot, [], []) do
updated_plot = add_geom(plot, Geom.Text)
geom = hd(updated_plot.layers)
scale_adjustment =
case geom.position do
:stack -> {min(0, elem(plot.limits.y, 0) || 0), elem(plot.limits.y, 1)}
_other_positions -> plot.limits.y
end
struct(updated_plot, limits: %{y: scale_adjustment})
end
def geom_text(%Plot{} = plot, mapping_or_options, []) do
updated_plot = add_geom(plot, Geom.Text, mapping_or_options)
geom = hd(updated_plot.layers)
{data, mapping} = apply(Stat, geom.stat, [updated_plot.data, updated_plot.mapping])
scale_adjustment =
case geom.position do
:stack ->
fixed_max = stacked_y_axis_max(data, mapping, :y)
{min(0, elem(plot.limits.y, 0) || 0),
max(fixed_max, fixed_max || elem(plot.limits.y, 1))}
_other_positions ->
plot.limits.y
end
struct(updated_plot, limits: %{y: scale_adjustment})
end
def geom_text(%Plot{} = plot, mapping, options) do
updated_plot = add_geom(plot, Geom.Text, mapping, options)
geom = hd(updated_plot.layers)
{data, mapping} =
apply(Stat, geom.stat, [updated_plot.data, Map.merge(updated_plot.mapping, mapping)])
scale_adjustment =
case geom.position do
:stack ->
fixed_max = stacked_y_axis_max(data, mapping, :y)
{min(0, elem(plot.limits.y, 0) || 0),
max(fixed_max, fixed_max || elem(plot.limits.y, 1))}
_other_positions ->
plot.limits.y
end
struct(updated_plot, limits: %{y: scale_adjustment})
end
defp add_geom(%Plot{} = plot, geom_type) do
layer = Layer.new(struct(geom_type), %{}, [])
struct(plot, layers: [layer | plot.layers])
end
defp add_geom(%Plot{} = plot, geom_type, mapping) when is_map(mapping) do
layer = Layer.new(struct(geom_type), mapping, [])
struct(plot, layers: [layer | plot.layers], labels: Map.merge(plot.labels, mapping))
end
defp add_geom(%Plot{} = plot, geom_type, options) when is_list(options) do
layer = Layer.new(struct(geom_type), %{}, options)
struct(plot, layers: [layer | plot.layers])
end
defp add_geom(%Plot{} = plot, geom_type, mapping, options) do
layer = Layer.new(struct(geom_type), mapping, options)
struct(plot, layers: [layer | plot.layers], labels: Map.merge(plot.labels, mapping))
end
defp stacked_y_axis_max(data, mapping, y_aesthetic) do
data
|> Enum.group_by(fn item -> item[mapping[:x]] end)
|> Enum.map(fn {_category, values} ->
Enum.map(values, fn value -> value[mapping[y_aesthetic]] end)
end)
|> Enum.map(fn counts -> Enum.sum(counts) end)
|> Enum.max()
end
defp position_adjusted_scale_min_max(geom, plot, fixed_max) do
case geom.position do
:stack ->
{min(0, elem(plot.limits.y, 0) || 0), max(fixed_max, fixed_max || elem(plot.limits.y, 1))}
_other_positions ->
{min(0, elem(plot.limits.y, 0) || 0), elem(plot.limits.y, 1)}
end
end
@doc """
Sets geom point opacity for continuous data.
This scale defines a mapping function that assigns an opacity to be mapped to a given
value of the mapped variable.
This function takes the following options:
- `:range` - a tuple with minimum (default - `0.1`) and maximum (default - `1`)
values to be bound to the data
"""
@spec scale_alpha_continuous(Plot.t(), keyword()) :: Plot.t()
def scale_alpha_continuous(%Plot{} = plot, options \\ []) do
struct(plot, scales: Map.put(plot.scales, :alpha, Scale.Alpha.Continuous.new(options)))
end
@doc """
Sets geom point opacity for categorical data.
For categorical data for which a linear mapping of values to opacity is not
appropriate, this scale generates a palette of evenly spaced opacity values
mapped to each unique value of the data. The palette is generated such that the
difference between each opacity value is maximized. The set of unique data values
are sorted for the purpose of assigning them to an opacity and ordering the legend.
This function also takes the following options:
- `:labels` - specifies how legend item names (levels of the scale) should be
formatted. See `GGity.Labels` for valid values for this option.
- `:range` - a tuple with minimum (default - `0.1`) and maximum (default - `1`)
values to be bound to the data
"""
@spec scale_alpha_discrete(Plot.t(), keyword()) :: Plot.t()
def scale_alpha_discrete(%Plot{} = plot, options \\ []) do
struct(plot, scales: Map.put(plot.scales, :alpha, Scale.Alpha.Discrete.new(options)))
end
@doc """
Sets geom point opacity using the value of the data mapped to the `:alpha` aesthetic.
Can be used to manually assign opacity to individual data points by including an
opacity value with each observation.
See `scale_color_identity/1` for an example of identity scale use.
"""
@spec scale_alpha_identity(Plot.t()) :: Plot.t()
def scale_alpha_identity(%Plot{} = plot) do
struct(plot, scales: Map.put(plot.scales, :alpha, Scale.Identity.new(:alpha)))
end
@doc """
Sets geom point color using the value of the data mapped to
the color aesthetic. Can be used to manually assign colors to
individual data points by including a color value with each observation.
Such color values must be provided as a hex value or CSS color name.
For example, with the dataset below, one could render points for
`:weight` values as `"blue"` for low weights and `"red"` for high weights
by assigning a value to the `:point_color` variable accordingly.
```
[
%{weight: 6, age: 4, point_color: "blue"},
%{weight: 5, age: 3, point_color: "blue"},
%{weight: 8, age: 4, point_color: "red"},
%{weight: 7, age: 4, point_color: "red"},
]
|> Plot.new(%{x: :weight, y: :age})
|> Plot.geom_point(%{color: :point_color})
|> Plot.scale_color_identity()
|> Plot.plot
```
"""
@spec scale_color_identity(Plot.t()) :: Plot.t()
def scale_color_identity(%Plot{} = plot) do
struct(plot, scales: Map.put(plot.scales, :color, Scale.Identity.new(:color)))
end
@spec scale_label_identity(Plot.t()) :: Plot.t()
def scale_label_identity(%Plot{} = plot) do
struct(plot, scales: Map.put(plot.scales, :color, Scale.Identity.new(:label)))
end
@doc """
Sets geom point colour using the Viridis color palettes. Viridis
is used by ggplot2 and other libraries in part because it is optimized
to maintain contrast when viewed by those with various types of
color blindness.
The scale is discrete - it is intended to map colors to categorical data.
The scale generates a palette of evenly spaced values from the Viridis color palette
and these are mapped to each unique value of the data. The palette is generated such
that the visual difference between each color value is maximized. The set of unique
data values are sorted for the purpose of assigning them to a color and ordering the
legend.
This function also takes the following options:
- `:labels` - specifies how legend item names (levels of the data mapped to the scale) should be
formatted. See `GGity.Labels` for valid values for this option.
- `:option` - specifies which palette to use. Available palettes are `:magma`, `:inferno`,
`:plasma`, `:viridis` (the default) and `:cividis`. These palettes can also be specified via their
letter codes - `:a`, `:b`, `:c`, `:d` and `:e`, respectively.
Examples of each color palette option can be generated using `mix ggity.visual.scale_color_viridis`.
"""
@spec scale_color_viridis(Plot.t(), keyword()) :: Plot.t()
def scale_color_viridis(%Plot{} = plot, options \\ []) do
struct(plot, scales: Map.put(plot.scales, :color, Scale.Color.Viridis.new(options)))
end
@doc """
Sets fill color for fillable shapes (e.g., bars).
Accepts the same options as `scale_color_viridis/2`.
"""
@spec scale_fill_viridis(Plot.t(), keyword()) :: Plot.t()
def scale_fill_viridis(%Plot{} = plot, options \\ []) do
struct(plot, scales: Map.put(plot.scales, :fill, Scale.Fill.Viridis.new(options)))
end
@doc """
Sets type of line for categorical data in line charts.
This scale uses a palette of six line types (`:solid`, `:dashed`, `:dotted`, `:longdash`,
`:dotdash` and `:twodash`) that are mapped to each unique value of the data. The
set of unique data values are sorted for the purpose of assigning them to a line type
(in the same order as listed above) and ordering the legend.
If there are more than six unique values in the data, the line types are recycled
per the order above.
This function also takes the following options:
- `:labels` - specifies how legend item names (levels of the scale) should be
formatted. See `GGity.Labels` for valid values for this option.
"""
@spec scale_linetype_discrete(Plot.t(), keyword()) :: Plot.t()
def scale_linetype_discrete(%Plot{} = plot, options \\ []) do
struct(plot, scales: Map.put(plot.scales, :linetype, Scale.Linetype.Discrete.new(options)))
end
@doc """
Sets geom point marker shape for categorical data.
This scale uses a palette of four marker types (`:circle`, `:square`, `:diamond`
and `:triangle`) that are mapped to each unique value of the data. The set of unique
data values are sorted for the purpose of assigning them to a size (using the shape
order above) and ordering the legend.
If there are greater than four unique values in the data, the shapes are recycled
per the order above.
This function also takes the following options:
- `:labels` - specifies how legend item names (levels of the scale) should be
formatted. See `GGity.Labels` for valid values for this option.
"""
@spec scale_shape(Plot.t(), keyword()) :: Plot.t()
def scale_shape(%Plot{} = plot, options \\ []) do
struct(plot, scales: Map.put(plot.scales, :shape, Scale.Shape.new(options)))
end
@doc """
Sets geom point marker shape for categorical data using a custom palette.
This scale requires a `:values` option be passed, which must contain a list
of characters or valid shape names (`:circle`, `:square`, `:diamond` or `:triangle`)
to be used as markers. These values are mapped to the unique values of the mapped variable
in term order. The list must have as many values as there are unique values in the data.
This function also takes the following (optional) options:
- `:labels` - specifies how legend item names (levels of the scale) should be
formatted. See `GGity.Labels` for valid values for this option.
```
[
%{x: 6, y: 4, mood: "happy"},
%{x: 5, y: 3, mood: "ok"},
%{x: 8, y: 4, mood: "sad"},
%{x: 7, y: 4, mood: "sad"},
]
|> Plot.new(%{x: :x, y: :y})
|> Plot.geom_point(%{shape: :mood}, size: 7)
|> Plot.scale_shape_manual(values: ["😀", "😐", "☹️"])
|> Plot.plot()
```
"""
@spec scale_shape_manual(Plot.t(), keyword()) :: Plot.t()
def scale_shape_manual(%Plot{} = plot, options \\ []) do
struct(plot, scales: Map.put(plot.scales, :shape, Scale.Shape.Manual.new(options)))
end
@doc """
Sets geom point size for continuous data.
This scale defines a mapping function that assigns a shape area based on the given
value of the mapped variable.
This function takes the following options:
- `:range` - a tuple with minimum (default - `9`) and maximum (default - `100`)
values to be bound to the data
"""
@spec scale_size_continuous(Plot.t(), keyword()) :: Plot.t()
def scale_size_continuous(%Plot{} = plot, options \\ []) do
struct(plot, scales: Map.put(plot.scales, :size, Scale.Size.Continuous.new(options)))
end
@doc """
Sets geom point size for categorical data.
For categorical data for which a linear mapping of values to marker size is not
appropriate, this scale generates a palette of evenly spaced area values
between `9` and `100` that are mapped to each unique value of the data. The
palette is generated such that the difference between each size value is
maximized. The set of unique data values are sorted for the purpose of assigning
them to a size and ordering the legend.
This function also takes the following options:
- `:labels` - specifies how legend item names (levels of the scale) should be
formatted. See `GGity.Labels` for valid values for this option.
- `:range` - a tuple with minimum (default - `9`) and maximum (default - `100`)
values to be bound to the data
"""
@spec scale_size_discrete(Plot.t(), keyword()) :: Plot.t()
def scale_size_discrete(%Plot{} = plot, options \\ []) do
struct(plot, scales: Map.put(plot.scales, :size, Scale.Size.Discrete.new(options)))
end
@doc """
Sets geom point size using the value of the data mapped to the size aesthetic.
Can be used to manually assign size to individual data points by including a
size value with each observation.
Note that "size" is the marker diameter, not marker area (which is generally
preferable but not yet implemented).
See `scale_color_identity/1` for an example of identity scale use.
"""
@spec scale_size_identity(Plot.t()) :: Plot.t()
def scale_size_identity(%Plot{} = plot) do
struct(plot, scales: Map.put(plot.scales, :size, Scale.Identity.new(:size)))
end
@doc """
Sets geom x coordinate for continuous numerical data.
This scale defines a mapping function that assigns a coordinate on the x axis
to the value of the mapped variable. The scale also defines an inverse of this
function that is used to generate axis tick labels.
This function also takes the following options:
- `:labels` - specifies how break names (tick labels calculated by the scale) should be
formatted. See `GGity.Labels` for valid values for this option.
"""
@spec scale_x_continuous(Plot.t(), keyword()) :: Plot.t()
def scale_x_continuous(%Plot{} = plot, options \\ []) do
struct(plot, scales: Map.put(plot.scales, :x, Scale.X.Continuous.new(options)))
end
@doc """
Sets geom x coordinate for continuous `Date` data.
This scale defines a mapping function that assigns a coordinate on the x axis
to the value of the mapped variable. The scale also defines an inverse of this
function that is used to generate axis tick labels.
This function also takes the following options:
- `:labels` - specifies how break names (tick labels calculated by the scale) should be
formatted. See `GGity.Labels` for valid values for this option.
-`:date_labels` - special formatting patterns for dates. If `:date_labels` is specified,
the value of the `:labels` option will be overridden.
`:date_labels` can be either a format string pattern that is accepted by [`NimbleStrftime`](https://hexdocs.pm/nimble_strftime/NimbleStrftime.html):
```
data
|> Plot.new(%{x: :date_variable, y: :other_variable})
|> Plot.geom_line()
|> Plot.scale_x_date(date_labels: "%b %d %Y") # Label format "Jan 01 2001"
```
or a tuple `{format, options}` where `format` is the pattern and `options` is a keyword
list of options accepted by `NimbleStrftime.format/3`:
```
rename_weekdays = fn day_of_week ->
{
"Monday",
"Tuesday",
"Hump Day",
"Thursday",
"Friday",
"Saturday",
"Sunday"
}
|> elem(day_of_week - 1)
end
data
|> Plot.new(%{x: :date_variable, y: :other_variable})
|> Plot.geom_line()
|> Plot.scale_x_date(date_labels: {"%A", day_of_week_names: rename_weekdays})
# Ticks are just weekday names, Wednesday is Hump Day
```
"""
@spec scale_x_date(Plot.t(), keyword()) :: Plot.t()
def scale_x_date(%Plot{} = plot, options \\ []) do
struct(plot, scales: Map.put(plot.scales, :x, Scale.X.Date.new(options)))
end
@doc """
Sets geom x coordinate for continuous `DateTime` data.
This scale defines a mapping function that assigns a coordinate on the x axis
to the value of the mapped variable. The scale also defines an inverse of this
function that is used to generate axis tick labels.
This function also takes the following options:
- `:labels` - specifies how break names (tick labels calculated by the scale) should be
formatted. See `GGity.Labels` for valid values for this option.
-`:date_labels` - special formatting patterns for dates. If `:date_labels` is specified,
the value of the `:labels` option will be overridden.
`:date_labels` can be either a format string pattern that is accepted by [`NimbleStrftime`](https://hexdocs.pm/nimble_strftime/NimbleStrftime.html):
See `scale_x_date/2` for more usage examples.
```
data
|> Plot.new(%{x: :datetime_variable, y: :other_variable})
|> Plot.geom_line()
|> Plot.scale_x_datetime(date_labels: "%b %d H%H") # Label format "Jan 01 H01"
```
"""
@spec scale_x_datetime(Plot.t(), keyword()) :: Plot.t()
def scale_x_datetime(%Plot{} = plot, options \\ []) do
struct(plot, scales: Map.put(plot.scales, :x, Scale.X.DateTime.new(options)))
end
@doc """
Sets geom x coordinate for discrete (categorical) data.
This scale defines a mapping function that assigns a coordinate on the x axis
to the value of the mapped variable. In the discrete case, this is equivalent to
evenly distributing geoms across the x axis.
This function also takes the following options:
- `:labels` - specifies how break names (tick labels calculated by the scale) should be
formatted. See `GGity.Labels` for valid values for this option.
"""
@spec scale_x_discrete(Plot.t(), keyword()) :: Plot.t()
def scale_x_discrete(%Plot{} = plot, options \\ []) do
struct(plot, scales: Map.put(plot.scales, :x, Scale.X.Discrete.new(options)))
end
@doc """
Sets geom y coordinate for continuous numerical data.
This scale defines a mapping function that assigns a coordinate on the y axis
to the value of the mapped variable. The scale also defines an inverse of this
function that is used to generate axis tick labels.
This function also takes the following options:
- `:labels` - specifies how break names (tick labels calculated by the scale) should be
formatted. See `GGity.Labels` for valid values for this option.
"""
@spec scale_y_continuous(Plot.t(), keyword()) :: Plot.t()
def scale_y_continuous(%Plot{} = plot, options \\ []) do
Enum.reduce([:y, :y_max], plot, fn aesthetic, plot ->
if plot.scales[aesthetic] do
struct(plot, scales: Map.put(plot.scales, aesthetic, Scale.Y.Continuous.new(options)))
else
struct(plot, scales: Map.put(plot.scales, :y, Scale.Y.Continuous.new(options)))
end
end)
end
@doc """
Updates the plot theme.
GGity uses themes to style non-data plot elements. The default theme is similar
to ggplot2's signature gray background/white gridline theme.
`theme/2` is used to update on or more elements of the plot theme by passing
a keyword list of new elements and values, which are merged with those of the
current theme.
For supported elements and values, see `GGity.Theme`.
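For example (both element names below are referenced elsewhere in this module;
see `GGity.Theme` for the full supported set):

```
plot
|> Plot.theme(axis_text_x: %{angle: 45}, legend_key: %{height: 20})
```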
"""
@spec theme(Plot.t(), keyword()) :: Plot.t()
def theme(%Plot{} = plot, elements) do
elements = Enum.into(elements, %{})
theme =
Map.merge(plot.theme, elements, fn
_key, _original_value, nil ->
nil
_key, nil, new_value ->
new_value
_key, original_value, new_value when is_map(original_value) ->
Map.merge(original_value, new_value, fn
_key, original, nil -> original
_key, _original, new -> new
end)
_key, _original_value, new_value ->
new_value
end)
struct(plot, theme: theme)
end
@doc """
Updates plot title, axis and legend labels.
Accepts a keyword list where the keys are `:title` and/or the aesthetic(s)
tied to the axes/legends to be labelled.
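For example:

```
Plot.labs(plot, title: "Fuel economy", x: "Weight (1000 lbs)", color: "Cylinders")
```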
"""
@spec labs(Plot.t(), keyword()) :: Plot.t()
def labs(plot, labels) do
labels = Map.merge(plot.labels, Enum.into(labels, %{}))
struct(plot, labels: labels)
end
@doc """
Updates the plot x axis label.
"""
@spec xlab(Plot.t(), binary()) :: Plot.t()
def xlab(%Plot{} = plot, label) do
labels =
plot.labels
|> Map.merge(%{x: label})
struct(plot, labels: labels)
end
@doc """
Updates the plot y axis label.
"""
@spec ylab(Plot.t(), binary()) :: Plot.t()
def ylab(plot, label) do
labels =
plot.labels
|> Map.merge(%{y: label})
struct(plot, labels: labels)
end
@doc """
Manually sets the type of guide used for specified scales.
Accepts a keyword list of aesthetics and values for the `:guide` options
for the associated scales.
Currently this is only used to turn legends on or off. Valid values are
`:legend` (draw a legend) and `:none` (do not draw a legend).
## Example
```
Plot.new(%{x: "x", y: "y"})
|> Plot.geom_point(color: "color", shape: "shape", size: "size")
# By default all three legends will be drawn
|> Plot.guides(shape: :none, size: :none) # Plot will only draw a legend for the color scale
```
"""
@spec guides(Plot.t(), keyword()) :: Plot.t()
def guides(plot, guides) do
scales =
guides
|> Keyword.keys()
|> Enum.reduce(%{}, fn aesthetic, new_scales ->
scale = plot.scales[aesthetic] || assign_scale(aesthetic, "a string")
Map.put(new_scales, aesthetic, struct(scale, guide: guides[aesthetic]))
end)
struct(plot, scales: Map.merge(plot.scales, scales))
end
@doc """
Saves the plot to a file at the given path.
"""
@spec to_file(Plot.t(), list(binary)) :: :ok
def to_file(%Plot{} = plot, path) do
File.write!(path, plot(plot))
end
end
|
lib/ggity/plot.ex
| 0.952959 | 0.985482 |
plot.ex
|
starcoder
|
defmodule HPDF do
@moduledoc """
Uses Chrome in Headless mode to print pages to PDF.
Each page is loaded in its own browser context, similar to an Incognito window.
Pages may be printed that require authentication allowing you to print pages that are behind login wall.
When using HPDF you need to have a headless chrome running.
By default HPDF will look for Chrome at `http://localhost:9222`.
This can be configured in your configuration files by using:
```elixir
config :hpdf, HPDF,
address: "http://my_custom_domain:9222"
```
You can get a headless chrome browser by using a docker container.
A public container can be found at: https://hub.docker.com/r/justinribeiro/chrome-headless/
```sh
docker run -d -p 9222:9222 --cap-add=SYS_ADMIN justinribeiro/chrome-headless
```
### Example
```elixir
case HPDF.print_pdf!(my_url, timeout: 30_000) do
{:ok, pdf_data} -> do_stuff_with_the_pdf_binary_data(pdf_data)
{:error, error_type, reason} -> #Handle error
{:error, reason} -> # Handle error
end
```
Common error types provided by HPDF
* `:page_error` - An error was returned by the browser
* `:page_redirected` - The URL was redirected
* `:page_load_failure` - The page loaded with a non 200 status code
* `:crashed` - The browser crashed
### Using header authentication
When printing a page using header authentication,
it is usually not only the original page but also the AJAX requests made within it that need to include the authentication header.
Assuming you have a token
```elixir
header_value = get_my_auth_header()
headers = %{"authorization" => header_value}
case HPDF.print_pdf!(my_url, timeout: 30_000, page_headers: headers) do
{:ok, pdf_data} -> do_stuff_with_the_pdf_binary_data(pdf_data)
{:error, error_type, reason} -> #Handle error
{:error, reason} -> # Handle error
end
```
### Using cookie authentication
An initiating cookie can be used to access pages.
```elixir
cookie = %{
name: "_cookie_name",
value: cookie_value,
domain: "your.domain",
path: "/",
secure: true,
httpOnly: true,
}
{:ok, data} = HPDF.print_pdf!(url, timeout: 30_000, cookie: cookie)
```
"""
@doc """
Prints a PDF file with the provided options.
The `HPDF.Application` must be running before calling this function.
### Options
* `timeout` - The timeout for the call. Defaults to `5_000`
* `after_load_delay` - The time to wait after the page finishes loading. Allowing for dynamic JS calls and rendering.
* `cookie` - Supply a cookie for the page to be loaded with. See https://chromedevtools.github.io/devtools-protocol/tot/Network/#method-setCookie
* `page_headers` - A map of headers to supply to the page
* `include_headers_on_same_domain` - A boolean. Defaults to `true`. If `true`, all requests to the same domain will include the same headers as the main page
* `print_options` - A map of options to the print method. See https://chromedevtools.github.io/devtools-protocol/tot/Page/#method-printToPDF
* `max_wait_time` - The maximum amount of time to wait for loading before printing, in milliseconds.
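For example, printing in landscape with backgrounds (the `:print_options` keys
follow the Chrome DevTools `Page.printToPDF` parameter names):

```elixir
{:ok, pdf} =
  HPDF.print_pdf!(url,
    timeout: 30_000,
    print_options: %{landscape: true, printBackground: true}
  )
```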
"""
def print_pdf!(url, options \\ []) do
HPDF.Controller.print_pdf!(url, options)
end
end
|
lib/hpdf.ex
| 0.895168 | 0.746624 |
hpdf.ex
|
starcoder
|
defmodule PortMidi do
@moduledoc """
The entry module of portmidi. Through this module you can open and close
devices, listen on input devices, or write to output devices.
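A quick sketch (device names are machine-specific - use `devices/0` to list
what is available):

```
{:ok, input} = PortMidi.open(:input, "Arturia KeyStep")
:ok = PortMidi.listen(input, self())
# MIDI events now arrive in this process's mailbox
```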
"""
alias PortMidi.Input
alias PortMidi.Output
alias PortMidi.Listeners
alias PortMidi.Devices
use Application
@doc """
Starts the `:portmidi` application. Under the hood, starts the
`Portmidi.Listeners` GenServer, that holds all the listeners to
input devices.
"""
def start(_type, _args) do
children = [
{Listeners, []}
]
opts = [strategy: :one_for_one, name: PortMidi.Supervisor]
Supervisor.start_link(children, opts)
end
@doc """
Opens a connection to the input device with name `device_name`.
Returns the `pid` to the corresponding GenServer. Use this `pid` to call
`listen/2`.
If Portmidi can't open the device, a tuple `{:error, reason}` is returned.
Check `src/portmidi_shared.c#makePmErrorAtom` for all possible errors.
"""
@spec open(:input, binary()) :: {:ok, pid()} | {:error, atom()}
def open(:input, device_name) do
Input.start_link(device_name)
end
@doc """
Opens a connection to the output device with name `device_name`.
Returns the `pid` to the corresponding GenServer. Use this `pid` to call
`write/2`.
If Portmidi can't open the device, a tuple `{:error, reason}` is returned.
Check `src/portmidi_shared.c#makePmErrorAtom` for all possible errors.
"""
@spec open(:output, binary(), non_neg_integer()) :: {:ok, pid()} | {:error, atom()}
def open(:output, device_name, latency \\ 0) do
Output.start_link(device_name, latency)
end
@doc """
Terminates the GenServer held by the `device` argument, and closes the
PortMidi stream. If the type is an input, and `listen/2` was called on it,
it also shuts down the listening process. Using the given `device` after
calling this method will raise an error.
"""
@spec close(atom, pid()) :: :ok
def close(device_type, device)
def close(:input, input), do: Input.stop(input)
def close(:output, output), do: Output.stop(output)
@doc """
Starts a listening process on the given `input`, and returns `:ok`. After
calling this method, the process with the given `pid` will receive MIDI
events in its mailbox as soon as they are emitted from the device.
"""
@spec listen(pid(), pid()) :: :ok
def listen(input, pid), do: Input.listen(input, pid)
@doc """
Writes a MIDI event to the given `output` device. `message` can be a tuple
`{status, note, velocity}`, a tuple `{{status, note, velocity}, timestamp}`
or a list `[{{status, note, velocity}, timestamp}, ...]`. Returns `:ok` on write.
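For example, sending a note-on for middle C at full velocity (`0x90` is the
note-on status byte for MIDI channel 1):

```
PortMidi.write(output, {0x90, 60, 127})
```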
"""
@type message :: {byte(), byte(), byte()}
@type timestamp :: non_neg_integer()
@spec write(pid(), message) :: :ok
@spec write(pid(), {message, timestamp}) :: :ok
@spec write(pid(), [{message, timestamp}, ...]) :: :ok
def write(output, message), do: Output.write(output, message)
@doc """
Returns a map with input and output devices, in the form of
`PortMidi.Device` structs
"""
@spec devices() :: %{input: [%PortMidi.Device{}, ...], output: [%PortMidi.Device{}, ...]}
def devices, do: Devices.list()
end
|
lib/portmidi.ex
| 0.841793 | 0.478894 |
portmidi.ex
|
starcoder
|
defmodule Recurly.Transaction do
@moduledoc """
Module for handling transactions in Recurly.
See the [developer docs on transactions](https://dev.recurly.com/docs/list-transactions)
for more details
"""
use Recurly.Resource
alias Recurly.{Resource,Transaction,TransactionDetails,Account,Invoice,Subscription}
@endpoint "/transactions"
schema :transaction do
field :account, Account, read_only: true
field :action, :string
field :amount_in_cents, :integer
field :currency, :string
field :details, TransactionDetails, read_only: true
field :invoice, Invoice, read_only: true
field :ip_address, :string
field :original_transaction, Transaction, read_only: true
field :payment_method, :string
field :recurring_type, :boolean
field :reference, :string
field :refundable_type, :boolean
field :source, :string
field :subscription, Subscription, read_only: true
field :tax_in_cents, :integer
field :test_type, :boolean
field :transaction_code, :string
field :uuid, :string
field :voidable_type, :boolean
end
@doc """
Creates a stream of transactions given some options.
## Parameters
- `options` Keyword list of the request options. See options in the
[transaction list section](https://dev.recurly.com/docs/list-transactions)
of the docs.
## Examples
See `Recurly.Resource.stream/3` for more detailed examples of
working with resource streams.
```
# stream of successful transactions sorted from most recently
# created to least recently created
stream = Recurly.Transaction.stream(state: :successful, sort: :created_at)
```
"""
def stream(options \\ []) do
Resource.stream(Transaction, @endpoint, options)
end
@doc """
Finds a transaction given a transaction uuid. Returns the transaction or an error.
## Parameters
- `uuid` String transaction uuid
## Examples
```
alias Recurly.NotFoundError
case Recurly.Transaction.find("ialskdjaldkjsaldkjas") do
{:ok, transaction} ->
# Found the transaction
{:error, %NotFoundError{}} ->
# 404 transaction was not found
end
```
"""
def find(uuid) do
Resource.find(%Transaction{}, path(uuid))
end
@doc """
Creates a transaction from a changeset. Supports nesting the `billing_info`
## Parameters
- `changeset` Keyword list changeset
## Examples
```
alias Recurly.ValidationError
case Recurly.Transaction.create(transaction_code: "mytransactioncode") do
{:ok, transaction} ->
# created the transaction
{:error, %ValidationError{errors: errors}} ->
# will give you a list of validation errors
end
```
"""
def create(changeset) do
Resource.create(%Transaction{}, changeset, @endpoint)
end
@doc """
Updates a transaction from a changeset
## Parameters
- `transaction` transaction resource struct
- `changeset` Keyword list changeset representing the updates
## Examples
```
alias Recurly.ValidationError
changes = [
first_name: "Benjamin",
last_name: nil
]
case Recurly.Transaction.update(transaction, changes) do
{:ok, transaction} ->
# the updated transaction
{:error, %ValidationError{errors: errors}} ->
# will give you a list of validation errors
end
```
"""
def update(transaction = %Transaction{}, changeset) do
Resource.update(transaction, changeset)
end
@doc """
Generates the path to a transaction given the transaction code
## Parameters
- `transaction_code` String transaction code
"""
def path(transaction_code) do
Path.join(@endpoint, transaction_code)
end
end
|
lib/recurly/transaction.ex
| 0.919683 | 0.836087 |
transaction.ex
|
starcoder
|
defmodule Day15 do
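  @moduledoc """
  Advent of Code 2018, day 15: simulates rounds of combat between elves and
  goblins on a grid. `part1/1` plays the battle as given and returns
  `{rounds, remaining_hit_points, rounds * remaining_hit_points}`; `part2/1`
  raises the elves' attack power until a battle finishes with no elf dying.
  """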
def part1 lines do
{map, units} = read_map lines
do_round units, map, 0
end
def part2 lines do
{map, units} = read_map lines
do_power(units, map, 4)
end
defp do_power(units, map, power) when power < 40 do
:io.format("power: ~p\n", [power])
units = units_set_elf_power(units, power)
try do
res = do_round units, map, 0
IO.inspect {:final_power, power}
res
catch
:elf_killed ->
do_power units, map, power + 1
end
end
defp do_round units, map, round do
IO.puts ""
IO.inspect round
print_map map, units
result = units_in_reading_order(units)
|> Enum.reduce_while(units, fn unit_id, acc ->
unit_turn unit_id, map, acc
end)
case result do
{:ended, units} ->
print_map map, units
total_points = units_total_points units
{round, total_points, round * total_points}
units ->
do_round units, map, round + 1
end
end
defp unit_turn unit_id, map, acc do
case acc[unit_id] do
{pos, kind, _} ->
target_kind = other_unit_kind(kind)
case adjacent_units(pos, map, acc, target_kind) do
[] ->
targets = unit_targets(kind, acc)
case targets do
[] ->
{:halt, {:ended, acc}}
[_ | _] ->
acc = move(unit_id, targets, map, acc)
acc = attack(unit_id, target_kind, map, acc)
{:cont, acc}
end
[_ | _] ->
# Already adjacent to the enemy. Attack.
acc = attack(unit_id, target_kind, map, acc)
{:cont, acc}
end
nil ->
{:cont, acc}
end
end
defp move unit_id, targets, map, units do
{pos, _kind, _points} = units[unit_id]
new_pos = targets
|> Enum.flat_map(fn {_, {pos, _, _}} ->
empty_adjacent(pos, map, units)
end)
|> Enum.sort
|> MapSet.new
|> shortest_paths(pos, map, units)
|> find_move
case new_pos do
nil ->
units
_ ->
move_unit units, unit_id, new_pos
end
end
defp find_move paths do
sorted = paths
|> Enum.map(fn [final | _] = path -> {final, second_last(path)} end)
|> Enum.sort
case sorted do
[{_, step} | _] -> step
[] -> nil
end
end
defp second_last([sl, _]), do: sl
defp second_last([_ | t]), do: second_last(t)
defp attack(unit_id, target_kind, map, units) when is_integer(unit_id) do
{pos, _, _} = units[unit_id]
case adjacent_units(pos, map, units, target_kind) do
[] ->
units
[_ | _] = enemies ->
targeted = enemies
      |> Enum.map(fn position ->
        {:unit, target_id, kind} = at(position, map, units)
        {_pos, ^kind, points} = units[target_id]
        {points, position, target_id}
end)
|> Enum.sort
|> hd
{_points, _pos, target_unit_id} = targeted
attack_unit units, unit_id, target_unit_id
end
end
defp other_unit_kind(:elf), do: :goblin
defp other_unit_kind(:goblin), do: :elf
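
  # Breadth-first search over empty squares: every path is extended by one
  # step per iteration, so the first iteration that reaches any in-range
  # square yields exactly the shortest paths. find_move/1 then breaks ties by
  # reading order of the destination and of the first step.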
defp shortest_paths in_range, root, map, units do
if MapSet.size(in_range) == 0 do
[]
else
visited = MapSet.new([root])
shortest_paths in_range, [[root]], map, units, visited
end
end
defp shortest_paths in_range, paths, map, units, visited do
case extend_paths paths, map, units, visited, [] do
[] ->
[]
[_|_] = paths ->
case Enum.filter(paths, fn [pos | _] -> pos in in_range end) do
[_ | _] = paths ->
paths
[] ->
newly_visited = Enum.map(paths, fn [pos | _] -> pos end)
visited = MapSet.union visited, MapSet.new(newly_visited)
shortest_paths in_range, paths, map, units, visited
end
end
end
defp extend_paths [[pos | _] = path | paths], map, units, visited, acc do
new_squares = adjacent_squares(pos)
|> Enum.reject(fn pos ->
pos in visited || is_occupied(pos, map, units)
end)
acc = add_new_paths(new_squares, path, acc)
extend_paths(paths, map, units, visited, acc)
end
defp extend_paths [], _map, _units, _visited, acc do
acc
end
defp add_new_paths([square | squares], path, acc) do
add_new_paths squares, path, [[square | path] | acc]
end
defp add_new_paths([], _path, acc) do
acc
end
defp empty_adjacent pos, map, units do
adjacent pos, map, units, &(&1 == :empty)
end
defp adjacent_units pos, map, units, kind do
adjacent(pos, map, units, fn content ->
case content do
{:unit, _unit_id, ^kind} ->
true
_ ->
false
end
end)
end
defp is_occupied pos, map, units do
case raw_at(pos, map) do
:wall ->
true
:empty_or_unit ->
units[pos] != nil
end
end
defp adjacent {row, col}, map, units, fun do
[{row - 1, col}, {row + 1, col}, {row, col - 1}, {row, col + 1}]
|> Enum.filter(fn pos -> fun.(at(pos, map, units)) end)
end
defp adjacent_squares {row, col} do
[{row - 1, col}, {row, col - 1}, {row, col + 1}, {row + 1, col}]
end
defp at pos, map, units do
case raw_at(pos, map) do
:empty_or_unit ->
case units[pos] do
nil ->
:empty
unit_id ->
{_, kind, _} = units[unit_id]
{:unit, unit_id, kind}
end
wall ->
wall
end
end
defp raw_at {row, col}, {cols, map} do
case :binary.at(map, row * cols + col) do
?\# ->
:wall
?. ->
:empty_or_unit
end
end
defp unit_targets kind, units do
target_kind = other_unit_kind kind
Enum.filter(units, fn other_unit ->
match?({_, {_, ^target_kind, _}}, other_unit)
end)
end
defp units_new units do
Enum.flat_map(units, fn {unit_id, {pos, _, _}} = unit ->
[unit, {pos, unit_id}]
end)
|> Map.new
end
defp units_set_elf_power units, power do
Enum.reduce(units, units, fn unit, acc ->
case unit do
{unit_id, {pos, :elf, _}} ->
%{acc | unit_id => {pos, :elf, {200, power}}}
_ ->
acc
end
end)
end
defp units_total_points units do
Enum.reduce(units, 0, fn elem, acc ->
case elem do
{_, {_, _, {points, _}}} -> acc + points
_ -> acc
end
end)
end
defp units_in_reading_order units do
units
|> Enum.filter(fn elem ->
match?({id, {_, _, _}} when is_integer(id), elem)
end)
|> Enum.sort_by(fn {_id, {pos, _, _}} -> pos end)
|> Enum.map(fn {id, _} -> id end)
end
defp attack_unit units, attacker, target do
{_, _, {_, attacker_power}} = units[attacker]
{pos, target_kind, {points, target_power}} = units[target]
case (points - attacker_power) do
points when points > 0 ->
%{units | target => {pos, target_kind, {points, target_power}}}
_ ->
if target_kind == :elf and target_power > 3 do
throw(:elf_killed)
end
kill_unit units, target
end
end
defp move_unit units, unit_id, new_pos do
{old_pos, kind, points} = units[unit_id]
units = Map.delete(units, old_pos)
units = Map.put(units, new_pos, unit_id)
%{units | unit_id => {new_pos, kind, points}}
end
defp kill_unit units, unit_id do
{pos, _, _} = units[unit_id]
units = Map.delete(units, pos)
Map.delete(units, unit_id)
end
defp unit_kind units, unit_id do
{_, kind, _} = units[unit_id]
kind
end
defp unit_points units, unit_id do
{_, _, {points, _}} = units[unit_id]
points
end
defp read_map lines do
[cols] = Enum.dedup(Enum.map(lines, &(byte_size &1)))
{map_string, units} = read_map_rows lines, 0, <<>>, []
{{cols, map_string}, units_new(units)}
end
defp read_map_rows [line | lines], row, map_acc, unit_acc do
{map_acc, unit_acc} = read_map_row line, row, 0, map_acc, unit_acc
read_map_rows lines, row + 1, map_acc, unit_acc
end
defp read_map_rows [], _row, map_acc, unit_acc do
{map_acc, unit_acc}
end
defp read_map_row <<char, chars::binary>>, row, col, map_acc, unit_acc do
case char do
u when u == ?E or u == ?G ->
type = case u do
?E -> :elf
?G -> :goblin
end
unit = {length(unit_acc), {{row, col}, type, {200, 3}}}
map_acc = <<map_acc::binary, ?.>>
read_map_row chars, row, col + 1, map_acc, [unit | unit_acc]
_ ->
map_acc = <<map_acc::binary, char>>
read_map_row chars, row, col + 1, map_acc, unit_acc
end
end
defp read_map_row <<>>, _row, _col, map_acc, unit_acc do
{map_acc, unit_acc}
end
def print_map {cols, map}, units do
IO.puts print_map_1(map, 0, 0, cols, units)
end
defp print_map_1 chars, row, cols, cols, units do
points = Enum.reduce(units, [], fn elem, acc ->
case elem do
{unit_id, {{^row, col}, _, _}} -> [{col, unit_id} | acc]
_ -> acc
end
end)
|> Enum.sort
|> Enum.map(fn {_, unit_id} ->
points = unit_points(units, unit_id)
kind = unit_kind(units, unit_id)
:io_lib.format("~c(~p)", [unit_kind_letter(kind), points])
end)
|> Enum.intersperse(", ")
[" ", points, ?\n | print_map_1(chars, row + 1, 0, cols, units)]
end
defp print_map_1 <<char, chars::binary>>, row, col, cols, units do
pos = {row, col}
[case units do
%{^pos => unit_id} ->
unit_kind_letter(unit_kind(units, unit_id))
_ ->
char
end | print_map_1(chars, row, col + 1, cols, units)]
end
defp print_map_1 <<>>, _row, _col, _cols, _units do
[]
end
defp unit_kind_letter(:elf), do: ?E
defp unit_kind_letter(:goblin), do: ?G
end
|
day15/lib/day15.ex
| 0.518059 | 0.53279 |
day15.ex
|
starcoder
|
defmodule DateTimeParser.Combinators.Time do
@moduledoc false
import DateTimeParser.Combinators.TimeZone, only: [second_letter_of_timezone_abbreviation: 0]
import NimbleParsec
@hour_num ~w(00 01 02 03 04 05 06 07 08 09) ++ Enum.map(23..0, &to_string/1)
@second_minute_num ~w(00 01 02 03 04 05 06 07 08 09) ++ Enum.map(59..0, &to_string/1)
@am_pm ~w(am a.m a.m. a_m pm p.m p.m. p_m a p)
@time_separator ":"
def to_integer(value) when is_binary(value), do: String.to_integer(value)
def hour do
@hour_num
|> Enum.map(&string/1)
|> choice()
|> lookahead_not(invalid_first_digit())
|> map(:to_integer)
|> unwrap_and_tag(:hour)
|> label("numeric hour from 00-23")
end
def microsecond do
[?0..?9]
|> ascii_char()
|> times(min: 1, max: 24)
|> tag(:microsecond)
|> label("numeric subsecond up to 24 digits")
end
def second_or_minute do
@second_minute_num
|> Enum.map(&string/1)
|> choice()
|> lookahead_not(invalid_first_digit())
|> map(:to_integer)
end
def second do
second_or_minute()
|> unwrap_and_tag(:second)
|> label("numeric second from 00-59")
|> concat("." |> string() |> ignore() |> optional())
|> concat(microsecond() |> optional())
end
def minute do
second_or_minute()
|> unwrap_and_tag(:minute)
|> label("numeric minute from 00-59")
end
def hour_minute do
hour()
|> concat(time_separator() |> ignore())
|> concat(minute())
end
def hour_minute_second do
hour_minute()
|> concat(time_separator() |> ignore())
|> concat(second())
end
def am_pm do
@am_pm
|> Enum.map(&string/1)
|> choice()
|> lookahead_not(second_letter_of_timezone_abbreviation())
|> unwrap_and_tag(:am_pm)
|> label("am or pm")
end
def time do
choice([
hour_minute_second(),
hour_minute() |> lookahead_not(time_separator())
])
|> concat(space_separator() |> optional() |> ignore())
|> concat(am_pm() |> optional())
end
defp space_separator, do: string(" ")
defp invalid_first_digit, do: ascii_char([?6..?9])
defp time_separator, do: string(@time_separator)
end
|
lib/combinators/time.ex
| 0.784979 | 0.482673 |
time.ex
|
starcoder
|
defmodule OAuthXYZ.Model.KeyRequest do
@moduledoc """
Key Request Struct and Handling Functions.
```
# full?
"keys": {
"proof": "jwsd",
"jwks": {
"keys": [
{
"kty": "RSA",
"e": "AQAB",
"kid": "xyz-1",
"alg": "RS256",
"n": "kOB5rR4Jv0GMeLaY6_It_r3ORwdf8ci_JtffXyaSx8xY..."
}
]
},
"cert": "MIIEHDCCAwSgAwIBAgIBATANBgkqhkiG9w0BAQsFA...",
"did": "did:example:CV3BVVXK2PWWLCRQLRFU#xyz-1"
}
```
"""
@type t :: %__MODULE__{}
defstruct [
#! :string
:handle,
#! :map
:jwks,
#! :string
:cert,
#! :string
:cert_256,
#! :string
:did,
#! :string
:proof
]
@proof_list ["jwsd", "httpsig", "dpop", "pop", "mtls"]
@doc """
Parse string or map and return structure
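
  ## Examples

  A binary is treated as a handle reference:

      KeyRequest.parse("key-handle-1")
      #=> %KeyRequest{handle: "key-handle-1"}

  A map is parsed field by field; absent fields default to `nil`:

      KeyRequest.parse(%{"proof" => "jwsd", "did" => "did:example:CV3BVVXK2PWWLCRQLRFU#xyz-1"})
      #=> %KeyRequest{proof: "jwsd", did: "did:example:CV3BVVXK2PWWLCRQLRFU#xyz-1", jwks: nil, cert: nil, cert_256: nil}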
"""
@spec parse(request :: map | String.t()) :: t
def parse(handle) when is_binary(handle), do: %__MODULE__{handle: handle}
def parse(request) when is_map(request) do
parsed_request =
%{}
|> parse_jwks(request)
|> parse_cert(request)
|> parse_cert_256(request)
|> parse_did(request)
|> parse_proof(request)
%__MODULE__{
jwks: parsed_request.jwks,
cert: parsed_request.cert,
cert_256: parsed_request.cert_256,
did: parsed_request.did,
proof: parsed_request.proof
}
end
# private
defp parse_jwks(keys, %{"jwks" => jwks}), do: Map.put(keys, :jwks, jwks)
defp parse_jwks(keys, _), do: Map.put(keys, :jwks, nil)
defp parse_cert(keys, %{"cert" => cert}), do: Map.put(keys, :cert, cert)
defp parse_cert(keys, _), do: Map.put(keys, :cert, nil)
defp parse_cert_256(keys, %{"cert#256" => cert_256}), do: Map.put(keys, :cert_256, cert_256)
defp parse_cert_256(keys, _), do: Map.put(keys, :cert_256, nil)
defp parse_did(keys, %{"did" => did}), do: Map.put(keys, :did, did)
defp parse_did(keys, _), do: Map.put(keys, :did, nil)
defp parse_proof(keys, %{"proof" => proof}) when proof in @proof_list,
do: Map.put(keys, :proof, proof)
# TODO : error handling
defp parse_proof(keys, _), do: Map.put(keys, :proof, nil)
end
|
lib/oauth_xyz/model/key_request.ex
| 0.62223 | 0.672048 |
key_request.ex
|
starcoder
|
defmodule Multiverses.DynamicSupervisor do
@moduledoc """
This module is intended to be a drop-in replacement for `DynamicSupervisor`.
It launches the supervised process during a slice of time in which the
universe of the DynamicSupervisor is temporarily set to the universe of
its caller. For example, if the supervised process is `Multiverses.GenServer`,
with `start_link` option `forward_callers: true`, then the GenServer will
exist in the same universe as its caller.
Currently uses DynamicSupervisor private API, so forward compatibility is
not guaranteed.
## Usage
This module should only be used at the point of starting a child under the
supervisor. All other uses of DynamicSupervisor (such as `use DynamicSupervisor`)
should use the native Elixir DynamicSupervisor module, and the supervisor
is fully compatible with native DynamicSupervisor processes.
## Notes
currently, only `start_child/2` is overloaded to provide sharded information.
`count_children/1`, `terminate_child/2` and `which_children/1` will act on
the global information. If you need access to partitioned collections of
processes, use `Multiverses.Registry`.
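
  ## Example

  A minimal sketch of starting a child in the caller's universe (the
  supervisor name and child module are assumptions):

      {:ok, _pid} = Multiverses.DynamicSupervisor.start_child(
        MyApp.DynamicSupervisor,
        {MyApp.Worker, forward_callers: true}
      )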
"""
use Multiverses.Clone,
module: DynamicSupervisor,
except: [start_child: 2]
require Multiverses
@doc "See `DynamicSupervisor.start_child/2`."
def start_child(supervisor, spec) do
# works by injecting a different supervisor bootstrap *through* the
# custom `bootstrap/2` function provided in this module.
child = spec
|> to_spec_tuple
|> inject_bootstrap(Multiverses.link())
GenServer.call(supervisor, {:start_child, child})
end
defp bootstrap_for(mfa, link), do: {__MODULE__, :bootstrap, [mfa, link]}
defp to_spec_tuple({_, _, _, _, _, _} = spec), do: spec
defp to_spec_tuple(spec), do: Supervisor.child_spec(spec, [])
# it's not entirely clear why this is happening here.
@dialyzer {:nowarn_function, inject_bootstrap: 2}
@spec inject_bootstrap(Supervisor.child_spec, Multiverses.link) :: Supervisor.child_spec
defp inject_bootstrap({_, mfa, restart, shutdown, type, modules}, link) do
{bootstrap_for(mfa, link), restart, shutdown, type, modules}
end
defp inject_bootstrap(spec_map = %{start: {mod, _, _}}, link) do
restart = Map.get(spec_map, :restart, :permanent)
type = Map.get(spec_map, :type, :worker)
modules = Map.get(spec_map, :modules, [mod])
shutdown = case type do
:worker -> Map.get(spec_map, :shutdown, 5_000)
:supervisor -> Map.get(spec_map, :shutdown, :infinity)
end
{bootstrap_for(spec_map.start, link), restart, shutdown, type, modules}
end
@doc false
def bootstrap({m, f, a}, universe) do
Multiverses.port(universe)
res = :erlang.apply(m, f, a)
Multiverses.drop()
res
end
end
|
lib/multiverses/dynamic_supervisor.ex
| 0.820757 | 0.560974 |
dynamic_supervisor.ex
|
starcoder
|
defmodule Mathmatical.Runs do
@moduledoc """
The Runs context.
"""
import Ecto.Query, warn: false
alias Mathmatical.Repo
alias Mathmatical.Runs.Attempt
@doc """
Returns the list of attempts.
## Examples
iex> list_attempts()
[%Attempt{}, ...]
"""
def list_attempts do
Repo.all(Attempt)
end
@doc """
Gets a single attempt.
Raises `Ecto.NoResultsError` if the Attempt does not exist.
## Examples
iex> get_attempt!(123)
%Attempt{}
iex> get_attempt!(456)
** (Ecto.NoResultsError)
"""
def get_attempt!(id), do: Repo.get!(Attempt, id)
@doc """
Creates a attempt.
## Examples
iex> create_attempt(%{field: value})
{:ok, %Attempt{}}
iex> create_attempt(%{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def create_attempt(attrs \\ %{}) do
%Attempt{}
|> Attempt.changeset(attrs)
|> Repo.insert()
end
@doc """
Updates a attempt.
## Examples
iex> update_attempt(attempt, %{field: new_value})
{:ok, %Attempt{}}
iex> update_attempt(attempt, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def update_attempt(%Attempt{} = attempt, attrs) do
attempt
|> Attempt.changeset(attrs)
|> Repo.update()
end
@doc """
Deletes a Attempt.
## Examples
iex> delete_attempt(attempt)
{:ok, %Attempt{}}
iex> delete_attempt(attempt)
{:error, %Ecto.Changeset{}}
"""
def delete_attempt(%Attempt{} = attempt) do
Repo.delete(attempt)
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking attempt changes.
## Examples
iex> change_attempt(attempt)
%Ecto.Changeset{source: %Attempt{}}
"""
def change_attempt(%Attempt{} = attempt) do
Attempt.changeset(attempt, %{})
end
alias Mathmatical.Runs.Result
@doc """
Returns the list of results.
## Examples
iex> list_results()
[%Result{}, ...]
"""
def list_results do
Repo.all(Result)
end
@doc """
Gets a single result.
Raises `Ecto.NoResultsError` if the Result does not exist.
## Examples
iex> get_result!(123)
%Result{}
iex> get_result!(456)
** (Ecto.NoResultsError)
"""
def get_result!(id), do: Repo.get!(Result, id)
@doc """
Creates a result.
## Examples
iex> create_result(%{field: value})
{:ok, %Result{}}
iex> create_result(%{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def create_result(attrs \\ %{}) do
%Result{}
|> Result.changeset(attrs)
|> Repo.insert()
end
@doc """
Updates a result.
## Examples
iex> update_result(result, %{field: new_value})
{:ok, %Result{}}
iex> update_result(result, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def update_result(%Result{} = result, attrs) do
result
|> Result.changeset(attrs)
|> Repo.update()
end
@doc """
Deletes a Result.
## Examples
iex> delete_result(result)
{:ok, %Result{}}
iex> delete_result(result)
{:error, %Ecto.Changeset{}}
"""
def delete_result(%Result{} = result) do
Repo.delete(result)
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking result changes.
## Examples
iex> change_result(result)
%Ecto.Changeset{source: %Result{}}
"""
def change_result(%Result{} = result) do
Result.changeset(result, %{})
end
end
|
lib/mathmatical/runs.ex
| 0.853043 | 0.425068 |
runs.ex
|
starcoder
|
defmodule Wabbit.Connection do
use Connection
import Wabbit.Record
require Logger
@doc """
Starts a new connection
# Connection Options
* `:username` - Default is `"guest"`
* `:password` - Default is `"<PASSWORD>"`
* `:virtual_host` - The name of the virtual host to work with. Default is `"/"`
* `:host` - Server host name or address. Default is `"localhost"`
* `:port` - Default is `:undefined`
* `:channel_max` - The maximum total number of channels that the
client will use per connection. Default is `0`
* `:frame_max` - The largest frame size that the client and server
will use for the connection. Default is `0`
* `:heartbeat` - The delay, in seconds, of the connection
heartbeat that the client wants. Default is `0`
* `:connection_timeout` - Default is `:infinity`
* `:ssl_options` - Default is `:none`
* `:client_properties` - Default is `[]`
* `:socket_options` - Default is `[]`
* `:auth_mechanisms` - A list of the security mechanisms that the
server supports. Default is `[&:amqp_auth_mechanisms.plain/3,
&:amqp_auth_mechanisms.amqplain/3]`
# Options
See `GenServer.start_link/3` for more information.
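
  # Example

      # A sketch: connect to a local broker and open a channel
      # (host and port values are assumptions)
      {:ok, conn} = Wabbit.Connection.start_link(host: "localhost", port: 5672)
      {:ok, channel} = Wabbit.Connection.open_channel(conn)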
"""
def start_link(connection_options \\ [], options \\ []) do
Connection.start_link(__MODULE__, connection_options, options)
end
@doc """
Closes a connection
"""
def close(conn), do: Connection.call(conn, :close)
@doc """
Stops a connection
"""
def stop(conn), do: GenServer.stop(conn)
def connect(_, state) do
case open(state.opts) do
{:ok, conn} ->
true = Process.link(conn)
Logger.info("Wabbit: Connection succeed on #{loggable_opts(state.opts)}")
{:ok,
%{
state
| conn: conn,
retry: 0,
fallback_opts:
state.init_opts
|> List.delete(state.opts)
|> Enum.shuffle()
}}
{:error, reason} ->
Logger.info("Wabbit: Connection error on #{loggable_opts(state.opts)} #{inspect(reason)}")
retry(state)
end
end
def disconnect(info, state) do
case info do
{:close, from} ->
:ok = :amqp_connection.close(state.conn)
Connection.reply(from, :ok)
{:error, :closed} ->
Logger.info("Wabbit: Connection closed on #{loggable_opts(state.opts)}")
{:error, :killed} ->
Logger.info("Wabbit: Connection closed on #{loggable_opts(state.opts)} : shutdown~n")
{:error, reason} ->
Logger.info("Wabbit: Connection error on #{loggable_opts(state.opts)} #{inspect(reason)}")
end
{:connect, :reconnect, %{state | conn: nil, channels: %{}}}
end
@doc """
Opens a new channel
"""
def open_channel(conn) do
Connection.call(conn, :open_channel)
end
def init(opts) do
{current_opts, fallback_opts, init_opts} =
cond do
Keyword.keyword?(opts) ->
{opts, [], [opts]}
is_binary(opts) ->
{opts, [], [opts]}
is_list(opts) && !Keyword.keyword?(opts) ->
opts = Enum.shuffle(opts)
{hd(opts), tl(opts), opts}
true ->
raise "Wabbit: unable to parse opts"
end
Process.flag(:trap_exit, true)
state = %{
conn: nil,
opts: current_opts,
channels: %{},
fallback_opts: fallback_opts,
init_opts: init_opts,
retry: 0
}
{:connect, :init, state}
end
defp retry(state) do
max_retry = 5
cond do
state.retry >= max_retry && Enum.empty?(state.fallback_opts) ->
System.halt(1)
state.retry >= max_retry ->
backoff_time = 1_000
state = %{
state
| retry: 0,
opts: hd(state.fallback_opts),
fallback_opts: tl(state.fallback_opts)
}
Logger.info(
"Wabbit: retrying to connect in #{backoff_time / 1000}s on #{loggable_opts(state.opts)}"
)
{:backoff, backoff_time, state}
true ->
backoff_time = 1_000 + 1_000 * state.retry
Logger.info(
"Wabbit: retrying to reconnect in #{backoff_time / 1000}s on #{
loggable_opts(state.opts)
}"
)
{:backoff, backoff_time, %{state | retry: state.retry + 1}}
end
end
def handle_call(_, _, %{conn: nil} = state) do
{:reply, {:error, :closed}, state}
end
def handle_call(:open_channel, {from, _ref}, state) do
try do
case :amqp_connection.open_channel(state.conn) do
{:ok, chan} ->
monitor_ref = Process.monitor(from)
channels = Map.put(state.channels, monitor_ref, chan)
{:reply, {:ok, chan}, %{state | channels: channels}}
other ->
{:reply, other, state}
end
catch
:exit, {:noproc, _} ->
{:reply, {:error, :closed}, state}
_, _ ->
{:reply, {:error, :closed}, state}
end
end
def handle_call(:close, from, state) do
{:disconnect, {:close, from}, state}
end
def handle_info(
{:EXIT, conn, {:shutdown, {:server_initiated_close, _, _}}},
%{conn: conn} = state
) do
{:disconnect, {:error, :server_initiated_close}, state}
end
def handle_info({:EXIT, conn, reason}, %{conn: conn} = state) do
{:disconnect, {:error, reason}, state}
end
def handle_info({:EXIT, conn, {:shutdown, :normal}}, %{conn: conn} = state) do
{:noreply, state}
end
def handle_info({:DOWN, monitor_ref, :process, _pid, _reason}, state) do
state =
case Map.get(state.channels, monitor_ref) do
nil ->
state
pid ->
try do
:ok = :amqp_channel.close(pid)
catch
_, _ -> :ok
end
%{state | channels: Map.delete(state.channels, monitor_ref)}
end
{:noreply, state}
end
def handle_info(_info, state) do
{:noreply, state}
end
def terminate(_reason, state) do
:amqp_connection.close(state.conn)
end
defp loggable_opts(opts) when is_binary(opts) do
uri = URI.parse(opts)
"#{uri.host || "nohost"}:#{uri.port || "noport"}"
end
defp loggable_opts(opts) do
if Keyword.keyword?(opts) do
"#{opts[:host] || "nohost"}:#{opts[:port] || "noport"}"
else
"could not parse connection opts"
end
end
defp open(options) when is_list(options) do
options = options |> normalize_ssl_options
amqp_params =
amqp_params_network(
username: Keyword.get(options, :username, "guest"),
password: Keyword.get(options, :password, "<PASSWORD>"),
virtual_host: Keyword.get(options, :virtual_host, "/"),
host: Keyword.get(options, :host, 'localhost') |> to_charlist,
port: Keyword.get(options, :port, :undefined),
channel_max: Keyword.get(options, :channel_max, 0),
frame_max: Keyword.get(options, :frame_max, 0),
heartbeat: Keyword.get(options, :heartbeat, 0),
connection_timeout: Keyword.get(options, :connection_timeout, :infinity),
ssl_options: Keyword.get(options, :ssl_options, :none),
client_properties: Keyword.get(options, :client_properties, []),
socket_options: Keyword.get(options, :socket_options, []),
auth_mechanisms:
Keyword.get(options, :auth_mechanisms, [
&:amqp_auth_mechanisms.plain/3,
&:amqp_auth_mechanisms.amqplain/3
])
)
case :amqp_connection.start(amqp_params) do
{:ok, pid} -> {:ok, pid}
error -> error
end
end
defp open(uri) when is_binary(uri) do
case uri |> to_charlist |> :amqp_uri.parse() do
{:ok, amqp_params} -> amqp_params |> amqp_params_network |> open
error -> error
end
end
defp normalize_ssl_options(options) when is_list(options) do
for {k, v} <- options do
if k in [:cacertfile, :certfile, :keyfile] do
{k, to_charlist(v)}
else
{k, v}
end
end
end
defp normalize_ssl_options(options), do: options
end
|
lib/wabbit/connection.ex
| 0.765506 | 0.471832 |
connection.ex
|
starcoder
|
defmodule Plaid.Institutions do
@moduledoc """
Functions for Plaid `institutions` endpoint.
"""
import Plaid, only: [make_request_with_cred: 4, get_cred: 0, get_key: 0]
alias Plaid.Utils
defstruct institutions: [], request_id: nil, total: nil
@type t :: %__MODULE__{institutions: [Plaid.Institutions.Institution.t],
request_id: String.t,
total: integer}
@type params :: %{required(atom) => integer | String.t | list}
@type cred :: %{required(atom) => String.t}
@type key :: %{public_key: String.t}
@endpoint "institutions"
defmodule Institution do
@moduledoc """
Plaid Institution data structure.
"""
defstruct credentials: [], has_mfa: nil, institution_id: nil, mfa: [],
name: nil, products: [], request_id: nil
@type t :: %__MODULE__{credentials: [Plaid.Institutions.Institution.Credentials.t],
has_mfa: false | true,
institution_id: String.t,
mfa: [String.t],
name: String.t,
products: [String.t],
request_id: String.t}
defmodule Credentials do
@moduledoc """
Plaid Institution Credentials data structure.
"""
defstruct label: nil, name: nil, type: nil
@type t :: %__MODULE__{label: String.t, name: String.t, type: String.t}
end
end
@doc """
Gets all institutions. Results paginated.
Parameters
```
%{count: 50, offset: 0}
```
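
  A sketch of a typical call (credentials default to `get_cred/0` from config):

      {:ok, %Plaid.Institutions{institutions: institutions}} =
        Plaid.Institutions.get(%{count: 50, offset: 0})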
"""
@spec get(params, cred | nil) :: {:ok, Plaid.Institutions.t} | {:error, Plaid.Error.t}
def get(params, cred \\ get_cred()) do
endpoint = "#{@endpoint}/get"
make_request_with_cred(:post, endpoint, cred, params)
|> Utils.handle_resp(:institutions)
end
@doc """
Gets an institution by id.
"""
@spec get_by_id(String.t, key | nil) :: {:ok, Plaid.Institutions.Institution.t} | {:error, Plaid.Error.t}
def get_by_id(id, key \\ get_key()) do
params = %{institution_id: id}
endpoint = "#{@endpoint}/get_by_id"
make_request_with_cred(:post, endpoint, key, params)
|> Utils.handle_resp(:institution)
end
@doc """
Searches institutions by name and product.
Parameters
```
%{query: "Wells", products: ["transactions"]}
```
"""
@spec search(params, key | nil) :: {:ok, Plaid.Institutions.t} | {:error, Plaid.Error.t}
def search(params, key \\ get_key()) do
endpoint = "#{@endpoint}/search"
make_request_with_cred(:post, endpoint, key, params)
|> Utils.handle_resp(:institutions)
end
end
|
lib/plaid/institutions.ex
| 0.790247 | 0.648703 |
institutions.ex
|
starcoder
|
defmodule Discord.SortedSet.Test.Support.Generator do
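  @moduledoc """
  StreamData generators for terms that SortedSet supports and terms it
  rejects, for use in property tests.

  A sketch of typical usage inside an ExUnitProperties test (the test body is
  an assumption):

      check all terms <- Generator.supported_terms(unique: true) do
        # exercise the SortedSet under test with `terms`
      end
  """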
def supported_terms(options \\ []) do
term_list(supported_term(), options)
end
def supported_term do
StreamData.one_of([
supported_term_scalars(),
nested_tuple(supported_term_scalars()),
nested_list(supported_term_scalars())
])
end
def unsupported_terms(options \\ []) do
term_list(unsupported_term(), options)
end
def unsupported_term do
StreamData.one_of([
unsupported_term_scalars(),
nested_tuple(unsupported_term_scalars()),
StreamData.nonempty(nested_list(unsupported_term_scalars()))
])
end
## Private
@spec term_list(inner :: StreamData.t(), options :: Keyword.t()) :: StreamData.t()
defp term_list(inner, options) do
{unique, options} = Keyword.pop(options, :unique, false)
if unique do
StreamData.uniq_list_of(inner, options)
else
StreamData.list_of(inner, options)
end
end
@spec nested_list(inner :: StreamData.t()) :: StreamData.t()
defp nested_list(inner) do
StreamData.nonempty(
StreamData.list_of(
StreamData.one_of([
inner,
StreamData.tuple({inner}),
StreamData.list_of(inner)
])
)
)
end
@spec nested_tuple(inner :: StreamData.t()) :: StreamData.t()
defp nested_tuple(inner) do
StreamData.one_of([
StreamData.tuple({inner}),
StreamData.tuple({inner, StreamData.tuple({inner})}),
StreamData.tuple({inner, StreamData.list_of(inner)})
])
end
@spec supported_term_scalars() :: StreamData.t()
defp supported_term_scalars do
StreamData.one_of([
StreamData.integer(),
StreamData.atom(:alias),
StreamData.atom(:alphanumeric),
StreamData.string(:printable)
])
end
@spec unsupported_term_scalars() :: StreamData.t()
defp unsupported_term_scalars do
StreamData.one_of([
StreamData.float(),
pid_generator(),
reference_generator(),
function_generator()
])
end
defp pid_generator do
StreamData.bind(
StreamData.tuple({StreamData.integer(0..1000), StreamData.integer(0..1000)}),
fn {a, b} ->
StreamData.constant(:c.pid(0, a, b))
end
)
end
defp reference_generator do
StreamData.constant(make_ref())
end
defp function_generator do
StreamData.constant(fn x -> x end)
end
end
|
test/support/generators.ex
| 0.629319 | 0.411318 |
generators.ex
|
starcoder
|
defmodule SbrokerPlayground do
@moduledoc """
Sbroker regulator simulator for testing various regulator configurations from iex console.
## Usage
Run from `iex`:
#{__MODULE__}.run(iterations, config)
Returns a performance report, grouped into buckets. Each bucket contains the following items:
* `:count` - how many items are in the bucket
* `:main` - the average value of all items in the bucket
* `:min` - the minimum value in the bucket
* `:max` - the maximum value in the bucket
* `:p50`, `:p75`, `:p95`, `:p99` - 50th, 75th, 95th and 99th percentiles of values
in the bucket
The report may contain the following buckets:
* `:allowed` - contains statistics about queue delay for requests that were allowed to execute
by the regulator.
* `:rejected` - contains statistics about queue delay for requests that were dropped by :sprotector
* `:dropped` - contains statistics about queue delay for requests that were dropped by :sregulator
* `:processed` - contains statistics about processing time for requests that were processed by the worker pool
The following configuration parameters are supported:
* `:rps` - regulates the pace of incoming work requests, per second (default is 1000)
* `:worker` (keyword):
* `:pool_size` - worker pool size
* `:delay` (milliseconds) - how long each worker processes every request (default is 0).
* `:jitter` (milliseconds) - how much jitter time to add to every request processing (default is 0)
* `:queue` (tuple `{handler_module, handler_opts}`) - regulator queue spec, see
https://hexdocs.pm/sbroker/sregulator.html for examples
* `:valve` (tuple `{handler_module, handler_opts}`) - regulator valve spec, see
https://hexdocs.pm/sbroker/sregulator.html for examples
* `:protector` (keyword | map, optional) - `:sprotector_pie_meter` spec. If omitted,
`:sprotector` will not be used during the test. See https://hexdocs.pm/sbroker/sprotector_pie_meter.html
for more info.
Other configuration parameters are passed directly to `Transporter.Regulator`, check its documentation
for supported parameters.
## Example
# Run 10 000 iterations with arrival speed 2000 rps. Set regulator's max concurrency to 30
# and target time to 40 ms with maximum queue size of 1000. Worker delay is 5 ms.
#{__MODULE__}.run(10000, rps: 2000, max_concurrency: 30, target: 40, max_queue_size: 1000, worker: [delay: 5])
"""
alias __MODULE__.{Report, Runner}
def run(iterations \\ 1000, config \\ []) do
with_runner(config, fn runner ->
1..iterations
|> Enum.map(fn _ -> Runner.start_task(runner) end)
|> Enum.map(&Task.await(&1, :infinity))
|> Enum.reduce(Report.new(), &Report.add(&2, &1))
|> Report.finish()
|> Report.print()
end)
end
defp with_runner(config, fun) do
runner =
config
|> Runner.new()
|> Runner.setup()
try do
fun.(runner)
after
Runner.tear_down(runner)
end
end
end
|
lib/sbroker_playground.ex
| 0.906161 | 0.612194 |
sbroker_playground.ex
|
starcoder
|
defmodule I18nHelpers.Ecto.Translator do
@doc ~S"""
Translates an Ecto struct, a list of Ecto structs or a map containing translations.
Translating an Ecto struct for a given locale consists of the following steps:
1. Get the list of the fields that need to be translated from the Schema.
The Schema must contain a `get_translatable_fields/0` function returning
a list of those fields.
2. Get the text for the given locale and store it into a virtual field.
The Schema must provide, for each translatable field, a corresponding
virtual field in order to store the translation.
3. Get the list of the associations that also need to be translated from
the Schema. The Schema must contain a `get_translatable_assocs/0` function
returning a list of those associations.
4. Repeat steps 1-3 for each associated Ecto struct.
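
  ## Example

  A minimal sketch of a compatible schema (field names are illustrative);
  each translatable field stores a map of translations and pairs with a
  `translated_`-prefixed virtual field:

      defmodule MyApp.Post do
        use Ecto.Schema

        schema "posts" do
          field :title, :map
          field :translated_title, :string, virtual: true
        end

        def get_translatable_fields, do: [:title]
        def get_translatable_assocs, do: []
      end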
"""
@spec translate(list | struct | map, String.t() | atom, keyword) ::
list | struct | String.t()
def translate(data_structure, locale \\ Gettext.get_locale(), opts \\ [])
def translate([], _locale, _opts), do: []
def translate([head | tail], locale, opts) do
[
translate(head, locale, opts)
| translate(tail, locale, opts)
]
end
def translate(%{__struct__: _struct_name} = struct, locale, opts) do
translate_struct(struct, locale, opts)
end
def translate(%{} = map, locale, opts) do
translate_map(map, locale, opts)
end
def translate(nil, locale, opts) do
translate_map(%{}, locale, opts)
end
defp translate_struct(%{__struct__: _struct_name} = entity, locale, opts) do
fields_to_translate = entity.__struct__.get_translatable_fields()
assocs_to_translate = entity.__struct__.get_translatable_assocs()
entity =
Enum.reduce(fields_to_translate, entity, fn field, updated_entity ->
virtual_translated_field = String.to_atom("translated_" <> Atom.to_string(field))
%{^field => translations} = entity
handle_missing_translation = fn translations_map, locale ->
Keyword.get(opts, :handle_missing_field_translation, fn _, _, _ -> true end)
|> apply([field, translations_map, locale])
Keyword.get(opts, :handle_missing_translation, fn _, _ -> true end)
|> apply([translations_map, locale])
end
opts = Keyword.put(opts, :handle_missing_translation, handle_missing_translation)
struct(updated_entity, [
{virtual_translated_field, translate(translations, locale, opts)}
])
end)
entity =
Enum.reduce(assocs_to_translate, entity, fn field, updated_entity ->
%{^field => assoc} = entity
case Ecto.assoc_loaded?(assoc) do
true ->
struct(updated_entity, [{field, translate(assoc, locale, opts)}])
_ ->
updated_entity
end
end)
entity
end
defp translate_map(%{} = translations_map, locale, opts) do
locale = to_string(locale)
fallback_locale =
Keyword.get(opts, :fallback_locale, Gettext.get_locale())
|> to_string()
handle_missing_translation =
Keyword.get(opts, :handle_missing_translation, fn _, _ -> true end)
cond do
has_translation?(translations_map, locale) ->
translations_map[locale]
has_translation?(translations_map, fallback_locale) ->
translation = translations_map[fallback_locale]
handle_missing_translation.(translations_map, locale)
translation
true ->
handle_missing_translation.(translations_map, locale)
""
end
end
@doc ~S"""
Same as `translate/3` but raises an error if a translation is missing.
"""
@spec translate!(list | struct | map, String.t() | atom, keyword) ::
list | struct | String.t()
def translate!(data_structure, locale \\ Gettext.get_locale(), opts \\ []) do
handle_missing_field_translation = fn field, translations_map, locale ->
Keyword.get(opts, :handle_missing_field_translation, fn _, _, _ -> true end)
|> apply([field, translations_map, locale])
raise "translation of field #{inspect(field)} for locale \"#{locale}\" not found in map #{
inspect(translations_map)
}"
end
handle_missing_translation = fn translations_map, locale ->
Keyword.get(opts, :handle_missing_translation, fn _, _ -> true end)
|> apply([translations_map, locale])
raise "translation for locale \"#{locale}\" not found in map #{inspect(translations_map)}"
end
opts =
opts
|> Keyword.put(:handle_missing_field_translation, handle_missing_field_translation)
|> Keyword.put(:handle_missing_translation, handle_missing_translation)
translate(data_structure, locale, opts)
end
defp has_translation?(translations_map, locale),
do: Map.has_key?(translations_map, locale) && String.trim(locale) != ""
# @doc ~S"""
# Returns a closure allowing to memorize the given options for `translate\3`.
# """
def set_opts(opts) do
fn data_structure, overriding_opts ->
opts = Keyword.merge(opts, overriding_opts)
locale = Keyword.get(opts, :locale, Gettext.get_locale())
translate(data_structure, locale, opts)
end
end
end
|
lib/ecto/translator.ex
| 0.804329 | 0.489198 |
translator.ex
|
starcoder
|
defmodule BorshEx.Schema do
@moduledoc """
Define a Borsh schema for a given struct
## Example
defmodule Data do
use BorshEx.Schema
defstruct id: nil, sub_data: nil
borsh_schema do
field :id, "u16"
field :sub_data, SubData
end
end
"""
@doc """
Serialize struct into a bitstring
#### Example
iex> fake_data = %FakeData{a: 255, b: 20, c: "123"}
iex> FakeData.serialize(fake_data)
<<255, 20, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 49, 50, 51>>
"""
@callback serialize(struct :: struct()) :: bitstring()
@doc """
Deserialize bitstring into the struct
#### Example
      iex> FakeData.deserialize(<<255, 20, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 49, 50, 51>>)
      {:ok, %FakeData{a: 255, b: 20, c: "123"}}

      iex> FakeData.deserialize(<<255, 20, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 49, 50, 51, 87>>)
      {:error, %FakeData{a: 255, b: 20, c: "123"}, <<87>>}
"""
@callback deserialize(bitstring :: bitstring()) ::
{:ok, struct()} | {:error, struct(), bitstring()}
defmacro __using__(_) do
quote do
@behaviour BorshEx.Schema
import BorshEx.Schema, only: [borsh_schema: 1, field: 2]
Module.register_attribute(__MODULE__, :fields, accumulate: true)
@before_compile unquote(__MODULE__)
end
end
defmacro __before_compile__(_env) do
quote do
def fields do
Enum.reverse(@fields)
end
def deserialize(bitstring) do
{object, rest} =
BorshEx.Deserializer.deserialize_field(
{struct(__MODULE__), bitstring},
{nil, __MODULE__}
)
if byte_size(rest) == 0 do
{:ok, object}
else
{:error, object, rest}
end
end
def serialize(object) do
BorshEx.Serializer.serialize_field({<<>>, object}, {nil, __MODULE__})
end
end
end
@doc """
Defines a schema to serialize / deserialize the struct
"""
defmacro borsh_schema(do: block) do
quote do
unquote(block)
end
end
@doc """
Defines a field on the schema with given name and type.
"""
defmacro field(name, type) do
quote do
Module.put_attribute(__MODULE__, :fields, {unquote(name), unquote(type)})
end
end
end
|
lib/borsh_ex/schema.ex
| 0.777469 | 0.410963 |
schema.ex
|
starcoder
|
defmodule Oli.Authoring.Locks do
@moduledoc """
This module provides an interface to durable write locks. These locks are
durable in the sense that they will survive server restarts given that
they are stored in the database.
## Scoping
Locks are scoped to publication and resource. This allows the implementation
to support multiple locks per resource per project. This is necessary
to allow the situation where an author (from the project) is editing a
resource concurrent to an instructor (from a section) editing that same
resource. Each edit in this example would pertain to a differrent publication,
thus the scoping of locks to publication and resource.
## Lock Expiration
This implementation allows a user to acquire an already locked
published_resource that is held by another user if that existing lock has expired. Locks
are considered to be expired if they have not been updated for `@ttl` seconds.
This handles the case that a user locks a resource and then abandons their
editing session without closing their browser.
## Actions
Three actions exist for a lock: `acquire`, `update` and `release`. Lock acquiring
results in the published_resource record being stamped with the users id, but
with an `nil` last updated at date. Updating a lock sets the last updated at
date. Releasing a lock sets both the user and last updated at to `nil`. Coupled
with
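
  ## Example

  A sketch of a typical editing session (identifiers are illustrative):

      {:acquired} = Locks.acquire(project_slug, publication_id, resource_id, user_id)
      # ... apply edits, calling Locks.update/4 on each save ...
      {:ok} = Locks.release(project_slug, publication_id, resource_id, user_id)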
"""
import Ecto.Query, warn: false
alias Oli.Publishing
alias Oli.Publishing.PublishedResource
alias Oli.Repo
alias Oli.Authoring.Broadcaster
# Locks that are not updated after 10 minutes are considered to be expired
@ttl 10 * 60
@doc """
Attempts to acquire or update a lock for user `user_id` the published resource
mapping defined by `publication_id` and `resource_id`.
Returns:
  * `{:acquired}` if the lock was acquired
  * `{:error}` if an internal error was encountered
  * `{:lock_not_acquired, {user_email, date_time}}` the email of the user holding the lock and the lock's last-updated date
"""
@spec acquire(String.t(), number, number, number) ::
{:error}
| {:acquired}
| {:lock_not_acquired,
{String.t(),
%{
calendar: atom,
day: any,
hour: any,
microsecond: any,
minute: any,
month: any,
second: any,
year: any
}}}
def acquire(project_slug, publication_id, resource_id, user_id) do
# Get the published_resource that pertains to this publication and resource
case Publishing.get_published_resource!(publication_id, resource_id)
|> Repo.preload([:author]) do
# Acquire the lock if held already by this user
%{locked_by_id: ^user_id} = mapping ->
lock_action(project_slug, mapping, user_id, &always?/1, {:acquired}, {:acquired}, nil)
# Acquire the lock if no user has this mapping locked
%{locked_by_id: nil} = mapping ->
lock_action(project_slug, mapping, user_id, &always?/1, {:acquired}, {:acquired}, nil)
# Otherwise, another user may have this locked, acquire it if
# the lock is expired
%{lock_updated_at: lock_updated_at} = mapping ->
lock_action(
project_slug,
mapping,
user_id,
&expired?/1,
{:acquired},
{:lock_not_acquired, {mapping.author.email, lock_updated_at}},
nil
)
end
end
@doc """
Attempts to acquire or update a lock for user `user_id` the published resource mapping
defined by `publication_id` and `resource_id`.
Returns:
  * `{:acquired}` if the lock was acquired
  * `{:updated}` if the lock was updated
  * `{:error}` if an internal error was encountered
  * `{:lock_not_acquired, {user_email, date_time}}` the email of the user holding the lock and the lock's last-updated date
"""
@spec update(String.t(), number, number, number) ::
{:error, any()}
| {:acquired}
| {:updated}
| {:lock_not_acquired,
{String.t(),
%{
calendar: atom,
day: any,
hour: any,
microsecond: any,
minute: any,
month: any,
second: any,
year: any
}}}
def update(project_slug, publication_id, resource_id, user_id) do
# Get the mapping that pertains to this publication and resource
case Publishing.get_published_resource!(publication_id, resource_id)
|> Repo.preload([:author]) do
# Acquire the lock if held already by this user and the lock is expired or its lock_updated_at is empty;
# otherwise, simply update it
%{locked_by_id: ^user_id} = mapping ->
lock_action(
project_slug,
mapping,
user_id,
&expired_or_empty_predicate?/1,
{:acquired},
{:updated},
now()
)
# Otherwise, another user may have this locked, a new revision was created after the user
# acquired the lock, or it was locked by this
# user and it expired and an interleaving lock, edit, release by another user
# has taken place. We must not acquire here since this could lead to lost changes as
# the client has a copy of content in client-side memory and is seeking to update this
# revision.
%{lock_updated_at: lock_updated_at} = mapping ->
# The :lock_not_acquired message shows the author's email if a lock is present. Otherwise,
# it just shows a generic message
{:lock_not_acquired,
{
if is_nil(mapping.author) do
"another author"
else
mapping.author.email
end,
lock_updated_at
}}
end
end
@doc """
Releases a lock held by user `user_id` the resource mapping
defined by `publication_id` and `resource_id`.
Returns:
  * `{:ok}` if the lock was released
  * `{:error}` if an internal error was encountered
  * `{:lock_not_held}` if the lock was not held by this user
"""
@spec release(String.t(), number, number, number) :: {:error} | {:lock_not_held} | {:ok}
def release(project_slug, publication_id, resource_id, user_id) do
case Publishing.get_published_resource!(publication_id, resource_id) do
%{locked_by_id: ^user_id} = mapping -> release_lock(project_slug, mapping)
_ -> {:lock_not_held}
end
end
@doc """
Releases all locks for revisions in the supplied publication.
This removes the `locked_by_id` and `lock_updated_at` fields.
Returns:
.`{number, nil | returned data}` where number is the number of rows updated
"""
@spec release_all(binary) :: {number, nil | term()}
def release_all(publication_id) do
from(pr in PublishedResource,
where: pr.publication_id == ^publication_id and not is_nil(pr.locked_by_id),
select: pr
)
|> Repo.update_all(set: [locked_by_id: nil, lock_updated_at: nil])
end
defp now() do
{:ok, datetime} = DateTime.now("Etc/UTC")
datetime
end
defp lock_action(
project_slug,
published_resource,
current_user_id,
predicate,
success_result,
failure_result,
lock_updated_at
) do
case predicate.(published_resource) do
true ->
case Publishing.update_published_resource(published_resource, %{
locked_by_id: current_user_id,
lock_updated_at: lock_updated_at
}) do
{:ok, _} ->
Broadcaster.broadcast_lock_acquired(
project_slug,
published_resource.publication_id,
published_resource.resource_id,
current_user_id
)
success_result
{:error, _} ->
{:error}
end
false ->
failure_result
end
end
defp always?(_published_resource) do
true
end
defp expired?(%{lock_updated_at: lock_updated_at, updated_at: updated_at}) do
# A lock is expired if a diff from now vs lock_updated_at field exceeds the ttl
# If a no edit has been made, we use the timestamp updated_at instead for this calculation
to_use =
case lock_updated_at do
nil -> updated_at
_ -> lock_updated_at
end
NaiveDateTime.diff(now(), to_use) > @ttl
end
def expired_or_empty?(%{locked_by_id: locked_by_id} = published_resource) do
locked_by_id == nil or expired?(published_resource)
end
def expired_or_empty_predicate?(%{lock_updated_at: lock_updated_at} = published_resource) do
lock_updated_at == nil or expired?(published_resource)
end
defp release_lock(project_slug, published_resource) do
case Publishing.update_published_resource(published_resource, %{
locked_by_id: nil,
lock_updated_at: nil
}) do
{:ok, _} ->
Broadcaster.broadcast_lock_released(
project_slug,
published_resource.publication_id,
published_resource.resource_id
)
{:ok}
{:error, _} ->
{:error}
end
end
end
|
lib/oli/authoring/locks.ex
| 0.863751 | 0.456289 |
locks.ex
|
starcoder
|
defmodule BambooSMTPSandbox.Email do
@moduledoc """
Contains functions for creating email structures using Bamboo.
This module can be considered as a Factory module. Each time we want to build a
new email structure that slightly differed from the existing ones, we should
add a new function here.
"""
import Bamboo.Email
@doc """
Used to create a new email with the following fields being populated:
- `:to`: with the content of the environment variable `EMAIL_TO`.
- `:from`: with the content of the environment variable `EMAIL_FROM`.
- `:subject`: with the corresponding argument or with a default value.
- `:text_body`: with the corresponding argument or with a default value.
## Example
# Same as sample_email()
sample_email("BambooSMTP Sandbox", "Hello 👋")
"""
@spec sample_email(String.t(), String.t()) :: Bamboo.Email.t()
def sample_email(subject \\ "BambooSMTP Sandbox", text_body \\ "Hello 👋") do
new_email(
to: System.get_env("EMAIL_TO"),
from: System.get_env("EMAIL_FROM"),
subject: subject,
text_body: text_body
)
end
@doc """
Used to create a new email with the following fields being populated:
- `:to`: with the content of the environment variable `EMAIL_TO`.
- `:from`: with the content of the environment variable `EMAIL_FROM`.
- `:subject`: with the corresponding argument or with a default value.
- `:text_body`: with the corresponding argument or with a default value.
- `:attachments`: with the content of the directory `priv/attachments/`.
## Example
# Same as sample_email_with_attachments()
sample_email_with_attachments("BambooSMTP Sandbox", "Hello 👋")
"""
@spec sample_email_with_attachments(String.t(), String.t()) :: Bamboo.Email.t()
def sample_email_with_attachments(subject \\ "BambooSMTP Sandbox", text_body \\ "Hello 👋") do
sample_email = sample_email(subject, text_body)
attachments = Path.wildcard("priv/attachments/*.*")
add_attachments(sample_email, attachments)
end
defp add_attachments(email, []), do: email
defp add_attachments(email, [attachment | attachments]) do
email
|> put_attachment(attachment)
|> add_attachments(attachments)
end
end
|
lib/bamboo_smtp_sandbox/email.ex
| 0.859266 | 0.618089 |
email.ex
|
starcoder
|
defmodule MarsWater.Algos.SlidingWindow do
def run(input) when is_binary(input) do
[results_requested, grid_size | measurements] =
String.split(input, " ", trim: true)
|> Enum.map(& Integer.parse(&1) |> elem(0))
compute_water_scores(measurements, grid_size)
|> Enum.take(results_requested)
|> Enum.map(&format_result/1)
|> Enum.join("\n")
end
def compute_water_scores(measurements, grid_size) do
grid = build_tuple_grid(measurements, grid_size)
init_acc = {-1, 0, :right, nil, []}
Enum.reduce(1..length(measurements), init_acc, fn _measurement, {last_x, last_y, last_direction, last_window, last_scores} ->
{x, y, direction, move} = next_coord(last_x, last_y, last_direction, grid_size)
window = next_window(last_window, move, grid, grid_size, x, y)
score = {x, y, window_score(window)}
{x, y, direction, window, [score] ++ last_scores}
end)
|> elem(4)
|> Enum.sort(fn {_x1, _y1, s1}, {_x2, _y2, s2} -> s1 > s2 end)
end
# >--------\
# |
# /--------/
# |
# \--------\
# |
# <--------/
def next_coord(x, y, direction, grid_size) do
cond do
x == grid_size and y == grid_size ->
:done
direction == :right and x < grid_size - 1 ->
{x + 1, y, direction, :right}
direction == :left and x > 0 ->
{x - 1, y, direction, :left}
true ->
if direction == :right do
{x, y + 1, :left, :down}
else
{0, y + 1, :right, :down}
end
end
end
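
  # Reuse work between adjacent cells: when the 3x3 window slides one cell
  # left, right, or down, two of its columns (or rows) are copied from the
  # previous window and only the newly exposed column (or row) is read from
  # the grid.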
def next_window(nil, _move, grid, grid_size, x, y) do
{
{grid_value(grid, grid_size, x-1, y-1), grid_value(grid, grid_size, x, y-1), grid_value(grid, grid_size, x+1, y-1)},
{grid_value(grid, grid_size, x-1, y), grid_value(grid, grid_size, x, y), grid_value(grid, grid_size, x+1, y)},
{grid_value(grid, grid_size, x-1, y+1), grid_value(grid, grid_size, x, y+1), grid_value(grid, grid_size, x+1, y+1)}
}
end
def next_window(last_window, move, grid, grid_size, x, y) do
case move do
:left ->
{
{grid_value(grid, grid_size, x-1, y-1), window_value(last_window, 0, 0), window_value(last_window, 1, 0)},
{grid_value(grid, grid_size, x-1, y), window_value(last_window, 0, 1), window_value(last_window, 1, 1)},
{grid_value(grid, grid_size, x-1, y+1), window_value(last_window, 0, 2), window_value(last_window, 1, 2)}
}
:right ->
{
{window_value(last_window, 1, 0), window_value(last_window, 2, 0), grid_value(grid, grid_size, x+1, y-1)},
{window_value(last_window, 1, 1), window_value(last_window, 2, 1), grid_value(grid, grid_size, x+1, y)},
{window_value(last_window, 1, 2), window_value(last_window, 2, 2), grid_value(grid, grid_size, x+1, y+1)}
}
:down ->
{
{window_value(last_window, 0, 1), window_value(last_window, 1, 1), window_value(last_window, 2, 1)},
{window_value(last_window, 0, 2), window_value(last_window, 1, 2), window_value(last_window, 2, 2)},
{grid_value(grid, grid_size, x-1, y+1), grid_value(grid, grid_size, x, y+1), grid_value(grid, grid_size, x+1, y+1)}
}
end
end
def window_value(window, x, y) do
window |> elem(y) |> elem(x)
end
def grid_value(grid, grid_size, x, y) do
if (x < 0 or x > grid_size - 1) or (y < 0 or y > grid_size - 1) do
0
else
grid |> elem(y) |> elem(x)
end
end
def window_score(window) do
window_value(window, 0, 0) +
window_value(window, 1, 0) +
window_value(window, 2, 0) +
window_value(window, 0, 1) +
window_value(window, 1, 1) +
window_value(window, 2, 1) +
window_value(window, 0, 2) +
window_value(window, 1, 2) +
window_value(window, 2, 2)
end
def build_tuple_grid(measurements, grid_size) do
Enum.chunk_every(measurements, grid_size)
|> Enum.map(&List.to_tuple/1)
|> List.to_tuple
end
def format_result({x, y, score}) do
" (#{x}, #{y}, score: #{score})"
end
end
|
elixir/elixir-mars-water/lib/algos/sliding_window.ex
| 0.713731 | 0.665854 |
sliding_window.ex
|
starcoder
|
defmodule ExAeonsEnd.Deck do
@moduledoc "
This is a generic abstraction for a deck of cards, consisting of a draw pile and a discard pile.
"
alias ExAeonsEnd.Card
defstruct [:draw, :discard]
@type t :: %__MODULE__{
draw: list(Card.t()),
discard: list(Card.t())
}
@type pile :: :draw | :discard
@spec new :: __MODULE__.t()
def new do
%__MODULE__{
draw: [],
discard: []
}
end
@doc """
This function prepends a card to a pile for the deck (either the draw pile or discard pile).
## Examples
iex> ExAeonsEnd.Deck.new
...> |> ExAeonsEnd.Deck.add_card(:draw, %ExAeonsEnd.Card{id: 1, name: "a"})
...> |> ExAeonsEnd.Deck.add_card(:draw, %ExAeonsEnd.Card{id: 2, name: "b"})
%ExAeonsEnd.Deck{draw: [%ExAeonsEnd.Card{id: 2, name: "b"}, %ExAeonsEnd.Card{id: 1, name: "a"}], discard: []}
iex> ExAeonsEnd.Deck.new
...> |> ExAeonsEnd.Deck.add_card(:discard, %ExAeonsEnd.Card{id: 1, name: "a"})
...> |> ExAeonsEnd.Deck.add_card(:discard, %ExAeonsEnd.Card{id: 2, name: "b"})
%ExAeonsEnd.Deck{discard: [%ExAeonsEnd.Card{id: 2, name: "b"}, %ExAeonsEnd.Card{id: 1, name: "a"}], draw: []}
iex> ExAeonsEnd.Deck.new
...> |> ExAeonsEnd.Deck.add_card(:draw, %ExAeonsEnd.Card{id: 1, name: "a"})
...> |> ExAeonsEnd.Deck.add_card(:discard, %ExAeonsEnd.Card{id: 2, name: "b"})
%ExAeonsEnd.Deck{draw: [%ExAeonsEnd.Card{id: 1, name: "a"}], discard: [%ExAeonsEnd.Card{id: 2, name: "b"}]}
"""
@spec add_card(__MODULE__.t(), __MODULE__.pile(), Card.t()) :: __MODULE__.t()
def add_card(deck, pile_type, card)
def add_card(deck = %__MODULE__{draw: draw}, :draw, card),
do: %__MODULE__{deck | draw: [card | draw]}
def add_card(deck = %__MODULE__{discard: discard}, :discard, card),
do: %__MODULE__{deck | discard: [card | discard]}
# This is probably best tested with a property test?
@spec shuffle(__MODULE__.t()) :: __MODULE__.t()
def shuffle(deck), do: %__MODULE__{deck | draw: deck.draw |> Enum.shuffle()}
@doc """
This function pops the first card off of the draw pile, or returns :empty if the draw is empty.
## Examples
iex> alias ExAeonsEnd.{Deck, Card}
...> Deck.new
...> |> Deck.add_card(:draw, %Card{id: 1, name: "a"})
...> |> Deck.add_card(:draw, %Card{id: 2, name: "b"})
...> |> Deck.draw()
{%ExAeonsEnd.Card{id: 2, name: "b"}, %ExAeonsEnd.Deck{draw: [%ExAeonsEnd.Card{id: 1, name: "a"}], discard: []}}
iex> alias ExAeonsEnd.{Deck, Card}
...> Deck.new
...> |> Deck.add_card(:discard, %Card{id: 1, name: "a"})
...> |> Deck.add_card(:discard, %Card{id: 2, name: "b"})
...> |> Deck.draw()
{:empty, %ExAeonsEnd.Deck{discard: [%ExAeonsEnd.Card{id: 2, name: "b"}, %ExAeonsEnd.Card{id: 1, name: "a"}], draw: []}}
"""
@spec draw(__MODULE__.t()) :: {Card.t(), __MODULE__.t()} | {:empty, __MODULE__.t()}
def draw(deck)
def draw(deck = %__MODULE__{draw: []}), do: {:empty, deck}
def draw(deck = %__MODULE__{draw: [card | draw]}) do
{card, %__MODULE__{deck | draw: draw}}
end
end
|
lib/ExAeonsEnd/deck.ex
| 0.842669 | 0.440951 |
deck.ex
|
starcoder
|
defmodule Crontab.CronExpression do
@moduledoc """
This module defines the `Crontab.CronExpression` struct and helper functions for working with cron expressions.
"""
alias Crontab.CronExpression.Parser
@type t :: %Crontab.CronExpression{
extended: boolean,
reboot: boolean,
second: [value],
minute: [value],
hour: [value],
day: [value],
month: [value],
weekday: [value],
year: [value]
}
@type interval :: :second | :minute | :hour | :day | :month | :weekday | :year
@type min_max :: {:-, time_unit, time_unit}
@type value ::
time_unit
| :*
| :L
| {:L, value}
| {:/,
time_unit
| :*
| min_max, pos_integer}
| min_max
| {:W, time_unit | :L}
@type minute :: 0..59
@type hour :: 0..23
@type day :: 0..31
@type month :: 1..12
@type weekday :: 0..7
@type year :: integer
@type time_unit :: minute | hour | day | month | weekday | year
@type condition :: {interval, [value]}
@type condition_list :: [condition]
@doc """
Defines the Cron Interval
* * * * * * *
| | | | | | |
| | | | | | +-- :year Year (range: 1900-3000)
| | | | | +---- :weekday Day of the Week (range: 1-7, 1 standing for Monday)
| | | | +------ :month Month of the Year (range: 1-12)
| | | +-------- :day Day of the Month (range: 1-31)
| | +---------- :hour Hour (range: 0-23)
| +------------ :minute Minute (range: 0-59)
+-------------- :second Second (range: 0-59)
The :extended attribute defines if the second is taken into account.
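For example, `~e[0 12 * * *]` matches every day at 12:00, while the extended
seven-field expression `~e[30 0 12 * * * *]e` additionally pins the second to 30.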
"""
defstruct extended: false,
reboot: false,
second: [:*],
minute: [:*],
hour: [:*],
day: [:*],
month: [:*],
weekday: [:*],
year: [:*]
@doc """
Create a `%Crontab.CronExpression{}` via sigil.
### Examples
iex> ~e[*]
%Crontab.CronExpression{
extended: false,
second: [:*],
minute: [:*],
hour: [:*],
day: [:*],
month: [:*],
weekday: [:*],
year: [:*]}
iex> ~e[*]e
%Crontab.CronExpression{
extended: true,
second: [:*],
minute: [:*],
hour: [:*],
day: [:*],
month: [:*],
weekday: [:*],
year: [:*]}
iex> ~e[1 2 3 4 5 6 7]e
%Crontab.CronExpression{
extended: true,
second: [1],
minute: [2],
hour: [3],
day: [4],
month: [5],
weekday: [6],
year: [7]}
"""
@spec sigil_e(binary, charlist) :: t
def sigil_e(cron_expression, options)
def sigil_e(cron_expression, [?e]), do: Parser.parse!(cron_expression, true)
def sigil_e(cron_expression, _options), do: Parser.parse!(cron_expression, false)
@doc """
Convert Crontab.CronExpression struct to Tuple List
### Examples
iex> Crontab.CronExpression.to_condition_list %Crontab.CronExpression{
...> minute: [1], hour: [2], day: [3], month: [4], weekday: [5], year: [6]}
[ {:minute, [1]},
{:hour, [2]},
{:day, [3]},
{:month, [4]},
{:weekday, [5]},
{:year, [6]}]
iex> Crontab.CronExpression.to_condition_list %Crontab.CronExpression{
...> extended: true, second: [0], minute: [1], hour: [2], day: [3], month: [4], weekday: [5], year: [6]}
[ {:second, [0]},
{:minute, [1]},
{:hour, [2]},
{:day, [3]},
{:month, [4]},
{:weekday, [5]},
{:year, [6]}]
"""
@spec to_condition_list(t) :: condition_list
def to_condition_list(interval = %__MODULE__{extended: false}) do
[
{:minute, interval.minute},
{:hour, interval.hour},
{:day, interval.day},
{:month, interval.month},
{:weekday, interval.weekday},
{:year, interval.year}
]
end
def to_condition_list(interval = %__MODULE__{}) do
[{:second, interval.second} | to_condition_list(%{interval | extended: false})]
end
defimpl Inspect do
alias Crontab.CronExpression
alias Crontab.CronExpression.Composer
@doc """
Pretty Print Cron Expressions
### Examples:
iex> IO.inspect %Crontab.CronExpression{}
~e[* * * * * *]
iex> import Crontab.CronExpression
iex> IO.inspect %Crontab.CronExpression{extended: true}
~e[* * * * * * *]e
"""
@spec inspect(CronExpression.t(), any) :: String.t()
def inspect(cron_expression = %CronExpression{extended: false}, _options) do
"~e[" <> Composer.compose(cron_expression) <> "]"
end
def inspect(cron_expression = %CronExpression{extended: true}, _options) do
"~e[" <> Composer.compose(cron_expression) <> "]e"
end
end
end
|
lib/crontab/cron_expression.ex
| 0.890048 | 0.619126 |
cron_expression.ex
|
starcoder
|
defmodule Rabbit.Message do
@moduledoc """
A message consumed by a `Rabbit.Consumer`.
After starting a consumer, any message passed to the `c:Rabbit.Consumer.handle_message/1`
callback will be wrapped in a messsage struct. The struct has the following
fields:
* `:consumer` - The PID of the consumer process.
* `:module` - The module of the consumer process.
* `:channel` - The `AMQP.Channel` being used by the consumer.
* `:payload` - The raw payload of the message.
* `:decoded_payload` - If the message has a content type - this will be the
payload decoded using the applicable serializer.
* `:meta` - The metadata sent when publishing or set by the broker.
* `:custom_meta` - The custom metadata included when starting a consumer.
* `:error_reason` - The reason for any error that occurs during the message
handling callback.
* `:error_stack` - The stacktrace that might accompany the error.
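A minimal `c:Rabbit.Consumer.handle_message/1` sketch (`do_work/1` here is a
hypothetical helper, not part of this library):

    def handle_message(message) do
      # Acknowledge only after the payload has been processed.
      :ok = do_work(message.decoded_payload)
      Rabbit.Message.ack(message)
    end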
"""
defstruct [
:consumer,
:module,
:channel,
:payload,
:decoded_payload,
:meta,
:custom_meta,
:error_reason,
:error_stack
]
@type t :: %__MODULE__{
consumer: pid(),
module: module(),
channel: AMQP.Channel.t(),
payload: binary(),
decoded_payload: any(),
meta: map(),
custom_meta: map(),
error_reason: any(),
error_stack: nil | list()
}
@doc """
Creates a new message struct.
"""
@spec new(Rabbit.Consumer.t(), module(), AMQP.Channel.t(), any(), map(), map()) ::
Rabbit.Message.t()
def new(consumer, module, channel, payload, meta, custom_meta) do
%__MODULE__{
consumer: consumer,
module: module,
channel: channel,
payload: payload,
meta: meta,
custom_meta: custom_meta
}
end
@doc """
Acknowledges a message.
## Options
* `:multiple` - If `true`, all messages up to the one specified by its
`delivery_tag` are considered acknowledged by the server.
"""
@spec ack(Rabbit.Message.t(), keyword()) :: :ok | {:error, :blocked | :closing}
def ack(message, opts \\ []) do
AMQP.Basic.ack(message.channel, message.meta.delivery_tag, opts)
end
@doc """
Negatively acknowledges a message.
## Options
* `:multiple` - If `true`, all messages up to the one specified by its
`delivery_tag` are negatively acknowledged by the server.
* `:requeue` - If `true`, the message will be returned to the queue and redelivered
to the next available consumer.
"""
@spec nack(Rabbit.Message.t(), keyword()) :: :ok | {:error, :blocked | :closing}
def nack(message, opts \\ []) do
AMQP.Basic.nack(message.channel, message.meta.delivery_tag, opts)
end
@doc """
Rejects a message.
## Options
* `:requeue` - If `true`, the message will be returned to the queue and redelivered
to the next available consumer.
"""
@spec reject(Rabbit.Message.t(), keyword()) :: :ok | {:error, :blocked | :closing}
def reject(message, opts \\ []) do
AMQP.Basic.reject(message.channel, message.meta.delivery_tag, opts)
end
@doc false
@spec put_error(Rabbit.Message.t(), any(), list()) :: Rabbit.Message.t()
def put_error(message, reason, stack) do
%{message | error_reason: reason, error_stack: stack}
end
end
|
lib/rabbit/message.ex
| 0.886211 | 0.483161 |
message.ex
|
starcoder
|
defmodule Chunky.Geometry.Triangle do
@moduledoc """
Functions for working with **triangles**. For _predicate functions_ related to Triangles, see `Chunky.Geometry.Triangle.Predicates`.
Triangles in Chunky are represented as a tuple of three positive integers, with each integer greater than or equal to `1`. So `{3, 4, 5}`
is a triangle, as is `{145, 7, 139}`. Not every 3-tuple of integers is a _valid_ triangle - `{3, 5, 10}` doesn't describe a realizable
triangle. You can test for a _valid_ triangle with `Chunky.Geometry.is_valid_triangle?/1`.
Some functions will raise argument errors for invalid triangles, while others will return an error tuple, where possible.
# Triangle Functions
## Measurements
These functions provide measurements of triangles, their angles, or their shape.
- `angles/1` - Find the interior angles of a triangle
- `area/1` - Find the area of any triangle
- `height/2` - Find the bisecting height of a triangle
- `is_multiple_heronian_triangle?/2` - Is a triangle a heronian triangle with an area that is a specific multiple of the perimeter?
## Construction and Deconstructing Triangles
Create new triangles, break existing triangles into smaller triangles, or recombine smaller triangles:
- `compose/2` - Create a new triangle from two compatible right triangles
- `decompose/1` - Break a triangle into two smaller right triangles
- `normalize/1` - Normalize the ordering of sides of a triangle
- `triangles_from_hypotenuse/2` - Generate integer triangles given a hypotenuse and optional filter
## Meta-inspection of triangles
Metadata about triangles:
- `type/1` - Determine the basic type, or form, of a triangle
# Finding Triangles
Finding specific _shapes_ of triangles (like Heronian, m-heronian, scalenes, etc) can be useful. For instance, finding triangles
that are _decomposable_ or _19-heronian_ triangles. Constructing these by hand can be tedious - instead we can combine a series
of functions from the Triangle and Predicates modules to help find what we're looking for.
The heart of searching for triangles is the `triangles_from_hypotenuse/2` function, which generates the set of all valid triangles
for a given hypotenuse edge:
iex> Triangle.triangles_from_hypotenuse(3)
[{1, 3, 3}, {2, 2, 3}, {2, 3, 3}, {3, 3, 3}]
All of the valid triangles for the given hypotenuse are generated, without duplicates, edge order independent (so `{3, 2, 3}` and `{2, 3, 3}`
would be the same, and only one included in the output). For a small hypotenuse this output by itself can be useful
for visual inspection, but the number of valid triangles grows fairly quickly. For a hypotenuse of `15`, there
are `64` valid triangles. For a hypotenuse of `100`, there are `2550` triangles. On to the next step: filter functions.
The second parameter of `triangles_from_hypotenuse/2` is a predicate function - any function that takes a triangle
as its only parameter, and returns a boolean. The output of `triangles_from_hypotenuse/2` will be filtered to contain only
those triangles that pass the filter. For instance, we can filter our list of triangles with a hypotenuse of `3` to
only the equilateral triangles:
iex> Triangle.triangles_from_hypotenuse(3, filter: &Triangle.Predicates.is_equilateral?/1)
[{3, 3, 3}]
Let's look at the first example we cited, finding decomposable triangles. Constructing these by hand can be tedious. But using
the above technique, we can quickly find what we're looking for. How about triangles with a hypotenuse of `30` that are
decomposable:
iex> Triangle.triangles_from_hypotenuse(30, filter: &Triangle.Predicates.is_decomposable?/1)
[{8, 26, 30}, {11, 25, 30}, {17, 17, 30}, {25, 25, 30}, {26, 28, 30}]
If we want to tackle the second example we cited, finding _19-heronian_ triangles (these are triangles whose area is exactly 19 times
their perimeter), we'll need to expand our search: we don't know _exactly_ what the hypotenuse will be, so we check multiple:
```elixir
150..250
|> Enum.map(
fn h ->
Triangle.triangles_from_hypotenuse(
h,
filter: fn t ->
Triangle.is_multiple_heronian_triangle?(t, 19)
end)
end
)
|> List.flatten()
```
Here we've taken the range `150` to `250`, and used each of those values in order as the hypotenuse for `triangles_from_hypotenuse/2`. The
filter function is an anonymous function that uses `is_multiple_heronian_triangle?/2` to filter down to only those triangles
that are an exact multiple of perimeter and area.
"""
import Chunky.Geometry
alias Chunky.Fraction
alias Chunky.Math
alias Chunky.Geometry.Triangle.Predicates
@doc """
Find the angles of a triangle.
The resulting 3-tuple will be floats, in order such that each angle is opposite
its edge. So for a triangle with sides `{a, b, c}`, the resulting angles `{θa, θb, θc}`
are such that `θa` is opposite edge `a`, `θb` opposite edge `b`, and `θc` opposite edge
`c`:

## Examples
iex> Triangle.angles({3, 4, 5})
{36.86989764584401, 53.13010235415599, 90.0}
iex> Triangle.angles({13, 13, 13})
{59.99999999999999, 59.99999999999999, 59.99999999999999}
iex> Triangle.angles({10, 5, 10})
{75.52248781407008, 28.95502437185985, 75.52248781407008}
iex> Triangle.angles({30, 16, 17})
{130.73591716163173, 23.83600707762401, 25.428075760744253}
"""
def angles(t = {a, b, c}) when is_triangle?(t) do
if is_valid_triangle?(t) == false do
raise ArgumentError, message: "Shape of the triangle is invalid"
else
{
solve_for_angle(a, b, c),
solve_for_angle(b, a, c),
solve_for_angle(c, b, a)
}
end
end
defp solve_for_angle(c, a, b) do
:math.acos((c * c - (a * a + b * b)) / (-2.0 * a * b)) |> rad_to_deg()
# :math.acos((c * c) / ((a * a) + (b * b) - (2 * a * b)))
end
defp rad_to_deg(v), do: v * (180.0 / :math.pi())
@doc """
Find the area of a triangle, returning either an integer, float, or fraction value.
This function uses the heronian formula for calculating the area of any triangle.
## Examples
iex> Triangle.area({3, 4, 5})
{:integer, 6}
iex> Triangle.area({5, 15, 15})
{:float, 36.9754986443726}
iex> Triangle.area({5, 5, 5})
{:float, 10.825317547305483}
iex> Triangle.area({7, 11, 13})
{:float, 38.499188303131795}
iex> Triangle.area({5, 12, 13})
{:integer, 30}
iex> Triangle.area({5, 10, 20})
** (ArgumentError) Shape of the triangle is invalid
"""
def area(t = {a, b, c}) when is_triangle?(t) do
if is_valid_triangle?(t) == false do
raise ArgumentError, message: "Shape of the triangle is invalid"
end
# find the semi-perimeter
semi_p = Fraction.new("1/2") |> Fraction.multiply(a + b + c)
# build the inner values
sub_a = semi_p |> Fraction.subtract(a)
sub_b = semi_p |> Fraction.subtract(b)
sub_c = semi_p |> Fraction.subtract(c)
# before our square root
pre_root =
semi_p |> Fraction.multiply(sub_a) |> Fraction.multiply(sub_b) |> Fraction.multiply(sub_c)
# now find our root
case pre_root |> Fraction.power(Fraction.new("1/2"), allow_irrational: true) do
%Fraction{} = res ->
if Fraction.is_whole?(res) do
{:integer, Fraction.get_whole(res)}
else
{:fractional, res |> Fraction.simplify()}
end
v when is_float(v) ->
{:float, v}
end
end
@doc """
Compose two pythagorean (right) triangles into a larger triangle, if possible.
This is the inverse of `decompose/1` - two compatible right triangles can be joined on
an equal, non-hypotenuse edge to form a new triangle.
The results of this function will either be a tuple of `{:ok, _}` with a one or two new triangles,
or `{:error, reason}` for not being composable.
## Examples
iex> Triangle.compose({12, 5, 13}, {5, 13, 12})
{:ok, {10, 13, 13}, {13, 13, 24}}
iex> Triangle.compose({12, 5, 13}, {9, 15, 12})
{:ok, {13, 14, 15}}
iex> Triangle.compose({12, 5, 13}, {3, 4, 5})
{:error, :not_composable}
iex> Triangle.compose({12, 5, 13}, {5, 7, 24})
{:error, :not_pythagorean_triangles}
"""
def compose(v, w) when is_triangle?(v) and is_triangle?(w) do
case {Predicates.is_pythagorean_triangle?(v), Predicates.is_pythagorean_triangle?(w)} do
{true, true} ->
{v_a, v_b, v_c} = normalize(v)
{w_a, w_b, w_c} = normalize(w)
cond do
v_a == w_a && v_b == w_b ->
# build the result triangles
comp_a = {v_c, w_c, v_b + w_b} |> normalize()
comp_b = {v_c, w_c, v_a + w_a} |> normalize()
# sort by the smallest edge
[res_a, res_b] =
[comp_a, comp_b]
|> Enum.sort(fn {side_a, _, _}, {side_b, _, _} -> side_a <= side_b end)
# results!
{:ok, res_a, res_b}
v_a == w_a ->
{:ok, {v_c, w_c, v_b + w_b} |> normalize()}
v_a == w_b ->
{:ok, {v_c, w_c, v_b + w_a} |> normalize()}
v_b == w_a ->
{:ok, {v_c, w_c, v_a + w_b} |> normalize()}
v_b == w_b ->
{:ok, {v_c, w_c, v_a + w_a} |> normalize()}
true ->
{:error, :not_composable}
end
_ ->
{:error, :not_pythagorean_triangles}
end
end
@doc """
Decompose an integer triangle into two smaller integer right triangles.
## Examples
iex> Triangle.decompose({3, 4, 5})
{:error, :indecomposable}
iex> Triangle.decompose({6, 6, 6})
{:error, :indecomposable}
iex> Triangle.decompose({10, 13, 13})
{:ok, {5, 12, 13}, {5, 12, 13}}
iex> Triangle.decompose({13, 14, 15})
{:ok, {5, 12, 13}, {9, 12, 15}}
iex> Triangle.decompose({13, 13, 24})
{:ok, {5, 12, 13}, {5, 12, 13}}
"""
def decompose(t) when is_triangle?(t) do
{a, b, c} = normalize(t)
case type(t) do
# most of our cases are indecomposable
:invalid ->
{:error, :indecomposable}
:right ->
{:error, :indecomposable}
:equilateral ->
{:error, :indecomposable}
# isoceles triangles can be decomposed if the height
# and base/2 are both integers
:isoceles ->
case height(t) do
{:integer, h} ->
if a == b do
# the equal sides are shorter than the inequal side
if rem(c, 2) == 0 do
{:ok, {div(c, 2), h, a} |> normalize(), {div(c, 2), h, a} |> normalize()}
else
{:error, :indecomposable}
end
else
# the equal sides are longer than the inequal side
if rem(a, 2) == 0 do
{:ok, {div(a, 2), h, c} |> normalize(), {div(a, 2), h, c} |> normalize()}
else
{:error, :indecomposable}
end
end
_ ->
{:error, :indecomposable}
end
# scalenes can be decomposed if we have an integer height, and can find
# appropriate bases for each right triangle. A scalene may have an integer
# height (and decomposability) from a base other than the hypotenuse
:scalene ->
# find which side of the scalene has an integer base
case scalene_base(t) do
:none ->
{:error, :indecomposable}
{base, h} ->
# determine which side are our decomposed hypotenuses
{a_h, b_h} =
case base do
:small -> {b, c}
:medium -> {a, c}
:large -> {a, b}
end
# now try and form the right triangles
case {right_triangle_from(a_h, h), right_triangle_from(b_h, h)} do
{nil, _} ->
{:error, :indecomposable}
{_, nil} ->
{:error, :indecomposable}
{nil, nil} ->
{:error, :indecomposable}
{a_base, b_base} ->
{:ok, {a_base, h, a_h}, {b_base, h, b_h}}
end
end
end
end
# form a right triangle from a hypotenuse and an edge
defp right_triangle_from(hyp, edge) do
case Math.nth_integer_root(hyp * hyp - edge * edge, 2) do
{:exact, v} -> v
_ -> nil
end
end
# find which base of a scalene triangle provides an integer height
defp scalene_base(t) do
bases =
[:small, :medium, :large]
|> Enum.map(fn side ->
{side, height(t, base: side)}
end)
|> Enum.filter(fn {_, {v, _}} -> v == :integer end)
case bases do
[] -> :none
[{base, {_, h}}] -> {base, h}
[{base, {_, h}} | _] -> {base, h}
end
end
@doc """
Find the height of a triangle from the hypotenuse.
This function will return an integer, fraction, or float.
## Options
- `base` - Atom. Default `:large`. One of `:small`, `:medium`, or `:large`. Side to use when calculating height for a scalene triangle
## Examples
iex> Triangle.height({5, 5, 5})
{:float, 4.330127018922193}
iex> Triangle.height({3, 4, 5})
{:fractional, %Fraction{num: 12, den: 5}}
iex> Triangle.height({5, 5, 8})
{:integer, 3}
iex> Triangle.height({10, 13, 13})
{:integer, 12}
iex> Triangle.height({13, 14, 15})
{:fractional, %Fraction{num: 56, den: 5}}
iex> Triangle.height({13, 14, 15}, base: :medium)
{:integer, 12}
iex> Triangle.height({3, 4, 9})
** (ArgumentError) Shape of the triangle is invalid
"""
def height(t, opts \\ []) when is_triangle?(t) do
# normalize edge ordering
{a, b, c} = normalize(t)
# which side is our base length?
base =
case type(t) do
:isoceles ->
t |> Tuple.to_list() |> Enum.uniq() |> Enum.reject(fn v -> v == b end) |> List.first()
:scalene ->
# our options may have a base selection, small, medium, large
s_base = opts |> Keyword.get(:base, :large)
case s_base do
:small -> a
:medium -> b
:large -> c
end
_ ->
c
end
case area(t) do
{:integer, ta} ->
h = Fraction.new(1, base) |> Fraction.multiply(ta * 2)
if Fraction.is_whole?(h) do
{:integer, h |> Fraction.get_whole()}
else
{:fractional, h |> Fraction.simplify()}
end
{:float, ta} ->
{:float, 2 * ta / base}
end
end
@doc """
Is a triangle a heronian triangle where the area is a specific multiple of the perimeter?
A `2-heronian` triangle would have an area that is `2*perimeter` of the triangle, while a
`3-heronian` would have an area that is `3*perimeter`. For each multiple size `m`, there
are a finite number of multiple heronians triangles that are `m-heronian`.
## Examples
iex> Triangle.is_multiple_heronian_triangle?({13, 14, 15}, 2)
true
iex> Triangle.is_multiple_heronian_triangle?({11, 25, 30}, 2)
true
iex> Triangle.is_multiple_heronian_triangle?({25, 26, 17}, 3)
true
iex> Triangle.is_multiple_heronian_triangle?({25, 28, 17}, 3)
true
iex> Triangle.is_multiple_heronian_triangle?({25, 36, 29}, 4)
true
"""
def is_multiple_heronian_triangle?(t = {a, b, c}, m) when is_triangle?(t) and is_integer(m) do
{_, ta} = area(t)
tp = a + b + c
Predicates.is_heronian_triangle?(t) && ta == tp * m
end
@doc """
Normalize the order of edges of a triangle.
## Examples
iex> Triangle.normalize({5, 13, 7})
{5, 7, 13}
iex> Triangle.normalize({12, 8, 5})
{5, 8, 12}
iex> Triangle.normalize({55, 13, 47})
{13, 47, 55}
iex> Triangle.normalize({3, 4, 5})
{3, 4, 5}
"""
def normalize(t = {a, b, c}) when is_triangle?(t) do
Enum.sort([a, b, c]) |> List.to_tuple()
end
@doc """
Generate all of the integer triangles from a given hypotenuse `h`.
This will create a list of triangles with integer sides and hypotenuse `h`, with
no duplicates, with order independent sides. So the triangle `{3, 4, 5}` and `{4, 3, 5}`
would be considered the same, and only one of them would be included in the output.
An optional `filter` function can be provided, which will be used to
pre-filter the result list during computation; this can significantly reduce
memory consumption.
## Options
- `filter` - Predicate function of arity 1, which returns a boolean
## Examples
iex> Triangle.triangles_from_hypotenuse(5)
[{1, 5, 5}, {2, 4, 5}, {2, 5, 5}, {3, 3, 5}, {3, 4, 5}, {3, 5, 5}, {4, 4, 5}, {4, 5, 5}, {5, 5, 5}]
iex> Triangle.triangles_from_hypotenuse(5, filter: &Triangle.Predicates.is_scalene?/1)
[{2, 4, 5}, {3, 4, 5}]
iex> Triangle.triangles_from_hypotenuse(5, filter: &Triangle.Predicates.is_pythagorean_triangle?/1)
[{3, 4, 5}]
iex> Triangle.triangles_from_hypotenuse(125, filter: &Triangle.Predicates.is_pythagorean_triangle?/1)
[{35, 120, 125}, {44, 117, 125}, {75, 100, 125}]
"""
def triangles_from_hypotenuse(h, opts \\ []) when is_integer(h) and h > 0 do
filter = opts |> Keyword.get(:filter, fn _ -> true end)
# Build our nested iteration of sides: side A runs from 1 to h, while
# side B runs from max(h - a + 1, a) to h, which keeps a <= b and
# satisfies the triangle inequality (a + b > h).
1..h
|> Enum.map(fn a ->
max(h - a + 1, a)..h
|> Enum.map(fn b ->
{a, b, h}
end)
|> Enum.filter(fn v -> filter.(v) end)
end)
|> List.flatten()
end
@doc """
Determine the characteristic _type_ of a triangle, like _right_, _scalene_, or _isoceles_.
The possible types returned by this function are:
- `:invalid`
- `:right`
- `:equilateral`
- `:isoceles`
- `:scalene`
## Examples
iex> Triangle.type({3, 4, 5})
:right
iex> Triangle.type({3, 4, 9})
:invalid
iex> Triangle.type({3, 4, 4})
:isoceles
iex> Triangle.type({13, 13, 13})
:equilateral
iex> Triangle.type({7, 13, 19})
:scalene
"""
def type(t) when is_triangle?(t) do
cond do
is_valid_triangle?(t) == false -> :invalid
Predicates.is_pythagorean_triangle?(t) -> :right
Predicates.is_equilateral?(t) -> :equilateral
Predicates.is_isoceles?(t) -> :isoceles
true -> :scalene
end
end
end
|
lib/geometry/triangle.ex
| 0.956022 | 0.96641 |
triangle.ex
|
starcoder
|
defmodule StarkInfra.PixInfraction do
alias __MODULE__, as: PixInfraction
alias StarkInfra.Utils.Rest
alias StarkInfra.Utils.Check
alias StarkInfra.User.Project
alias StarkInfra.User.Organization
alias StarkInfra.Error
@moduledoc """
Groups PixInfraction related functions
"""
@doc """
PixInfraction are used to report transactions that are suspected of
fraud, to request a refund or to reverse a refund.
When you initialize a PixInfraction, the entity will not be automatically
created in the Stark Infra API. The 'create' function sends the objects
to the Stark Infra API and returns the created struct.
## Parameters (required):
- `:reference_id` [string]: end_to_end_id or return_id of the transaction being reported. ex: "E20018183202201201450u34sDGd19lz"
- `:type` [string]: type of infraction report. Options: "fraud", "reversal", "reversalChargeback"
## Parameters (optional):
- `:description` [string, default nil]: description for any details that can help with the infraction investigation.
## Attributes (return-only):
- id [string]: unique id returned when the PixInfraction is created. ex: "5656565656565656"
- credited_bank_code [string]: bank_code of the credited Pix participant in the reported transaction. ex: "20018183"
- debited_bank_code [string]: bank_code of the debited Pix participant in the reported transaction. ex: "20018183"
- agent [string]: Options: "reporter" if you created the PixInfraction, "reported" if you received the PixInfraction.
- analysis [string]: analysis that led to the result.
- bacen_id [string]: central bank's unique UUID that identifies the infraction report.
- reported_by [string]: agent that reported the PixInfraction. Options: "debited", "credited".
- result [string]: result after the analysis of the PixInfraction by the receiving party. Options: "agreed", "disagreed"
- status [string]: current PixInfraction status. Options: "created", "failed", "delivered", "closed", "canceled".
- created [DateTime]: creation datetime for the PixInfraction. ex: ~U[2020-03-10 10:30:00Z]
- updated [DateTime]: latest update datetime for the PixInfraction. ex: ~U[2020-03-10 10:30:00Z]
"""
@enforce_keys [
:reference_id,
:type
]
defstruct [
:reference_id,
:type,
:description,
:id,
:credited_bank_code,
:debited_bank_code,
:agent,
:analysis,
:bacen_id,
:reported_by,
:result,
:status,
:created,
:updated
]
@type t() :: %__MODULE__{}
@doc """
Create PixInfractions in the Stark Infra API
## Parameters (required):
- `:infractions` [list of PixInfraction]: list of PixInfraction structs to be created in the API.
## Options:
- `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.
## Return:
- list of PixInfraction structs with updated attributes
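## Example:
A minimal sketch, assuming a default project has been set in configs (the
reference id below is the illustrative one from the struct docs):
    {:ok, [infraction]} =
      StarkInfra.PixInfraction.create([
        %StarkInfra.PixInfraction{
          reference_id: "E20018183202201201450u34sDGd19lz",
          type: "fraud"
        }
      ])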
"""
@spec create(
[PixInfraction.t() | map],
user: Organization.t() | Project.t() | nil
) ::
{:ok, [PixInfraction.t() | map]} |
{:error, Error.t()}
def create(infractions, options \\ []) do
Rest.post(
resource(),
infractions,
options
)
end
@doc """
Same as create(), but it will unwrap the error tuple and raise in case of errors.
"""
@spec create!(
[PixInfraction.t() | map],
user: Organization.t() | Project.t() | nil
) :: any
def create!(infractions, options \\ []) do
Rest.post!(
resource(),
infractions,
options
)
end
@doc """
Retrieve the PixInfraction struct linked to your Workspace in the Stark Infra API using its id.
## Parameters (required):
- `:id` [string]: struct unique id. ex: "5656565656565656".
## Options:
- `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.
## Return:
- PixInfraction struct that corresponds to the given id.
"""
@spec get(
id: binary,
user: Organization.t() | Project.t() | nil
) ::
{:ok, PixInfraction.t()} |
{:error, Error.t()}
def get(id, options \\ []) do
Rest.get_id(
resource(),
id,
options
)
end
@doc """
Same as get(), but it will unwrap the error tuple and raise in case of errors.
"""
@spec get!(
id: binary,
user: Organization.t() | Project.t() | nil
) :: any
def get!(id, options \\ []) do
Rest.get_id!(
resource(),
id,
options
)
end
@doc """
Receive a stream of PixInfractions structs previously created in the Stark Infra API
## Options:
- `:limit` [integer, default nil]: maximum number of structs to be retrieved. Unlimited if nil. ex: 35
- `:after` [Date or string, default nil]: date filter for structs created after a specified date. ex: ~D[2020-03-10]
- `:before` [Date or string, default nil]: date filter for structs created before a specified date. ex: ~D[2020-03-10]
- `:status` [list of strings, default nil]: filter for status of retrieved objects. ex: ["created", "failed", "delivered", "closed", "canceled"]
- `:ids` [list of strings, default nil]: list of ids to filter retrieved objects. ex: ["5656565656565656", "4545454545454545"]
- `:type` [list of strings, default nil]: filter for the type of retrieved PixInfractions. Options: "fraud", "reversal", "reversalChargeback"
- `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.
## Return:
- stream of PixInfraction structs with updated attributes
"""
@spec query(
limit: integer | nil,
after: Date.t() | binary | nil,
before: Date.t() | binary | nil,
status: [binary] | nil,
ids: [binary] | nil,
type: [binary] | nil,
user: Organization.t() | Project.t() | nil
) ::
({:cont, {:ok, [PixInfraction.t() | map]}} |
{:error, [Error.t()]} |
{:halt, any} |
{:suspend, any},
any -> any)
def query(options \\ []) do
Rest.get_list(
resource(),
options
)
end
@doc """
Same as query(), but it will unwrap the error tuple and raise in case of errors.
"""
@spec query!(
limit: integer | nil,
after: Date.t() | binary | nil,
before: Date.t() | binary | nil,
status: [binary] | nil,
ids: [binary] | nil,
type: [binary] | nil,
user: Organization.t() | Project.t() | nil
) :: any
def query!(options \\ []) do
Rest.get_list!(
resource(),
options
)
end
@doc """
Receive a list of up to 100 PixInfractions structs previously created in the Stark Infra API and the cursor to the next page.
Use this function instead of query if you want to manually page your requests.
## Options:
- `:cursor` [string, default nil]: cursor returned on the previous page function call.
- `:limit` [integer, default 100]: maximum number of structs to be retrieved. Max = 100. ex: 35
- `:after` [Date or string, default nil]: date filter for structs created after a specified date. ex: ~D[2020-03-10]
- `:before` [Date or string, default nil]: date filter for structs created before a specified date. ex: ~D[2020-03-10]
- `:status` [list of strings, default nil]: filter for status of retrieved objects. ex: ["created", "failed", "delivered", "closed", "canceled"]
- `:ids` [list of strings, default nil]: list of ids to filter retrieved objects. ex: ["5656565656565656", "4545454545454545"]
- `:type` [list of strings, default nil]: filter for the type of retrieved PixInfractions. Options: "fraud", "reversal", "reversalChargeback"
- `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.
## Return:
- list of PixInfraction structs with updated attributes and cursor to retrieve the next page of PixInfraction objects
"""
@spec page(
cursor: binary | nil,
limit: integer | nil,
after: Date.t() | binary | nil,
before: Date.t() | binary | nil,
status: [binary] | nil,
ids: [binary] | nil,
type: [binary] | nil,
user: Organization.t() | Project.t() | nil
) ::
{:ok, {binary, [PixInfraction.t() | map]}} |
{:error, Error.t()}
def page(options \\ []) do
Rest.get_page(
resource(),
options
)
end
@doc """
Same as page(), but it will unwrap the error tuple and raise in case of errors.
"""
@spec page!(
cursor: binary | nil,
limit: integer | nil,
after: Date.t() | binary | nil,
before: Date.t() | binary | nil,
status: [binary] | nil,
ids: [binary] | nil,
type: [binary] | nil,
user: Organization.t() | Project.t() | nil
) :: any
def page!(options \\ []) do
Rest.get_page!(
resource(),
options
)
end
@doc """
Respond to a received PixInfraction.
## Parameters (required):
- `:id` [string]: PixInfraction id. ex: '5656565656565656'
- `:result` [string]: result after the analysis of the PixInfraction. Options: "agreed", "disagreed"
## Parameters (optional):
- `:analysis` [string, default nil]: analysis that led to the result.
- `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.
## Return:
- PixInfraction with updated attributes
"""
@spec update(
binary,
result: binary,
analysis: binary | nil,
user: Organization.t() | Project.t() | nil
) ::
{:ok, PixInfraction.t()} |
{:error, Error.t()}
def update(id, result, parameters \\ []) do
parameters = [result: result] ++ parameters
Rest.patch_id(
resource(),
id,
parameters
)
end
@doc """
Same as update(), but it will unwrap the error tuple and raise in case of errors.
"""
@spec update!(
binary,
result: binary,
analysis: binary | nil,
user: Organization.t() | Project.t() | nil
) :: any
def update!(id, result, parameters \\ []) do
parameters = [result: result] ++ parameters
Rest.patch_id!(
resource(),
id,
parameters
)
end
@doc """
Cancel a PixInfraction entity previously created in the Stark Infra API
## Parameters (required):
- `:id` [string]: struct unique id. ex: "5656565656565656"
## Options:
- `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.
## Return:
- canceled PixInfraction struct
"""
@spec cancel(
id: binary,
user: Organization.t() | Project.t() | nil
) ::
{:ok, PixInfraction.t()} |
{:error, Error.t()}
def cancel(id, options \\ []) do
Rest.delete_id(
resource(),
id,
options
)
end
@doc """
Same as cancel(), but it will unwrap the error tuple and raise in case of errors.
"""
@spec cancel!(
id: binary,
user: Organization.t() | Project.t() | nil
) :: any
def cancel!(id, options \\ []) do
Rest.delete_id!(
resource(),
id,
options
)
end
@doc false
def resource() do
{
"PixInfraction",
&resource_maker/1
}
end
@doc false
def resource_maker(json) do
%PixInfraction{
reference_id: json[:reference_id],
type: json[:type],
description: json[:description],
id: json[:id],
credited_bank_code: json[:credited_bank_code],
debited_bank_code: json[:debited_bank_code],
agent: json[:agent],
analysis: json[:analysis],
bacen_id: json[:bacen_id],
reported_by: json[:reported_by],
result: json[:result],
status: json[:status],
created: json[:created] |> Check.datetime(),
updated: json[:updated] |> Check.datetime()
}
end
end
|
lib/pix_infraction/pix_infraction.ex
| 0.904533 | 0.579817 |
pix_infraction.ex
|
starcoder
|
defmodule NewRelic.Instrumented.Task.Supervisor do
@moduledoc """
Provides a pre-instrumented convenience module to connect
non-linked `Task.Supervisor` processes to the Transaction
that called them.
You may call these functions directly, or `alias` the
`NewRelic.Instrumented.Task` module and continue to use
`Task` as normal.
Example usage:
```elixir
alias NewRelic.Instrumented.Task
Task.Supervisor.async_stream_nolink(
MySupervisor,
[1,2],
fn n -> do_work(n) end
)
```
"""
import NewRelic.Instrumented.Task.Wrappers
defdelegate async(supervisor, fun, options \\ []),
to: Task.Supervisor
defdelegate async(supervisor, module, fun, args, options \\ []),
to: Task.Supervisor
defdelegate children(supervisor),
to: Task.Supervisor
defdelegate start_link(options),
to: Task.Supervisor
defdelegate terminate_child(supervisor, pid),
to: Task.Supervisor
# These functions _don't_ link their Task so we connect them explicitly
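# `instrument/1` (imported from NewRelic.Instrumented.Task.Wrappers) wraps
# the fun (or MFA) so the spawned process is connected to the calling
# Transaction before the work runs.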
def async_stream(supervisor, enumerable, fun, options \\ []) do
Task.Supervisor.async_stream(supervisor, enumerable, instrument(fun), options)
end
def async_stream(supervisor, enumerable, module, function, args, options \\ []) do
{module, function, args} = instrument({module, function, args})
Task.Supervisor.async_stream(supervisor, enumerable, module, function, args, options)
end
def async_nolink(supervisor, fun, options \\ []) do
Task.Supervisor.async_nolink(supervisor, instrument(fun), options)
end
def async_nolink(supervisor, module, fun, args, options \\ []) do
{module, fun, args} = instrument({module, fun, args})
Task.Supervisor.async_nolink(supervisor, module, fun, args, options)
end
def async_stream_nolink(supervisor, enumerable, fun, options \\ []) do
Task.Supervisor.async_stream_nolink(supervisor, enumerable, instrument(fun), options)
end
def async_stream_nolink(supervisor, enumerable, module, function, args, options \\ []) do
{module, function, args} = instrument({module, function, args})
Task.Supervisor.async_stream_nolink(supervisor, enumerable, module, function, args, options)
end
def start_child(supervisor, fun, options \\ []) do
Task.Supervisor.start_child(supervisor, instrument(fun), options)
end
def start_child(supervisor, module, fun, args, options \\ []) do
{module, fun, args} = instrument({module, fun, args})
Task.Supervisor.start_child(supervisor, module, fun, args, options)
end
end
|
lib/new_relic/instrumented/task/supervisor.ex
| 0.771327 | 0.753988 |
supervisor.ex
|
starcoder
|
defmodule Epicenter.Cases.Demographic do
use Ecto.Schema
import Ecto.Changeset
import Epicenter.PhiValidation, only: [validate_phi: 2]
alias Epicenter.Cases.Person
alias Epicenter.Cases.Ethnicity
@required_attrs ~w{}a
@optional_attrs ~w{
dob
external_id
first_name
last_name
preferred_language
tid
employment
gender_identity
marital_status
notes
occupation
person_id
race
sex_at_birth
source
}a
@derive {Jason.Encoder, only: [:id] ++ @required_attrs ++ @optional_attrs}
@primary_key {:id, :binary_id, autogenerate: true}
@foreign_key_type :binary_id
# demographic field: a field over which someone would want analytics
schema "demographics" do
field :dob, :date
field :employment, :string
field :external_id, :string
field :first_name, :string
field :gender_identity, {:array, :string}
field :last_name, :string
field :marital_status, :string
field :notes, :string
field :occupation, :string
field :preferred_language, :string
field :race, :map
field :seq, :integer, read_after_writes: true
field :sex_at_birth, :string
field :tid, :string
field :source, :string
timestamps(type: :utc_datetime)
embeds_one :ethnicity, Ethnicity, on_replace: :delete
belongs_to :person, Person
end
def changeset(demographic, attrs) do
demographic
|> cast(Enum.into(attrs, %{}), @required_attrs ++ @optional_attrs)
|> cast_embed(:ethnicity, with: &Ethnicity.changeset/2)
|> validate_required(@required_attrs)
|> validate_phi(:demographic)
end
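# Maps a race value (humanized or raw) into the stored map shape: top-level
# values become %{value => nil}, while detailed values nest under their
# parent, e.g. "chinese" -> %{"asian" => ["chinese"]}.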
def build_attrs(nil, _field),
do: nil
def build_attrs(value, :race) do
case search_humanized(value, :race) do
{_humanized, value, nil = _parent} ->
%{value => nil}
{_humanized, value, parent} ->
%{parent => [value]}
end
end
defp search_humanized(query, field) do
default = {query, query, nil}
case Map.get(humanized_values(), field) do
nil ->
default
humanized_values_for_field ->
Enum.find(humanized_values_for_field, default, fn {humanized, value, _parent} -> query in [value, humanized] end)
end
end
def find_humanized_value(field, value) do
{humanized, _value, _parent} = search_humanized(value, field)
humanized
end
def standard_values(field),
do: humanized_values() |> Map.get(field) |> Enum.map(fn {_humanized, value, _parent} -> value end)
def reject_nonstandard_values(values, _field, false = _reject?),
do: values
def reject_nonstandard_values(values, field, true = _reject?),
do: MapSet.intersection(MapSet.new(values), MapSet.new(standard_values(field))) |> MapSet.to_list()
def reject_standard_values(values, field),
do: MapSet.difference(MapSet.new(values), MapSet.new(standard_values(field))) |> MapSet.to_list()
def humanized_values do
%{
employment: [
{"Unknown", "unknown", nil},
{"Not employed", "not_employed", nil},
{"Part time", "part_time", nil},
{"Full time", "full_time", nil}
],
ethnicity: [
{"Unknown", "unknown", nil},
{"Declined to answer", "declined_to_answer", nil},
{"Not Hispanic, Latino/a, or Spanish origin", "not_hispanic_latinx_or_spanish_origin", nil},
{"Hispanic, Latino/a, or Spanish origin", "hispanic_latinx_or_spanish_origin", nil},
{"Mexican, Mexican American, Chicano/a", "mexican_mexican_american_chicanx", nil},
{"Puerto Rican", "puerto_rican", nil},
{"Cuban", "cuban", nil}
],
gender_identity: [
{"Unknown", "unknown", nil},
{"Declined to answer", "declined_to_answer", nil},
{"Female", "female", nil},
{"Transgender woman/trans woman/male-to-female (MTF)", "transgender_woman", nil},
{"Male", "male", nil},
{"Transgender man/trans man/female-to-male (FTM)", "transgender_man", nil},
{"Genderqueer/gender nonconforming neither exclusively male nor female", "gender_nonconforming", nil}
],
marital_status: [
{"Unknown", "unknown", nil},
{"Single", "single", nil},
{"Married", "married", nil}
],
race: [
{"Unknown", "unknown", nil},
{"Declined to answer", "declined_to_answer", nil},
{"White", "white", nil},
{"Black or African American", "black_or_african_american", nil},
{"American Indian or Alaska Native", "american_indian_or_alaska_native", nil},
{"Asian", "asian", nil},
{"Asian Indian", "asian_indian", "asian"},
{"Chinese", "chinese", "asian"},
{"Filipino", "filipino", "asian"},
{"Japanese", "japanese", "asian"},
{"Korean", "korean", "asian"},
{"Vietnamese", "vietnamese", "asian"},
{"Native Hawaiian or Other Pacific Islander", "native_hawaiian_or_other_pacific_islander", nil},
{"Native Hawaiian", "native_hawaiian", "native_hawaiian_or_other_pacific_islander"},
{"Guamanian or Chamorro", "guamanian_or_chamorro", "native_hawaiian_or_other_pacific_islander"},
{"Samoan", "samoan", "native_hawaiian_or_other_pacific_islander"}
],
sex_at_birth: [
{"Unknown", "unknown", nil},
{"Declined to answer", "declined_to_answer", nil},
{"Female", "female", nil},
{"Male", "male", nil},
{"Intersex", "intersex", nil}
]
}
end
def humanized_values(field),
do: Map.get(humanized_values(), field)
defmodule Query do
import Ecto.Query
alias Epicenter.Cases.Demographic
def display_order() do
from demographics in Demographic, order_by: [asc: demographics.seq]
end
def latest_form_demographic(%Person{id: person_id}) do
from demographics in Demographic,
where: demographics.person_id == ^person_id,
where: demographics.source == "form",
order_by: [desc: demographics.updated_at, desc: demographics.seq],
limit: 1
end
def matching(dob: dob, first_name: first_name, last_name: last_name) do
from(d in Demographic, where: [dob: ^dob, first_name: ^first_name, last_name: ^last_name])
|> first()
end
end
end
|
lib/epicenter/cases/demographic.ex
| 0.550124 | 0.478468 |
demographic.ex
|
starcoder
|
if Code.ensure_loaded?(Plug) do
defmodule Guardian.Plug.VerifySession do
@moduledoc """
Looks for and validates a token found in the session.
In the case where:
a. The session is not loaded
b. A token is already found for `:key`
This plug will not do anything.
This, like all other Guardian plugs, requires a Guardian pipeline to be set up.
It requires an implementation module, an error handler and a key.
These can be set either:
1. Upstream on the connection with `plug Guardian.Pipeline`
2. Upstream on the connection with `Guardian.Pipeline.{put_module, put_error_handler, put_key}`
3. Inline with an option of `:module`, `:error_handler`, `:key`
If a token is found but is invalid, the error handler will be called with
`auth_error(conn, {:invalid_token, reason}, opts)`
Once a token has been found it will be decoded, the token and claims will be put onto the connection.
They will be available using `Guardian.Plug.current_claims/2` and `Guardian.Plug.current_token/2`
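For example, a minimal pipeline (module names here are illustrative):

    defmodule MyApp.AuthPipeline do
      use Guardian.Plug.Pipeline,
        otp_app: :my_app,
        module: MyApp.Guardian,
        error_handler: MyApp.AuthErrorHandler

      plug Guardian.Plug.VerifySession
    end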
"""
import Plug.Conn
import Guardian.Plug.Keys
alias Guardian.Plug.Pipeline
@behaviour Plug
@impl Plug
@spec init(opts :: Keyword.t()) :: Keyword.t()
def init(opts), do: opts
@impl Plug
@spec call(conn :: Plug.Conn.t(), opts :: Keyword.t()) :: Plug.Conn.t()
def call(conn, opts) do
if Guardian.Plug.session_active?(conn) do
verify_session(conn, opts)
else
conn
end
end
defp verify_session(conn, opts) do
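# The leading `nil <-` clause means verification only proceeds when no
# token is on the conn yet; an existing token falls through to the
# catch-all `else` clause, returning the conn unchanged.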
with nil <- Guardian.Plug.current_token(conn, opts),
{:ok, token} <- find_token_from_session(conn, opts),
module <- Pipeline.fetch_module!(conn, opts),
claims_to_check <- Keyword.get(opts, :claims, %{}),
key <- storage_key(conn, opts),
{:ok, claims} <- Guardian.decode_and_verify(module, token, claims_to_check, opts) do
conn
|> Guardian.Plug.put_current_token(token, key: key)
|> Guardian.Plug.put_current_claims(claims, key: key)
else
:no_token_found ->
conn
{:error, reason} ->
conn
|> Pipeline.fetch_error_handler!(opts)
|> apply(:auth_error, [conn, {:invalid_token, reason}, opts])
|> Guardian.Plug.maybe_halt(opts)
_ ->
conn
end
end
defp find_token_from_session(conn, opts) do
key = conn |> storage_key(opts) |> token_key()
token = get_session(conn, key)
if token, do: {:ok, token}, else: :no_token_found
end
defp storage_key(conn, opts), do: Pipeline.fetch_key(conn, opts)
end
end
|
lib/guardian/plug/verify_session.ex
| 0.73307 | 0.476336 |
verify_session.ex
|
starcoder
|
defmodule PlugEarlyHints do
defmodule BadArityError do
defexception [:function, :arity, :key]
@impl true
def message(exception) do
{:arity, arity} = Function.info(exception.function, :arity)
"Function passed to #{exception.key} has arity #{arity} while expected" <>
"arity is #{exception.arity}"
end
end
@moduledoc """
Convenience plug for sending [HTTP 103 Early Hints][mdn-103].
This is useful for static resources that will be **for sure** required by
the resulting page. For example you can use it for informing the client
that you will need CSS later, so it can start fetching it right now.
## Usage
plug #{inspect(__MODULE__)},
# List all resources that will be needed later when rendering page
paths: [
# External origins we will connect to early because we will fetch
# several resources from them. DNS resolution and the TLS handshake
# will already be finished, so we can fetch those resources
# right away
"https://gravatar.com/": [rel: "dns-prefetch"],
"https://gravatar.com/": [rel: "preconnect"],
# "Regular" resources. We need to set `:as` to inform the client
# (browser) what kinf of resource it is, so it will be able to
# properly connect them
"/css/app.css": [rel: "preload", as: "style"],
"/js/app.js": [rel: "preload", as: "script"],
# Preloading fonts requires additional `:type` and `:crossorigin`
# attributes to allow the CSS engine to detect when to apply the
# resource, as well as to prevent a double download.
"/fonts/recursive.woff2": [
rel: "preload",
as: "font",
crossorigin: :anonymous,
type: "font/woff2"
]
]
For more information about available options check out [MDN `Link`][mdn-link].
## Options
- `:paths` - enumerable containing pairs in form of `{path, options}`.
- `:callback` - 2-ary function used for expanding `path` value from `:paths`.
It is useful for example to expand static assets in Phoenix applications.
Due to nature of the `Plug` it must be in form of `&Module.function/2`
(it cannot be `&function/2` nor `fn conn, path -> … end`).
1st argument will be `conn` passed to the plug and 2nd argument will be
current path. By default it return `path` unmodified.
- `:enable` - 1-ary function that will receive `conn` and should return boolean
whether the early hints should be sent or not. You mostly want to do it only
for requests returning HTML. The same rules as in `:callback` apply. By default
uses function that alwayst return `true`.
[mdn-103]: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/103 "103 Early Hints"
[mdn-link]: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Link "Link"
"""
@behaviour Plug
require Logger
@impl true
def init(opts) do
enable = get_func(opts, :enable, 1, &__MODULE__.__true__/1)
paths = Keyword.fetch!(opts, :paths)
cb = get_func(opts, :callback, 2, &__MODULE__.__id__/2)
%{
paths: paths,
callback: cb,
enable: enable
}
end
defp get_func(opts, key, arity, default) do
case Keyword.fetch(opts, key) do
:error ->
default
{:ok, func} when is_function(func, arity) ->
if Function.info(func, :type) != {:type, :external} do
Logger.warn(
"Function passed to `#{inspect(key)}` is not external function, which may cause problems"
)
end
func
{:ok, func} when is_function(func) ->
raise BadArityError, function: func, arity: arity, key: key
end
end
@impl true
def call(conn, %{paths: paths, callback: cb, enable: enable}) do
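# Hints are only sent over HTTP/2; 103 informational responses are not
# reliably handled by HTTP/1.1 clients.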
if enable.(conn) and :"HTTP/2" == Plug.Conn.get_http_protocol(conn) do
headers =
for {path, args} <- paths,
path = cb.(conn, to_string(path)),
not is_nil(path),
do: {"link", encode(path, args)}
Plug.Conn.inform(conn, :early_hints, headers)
else
conn
end
end
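# Builds one `Link` header value, e.g.
# encode("/css/app.css", rel: "preload", as: "style")
# #=> "</css/app.css>; rel=preload; as=style"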
defp encode(path, args) do
encoded_args =
args
|> Enum.map(fn {name, value} -> ~s[#{name}=#{value}] end)
|> Enum.join("; ")
"<#{path}>; " <> encoded_args
end
@doc false
def __true__(_conn), do: true
@doc false
def __id__(_conn, path), do: path
end
|
lib/plug_early_hints.ex
| 0.7586 | 0.457016 |
plug_early_hints.ex
|
starcoder
|
defmodule Hunter.Account do
@moduledoc """
Account entity
This module defines a `Hunter.Account` struct and the main functions
for working with Accounts.
## Fields
* `id` - the id of the account
* `username` - the username of the account
* `acct` - equals `username` for local users, includes `@domain` for remote ones
* `display_name` - the account's display name
* `locked` - boolean for when the account cannot be followed without waiting for approval first
* `created_at` - the time the account was created
* `followers_count` - the number of followers for the account
* `following_count` - the number of accounts the given account is following
* `statuses_count` - the number of statuses the account has made
* `note` - biography of user
* `url` - URL of the user's profile page (can be remote)
* `avatar` - URL to the avatar image
* `avatar_static` - URL to the avatar static image (gif)
* `header` - URL to the header image
* `header_static` - URL to the header static image (gif)
* `emojis` - list of emojis
* `moved` - moved from account
* `bot` - whether this account is a bot or not
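## Example
A typical read-only flow (the base URL and token are illustrative):
    conn = Hunter.new(base_url: "https://example.social", bearer_token: "abc123")
    account = Hunter.Account.verify_credentials(conn)
    followers = Hunter.Account.followers(conn, account.id, limit: 10)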
"""
alias Hunter.Config
@type t :: %__MODULE__{
id: non_neg_integer,
username: String.t(),
acct: String.t(),
display_name: String.t(),
note: String.t(),
url: String.t(),
avatar: String.t(),
avatar_static: String.t(),
header: String.t(),
header_static: String.t(),
locked: String.t(),
created_at: String.t(),
followers_count: non_neg_integer,
following_count: non_neg_integer,
statuses_count: non_neg_integer,
emojis: [Hunter.Emoji.t()],
moved: t(),
fields: [any()],
bot: boolean
}
@derive [Poison.Encoder]
defstruct [
:id,
:username,
:acct,
:display_name,
:note,
:url,
:avatar,
:avatar_static,
:header,
:header_static,
:locked,
:created_at,
:followers_count,
:following_count,
:statuses_count,
:emojis,
:moved,
:fields,
:bot
]
@doc """
Retrieve account of authenticated user
## Parameters
* `conn` - connection credentials
## Examples
iex> conn = Hunter.new([base_url: "https://social.lou.lt", bearer_token: "<PASSWORD>"])
%Hunter.Client{base_url: "https://social.lou.lt", bearer_token: "<PASSWORD>"}
iex> Hunter.Account.verify_credentials(conn)
%Hunter.Account{acct: "milmazz",
avatar: "https://social.lou.lt/avatars/original/missing.png",
avatar_static: "https://social.lou.lt/avatars/original/missing.png",
created_at: "2017-04-06T17:43:55.325Z",
display_name: "<NAME>", followers_count: 4,
following_count: 4,
header: "https://social.lou.lt/headers/original/missing.png",
header_static: "https://social.lou.lt/headers/original/missing.png",
id: 8039, locked: false, note: "", statuses_count: 3,
url: "https://social.lou.lt/@milmazz", username: "milmazz"}
"""
@spec verify_credentials(Hunter.Client.t()) :: Hunter.Account.t()
def verify_credentials(conn) do
Config.hunter_api().verify_credentials(conn)
end
@doc """
Make changes to the authenticated user
## Parameters
* `conn` - connection credentials
* `data` - data payload
## Possible keys for payload
* `display_name` - name to display in the user's profile
* `note` - new biography for the user
* `avatar` - base64 encoded image to display as the user's avatar (e.g. `data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAUoAAADrCAYAAAA...`)
* `header` - base64 encoded image to display as the user's header image (e.g. `data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAUoAAADrCAYAAAA...`)
"""
@spec update_credentials(Hunter.Client.t(), map) :: Hunter.Account.t()
def update_credentials(conn, data) do
Config.hunter_api().update_credentials(conn, data)
end
@doc """
Retrieve account
## Parameters
* `conn` - connection credentials
* `id` - account id
"""
@spec account(Hunter.Client.t(), non_neg_integer) :: Hunter.Account.t()
def account(conn, id) do
Config.hunter_api().account(conn, id)
end
@doc """
Get a list of followers
## Parameters
* `conn` - connection credentials
* `id` - account id
* `options` - options list
## Options
* `max_id` - get a list of followers with id less than or equal this value
* `since_id` - get a list of followers with id greater than this value
* `limit` - maximum number of followers to get, default: 40, maximum: 80
**Note:** `max_id` and `since_id` for next and previous pages are provided in
the `Link` header. It is **not** possible to use the `id` of the returned
objects to construct your own URLs, because the results are sorted by an
internal key.
"""
@spec followers(Hunter.Client.t(), non_neg_integer, Keyword.t()) :: [Hunter.Account.t()]
def followers(conn, id, options \\ []) do
Config.hunter_api().followers(conn, id, options)
end
@doc """
Get a list of followed accounts
## Parameters
* `conn` - connection credentials
* `id` - account id
* `options` - options list
## Options
* `max_id` - get a list of followings with id less than or equal this value
* `since_id` - get a list of followings with id greater than this value
* `limit` - maximum number of followings to get, default: 40, maximum: 80
**Note:** `max_id` and `since_id` for next and previous pages are provided in
the `Link` header. It is **not** possible to use the `id` of the returned
objects to construct your own URLs, because the results are sorted by an
internal key.
"""
@spec following(Hunter.Client.t(), non_neg_integer, Keyword.t()) :: [Hunter.Account.t()]
def following(conn, id, options \\ []) do
Config.hunter_api().following(conn, id, options)
end
@doc """
Follow a remote user
## Parameters
* `conn` - connection credentials
* `uri` - URI of the remote user, in the format of `username@domain`
"""
@spec follow_by_uri(Hunter.Client.t(), String.t()) :: Hunter.Account.t()
def follow_by_uri(conn, uri) do
Config.hunter_api().follow_by_uri(conn, uri)
end
@doc """
Search for accounts
## Parameters
* `conn` - connection credentials
* `options` - option list
## Options
* `q`: what to search for
* `limit`: maximum number of matching accounts to return, default: 40
"""
@spec search_account(Hunter.Client.t(), Keyword.t()) :: [Hunter.Account.t()]
def search_account(conn, options) do
opts = %{
q: Keyword.fetch!(options, :q),
limit: Keyword.get(options, :limit, 40)
}
Config.hunter_api().search_account(conn, opts)
end
@doc """
Retrieve user's blocks
## Parameters
* `conn` - connection credentials
## Options
* `max_id` - get a list of blocks with id less than or equal this value
* `since_id` - get a list of blocks with id greater than this value
* `limit` - maximum number of blocks to get, default: 40, max: 80
"""
@spec blocks(Hunter.Client.t(), Keyword.t()) :: [Hunter.Account.t()]
def blocks(conn, options \\ []) do
Config.hunter_api().blocks(conn, options)
end
@doc """
Retrieve a list of follow requests
## Parameters
* `conn` - connection credentials
* `options` - option list
## Options
* `max_id` - get a list of follow requests with id less than or equal this value
* `since_id` - get a list of follow requests with id greater than this value
* `limit` - maximum number of requests to get, default: 40, max: 80
"""
@spec follow_requests(Hunter.Client.t(), Keyword.t()) :: [Hunter.Account.t()]
def follow_requests(conn, options \\ []) do
Config.hunter_api().follow_requests(conn, options)
end
@doc """
Retrieve user's mutes
## Parameters
* `conn` - connection credentials
* `options` - option list
## Options
* `max_id` - get a list of mutes with id less than or equal this value
* `since_id` - get a list of mutes with id greater than this value
* `limit` - maximum number of mutes to get, default: 40, max: 80
"""
@spec mutes(Hunter.Client.t(), Keyword.t()) :: [Hunter.Account.t()]
def mutes(conn, options \\ []) do
Config.hunter_api().mutes(conn, options)
end
@doc """
Accepts a follow request
## Parameters
* `conn` - connection credentials
* `id` - follow request id
"""
@spec accept_follow_request(Hunter.Client.t(), non_neg_integer) :: boolean
def accept_follow_request(conn, id) do
Config.hunter_api().follow_request_action(conn, id, :authorize)
end
@doc """
Rejects a follow request
## Parameters
* `conn` - connection credentials
* `id` - follow request id
"""
@spec reject_follow_request(Hunter.Client.t(), non_neg_integer) :: boolean
def reject_follow_request(conn, id) do
Config.hunter_api().follow_request_action(conn, id, :reject)
end
@doc """
Fetch the list of users who reblogged the status.
## Parameters
* `conn` - connection credentials
* `id` - status identifier
* `options` - option list
## Options
* `max_id` - get a list of *reblogged by* ids less than or equal this value
* `since_id` - get a list of *reblogged by* ids greater than this value
* `limit` - maximum number of *reblogged by* to get, default: 40, max: 80
"""
@spec reblogged_by(Hunter.Client.t(), non_neg_integer, Keyword.t()) :: [Hunter.Account.t()]
def reblogged_by(conn, id, options \\ []) do
Config.hunter_api().reblogged_by(conn, id, options)
end
@doc """
Fetch the list of users who favourited the status
## Parameters
* `conn` - connection credentials
* `id` - status identifier
* `options` - option list
## Options
* `max_id` - get a list of *favourited by* ids less than or equal to this value
* `since_id` - get a list of *favourited by* ids greater than this value
* `limit` - maximum number of *favourited by* to get, default: 40, max: 80
"""
@spec favourited_by(Hunter.Client.t(), non_neg_integer, Keyword.t()) :: [Hunter.Account.t()]
def favourited_by(conn, id, options \\ []) do
Config.hunter_api().favourited_by(conn, id, options)
end
end
|
lib/hunter/account.ex
| 0.901302 | 0.623492 |
account.ex
|
starcoder
|
defmodule ExMpesa.Stk do
@moduledoc """
Lipa na M-Pesa Online Payment API is used to initiate an M-Pesa transaction on behalf of a customer using STK Push. This is the same technique the mySafaricom app uses whenever it is used to make payments.
"""
import ExMpesa.MpesaBase
import ExMpesa.Util
@doc """
Initiates the Mpesa Lipa Online STK Push.
## Configuration
Add the config below to your dev.exs / prod.exs files.
This assumes you have a clear understanding of how the Daraja API works. See the docs at https://developer.safaricom.co.ke/docs#lipa-na-m-pesa-online-payment
`config.exs`
```elixir
config :ex_mpesa,
consumer_key: "",
consumer_secret: "",
mpesa_short_code: "",
mpesa_passkey: "",
mpesa_callback_url: ""
```
## Parameters
attrs: - a map containing:
- `phone` - The MSISDN sending the funds(PhoneNumber).
- `amount` - The amount to be transacted.
- `reference` - Used with M-Pesa PayBills(AccountReference).
- `description` - A description of the transaction(TransactionDesc).
## Example
iex> ExMpesa.Stk.request(%{amount: 10, phone: "254724540000", reference: "reference", description: "description"})
{:ok,
%{
"CheckoutRequestID" => "ws_CO_010320202011179845",
"CustomerMessage" => "Success. Request accepted for processing",
"MerchantRequestID" => "25558-10595705-4",
"ResponseCode" => "0",
"ResponseDescription" => "Success. Request accepted for processing"
}}
"""
@spec request(map()) :: {:error, any()} | {:ok, any()}
def request(%{
amount: amount,
phone: phone,
reference: reference,
description: description
}) do
paybill = Application.get_env(:ex_mpesa, :mpesa_short_code)
passkey = Application.get_env(:ex_mpesa, :mpesa_passkey)
{:ok, timestamp} = generate_timestamp()
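# Daraja requires Password = Base64(BusinessShortCode <> Passkey <> Timestamp)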
password = Base.encode64(paybill <> passkey <> timestamp)
payload = %{
"BusinessShortCode" => paybill,
"Password" => password,
"Timestamp" => timestamp,
"TransactionType" => "CustomerPayBillOnline",
"Amount" => amount,
"PartyA" => phone,
"PartyB" => paybill,
"PhoneNumber" => phone,
"CallBackURL" => Application.get_env(:ex_mpesa, :mpesa_callback_url),
"AccountReference" => reference,
"TransactionDesc" => description
}
make_request("/mpesa/stkpush/v1/processrequest", payload)
end
def request(_) do
{:error, "Required Parameters missing, 'phone, 'amount', 'reference', 'description'"}
end
@doc """
STK PUSH Transaction Validation
## Configuration
Add the config below to your dev.exs / prod.exs files (if you already configured the STK push above, these keys will be present).
This assumes you have a clear understanding of how the Daraja API works. See the docs at https://developer.safaricom.co.ke/docs#lipa-na-m-pesa-online-query-request
`config.exs`
```elixir
config :ex_mpesa,
consumer_key: "",
consumer_secret: "",
mpesa_short_code: "",
mpesa_passkey: "",
```
## Parameters
attrs: - a map containing:
- `checkout_request_id` - Checkout RequestID.
## Example
iex> ExMpesa.Stk.validate(%{checkout_request_id: "ws_CO_260820202102496165"})
{:ok,
%{
"CheckoutRequestID" => "ws_CO_260820202102496165",
"MerchantRequestID" => "11130-78831728-4",
"ResponseCode" => "0",
"ResponseDescription" => "The service request has been accepted successsfully",
"ResultCode" => "1032",
"ResultDesc" => "Request cancelled by user"
}
}
"""
@spec validate(map()) :: {:error, any()} | {:ok, any()}
def validate(%{checkout_request_id: checkout_request_id}) do
paybill = Application.get_env(:ex_mpesa, :mpesa_short_code)
passkey = Application.get_env(:ex_mpesa, :mpesa_passkey)
{:ok, timestamp} = generate_timestamp()
password = Base.encode64(paybill <> passkey <> timestamp)
payload = %{
"BusinessShortCode" => paybill,
"Password" => password,
"Timestamp" => timestamp,
"CheckoutRequestID" => checkout_request_id
}
make_request("/mpesa/stkpushquery/v1/query", payload)
end
def validate(_) do
{:error, "Required Parameter missing, 'CheckoutRequestID'"}
end
end
|
lib/ex_mpesa/stk.ex
| 0.831896 | 0.750804 |
stk.ex
|
starcoder
|
defmodule Nerves.Artifact.BuildRunners.Docker do
@moduledoc """
Produce an artifact for a package using Docker.
The Nerves Docker artifact build_runner will use docker to create the artifact
for the package. The output in Mix will be limited to the headlines from the
process, and the full build log can be found in the file `build.log` located in the
root of the package path.
## Images
Docker containers will be created based off the image that is loaded.
By default, containers will use the image
`nervesproject/nerves_system_br:latest`. Sometimes additional host tools
are required to build a package. Therefore, packages can provide their own
image by specifying it in the package config under `:build_runner_config`.
The image is specified as a tuple `{"path/to/Dockerfile", tag_name}`.
Example:
build_runner_config: [
docker: {"Dockerfile", "my_system:0.1.0"}
]
## Volumes and Cache
Nerves will mount several volumes to the container for use in building
the artifact.
Mounted from the host:
* `/nerves/env/<package.name>` - The package being built.
* `/nerves/env/platform` - The package platform package.
* `/nerves/host/artifacts` - The host artifact directory.
Nerves will also mount the host NERVES_DL_DIR to save downloaded assets the
build platform requires for producing the artifact.
This is mounted at `/nerves/dl`. This volume can significantly reduce build
times but has potential for corruption. If you suspect that your build is
failing due to faulty cached download data, you can manually mount
the offending container and remove the file from this location or delete the
entire directory.
Nerves uses a docker volume to attach the build files. The volume name is
defined as the package name and a unique id that is stored at
`ARTIFACT_DIR/.docker_id`. The build directory is mounted to the container at
`/nerves/build` and is configured as the current working directory.
## Cleanup
Periodically, you may want to destroy all unused volumes to clean up, for
example with `docker volume prune`. Please refer to the Docker documentation
for more information.
When the build_runner is finished, the artifact is decompressed on the host at
the package's defined artifact directory.
"""
@behaviour Nerves.Artifact.BuildRunner
alias Nerves.Artifact
alias Nerves.Artifact.BuildRunners.Docker
import Docker.Utils
@version "~> 1.12 or ~> 1.12.0-rc2 or >= 17.0.0"
@working_dir "/nerves/build"
@doc """
Create an artifact for the package
Opts:
`make_args:` - Extra arguments to be passed to make.
For example:
You can configure the number of parallel jobs that buildroot
can use for execution. This is useful for situations where you may
have a machine with a lot of CPUs but not enough RAM.
# mix.exs
defp nerves_package do
[
# ...
build_runner_opts: [make_args: ["PARALLEL_JOBS=8"]],
]
end
"""
@spec build(Nerves.Package.t(), Nerves.Package.t(), term) :: {:ok, Path.t()}
def build(pkg, _toolchain, opts) do
preflight(pkg)
{:ok, pid} = Nerves.Utils.Stream.start_link(file: build_log_path())
stream = IO.stream(pid, :line)
:ok = create_build(pkg, stream)
:ok = make(pkg, stream, opts)
Mix.shell().info("\n")
:ok = make_artifact(pkg, stream)
Mix.shell().info("\n")
{:ok, path} = copy_artifact(pkg, stream)
Mix.shell().info("\n")
_ = Nerves.Utils.Stream.stop(pid)
{:ok, path}
end
@spec archive(Nerves.Package.t(), Nerves.Package.t(), term) :: {:ok, Path.t()}
def archive(pkg, _toolchain, _opts) do
{:ok, pid} = Nerves.Utils.Stream.start_link(file: "archive.log")
stream = IO.stream(pid, :line)
make_artifact(pkg, stream)
copy_artifact(pkg, stream)
end
def clean(pkg) do
Docker.Volume.name(pkg)
|> Docker.Volume.delete()
Artifact.Cache.path(pkg)
|> File.rm_rf()
end
@doc """
Connect to a system configuration shell in a Docker container
"""
@spec system_shell(Nerves.Package.t()) :: :ok
def system_shell(pkg) do
preflight(pkg)
{_, image} = config(pkg)
platform_config = pkg.config[:platform_config][:defconfig]
defconfig = Path.join("/nerves/env/#{pkg.app}", platform_config)
initial_input = [
"echo Updating build directory.",
"echo This will take a while if it is the first time...",
"/nerves/env/platform/create-build.sh #{defconfig} #{@working_dir} >/dev/null"
]
mounts = Enum.join(mounts(pkg), " ")
ssh_agent = Enum.join(ssh_agent(), " ")
env_vars = Enum.join(env(), " ")
cmd =
"docker run --rm -it -w #{@working_dir} #{env_vars} #{mounts} #{ssh_agent} #{image} /bin/bash"
set_volume_permissions(pkg)
Mix.Nerves.Shell.open(cmd, initial_input)
end
defp preflight(pkg) do
Docker.Volume.id(pkg) || Docker.Volume.create_id(pkg)
name = Docker.Volume.name(pkg)
_ = host_check()
_ = config_check(pkg, name)
name
end
# Build Commands
defp create_build(pkg, stream) do
platform_config = pkg.config[:platform_config][:defconfig]
defconfig = Path.join("/nerves/env/#{pkg.app}", platform_config)
cmd = ["/nerves/env/platform/create-build.sh", defconfig, @working_dir]
shell_info("Starting Build... (this may take a while)")
run(pkg, cmd, stream)
end
defp make(pkg, stream, opts) do
make_args = Keyword.get(opts, :make_args, [])
run(pkg, ["make" | make_args], stream)
end
defp make_artifact(pkg, stream) do
name = Artifact.download_name(pkg)
shell_info("Creating artifact archive")
cmd = ["make", "system", "NERVES_ARTIFACT_NAME=#{name}"]
run(pkg, cmd, stream)
end
defp copy_artifact(pkg, stream) do
shell_info("Copying artifact archive to host")
name = Artifact.download_name(pkg) <> Artifact.ext(pkg)
cmd = ["cp", name, "/nerves/dl/#{name}"]
run(pkg, cmd, stream)
path = Artifact.download_path(pkg)
{:ok, path}
end
# Helpers
defp run(pkg, cmd, stream) do
set_volume_permissions(pkg)
{_dockerfile, image} = config(pkg)
args =
[
"run",
"--rm",
"-w=#{@working_dir}",
"-a",
"stdout",
"-a",
"stderr"
] ++ env() ++ mounts(pkg) ++ ssh_agent() ++ [image | cmd]
case Mix.Nerves.Utils.shell("docker", args, stream: stream) do
{_result, 0} ->
:ok
{_result, _} ->
Mix.raise("""
The Nerves Docker build_runner encountered an error while building:
-----
#{end_of_build_log()}
-----
See #{build_log_path()}.
""")
end
end
defp set_volume_permissions(pkg) do
{_dockerfile, image} = config(pkg)
# (chown)
# Set the permissions of the build volume
# to match those of the host user:group.
# (--rm)
# Remove the container when finished.
args =
[
"run",
"--rm",
"-w=#{@working_dir}"
] ++ env(:root) ++ mounts(pkg) ++ [image | ["chown", "#{uid()}:#{gid()}", @working_dir]]
case Mix.Nerves.Utils.shell("docker", args) do
{_result, 0} ->
:ok
{result, _} ->
Mix.raise("""
The Nerves Docker build_runner encountered an error while setting permissions:
#{inspect(result)}
""")
end
end
defp env(), do: env(uid(), gid())
defp env(:root), do: env(0, 0)
defp env(uid, gid) do
["--env", "UID=#{uid}", "--env", "GID=#{gid}"]
end
defp uid() do
{uid, _} = Nerves.Port.cmd("id", ["-u"])
String.trim(uid)
end
defp gid() do
{gid, _} = Nerves.Port.cmd("id", ["-g"])
String.trim(gid)
end
defp end_of_build_log() do
{lines, _rc} = Nerves.Port.cmd("tail", ["-16", build_log_path()])
lines
end
defp build_log_path() do
File.cwd!()
|> Path.join("build.log")
end
defp mounts(pkg) do
build_paths = build_paths(pkg)
build_volume = Docker.Volume.name(pkg)
download_dir = Nerves.Env.download_dir() |> Path.expand()
mounts = ["--env", "NERVES_BR_DL_DIR=/nerves/dl"]
mounts =
Enum.reduce(build_paths, mounts, fn {_, host, target}, acc ->
["--mount", "type=bind,src=#{host},target=#{target}" | acc]
end)
mounts = ["--mount", "type=bind,src=#{download_dir},target=/nerves/dl" | mounts]
["--mount", "type=volume,src=#{build_volume},target=#{@working_dir}" | mounts]
end
defp ssh_agent() do
ssh_auth_sock = System.get_env("SSH_AUTH_SOCK")
["-v", "#{ssh_auth_sock}:/ssh-agent", "-e", "SSH_AUTH_SOCK=/ssh-agent"]
end
defp build_paths(pkg) do
system_br = Nerves.Env.package(:nerves_system_br)
[
{:platform, system_br.path, "/nerves/env/platform"},
{:package, pkg.path, "/nerves/env/#{pkg.app}"}
]
end
defp host_check() do
try do
case Nerves.Port.cmd("docker", ["--version"]) do
{result, 0} ->
<<"Docker version ", vsn::binary>> = result
{:ok, requirement} = Version.parse_requirement(@version)
{:ok, vsn} = parse_docker_version(vsn)
unless Version.match?(vsn, requirement) do
error_invalid_version(vsn)
end
:ok
_ ->
error_not_installed()
end
rescue
ErlangError -> error_not_installed()
end
end
defp config_check(pkg, name) do
{dockerfile, tag} = config(pkg)
# Check for the Build Volume
unless Docker.Volume.exists?(name) do
Docker.Volume.create(name)
end
unless Docker.Image.exists?(tag) do
Docker.Image.pull(tag)
unless Docker.Image.exists?(tag) do
Docker.Image.create(dockerfile, tag)
end
end
:ok
end
defp config(pkg) do
{dockerfile, tag} =
(pkg.config[:build_runner_config] || [])
|> Keyword.get(:docker, default_docker_config())
dockerfile =
dockerfile
|> Path.relative_to_cwd()
|> Path.expand()
{dockerfile, tag}
end
defp default_docker_config() do
[platform] = Nerves.Env.packages_by_type(:system_platform)
dockerfile = Path.join(platform.path, "support/docker/#{platform.app}")
tag = "nervesproject/#{platform.app}:#{platform.version}"
{dockerfile, tag}
end
defp error_not_installed do
Mix.raise("""
Docker is not installed on your machine.
Please install docker #{@version} or later
""")
end
defp error_invalid_version(vsn) do
Mix.raise("""
Your version of docker: #{vsn}
does not meet the requirements: #{@version}
""")
end
def parse_docker_version(vsn) do
[vsn | _] = String.split(vsn, ",", parts: 2)
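# Docker reports versions like "17.03.1-ce" whose zero-padded components
# ("03") are not valid SemVer, so strip leading zeros before parsing.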
Regex.replace(~r/(\.|^)0+(?=\d)/, vsn, "\\1")
|> Version.parse()
end
end
|
lib/nerves/artifact/build_runners/docker.ex
| 0.841565 | 0.469155 |
docker.ex
|
starcoder
|
defmodule BusDetective.GTFS.StopSearch do
@moduledoc """
This module provides stop search functionality.
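
A hedged usage sketch (`Stop` and `Repo` are illustrative names from the
host application, not part of this module):

    Stop
    |> StopSearch.query_string("8th & main")
    |> StopSearch.query_nearby(39.1031, -84.5120)
    |> Repo.all()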
"""
import Ecto.Query
import Geo.PostGIS, only: [st_distance: 2]
alias BusDetective.GTFS.Substitutions
@substitutions Substitutions.build_substitutions()
def query_nearby(query, latitude, longitude) do
case is_nil(latitude) or is_nil(longitude) do
true ->
query
false ->
location = %Geo.Point{coordinates: {longitude, latitude}, srid: 4326}
from(s in query, order_by: st_distance(s.location, ^location))
end
end
def query_string(query, nil), do: query
def query_string(query, search_string) do
join_pg_search(query, build_ts_query(search_string))
end
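# Split the search string on explicit conjunctions ("&" or " and ") and AND
# the resulting terms together as a tsquery, e.g. "8th and main" becomes
# "(8th) & (main)" (before any substitutions are expanded).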
defp build_ts_query(search_string) do
search_string
|> String.downcase()
|> String.split(~r{&| and })
|> Enum.map(&"(#{build_ts_term(&1)})")
|> Enum.join(" & ")
end
defp build_ts_term(term) do
term
|> String.trim()
|> String.split(" ")
|> Enum.map(&expand_substitutions(&1))
|> Enum.join(" & ")
end
defp expand_substitutions(lexeme) do
case @substitutions do
%{^lexeme => like_terms} ->
"('" <> Enum.join(like_terms, "' | '") <> "')"
_ ->
lexeme
end
end
defp join_pg_search(query, ts_query_terms) do
query
|> join(
:inner,
[stop],
pg_search in fragment(
~s{
SELECT "stops"."id" AS pg_search_id,
ts_rank(to_tsvector('english', coalesce("stops"."name"::text, '')) || to_tsvector('english', coalesce("stops"."code"::text, '')), to_tsquery('english', ?), 0) AS rank
FROM "stops" WHERE to_tsvector('english', coalesce("stops"."name"::text, '')) || to_tsvector('english', coalesce("stops"."code"::text, '')) @@ to_tsquery('english', ?)
},
^ts_query_terms,
^ts_query_terms
),
stop.id == pg_search.pg_search_id
)
|> order_by([stop, pg_search], desc: pg_search.rank, asc: stop.id)
end
end
|
apps/bus_detective/lib/bus_detective/gtfs/stop_search.ex
| 0.679817 | 0.449151 |
stop_search.ex
|
starcoder
|
|QUESTIONNAME|
Find telephone numbers with parentheses
|QUESTION|
You've noticed that the club's member table has telephone numbers with very inconsistent formatting. You'd like to find all the telephone numbers that contain parentheses, returning the member ID and telephone number sorted by member ID.
|QUERY|
select memid, telephone from cd.members where telephone ~ '[()]';
|ANSWER|
We've chosen to answer this using regular expressions, although Postgres does provide other string functions like <c>POSITION</c> that would do the job at least as well. Postgres implements POSIX regular expression matching via the <c>~</c> operator. If you've used regular expressions before, the functionality of the operator will be very familiar to you.</p>
<p>As an alternative, you can use the SQL standard <c>SIMILAR TO</c> operator. The regular expressions for this have similarities to the POSIX standard, but a lot of differences as well. Some of the most notable differences are:</p>
<ul>
<li>As in the <c>LIKE</c> operator, <c>SIMILAR TO</c> uses the '_' character to mean 'any character', and the '%' character to mean 'any string'.
<li>A <c>SIMILAR TO</c> expression must match the whole string, not just a substring as in posix regular expressions. This means that you'll typically end up bracketing an expression in '%' characters.
<li>The '.' character does not mean 'any character' in <c>SIMILAR TO</c> regexes: it's just a plain character.
</ul>
<p>The <c>SIMILAR TO</c> equivalent of the given answer is shown below:</p>
<sql>select memid, telephone from cd.members where telephone similar to '%[()]%';</sql>
<p>Finally, it's worth noting that regular expressions usually don't use indexes. Generally you don't want your regex to be responsible for doing heavy lifting in your query, because it will be slow. If you need fuzzy matching that works fast, consider working out if your needs can be met by <a href="http://www.postgresql.org/docs/9.2/static/textsearch.html">full text search</a>.</p>
|HINT|
Look up the ~ or <c>SIMILAR TO</c> operators in the Postgres docs.
|SORTED|
1
|PAGEID|
9df31ff6-3f20-11e3-8372-0023df7f7ec4
|
questions/string/00017500-reg.ex
| 0.52829 | 0.527986 |
00017500-reg.ex
|
starcoder
|
defmodule TDMS.Parser do
@moduledoc """
This module is the main parser for TDMS files.
TDMS files organize data in a three-level hierarchy of objects.
The top level is comprised of a single object that holds file-specific information like author or title.
Each file can contain an unlimited number of groups, and each group can contain an unlimited number of channels.
In the following illustration, the file example_events.tdms contains two groups, each of which contains two channels:
- example_events.tdms
- Measured Data
- Amplitude Sweep
- Phase Sweep
- Events
- Time
- Description
For more details about the internal structure of TDMS files, see https://www.ni.com/product-documentation/5696/en/
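
A minimal usage sketch ("measurements.tdms" is a placeholder path; a failed
parse returns `{:error, message}` instead):

    file = TDMS.Parser.parse(File.read!("measurements.tdms"))
    Enum.map(file.groups, fn group -> {group.name, length(group.channels)} end)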
"""
@lead_in_byte_size 28
@tdms_file_tag "TDSm"
@tdms_file_version_1 4712
@tdms_file_version_2 4713
alias TDMS.Parser.State
alias TDMS.Parser.ValueParser
alias TDMS.Parser.ParseError
@doc """
Parses the given TDMS file binary data and returns a hierarchical `TDMS.File` structure which
contains a list of `TDMS.Group` and `TDMS.Channel`.
## Examples
iex> TDMS.Parser.parse(File.read!("test/data/basic.tdms"))
%TDMS.File{
path: "/",
properties: [
%TDMS.Property{
data_type: :string,
name: "name",
value: "ni-crio-9068-190fdf5_20190609_235850.tdms"
}
],
groups: [
%TDMS.Group{
name: "Temperature",
path: "/'Temperature'",
properties: [
%TDMS.Property{data_type: :string, name: "name", value: "Temperature"}
],
channels: [
%TDMS.Channel{
data: [24.172693869632123, 24.238202284912816, 24.22418907461031, ...],
data_count: 201,
data_type: :double,
name: "ai.0",
path: "/'Temperature'/'ai.0'",
properties: [
%TDMS.Property{data_type: :string, name: "name", value: "ai.0"},
%TDMS.Property{
data_type: :string,
name: "datatype",
value: "DT_DOUBLE"
},
...
]
},
%TDMS.Channel{
data: [24.07053512461277, 24.136787008557807, 24.128304594848682, ...],
data_count: 201,
data_type: :double,
name: "ai.1",
path: "/'Temperature'/'ai.1'",
properties: [
%TDMS.Property{data_type: :string, name: "name", value: "ai.1"},
%TDMS.Property{
data_type: :string,
name: "datatype",
value: "DT_DOUBLE"
},
...
]
},
...
]
}
]
}
"""
def parse(stream) do
try do
{:ok} = validate_tdms_file(stream)
{:ok, state, _stream} = parse(stream, State.new())
build_tdms_file_hierarchy(state)
catch
:throw, %ParseError{message: message} -> {:error, message}
end
end
defp validate_tdms_file(stream) do
case parse_lead_in(stream) do
{:ok, :empty, _stream} ->
throw(ParseError.new("Empty file"))
{:ok, :no_lead_in, _stream} ->
throw(ParseError.new("No TDMS file"))
{:ok, _lead_in, _stream} ->
{:ok}
end
end
defp parse(stream, state) do
result = parse_lead_in(stream)
case result do
{:ok, :empty, stream} ->
state = State.set_lead_in(state, nil)
{:ok, state, stream}
{:ok, :no_lead_in, stream} ->
{state, stream} = parse_raw_data(stream, state)
parse(stream, state)
{:ok, lead_in, stream} ->
state = State.set_lead_in(state, lead_in)
{state, stream} = parse_metadata(stream, state)
{state, stream} = parse_raw_data(stream, state)
parse(stream, state)
end
end
defp parse_raw_data(stream, state) do
raw_data_indexes = State.get_raw_data_indexes(state)
{results, stream} =
parse_data(stream, raw_data_indexes, state.lead_in.interleaved, state.lead_in.endian)
state =
Enum.reduce(results, state, fn {path, data}, state ->
State.add_data(state, path, data)
end)
{state, stream}
end
defp parse_lead_in(stream) when byte_size(stream) == 0 do
{:ok, :empty, stream}
end
defp parse_lead_in(stream) when byte_size(stream) < @lead_in_byte_size do
{:ok, :no_lead_in, stream}
end
defp parse_lead_in(stream) do
<<tdms_tag::binary-size(4), stream_without_tdms_tag::binary>> = stream
case tdms_tag do
@tdms_file_tag -> parse_toc(stream_without_tdms_tag)
_ -> {:ok, :no_lead_in, stream}
end
end
defp parse_toc(stream) do
<<toc::little-unsigned-integer-size(32), stream::binary>> = stream
endian =
case TDMS.Parser.ToC.is_big_endian(toc) do
true -> :big
false -> :little
end
{version, stream} = ValueParser.parse_value(stream, :uint32, endian)
{segment_length, stream} = ValueParser.parse_value(stream, :uint64, endian)
{metadata_length, stream} = ValueParser.parse_value(stream, :uint64, endian)
lead_in = %{
toc: toc,
endian: endian,
interleaved: TDMS.Parser.ToC.is_interleaved(toc),
segment_length: segment_length,
metadata_length: metadata_length
}
case version do
@tdms_file_version_1 -> {:ok, lead_in, stream}
@tdms_file_version_2 -> {:ok, lead_in, stream}
version -> throw(ParseError.new("Unsupported TDMS version: #{version}"))
end
end
defp parse_metadata(stream, state) do
{number_of_objects, stream} = ValueParser.parse_value(stream, :uint32, state.lead_in.endian)
parse_paths(stream, number_of_objects, state)
end
defp build_tdms_file_hierarchy(state) do
grouped_paths =
Enum.group_by(state.paths, fn {path, _value} -> TDMS.Parser.Path.depth(path) end)
channels = build_channels(state, grouped_paths[3] || [])
groups = build_groups(grouped_paths[2] || [], channels)
{file_path, %{properties: properties}} = List.first(grouped_paths[1])
TDMS.File.new(file_path, properties, groups)
end
defp build_channels(state, paths) do
sort_paths(paths)
|> Enum.map(fn {path, %{raw_data_index: raw_data_index, properties: properties}} ->
data = State.get_data(state, path)
name = TDMS.Parser.Path.get_name(path)
name_property = TDMS.Property.new("name", :string, name)
type_property =
TDMS.Property.new(
"datatype",
:string,
ValueParser.data_type_to_property_value(raw_data_index.data_type)
)
TDMS.Channel.new(
path,
name,
raw_data_index.data_type,
length(data),
[name_property | [type_property | properties]],
data
)
end)
end
defp sort_paths(paths) do
Enum.sort_by(paths, fn {_path, %{order: order}} -> order end)
end
defp build_groups(paths, channels) do
sort_paths(paths)
|> Enum.map(fn {path, %{properties: properties}} ->
filtered_channels = filter_channel_by_group_path(channels, path)
name = TDMS.Parser.Path.get_name(path)
name_property = TDMS.Property.new("name", :string, name)
TDMS.Group.new(path, name, [name_property | properties], filtered_channels)
end)
end
defp filter_channel_by_group_path(channels, group_path) do
channels
|> Enum.filter(fn channel -> TDMS.Parser.Path.is_child(channel.path, group_path) end)
|> Enum.uniq_by(fn channel -> channel.path end)
end
defp parse_paths(stream, 0, state) do
{state, stream}
end
defp parse_paths(stream, number_of_objects, state) do
{state, stream} = parse_path(stream, state)
parse_paths(stream, number_of_objects - 1, state)
end
defp parse_path(stream, state) do
{path, stream} = ValueParser.parse_string(stream, state.lead_in.endian)
<<raw_data_index::binary-size(4), stream::binary>> = stream
read_raw_data_index(stream, path, raw_data_index, state)
end
defp read_raw_data_index(stream, path, raw_data_index, state) do
case parse_raw_data_index(stream, path, raw_data_index, state) do
{:previous, stream} ->
previous_path = State.get_path_info(state, path)
state = State.add_raw_data_index(state, previous_path.raw_data_index)
{state, stream}
{raw_data_index, stream} ->
{number_of_properties, stream} =
ValueParser.parse_value(stream, :uint32, state.lead_in.endian)
{properties, stream} = parse_properties(stream, number_of_properties, state, [])
state =
state
|> State.add_metadata(path, properties, raw_data_index)
|> State.add_raw_data_index(raw_data_index)
{state, stream}
end
end
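# A raw data index of 0x00000000 means this path reuses the raw data index
# from the previous segment.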
defp parse_raw_data_index(stream, _path, <<0, 0, 0, 0>>, state) do
{_empty, stream} = ValueParser.parse_value(stream, :uint32, state.lead_in.endian)
{:previous, stream}
end
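# A raw data index of 0xFFFFFFFF means the segment contains no raw data for
# this path.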
defp parse_raw_data_index(stream, path, <<255, 255, 255, 255>>, _state) do
{%{path: path, data_type: :double, number_of_values: 0}, stream}
end
defp parse_raw_data_index(_stream, _path, <<69, 12, 00, 00>>, _state) do
throw(ParseError.new("DAQmx Format Changing Scaler Parser is not implemented"))
end
defp parse_raw_data_index(_stream, _path, <<69, 13, 00, 00>>, _state) do
throw(ParseError.new("DAQmx Digital Line Scaler Parser is not implemented"))
end
defp parse_raw_data_index(stream, path, _raw_data_index, state) do
{data_type, stream} = ValueParser.parse_data_type(stream, state.lead_in.endian)
{array_dimension, stream} = ValueParser.parse_value(stream, :uint32, state.lead_in.endian)
if array_dimension != 1 do
throw(
ParseError.new(
"In TDMS file format version 2.0, 1 is the only valid value for array dimension"
)
)
end
{number_of_values, stream} = ValueParser.parse_value(stream, :uint64, state.lead_in.endian)
{total_size_bytes, stream} =
case data_type do
:string ->
ValueParser.parse_value(stream, :uint64, state.lead_in.endian)
_ ->
{nil, stream}
end
{%{
path: path,
data_type: data_type,
array_dimension: array_dimension,
number_of_values: number_of_values,
total_size_bytes: total_size_bytes
}, stream}
end
defp parse_properties(stream, 0, _state, properties) do
{Enum.reverse(properties), stream}
end
defp parse_properties(stream, number_of_properties, state, properties) do
{property, stream} = parse_property(stream, state)
parse_properties(stream, number_of_properties - 1, state, [property | properties])
end
defp parse_property(stream, state) do
{property_name, stream} = ValueParser.parse_string(stream, state.lead_in.endian)
{data_type, stream} = ValueParser.parse_data_type(stream, state.lead_in.endian)
{value, stream} = ValueParser.parse_value(stream, data_type, state.lead_in.endian)
{TDMS.Property.new(property_name, data_type, value), stream}
end
defp parse_data(stream, raw_data_indexes, false, endian) do
raw_data_indexes
|> Enum.reduce({%{}, stream}, fn raw_data_index, {results, stream} ->
parse_data_noninterleaved(stream, raw_data_index, endian, results)
end)
end
defp parse_data(stream, raw_data_indexes, true, endian) do
parse_data_interleaved(stream, raw_data_indexes, endian, %{})
end
defp parse_data_interleaved(stream, [], _endian, results) do
results = Enum.map(results, fn {path, data} -> {path, Enum.reverse(data)} end)
{results, stream}
end
defp parse_data_interleaved(stream, raw_data_indexes, endian, results) do
raw_data_indexes_with_data =
raw_data_indexes
|> Enum.map(fn index -> %{index | number_of_values: index.number_of_values - 1} end)
|> Enum.filter(fn index -> index.number_of_values >= 0 end)
{results, stream} =
parse_channels_interleaved(stream, raw_data_indexes_with_data, endian, results)
parse_data_interleaved(stream, raw_data_indexes_with_data, endian, results)
end
defp parse_channels_interleaved(stream, raw_data_indexes, endian, results) do
raw_data_indexes
|> Enum.reduce({results, stream}, fn raw_data_index, {results, stream} ->
{value, stream} = parse_channel_single_value(stream, raw_data_index.data_type, endian)
data = results[raw_data_index.path] || []
results = Map.put(results, raw_data_index.path, [value | data])
{results, stream}
end)
end
defp parse_channel_single_value(stream, data_type, endian) do
ValueParser.parse_value(stream, data_type, endian)
end
defp parse_data_noninterleaved(stream, raw_data_index, endian, results) do
{data, stream} = parse_channel_data(stream, raw_data_index, endian, [])
results = Map.put(results, raw_data_index.path, data)
{results, stream}
end
defp parse_channel_data(stream, nil, _endian, data) do
{data, stream}
end
defp parse_channel_data(
stream,
%{data_type: data_type, number_of_values: number_of_values},
endian,
data
) do
parse_channel_data(stream, data_type, number_of_values, endian, data)
end
defp parse_channel_data(stream, _data_type, 0, _endian, data) do
{Enum.reverse(data), stream}
end
defp parse_channel_data(stream, :string, number_of_values, endian, _data) do
ValueParser.parse_raw_strings(stream, number_of_values, endian)
end
defp parse_channel_data(stream, data_type, number_of_values, endian, data) do
{value, stream} = ValueParser.parse_value(stream, data_type, endian)
parse_channel_data(stream, data_type, number_of_values - 1, endian, [value | data])
end
end
|
lib/parser.ex
| 0.876337 | 0.548915 |
parser.ex
|
starcoder
|
defmodule Membrane.Pad do
@moduledoc """
Pads are units defined by elements and bins, allowing them to be linked with their
siblings. This module consists of pads typespecs and utils.
Each pad is described by its name, direction, availability, mode and possible caps.
For pads to be linkable, these properties have to be compatible. For more
information on each of them, check appropriate type in this module.
Each link can only consist of exactly two pads.
"""
use Bunch
use Bunch.Typespec
alias Membrane.{Buffer, Caps}
@typedoc """
Defines the term by which the pad instance is identified.
"""
@type ref_t :: name_t | {__MODULE__, name_t, dynamic_id_t}
@typedoc """
Possible id of dynamic pad
"""
@type dynamic_id_t :: any
@typedoc """
Defines the name of pad or group of dynamic pads
"""
@type name_t :: atom
@typedoc """
Defines possible pad directions:
- `:output` - data can only be sent through such pad,
- `:input` - data can only be received through such pad.
One cannot link two pads with the same direction.
"""
@type direction_t :: :output | :input
@typedoc """
Describes how an element sends and receives data.
Modes are strictly related to pad directions:
- `:push` output pad - element can send data through such pad whenever it wants.
- `:push` input pad - element has to deal with data whenever it comes through
such pad, and do it fast enough not to let data accumulate on such pad, what
may lead to overflow of element process erlang queue, which is highly unwanted.
- `:pull` output pad - element can send data through such pad only if it has
already received demand on the pad.
- `:pull` input pad - element receives through such pad only data that it has
previously demanded, so that no undemanded data can arrive.
Linking pads with different modes is possible, but only when the output pad
works in push mode and the input pad in pull mode. In that case, however, an
error will be raised whenever too many buffers accumulate on the input pad,
waiting to be processed.
For more information on transferring data and demands, see `t:demand_mode_t/0`,
`Membrane.Source`, `Membrane.Filter`, `Membrane.Endpoint`, `Membrane.Sink`.
"""
@type mode_t :: :push | :pull
@typedoc """
Defines the mode of handling and requesting demand on pads.
- `:manual` - demand is manually handled and requested. See `Membrane.Element.Action.demand_t`,
`Membrane.Element.Action.redemand_t`, `c:Membrane.Element.WithOutputPads.handle_demand/5`
- `:auto` - demand is managed automatically: the core ensures that there's demand
on each input pad (that has `demand_mode` set to `:auto`) whenever there's demand on all
output pads (that have `demand_mode` set to `:auto`). Currently works only for
`Membrane.Filter`s.
"""
@type demand_mode_t :: :manual | :auto
@typedoc """
Values used when defining pad availability:
- `:always` - a static pad, which can remain unlinked in `stopped` state only.
- `:on_request` - a dynamic pad, an instance of which is created every time it is
linked to another pad. Thus linking the pad with _k_ other pads creates _k_
instances of the pad, and links each with another pad.
"""
@list_type availability_t :: [:always, :on_request]
@typedoc """
Type describing availability mode of a created pad:
- `:static` - there always exist exactly one instance of such pad.
- `:dynamic` - multiple instances of such pad may be created and removed (which
entails executing `handle_pad_added` and `handle_pad_removed` callbacks,
respectively).
"""
@type availability_mode_t :: :static | :dynamic
@typedoc """
Describes how a pad should be declared in element or bin.
"""
@type spec_t :: output_spec_t | input_spec_t | bin_spec_t
@typedoc """
For bins there are exactly the same options for both directions.
Demand unit is derived from the first element inside the bin linked to the
given input.
"""
@type bin_spec_t :: {name_t(), [common_spec_options_t]}
@typedoc """
Describes how an output pad should be declared inside an element.
"""
@type output_spec_t :: {name_t(), [common_spec_options_t | {:demand_mode, demand_mode_t()}]}
@typedoc """
Describes how an input pad should be declared inside an element.
"""
@type input_spec_t ::
{name_t(),
[
common_spec_options_t
| {:demand_mode, demand_mode_t()}
| {:demand_unit, Buffer.Metric.unit_t()}
]}
@typedoc """
Pad options used in `t:spec_t/0`
"""
@type common_spec_options_t ::
{:availability, availability_t()}
| {:mode, mode_t()}
| {:caps, Caps.Matcher.caps_specs_t()}
| {:options, Keyword.t()}
@typedoc """
Type describing a pad. Contains data parsed from `t:spec_t/0`
"""
@type description_t :: %{
:availability => availability_t(),
:mode => mode_t(),
:name => name_t(),
:caps => Caps.Matcher.caps_specs_t(),
optional(:demand_unit) => Buffer.Metric.unit_t(),
:direction => direction_t(),
:options => nil | Keyword.t(),
optional(:demand_mode) => demand_mode_t()
}
@doc """
Creates a static pad reference.
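
## Example

For example (the macro expands to the bare name):

    iex> require Membrane.Pad
    iex> Membrane.Pad.ref(:input)
    :input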
"""
defmacro ref(name) do
quote do
unquote(name)
end
end
@doc """
Creates a dynamic pad reference.
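
## Example

For example (the macro expands to a `{Membrane.Pad, name, id}` tuple):

    iex> require Membrane.Pad
    iex> Membrane.Pad.ref(:output, 1)
    {Membrane.Pad, :output, 1}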
"""
defmacro ref(name, id) do
quote do
{unquote(__MODULE__), unquote(name), unquote(id)}
end
end
defguard is_pad_ref(term)
when term |> is_atom or
(term |> is_tuple and term |> tuple_size == 3 and term |> elem(0) == __MODULE__ and
term |> elem(1) |> is_atom)
defguard is_pad_name(term) when is_atom(term)
defguard is_availability(term) when term in @availability_t
defguard is_availability_dynamic(availability) when availability == :on_request
defguard is_availability_static(availability) when availability == :always
@doc """
Returns pad availability mode for given availability.
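
## Example

For example (mirroring the clauses below):

    iex> Membrane.Pad.availability_mode(:always)
    :static
    iex> Membrane.Pad.availability_mode(:on_request)
    :dynamic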
"""
@spec availability_mode(availability_t) :: availability_mode_t
def availability_mode(:always), do: :static
def availability_mode(:on_request), do: :dynamic
@doc """
Returns the name for the given pad reference
"""
@spec name_by_ref(ref_t()) :: name_t()
def name_by_ref(ref(name, _id)) when is_pad_name(name), do: name
def name_by_ref(name) when is_pad_name(name), do: name
@spec opposite_direction(direction_t()) :: direction_t()
def opposite_direction(:input), do: :output
def opposite_direction(:output), do: :input
end
|
lib/membrane/pad.ex
| 0.910212 | 0.736685 |
pad.ex
|
starcoder
|
defmodule BSV do
@moduledoc """



BSV-ex is a general purpose library for building Bitcoin SV applications in
Elixir. The intent of this library is to be broadly comparable in scope, and
cross compatible with [Money Button's BSV Javascript library](https://github.com/moneybutton/bsv).
## Features
Currently this library offers the following functionality:
* Transaction parsing, construction, signing and serialization
* Keypair generation and address encoding and decoding
* BIP-39 mnemonic phrase generation and deterministic keys
* Bitcoin message signing (Electrum compatible)
* ECIES encryption/decryption (Electrum compatible)
* Wide range of both Bitcoin and non-Bitcoin specific crypto functions
Full documentation can be found at [https://hexdocs.pm/bsv](https://hexdocs.pm/bsv).
#### Note to developers
This is a new library and new codebase. As such, developers should proceed with
caution and test using testnet and small value transactions. In future
versions the API is subject to change as the library is developed towards
maturity.
## Installation
The package is bundled with `libsecp256k1` NIF bindings. `libtool`, `automake`
and `autogen` are required in order for the package to compile.
The package can be installed by adding `bsv` to your list of dependencies in
`mix.exs`:
def deps do
[
{:bsv, "~> 0.2"}
]
end
## Usage
Many examples are demonstrated throughout the documentation, but see the
following for some quick-start examples:
### Key pairs and addresses
For more examples refer to `BSV.KeyPair` and `BSV.Address`.
iex> keys = BSV.KeyPair.generate
%BSV.KeyPair{
network: :main,
private_key: <<1, 249, 98, 144, 230, 172, 5, 56, 197, 143, 133, 240, 144, 223, 25, 32, 55, 42, 159, 26, 128, 66, 149, 49, 235, 179, 116, 11, 209, 235, 240, 163>>,
public_key: <<3, 173, 251, 14, 108, 217, 224, 80, 133, 244, 200, 33, 191, 137, 80, 62, 141, 133, 166, 201, 224, 141, 101, 152, 144, 92, 237, 54, 220, 131, 58, 26, 4>>
}
iex> address = BSV.Address.from_public_key(keys)
...> |> BSV.Address.to_string
"1MzYtHPymTjgxx9npR6Pu9ZCUhtU9hHYTL"
### Mnemonic phrase and deterministic keys
For further details and examples refer to `BSV.Mnemonic`,
`BSV.Extended.PrivateKey`, `BSV.Extended.PublicKey` and `BSV.Extended.Children`.
iex> mnemonic = BSV.Mnemonic.generate
"various attitude grain market food wheat arena disagree soccer dust wrestle auction fiber wrestle sort wonder vital gym ill word amazing sniff have biology"
iex> master = BSV.Mnemonic.to_seed(mnemonic)
...> |> BSV.Extended.PrivateKey.from_seed
%BSV.Extended.PrivateKey{
chain_code: <<164, 12, 192, 154, 59, 209, 85, 172, 76, 7, 42, 138, 247, 125, 161, 30, 135, 25, 124, 160, 170, 234, 126, 162, 228, 146, 135, 232, 67, 181, 219, 91>>,
child_number: 0,
depth: 0,
fingerprint: <<0, 0, 0, 0>>,
key: <<111, 24, 247, 85, 107, 58, 162, 225, 135, 190, 185, 200, 226, 131, 68, 152, 159, 111, 232, 166, 21, 211, 235, 180, 140, 190, 109, 39, 31, 33, 107, 17>>,
network: :main,
version_number: <<4, 136, 173, 228>>
}
iex> child_address = master
...> |> BSV.Extended.Children.derive("m/44'/0'/0'/0/0")
...> |> BSV.Address.from_public_key
...> |> BSV.Address.to_string
"1F6fuP7HrBY8aeUazXZitaAsgpsJQFfUun"
### Creating transactions
For further details and examples refer to `BSV.Transaction`,
`BSV.Transaction.Input`, `BSV.Transaction.Output` and `BSV.Script`.
iex> script = %BSV.Script{}
...> |> BSV.Script.push(:OP_FALSE)
...> |> BSV.Script.push(:OP_RETURN)
...> |> BSV.Script.push("hello world")
%BSV.Script{chunks: [:OP_FALSE, :OP_RETURN, "hello world"]}
iex> output = %BSV.Transaction.Output{script: script}
%BSV.Transaction.Output{
amount: 0,
satoshis: 0,
script: %BSV.Script{
chunks: [:OP_FALSE, :OP_RETURN, "hello world"]
}
}
iex> tx = %BSV.Transaction{}
...> |> BSV.Transaction.spend_from(utxo)
...> |> BSV.Transaction.add_output(output)
...> |> BSV.Transaction.change_to("15KgnG69mTbtkx73vNDNUdrWuDhnmfCxsf")
...> |> BSV.Transaction.sign(private_key)
...> |> BSV.Transaction.serialize(encoding: :hex)
"010000000142123cac628be8df8bbf1fc21449c94bb8b81bc4a5960193be37688694626f49000000006b483045022100df13af549e5f6a23f70e0332856a0934a6fbbf7edceb19b15cafd8d3009ce12f02205ecf6b0f9456354de7c0b9d6b8877dac896b72edd9f7e3881b5ac69c82c03aac41210296207d8752d01b1cf8de77d258c02dd7280edc2bce9b59023311bbd395cbe93affffffff0100000000000000000e006a0b68656c6c6f20776f726c6400000000"
"""
end
|
lib/bsv.ex
| 0.865948 | 0.671538 |
bsv.ex
|
starcoder
|
defmodule Timber do
@moduledoc """
The functions in this module are high-level convenience functions intended to define
the broader, public API of the Timber library. It is recommended to use these functions
instead of their lower-level counterparts.
"""
alias Timber.Context
alias Timber.LocalContext
alias Timber.GlobalContext
@typedoc """
The target context to perform the operation.
- `:global` - This stores the context at a global level, meaning
it will be present on every log line, regardless of which process
generates the log line.
- `:local` - This stores the context in the Logger Metadata which
is local to the process
"""
@type context_location :: :local | :global
@doc """
Adds Timber context to the current process
See `add_context/2`
"""
@spec add_context(Context.element()) :: :ok
def add_context(data, location \\ :local)
@doc """
Adds context which will be included on log entries
The second parameter indicates where you want the context to be
stored. See `context_location` for more details.
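
## Example

A minimal sketch; the exact shape accepted as a `Context.element()` depends
on your application's context types:

    Timber.add_context(%{user: %{id: "abc123"}}, :global)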
"""
@spec add_context(Context.element(), context_location) :: :ok
def add_context(data, :local) do
LocalContext.add(data)
end
def add_context(data, :global) do
GlobalContext.add(data)
end
@doc """
Removes a key from Timber context on the current process.
See `remove_context_key/2`
"""
@spec remove_context_key(atom) :: :ok
def remove_context_key(key, location \\ :local)
@doc """
Removes a context key.
The second parameter indicates which context you want the key to be removed from.
"""
@spec remove_context_key(atom, context_location) :: :ok
def remove_context_key(key, :local) do
LocalContext.remove_key(key)
end
def remove_context_key(key, :global) do
GlobalContext.remove_key(key)
end
@doc """
Used to time runtime execution. For example, when timing a `Timber.Events.HTTPResponseEvent`:
```elixir
timer = Timber.start_timer()
# .... make request
time_ms = Timber.duration_ms(timer)
event = HTTPResponseEvent.new(status: 200, time_ms: time_ms)
message = HTTPResponseEvent.message(event)
Logger.info(message, event: event)
```
"""
defdelegate start_timer, to: Timber.Timer, as: :start
@doc """
Captures the duration in fractional milliseconds since the timer was started. See
`start_timer/0`.
"""
defdelegate duration_ms(timer), to: Timber.Timer
@doc false
def debug(message_fun) do
Timber.Config.debug_io_device()
|> debug(message_fun)
end
@doc false
def debug(nil, _message_fun) do
false
end
def debug(io_device, message_fun) when is_function(message_fun) do
IO.write(io_device, message_fun.())
end
end
|
lib/timber.ex
| 0.856047 | 0.797754 |
timber.ex
|
starcoder
|
defmodule AWS.AlexaForBusiness do
@moduledoc """
Alexa for Business helps you use Alexa in your organization. Alexa for
Business provides you with the tools to manage Alexa devices, enroll your
users, and assign skills, at scale. You can build your own context-aware
voice skills using the Alexa Skills Kit and the Alexa for Business API
operations. You can also make these available as private skills for your
organization. Alexa for Business makes it efficient to voice-enable your
products and services, thus providing context-aware voice experiences for
your customers. Device makers building with the Alexa Voice Service (AVS)
can create fully integrated solutions, register their products with Alexa
for Business, and manage them as shared devices in their organization.
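
## Example

A hedged, illustrative call (the `AWS.Client` struct fields follow
`aws-elixir` conventions; the credentials are placeholders):

    client = %AWS.Client{
      access_key_id: "AKIAEXAMPLE",
      secret_access_key: "examplesecret",
      region: "us-east-1",
      endpoint: "amazonaws.com"
    }

    {:ok, result, _http_response} = AWS.AlexaForBusiness.search_rooms(client, %{})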
"""
@doc """
Associates a skill with the organization under the customer's AWS account.
If a skill is private, the user implicitly accepts access to this skill
during enablement.
"""
def approve_skill(client, input, options \\ []) do
request(client, "ApproveSkill", input, options)
end
@doc """
Associates a contact with a given address book.
"""
def associate_contact_with_address_book(client, input, options \\ []) do
request(client, "AssociateContactWithAddressBook", input, options)
end
@doc """
Associates a device with the specified network profile.
"""
def associate_device_with_network_profile(client, input, options \\ []) do
request(client, "AssociateDeviceWithNetworkProfile", input, options)
end
@doc """
Associates a device with a given room. This applies all the settings from
the room profile to the device, and all the skills in any skill groups
added to that room. This operation requires the device to be online, or
else a manual sync is required.
"""
def associate_device_with_room(client, input, options \\ []) do
request(client, "AssociateDeviceWithRoom", input, options)
end
@doc """
Associates a skill group with a given room. This enables all skills in the
associated skill group on all devices in the room.
"""
def associate_skill_group_with_room(client, input, options \\ []) do
request(client, "AssociateSkillGroupWithRoom", input, options)
end
@doc """
Associates a skill with a skill group.
"""
def associate_skill_with_skill_group(client, input, options \\ []) do
request(client, "AssociateSkillWithSkillGroup", input, options)
end
@doc """
Makes a private skill available for enrolled users to enable on their
devices.
"""
def associate_skill_with_users(client, input, options \\ []) do
request(client, "AssociateSkillWithUsers", input, options)
end
@doc """
Creates an address book with the specified details.
"""
def create_address_book(client, input, options \\ []) do
request(client, "CreateAddressBook", input, options)
end
@doc """
Creates a recurring schedule for usage reports to deliver to the specified
S3 location with a specified daily or weekly interval.
"""
def create_business_report_schedule(client, input, options \\ []) do
request(client, "CreateBusinessReportSchedule", input, options)
end
@doc """
Adds a new conference provider under the user's AWS account.
"""
def create_conference_provider(client, input, options \\ []) do
request(client, "CreateConferenceProvider", input, options)
end
@doc """
Creates a contact with the specified details.
"""
def create_contact(client, input, options \\ []) do
request(client, "CreateContact", input, options)
end
@doc """
Creates a gateway group with the specified details.
"""
def create_gateway_group(client, input, options \\ []) do
request(client, "CreateGatewayGroup", input, options)
end
@doc """
Creates a network profile with the specified details.
"""
def create_network_profile(client, input, options \\ []) do
request(client, "CreateNetworkProfile", input, options)
end
@doc """
Creates a new room profile with the specified details.
"""
def create_profile(client, input, options \\ []) do
request(client, "CreateProfile", input, options)
end
@doc """
Creates a room with the specified details.
"""
def create_room(client, input, options \\ []) do
request(client, "CreateRoom", input, options)
end
@doc """
Creates a skill group with a specified name and description.
"""
def create_skill_group(client, input, options \\ []) do
request(client, "CreateSkillGroup", input, options)
end
@doc """
Creates a user.
"""
def create_user(client, input, options \\ []) do
request(client, "CreateUser", input, options)
end
@doc """
Deletes an address book by the address book ARN.
"""
def delete_address_book(client, input, options \\ []) do
request(client, "DeleteAddressBook", input, options)
end
@doc """
Deletes the recurring report delivery schedule with the specified schedule
ARN.
"""
def delete_business_report_schedule(client, input, options \\ []) do
request(client, "DeleteBusinessReportSchedule", input, options)
end
@doc """
Deletes a conference provider.
"""
def delete_conference_provider(client, input, options \\ []) do
request(client, "DeleteConferenceProvider", input, options)
end
@doc """
Deletes a contact by the contact ARN.
"""
def delete_contact(client, input, options \\ []) do
request(client, "DeleteContact", input, options)
end
@doc """
Removes a device from Alexa For Business.
"""
def delete_device(client, input, options \\ []) do
request(client, "DeleteDevice", input, options)
end
@doc """
When this action is called for a specified shared device, it allows
authorized users to delete the device's entire previous history of voice
input data and associated response data. This action can be called once
every 24 hours for a specific shared device.
"""
def delete_device_usage_data(client, input, options \\ []) do
request(client, "DeleteDeviceUsageData", input, options)
end
@doc """
Deletes a gateway group.
"""
def delete_gateway_group(client, input, options \\ []) do
request(client, "DeleteGatewayGroup", input, options)
end
@doc """
Deletes a network profile by the network profile ARN.
"""
def delete_network_profile(client, input, options \\ []) do
request(client, "DeleteNetworkProfile", input, options)
end
@doc """
Deletes a room profile by the profile ARN.
"""
def delete_profile(client, input, options \\ []) do
request(client, "DeleteProfile", input, options)
end
@doc """
Deletes a room by the room ARN.
"""
def delete_room(client, input, options \\ []) do
request(client, "DeleteRoom", input, options)
end
@doc """
Deletes room skill parameter details by room, skill, and parameter key ID.
"""
def delete_room_skill_parameter(client, input, options \\ []) do
request(client, "DeleteRoomSkillParameter", input, options)
end
@doc """
Unlinks a third-party account from a skill.
"""
def delete_skill_authorization(client, input, options \\ []) do
request(client, "DeleteSkillAuthorization", input, options)
end
@doc """
Deletes a skill group by skill group ARN.
"""
def delete_skill_group(client, input, options \\ []) do
request(client, "DeleteSkillGroup", input, options)
end
@doc """
Deletes a specified user by user ARN and enrollment ARN.
"""
def delete_user(client, input, options \\ []) do
request(client, "DeleteUser", input, options)
end
@doc """
Disassociates a contact from a given address book.
"""
def disassociate_contact_from_address_book(client, input, options \\ []) do
request(client, "DisassociateContactFromAddressBook", input, options)
end
@doc """
Disassociates a device from its current room. The device continues to be
connected to the Wi-Fi network and is still registered to the account. The
device settings and skills are removed from the room.
"""
def disassociate_device_from_room(client, input, options \\ []) do
request(client, "DisassociateDeviceFromRoom", input, options)
end
@doc """
Disassociates a skill from a skill group.
"""
def disassociate_skill_from_skill_group(client, input, options \\ []) do
request(client, "DisassociateSkillFromSkillGroup", input, options)
end
@doc """
Makes a private skill unavailable for enrolled users and prevents them from
enabling it on their devices.
"""
def disassociate_skill_from_users(client, input, options \\ []) do
request(client, "DisassociateSkillFromUsers", input, options)
end
@doc """
Disassociates a skill group from a specified room. This disables all skills
in the skill group on all devices in the room.
"""
def disassociate_skill_group_from_room(client, input, options \\ []) do
request(client, "DisassociateSkillGroupFromRoom", input, options)
end
@doc """
Forgets smart home appliances associated to a room.
"""
def forget_smart_home_appliances(client, input, options \\ []) do
request(client, "ForgetSmartHomeAppliances", input, options)
end
@doc """
Gets the address book details by the address book ARN.
"""
def get_address_book(client, input, options \\ []) do
request(client, "GetAddressBook", input, options)
end
@doc """
Retrieves the existing conference preferences.
"""
def get_conference_preference(client, input, options \\ []) do
request(client, "GetConferencePreference", input, options)
end
@doc """
Gets details about a specific conference provider.
"""
def get_conference_provider(client, input, options \\ []) do
request(client, "GetConferenceProvider", input, options)
end
@doc """
Gets the contact details by the contact ARN.
"""
def get_contact(client, input, options \\ []) do
request(client, "GetContact", input, options)
end
@doc """
Gets the details of a device by device ARN.
"""
def get_device(client, input, options \\ []) do
request(client, "GetDevice", input, options)
end
@doc """
Retrieves the details of a gateway.
"""
def get_gateway(client, input, options \\ []) do
request(client, "GetGateway", input, options)
end
@doc """
Retrieves the details of a gateway group.
"""
def get_gateway_group(client, input, options \\ []) do
request(client, "GetGatewayGroup", input, options)
end
@doc """
Retrieves the configured values for the user enrollment invitation email
template.
"""
def get_invitation_configuration(client, input, options \\ []) do
request(client, "GetInvitationConfiguration", input, options)
end
@doc """
Gets the network profile details by the network profile ARN.
"""
def get_network_profile(client, input, options \\ []) do
request(client, "GetNetworkProfile", input, options)
end
@doc """
Gets the details of a room profile by profile ARN.
"""
def get_profile(client, input, options \\ []) do
request(client, "GetProfile", input, options)
end
@doc """
Gets room details by room ARN.
"""
def get_room(client, input, options \\ []) do
request(client, "GetRoom", input, options)
end
@doc """
Gets room skill parameter details by room, skill, and parameter key ARN.
"""
def get_room_skill_parameter(client, input, options \\ []) do
request(client, "GetRoomSkillParameter", input, options)
end
@doc """
Gets skill group details by skill group ARN.
"""
def get_skill_group(client, input, options \\ []) do
request(client, "GetSkillGroup", input, options)
end
@doc """
Lists the details of the schedules that a user configured. A download URL
of the report associated with each schedule is returned every time this
action is called. A new download URL is returned each time, and is valid
for 24 hours.
"""
def list_business_report_schedules(client, input, options \\ []) do
request(client, "ListBusinessReportSchedules", input, options)
end
@doc """
Lists conference providers under a specific AWS account.
"""
def list_conference_providers(client, input, options \\ []) do
request(client, "ListConferenceProviders", input, options)
end
@doc """
Lists the device event history, including device connection status, for up
to 30 days.
"""
def list_device_events(client, input, options \\ []) do
request(client, "ListDeviceEvents", input, options)
end
@doc """
Retrieves a list of gateway group summaries. Use GetGatewayGroup to
retrieve details of a specific gateway group.
"""
def list_gateway_groups(client, input, options \\ []) do
request(client, "ListGatewayGroups", input, options)
end
@doc """
Retrieves a list of gateway summaries. Use GetGateway to retrieve details
of a specific gateway. An optional gateway group ARN can be provided to
only retrieve gateway summaries of gateways that are associated with that
gateway group ARN.
"""
def list_gateways(client, input, options \\ []) do
request(client, "ListGateways", input, options)
end
@doc """
Lists all enabled skills in a specific skill group.
"""
def list_skills(client, input, options \\ []) do
request(client, "ListSkills", input, options)
end
@doc """
Lists all categories in the Alexa skill store.
"""
def list_skills_store_categories(client, input, options \\ []) do
request(client, "ListSkillsStoreCategories", input, options)
end
@doc """
Lists all skills in the Alexa skill store by category.
"""
def list_skills_store_skills_by_category(client, input, options \\ []) do
request(client, "ListSkillsStoreSkillsByCategory", input, options)
end
@doc """
Lists all of the smart home appliances associated with a room.
"""
def list_smart_home_appliances(client, input, options \\ []) do
request(client, "ListSmartHomeAppliances", input, options)
end
@doc """
Lists all tags for the specified resource.
"""
def list_tags(client, input, options \\ []) do
request(client, "ListTags", input, options)
end
@doc """
Sets the conference preferences on a specific conference provider at the
account level.
"""
def put_conference_preference(client, input, options \\ []) do
request(client, "PutConferencePreference", input, options)
end
@doc """
Configures the email template for the user enrollment invitation with the
specified attributes.
"""
def put_invitation_configuration(client, input, options \\ []) do
request(client, "PutInvitationConfiguration", input, options)
end
@doc """
Updates room skill parameter details by room, skill, and parameter key ID.
Not all skills have a room skill parameter.
"""
def put_room_skill_parameter(client, input, options \\ []) do
request(client, "PutRoomSkillParameter", input, options)
end
@doc """
Links a user's account to a third-party skill provider. If this API
operation is called by an assumed IAM role, the skill being linked must be
a private skill. Also, the skill must be owned by the AWS account that
assumed the IAM role.
"""
def put_skill_authorization(client, input, options \\ []) do
request(client, "PutSkillAuthorization", input, options)
end
@doc """
Registers an Alexa-enabled device built by an Original Equipment
Manufacturer (OEM) using Alexa Voice Service (AVS).
"""
def register_a_v_s_device(client, input, options \\ []) do
request(client, "RegisterAVSDevice", input, options)
end
@doc """
Disassociates a skill from the organization under a user's AWS account. If
the skill is a private skill, it moves to an AcceptStatus of PENDING. Any
private or public skill that is rejected can be added later by calling the
ApproveSkill API.
"""
def reject_skill(client, input, options \\ []) do
request(client, "RejectSkill", input, options)
end
@doc """
Determines the details for the room from which a skill request was invoked.
This operation is used by skill developers.
"""
def resolve_room(client, input, options \\ []) do
request(client, "ResolveRoom", input, options)
end
@doc """
Revokes an invitation and invalidates the enrollment URL.
"""
def revoke_invitation(client, input, options \\ []) do
request(client, "RevokeInvitation", input, options)
end
@doc """
Searches address books and lists the ones that meet a set of filter and
sort criteria.
"""
def search_address_books(client, input, options \\ []) do
request(client, "SearchAddressBooks", input, options)
end
@doc """
Searches contacts and lists the ones that meet a set of filter and sort
criteria.
"""
def search_contacts(client, input, options \\ []) do
request(client, "SearchContacts", input, options)
end
@doc """
Searches devices and lists the ones that meet a set of filter criteria.
"""
def search_devices(client, input, options \\ []) do
request(client, "SearchDevices", input, options)
end
@doc """
Searches network profiles and lists the ones that meet a set of filter and
sort criteria.
"""
def search_network_profiles(client, input, options \\ []) do
request(client, "SearchNetworkProfiles", input, options)
end
@doc """
Searches room profiles and lists the ones that meet a set of filter
criteria.
"""
def search_profiles(client, input, options \\ []) do
request(client, "SearchProfiles", input, options)
end
@doc """
Searches rooms and lists the ones that meet a set of filter and sort
criteria.
"""
def search_rooms(client, input, options \\ []) do
request(client, "SearchRooms", input, options)
end
@doc """
Searches skill groups and lists the ones that meet a set of filter and sort
criteria.
"""
def search_skill_groups(client, input, options \\ []) do
request(client, "SearchSkillGroups", input, options)
end
@doc """
Searches users and lists the ones that meet a set of filter and sort
criteria.
"""
def search_users(client, input, options \\ []) do
request(client, "SearchUsers", input, options)
end
@doc """
Triggers an asynchronous flow to send text, SSML, or audio announcements to
rooms that are identified by a search or filter.
"""
def send_announcement(client, input, options \\ []) do
request(client, "SendAnnouncement", input, options)
end
@doc """
Sends an enrollment invitation email with a URL to a user. The URL is valid
for 30 days or until you call this operation again, whichever comes first.
"""
def send_invitation(client, input, options \\ []) do
request(client, "SendInvitation", input, options)
end
@doc """
Resets a device and its account to the known default settings. This clears
all information and settings set by previous users in the following ways:
<ul> <li> Bluetooth - This unpairs all Bluetooth devices paired with your
Echo device.
</li> <li> Volume - This resets the Echo device's volume to the default
value.
</li> <li> Notifications - This clears all notifications from your Echo
device.
</li> <li> Lists - This clears all to-do items from your Echo device.
</li> <li> Settings - This internally syncs the room's profile (if the
device is assigned to a room), contacts, address books, delegation access
for account linking, and communications (if enabled on the room profile).
</li> </ul>
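A minimal sketch of a call (assuming this module is `AWS.AlexaForBusiness`,
per the file path; the ARN and feature list are placeholders):

    AWS.AlexaForBusiness.start_device_sync(client, %{
      "DeviceArn" => "arn:aws:a4b:us-east-1:123456789012:device/exampleid",
      "Features" => ["ALL"]
    })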
"""
def start_device_sync(client, input, options \\ []) do
request(client, "StartDeviceSync", input, options)
end
@doc """
Initiates the discovery of any smart home appliances associated with the
room.
"""
def start_smart_home_appliance_discovery(client, input, options \\ []) do
request(client, "StartSmartHomeApplianceDiscovery", input, options)
end
@doc """
Adds metadata tags to a specified resource.
"""
def tag_resource(client, input, options \\ []) do
request(client, "TagResource", input, options)
end
@doc """
Removes metadata tags from a specified resource.
"""
def untag_resource(client, input, options \\ []) do
request(client, "UntagResource", input, options)
end
@doc """
Updates address book details by the address book ARN.
"""
def update_address_book(client, input, options \\ []) do
request(client, "UpdateAddressBook", input, options)
end
@doc """
Updates the configuration of the report delivery schedule with the
specified schedule ARN.
"""
def update_business_report_schedule(client, input, options \\ []) do
request(client, "UpdateBusinessReportSchedule", input, options)
end
@doc """
Updates an existing conference provider's settings.
"""
def update_conference_provider(client, input, options \\ []) do
request(client, "UpdateConferenceProvider", input, options)
end
@doc """
Updates the contact details by the contact ARN.
"""
def update_contact(client, input, options \\ []) do
request(client, "UpdateContact", input, options)
end
@doc """
Updates the device name by device ARN.
"""
def update_device(client, input, options \\ []) do
request(client, "UpdateDevice", input, options)
end
@doc """
Updates the details of a gateway. If any optional field is not provided,
the existing corresponding value is left unmodified.
"""
def update_gateway(client, input, options \\ []) do
request(client, "UpdateGateway", input, options)
end
@doc """
Updates the details of a gateway group. If any optional field is not
provided, the existing corresponding value is left unmodified.
"""
def update_gateway_group(client, input, options \\ []) do
request(client, "UpdateGatewayGroup", input, options)
end
@doc """
Updates a network profile by the network profile ARN.
"""
def update_network_profile(client, input, options \\ []) do
request(client, "UpdateNetworkProfile", input, options)
end
@doc """
Updates an existing room profile by room profile ARN.
"""
def update_profile(client, input, options \\ []) do
request(client, "UpdateProfile", input, options)
end
@doc """
Updates room details by room ARN.
"""
def update_room(client, input, options \\ []) do
request(client, "UpdateRoom", input, options)
end
@doc """
Updates skill group details by skill group ARN.
"""
def update_skill_group(client, input, options \\ []) do
request(client, "UpdateSkillGroup", input, options)
end
@spec request(AWS.Client.t(), binary(), map(), list()) ::
{:ok, map() | nil, map()}
| {:error, term()}
defp request(client, action, input, options) do
client = %{client | service: "a4b"}
host = build_host("a4b", client)
url = build_url(host, client)
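    # The AWS JSON 1.1 protocol routes the operation through the
    # "X-Amz-Target" header; the request body carries the JSON-encoded input.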
headers = [
{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "AlexaForBusiness.#{action}"}
]
payload = encode!(client, input)
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
post(client, url, payload, headers, options)
end
defp post(client, url, payload, headers, options) do
case AWS.Client.request(client, :post, url, payload, headers, options) do
{:ok, %{status_code: 200, body: body} = response} ->
body = if body != "", do: decode!(client, body)
{:ok, body, response}
{:ok, response} ->
{:error, {:unexpected_response, response}}
error = {:error, _reason} -> error
end
end
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
endpoint
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
defp encode!(client, payload) do
AWS.Client.encode!(client, payload, :json)
end
defp decode!(client, payload) do
AWS.Client.decode!(client, payload, :json)
end
end
|
lib/aws/generated/alexa_for_business.ex
| 0.741393 | 0.568955 |
alexa_for_business.ex
|
starcoder
|
defmodule ExIban.Validators do
@moduledoc """
  Set of validation rules to perform when checking an IBAN account number.
"""
import ExIban.Rules
import ExIban.Parser
@doc """
Performs validation checks:
- length greater or equal to 5 (:too_short)
  - only legal chars used [A-Z0-9] (:bad_chars)
- known country code (:unknown_country_code)
- correct length for given country (:bad_length)
- correct bban format for given country (:bad_format)
- checking digits (:bad_check_digits)
## Examples
iex> ExIban.Validators.issues("GB82")
{[:too_short], nil}
iex> ExIban.Validators.issues("GB_+)*")
{[:bad_chars, :bad_length, :bad_format, :bad_check_digits], {"GB", "_+", ")*", 6, "GB_+)*"}}
iex> ExIban.Validators.issues("GB82 WEST 1234 5698 7654 32")
{[], {"GB", "82", "WEST12345698765432", 22, "GB82WEST12345698765432"}}
"""
@spec issues(binary) :: {list, {bitstring, bitstring, bitstring, integer, bitstring} | nil}
def issues(iban) when byte_size(iban) < 5, do: {[:too_short], nil}
def issues(iban) do
iban
|> parse
|> do_validation
end
defp do_validation(iban) do
iban
|> check_chars
|> check_country_code
|> check_length
|> check_format
|> check_digits
end
defp check_chars({_, _, _, _, iban} = parsed_iban) do
cond do
not Regex.match?(~r/^[A-Z0-9]+$/, iban) -> {[:bad_chars], parsed_iban}
true -> {[], parsed_iban}
end
end
defp check_country_code({errors, {country_code, _, _, _, _} = parsed_iban}) do
cond do
      rules() |> Map.get(country_code) |> is_nil() ->
{errors ++ [:unknown_country_code], parsed_iban}
true -> {errors, parsed_iban}
end
end
defp check_length({errors,
{country_code, _, _, iban_length, _} = parsed_iban}) do
cond do
rules |> Map.get(country_code, %{}) |> Map.get("length") != iban_length ->
{errors ++ [:bad_length], parsed_iban}
true -> {errors, parsed_iban}
end
end
defp check_format({errors,
{country_code, _, bban, _, _} = parsed_iban}) do
    {:ok, reg} =
      rules()
      |> Map.get(country_code, %{})
      |> Map.get("bban_pattern", "")
      |> Regex.compile()
cond do
not Regex.match?(reg, bban) -> {errors ++ [:bad_format], parsed_iban}
true -> {errors, parsed_iban}
end
end
  defp check_digits({errors,
       {country_code, check_digits, bban, _, _} = parsed_iban}) do
    # ISO 7064 mod 97-10: rearrange to BBAN <> country code <> check digits,
    # then expand every character to digits ("0".."9" stay as-is,
    # "A".."Z" become 10..35).
    chars = String.to_charlist(bban <> country_code <> check_digits)
    numbers = for byte <- chars, into: [] do
      case byte do
        byte when byte in 48..57 -> List.to_string([byte])
        byte when byte in 65..90 -> Integer.to_string(byte - 55)
        _ -> ""
      end
    end
cond do
numbers |> Enum.join |> String.to_integer |> rem(97) != 1 ->
{errors ++ [:bad_check_digits], parsed_iban}
true -> {errors, parsed_iban}
end
end
end
|
lib/exiban/validators.ex
| 0.799011 | 0.47591 |
validators.ex
|
starcoder
|
defmodule AWS.Config do
@moduledoc """
AWS Config
AWS Config provides a way to keep track of the configurations of all the
AWS resources associated with your AWS account. You can use AWS Config to
get the current and historical configurations of each AWS resource and also
to get information about the relationship between the resources. An AWS
resource can be an Amazon Elastic Compute Cloud (Amazon EC2) instance, an Elastic
Block Store (EBS) volume, an elastic network interface (ENI), or a security
group. For a complete list of resources currently supported by AWS Config,
see [Supported AWS
Resources](https://docs.aws.amazon.com/config/latest/developerguide/resource-config-reference.html#supported-resources).
You can access and manage AWS Config through the AWS Management Console,
the AWS Command Line Interface (AWS CLI), the AWS Config API, or the AWS
SDKs for AWS Config. This reference guide contains documentation for the
AWS Config API and the AWS CLI commands that you can use to manage AWS
Config. The AWS Config API uses the Signature Version 4 protocol for
signing requests. For more information about how to sign a request with
this protocol, see [Signature Version 4 Signing
Process](https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html).
For detailed information about AWS Config features and their associated
actions or commands, as well as how to work with AWS Management Console,
see [What Is AWS
Config](https://docs.aws.amazon.com/config/latest/developerguide/WhatIsConfig.html)
in the *AWS Config Developer Guide*.
"""
@doc """
Returns the current configuration items for resources that are present in
your AWS Config aggregator. The operation also returns a list of resources
that are not processed in the current request. If there are no unprocessed
resources, the operation returns an empty `unprocessedResourceIdentifiers`
list.
<note> <ul> <li> The API does not return results for deleted resources.
</li> <li> The API does not return tags and relationships.
</li> </ul> </note>
"""
def batch_get_aggregate_resource_config(client, input, options \\ []) do
request(client, "BatchGetAggregateResourceConfig", input, options)
end
@doc """
Returns the current configuration for one or more requested resources. The
operation also returns a list of resources that are not processed in the
current request. If there are no unprocessed resources, the operation
returns an empty unprocessedResourceKeys list.
<note> <ul> <li> The API does not return results for deleted resources.
</li> <li> The API does not return any tags for the requested resources.
This information is filtered out of the supplementaryConfiguration section
of the API response.
</li> </ul> </note>
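An illustrative request (the resource type and ID are placeholders):

    AWS.Config.batch_get_resource_config(client, %{
      "resourceKeys" => [
        %{"resourceType" => "AWS::EC2::Instance", "resourceId" => "i-0123456789abcdef0"}
      ]
    })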
"""
def batch_get_resource_config(client, input, options \\ []) do
request(client, "BatchGetResourceConfig", input, options)
end
@doc """
Deletes the authorization granted to the specified configuration aggregator
account in a specified region.
"""
def delete_aggregation_authorization(client, input, options \\ []) do
request(client, "DeleteAggregationAuthorization", input, options)
end
@doc """
Deletes the specified AWS Config rule and all of its evaluation results.
AWS Config sets the state of a rule to `DELETING` until the deletion is
complete. You cannot update a rule while it is in this state. If you make a
`PutConfigRule` or `DeleteConfigRule` request for the rule, you will
receive a `ResourceInUseException`.
You can check the state of a rule by using the `DescribeConfigRules`
request.
"""
def delete_config_rule(client, input, options \\ []) do
request(client, "DeleteConfigRule", input, options)
end
@doc """
Deletes the specified configuration aggregator and the aggregated data
associated with the aggregator.
"""
def delete_configuration_aggregator(client, input, options \\ []) do
request(client, "DeleteConfigurationAggregator", input, options)
end
@doc """
Deletes the configuration recorder.
After the configuration recorder is deleted, AWS Config will not record
resource configuration changes until you create a new configuration
recorder.
This action does not delete the configuration information that was
previously recorded. You will be able to access the previously recorded
information by using the `GetResourceConfigHistory` action, but you will
not be able to access this information in the AWS Config console until you
create a new configuration recorder.
"""
def delete_configuration_recorder(client, input, options \\ []) do
request(client, "DeleteConfigurationRecorder", input, options)
end
@doc """
Deletes the delivery channel.
Before you can delete the delivery channel, you must stop the configuration
recorder by using the `StopConfigurationRecorder` action.
"""
def delete_delivery_channel(client, input, options \\ []) do
request(client, "DeleteDeliveryChannel", input, options)
end
@doc """
Deletes the evaluation results for the specified AWS Config rule. You can
specify one AWS Config rule per request. After you delete the evaluation
results, you can call the `StartConfigRulesEvaluation` API to start
evaluating your AWS resources against the rule.
"""
def delete_evaluation_results(client, input, options \\ []) do
request(client, "DeleteEvaluationResults", input, options)
end
@doc """
Deletes pending authorization requests for a specified aggregator account
in a specified region.
"""
def delete_pending_aggregation_request(client, input, options \\ []) do
request(client, "DeletePendingAggregationRequest", input, options)
end
@doc """
Deletes the remediation configuration.
"""
def delete_remediation_configuration(client, input, options \\ []) do
request(client, "DeleteRemediationConfiguration", input, options)
end
@doc """
Deletes the retention configuration.
"""
def delete_retention_configuration(client, input, options \\ []) do
request(client, "DeleteRetentionConfiguration", input, options)
end
@doc """
Schedules delivery of a configuration snapshot to the Amazon S3 bucket in
the specified delivery channel. After the delivery has started, AWS Config
sends the following notifications using an Amazon SNS topic that you have
specified.
<ul> <li> Notification of the start of the delivery.
</li> <li> Notification of the completion of the delivery, if the delivery
was successfully completed.
</li> <li> Notification of delivery failure, if the delivery failed.
</li> </ul>
"""
def deliver_config_snapshot(client, input, options \\ []) do
request(client, "DeliverConfigSnapshot", input, options)
end
@doc """
Returns a list of compliant and noncompliant rules with the number of
resources for compliant and noncompliant rules.
<note> The results can return an empty result page, but if you have a
nextToken, the results are displayed on the next page.
</note>
"""
def describe_aggregate_compliance_by_config_rules(client, input, options \\ []) do
request(client, "DescribeAggregateComplianceByConfigRules", input, options)
end
@doc """
Returns a list of authorizations granted to various aggregator accounts and
regions.
"""
def describe_aggregation_authorizations(client, input, options \\ []) do
request(client, "DescribeAggregationAuthorizations", input, options)
end
@doc """
Indicates whether the specified AWS Config rules are compliant. If a rule
is noncompliant, this action returns the number of AWS resources that do
not comply with the rule.
A rule is compliant if all of the evaluated resources comply with it. It is
noncompliant if any of these resources do not comply.
If AWS Config has no current evaluation results for the rule, it returns
`INSUFFICIENT_DATA`. This result might indicate one of the following
conditions:
<ul> <li> AWS Config has never invoked an evaluation for the rule. To check
whether it has, use the `DescribeConfigRuleEvaluationStatus` action to get
the `LastSuccessfulInvocationTime` and `LastFailedInvocationTime`.
</li> <li> The rule's AWS Lambda function is failing to send evaluation
results to AWS Config. Verify that the role you assigned to your
configuration recorder includes the `config:PutEvaluations` permission. If
the rule is a custom rule, verify that the AWS Lambda execution role
includes the `config:PutEvaluations` permission.
</li> <li> The rule's AWS Lambda function has returned `NOT_APPLICABLE` for
all evaluation results. This can occur if the resources were deleted or
removed from the rule's scope.
</li> </ul>
"""
def describe_compliance_by_config_rule(client, input, options \\ []) do
request(client, "DescribeComplianceByConfigRule", input, options)
end
@doc """
Indicates whether the specified AWS resources are compliant. If a resource
is noncompliant, this action returns the number of AWS Config rules that
the resource does not comply with.
A resource is compliant if it complies with all the AWS Config rules that
evaluate it. It is noncompliant if it does not comply with one or more of
these rules.
If AWS Config has no current evaluation results for the resource, it
returns `INSUFFICIENT_DATA`. This result might indicate one of the
following conditions about the rules that evaluate the resource:
<ul> <li> AWS Config has never invoked an evaluation for the rule. To check
whether it has, use the `DescribeConfigRuleEvaluationStatus` action to get
the `LastSuccessfulInvocationTime` and `LastFailedInvocationTime`.
</li> <li> The rule's AWS Lambda function is failing to send evaluation
results to AWS Config. Verify that the role that you assigned to your
configuration recorder includes the `config:PutEvaluations` permission. If
the rule is a custom rule, verify that the AWS Lambda execution role
includes the `config:PutEvaluations` permission.
</li> <li> The rule's AWS Lambda function has returned `NOT_APPLICABLE` for
all evaluation results. This can occur if the resources were deleted or
removed from the rule's scope.
</li> </ul>
"""
def describe_compliance_by_resource(client, input, options \\ []) do
request(client, "DescribeComplianceByResource", input, options)
end
@doc """
Returns status information for each of your AWS managed Config rules. The
status includes information such as the last time AWS Config invoked the
rule, the last time AWS Config failed to invoke the rule, and the related
error for the last failure.
"""
def describe_config_rule_evaluation_status(client, input, options \\ []) do
request(client, "DescribeConfigRuleEvaluationStatus", input, options)
end
@doc """
Returns details about your AWS Config rules.
"""
def describe_config_rules(client, input, options \\ []) do
request(client, "DescribeConfigRules", input, options)
end
@doc """
Returns status information for sources within an aggregator. The status
includes information about the last time AWS Config verified authorization
between the source account and an aggregator account. In case of a failure,
the status contains the related error code or message.
"""
def describe_configuration_aggregator_sources_status(client, input, options \\ []) do
request(client, "DescribeConfigurationAggregatorSourcesStatus", input, options)
end
@doc """
Returns the details of one or more configuration aggregators. If the
configuration aggregator is not specified, this action returns the details
for all the configuration aggregators associated with the account.
"""
def describe_configuration_aggregators(client, input, options \\ []) do
request(client, "DescribeConfigurationAggregators", input, options)
end
@doc """
Returns the current status of the specified configuration recorder. If a
configuration recorder is not specified, this action returns the status of
all configuration recorders associated with the account.
<note> Currently, you can specify only one configuration recorder per
region in your account.
</note>
"""
def describe_configuration_recorder_status(client, input, options \\ []) do
request(client, "DescribeConfigurationRecorderStatus", input, options)
end
@doc """
Returns the details for the specified configuration recorders. If the
configuration recorder is not specified, this action returns the details
for all configuration recorders associated with the account.
<note> Currently, you can specify only one configuration recorder per
region in your account.
</note>
"""
def describe_configuration_recorders(client, input, options \\ []) do
request(client, "DescribeConfigurationRecorders", input, options)
end
@doc """
Returns the current status of the specified delivery channel. If a delivery
channel is not specified, this action returns the current status of all
delivery channels associated with the account.
<note> Currently, you can specify only one delivery channel per region in
your account.
</note>
"""
def describe_delivery_channel_status(client, input, options \\ []) do
request(client, "DescribeDeliveryChannelStatus", input, options)
end
@doc """
Returns details about the specified delivery channel. If a delivery channel
is not specified, this action returns the details of all delivery channels
associated with the account.
<note> Currently, you can specify only one delivery channel per region in
your account.
</note>
"""
def describe_delivery_channels(client, input, options \\ []) do
request(client, "DescribeDeliveryChannels", input, options)
end
@doc """
Returns a list of all pending aggregation requests.
"""
def describe_pending_aggregation_requests(client, input, options \\ []) do
request(client, "DescribePendingAggregationRequests", input, options)
end
@doc """
Returns the details of one or more remediation configurations.
"""
def describe_remediation_configurations(client, input, options \\ []) do
request(client, "DescribeRemediationConfigurations", input, options)
end
@doc """
Provides a detailed view of a Remediation Execution for a set of resources
including state, timestamps for when steps for the remediation execution
occur, and any error messages for steps that have failed. When you specify
the limit and the next token, you receive a paginated response.
"""
def describe_remediation_execution_status(client, input, options \\ []) do
request(client, "DescribeRemediationExecutionStatus", input, options)
end
@doc """
Returns the details of one or more retention configurations. If the
retention configuration name is not specified, this action returns the
details for all the retention configurations for that account.
<note> Currently, AWS Config supports only one retention configuration per
region in your account.
</note>
"""
def describe_retention_configurations(client, input, options \\ []) do
request(client, "DescribeRetentionConfigurations", input, options)
end
@doc """
Returns the evaluation results for the specified AWS Config rule for a
specific resource in a rule. The results indicate which AWS resources were
evaluated by the rule, when each resource was last evaluated, and whether
each resource complies with the rule.
<note> The results can return an empty result page. But if you have a
nextToken, the results are displayed on the next page.
</note>
"""
def get_aggregate_compliance_details_by_config_rule(client, input, options \\ []) do
request(client, "GetAggregateComplianceDetailsByConfigRule", input, options)
end
@doc """
Returns the number of compliant and noncompliant rules for one or more
accounts and regions in an aggregator.
<note> The results can return an empty result page, but if you have a
nextToken, the results are displayed on the next page.
</note>
"""
def get_aggregate_config_rule_compliance_summary(client, input, options \\ []) do
request(client, "GetAggregateConfigRuleComplianceSummary", input, options)
end
@doc """
Returns the resource counts across accounts and regions that are present in
your AWS Config aggregator. You can request the resource counts by
providing filters and GroupByKey.
For example, if the input contains accountID 12345678910 and region
us-east-1 in filters, the API returns the count of resources in account ID
12345678910 and region us-east-1. If the input contains ACCOUNT_ID as a
GroupByKey, the API returns resource counts for all source accounts that
are present in your aggregator.
"""
def get_aggregate_discovered_resource_counts(client, input, options \\ []) do
request(client, "GetAggregateDiscoveredResourceCounts", input, options)
end
@doc """
Returns configuration item that is aggregated for your specific resource in
a specific source account and region.
"""
def get_aggregate_resource_config(client, input, options \\ []) do
request(client, "GetAggregateResourceConfig", input, options)
end
@doc """
Returns the evaluation results for the specified AWS Config rule. The
results indicate which AWS resources were evaluated by the rule, when each
resource was last evaluated, and whether each resource complies with the
rule.
"""
def get_compliance_details_by_config_rule(client, input, options \\ []) do
request(client, "GetComplianceDetailsByConfigRule", input, options)
end
@doc """
Returns the evaluation results for the specified AWS resource. The results
indicate which AWS Config rules were used to evaluate the resource, when
each rule was last used, and whether the resource complies with each rule.
"""
def get_compliance_details_by_resource(client, input, options \\ []) do
request(client, "GetComplianceDetailsByResource", input, options)
end
@doc """
Returns the number of AWS Config rules that are compliant and noncompliant,
up to a maximum of 25 for each.
"""
def get_compliance_summary_by_config_rule(client, input, options \\ []) do
request(client, "GetComplianceSummaryByConfigRule", input, options)
end
@doc """
Returns the number of resources that are compliant and the number that are
noncompliant. You can specify one or more resource types to get these
numbers for each resource type. The maximum number returned is 100.
"""
def get_compliance_summary_by_resource_type(client, input, options \\ []) do
request(client, "GetComplianceSummaryByResourceType", input, options)
end
@doc """
Returns the resource types, the number of each resource type, and the total
number of resources that AWS Config is recording in this region for your
AWS account.
<p class="title"> **Example**
<ol> <li> AWS Config is recording three resource types in the US East
(Ohio) Region for your account: 25 EC2 instances, 20 IAM users, and 15 S3
buckets.
</li> <li> You make a call to the `GetDiscoveredResourceCounts` action and
specify that you want all resource types.
</li> <li> AWS Config returns the following:
<ul> <li> The resource types (EC2 instances, IAM users, and S3 buckets).
</li> <li> The number of each resource type (25, 20, and 15).
</li> <li> The total number of all resources (60).
</li> </ul> </li> </ol> The response is paginated. By default, AWS Config
lists 100 `ResourceCount` objects on each page. You can customize this
number with the `limit` parameter. The response includes a `nextToken`
string. To get the next page of results, run the request again and specify
the string for the `nextToken` parameter.
<note> If you make a call to the `GetDiscoveredResourceCounts` action, you
might not immediately receive resource counts in the following situations:
<ul> <li> You are a new AWS Config customer.
</li> <li> You just enabled resource recording.
</li> </ul> It might take a few minutes for AWS Config to record and count
your resources. Wait a few minutes and then retry the
`GetDiscoveredResourceCounts` action.
</note>
"""
def get_discovered_resource_counts(client, input, options \\ []) do
request(client, "GetDiscoveredResourceCounts", input, options)
end
@doc """
Returns a list of configuration items for the specified resource. The list
contains details about each state of the resource during the specified time
interval. If you specified a retention period to retain your
`ConfigurationItems` between a minimum of 30 days and a maximum of 7 years
(2557 days), AWS Config returns the `ConfigurationItems` for the specified
retention period.
The response is paginated. By default, AWS Config returns a limit of 10
configuration items per page. You can customize this number with the
`limit` parameter. The response includes a `nextToken` string. To get the
next page of results, run the request again and specify the string for the
`nextToken` parameter.
<note> Each call to the API is limited to span a duration of seven days. It
is likely that the number of records returned is smaller than the specified
`limit`. In such cases, you can make another call, using the `nextToken`.
</note>
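A paginated fetch might look like the following sketch (field names follow
the AWS API reference; values are placeholders):

    {:ok, page, _response} =
      AWS.Config.get_resource_config_history(client, %{
        "resourceType" => "AWS::EC2::Instance",
        "resourceId" => "i-0123456789abcdef0",
        "limit" => 10
      })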
"""
def get_resource_config_history(client, input, options \\ []) do
request(client, "GetResourceConfigHistory", input, options)
end
@doc """
Accepts a resource type and returns a list of resource identifiers that are
aggregated for a specific resource type across accounts and regions. A
resource identifier includes the resource type, ID, (if available) the
custom resource name, source account, and source region. You can narrow the
results to include only resources that have specific resource IDs, or a
resource name, or source account ID, or source region.
For example, if the input consists of accountID 12345678910 and the region
is us-east-1 for resource type `AWS::EC2::Instance` then the API returns
all the EC2 instance identifiers of accountID 12345678910 and region
us-east-1.
"""
def list_aggregate_discovered_resources(client, input, options \\ []) do
request(client, "ListAggregateDiscoveredResources", input, options)
end
@doc """
Accepts a resource type and returns a list of resource identifiers for the
resources of that type. A resource identifier includes the resource type,
ID, and (if available) the custom resource name. The results consist of
resources that AWS Config has discovered, including those that AWS Config
is not currently recording. You can narrow the results to include only
resources that have specific resource IDs or a resource name.
<note> You can specify either resource IDs or a resource name, but not
both, in the same request.
</note> The response is paginated. By default, AWS Config lists 100
resource identifiers on each page. You can customize this number with the
`limit` parameter. The response includes a `nextToken` string. To get the
next page of results, run the request again and specify the string for the
`nextToken` parameter.
"""
def list_discovered_resources(client, input, options \\ []) do
request(client, "ListDiscoveredResources", input, options)
end
@doc """
List the tags for AWS Config resource.
"""
def list_tags_for_resource(client, input, options \\ []) do
request(client, "ListTagsForResource", input, options)
end
@doc """
Authorizes the aggregator account and region to collect data from the
source account and region.
"""
def put_aggregation_authorization(client, input, options \\ []) do
request(client, "PutAggregationAuthorization", input, options)
end
@doc """
Adds or updates an AWS Config rule for evaluating whether your AWS
resources comply with your desired configurations.
You can use this action for custom AWS Config rules and AWS managed Config
rules. A custom AWS Config rule is a rule that you develop and maintain. An
AWS managed Config rule is a customizable, predefined rule that AWS Config
provides.
If you are adding a new custom AWS Config rule, you must first create the
AWS Lambda function that the rule invokes to evaluate your resources. When
you use the `PutConfigRule` action to add the rule to AWS Config, you must
specify the Amazon Resource Name (ARN) that AWS Lambda assigns to the
function. Specify the ARN for the `SourceIdentifier` key. This key is part
of the `Source` object, which is part of the `ConfigRule` object.
If you are adding an AWS managed Config rule, specify the rule's identifier
for the `SourceIdentifier` key. To reference AWS managed Config rule
identifiers, see [About AWS Managed Config
Rules](https://docs.aws.amazon.com/config/latest/developerguide/evaluate-config_use-managed-rules.html).
For any new rule that you add, specify the `ConfigRuleName` in the
`ConfigRule` object. Do not specify the `ConfigRuleArn` or the
`ConfigRuleId`. These values are generated by AWS Config for new rules.
If you are updating a rule that you added previously, you can specify the
rule by `ConfigRuleName`, `ConfigRuleId`, or `ConfigRuleArn` in the
`ConfigRule` data type that you use in this request.
The maximum number of rules that AWS Config supports is 150.
For information about requesting a rule limit increase, see [AWS Config
Limits](http://docs.aws.amazon.com/general/latest/gr/aws_service_limits.html#limits_config)
in the *AWS General Reference Guide*.
For more information about developing and using AWS Config rules, see
[Evaluating AWS Resource Configurations with AWS
Config](https://docs.aws.amazon.com/config/latest/developerguide/evaluate-config.html)
in the *AWS Config Developer Guide*.
"""
def put_config_rule(client, input, options \\ []) do
request(client, "PutConfigRule", input, options)
end
@doc """
Creates and updates the configuration aggregator with the selected source
accounts and regions. The source account can be individual account(s) or an
organization.
<note> AWS Config should be enabled in source accounts and regions you want
to aggregate.
If your source type is an organization, you must be signed in to the master
account and all features must be enabled in your organization. AWS Config
calls `EnableAwsServiceAccess` API to enable integration between AWS Config
and AWS Organizations.
</note>
"""
def put_configuration_aggregator(client, input, options \\ []) do
request(client, "PutConfigurationAggregator", input, options)
end
@doc """
Creates a new configuration recorder to record the selected resource
configurations.
You can use this action to change the role `roleARN` or the
`recordingGroup` of an existing recorder. To change the role, call the
action on the existing configuration recorder and specify a role.
<note> Currently, you can specify only one configuration recorder per
region in your account.
If `ConfigurationRecorder` does not have the **recordingGroup** parameter
specified, the default is to record all supported resource types.
</note>
"""
def put_configuration_recorder(client, input, options \\ []) do
request(client, "PutConfigurationRecorder", input, options)
end
@doc """
Creates a delivery channel object to deliver configuration information to
an Amazon S3 bucket and Amazon SNS topic.
Before you can create a delivery channel, you must create a configuration
recorder.
You can use this action to change the Amazon S3 bucket or an Amazon SNS
topic of the existing delivery channel. To change the Amazon S3 bucket or
an Amazon SNS topic, call this action and specify the changed values for
the S3 bucket and the SNS topic. If you specify a different value for
either the S3 bucket or the SNS topic, this action will keep the existing
value for the parameter that is not changed.
<note> You can have only one delivery channel per region in your account.
</note>
"""
def put_delivery_channel(client, input, options \\ []) do
request(client, "PutDeliveryChannel", input, options)
end
@doc """
Used by an AWS Lambda function to deliver evaluation results to AWS Config.
This action is required in every AWS Lambda function that is invoked by an
AWS Config rule.
"""
def put_evaluations(client, input, options \\ []) do
request(client, "PutEvaluations", input, options)
end
@doc """
Adds or updates the remediation configuration with a specific AWS Config
rule with the selected target or action. The API creates the
`RemediationConfiguration` object for the AWS Config rule. The AWS Config
rule must already exist for you to add a remediation configuration. The
target (SSM document) must exist, and you must have permissions to use it.
"""
def put_remediation_configurations(client, input, options \\ []) do
request(client, "PutRemediationConfigurations", input, options)
end
@doc """
Creates and updates the retention configuration with details about
retention period (number of days) that AWS Config stores your historical
information. The API creates the `RetentionConfiguration` object and names
the object as **default**. When you have a `RetentionConfiguration` object
named **default**, calling the API modifies the default object.
<note> Currently, AWS Config supports only one retention configuration per
region in your account.
</note>
"""
def put_retention_configuration(client, input, options \\ []) do
request(client, "PutRetentionConfiguration", input, options)
end
@doc """
Accepts a structured query language (SQL) `SELECT` command, performs the
corresponding search, and returns resource configurations matching the
properties.
For more information about query components, see the [ **Query Components**
](https://docs.aws.amazon.com/config/latest/developerguide/query-components.html)
section in the AWS Config Developer Guide.
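An illustrative call (the query expression is only an example):

    AWS.Config.select_resource_config(client, %{
      "Expression" => "SELECT resourceId, resourceType WHERE resourceType = 'AWS::EC2::Instance'"
    })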
"""
def select_resource_config(client, input, options \\ []) do
request(client, "SelectResourceConfig", input, options)
end
@doc """
Runs an on-demand evaluation for the specified AWS Config rules against the
last known configuration state of the resources. Use
`StartConfigRulesEvaluation` when you want to test that a rule you updated
is working as expected. `StartConfigRulesEvaluation` does not re-record the
latest configuration state for your resources. It re-runs an evaluation
against the last known state of your resources.
You can specify up to 25 AWS Config rules per request.
An existing `StartConfigRulesEvaluation` call for the specified rules must
complete before you can call the API again. If you chose to have AWS Config
stream to an Amazon SNS topic, you will receive a
`ConfigRuleEvaluationStarted` notification when the evaluation starts.
<note> You don't need to call the `StartConfigRulesEvaluation` API to run
an evaluation for a new rule. When you create a rule, AWS Config evaluates
your resources against the rule automatically.
</note> The `StartConfigRulesEvaluation` API is useful if you want to run
on-demand evaluations, such as the following example:
<ol> <li> You have a custom rule that evaluates your IAM resources every 24
hours.
</li> <li> You update your Lambda function to add additional conditions to
your rule.
</li> <li> Instead of waiting for the next periodic evaluation, you call
the `StartConfigRulesEvaluation` API.
</li> <li> AWS Config invokes your Lambda function and evaluates your IAM
resources.
</li> <li> Your custom rule will still run periodic evaluations every 24
hours.
</li> </ol>
"""
def start_config_rules_evaluation(client, input, options \\ []) do
request(client, "StartConfigRulesEvaluation", input, options)
end
@doc """
Starts recording configurations of the AWS resources you have selected to
record in your AWS account.
You must have created at least one delivery channel to successfully start
the configuration recorder.
"""
def start_configuration_recorder(client, input, options \\ []) do
request(client, "StartConfigurationRecorder", input, options)
end
@doc """
Runs an on-demand remediation for the specified AWS Config rules against
the last known remediation configuration. It runs an execution against the
current state of your resources. Remediation execution is asynchronous.
You can specify up to 100 resource keys per request. An existing
StartRemediationExecution call for the specified resource keys must
complete before you can call the API again.
"""
def start_remediation_execution(client, input, options \\ []) do
request(client, "StartRemediationExecution", input, options)
end
@doc """
Stops recording configurations of the AWS resources you have selected to
record in your AWS account.
"""
def stop_configuration_recorder(client, input, options \\ []) do
request(client, "StopConfigurationRecorder", input, options)
end
@doc """
Associates the specified tags to a resource with the specified resourceArn.
If existing tags on a resource are not specified in the request parameters,
they are not changed. When a resource is deleted, the tags associated with
that resource are deleted as well.
"""
def tag_resource(client, input, options \\ []) do
request(client, "TagResource", input, options)
end
@doc """
Deletes specified tags from a resource.
"""
def untag_resource(client, input, options \\ []) do
request(client, "UntagResource", input, options)
end
@spec request(map(), binary(), map(), list()) ::
{:ok, Poison.Parser.t | nil, Poison.Response.t} |
{:error, Poison.Parser.t} |
{:error, HTTPoison.Error.t}
defp request(client, action, input, options) do
client = %{client | service: "config"}
host = get_host("config", client)
url = get_url(host, client)
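    # AWS Config speaks the JSON 1.1 protocol; "StarlingDoveService" below is
    # the service's internal target prefix, not a typo.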
headers = [{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "StarlingDoveService.#{action}"}]
payload = Poison.Encoder.encode(input, [])
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
case HTTPoison.post(url, payload, headers, options) do
{:ok, response=%HTTPoison.Response{status_code: 200, body: ""}} ->
{:ok, nil, response}
{:ok, response=%HTTPoison.Response{status_code: 200, body: body}} ->
{:ok, Poison.Parser.parse!(body), response}
{:ok, _response=%HTTPoison.Response{body: body}} ->
error = Poison.Parser.parse!(body)
exception = error["__type"]
message = error["message"]
{:error, {exception, message}}
{:error, %HTTPoison.Error{reason: reason}} ->
{:error, %HTTPoison.Error{reason: reason}}
end
end
defp get_host(endpoint_prefix, client) do
if client.region == "local" do
"localhost"
else
"#{endpoint_prefix}.#{client.region}.#{client.endpoint}"
end
end
defp get_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
end
|
lib/aws/config.ex
| 0.89833 | 0.519399 |
config.ex
|
starcoder
|
defmodule DarknetToOnnx.ConvParams do
@moduledoc """
Helper class to store the hyper parameters of a Conv layer,
including its prefix name in the ONNX graph and the expected dimensions
of weights for convolution, bias, and batch normalization.
  Additionally, it acts as a wrapper for generating safe names for all
  weights and checks that the requested combinations are valid.
"""
use Agent, restart: :transient
@doc """
Constructor based on the base node name (e.g. 101_convolutional), the batch
normalization setting, and the convolutional weights shape.
Keyword arguments:
node_name -- base name of this YOLO convolutional layer
batch_normalize -- bool value if batch normalization is used
conv_weight_dims -- the dimensions of this layer's convolutional weights
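  ## Example

      # A sketch; the dimensions are illustrative only.
      DarknetToOnnx.ConvParams.start_link(
        node_name: "101_convolutional",
        batch_normalize: true,
        conv_weight_dims: [255, 256, 1, 1]
      )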
"""
# node_name, batch_normalize, conv_weight_dims) do
def start_link(opts) do
initial_state = %{
node_name: Keyword.fetch!(opts, :node_name),
batch_normalize: Keyword.fetch!(opts, :batch_normalize),
      # TODO: validate that length(conv_weight_dims) == 4
conv_weight_dims: Keyword.fetch!(opts, :conv_weight_dims)
}
Agent.start_link(fn -> initial_state end, name: String.to_atom(initial_state.node_name))
initial_state
end
def get_state(node_name) do
Agent.get(String.to_atom(node_name), fn state -> state end)
end
@doc """
Generates a name based on two string inputs,
and checks if the combination is valid.
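  ## Example

      # A sketch, assuming an Agent for "101_convolutional" was started
      # with batch_normalize: true:
      generate_param_name("101_convolutional", "bn", "scale")
      #=> "101_convolutional_bn_scale"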
"""
  # A bodiless function head is needed to declare the default value, since
  # Elixir only allows defaults in a single clause of a multi-clause function.
def generate_param_name(node_name, param_category, suffix \\ nil)
def generate_param_name(node_name, param_category, suffix)
when suffix != nil and param_category in ["bn", "conv"] and
suffix in ["scale", "mean", "var", "weights", "bias"] and
is_binary(node_name) do
generate_param_name(get_state(node_name), param_category, suffix)
end
def generate_param_name(state, param_category, suffix)
when suffix != nil and param_category in ["bn", "conv"] and
suffix in ["scale", "mean", "var", "weights", "bias"] do
    cond do
      # "bn" params are only valid when batch normalization is enabled and
      # the suffix is one of the batch-norm weights.
      param_category == "bn" and
          (state.batch_normalize != true or suffix not in ["scale", "bias", "mean", "var"]) ->
        raise "Error in generate_param_name: wrong suffix " <> suffix <> " for bn category"

      # "conv" params only accept "weights" and "bias"; "bias" is folded into
      # batch normalization when it is enabled, so it is rejected in that case.
      param_category == "conv" and suffix not in ["weights", "bias"] ->
        raise "Error in generate_param_name: wrong suffix " <> suffix <> " for conv category"

      param_category == "conv" and suffix == "bias" and state.batch_normalize == true ->
        raise "Error in generate_param_name: suffix bias not allowed for conv " <>
                "category when batch normalization is used"

      true ->
        state.node_name <> "_" <> param_category <> "_" <> suffix
    end
end
end
|
lib/darknet_to_onnx/convparams.ex
| 0.7478 | 0.716888 |
convparams.ex
|
starcoder
|
defmodule Hunter.Card do
@moduledoc """
Card entity
This module defines a `Hunter.Card` struct and the main functions
for working with Cards
## Fields
* `url`- the url associated with the card
* `title` - the title of the card
* `description` - the card description
* `image` - the image associated with the card, if any
* `type` - `link`, `photo`, `video`, or `rich`
* `author_name` - name of the author/owner of the resource
* `author_url` - URL for the author/owner of the resource
* `provider_name` - name of the resource provider
* `provider_url` - url of the resource provider
* `html` - HTML required to display the resource
* `width` - width in pixels
* `height` - height in pixels
"""
alias Hunter.Config
@type t :: %__MODULE__{
url: String.t(),
title: String.t(),
description: String.t(),
image: String.t(),
type: String.t(),
author_name: String.t(),
author_url: String.t(),
provider_name: String.t(),
provider_url: String.t(),
html: String.t(),
width: non_neg_integer,
height: non_neg_integer
}
@derive [Poison.Encoder]
defstruct [
:url,
:title,
:description,
:image,
:type,
:author_name,
:author_url,
:provider_name,
:provider_url,
:html,
:width,
:height
]
@doc """
Retrieve a card associated with a status
## Parameters
* `conn` - connection credentials
* `id` - status id
## Examples
iex> conn = Hunter.new([base_url: "https://social.lou.lt", bearer_token: "<PASSWORD>"])
%Hunter.Client{base_url: "https://social.lou.lt", bearer_token: "<PASSWORD>"}
iex> Hunter.Card.card_by_status(conn, 118_635)
%Hunter.Card{description: "hunter - A Elixir client for Mastodon, a GNU Social compatible micro-blogging service",
image: "https://social.lou.lt/system/preview_cards/images/000/000/378/original/34700?1491626499",
title: "milmazz/hunter", url: "https://github.com/milmazz/hunter"}
"""
@spec card_by_status(Hunter.Client.t(), non_neg_integer) :: Hunter.Card.t()
def card_by_status(conn, id) do
Config.hunter_api().card_by_status(conn, id)
end
end
|
lib/hunter/card.ex
| 0.853088 | 0.53783 |
card.ex
|
starcoder
|
defmodule Yaps.PushBackend do
@moduledoc """
This module is used to define a push backend service.
When used, the following options are allowed:
* `:adapter` - the adapter to be used for the backend.
* `:env` - configures the repository to support environments
## Example
defmodule APNSBackend do
use Yaps.PushBackend, adapter: Yaps.Adapters.Apns
def conf do
[
certfile: "/path/to/certificate",
keyfile: "/path/to/key"
]
end
end
  Most of the time, we want the backend to work with different
  environments. In such cases, we can pass an `:env` option:
defmodule APNSBackend do
use Yaps.PushBackend, adapter: Yaps.Adapters.Apns, env: Mix.env
def conf(:prod) do
[
certfile: "/path/to/production/certificate",
keyfile: "/path/to/production/key"
]
end
def conf(:dev) do
[
certfile: "/path/to/development/certificate",
keyfile: "/path/to/development/key"
]
end
end
Notice that, when using the environment, developers should implement
`conf/1` which automatically passes the environment instead of `conf/0`.
Note the environment is only used at compilation time. That said, make
sure the `:build_per_environment` option is set to true (the default)
in your Mix project configuration.
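  Once defined, the backend can be started and used to deliver notifications
  (a sketch; the recipient token and payload are placeholders):

      {:ok, _pid} = APNSBackend.start_link
      APNSBackend.send_push(device_token, payload)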
"""
use Behaviour
@type t :: module
defmacro __using__(opts) do
adapter = Macro.expand(Keyword.fetch!(opts, :adapter), __CALLER__)
env = Keyword.get(opts, :env)
quote do
use unquote(adapter)
@behaviour Yaps.PushBackend
@env unquote(env)
import Application, only: [app_dir: 2]
if @env do
def conf do
conf(@env)
end
defoverridable conf: 0
end
def start_link do
unquote(adapter).start_link(__MODULE__, conf)
end
def stop do
unquote(adapter).stop(__MODULE__)
end
def send_push(recipient, payload, opts \\ []) do
Yaps.PushBackend.Backend.send_push(
__MODULE__,
unquote(adapter),
recipient,
payload,
opts
)
end
def adapter do
unquote(adapter)
end
end
end
@doc """
Should return the options that will be given to the push backend adapter. This
function must be implemented by the user.
"""
defcallback conf() :: Keyword.t
@doc """
Starts any connection pooling or supervision and return `{:ok, pid}`
or just `:ok` if nothing needs to be done.
  Returns `{:error, {:already_started, pid}}` if the backend has already
  been started, or `{:error, term}` in case anything else goes wrong.
"""
defcallback start_link() :: {:ok, pid} | :ok |
{:error, {:already_started, pid}} |
{:error, term}
@doc """
Stops any connection pooling or supervision started with `start_link/1`.
"""
defcallback stop() :: :ok
@doc """
Sends a push notification.
"""
defcallback send_push(Bitstring, Bitstring, Keyword.t) :: :ok | {:error, term}
@doc """
Returns the adapter this backend is configured to use.
"""
defcallback adapter() :: Yaps.Adapter.t
end
|
lib/yaps/push_backend.ex
| 0.806586 | 0.469095 |
push_backend.ex
|
starcoder
|
defmodule Resty.Resource.Base do
alias Resty.Resource.Relations
@moduledoc """
This module is used to create **resource struct** that you'll then be able to
use with `Resty.Repo` and `Resty.Resource`.
## Using the module
`Resty.Resource.Base` is here to help you create resource structs. The
resource struct and its module holds informations about how to query the API
such as the *site*, *headers*, *path*, *auth* etc...
This module (`Resty.Resource.Base`) defines a lot of macros to configure
these options. You'll be able to call them right after calling
`use Resty.Resource.Base`.
```
defmodule MyResource do
use Resty.Resource.Base
set_site("site.tld")
set_resource_path("/posts")
define_attributes([:name])
end
```
### Primary key
  Resty resources have a primary key attribute that defaults to
  `:id`. If you want to use another field as the primary key, you can set it
  with the `set_primary_key/1` macro.
### Attributes
  Unlike *ActiveResource*, Resty needs you to define which attributes
  are allowed on the resource.
  They are defined with the `define_attributes/1` macro. Attributes do not
  support type casting; values are taken as they come from the configured
  `Resty.Serializer`.
## Using the resource
Once you have a resource you can use it with `Resty.Repo` and `Resty.Resource`
in order to query the API or get informations about retrieved resources.
```
MyResource |> Resty.Repo.all!()
```
"""
defmacro __using__(_opts) do
quote do
import unquote(__MODULE__)
@before_compile unquote(__MODULE__)
@default_headers Resty.default_headers()
Module.register_attribute(__MODULE__, :attributes, accumulate: true)
Module.register_attribute(__MODULE__, :headers, accumulate: true)
Module.register_attribute(__MODULE__, :relations, accumulate: true)
Module.put_attribute(__MODULE__, :site, Resty.default_site())
Module.put_attribute(__MODULE__, :resource_path, "")
Module.put_attribute(__MODULE__, :primary_key, :id)
Module.put_attribute(__MODULE__, :include_root, false)
Module.put_attribute(__MODULE__, :extension, "")
Module.put_attribute(__MODULE__, :connection, Resty.default_connection())
Module.put_attribute(__MODULE__, :connection_params, [])
Module.put_attribute(__MODULE__, :auth, Resty.default_auth())
Module.put_attribute(__MODULE__, :auth_params, [])
Module.put_attribute(__MODULE__, :serializer, Resty.default_serializer())
Module.put_attribute(__MODULE__, :serializer_params, [])
end
end
@doc """
Define the given attributes on the resource struct.
"""
defmacro define_attributes(attributes) when is_list(attributes) do
quote do
for new_attribute <- unquote(attributes) do
Module.put_attribute(__MODULE__, :attributes, new_attribute)
end
end
end
@doc """
Set the `Resty.Connection` implementation that should be used to query this
resource.
"""
defmacro set_connection(connection, params \\ []) do
quote do
@connection unquote(connection)
@connection_params unquote(params)
end
end
@doc """
Set the `Resty.Serializer` implementation that should be used to serialize
and deserialize this resource.
"""
defmacro set_serializer(serializer, params \\ []) do
quote do
@serializer unquote(serializer)
@serializer_params unquote(params)
end
end
@doc """
  Set the site of the resource
"""
defmacro set_site(site) do
quote(do: @site(unquote(site)))
end
@doc """
  Set the resource path
"""
defmacro set_resource_path(path) do
quote(do: @resource_path(unquote(path)))
end
@doc """
Sets the resource primary key. By default it is `:id`.
"""
defmacro set_primary_key(name) do
quote(do: @primary_key(unquote(name)))
end
@doc """
Sets the resource extension. The extension will be added in the URL.
"""
defmacro set_extension(extension) do
quote(do: @extension(unquote(extension)))
end
@doc """
Set the `Resty.Auth` implementation that should be used to query this resource.
"""
defmacro with_auth(auth, params \\ []) do
quote do
@auth unquote(auth)
@auth_params unquote(params)
end
end
@doc """
Include the given root when serializing the resource
"""
defmacro include_root(value) do
quote(do: @include_root(unquote(value)))
end
@doc """
  Add a header to the requests sent from this resource
"""
defmacro add_header(name, value) when is_atom(name) do
quote(do: @headers({unquote(name), unquote(value)}))
end
@doc """
This will replace the default headers (`Resty.default_headers/0`) used by
this resource.
"""
defmacro set_headers(new_headers) do
quote(do: @default_headers(unquote(new_headers)))
end
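  @doc """
  Declares a belongs-to relation on the resource. The foreign key is
  registered as an attribute and the relation is recorded so it can be
  resolved later. A sketch (the related module and key names are
  illustrative):

      belongs_to(Author, :author, :author_id)
  """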
defmacro belongs_to(resource, attribute_name, foreign_key) do
quote do
@attributes unquote(foreign_key)
@relations %Relations.BelongsTo{
related: unquote(resource),
attribute: unquote(attribute_name),
foreign_key: unquote(foreign_key)
}
end
end
defmacro __before_compile__(_env) do
quote do
@known_attributes [@primary_key] ++ @attributes
defstruct @known_attributes ++ [__persisted__: false]
@doc false
def site, do: @site
@doc false
def primary_key, do: @primary_key
@doc false
def resource_path, do: @resource_path
@doc false
def known_attributes, do: @known_attributes
@doc false
def serializer, do: {@serializer, @serializer_params}
@doc false
def include_root, do: @include_root
@doc false
def extension, do: @extension
@doc false
def headers, do: Keyword.merge(@default_headers, @headers)
@doc false
def connection, do: {@connection, @connection_params}
@doc false
def auth, do: {@auth, @auth_params}
@doc false
def relations, do: @relations
@doc """
Create a new resource with the given attributes.
"""
def build(attributes \\ []) do
Resty.Resource.Builder.build(__MODULE__, attributes)
end
end
end
end
|
lib/resty/resource/base.ex
| 0.837088 | 0.571318 |
base.ex
|
starcoder
|
defmodule Nebulex.Cache.Stats do
@moduledoc """
This module defines the supported built-in stats.
By default, each adapter is responsible for providing stats support.
However, Nebulex suggests supporting the built-in stats described
in this module, which are also supported by the built-in adapters.
## Usage
First of all, we define a cache:
defmodule MyApp.Cache do
use Nebulex.Cache,
otp_app: :nebulex,
adapter: Nebulex.Adapters.Local
end
Then we configure it enabling the stats, like so:
config :my_app, MyApp.Cache,
stats: true,
gc_interval: 86_400_000, #=> 1 day
max_size: 200_000,
gc_cleanup_min_timeout: 10_000,
gc_cleanup_max_timeout: 900_000
> Remember to add the cache to your application's supervision tree.
Since we are using a built-in adapter and the stats have been enabled
(`stats: true`), the stat counters will be automatically fed by the
adapter.
You can ask for the current stats values at any time by calling:
Nebulex.Cache.Stats.info(MyApp.Cache)
## Using stats helpers
You can inject the stats helpers in the cache like this:
defmodule MyApp.Cache do
use Nebulex.Cache,
otp_app: :nebulex,
adapter: Nebulex.Adapters.Local
# Use stats helpers
use Nebulex.Cache.Stats
end
### Retrieving stats info
MyApp.Cache.stats_info()
By calling this injected helper, the function `Nebulex.Cache.Stats.info/1`
is called under the hood, but the cache name is resolved automatically.
### Dispatching telemetry events
MyApp.Cache.dispatch_stats()
MyApp.Cache.dispatch_stats(event_prefix: [:my_cache, :stats])
By calling this injected helper, the function `Nebulex.Cache.Stats.dispatch/2`
is called under the hood, but the cache name is resolved automatically.
## Telemetry events
Integrating telemetry is very easy since with the helper function
`MyApp.Cache.dispatch_stats/1` (via the `__using__` macro) described
previously you can emit telemetry events with the current stats at any time.
What we need to resolve is how to emit the stats automatically every given
period of time.
To do so, we can use `:telemetry_poller` and define a custom measurement:
:telemetry_poller.start_link(
measurements: [
{MyApp.Cache, :dispatch_stats, []},
],
# configure sampling period - default is :timer.seconds(5)
period: :timer.seconds(10),
name: :my_cache_stats_poller
)
Or you can also start the `:telemetry_poller` process along with your
application supervision tree, like so:
def start(_type, _args) do
my_cache_stats_poller_opts = [
measurements: [
{MyApp.Cache, :dispatch_stats, []},
],
period: :timer.seconds(10),
name: :my_cache_stats_poller
]
children = [
{MyApp.Cache, []},
{:telemetry_poller, my_cache_stats_poller_opts}
]
opts = [strategy: :one_for_one, name: MyApp.Supervisor]
Supervisor.start_link(children, opts)
end
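You can then consume these events by attaching a handler (an illustrative
handler that only inspects the stats):

    :telemetry.attach(
      "my-cache-stats-handler",
      [:nebulex, :cache, :stats],
      fn _event, measurements, metadata, _config ->
        # Prints, e.g., {MyApp.Cache, %{hits: 10, misses: 2, ...}}
        IO.inspect({metadata.cache, measurements})
      end,
      nil
    )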
See [Nebulex Telemetry Guide](http://hexdocs.pm/nebulex/telemetry.html).
"""
# Stats Struct
defstruct hits: 0, misses: 0, writes: 0, evictions: 0, expirations: 0
@type t :: %__MODULE__{
hits: non_neg_integer,
misses: non_neg_integer,
writes: non_neg_integer,
evictions: non_neg_integer,
expirations: non_neg_integer
}
@type stat :: :hits | :misses | :writes | :evictions | :expirations
@doc false
defmacro __using__(_opts) do
quote do
alias Nebulex.Cache.Stats
@doc false
def stats_info do
Stats.info(get_dynamic_cache())
end
if Code.ensure_loaded?(:telemetry) do
@doc false
def dispatch_stats(opts \\ []) do
Stats.dispatch(get_dynamic_cache(), opts)
end
end
end
end
import Nebulex.Helpers
alias Nebulex.Adapter
## API
@doc """
Initializes the Erlang counter to be used by the calling cache to feed
the stat values; see the module documentation for more information about
the supported stats.
Returns `nil` if the option `:stats` is set to `false` or it is not set at
all; in that case, the stats will be skipped.
## Example
Nebulex.Cache.Stats.init(opts)
> **NOTE:** This function is normally called by the adapter in
case it supports the Nebulex suggested stats; the adapter
should feed `Nebulex.Cache.Stats.t()` counters.
See built-in adapters for more information about the usage.
"""
@spec init(Keyword.t()) :: :counters.counters_ref() | nil
def init(opts) do
case get_option(opts, :stats, &is_boolean(&1), false) do
true -> :counters.new(5, [:write_concurrency])
false -> nil
end
end
@doc """
Increments the `counter`'s stat `stat` by the given `incr` value.
## Examples
Nebulex.Cache.Stats.incr(stat_counter, :hits)
Nebulex.Cache.Stats.incr(stat_counter, :writes, 10)
> **NOTE:** This function is normally called by the adapter in
case it supports the Nebulex suggested stats; the adapter
should feed `Nebulex.Cache.Stats.t()` counters.
See built-in adapters for more information about the usage.
"""
@spec incr(:counters.counters_ref() | nil, stat, integer) :: :ok
def incr(counter, stat, incr \\ 1)
def incr(nil, _stat, _incr), do: :ok
def incr(ref, :hits, incr), do: :counters.add(ref, 1, incr)
def incr(ref, :misses, incr), do: :counters.add(ref, 2, incr)
def incr(ref, :writes, incr), do: :counters.add(ref, 3, incr)
def incr(ref, :evictions, incr), do: :counters.add(ref, 4, incr)
def incr(ref, :expirations, incr), do: :counters.add(ref, 5, incr)
@doc """
Returns the struct `Nebulex.Cache.Stats` with the current stats values for
the given cache name or counter reference. Normally, the cache name is
passed so that the counter reference is retrieved and handled internally.
Returns `nil` if the stats are disabled or if the adapter doesn't support
this feature.
## Example
iex> Nebulex.Cache.Stats.info(MyCache)
%Nebulex.Cache.Stats{
evictions: 0,
expirations: 0,
hits: 0,
misses: 0,
writes: 0
}
"""
@spec info(:counters.counters_ref() | atom | nil) :: t | nil
def info(nil), do: nil
def info(name) when is_atom(name) do
Adapter.with_meta(name, fn _adapter, meta ->
meta
|> Map.get(:stat_counter)
|> info()
end)
end
def info(ref) do
%__MODULE__{
hits: :counters.get(ref, 1),
misses: :counters.get(ref, 2),
writes: :counters.get(ref, 3),
evictions: :counters.get(ref, 4),
expirations: :counters.get(ref, 5)
}
end
if Code.ensure_loaded?(:telemetry) do
@doc """
Emits a telemetry event when called with the current stats count.
The `:measurements` map will include the current count for each stat:
* `:hits` - Current **hits** count.
* `:misses` - Current **misses** count.
* `:writes` - Current **writes** count.
* `:evictions` - Current **evictions** count.
* `:expirations` - Current **expirations** count.
The telemetry `:metadata` map will include the following fields:
* `:cache` - The cache module, or the name (if an explicit name has been
given to the cache).
Additionally, you can add your own metadata fields by giving the option
`:metadata`.
## Options
* `:event_prefix` – The prefix of the telemetry event.
Defaults to `[:nebulex, :cache]`.
* `:metadata` – A map with additional metadata fields. Defaults to `%{}`.
## Examples
iex> Nebulex.Cache.Stats.dispatch(MyCache)
:ok
iex> Nebulex.Cache.Stats.dispatch(
...> MyCache,
...> event_prefix: [:my_cache],
...> metadata: %{tag: "tag1"}
...> )
:ok
"""
@spec dispatch(atom, Keyword.t()) :: :ok
def dispatch(cache_or_name, opts \\ []) do
if info = __MODULE__.info(cache_or_name) do
:telemetry.execute(
Keyword.get(opts, :event_prefix, [:nebulex, :cache]) ++ [:stats],
Map.from_struct(info),
opts |> Keyword.get(:metadata, %{}) |> Map.put(:cache, cache_or_name)
)
else
:ok
end
end
end
end
|
lib/nebulex/cache/stats.ex
| 0.887174 | 0.576304 |
stats.ex
|
starcoder
|
defmodule Sider do
@type key :: any()
@type value :: any()
@type args ::
%{
reap_interval: pos_integer(),
capacity: pos_integer(),
name: atom,
}
| %{
name: atom,
capacity: pos_integer()
}
| %{
capacity: pos_integer()
}
@moduledoc """
Sider is an in-memory key-value store with the following characteristics:
1. keys & values may be of any type
2. Key-value pairs expire - once set in the store, they are only valid for a given time
3. Sider has O(keys) + O(values) memory characteristics
4. The cache maintains consistent access times - It will not degrade when reaping expired values
The usage of Sider is as follows. Usually, it will be started under a supervisor with a given name
```
children = [
{Sider, %{capacity: 100, name: :my_cache}}
]
Supervisor.start_link(children, strategy: :one_for_one)
```
You can then call the cache via its given name, similar to this
```
Sider.get(:my_cache, :a)
```
"""
@doc """
Create a sider cache process. The behavior of the sider cache can be controlled by the following args:
* `reap_interval`: The number of milliseconds to wait before removing keys that have expired
* `capacity`: The number of keys allowed in the store. This includes expired keys that have not been reaped.
## Examples
iex> {:ok, _pid} = Sider.start_link(%{reap_interval: 60_000, capacity: 1_000_000, name: :my_cache})
iex> :ok
:ok
"""
@spec start_link(args) :: GenServer.on_start()
def start_link(args) do
opts = case Map.fetch(args, :name) do
{:ok, name} -> [name: name]
:error -> []
end
args = %{
capacity: args.capacity,
reap_interval: Map.get(args, :reap_interval, 60_000)
}
GenServer.start_link(Sider.Impl, args, opts)
end
@spec child_spec(args) :: Supervisor.child_spec()
def child_spec(args) do
%{
id: __MODULE__,
start: {__MODULE__, :start_link, [args]},
type: :worker,
restart: :permanent,
shutdown: 500
}
end
@doc """
Returns the value stored under an existing key, if it has not expired.
## Examples
iex> {:ok, pid} = Sider.start_link(%{reap_interval: 1, capacity: 100})
iex> Sider.set(pid, :a, :foo)
iex> {:ok, :foo} = Sider.get(pid, :a)
iex> {:error, :missing_key} = Sider.get(pid, :b)
iex> :ok
:ok
"""
@spec get(GenServer.server(), key) :: {:ok, value} | {:error, :missing_key}
def get(pid, key) do
GenServer.call(pid, {:get, key})
end
@doc """
Store a new key-value pair, with an optional timeout for when the pair should expire
Returns :ok if successful, or {:error, :max_capacity} if the cache is full
If you call set() on a key that already exists in the store, that key-value pair will be overwritten
## Examples
iex> {:ok, pid} = Sider.start_link(%{reap_interval: 1000, capacity: 1})
iex> :ok = Sider.set(pid, :a, :foo, 1000) # Set a key with a value of :foo that expires after 1000ms
iex> :ok = Sider.set(pid, :a, {1, 2}, 1000) # Overwrite the key
iex> Sider.set(pid, :b, :bar) # The capacity is 1, so the key cannot be written
{:error, :max_capacity}
"""
@spec set(GenServer.server(), key, value, pos_integer() | nil) :: :ok | {:error, :max_capacity}
def set(pid, key, value, timeout \\ nil) do
GenServer.call(pid, {:set, key, value, timeout})
end
@doc """
Removes a key-value pair from the cache, if it exists.
This function no-ops if the key is nonexistent
If you pass in the `only: :expired` option, the value will only be removed if the entry has expired
(See the timeout value in `Sider.set/4`)
## Examples
iex> {:ok, pid} = Sider.start_link(%{reap_interval: 1000, capacity: 100})
iex> Sider.set(pid, :a, :foo)
iex> Sider.remove(pid, :a)
nil
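You can remove only entries that have expired by passing `only: :expired`
(illustrative; the short timeout below is assumed to have elapsed after the sleep):
iex> {:ok, pid} = Sider.start_link(%{reap_interval: 60_000, capacity: 100})
iex> Sider.set(pid, :a, :foo, 1)
iex> Process.sleep(10)
iex> Sider.remove(pid, :a, only: :expired)
nil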
"""
@spec remove(GenServer.server(), key, [] | [{:only, :expired}]) :: nil
def remove(pid, key, opts \\ []) do
GenServer.call(pid, {:remove, key, opts})
end
end
|
lib/sider.ex
| 0.8789 | 0.755141 |
sider.ex
|
starcoder
|
defmodule Stripe.Invoice do
@moduledoc """
Work with Stripe invoice objects.
You can:
- Create an invoice
- Retrieve an invoice
- Update an invoice
- Retrieve an upcoming invoice
- List all invoices
Does not take options yet.
Stripe API reference: https://stripe.com/docs/api#invoice
"""
@type t :: %__MODULE__{}
defstruct [
:id, :object,
:amount_due, :application_fee, :attempt_count, :attempted,
:charge, :closed, :currency, :customer, :date, :description, :discount,
:ending_balance, :forgiven, :lines, :livemode, :metadata,
:next_payment_attempt, :paid, :period_end, :period_start,
:receipt_number, :starting_balance, :statement_descriptor,
:subscription, :subscription_proration_date, :subtotal, :tax,
:tax_percent, :total, :webhooks_delivered_at
]
@plural_endpoint "invoices"
@schema %{
amount_due: [:retrieve],
application_fee: [:create, :retrieve, :update],
attempt_count: [:retrieve],
attempted: [:retrieve],
charge: [:retrieve],
closed: [:retrieve],
currency: [:retrieve],
customer: [:retrieve],
date: [:retrieve],
description: [:create, :retrieve, :update],
discount: [:retrieve],
ending_balance: [:retrieve],
forgiven: [:retrieve, :update],
id: [:retrieve],
lines: [:retrieve],
livemode: [:retrieve],
metadata: [:create, :retrieve, :update],
next_payment_attempt: [:retrieve],
paid: [:retrieve],
period_end: [:retrieve],
period_start: [:retrieve],
receipt_number: [:retrieve],
starting_balance: [:retrieve],
statement_descriptor: [:retrieve, :update],
subscription: [:create, :retrieve],
subscription_proration_date: [:retrieve],
subtotal: [:retrieve],
tax: [:retrieve],
tax_percent: [:create, :retrieve, :update],
total: [:retrieve],
webhooks_delivered_at: [:retrieve]
}
@nullable_keys []
@doc """
Create an invoice.
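## Example
A minimal sketch (the attribute values are illustrative; assumes an API key
is already configured):
    {:ok, invoice} = Stripe.Invoice.create(%{description: "Monthly usage", metadata: %{"order_id" => "123"}})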
"""
@spec create(map, Keyword.t) :: {:ok, t} | {:error, Stripe.api_error_struct}
def create(changes, opts \\ []) do
Stripe.Request.create(@plural_endpoint, changes, @schema, opts)
end
@doc """
Retrieve an invoice.
"""
@spec retrieve(binary, Keyword.t) :: {:ok, t} | {:error, Stripe.api_error_struct}
def retrieve(id, opts \\ []) do
endpoint = @plural_endpoint <> "/" <> id
Stripe.Request.retrieve(endpoint, opts)
end
@doc """
Update an invoice.
Takes the `id` and a map of changes.
"""
@spec update(binary, map, Keyword.t) :: {:ok, t} | {:error, Stripe.api_error_struct}
def update(id, changes, opts \\ []) do
endpoint = @plural_endpoint <> "/" <> id
Stripe.Request.update(endpoint, changes, @schema, @nullable_keys, opts)
end
@doc """
Retrieve an upcoming invoice.
"""
@spec upcoming(map, Keyword.t) :: {:ok, t} | {:error, Stripe.api_error_struct}
def upcoming(changes = %{customer: _customer}, opts \\ []) do
endpoint = @plural_endpoint <> "/upcoming"
Stripe.Request.retrieve(changes, endpoint, opts)
end
@doc """
List all invoices.
"""
@spec list(map, Keyword.t) :: {:ok, Stripe.List.t} | {:error, Stripe.api_error_struct}
def list(params \\ %{}, opts \\ []) do
endpoint = @plural_endpoint
Stripe.Request.retrieve(params, endpoint, opts)
end
end
|
lib/stripe/invoice.ex
| 0.739705 | 0.568805 |
invoice.ex
|
starcoder
|
defmodule Licensir.TableRex.Cell do
@moduledoc """
Defines a struct that represents a single table cell, and helper functions.
A cell stores both the original data _and_ the string-rendered version;
this decision was taken as a tradeoff: it uses more memory to store
the table structure, but the renderers gain the ability to get direct access
to the string-coerced data rather than having to risk repeated coercion or
handle their own storage of the computed values.
Fields:
* `raw_value`: The un-coerced original value
* `rendered_value`: The stringified value for rendering
* `wrapped_lines`: A list of 1 or more string values representing
the line(s) within the cell to be rendered
* `align`:
* `:left`: left align text in the cell.
* `:center`: center text in the cell.
* `:right`: right align text in the cell.
* `nil`: align text in cell according to column alignment.
* `color`: the ANSI color of the cell.
If creating a Cell manually: raw_value is the only required key to
enable that Cell to work well with the rest of TableRex. It should
be set to a piece of data that can be rendered to string.
"""
alias Licensir.TableRex.Cell
defstruct raw_value: nil, rendered_value: "", align: nil, color: nil, wrapped_lines: [""]
@type t :: %__MODULE__{}
@doc """
Converts the passed value to be a normalised %Cell{} struct.
If a non %Cell{} value is passed, this function returns a new
%Cell{} struct with:
* the `rendered_value` key set to the stringified binary of the
value passed in.
* the `raw_value` key set to original data passed in.
* any other options passed are applied over the normal struct
defaults, which allows overriding alignment & color.
If a %Cell{} is passed in with no `rendered_value` key, then the
`raw_value` key's value is rendered and saved against it, otherwise
the Cell is passed through untouched. This is so that advanced use
cases which require direct Cell creation and manipulation are not
hindered.
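For example (an illustrative coercion):
    cell = Licensir.TableRex.Cell.to_cell(42, align: :right)
    cell.raw_value      #=> 42
    cell.rendered_value #=> "42"
    cell.wrapped_lines  #=> ["42"]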
"""
@spec to_cell(Cell.t()) :: Cell.t()
def to_cell(%Cell{rendered_value: rendered_value} = cell)
when is_binary(rendered_value) and rendered_value != "" do
%Cell{cell | wrapped_lines: wrapped_lines(rendered_value)}
end
def to_cell(%Cell{raw_value: raw_value} = cell) do
rendered_value = to_string(raw_value)
%Cell{cell | rendered_value: rendered_value, wrapped_lines: wrapped_lines(rendered_value)}
end
@spec to_cell(any, list) :: Cell.t()
def to_cell(value, opts \\ [])
def to_cell(list, opts) when is_list(list) do
if List.improper?(list) do
list
|> to_string()
|> to_cell(opts)
else
list
|> Enum.join("\n")
|> to_cell(opts)
end
end
def to_cell(value, opts) do
opts = Enum.into(opts, %{})
rendered_value = to_string(value)
%Cell{
raw_value: value,
rendered_value: rendered_value,
wrapped_lines: wrapped_lines(rendered_value)
}
|> Map.merge(opts)
end
@spec height(Cell.t()) :: integer
def height(%Cell{wrapped_lines: lines}), do: length(lines)
defp wrapped_lines(value) when is_binary(value) do
String.split(value, "\n")
end
end
|
lib/table_rex/cell.ex
| 0.878725 | 0.865452 |
cell.ex
|
starcoder
|
defmodule Zaryn.Election.HypergeometricDistribution do
@moduledoc """
Hypergeometric distribution has the property to guarantee that even with 90% of malicious nodes
the risk that an honest node cannot detect a fraudulent transaction is only 10^-9, or one chance in one billion
(beyond the standards of acceptable risk for aviation or nuclear industries).
Therefore it describes the probability of `k` successes (detections of fraudulent operations) in `n`
draws (verifications) without repetition, given a total finite number of nodes `N` and
a number `N1` of malicious nodes (90%).
No matter how many nodes are running on the network, a control with a tiny part of the network (less than 200 nodes)
ensures the atomicity property of network transactions.
"""
use GenServer
def start_link(opts \\ []) do
GenServer.start_link(__MODULE__, opts, name: __MODULE__)
end
def init(_opts) do
executable = Application.app_dir(:zaryn, "/priv/c_dist/hypergeometric_distribution")
{:ok, %{executable: executable, previous_simulations: %{}}}
end
def handle_call(
{:run_simulation, nb_nodes},
_from,
state = %{executable: executable, previous_simulations: previous_simulations}
)
when is_integer(nb_nodes) and nb_nodes >= 0 do
case Map.get(previous_simulations, nb_nodes) do
nil ->
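# First simulation for this node count: run the external program via a
# port and memoize the parsed result in the server state.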
pid = Port.open({:spawn_executable, executable}, args: [Integer.to_string(nb_nodes)])
receive do
{^pid, {:data, data}} ->
{n, _} = :string.to_integer(data)
{:reply, n, put_in(state, [:previous_simulations, nb_nodes], n)}
end
simulation ->
{:reply, simulation, state}
end
end
@doc """
Execute the hypergeometric distribution simulation for a given number of nodes.
Because the simulation can take time when the number of nodes is big (such as 100,000),
previous results are stored in the GenServer state.
## Examples
iex> HypergeometricDistribution.run_simulation(100)
84
iex> HypergeometricDistribution.run_simulation(1000)
178
iex> HypergeometricDistribution.run_simulation(10000)
195
"""
@spec run_simulation(pos_integer) :: pos_integer
def run_simulation(nb_nodes) when is_integer(nb_nodes) and nb_nodes > 0 do
GenServer.call(__MODULE__, {:run_simulation, nb_nodes}, 60_000)
end
end
|
lib/zaryn/election/hypergeometric_distribution.ex
| 0.880489 | 0.792103 |
hypergeometric_distribution.ex
|
starcoder
|
defmodule Number do
@moduledoc """
Number library.
"""
@doc """
Round
## Examples
iex> Number.round( 123 )
123
iex> Number.round( 123.456 )
123.456
iex> Number.round( 123.456, 2 )
123.46
iex> Number.round( 123.456, 1 )
123.5
"""
def round( value, precision \\ -1 ) # <- default parameter function is separately declared
def round( value, precision ) when is_float( value ) == true, do: if precision >= 0, do: value |> Float.round( precision ), else: value
def round( value, _ ) when is_integer( value ) == true, do: value
@doc """
To number (returns float or integer)
## Examples
iex> Number.to_number( 123 )
123
iex> Number.to_number( 123.456 )
123.456
iex> Number.to_number( Decimal.from_float( 123456.78 ) )
123456.78
iex> Number.to_number( "123456.78" )
123456.78
"""
def to_number( value ) when is_binary( value ) == true, do: value |> String.to_float
def to_number( value ), do: if Decimal.decimal?( value ) == true, do: value |> Decimal.to_float, else: value
@doc """
To string
## Examples
iex> Number.to_string( 123 )
"123"
iex> Number.to_string( 123.456 )
"123.456"
iex> Number.to_string( 123.456, 2 )
"123.46"
iex> Number.to_string( 123.456, 1 )
"123.5"
iex> Number.to_string( Decimal.from_float( 123.456 ) )
"123.456"
iex> Number.to_string( "123.456" )
"123.456"
"""
def to_string( value, precision \\ -1 ) # <- default parameter function is separately declared
def to_string( value, precision ) when is_float( value ) == true, do: value |> Number.round( precision ) |> Float.to_string
def to_string( value, _ ) when is_integer( value ) == true, do: value |> Integer.to_string
def to_string( value, precision ) when is_binary( value ) == true, do: value |> String.to_float |> Number.round( precision ) |> Float.to_string
def to_string( value, precision ), do: if Decimal.decimal?( value ) == true, do: Decimal.to_float( value ) |> Number.round( precision ) |> Float.to_string
@doc """
Pad number with zeros (return string)
## Examples
iex> Number.pad_zero( 123 )
"00000000000123"
"""
def pad_zero( number, length \\ 14 ), do: pad( number, length, "0" )
@doc """
Pad number with the given padding (return string)
## Examples
iex> Number.pad( 123, 6, "_" )
"___123"
"""
def pad( number, length, padding ) when is_integer( number ) do
number |> Integer.to_string |> String.pad_leading( length, padding )
end
@doc """
Calculate percent (return string)
## Examples
iex> Number.percent( 100, 8 )
"12.5%"
iex> Number.percent( 0, 8 )
"0.0%"
iex> Number.percent( 100, 0 )
"(n/a)"
"""
def percent( numerator, denominator, precision \\ 2 ) do
cond do
denominator == 0 || denominator == nil -> "(n/a)"
denominator != 0 -> Number.to_string( numerator / denominator, precision ) <> "%"
end
end
@doc """
Add comma (return string)
## Examples
iex> Number.add_comma( 123 )
"123"
iex> Number.add_comma( 1234 )
"1,234"
iex> Number.add_comma( 1234.56 )
"1,234.56"
iex> Number.add_comma( 123456.78 )
"123,456.78"
iex> Number.add_comma( 1234567.890123, -1 )
"1,234,567.890,123"
iex> Number.add_comma( 1234567.890123 )
"1,234,567.89"
iex> Number.add_comma( Decimal.from_float( 123456.78 ) )
"123,456.78"
iex> Number.add_comma( "123456.78" )
"123,456.78"
"""
def add_comma( value, precision \\ 2 ), do: value |> Number.to_string( precision ) |> _insert_comma
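# Insert a comma before each digit that is followed by complete groups of
# three digits; note the examples above show that long fractional parts are
# grouped as well (e.g. "1234567.890123" -> "1,234,567.890,123").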
defp _insert_comma( value ), do: Regex.replace( ~r/(\d)(?=(\d\d\d)+(?!\d))/, value, "\\1," )
@doc """
To integer (return integer)
## Examples
iex> Number.to_integer( 1234 )
1234
iex> Number.to_integer( 1234.56 )
1234
iex> Number.to_integer( Decimal.from_float( 1234.56 ) )
1234
iex> Number.to_integer( "1234.56" )
1234
"""
def to_integer( value ), do: value |> Number.to_string |> _head_split_dot
defp _head_split_dot( value ), do: value |> String.split( "." ) |> List.first |> String.to_integer
@doc """
To percent (return string)
## Examples
iex> Number.to_percent( 123 )
"12300%"
iex> Number.to_percent( 0.123 )
"12.3%"
iex> Number.to_percent( 0.123456 )
"12.35%"
iex> Number.to_percent( Decimal.from_float( 0.123 ) )
"12.3%"
iex> Number.to_percent( Decimal.from_float( 0.123456 ) )
"12.35%"
iex> Number.to_percent( "0.123" )
"12.3%"
iex> Number.to_percent( "0.123456" )
"12.35%"
"""
def to_percent( value, precision \\ 2 ), do: Number.to_number( value ) * 100 |> Number.to_string( precision ) |> _add_percent
defp _add_percent( value ), do: value <> "%"
@doc """
Add sign (return string)
## Examples
iex> Number.add_sign( 0 )
"±0"
iex> Number.add_sign( 123 )
"+123"
iex> Number.add_sign( -123 )
"-123"
iex> Number.add_sign( 0.0 )
"±0.0"
iex> Number.add_sign( 0.00 )
"±0.0"
iex> Number.add_sign( 0.001 )
"+0.0"
iex> Number.add_sign( 0.001, 3 )
"+0.001"
iex> Number.add_sign( 0.001, -1 )
"+0.001"
iex> Number.add_sign( 0.09, 2 )
"+0.09"
iex> Number.add_sign( 0.09, 1 )
"+0.1"
iex> Number.add_sign( -0.001 )
"0.0"
iex> Number.add_sign( -0.001, 3 )
"-0.001"
iex> Number.add_sign( -0.001, -1 )
"-0.001"
"""
def add_sign( value, precision \\ 2 ) do
cond do
Number.to_number( value ) == 0 -> "±" <> Number.to_string( value, precision )
Number.to_number( value ) > 0 -> "+" <> Number.to_string( value, precision )
Number.to_number( value ) < 0 -> Number.to_string( value, precision )
end
end
end
|
lib/number.ex
| 0.715325 | 0.47658 |
number.ex
|
starcoder
|
defmodule MeshxRpc.Client.Worker do
@moduledoc false
@behaviour :gen_statem
@behaviour :poolboy_worker
alias MeshxRpc.App.T
alias MeshxRpc.Common.{Telemetry, Structs.Data, Structs.Svc}
alias MeshxRpc.Protocol.{Hsk, Block.Decode, Block.Encode}
@error_prefix :error_rpc
@error_prefix_remote :error_rpc_remote
@reconnect_result :ok_reconnect
@impl :poolboy_worker
def start_link([args, opts]), do: :gen_statem.start_link(__MODULE__, args, opts)
@impl :gen_statem
def callback_mode(), do: [:state_functions, :state_enter]
@impl :gen_statem
def init({data, node_ref_mfa, svc_ref_mfa, conn_ref_mfa}) do
data = %Data{data | local: Svc.init(node_ref_mfa, svc_ref_mfa, conn_ref_mfa)}
{:ok, :closed, data, [{:next_event, :internal, :connect}]}
end
@impl :gen_statem
def terminate(_reason, _state, %Data{} = data) when is_port(data.socket), do: :gen_tcp.close(data.socket)
# closed -> hsk -> idle -> send -> recv -> reply -> idle -> ...
# ________closed
def closed(:enter, :closed, _data), do: :keep_state_and_data
def closed(:enter, _old_state, %Data{} = data) do
:ok = :gen_tcp.close(data.socket)
Telemetry.execute(data)
{:keep_state, Data.reset_full(data)}
end
def closed(:internal, :connect, %Data{} = data) do
{_type, ip, port} = data.address
case :gen_tcp.connect(ip, port, data.socket_opts, data.timeout_connect) do
{:ok, socket} ->
data = %Data{data | socket: socket}
{:next_state, :hsk, data, [{:next_event, :internal, :start_hsk}]}
{:error, error} ->
data = %Data{data | result: {@error_prefix, error}}
Telemetry.execute(data)
{:keep_state_and_data, [{{:timeout, :reconnect}, T.rand_retry(data.retry_proxy_fail), []}]}
end
end
def closed({:timeout, :reconnect}, [], _data), do: {:keep_state_and_data, [{:next_event, :internal, :connect}]}
def closed({:call, from}, {:request, _request}, _data),
do: {:keep_state_and_data, [{:reply, from, {@error_prefix, :closed}}]}
# ________hsk
def hsk(:enter, :closed, %Data{} = data),
do: {:keep_state, %Data{data | state: :hsk} |> Data.start_time(:hsk), [{:state_timeout, data.timeout_hsk, :reconnect}]}
def hsk(:internal, :start_hsk, %Data{} = data) do
data = %Data{data | hsk_ref: System.unique_integer([:positive])}
payload = Hsk.encode(:req, data)
case :gen_tcp.send(data.socket, payload) do
:ok ->
{:keep_state, Data.inc_size(data, byte_size(payload), :send) |> Data.inc_blk(:send)}
{:error, error} ->
data = %Data{data | result: {@error_prefix, error}} |> Data.set_time(:hsk)
{:next_state, :closed, data, [{{:timeout, :reconnect}, T.rand_retry(data.retry_hsk_fail), []}]}
end
end
def hsk(:info, {:tcp, _socket, payload}, %Data{} = data) do
:inet.setopts(data.socket, active: :once)
case Hsk.decode(payload, %Data{} = data) do
{:ok, data} ->
data = %Data{data | result: :ok} |> Data.set_time(:hsk) |> Data.inc_size(byte_size(payload), :recv) |> Data.inc_blk(:recv)
Telemetry.execute(data)
{:next_state, :idle, data}
{:error, error, data} ->
data = %Data{data | result: {@error_prefix, error}} |> Data.set_time(:hsk)
{:next_state, :closed, data, [{{:timeout, :reconnect}, T.rand_retry(data.retry_hsk_fail), []}]}
{@error_prefix_remote, error} ->
data = %Data{data | result: {@error_prefix_remote, error}} |> Data.set_time(:hsk)
{:next_state, :closed, data, [{{:timeout, :reconnect}, T.rand_retry(data.retry_hsk_fail), []}]}
end
end
def hsk({:call, from}, {:request, _request}, _data), do: {:keep_state_and_data, [{:reply, from, {@error_prefix, :closed}}]}
def hsk(:state_timeout, :reconnect, %Data{} = data) do
data = %Data{data | result: {@error_prefix, :timeout_hsk}} |> Data.set_time(:hsk)
{:next_state, :closed, data, [{:next_event, :internal, :connect}]}
end
def hsk(:info, {:tcp_closed, _socket}, %Data{} = data) do
data = %Data{data | result: {@error_prefix, :tcp_closed}} |> Data.set_time(:hsk)
{:next_state, :closed, data, [{{:timeout, :reconnect}, T.rand_retry(data.retry_hsk_fail), []}]}
end
def hsk(:info, {:tcp_error, _socket, reason}, %Data{} = data) do
data = %Data{data | result: {@error_prefix, reason}} |> Data.set_time(:hsk)
{:next_state, :closed, data, [{{:timeout, :reconnect}, T.rand_retry(data.retry_hsk_fail), []}]}
end
# ________idle
def idle(:enter, :hsk, %Data{} = data) do
:inet.setopts(data.socket, packet: 4)
{:keep_state, %Data{data | state: :idle} |> Data.reset_request() |> Data.start_time(:idle),
[{:state_timeout, T.rand_retry(data.idle_reconnect), :reconnect}]}
end
def idle(:enter, :reply, %Data{} = data) do
Telemetry.execute(data)
{:keep_state, Data.reset_request(data) |> Data.start_time(:idle),
[{:state_timeout, T.rand_retry(data.idle_reconnect), :reconnect}]}
end
def idle({:call, from}, {:request, {fun_req, fun_name, args}}, %Data{} = data) do
data =
%Data{data | fun_name: fun_name, fun_req: fun_req, reply_to: from, req_ref: System.unique_integer([:positive])}
|> Data.set_time(:idle)
case Encode.encode(:request, data, args) do
{:ok, data} ->
{:next_state, :send, data, [{:next_event, :internal, :start}]}
{:error, e} ->
{:next_state, :closed, %Data{data | result: {@error_prefix, e}}, [{:next_event, :internal, :connect}]}
end
end
def idle(:state_timeout, :reconnect, %Data{} = data) do
data = %Data{data | result: @reconnect_result} |> Data.set_time(:idle)
{:next_state, :closed, data, [{:next_event, :internal, :connect}]}
end
def idle(:info, {:tcp_closed, _socket}, %Data{} = data) do
data = %Data{data | result: {@error_prefix, :tcp_closed}} |> Data.set_time(:idle)
{:next_state, :closed, data, [{{:timeout, :reconnect}, T.rand_retry(data.retry_idle_error), []}]}
end
def idle(:info, {:tcp_error, _socket, reason}, %Data{} = data) do
data = %Data{data | result: {@error_prefix, reason}} |> Data.set_time(:idle)
{:next_state, :closed, data, [{{:timeout, :reconnect}, T.rand_retry(data.retry_idle_error), []}]}
end
# ________send
def send(:enter, :idle, %Data{} = data) do
data = %Data{data | state: :send} |> Data.start_time(:send)
{:keep_state, data}
end
def send(:internal, :start, %Data{} = data) do
if is_nil(data.cks_mfa) do
{:keep_state, data, [{:next_event, :internal, :send}]}
else
{m, f, o} = data.cks_mfa
cks = apply(m, f, [hd(data.dta), o])
{:keep_state, %Data{data | cks_bin: cks}, [{:next_event, :internal, :send}]}
end
end
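# Blocks are sent one at a time. With checksums enabled, the checksum of the
# *next* block is computed concurrently in a spawned worker while the current
# block is on the wire; its result arrives as a `:cks_gen` info message below.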
def send(:internal, :send, %Data{} = data) do
[blk | tail] = data.dta
{payload, data} =
cond do
is_nil(data.cks_mfa) ->
{blk, data}
Enum.empty?(tail) ->
{blk <> data.cks_bin, data}
true ->
next = hd(tail)
{m, f, o} = data.cks_mfa
len = length(tail)
from = self()
pid =
spawn_link(fn ->
cks = apply(m, f, [next, o])
send(from, {:cks_gen, cks, len, self()})
end)
cks_size = byte_size(data.cks_bin)
{blk <> <<cks_size::integer-unsigned-size(32)>> <> data.cks_bin, %Data{data | workers: pid}}
end
case :gen_tcp.send(data.socket, payload) do
:ok ->
data = Data.inc_size(data, byte_size(payload), :send) |> Data.inc_blk(:send)
if Enum.empty?(tail) do
{:next_state, :recv, Data.set_time(data, :send)}
else
if is_nil(data.cks_mfa),
do: {:keep_state, %Data{data | dta: tail}, [{:next_event, :internal, :send}]},
else: {:keep_state, %Data{data | dta: tail}, [{:state_timeout, data.timeout_cks, :timeout_cks}]}
end
{:error, :closed} ->
{:keep_state, %Data{data | result: {@error_prefix, :closed}},
[{:state_timeout, data.timeout_connect, :timeout_receive_err}]}
{:error, reason} ->
data = %Data{data | result: {@error_prefix, reason}} |> Data.set_time(:send)
{:next_state, :closed, data, [{:next_event, :internal, :connect}]}
end
end
def send(:info, {:cks_gen, cks, len, from}, %Data{} = data) do
if length(data.dta) == len and from == data.workers do
{:keep_state, %Data{data | cks_bin: cks}, [{:next_event, :internal, :send}]}
else
data = %Data{data | result: {@error_prefix, :invalid_state}} |> Data.set_time(:send)
{:next_state, :closed, data, [{:next_event, :internal, :connect}]}
end
end
def send(:state_timeout, :timeout_receive_err, %Data{} = data),
do:
{:next_state, :reply, %Data{data | result: {@error_prefix, :tcp_closed}} |> Data.set_time(:send),
[{:next_event, :internal, :reply_close}]}
def send(:state_timeout, :timeout_cks, %Data{} = data),
do:
{:next_state, :reply, %Data{data | result: {@error_prefix, :timeout_cks}} |> Data.set_time(:send),
[{:next_event, :internal, :reply_close}]}
def send(:info, {:tcp, _socket, payload}, %Data{} = data) do
:inet.setopts(data.socket, active: :once)
data = Data.inc_size(data, byte_size(payload), :recv) |> Data.inc_blk(:recv) |> Data.set_time(:send)
case Decode.decode(payload, data) do
{@error_prefix_remote, e} ->
{:next_state, :reply, %Data{data | result: {@error_prefix_remote, e}}, [{:next_event, :internal, :reply_close}]}
{:error, e} ->
{:next_state, :reply, %Data{data | result: {@error_prefix, e}}, [{:next_event, :internal, :reply_close}]}
_ ->
{:next_state, :reply, %Data{data | result: {@error_prefix, :invalid_state}}, [{:next_event, :internal, :reply_close}]}
end
end
def send(:info, {:tcp_closed, _socket}, %Data{} = data),
do:
{:next_state, :reply, %Data{data | result: {@error_prefix, :tcp_closed}} |> Data.set_time(:send),
[{:next_event, :internal, :reply_close}]}
def send(:info, {:tcp_error, _socket, reason}, %Data{} = data),
do:
{:next_state, :reply, %Data{data | result: {@error_prefix, reason}} |> Data.set_time(:send),
[{:next_event, :internal, :reply_close}]}
# ________recv
def recv(:enter, :send, %Data{} = data) do
data = %Data{data | dta: [], req_seq: nil, state: :recv, workers: []} |> Data.start_time(:exec)
{:keep_state, data}
end
def recv(:info, {:tcp, _socket, payload}, %Data{} = data) do
:inet.setopts(data.socket, active: :once)
data = if Enum.empty?(data.dta), do: Data.set_time(data, :exec, :recv), else: data
data = Data.inc_size(data, byte_size(payload), :recv) |> Data.inc_blk(:recv)
case Decode.decode(payload, %Data{} = data) do
:ok_ack ->
{:next_state, :reply, %Data{data | result: :ok} |> Data.set_time(:recv), [{:next_event, :internal, :reply}]}
{@error_prefix_remote, error} ->
{:next_state, :reply, %Data{data | result: {@error_prefix_remote, error}} |> Data.set_time(:recv),
[{:next_event, :internal, :reply_close}]}
{:error, err} ->
{:next_state, :reply, %Data{data | result: {@error_prefix, err}} |> Data.set_time(:recv),
[{:next_event, :internal, :reply_close}]}
{:cont, data, hdr, cks} ->
data = Data.maybe_cks(self(), data, hdr, cks)
{:keep_state, data}
{:ok, data, hdr, cks, ser_flag} ->
data = Data.maybe_cks(self(), data, hdr, cks)
case Decode.bin_to_args(data, ser_flag) do
{:ok, result, dser} ->
met = %{data.metrics | time: %{data.metrics.time | dser: dser}}
{:keep_state, %Data{data | result: result, state: :recv_fin, metrics: met}, [{:next_event, :internal, :wait_for_cks}]}
{:error, err} ->
{:next_state, :reply, %Data{data | result: {@error_prefix, err}} |> Data.set_time(:recv),
[{:next_event, :internal, :reply_terminate}]}
end
end
end
def recv(:info, {:cks_check, :valid, pid}, %Data{} = data) do
if Enum.member?(data.workers, pid) do
workers = List.delete(data.workers, pid)
data = %Data{data | workers: workers}
if data.state == :recv_fin,
do: {:keep_state, data, [{:next_event, :internal, :wait_for_cks}]},
else: {:keep_state, data}
else
{:next_state, :reply, %Data{data | state: :recv, result: {@error_prefix, :invalid_state}} |> Data.set_time(:recv),
[{:next_event, :internal, :reply_close}]}
end
end
def recv(:info, {:cks_check, :invalid}, %Data{} = data) do
act = if is_nil(data.cks_mfa), do: {:next_event, :internal, :reply_close}, else: {:next_event, :internal, :reply_terminate}
{:next_state, :reply, %Data{data | state: :recv, result: {@error_prefix, :invalid_cks}} |> Data.set_time(:recv), [act]}
end
def recv(:internal, :wait_for_cks, %Data{} = data) do
if Enum.empty?(data.workers),
do:
{:next_state, :reply, %Data{data | state: :recv, telemetry_result: :ok} |> Data.set_time(:recv),
[{:next_event, :internal, :reply}]},
else: {:keep_state_and_data, [{:state_timeout, data.timeout_cks, :timeout_cks}]}
end
def recv(:state_timeout, :timeout_cks, %Data{} = data) do
act = if is_nil(data.cks_mfa), do: {:next_event, :internal, :reply_close}, else: {:next_event, :internal, :reply_terminate}
{:next_state, :reply, %Data{data | state: :recv, result: {@error_prefix, :timeout_cks}} |> Data.set_time(:recv), [act]}
end
def recv(:info, {:tcp_closed, _socket}, %Data{} = data) do
act = if is_nil(data.cks_mfa), do: {:next_event, :internal, :reply_close}, else: {:next_event, :internal, :reply_terminate}
{:next_state, :reply, %Data{data | state: :recv, result: {@error_prefix, :tcp_closed}} |> Data.set_time(:recv), [act]}
end
def recv(:info, {:tcp_error, _socket, reason}, %Data{} = data) do
act = if is_nil(data.cks_mfa), do: {:next_event, :internal, :reply_close}, else: {:next_event, :internal, :reply_terminate}
{:next_state, :reply, %Data{data | state: :recv, result: {@error_prefix, reason}} |> Data.set_time(:recv), [act]}
end
# ________reply
def reply(:enter, :send, data), do: {:keep_state, %Data{data | state: :reply}}
def reply(:enter, :recv, data), do: {:keep_state, %Data{data | state: :reply}}
def reply(:internal, :reply, %Data{} = data) do
:ok = :gen_statem.reply(data.reply_to, data.result)
{:next_state, :idle, data}
end
def reply(:internal, :reply_close, %Data{} = data) do
:ok = :gen_statem.reply(data.reply_to, data.result)
{:next_state, :closed, data, [{:next_event, :internal, :connect}]}
end
def reply(:internal, :reply_terminate, %Data{} = data) do
:ok = :gen_statem.reply(data.reply_to, data.result)
Telemetry.execute(data)
{:stop, :normal}
end
def reply(:info, _any, %Data{} = _data), do: {:stop, :normal}
end
|
lib/client/worker.ex
| 0.625095 | 0.438545 |
worker.ex
|
starcoder
|
defmodule Tox.NaiveDateTime do
@moduledoc """
A set of functions to work with `NaiveDateTime`.
"""
alias Tox.IsoDays
@doc """
Shifts the `naive_datetime` by the given `duration`.
The `durations` argument is a keyword list of one or more durations of the type
`Tox.duration` e.g. `[year: 1, day: 5, minute: 500]`. All values will be
shifted from the largest to the smallest unit.
## Examples
iex> naive_datetime = ~N[2000-01-01 00:00:00]
iex> Tox.NaiveDateTime.shift(naive_datetime, year: 2)
~N[2002-01-01 00:00:00]
iex> Tox.NaiveDateTime.shift(naive_datetime, year: -2, month: 1, hour: 48)
~N[1998-02-03 00:00:00]
iex> Tox.NaiveDateTime.shift(naive_datetime, hour: 10, minute: 10, second: 10)
~N[2000-01-01 10:10:10]
Adding a month at the end of the month can update the day too.
iex> Tox.NaiveDateTime.shift(~N[2000-01-31 00:00:00], month: 1)
~N[2000-02-29 00:00:00]
For that reason it is important to know that all values will be shifted from the
largest to the smallest unit.
iex> naive_datetime = ~N[2000-01-30 00:00:00]
iex> Tox.NaiveDateTime.shift(naive_datetime, month: 1, day: 1)
~N[2000-03-01 00:00:00]
iex> naive_datetime |> Tox.NaiveDateTime.shift(month: 1) |> Tox.NaiveDateTime.shift(day: 1)
~N[2000-03-01 00:00:00]
iex> naive_datetime |> Tox.NaiveDateTime.shift(day: 1) |> Tox.NaiveDateTime.shift(month: 1)
~N[2000-02-29 00:00:00]
Using `shift/2` with a different calendar.
iex> ~N[2012-09-03 02:30:00]
...> |> NaiveDateTime.convert!(Cldr.Calendar.Ethiopic)
...> |> Tox.NaiveDateTime.shift(day: 6)
%NaiveDateTime{
calendar: Cldr.Calendar.Ethiopic,
year: 2004,
month: 13,
day: 4,
hour: 2,
minute: 30,
second: 0,
microsecond: {0, 0}
}
"""
@spec shift(Calendar.naive_datetime(), [Tox.duration()]) :: NaiveDateTime.t()
def shift(%{calendar: calendar, microsecond: {_, precision}} = naive_datetime, durations) do
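# Shift the date-based durations (year, month, week, day) first via
# `Tox.Date.shift/2`, then apply the time-based durations through
# ISO-days arithmetic.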
naive_datetime
|> Tox.Date.shift(durations)
|> from_date_time(naive_datetime)
|> IsoDays.from_naive_datetime()
|> IsoDays.add(IsoDays.from_durations_time(durations, calendar, precision))
|> from_iso_days(calendar, precision)
end
@doc """
Returns true if `naive_datetime1` occurs after `naive_datetime2`.
## Examples
iex> Tox.NaiveDateTime.after?(
...> ~N[2020-06-14 15:01:43.999999],
...> ~N[2020-06-14 15:01:43.000001]
...> )
true
iex> Tox.NaiveDateTime.after?(
...> ~N[2020-06-14 15:01:43],
...> ~N[2020-06-14 15:01:43]
...> )
false
iex> Tox.NaiveDateTime.after?(
...> ~N[2020-06-14 15:01:43.000001],
...> ~N[2020-06-14 15:01:43.999999]
...> )
false
"""
defmacro after?(naive_datetime1, naive_datetime2) do
quote do
NaiveDateTime.compare(unquote(naive_datetime1), unquote(naive_datetime2)) == :gt
end
end
@doc """
Returns true if `naive_datetime1` occurs after `naive_datetime2` or both naive
datetimes are equal.
## Examples
iex> Tox.NaiveDateTime.after_or_equal?(
...> ~N[2020-06-14 15:01:43.999999],
...> ~N[2020-06-14 15:01:43.000001]
...> )
true
iex> Tox.NaiveDateTime.after_or_equal?(
...> ~N[2020-06-14 15:01:43],
...> ~N[2020-06-14 15:01:43]
...> )
true
iex> Tox.NaiveDateTime.after_or_equal?(
...> ~N[2020-06-14 15:01:43.000001],
...> ~N[2020-06-14 15:01:43.999999]
...> )
false
"""
defmacro after_or_equal?(naive_datetime1, naive_datetime2) do
quote do
NaiveDateTime.compare(unquote(naive_datetime1), unquote(naive_datetime2)) in [:gt, :eq]
end
end
@doc """
Returns true if both naive datetimes are equal.
## Examples
iex> Tox.NaiveDateTime.equal?(
...> ~N[2020-06-14 15:01:43.999999],
...> ~N[2020-06-14 15:01:43.000001]
...> )
false
iex> Tox.NaiveDateTime.equal?(
...> ~N[2020-06-14 15:01:43],
...> ~N[2020-06-14 15:01:43]
...> )
true
iex> Tox.NaiveDateTime.equal?(
...> ~N[2020-06-14 15:01:43.000001],
...> ~N[2020-06-14 15:01:43.999999]
...> )
false
"""
defmacro equal?(naive_datetime1, naive_datetime2) do
quote do
NaiveDateTime.compare(unquote(naive_datetime1), unquote(naive_datetime2)) == :eq
end
end
@doc """
Returns true if `naive_datetime1` occurs before `naive_datetime2`.
## Examples
iex> Tox.NaiveDateTime.before?(
...> ~N[2020-06-14 15:01:43.000001],
...> ~N[2020-06-14 15:01:43.999999]
...> )
true
iex> Tox.NaiveDateTime.before?(
...> ~N[2020-06-14 15:01:43],
...> ~N[2020-06-14 15:01:43]
...> )
false
iex> Tox.NaiveDateTime.before?(
...> ~N[2020-06-14 15:01:43.999999],
...> ~N[2020-06-14 15:01:43.000001]
...> )
false
"""
defmacro before?(naive_datetime1, naive_datetime2) do
quote do
NaiveDateTime.compare(unquote(naive_datetime1), unquote(naive_datetime2)) == :lt
end
end
@doc """
Returns true if `naive_datetime1` occurs before `naive_datetime2` or both
naive datetimes are equal.
## Examples
iex> Tox.NaiveDateTime.before_or_equal?(
...> ~N[2020-06-14 15:01:43.000001],
...> ~N[2020-06-14 15:01:43.999999]
...> )
true
iex> Tox.NaiveDateTime.before_or_equal?(
...> ~N[2020-06-14 15:01:43],
...> ~N[2020-06-14 15:01:43]
...> )
true
iex> Tox.NaiveDateTime.before_or_equal?(
...> ~N[2020-06-14 15:01:43.999999],
...> ~N[2020-06-14 15:01:43.000001]
...> )
false
"""
defmacro before_or_equal?(naive_datetime1, naive_datetime2) do
quote do
NaiveDateTime.compare(unquote(naive_datetime1), unquote(naive_datetime2)) in [:lt, :eq]
end
end
@doc """
Returns a naive datetime representing the start of the year.
## Examples
iex> Tox.NaiveDateTime.beginning_of_year(~N[2020-11-11 11:11:11])
~N[2020-01-01 00:00:00]
"""
@spec beginning_of_year(Calendar.naive_datetime()) :: Calendar.naive_datetime()
def beginning_of_year(naive_datetime),
do: beginning_of_day(%{naive_datetime | month: 1, day: 1})
@doc """
Returns a naive datetime representing the start of the month.
## Examples
iex> Tox.NaiveDateTime.beginning_of_month(~N[2020-11-11 11:11:11])
~N[2020-11-01 00:00:00]
"""
@spec beginning_of_month(Calendar.naive_datetime()) :: NaiveDateTime.t()
def beginning_of_month(naive_datetime) do
beginning_of_day(%{naive_datetime | day: 1})
end
@doc """
Returns a naive datetime representing the start of the week.
## Examples
iex> Tox.NaiveDateTime.beginning_of_week(~N[2020-07-22 11:11:11])
~N[2020-07-20 00:00:00]
"""
@spec beginning_of_week(Calendar.naive_datetime()) :: NaiveDateTime.t()
def beginning_of_week(naive_datetime) do
naive_datetime
|> shift(day: Tox.Calendar.beginning_of_week(naive_datetime))
|> beginning_of_day()
end
@doc """
Returns a naive datetime representing the start of the day.
## Examples
iex> Tox.NaiveDateTime.beginning_of_day(~N[2020-03-29 13:00:00.123456])
~N[2020-03-29 00:00:00.000000]
"""
@spec beginning_of_day(Calendar.naive_datetime()) :: NaiveDateTime.t()
def beginning_of_day(
%{
calendar: calendar,
year: year,
month: month,
day: day,
microsecond: {_, precision}
} = naive_datetime
) do
case NaiveDateTime.new(year, month, day, 0, 0, 0, {0, precision}, calendar) do
{:ok, new_naive_datetime} ->
new_naive_datetime
{:error, reason} ->
raise ArgumentError,
"cannot set #{inspect(naive_datetime)} to beginning of day, " <>
"reason: #{inspect(reason)}"
end
end
@doc """
Returns a boolean indicating whether `naive_datetime` occurs between `from`
and `to`. The optional `boundaries` specifies whether `from` and `to` are
included or not. The possible value for `boundaries` are:
* `:open`: `from` and `to` are excluded
* `:closed`: `from` and `to` are included
* `:left_open`: `from` is excluded and `to` is included
* `:right_open`: `from` is included and `to` is excluded
## Examples
iex> from = ~N[2020-04-05 12:30:00]
iex> to = ~N[2020-04-15 12:30:00]
iex> Tox.NaiveDateTime.between?(~N[2020-04-01 12:00:00], from, to)
false
iex> Tox.NaiveDateTime.between?(~N[2020-04-11 12:30:00], from, to)
true
iex> Tox.NaiveDateTime.between?(~N[2020-04-21 12:30:00], from, to)
false
iex> Tox.NaiveDateTime.between?(from, from, to)
true
iex> Tox.NaiveDateTime.between?(to, from, to)
false
iex> Tox.NaiveDateTime.between?(from, from, to, :open)
false
iex> Tox.NaiveDateTime.between?(to, from, to, :open)
false
iex> Tox.NaiveDateTime.between?(from, from, to, :closed)
true
iex> Tox.NaiveDateTime.between?(to, from, to, :closed)
true
iex> Tox.NaiveDateTime.between?(from, from, to, :left_open)
false
iex> Tox.NaiveDateTime.between?(to, from, to, :left_open)
true
iex> Tox.NaiveDateTime.between?(~N[1900-01-01 00:00:00], to, from)
** (ArgumentError) from is equal to or greater than to
"""
@spec between?(
Calendar.naive_datetime(),
Calendar.naive_datetime(),
Calendar.naive_datetime(),
Tox.boundaries()
) ::
boolean()
def between?(naive_datetime, from, to, boundaries \\ :right_open)
when boundaries in [:closed, :left_open, :right_open, :open] do
if NaiveDateTime.compare(from, to) in [:gt, :eq],
do: raise(ArgumentError, "from is equal to or greater than to")
case {
NaiveDateTime.compare(naive_datetime, from),
NaiveDateTime.compare(naive_datetime, to),
boundaries
} do
{:lt, _, _} -> false
{_, :gt, _} -> false
{:eq, _, :closed} -> true
{:eq, _, :right_open} -> true
{_, :eq, :closed} -> true
{_, :eq, :left_open} -> true
{:gt, :lt, _} -> true
{_, _, _} -> false
end
end
@doc """
Returns a naive datetime representing the end of the year.
## Examples
iex> Tox.NaiveDateTime.end_of_year(~N[2020-03-29 01:00:00])
~N[2020-12-31 23:59:59.999999]
With the Ethiopic calendar.
iex> naive_datetime = NaiveDateTime.convert!(~N[2020-10-26 02:30:00], Cldr.Calendar.Ethiopic)
iex> to_string(naive_datetime)
"2013-02-16 02:30:00"
iex> naive_datetime |> Tox.NaiveDateTime.end_of_year() |> to_string()
"2013-13-05 23:59:59.999999"
"""
@spec end_of_year(Calendar.naive_datetime()) :: NaiveDateTime.t()
def end_of_year(%{calendar: calendar, year: year} = naive_datetime) do
month = calendar.months_in_year(year)
day = calendar.days_in_month(year, month)
end_of_day(%{naive_datetime | month: month, day: day})
end
@doc """
Returns a naive datetime representing the end of the month.
## Examples
iex> Tox.NaiveDateTime.end_of_month(~N[2020-11-11 11:11:11])
~N[2020-11-30 23:59:59.999999]
"""
@spec end_of_month(Calendar.naive_datetime()) :: NaiveDateTime.t()
def end_of_month(%{calendar: calendar, year: year, month: month} = naive_datetime) do
day = calendar.days_in_month(year, month)
end_of_day(%{naive_datetime | day: day})
end
@doc """
Returns a naive datetime representing the end of the week.
## Examples
iex> Tox.NaiveDateTime.end_of_week(~N[2020-07-22 11:11:11])
~N[2020-07-26 23:59:59.999999]
"""
@spec end_of_week(Calendar.naive_datetime()) :: NaiveDateTime.t()
def end_of_week(%{calendar: calendar, year: year, month: month, day: day} = naive_datetime) do
day = Tox.days_per_week() - Tox.day_of_week(calendar, year, month, day)
naive_datetime
|> shift(day: day)
|> end_of_day()
end
@doc """
Returns a naive datetime representing the end of the day.
## Examples
iex> Tox.NaiveDateTime.end_of_day(~N[2020-03-29 01:00:00])
~N[2020-03-29 23:59:59.999999]
"""
@spec end_of_day(Calendar.naive_datetime()) :: NaiveDateTime.t()
def end_of_day(%{calendar: calendar, year: year, month: month, day: day} = naive_datetime) do
{hour, minute, second, microsecond} = Tox.Time.max_tuple(calendar)
case NaiveDateTime.new(year, month, day, hour, minute, second, microsecond, calendar) do
{:ok, new_naive_datetime} ->
new_naive_datetime
{:error, reason} ->
raise ArgumentError,
"cannot set #{inspect(naive_datetime)} to end of day, " <>
"reason: #{inspect(reason)}"
end
end
@doc """
Returns a `{year, week}` tuple representing the ISO week number for the specified
date.
This function is only defined for naive datetimes with `Calendar.ISO`.
## Examples
iex> Tox.NaiveDateTime.week(~N[2017-01-01 01:00:00])
{2016, 52}
iex> Tox.NaiveDateTime.week(~N[2019-12-31 01:00:00])
{2020, 1}
iex> Tox.NaiveDateTime.week(~N[2020-01-01 01:00:00])
{2020, 1}
iex> ~N[2020-06-04 11:12:13]
...> |> NaiveDateTime.convert!(Cldr.Calendar.Coptic)
...> |> Tox.NaiveDateTime.week()
** (FunctionClauseError) no function clause matching in Tox.NaiveDateTime.week/1
"""
@spec week(Calendar.naive_datetime()) :: {Calendar.year(), non_neg_integer}
def week(%{calendar: Calendar.ISO} = naive_datetime), do: Tox.week(naive_datetime)
## Helpers
@doc false
@spec from_date_time(Calendar.date(), Calendar.time()) :: NaiveDateTime.t()
def from_date_time(
%{calendar: calendar, year: year, month: month, day: day},
%{
calendar: calendar,
hour: hour,
minute: minute,
second: second,
microsecond: microsecond
}
) do
{:ok, naive_datetime} =
NaiveDateTime.new(year, month, day, hour, minute, second, microsecond, calendar)
naive_datetime
end
@doc false
@spec from_iso_days(Calendar.iso_days(), Calendar.calendar(), non_neg_integer) ::
NaiveDateTime.t()
def from_iso_days(iso_days, calendar, precision) do
{year, month, day, hour, minute, second, {microsecond, _}} =
calendar.naive_datetime_from_iso_days(iso_days)
%NaiveDateTime{
calendar: calendar,
year: year,
month: month,
day: day,
hour: hour,
minute: minute,
second: second,
microsecond: {microsecond, precision}
}
end
end
|
lib/tox/naive_datetime.ex
| 0.923351 | 0.598928 |
naive_datetime.ex
|
starcoder
|
defmodule SpiderMan do
@moduledoc """
SpiderMan, a fast high-level web crawling & scraping framework for Elixir.
## Components
Each spider has 3 components, and each component has its own job:
* [Downloader](SpiderMan.Component.Downloader.html): Download request.
* [Spider](SpiderMan.Component.Spider.html): Analyze web pages.
* [ItemProcessor](SpiderMan.Component.ItemProcessor.html): Store items.
Message flow: `Downloader` -> `Spider` -> `ItemProcessor`.
## Spider Life Cycle
0. `Spider.settings()`
1. Prepare For Start Stage
1. `Spider.prepare_for_start(:pre, state)`
2. `Spider.prepare_for_start_component(:downloader, state)`
3. `Spider.prepare_for_start_component(:spider, state)`
4. `Spider.prepare_for_start_component(:item_processor, state)`
5. `Spider.prepare_for_start(:post, state)`
2. `Spider.init(state)`
3. `Spider.handle_response(response, context)`
4. Prepare For Stop Stage
1. `Spider.prepare_for_stop_component(:downloader, state)`
2. `Spider.prepare_for_stop_component(:spider, state)`
3. `Spider.prepare_for_stop_component(:item_processor, state)`
4. `Spider.prepare_for_stop(state)`
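## Quick Start
A minimal, illustrative spider (a real spider would build requests and items
from the response):
    defmodule MySpider do
      use SpiderMan

      @impl true
      def handle_response(_response, _context) do
        %{requests: [], items: []}
      end
    end

    SpiderMan.start(MySpider)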
"""
alias SpiderMan.{Configuration, Engine, Item, Request, Response}
@type spider :: module | atom
@typedoc """
#{Configuration.configuration_docs()}
"""
@type settings :: keyword
@type status :: :running | :suspended
@type request :: Request.t()
@type requests :: [request]
@type component :: :downloader | :spider | :item_processor
@type ets_stats :: [size: pos_integer, memory: pos_integer] | nil
@type prepare_for_start_stage :: :pre | :post
@callback handle_response(Response.t(), context :: map) :: %{
optional(:requests) => [Request.t()],
optional(:items) => [Item.t()]
}
@callback settings() :: settings
@callback init(state) :: state when state: Engine.state()
@callback prepare_for_start(prepare_for_start_stage, state) :: state when state: Engine.state()
@callback prepare_for_stop(Engine.state()) :: :ok
@callback prepare_for_start_component(component, options | false) :: options
when options: keyword
@callback prepare_for_stop_component(component, options :: keyword | false) :: :ok
@optional_callbacks settings: 0,
init: 1,
prepare_for_start: 2,
prepare_for_stop: 1,
prepare_for_start_component: 2,
prepare_for_stop_component: 2
@doc false
defmacro __using__(_opts \\ []) do
quote do
import SpiderMan.Utils,
only: [
build_request: 1,
build_request: 2,
build_request: 3,
build_requests: 1,
build_requests: 2,
build_requests: 3,
build_item: 2,
build_item: 3,
build_item: 4,
set_key: 2,
set_flag: 2
]
import SpiderMan, only: [insert_request: 2, insert_requests: 2]
@behaviour SpiderMan
end
end
@doc """
start a spider
"""
@spec start(spider, settings) :: Supervisor.on_start_child()
defdelegate start(spider, settings \\ []), to: SpiderMan.Application, as: :start_child
@doc "stop a spider"
@spec stop(spider) :: :ok | {:error, error} when error: :not_found | :running | :restarting
defdelegate stop(spider), to: SpiderMan.Application, as: :stop_child
@doc "fetch spider's status"
@spec status(spider) :: status
defdelegate status(spider), to: Engine
@doc "fetch spider's state"
@spec get_state(spider) :: Engine.state()
defdelegate get_state(spider), to: Engine
@doc "suspend a spider"
@spec suspend(spider, timeout) :: :ok
defdelegate suspend(spider, timeout \\ :infinity), to: Engine
@doc "continue a spider"
@spec continue(spider, timeout) :: :ok
defdelegate continue(spider, timeout \\ :infinity), to: Engine
@doc "retry failed events for a spider"
@spec retry_failed(spider, max_retries :: integer, timeout) :: {:ok, count :: integer}
defdelegate retry_failed(spider, max_retries \\ 3, timeout \\ :infinity), to: Engine
@doc "insert a request to spider"
@spec insert_request(spider, request) :: true | nil
def insert_request(spider, request) when is_struct(request, Request),
do: insert_requests(spider, [request])
@doc "insert multiple requests to spider"
@spec insert_requests(spider, requests) :: true | nil
def insert_requests(spider, requests) do
if info = :persistent_term.get(spider, nil) do
objects = Enum.map(requests, &{&1.key, &1})
:telemetry.execute(
[:spider_man, :downloader, :start],
%{count: length(objects)},
%{name: inspect(spider)}
)
:ets.insert(info.downloader_tid, objects)
end
end
@doc "fetch spider's statistics"
@spec stats(spider) :: [
status: status,
common_pipeline_tid: ets_stats,
downloader_tid: ets_stats,
failed_tid: ets_stats,
spider_tid: ets_stats,
item_processor_tid: ets_stats
]
def stats(spider) do
components =
:persistent_term.get(spider)
|> Enum.map(fn {key, tid} ->
{key,
tid
|> :ets.info()
|> Keyword.take([:size, :memory])}
end)
[{:status, Engine.status(spider)} | components]
end
@doc "fetch spider's statistics of all ets"
@spec ets_stats(spider) :: [
common_pipeline_tid: ets_stats,
downloader_tid: ets_stats,
failed_tid: ets_stats,
spider_tid: ets_stats,
item_processor_tid: ets_stats
]
def ets_stats(spider) do
:persistent_term.get(spider)
|> Enum.map(fn {key, tid} ->
{key,
tid
|> :ets.info()
|> Keyword.take([:size, :memory])}
end)
end
@spec components :: [component]
def components, do: [:downloader, :spider, :item_processor]
@doc "fetch component's statistics"
@spec stats(spider, component) :: ets_stats
def stats(spider, component) do
if info = :persistent_term.get(spider, nil) do
info[:"#{component}_tid"] |> :ets.info() |> Keyword.take([:size, :memory])
end
end
@spec run_until_zero(spider, settings, check_interval :: integer) :: millisecond :: integer
def run_until_zero(spider, settings \\ [], check_interval \\ 1500) do
run_until(spider, settings, fn ->
ets_list =
:persistent_term.get(spider)
|> Map.take([:downloader_tid, :failed_tid, :spider_tid])
fun = fn {_, tid} -> :ets.info(tid, :size) == 0 end
if Enum.all?(ets_list, fun) do
Process.sleep(check_interval)
if Enum.all?(ets_list, fun) do
:stop
else
check_interval
end
else
check_interval
end
end)
end
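# Usage sketch: run a hypothetical `MySpider` until the downloader, failed, and
# spider ets tables are all empty, polling every 2 seconds; returns the elapsed
# time in milliseconds.
#
#     elapsed_ms = SpiderMan.run_until_zero(MySpider, [], 2_000)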
@spec run_until(spider, settings, fun) :: millisecond :: integer
def run_until(spider, settings \\ [], fun) when is_function(fun, 0) do
t1 = System.system_time(:millisecond)
{:ok, _} = start(spider, settings)
Process.sleep(1500)
_run_until(fun)
:ok = stop(spider)
System.system_time(:millisecond) - t1
end
defp _run_until(fun) do
case fun.() do
:stop ->
:stop
sleep_time when is_integer(sleep_time) ->
Process.sleep(sleep_time)
_run_until(fun)
_ ->
Process.sleep(100)
_run_until(fun)
end
end
@doc "list spiders where already started"
@spec list_spiders :: [spider]
def list_spiders do
SpiderMan.Supervisor
|> Supervisor.which_children()
|> Stream.reject(&match?({SpiderMan.Registry, _, _, _}, &1))
|> Enum.map(&elem(&1, 0))
end
@doc false
def periodic_measurements do
Enum.each(list_spiders(), &telemetry_execute(&1))
catch
_, _ -> :ok
end
@doc false
def telemetry_execute(spider) do
name = inspect(spider)
spider
|> ets_stats()
|> Enum.each(fn {tid, measurements} ->
:telemetry.execute([:spider_man, :ets], Map.new(measurements), %{name: name, tid: tid})
end)
catch
_, _ -> :ok
end
end
|
lib/spider_man.ex
| 0.859162 | 0.463019 |
spider_man.ex
|
starcoder
|
defmodule ExGherkin.Scanner do
@moduledoc false
alias __MODULE__.{
Context,
SyntaxError,
Token,
Utils
}
def tokenize!(content), do: tokenize(content)
def tokenize!(content, context = %Context{}), do: tokenize(content, context)
def tokenize(content), do: tokenize(content, Context.new())
def tokenize(content, context = %Context{}) do
content
|> Stream.with_index(1)
|> Stream.transform(context, fn {line, index}, context = %Context{} ->
{trimmed_line, column_count} = Utils.trim_line(line, context)
{tokenized, updated_context} =
map_to_token(
context.language,
trimmed_line,
index,
column_count,
Context.original_line(context, line)
)
{[Token.strip_record_name(tokenized)], updated_context}
end)
end
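# Usage sketch, assuming `content` is an enumerable of lines such as a
# `File.stream!/1` (implied by the `Stream.with_index/2` call above):
#
#     "my.feature"
#     |> File.stream!()
#     |> ExGherkin.Scanner.tokenize()
#     |> Enum.to_list()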
def map_to_token(_, trimmed_line = current_delimiter = "\"\"\"", index, column, context) do
handle_doc_string(trimmed_line, current_delimiter, :plain, index, column, context)
end
def map_to_token(_, trimmed_line = <<"\"\"\"", rest::binary>>, index, column, context) do
handle_doc_string(trimmed_line, "\"\"\"", String.to_atom(rest), index, column, context)
end
def map_to_token(_, trimmed_line = current_delimiter = "```", index, column, context) do
handle_doc_string(trimmed_line, current_delimiter, :plain, index, column, context)
end
def map_to_token(_, trimmed_line = <<"```", rest::binary>>, index, column, context) do
handle_doc_string(trimmed_line, "```", String.to_atom(rest), index, column, context)
end
def map_to_token(_, trimmed_line, index, _, context = %Context{doc_string: {_, _}}) do
trimmed_line
|> case do
"\\\"\\\"\\\"" -> {handle_plain_text("\"\"\"", index, 1), context}
"\\`\\`\\`" -> {handle_plain_text("```", index, 1), context}
"" -> {Token.content(index, 1, ""), context}
_ -> {handle_plain_text(trimmed_line, index, 1), context}
end
end
@languages ExGherkin.Scanner.LanguageSupport.all()
Enum.each(@languages, fn {language,
%{
feature: feature_phrasals,
rule: rule_phrasals,
background: background_phrasals,
scenario_outline: scenario_outline_phrasals,
example: example_phrasals,
given: given_phrasals,
when: when_phrasals,
then: then_phrasals,
but: but_phrasals,
and: and_phrasals,
examples: examples_phrasals,
direction: language_direction,
homonyms: homonym_phrasals
}} ->
Enum.each(homonym_phrasals, fn {phrasal, next_in_sequence_lookup} ->
{%{default: default_homonym}, next_in_sequence_lookup} =
Map.split(next_in_sequence_lookup, [:default])
def map_to_token(
unquote(language),
<<unquote(phrasal), rest::binary>>,
index,
column,
context = %Context{}
) do
{:token, prev_keyword, _, _, _} = Context.peek(context)
unquote(Macro.escape(next_in_sequence_lookup))
|> Map.get(prev_keyword, unquote(default_homonym))
|> case do
:given ->
handle_given(
unquote(language_direction),
unquote(phrasal),
rest,
index,
column,
context
)
:when ->
handle_when(
unquote(language_direction),
unquote(phrasal),
rest,
index,
column,
context
)
:then ->
handle_then(
unquote(language_direction),
unquote(phrasal),
rest,
index,
column,
context
)
:and ->
handle_and(
unquote(language_direction),
unquote(phrasal),
rest,
index,
column,
context
)
:but ->
handle_but(
unquote(language_direction),
unquote(phrasal),
rest,
index,
column,
context
)
end
end
end)
Enum.each(feature_phrasals, fn phrasal ->
def map_to_token(
unquote(language),
<<unquote(phrasal), ":", rest::binary>>,
index,
column,
context = %Context{}
) do
_language_direction = unquote(language_direction)
token = Token.feature(index, column, unquote(phrasal), rest)
{token, context |> Context.reset(:stepline) |> Context.push(token)}
end
end)
Enum.each(rule_phrasals, fn phrasal ->
def map_to_token(
unquote(language),
<<unquote(phrasal), ":", rest::binary>>,
index,
column,
context = %Context{}
) do
_language_direction = unquote(language_direction)
token = Token.rule(index, column, unquote(phrasal), rest)
{token, context |> Context.reset(:stepline) |> Context.push(token)}
end
end)
Enum.each(example_phrasals, fn phrasal ->
def map_to_token(
unquote(language),
<<unquote(phrasal), ":", rest::binary>>,
index,
column,
context = %Context{}
) do
_language_direction = unquote(language_direction)
token = Token.scenario(index, column, unquote(phrasal), rest)
{token, context |> Context.stepline() |> Context.push(token)}
end
end)
Enum.each(given_phrasals, fn phrasal ->
def map_to_token(
unquote(language),
<<unquote(phrasal), rest::binary>>,
index,
column,
context = %Context{}
) do
handle_given(unquote(language_direction), unquote(phrasal), rest, index, column, context)
end
end)
Enum.each(when_phrasals, fn phrasal ->
def map_to_token(
unquote(language),
<<unquote(phrasal), rest::binary>>,
index,
column,
context = %Context{}
) do
handle_when(unquote(language_direction), unquote(phrasal), rest, index, column, context)
end
end)
Enum.each(then_phrasals, fn phrasal ->
def map_to_token(
unquote(language),
<<unquote(phrasal), rest::binary>>,
index,
column,
context = %Context{}
) do
handle_then(unquote(language_direction), unquote(phrasal), rest, index, column, context)
end
end)
Enum.each(but_phrasals, fn phrasal ->
def map_to_token(
unquote(language),
<<unquote(phrasal), rest::binary>>,
index,
column,
context = %Context{}
) do
handle_but(unquote(language_direction), unquote(phrasal), rest, index, column, context)
end
end)
Enum.each(and_phrasals, fn phrasal ->
def map_to_token(
unquote(language),
<<unquote(phrasal), rest::binary>>,
index,
column,
context = %Context{}
) do
handle_and(unquote(language_direction), unquote(phrasal), rest, index, column, context)
end
end)
Enum.each(background_phrasals, fn phrasal ->
def map_to_token(
unquote(language),
<<unquote(phrasal), ":", rest::binary>>,
index,
column,
context = %Context{}
) do
_language_direction = unquote(language_direction)
token = Token.background(index, column, unquote(phrasal), String.trim_leading(rest))
{token, context |> Context.stepline() |> Context.push(token)}
end
end)
Enum.each(scenario_outline_phrasals, fn phrasal ->
def map_to_token(
unquote(language),
<<unquote(phrasal), ":", rest::binary>>,
index,
column,
context = %Context{}
) do
_language_direction = unquote(language_direction)
token = Token.scenario_outline(index, column, unquote(phrasal), rest)
{token, context |> Context.reset(:stepline) |> Context.push(token)}
end
end)
Enum.each(examples_phrasals, fn phrasal ->
def map_to_token(
unquote(language),
<<unquote(phrasal), ":", rest::binary>>,
index,
column,
context = %Context{}
) do
_language_direction = unquote(language_direction)
token = Token.scenarios(index, column, unquote(phrasal), String.trim_leading(rest))
{token, Context.push(context, token)}
end
end)
end)
def map_to_token(_, <<"|", rest::binary>>, index, column, context = %Context{}) do
text =
rest
|> String.trim_trailing("|")
|> Utils.data_table_pipe_splitter(column)
|> Enum.map(fn {offset_count, e} ->
{offset_count, String.trim(e, " ")}
end)
{Token.data_table(
index,
column,
"|",
text
), context}
end
def map_to_token(_, <<"@", rest::binary>>, index, column, context = %Context{}) do
{_, text} =
rest
|> String.split(" #")
|> List.first()
|> String.split("@")
|> Enum.reduce({column, []}, fn tag, {left_offset, tags} ->
{left_offset, trimmed_leading} = Utils.count_spaces_before(tag, left_offset)
{left_offset + String.length(trimmed_leading) + 1,
tags ++ [{left_offset, "@" <> String.trim_trailing(trimmed_leading)}]}
end)
{Token.tag(
index,
column,
"@",
text
), context}
end
def map_to_token(_, <<"# language:", rest::binary>>, index, column, context = %Context{}) do
language = String.trim(rest)
{Token.language(index, column, "#", language), Context.language(context, language)}
end
def map_to_token(_, <<"#language:", rest::binary>>, index, column, context = %Context{}) do
language = String.trim(rest)
{Token.language(index, column, "#", language), Context.language(context, language)}
end
def map_to_token(language, <<"#", rest::binary>>, index, column, context = %Context{}) do
language_test = String.split(rest, "language")
if length(language_test) == 2 do
[_, language_part] = language_test
language_test = String.split(language_part, ":")
if length(language_test) == 2 do
[_, language_part] = language_test
map_to_token(
language,
"# language:" <> String.trim(language_part),
index,
column,
context
)
else
handle_comment(index, column, context)
end
else
handle_comment(index, column, context)
end
end
def map_to_token(_, text, index, column, context = %Context{}) do
{text, column} =
context
|> Context.peek()
|> case do
token = {:token, :feature, _, _, _} ->
new_column = Token.column(token)
if column < new_column do
{text, new_column}
else
{Utils.pad_leading(text, column - new_column), new_column}
end
{:token, :scenario, _, _, _} ->
{Utils.pad_leading(text, column - 1), 1}
{:token, :rule, _, _, _} ->
{Utils.pad_leading(text, column - 1), 1}
{:token, :background, _, _, _} ->
{Utils.pad_leading(text, column - 1), 1}
_ ->
{text, column}
end
{handle_plain_text(text, index, column), context}
end
defp handle_doc_string(
_,
current_delimiter,
type,
index,
column,
context = %Context{doc_string: false}
) do
c = Context.doc_string(context, column, current_delimiter)
{Token.doc_string(index, column, current_delimiter, {current_delimiter, type}), c}
end
defp handle_doc_string(
trimmed_line,
current_delimiter,
type,
index,
column,
context = %Context{doc_string: {_, past_delimiter}}
) do
{current_delimiter == past_delimiter, type == :plain}
|> case do
{true, true} ->
{column, _} = Utils.count_spaces_before(context.original_line, 1)
{Token.doc_string(index, column, current_delimiter, {current_delimiter, type}),
Context.reset(context, :doc_string)}
{false, _} ->
{Token.content(index, 1, trimmed_line), context}
{true, false} ->
SyntaxError.raise(
"Docstring to be ended with an untyped delimiter. Kindly remove the type `#{type}` from `#{
trimmed_line
}` or use an alternate Docstring delimiter",
index,
column,
:ending_docstring_delim_typed
)
end
end
defp handle_comment(index, column, context) do
line_with_white_spaces_at_end_preserved =
context.original_line
|> String.trim_leading()
|> String.trim_leading("#")
|> String.trim_trailing("\n")
{Token.comment(index, column, "#", line_with_white_spaces_at_end_preserved), context}
end
def handle_given(_language_direction, phrasal, rest, index, column, context) do
token = Token.given(index, column, phrasal, rest)
{token, context |> Context.stepline() |> Context.push(token)}
end
def handle_when(_language_direction, phrasal, rest, index, column, context) do
token = Token._when(index, column, phrasal, rest)
{token, context |> Context.stepline() |> Context.push(token)}
end
def handle_then(_language_direction, phrasal, rest, index, column, context) do
token = Token.then(index, column, phrasal, rest)
{token, context |> Context.stepline() |> Context.push(token)}
end
def handle_and(_language_direction, phrasal, rest, index, column, context) do
if context.stepline do
token = Token._and(index, column, phrasal, rest)
{token, Context.push(context, token)}
else
{handle_plain_text(String.trim_trailing(context.original_line, "\n"), index, 1), context}
end
end
def handle_but(_language_direction, phrasal, rest, index, column, context) do
if context.stepline do
token = Token.but(index, column, phrasal, rest)
{token, Context.push(context, token)}
else
{handle_plain_text(String.trim_leading(context.original_line), index, column), context}
end
end
defp handle_plain_text("", index, _) do
Token.empty(index, 1)
end
defp handle_plain_text(text, index, column) do
Token.content(index, column, text)
end
end
|
lib/scanner/scanner.ex
| 0.706798 | 0.729077 |
scanner.ex
|
starcoder
|
defmodule MAVLink.Utils do
@moduledoc ~s"""
MAVLink support functions used during code generation and at runtime.
Parts of this module are ported from corresponding implementations
in mavutils.py
"""
use Bitwise, only_operators: true
import List, only: [flatten: 1]
import Enum, only: [sort_by: 2, join: 2, map: 2, reverse: 1]
@doc """
Sort parsed message fields into wire order according
to https://mavlink.io/en/guide/serialization.html
Extension fields are listed separately so that they can
be omitted from MAVLink 1 messages.
"""
@spec wire_order([%{type: String.t(), is_extension: boolean}]) :: [[map()]]
def wire_order(fields) do
type_order_map = %{
uint64_t: 1,
int64_t: 1,
double: 1,
uint32_t: 2,
int32_t: 2,
float: 2,
uint16_t: 3,
int16_t: 3,
uint8_t: 4,
uint8_t_mavlink_version: 4,
int8_t: 4,
char: 4
}
[
sort_by(
Enum.filter(fields, & !&1.is_extension),
&Map.fetch(type_order_map, String.to_atom(&1.type))
),
Enum.filter(fields, & &1.is_extension)
]
end
def eight_bit_checksum(value) do
(value &&& 0xFF) ^^^ (value >>> 8)
end
@doc """
Calculate an x25 checksum of a list or binary based on
pymavlink mavcrc.x25crc
"""
@spec x25_crc(list() | binary()) :: pos_integer
def x25_crc(list) when is_list(list) do
x25_crc(0xffff, flatten(list))
end
def x25_crc(bin) when is_binary(bin) do
x25_crc(0xffff, bin)
end
def x25_crc(crc, []), do: crc
def x25_crc(crc, <<>>), do: crc
def x25_crc(crc, [head | tail]) do
crc |> x25_accumulate(head) |> x25_crc(tail)
end
def x25_crc(crc, << head :: size(8), tail :: binary>>) do
crc |> x25_accumulate(head) |> x25_crc(tail)
end
defp x25_accumulate(crc, value) do
tmp = value ^^^ (crc &&& 0xff)
tmp = (tmp ^^^ (tmp <<< 4)) &&& 0xff
crc = (crc >>> 8) ^^^ (tmp <<< 8) ^^^ (tmp <<< 3) ^^^ (tmp >>> 4)
crc &&& 0xffff
end
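# Usage sketch: checksum a (made-up) frame body, then fold the 16-bit CRC
# down to a single byte.
#
#     crc = MAVLink.Utils.x25_crc(<<0xFE, 0x09, 0x00, 0x01, 0x01>>)
#     byte = MAVLink.Utils.eight_bit_checksum(crc)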
@doc "Helper function for messages to pack string fields"
@spec pack_string(binary, non_neg_integer) :: binary
def pack_string(s, ordinality) do
s |> String.pad_trailing(ordinality, <<0>>)
end
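# For example, `pack_string("ABC", 6)` yields `<<65, 66, 67, 0, 0, 0>>`.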
@doc "Helper function for messages to pack array fields"
@spec pack_array(list(), integer, (any() -> binary())) :: binary() | {:error, String.t()}
def pack_array(a, ordinality, _) when length(a) > ordinality, do: {:error, "Maximum elements allowed is #{ordinality}"}
def pack_array(a, ordinality, field_packer) when length(a) < ordinality, do: pack_array(a ++ [0], ordinality, field_packer)
def pack_array(a, _, field_packer), do: a |> map(field_packer) |> join(<<>>)
@doc "Helper function for decode() to unpack array fields"
# TODO bitstring generator instead? https://elixir-lang.org/getting-started/comprehensions.html
@spec unpack_array(binary(), (binary()-> {any(), list()})) :: list()
def unpack_array(bin, fun), do: unpack_array(bin, fun, [])
def unpack_array(<<>>, _, lst), do: reverse(lst)
def unpack_array(bin, fun, lst) do
{elem, rest} = fun.(bin)
unpack_array(rest, fun, [elem | lst])
end
@doc "Parse an ip address string into a tuple"
def parse_ip_address(address) when is_binary(address) do
parse_ip_address(String.split(address, "."), [], 0)
end
def parse_ip_address([], address, 4) do
List.to_tuple(reverse address)
end
def parse_ip_address([], _, _) do
{:error, :invalid_ip_address}
end
def parse_ip_address([component | rest], address, count) do
case Integer.parse(component) do
:error ->
{:error, :invalid_ip_address}
{n, _} ->
cond do
n >= 0 and n <= 255 ->
parse_ip_address(rest, [n | address], count + 1)
true ->
{:error, :invalid_ip_address}
end
end
end
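# For example, `parse_ip_address("192.168.0.10")` returns `{192, 168, 0, 10}`,
# while `parse_ip_address("256.0.0.1")` returns `{:error, :invalid_ip_address}`.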
def parse_positive_integer(port) when is_binary(port) do
case Integer.parse(port) do
:error ->
:error
{n, _} when n > 0 ->
n
_ ->
:error
end
end
def pack_float(f) when is_float(f), do: <<f::little-signed-float-size(32)>>
def pack_float(:nan), do: <<0, 0, 192, 127>> # Have received these from QGroundControl
def unpack_float(<<f::little-signed-float-size(32)>>), do: f
def unpack_float(<<0, 0, 192, 127>>), do: :nan
def pack_double(f) when is_float(f), do: <<f::little-signed-float-size(64)>>
def pack_double(:nan), do: <<0, 0, 0, 0, 0, 0, 248, 127>> # Quick test in C gave this for double NaN
def unpack_double(<<f::little-signed-float-size(64)>>), do: f
def unpack_double(<<0, 0, 0, 0, 0, 0, 248, 127>>), do: :nan
end
|
lib/mavlink/utils.ex
| 0.677687 | 0.415907 |
utils.ex
|
starcoder
|
defmodule Membrane.Caps.Audio.MPEG do
@moduledoc """
This module implements struct for caps representing MPEG audio stream.
See [MPEG Frame header documentation](https://www.mp3-tech.org/programmer/frame_header.html)
"""
@compile {:inline,
[
samples_per_frame: 2,
sound_of_silence: 0
]}
# MPEG version
@type version_t :: :v1 | :v2 | :v2_5
# MPEG layer
@type layer_t :: :layer1 | :layer2 | :layer3
# CRC enabled
@type crc_enabled_t :: boolean
# Bitrate
@type bitrate_t :: pos_integer | :free
# Sample rate of the audio.
@type sample_rate_t :: pos_integer
# Padding enabled
@type padding_enabled_t :: boolean
# Private bit set?
@type private_t :: boolean
# Channel mode
@type channel_mode_t :: :stereo | :joint_stereo | :dual_channel | :single_channel
# Mode extension (in Joint Stereo)
@type mode_extension_t :: :mode0 | :mode1 | :mode2 | :mode3 | nil
# Copyright bit set?
@type copyright_t :: boolean
# Original bit set?
@type original_t :: boolean
# Emphasis mode
@type emphasis_mode_t :: :none | :emphasis_50_15 | :reserved | :ccit_j_17
@type t :: %Membrane.Caps.Audio.MPEG{
version: version_t,
layer: layer_t,
crc_enabled: crc_enabled_t,
bitrate: bitrate_t,
channels: pos_integer | nil,
sample_rate: sample_rate_t,
padding_enabled: padding_enabled_t,
private: private_t,
channel_mode: channel_mode_t,
mode_extension: mode_extension_t,
copyright: copyright_t,
original: original_t,
emphasis_mode: emphasis_mode_t
}
defstruct version: nil,
layer: nil,
crc_enabled: nil,
bitrate: nil,
channels: nil,
sample_rate: nil,
padding_enabled: nil,
private: nil,
channel_mode: nil,
mode_extension: nil,
copyright: nil,
original: nil,
emphasis_mode: nil
@doc """
Returns the size of MPEG audio frame header (4 bytes).
"""
@spec header_size() :: 4
def header_size(), do: 4
@doc """
Returns amount of raw audio samples that are in the frame for given
version/layer combination.
Inlined by the compiler.
"""
@spec samples_per_frame(version_t, layer_t) :: pos_integer
def samples_per_frame(:v1, :layer1), do: 384
def samples_per_frame(:v1, :layer2), do: 1152
def samples_per_frame(:v1, :layer3), do: 1152
def samples_per_frame(:v2, :layer1), do: 384
def samples_per_frame(:v2, :layer2), do: 1152
def samples_per_frame(:v2, :layer3), do: 576
def samples_per_frame(:v2_5, :layer1), do: 384
def samples_per_frame(:v2_5, :layer2), do: 1152
def samples_per_frame(:v2_5, :layer3), do: 576
@doc """
Returns the size of a frame in bytes. The result does not include
the size of a header.
"""
@spec frame_size(caps :: t) :: pos_integer
def frame_size(%__MODULE__{
version: version,
layer: layer,
bitrate: bitrate,
sample_rate: sample_rate,
padding_enabled: padding_enabled
}) do
# See row G at: http://www.mp3-tech.org/programmer/frame_header.html
padding =
case {padding_enabled, layer} do
{false, _} -> 0
{true, :layer1} -> 4
{true, _} -> 1
end
# FrameSize = Bitrate_kbps * 1000 / 8 * SamplesPerFrame / SampleRate_hz + Padding
div(bitrate * 125 * samples_per_frame(version, layer), sample_rate) + padding
end
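# Worked example: a 128 kbps MPEG v1 Layer III frame at 44.1 kHz without
# padding is div(128 * 125 * 1152, 44_100) = 417 bytes.
#
#     %Membrane.Caps.Audio.MPEG{version: :v1, layer: :layer3, bitrate: 128,
#                               sample_rate: 44_100, padding_enabled: false}
#     |> Membrane.Caps.Audio.MPEG.frame_size()
#     #=> 417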
@doc """
Returns one 'silent' frame along with its caps.
Inlined by the compiler.
"""
@spec sound_of_silence :: {binary, t}
def sound_of_silence do
payload =
<<255, 251, 16, 100, 0, 15, 240, 0, 0, 105, 0, 0, 0, 8, 0, 0, 13, 32, 0, 0, 1, 0, 0, 1, 164,
0, 0, 0, 32, 0, 0, 52, 128, 0, 0, 4, 76, 65, 77, 69, 51, 46, 49, 48,
48>> <>
<<85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85,
85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85,
85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85>>
caps = %__MODULE__{
bitrate: 32,
channel_mode: :joint_stereo,
channels: 2,
copyright: false,
crc_enabled: true,
emphasis_mode: :none,
layer: :layer3,
mode_extension: :mode2,
original: true,
padding_enabled: false,
private: false,
sample_rate: 44_100,
version: :v1
}
{payload, caps}
end
end
|
lib/membrane_caps_audio_mpeg.ex
| 0.880714 | 0.415492 |
membrane_caps_audio_mpeg.ex
|
starcoder
|
defmodule Statistics.Distributions.Hypergeometric do
@moduledoc """
Hypergeometric distribution.
It models the probability that `n` draws without replacement from a
population of `pn` items, of which `pk` count as successes, yield exactly
`k` successes: `p(k) = C(pk, k) * C(pn - pk, n - k) / C(pn, n)`.
"""
alias Statistics.Math
@doc """
The probability mass function
## Examples
iex> Statistics.Distributions.Hypergeometric.pmf(50, 5, 10).(4)
0.003964583058015066
"""
@spec pmf(non_neg_integer, non_neg_integer, non_neg_integer) :: fun
def pmf(pn, pk, n) do
combos = Math.combination(pn, n)
fn k ->
cond do
n < k ->
0.0
pn < n ->
0.0
pn == pk && n != k ->
0.0
pn == pk ->
1.0
true ->
xk = Math.to_int(k)
Math.combination(pk, xk) * Math.combination(pn - pk, n - xk) / combos
end
end
end
@doc """
The cumulative density function
## Examples
iex> Statistics.Distributions.Hypergeometric.cdf(52, 5, 13).(2)
0.9072328931572629
"""
@spec cdf(non_neg_integer, non_neg_integer, non_neg_integer) :: fun
def cdf(pn, pk, n) do
cpmf = pmf(pn, pk, n)
fn k ->
0..Math.to_int(Math.floor(k))
|> Enum.to_list()
|> Enum.map(fn i -> cpmf.(i) end)
|> Enum.sum()
end
end
@doc """
The percentile-point function
## Examples
iex> Statistics.Distributions.Hypergeometric.ppf(80, 20, 50).(0.1)
10
"""
@spec ppf(non_neg_integer, non_neg_integer, non_neg_integer) :: fun
def ppf(pn, pk, n) do
fn x ->
ppf_tande(x, cdf(pn, pk, n), 0)
end
end
# trial-and-error method: increments an integer guess
# until the CDF at the guess reaches x
defp ppf_tande(x, tcdf, guess) do
g_cdf = tcdf.(guess)
cond do
x > g_cdf ->
ppf_tande(x, tcdf, guess + 1)
x <= g_cdf ->
guess
end
end
@doc """
Draw a random number from hypergeometric distribution
"""
@spec rand(non_neg_integer, non_neg_integer, non_neg_integer) :: non_neg_integer
def rand(pn, pk, n), do: rand(pk, pmf(pn, pk, n))
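# Usage sketch: draw a single value from a population of 50 items containing
# 5 successes, sampling 10 of them.
#
#     Statistics.Distributions.Hypergeometric.rand(50, 5, 10)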
defp rand(pk, rpmf) do
x = Math.floor(Math.rand() * pk)
if rpmf.(x) > Math.rand() do
round(x)
else
# keep trying
rand(pk, rpmf)
end
end
end
|
lib/statistics/distributions/hypergeometric.ex
| 0.847021 | 0.693187 |
hypergeometric.ex
|
starcoder
|
defmodule AWS.ECRPUBLIC do
@moduledoc """
Amazon Elastic Container Registry Public
Amazon Elastic Container Registry (Amazon ECR) is a managed container image
registry service.
Amazon ECR provides both public and private registries to host your container
images. You can use the familiar Docker CLI, or your preferred client, to push,
pull, and manage images. Amazon ECR provides a secure, scalable, and reliable
registry for your Docker or Open Container Initiative (OCI) images. Amazon ECR
supports public repositories with this API. For information about the Amazon ECR
API for private repositories, see [Amazon Elastic Container Registry API Reference](https://docs.aws.amazon.com/AmazonECR/latest/APIReference/Welcome.html).
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: "Amazon ECR Public",
api_version: "2020-10-30",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "api.ecr-public",
global?: false,
protocol: "json",
service_id: "ECR PUBLIC",
signature_version: "v4",
signing_name: "ecr-public",
target_prefix: "SpencerFrontendService"
}
end
@doc """
Checks the availability of one or more image layers within a repository in a
public registry.
When an image is pushed to a repository, each image layer is checked to verify
if it has been uploaded before. If it has been uploaded, then the image layer is
skipped.
This operation is used by the Amazon ECR proxy and is not generally used by
customers for pulling and pushing images. In most cases, you should use the
`docker` CLI to pull, tag, and push images.
"""
def batch_check_layer_availability(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "BatchCheckLayerAvailability", input, options)
end
@doc """
Deletes a list of specified images within a repository in a public registry.
Images are specified with either an `imageTag` or `imageDigest`.
You can remove a tag from an image by specifying the image's tag in your
request. When you remove the last tag from an image, the image is deleted from
your repository.
You can completely delete an image (and all of its tags) by specifying the
image's digest in your request.
"""
def batch_delete_image(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "BatchDeleteImage", input, options)
end
@doc """
Informs Amazon ECR that the image layer upload has completed for a specified
public registry, repository name, and upload ID.
You can optionally provide a `sha256` digest of the image layer for data
validation purposes.
When an image is pushed, the CompleteLayerUpload API is called once per each new
image layer to verify that the upload has completed.
This operation is used by the Amazon ECR proxy and is not generally used by
customers for pulling and pushing images. In most cases, you should use the
`docker` CLI to pull, tag, and push images.
"""
def complete_layer_upload(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CompleteLayerUpload", input, options)
end
@doc """
Creates a repository in a public registry.
For more information, see [Amazon ECR repositories](https://docs.aws.amazon.com/AmazonECR/latest/userguide/Repositories.html)
in the *Amazon Elastic Container Registry User Guide*.
"""
def create_repository(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateRepository", input, options)
end
@doc """
Deletes a repository in a public registry.
If the repository contains images, you must either delete all images in the
repository or use the `force` option which deletes all images on your behalf
before deleting the repository.
"""
def delete_repository(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteRepository", input, options)
end
@doc """
Deletes the repository policy associated with the specified repository.
"""
def delete_repository_policy(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteRepositoryPolicy", input, options)
end
@doc """
Returns the image tag details for a repository in a public registry.
"""
def describe_image_tags(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeImageTags", input, options)
end
@doc """
Returns metadata about the images in a repository in a public registry.
Beginning with Docker version 1.9, the Docker client compresses image layers
before pushing them to a V2 Docker registry. The output of the `docker images`
command shows the uncompressed image size, so it may return a larger image size
than the image sizes returned by `DescribeImages`.
"""
def describe_images(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeImages", input, options)
end
@doc """
Returns details for a public registry.
"""
def describe_registries(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeRegistries", input, options)
end
@doc """
Describes repositories in a public registry.
"""
def describe_repositories(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeRepositories", input, options)
end
@doc """
Retrieves an authorization token.
An authorization token represents your IAM authentication credentials and can be
used to access any Amazon ECR registry that your IAM principal has access to.
The authorization token is valid for 12 hours. This API requires the
`ecr-public:GetAuthorizationToken` and `sts:GetServiceBearerToken` permissions.
"""
def get_authorization_token(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetAuthorizationToken", input, options)
end
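# A minimal call sketch; the credentials and region are placeholders, and the
# client setup follows the general aws-elixir pattern:
#
#     client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")
#     {:ok, output, _http_response} = AWS.ECRPUBLIC.get_authorization_token(client, %{})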
@doc """
Retrieves catalog metadata for a public registry.
"""
def get_registry_catalog_data(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetRegistryCatalogData", input, options)
end
@doc """
Retrieve catalog metadata for a repository in a public registry.
This metadata is displayed publicly in the Amazon ECR Public Gallery.
"""
def get_repository_catalog_data(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetRepositoryCatalogData", input, options)
end
@doc """
Retrieves the repository policy for the specified repository.
"""
def get_repository_policy(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetRepositoryPolicy", input, options)
end
@doc """
Notifies Amazon ECR that you intend to upload an image layer.
When an image is pushed, the InitiateLayerUpload API is called once per image
layer that has not already been uploaded. Whether or not an image layer has been
uploaded is determined by the BatchCheckLayerAvailability API action.
This operation is used by the Amazon ECR proxy and is not generally used by
customers for pulling and pushing images. In most cases, you should use the
`docker` CLI to pull, tag, and push images.
"""
def initiate_layer_upload(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "InitiateLayerUpload", input, options)
end
@doc """
List the tags for an Amazon ECR Public resource.
"""
def list_tags_for_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListTagsForResource", input, options)
end
@doc """
Creates or updates the image manifest and tags associated with an image.
When an image is pushed and all new image layers have been uploaded, the
PutImage API is called once to create or update the image manifest and the tags
associated with the image.
This operation is used by the Amazon ECR proxy and is not generally used by
customers for pulling and pushing images. In most cases, you should use the
`docker` CLI to pull, tag, and push images.
"""
def put_image(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "PutImage", input, options)
end
@doc """
Create or updates the catalog data for a public registry.
"""
def put_registry_catalog_data(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "PutRegistryCatalogData", input, options)
end
@doc """
Creates or updates the catalog data for a repository in a public registry.
"""
def put_repository_catalog_data(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "PutRepositoryCatalogData", input, options)
end
@doc """
Applies a repository policy to the specified public repository to control access
permissions.
For more information, see [Amazon ECR Repository Policies](https://docs.aws.amazon.com/AmazonECR/latest/userguide/repository-policies.html)
in the *Amazon Elastic Container Registry User Guide*.
"""
def set_repository_policy(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "SetRepositoryPolicy", input, options)
end
@doc """
Associates the specified tags to a resource with the specified `resourceArn`.
If existing tags on a resource are not specified in the request parameters, they
are not changed. When a resource is deleted, the tags associated with that
resource are deleted as well.
"""
def tag_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "TagResource", input, options)
end
@doc """
Deletes specified tags from a resource.
"""
def untag_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UntagResource", input, options)
end
@doc """
Uploads an image layer part to Amazon ECR.
When an image is pushed, each new image layer is uploaded in parts. The maximum
size of each image layer part can be 20971520 bytes (or about 20MB). The
UploadLayerPart API is called once per each new image layer part.
This operation is used by the Amazon ECR proxy and is not generally used by
customers for pulling and pushing images. In most cases, you should use the
`docker` CLI to pull, tag, and push images.
"""
def upload_layer_part(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UploadLayerPart", input, options)
end
end
|
lib/aws/generated/ecrpublic.ex
| 0.885155 | 0.430985 |
ecrpublic.ex
|
starcoder
|
defmodule Volley.InOrderSubscription do
@moduledoc """
A subscription which guarantees ordering
An in-order subscription consumes an EventStoreDB stream in order, as if
subscribed via `Spear.subscribe/4`. InOrder subscriptions are simpler than
persistent subscriptions and can be used in cases where unordered processing
is too complicated or undesirable.
## Back-pressure
This producer primarily makes use of `GenStage`'s buffering capabilities to
provide back-pressure.
A plain subscription through `Spear.subscribe/4` has no back-pressure. For
very large streams, a subscriber process may become overwhelmed as the
process mailbox fills up with events as fast as they can be read from
the EventStoreDB.
This producer has two modes:
- subscription mode, where the stream is subscribed with `Spear.subscribe/4`
and events are emitted as soon as available
- reading mode, in which events are emitted on-demand of the consumer
This producer starts up in reading mode and emits events on-demand as long
as there are more events to be read. Once the producer reaches the current
end of the stream, it subscribes using `Spear.subscribe/4` in the switch
to subscription mode.
Once the producer has caught up to the end of the stream, it will only
receive newly appended events, and so may be less likely to become
overwhelmed. Sustained bursts in appends to the stream may eventually
overfill the `GenStage` buffer, though.
## Writing handlers for in-order subscriptions
Special care must be taken when writing a consumer for in-order subscriptions.
Consumers must implement blocking in order to preserve correct ordering
of events.
To implement blocking, a consumer must meet these three requirements
- only one consumer may subscribe to each producer
- the consumer must `Process.link/1` itself to the producer process
- the `c:GenStage.init/1` callback is a suitable place to perform this
linking
- the consumer must curate its stream position
Let's build a basic event handler for a in-order subscription with the
`GenStage` basics
```elixir
defmodule MyHandler do
use GenStage
def start_link(_) do
GenStage.start_link(__MODULE__, :ok)
end
@impl GenStage
def init(:ok) do
{:consumer, :ok, subscribe_to: [MyProducer]}
end
@impl GenStage
def handle_events(events, _from, state) do
IO.inspect(events, label: "events")
{:noreply, [], state}
end
end
```
This is a very minimal consumer that starts up, subscribes to `MyProducer`,
and handles events by outputting them with `IO.inspect/2`. We can start
up this handler and the producer like so:
```elixir
in_order_subscription_settings = [
name: MyProducer,
connection: MySpearClient,
stream_name: "some_stream",
..
]
[
{Volley.InOrderSubscription, in_order_subscription_settings},
MyHandler
]
|> Supervisor.start_link(strategy: :one_for_one)
```
This consumer doesn't follow our rules though. If we start it up, we'll see
that we're handling multiple events at once, and if we restart it we'll see
it start from the beginning of the stream. Let's restrict the number of
events it can process at once by tuning the `:max_demand` down to `1`:
```elixir
@impl GenStage
def init(:ok) do
{:consumer, :ok, subscribe_to: [{MyProducer, max_demand: 1}]}
end
```
Now we're handling events one-by-one in our handler so we can match on a
single event in `c:GenStage.handle_events/3`
```elixir
def handle_events([event], _from, state) do
IO.inspect(event.metadata.stream_revision, label: "handling event no.")
..
```
Note that it is simpler but not necessary to set a `:max_demand` of 1:
in order to handle events in order, the consumer must set its stream position
after every successful handle of an event, such as with a routine like so
```elixir
def handle_events(events, _from, state) do
Enum.each(events, fn event ->
:ok = handle_one_event(event)
:ok = update_stream_position(event)
end)
{:noreply, [], state}
end
```
A consumer can break ordering by attempting to handle all events in parallel
or without updating the stream position on every successful handle. Consider a
scenario where a consumer attempts to handle events `[1, 2, 3]`. If the
consumer successfully handles 1 and 3 but fails to handle 2, the consumer
cannot write a stream position number that fully describes its position in the
stream. This may not be a concern if whatever side-effects the handler is
committing are idempotent.
This producer reads events in chunks at least as large as the demand from
the consumer, so setting a very low `:max_demand` does not necessarily
increase the number of network calls.
Now if we start up our handler, we'll see it churning through each event
in order. Let's introduce a bit of failure into the handler though.
Defensive programming is typically discouraged in OTP applications, so let's
do a `raise/1` in our handling code to simulate a situation like a failing
bang (`!`) function or bad match:
```elixir
def handle_events([event], _from, state) do
revision = event.metadata.stream_revision
IO.inspect(revision, label: "handling event no.")
if revision == 42 do
raise "aaaaah!"
end
{:noreply, [], state}
end
```
Now if we run our pipeline on a stream longer than 42 events, the handler
will crash. Since producers and consumers are not linked by default in
`GenStage`, the exit of the consumer will leave the producer running.
This means that we will see output like
```
handling event no.: 41
handling event no.: 42
21:03:07.107 [error] GenServer #PID<0.266.0> terminating
** (RuntimeError) aaaaah!
handling event no.: 43
```
The default restart strategy of our consumer will start a new process which
will subscribe to the producer and handle the next event. This means that
event 42 is effectively skipped which breaks ordered processing. To remedy
this we need to notify the producer that the processing for an event has
failed by linking together the producer and consumer.
```elixir
@impl GenStage
def init(:ok) do
MyProducer |> GenServer.whereis() |> Process.link()
{:consumer, :ok, subscribe_to: [{MyProducer, max_demand: 1}]}
end
```
Now when the handler exits on event 42, it will also exit the producer.
With the producer and consumer that we have so far, this will result in
the consumer restarting processing from the beginning of the stream.
Again, this breaks ordering. We need the consumer to curate its position
in the stream in order to keep a consistent order.
For this example, we'll use `:ets` to hold the stream position of our handler
in memory. This is useful and easy to set up for an example, but `:ets`
is an in-memory cache which will clear out when the service stops. Production
storage for stream positions should be more persistent: e.g. a PostgreSQL
row or an mnesia record.
Outside of our supervision tree for the producer and consumer we'll create
an `:ets` table:
```elixir
:ets.new(:stream_positions, [:set, :public, :named_table])
```
And now add a function to our handler so the producer can restore a stream
position from this table
```elixir
def fetch_stream_position! do
case :ets.lookup(:stream_positions, __MODULE__) do
[{__MODULE__, position}] -> position
[] -> :start
end
end
```
And add that MFA to the producer's options:
```elixir
in_order_subscription_settings = [
name: MyProducer,
connection: MySpearClient,
stream_name: "some_stream",
restore_stream_position!: {MyHandler, :fetch_stream_position!, []},
subscribe_on_init?: {Function, :identity, [true]}
]
```
Now the producer will fetch the current stream position on start-up, so
even if the processes crash and need to be restarted, the handler will
keep a consistent position in the subscription.
Finally we'll store the stream position in the consumer. This should
only occur after the consumer has done any side-effects or processing
prone to failure. Ideally, the stream position should be persisted in a
transaction with any side-effects.
```elixir
@impl GenStage
def handle_events([event], _from, state) do
revision = event.metadata.stream_revision
IO.inspect(revision, label: "handling event no.")
if revision == 42 do
raise "aaaaah!"
end
:ets.insert(:stream_positions, {__MODULE__, revision})
{:noreply, [], state}
end
```
With this final change our consumer will read each event in the stream
in order, reach event 42, raise, retry event 42, raise, and then the
supervisor process will shut down. This is the essence of a blocking
subscription: once the pipeline reaches an event which it cannot process,
the entire pipeline is halted. This is generally an undesirable
behavior: a code-wise or manual change is usually needed to resolve the
blockage. Persistent subscriptions (see `Volley.PersistentSubscription`)
offer much more flexibility around ordering, batching, and concurrency
thanks to the asynchronous ack and nack workflow and the EventStoreDB's
parking system, but do not guarantee event ordering.
Altogether our handler looks like this:
```elixir
defmodule MyHandler do
use GenStage
def start_link(_) do
GenStage.start_link(__MODULE__, :ok)
end
@impl GenStage
def init(:ok) do
MyProducer |> GenServer.whereis() |> Process.link()
{:consumer, :ok, subscribe_to: [{MyProducer, max_demand: 1}]}
end
@impl GenStage
def handle_events([event], _from, state) do
revision = event.metadata.stream_revision
IO.inspect(revision, label: "handling event no.")
if revision == 42 do
raise "aaaaah!"
end
:ets.insert(:stream_positions, {__MODULE__, revision})
{:noreply, [], state}
end
def fetch_stream_position! do
case :ets.lookup(:stream_positions, __MODULE__) do
[{__MODULE__, position}] -> position
[] -> :start
end
end
end
```
## Configuration
* `:connection` - (required) a `t:Spear.Connection.t/0` to use for connecting
to the EventStoreDB
* `:stream_name` - (required) the EventStoreDB stream to read
* `:restore_stream_position!` - (required) a 0-arity function to invoke
to retrieve the stream position on start-up of the subscription.
This function should read from the source to which the consumer is writing
the stream position. A positive integer, a `t:Spear.Event.t/0`, or the
atoms `:start` or `:end` may be returned. `:start` starts the subscription
at the first event in the stream while `:end` immediately subscribes the
producer to the end of the stream. This function may either be a function
capture (or anonymous function) or an MFA tuple.
* `:subscribe_on_init?` - (default: `fn -> true end`) a 0-arity function to
invoke which determines whether this producer should start producing events
after starting up. If this function returns false, the producer must
be subscribed manually by sending a `:subscribe` message. This function
may either be a function capture (or anonymous function) or an MFA tuple.
* `:subscribe_after` - (default: `0`) a period in ms to wait until the
producer should query the `:subscribe_on_init?` function. This can be useful
if the `:subscribe_on_init?` function reaches out to an external service
which may not be immediately available on start-up.
* `:read_opts` - (default: `[]`) options to pass to `Spear.read_stream/3`.
The `:max_count` option may be worth tuning to achieve good performance:
a stream of very small events may benefit from the batch-reading of a large
max-count while a stream of very large events may be overwhelmed by a large
max-count and need smaller read sizes.
Remaining options are passed to `GenStage.start_link/3` and the
`{:producer, state, opts}` tuple in `c:GenStage.init/1`.
"""
@default_read_size 100
use GenStage
import Volley
require Logger
defstruct [
:connection,
:subscription,
:stream_name,
:restore_stream_position!,
:self,
demand: 0,
subscribe_after: 0,
subscribe_on_init?: {Volley, :yes, []},
producing?: false,
read_opts: []
]
@doc false
def start_link(opts) do
{start_link_opts, opts} = pop_genserver_opts(opts)
GenStage.start_link(__MODULE__, opts, start_link_opts)
end
@impl GenStage
def init(opts) do
self = Keyword.get(opts, :name, self())
{producer_opts, opts} = pop_producer_opts(opts)
state =
struct(__MODULE__, opts)
|> Map.put(:self, self)
Process.send_after(self(), :check_auto_subscribe, subscribe_after(state))
{:producer, state, producer_opts}
end
@impl GenStage
def handle_demand(demand, state) do
with true <- state.producing?,
nil <- state.subscription,
{:ok, events} <- read_stream(state, demand) do
{:noreply, put_self(events, state), save_position(state, events)}
else
false ->
{:noreply, [], update_in(state.demand, &(&1 + demand))}
subscription when is_reference(subscription) ->
{:noreply, [], update_in(state.demand, &(&1 + demand))}
{:done, events} ->
GenStage.async_info(self(), :switch_to_subscription)
{:noreply, put_self(events, state), save_position(state, events)}
# coveralls-ignore-start
{:error, reason} ->
{:stop, reason, state}
# coveralls-ignore-stop
end
end
@impl GenStage
def handle_info(:subscribe, state) do
handle_demand(state.demand, %__MODULE__{state | producing?: true})
end
def handle_info(:check_auto_subscribe, state) do
identifier = "#{inspect(__MODULE__)} (#{inspect(state.self)})"
if do_function(state.subscribe_on_init?) do
Logger.info("#{identifier} subscribing to '#{state.stream_name}'")
GenStage.async_info(self(), :subscribe)
else
# coveralls-ignore-start
Logger.info("#{identifier} did not subscribe to '#{state.stream_name}'")
# coveralls-ignore-stop
end
{:noreply, [], state}
end
def handle_info(:switch_to_subscription, state) do
case subscribe(state) do
{:ok, sub} ->
{:noreply, [], put_in(state.subscription, sub)}
# coveralls-ignore-start
{:error, reason} ->
{:stop, reason, state}
# coveralls-ignore-stop
end
end
def handle_info(%Spear.Event{} = event, state) do
{:noreply, [put_self(event, state)], save_position(state, event)}
end
# coveralls-ignore-start
def handle_info(%Spear.Filter.Checkpoint{}, state) do
{:noreply, [], state}
end
def handle_info({:eos, reason}, state) do
{:stop, reason, state}
end
# coveralls-ignore-stop
defp read_stream(state, demand) do
read_size = Keyword.get(state.read_opts, :max_count, @default_read_size)
read_size = max(demand, read_size)
position = position(state)
# number of messages to drop because reading is inclusive on the :from
drop_count = if position == :start, do: 0, else: 1
opts =
Map.get(state, :read_opts, [])
|> Keyword.merge(
from: position,
max_count: read_size + drop_count
)
with position when position != :end <- position,
{:ok, events} <-
Spear.read_stream(state.connection, state.stream_name, opts),
events when length(events) < read_size <-
events |> Enum.drop(drop_count) do
{:done, events}
else
:end ->
{:done, []}
events when is_list(events) ->
{:ok, events}
# coveralls-ignore-start
error ->
error
# coveralls-ignore-stop
end
end
defp subscribe(state) do
opts =
Map.get(state, :read_opts, [])
|> Keyword.merge(from: position(state))
Spear.subscribe(state.connection, self(), state.stream_name, opts)
end
defp position(%{position: position}), do: position
defp position(%{restore_stream_position!: restore_function}) do
do_function(restore_function)
end
defp save_position(state, []), do: state
defp save_position(state, events) when is_list(events) do
save_position(state, List.last(events))
end
defp save_position(state, event) do
Map.put(state, :position, event)
end
defp put_self(events, state) when is_list(events) do
Enum.map(events, &put_self(&1, state))
end
defp put_self(%Spear.Event{} = event, state) do
put_in(event.metadata[:producer], state.self)
end
# coveralls-ignore-start
defp subscribe_after(%__MODULE__{subscribe_after: nil}),
do: Enum.random(3_000..5_000)
defp subscribe_after(%__MODULE__{subscribe_after: subscribe_after}),
do: subscribe_after
defp do_function(function) when is_function(function, 0) do
function.()
end
defp do_function({m, f, a}) do
apply(m, f, a)
end
# coveralls-ignore-stop
end
|
lib/volley/in_order_subscription.ex
| 0.890622 | 0.911535 |
in_order_subscription.ex
|
starcoder
|
defmodule NYSETL.Engines.E4.Transfer do
alias NYSETL.Commcare
alias NYSETL.Extra
@doc """
Looks for an existing index case for the provided case_id and county_id, and returns it if found.
Otherwise, creates a new index case and lab results that mirror an index case and lab results that have been transferred
in commcare.
The data field of the new index case is the result of merging the data field from the old index case plus
the data from the newly-transferred case in commcare.
The data fields of the lab results belonging to the new index case ONLY get data from the old lab results,
not from the newly-transferred lab results in commcare, because the current thinking is that ECLRS data is
more correct for lab results.
"""
def find_or_create_transferred_index_case_and_lab_results(index_case, destination_case_data, destination_county_id) do
case Commcare.get_index_case(case_id: destination_case_data.case_id, county_id: destination_county_id) do
{:ok, transferred_index_case} ->
copy_missing_lab_results_from_to(index_case, transferred_index_case, destination_case_data)
{:ok, transferred_index_case, :found}
{:error, :not_found} ->
{:ok, new_index_case} = create_transferred_index_case(index_case, destination_case_data, destination_county_id)
copy_missing_lab_results_from_to(index_case, new_index_case, destination_case_data)
{:ok, new_index_case, :created}
end
end
defp create_transferred_index_case(original_index_case, destination_case_data, destination_county_id) do
Commcare.create_index_case(%{
data: Extra.Map.merge_empty_fields(destination_case_data.properties, original_index_case.data),
case_id: destination_case_data.case_id,
county_id: destination_county_id,
person_id: original_index_case.person_id
})
end
defp copy_missing_lab_results_from_to(source_case, destination_case, destination_case_data) do
preexisting_accession_numbers = Commcare.get_lab_results(destination_case) |> Enum.map(& &1.accession_number)
source_case
|> Commcare.get_lab_results()
|> Enum.reject(&Enum.member?(preexisting_accession_numbers, &1.accession_number))
|> Enum.each(&create_transferred_lab_result(destination_case, &1, destination_case_data))
end
defp create_transferred_lab_result(transferred_index_case, old_lab_result, destination_case_data) do
found_or_blank_case_id = find_commcare_case_id_by_accession_number(old_lab_result.accession_number, destination_case_data)
%{
data: old_lab_result.data,
index_case_id: transferred_index_case.id,
accession_number: old_lab_result.accession_number,
case_id: found_or_blank_case_id
}
|> Commcare.create_lab_result()
|> case do
{:ok, lab_result} -> {:ok, lab_result}
error -> throw(error)
end
end
defp find_commcare_case_id_by_accession_number(accession_number, %{data: %{"child_cases" => child_cases}})
when is_map(child_cases) do
child_cases
|> Enum.find_value(fn
{case_id, %{"case_type" => "lab_result", "properties" => %{"accession_number" => ^accession_number}} = _case_data} ->
case_id
_other ->
nil
end)
end
defp find_commcare_case_id_by_accession_number(_accession_number, _case_data), do: nil
end
|
lib/nys_etl/engines/e4/transfer.ex
| 0.521959 | 0.612484 |
transfer.ex
|
starcoder
|
defmodule Sparklinex.Bar do
alias Sparklinex.Bar.Options
alias Sparklinex.ChartData
alias Sparklinex.MogrifyDraw
def draw(data, spec = %Options{height: height, background_color: background_color}) do
spec_with_width = %{spec | width: width(data, spec)}
normalized_data = ChartData.normalize_data(data, :bar)
canvas = MogrifyDraw.create_canvas(spec_with_width.width, height, background_color)
canvas
|> draw_rectangles(normalized_data, spec_with_width)
|> draw_target_line(data, spec_with_width)
end
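# Usage sketch; the field values and colors are illustrative, with option names
# taken from the %Options{} patterns matched below:
#
#     spec = %Sparklinex.Bar.Options{height: 14, background_color: "white",
#                                    step: 2, bar_width: 3, upper: 3,
#                                    above_color: "red", below_color: "grey"}
#     canvas = Sparklinex.Bar.draw([1, 2, 4, 3], spec)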
defp width(data, %Options{step: step, bar_width: bar_width}) do
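# `/ 1` performs float division, coercing the computed width to a float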
(length(data) * (step + bar_width) + 2) / 1
end
defp draw_rectangles(canvas, data, spec = %Options{}) do
max = Enum.max(data)
data
|> Enum.with_index()
|> Enum.reduce(
canvas,
fn {value, index}, acc -> draw_rectangle(acc, index, value, max, spec) end
)
end
defp draw_rectangle(
canvas,
index,
value,
max_value,
spec = %Options{height: height, step: step, bar_width: 1}
) do
height_from_top = height - value / max_value * height
left_edge = index * (1 + step) + 1
canvas
|> MogrifyDraw.draw_line({{left_edge, height}, {left_edge, height_from_top}}, rectangle_color(value, spec))
end
defp draw_rectangle(
canvas,
index,
value,
max_value,
spec = %Options{height: height, step: step, bar_width: bar_width}
) do
height_from_top = height - value / max_value * height
left_edge = index * (bar_width + step) + 1
canvas
|> MogrifyDraw.set_line_color("transparent")
|> MogrifyDraw.rectangle(
{left_edge, height},
{left_edge + bar_width - 1, height_from_top},
rectangle_color(value, spec)
)
end
defp rectangle_color(value, %Options{upper: boundary, above_color: above_color})
when value >= boundary do
above_color
end
defp rectangle_color(value, %Options{upper: boundary, below_color: below_color})
when value < boundary do
below_color
end
defp draw_target_line(canvas, _data, %Options{target: nil}) do
canvas
end
defp draw_target_line(canvas, data, %Options{
height: height,
width: width,
target: target,
target_color: color
}) do
norm_value = ChartData.normalize_value(target, Enum.min(data), Enum.max(data))
adjusted_target_value = height - 3 - norm_value / (101.0 / (height - 4))
canvas
|> MogrifyDraw.draw_line({{-5, adjusted_target_value}, {width + 5, adjusted_target_value}}, color)
end
end
|
lib/sparklinex/bar.ex
| 0.704058 | 0.499268 |
bar.ex
|
starcoder
|
defmodule Cassandrax.Query do
@moduledoc """
Provides the query macros.
Queries are used to retrieve or manipulate data from a repository (see Cassandrax.Keyspace).
"""
alias Cassandrax.Query.Builder
@type t :: %__MODULE__{}
@limit_default 100
@per_partition_limit_default 100
defstruct schema: nil,
select: [],
distinct: [],
from: nil,
wheres: [],
limit: nil,
per_partition_limit: nil,
group_bys: [],
order_bys: [],
allow_filtering: false
defmacro select(queryable, select \\ []), do: Builder.build(:select, queryable, select)
defmacro where(queryable, where \\ []), do: Builder.build(:where, queryable, where)
defmacro limit(queryable, limit \\ @limit_default), do: Builder.build(:limit, queryable, limit)
defmacro order_by(queryable, order_by \\ []), do: Builder.build(:order_bys, queryable, order_by)
defmacro group_by(queryable, group_by \\ []), do: Builder.build(:group_bys, queryable, group_by)
defmacro distinct(queryable, distinct \\ []), do: Builder.build(:distinct, queryable, distinct)
defmacro allow_filtering(queryable), do: Builder.build(:allow_filtering, queryable, true)
defmacro per_partition_limit(queryable, per_partition_limit \\ @per_partition_limit_default),
do: Builder.build(:per_partition_limit, queryable, per_partition_limit)
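# Chaining the macros above (`User` is a hypothetical schema module):
#
#     import Cassandrax.Query
#
#     query =
#       User
#       |> allow_filtering()
#       |> where(id: 1)
#       |> order_by([:device_id])
#       |> limit(10)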
@doc """
A select query expression.
Selects the fields from the schema and any transformations that should be performed on the fields.
Any expression that is accepted in a query can be a select field.
Allows a list, tuple or a map. A full schema can also be selected. Only one select expression
allowed in a query. If there is no select expression, the full schema will be selected by
default.
Accepts a list of atoms where atoms refer to fields.
## Example
```
query = select(User, [:id])
%Cassandrax.Query{from: "users", schema: Cassandrax.User, select: [:id]} = query
```
"""
@callback select(queryable :: Cassandrax.Queryable.t(), select :: Keyword.t()) ::
Cassandrax.Query.t()
@doc """
A where query expression that works like an `AND` operator.
Used to filter the results. You can chain `where` expressions.
## Example
Single where clause.
```
query = where(User, id: 1)
```
You can chain where clauses.
```
query = User |> where(:id > 1) |> where(:user_name != "alice")
```
CassandraDB refuses to run certain queries for performance reasons, including some `where` clauses.
You may need to use `allow_filtering/0` to bypass this.
```
query = User |> allow_filtering() |> where(:id > 1) |> where(:user_name != "alice")
```
"""
@callback where(queryable :: Cassandrax.Queryable.t(), where :: Keyword.t()) ::
Cassandrax.Query.t()
@doc """
A limit query expression.
Limits the number of rows returned in the result. Requires an integer; field references are not allowed.
The default limit is 100.
Limit expressions are chainable; however, only the last limit expression takes effect.
## Example
```
query = limit(User, 200)
```
"""
@callback limit(queryable :: Cassandrax.Queryable.t(), limit :: Keyword.t()) ::
Cassandrax.Query.t()
@doc """
An order by query expression.
Orders the results by a given key or list of keys. `order_by` must be paired with a `where` clause that uses equality or `in`, and the table must be set up with a clustering order to support ordered queries.
## Example Table Setup
```
statement = [
"CREATE TABLE IF NOT EXISTS ",
"MyKeyspace.ordered_(",
"id int, ",
"device_id int, ",
"value text, ",
"PRIMARY KEY (id, device_id))",
"WITH CLUSTERING ORDER BY (device_id DESC)"
]
Cassandrax.cql(MyConn, statement)
```
## Example
```
query = User |> allow_filtering() |> where(:id == 1) |> order_by([:device_id])
```
"""
@callback order_by(queryable :: Cassandrax.Queryable.t(), order_by :: Keyword.t()) ::
Cassandrax.Query.t()
@doc """
A group by query expression.
Condenses into a single row all selected rows that share the same values for a set of columns.
Only available at the partition key level or at a clustering column level.
## Example
```
query = User |> allow_filtering() |> group_by([:id])
```
"""
@callback group_by(queryable :: Cassandrax.Queryable.t(), order_by :: Keyword.t()) ::
Cassandrax.Query.t()
@doc """
A distinct query expression.
Returns only the distinct records from the result. Works only with a list of partition key(s).
## Example
```
query = distinct(TestSchema, [:id])
```
"""
@callback distinct(queryable :: Cassandrax.Queryable.t(), distinct :: Keyword.t()) ::
Cassandrax.Query.t()
@doc """
A query expression that enables filtering in certain Cassandra queries.
CassandraDB refuses to run certain queries for performance reasons, including some `where` clauses.
You need to set `ALLOW FILTERING` to bypass this restriction. More details in CassandraDB docs.
## Example
```
query = User |> allow_filtering() |> where(:id > 1) |> where(:user_name != "alice")
```
"""
@callback allow_filtering(queryable :: Cassandrax.Queryable.t(), allow_filtering :: Keyword.t()) ::
Cassandrax.Query.t()
@doc """
A per partition limit expression controls the number of rows returned from each partition.
Cassandra returns only the first `per_partition_limit` rows (in clustering order) from each
partition, regardless of how many matching rows that partition contains.
More details in CassandraDB docs.
## Example
Default `per_partition_limit` is 100.
```
query = per_partition_limit(User)
```
Or you can set a custom `per_partition_limit`
```
query = per_partition_limit(User, 10)
```
"""
@callback per_partition_limit(
queryable :: Cassandrax.Queryable.t(),
per_partition_limit :: integer()
) :: Cassandrax.Query.t()
end
|
lib/cassandrax/query.ex
| 0.921534 | 0.901314 |
query.ex
|
starcoder
|
defmodule TicTacToe.Board do
@moduledoc """
Functions for interacting with a board.
"""
@type t :: triplet(triplet())
@type triplet(triple) :: [triple]
@type triplet :: [nil | player()]
@type player :: :player1 | :computer
@type position :: 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9
@type point :: {0 | 1 | 2, 0 | 1 | 2}
@spec new :: t()
def new do
[
[nil, nil, nil],
[nil, nil, nil],
[nil, nil, nil]
]
end
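@doc """
Places `player`'s mark at `position` (1-9, counted left-to-right, top-to-bottom).
Returns the board unchanged when the position is out of range or already taken.
## Examples
iex> TicTacToe.Board.new() |> TicTacToe.Board.receive_move(:player1, 5)
[[nil, nil, nil], [nil, :player1, nil], [nil, nil, nil]]
"""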
@spec receive_move(t(), player(), position()) :: t()
def receive_move(board, _, position) when position not in 1..9, do: board
def receive_move(board, player, position) do
point = indexes_from_position(position, board)
empty? = cell_empty?(board, point)
insert_into(board, point, player, empty?)
end
@spec insert_into(t(), point(), player(), boolean()) :: t()
defp insert_into(board, _, _, false), do: board
defp insert_into(board, point = {row_index, _}, player, _empty) do
new_row = build_new_row(board, point, player)
board |> List.replace_at(row_index, new_row)
end
@spec build_new_row(t(), point(), player()) :: t()
defp build_new_row(board, {row_index, column_index}, player) do
board
|> Enum.at(row_index)
|> List.replace_at(column_index, player)
end
@spec cell_empty?(t(), point()) :: boolean()
defp cell_empty?(board, {row_index, column_index}) do
board
|> Enum.at(row_index)
|> Enum.at(column_index) == nil
end
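@doc """
Converts a 1-based board `position` into a `{row_index, column_index}` point.
## Examples
iex> TicTacToe.Board.indexes_from_position(5, TicTacToe.Board.new())
{1, 1}
"""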
@spec indexes_from_position(position(), t()) :: point()
def indexes_from_position(position, board) do
row_index = div(position - 1, size(board))
column_index = rem(position - 1, size(board))
{row_index, column_index}
end
@spec position_from_indexes(point(), t()) :: position()
def position_from_indexes({row_index, column_index}, board) do
row_index * size(board) + column_index + 1
end
@spec row(t(), position()) :: triplet()
def row(board, position), do: board |> Enum.at(position - 1)
@spec rows(t()) :: t()
def rows(board), do: board
@spec columns(t()) :: t()
def columns(board), do: board |> transpose |> rows
@spec column(t(), position()) :: triplet()
def column(board, position), do: board |> transpose |> row(position)
@spec diagonal(t(), 1 | 2) :: triplet()
def diagonal(board, 1) do
for x <- 0..(size(board) - 1), do: board |> Enum.at(x) |> Enum.at(x)
end
def diagonal(board, 2) do
for {x, y} <- Enum.zip(0..(size(board) - 1), (size(board) - 1)..0),
do: board |> Enum.at(x) |> Enum.at(y)
end
@spec diagonals(t()) :: [triplet()]
def diagonals(board), do: [diagonal(board, 1), diagonal(board, 2)]
@spec empty_cells(t()) :: [point()]
def empty_cells(board) do
for x <- 0..(size(board) - 1),
y <- 0..(size(board) - 1),
board |> Enum.at(x) |> Enum.at(y) == nil,
do: {x, y}
end
@spec triplets(t()) :: [triplet()]
def triplets(board) do
rows(board) ++ columns(board) ++ diagonals(board)
end
@spec size(t()) :: non_neg_integer()
def size(board) do
length(board)
end
@spec transpose(t()) :: t()
defp transpose(board) do
board
|> Enum.zip()
|> Enum.map(&Tuple.to_list/1)
end
end
|
lib/tic_tac_toe/board.ex
| 0.8899 | 0.682871 |
board.ex
|
starcoder
|
defmodule ExploringMars.Mission.Direction do
@moduledoc """
This module defines functions that create and operate on directions in the
probe's coordinate space.
This module should change if the coordinate representation used in the
problem changes in degrees of freedom - for instance, if we decide the probe
could move in 3D space rather than 2D, or if the probe could move in 8
directions instead of 4. In that case, we would need to also change the
`Coordinate` module in order to handle the new directions.
This coupling between the `Direction` and `Coordinate` modules is, for now,
acceptable, because the `Direction`s to be handled are few. If we were to
choose a case where we would have more possible `Direction`s, it might
be worthwhile to create an `Axis` module which would specify the *axes of
movement* available to the probe, and make both `Direction` and `Coordinate`
modules depend on the `Axis` module - the `Direction` module would produce
which axis or axes correspond to each direction, and the `Coordinate` module
would know how to update the coordinate according to motion on each axis.
That way, we would mitigate the amount of code that should be changed in the
case of a change in coordinate system degrees of freedom.
"""
@typedoc """
A direction is represented by an atom - either :N, :E, :W or :S representing
North, East, West and South respectively. Use as `Direction.t`.
"""
@type t :: :N | :E | :W | :S
@doc """
Takes a `string` and tries to parse it as a direction.
## Examples
iex> Direction.from_string("N")
{:ok, :N}
iex> Direction.from_string("Not a direction")
{:no_parse, "Not a direction"}
"""
@spec from_string(String.t()) :: {:ok, t} | {:no_parse, String.t()}
def from_string(string) do
case string do
"N" -> {:ok, :N}
"E" -> {:ok, :E}
"W" -> {:ok, :W}
"S" -> {:ok, :S}
_ -> {:no_parse, string}
end
end
@doc """
Takes a `direction` and returns the direction obtained by turning left.
## Examples
iex> Direction.turn_left(:N)
:W
iex> Direction.turn_left(:Not_a_direction)
** (ArgumentError) argument is not a direction
"""
@spec turn_left(t) :: t
def turn_left(direction) do
case direction do
:N -> :W
:E -> :N
:W -> :S
:S -> :E
_ -> raise ArgumentError, message: "argument is not a direction"
end
end
@doc """
Takes a `direction` and returns the direction obtained by turning right.
## Examples
iex> Direction.turn_right(:N)
:E
iex> Direction.turn_right(:Not_a_direction)
** (ArgumentError) argument is not a direction
"""
@spec turn_right(t) :: t
def turn_right(direction) do
case direction do
:N -> :E
:E -> :S
:W -> :N
:S -> :W
_ -> raise ArgumentError, message: "argument is not a direction"
end
end
@doc """
Converts a `Direction.t` into a representation suitable for user-facing output.
## Examples
iex> Direction.pretty_print(:N)
"N"
"""
@spec pretty_print(t) :: String.t()
def pretty_print(direction), do: Atom.to_string(direction)
end
|
lib/exploring_mars/mission/direction.ex
| 0.924993 | 0.960988 |
direction.ex
|
starcoder
|
defmodule Plaid.Investments.Transactions do
@moduledoc """
Functions for Plaid `investments/transactions` endpoints.
"""
import Plaid, only: [make_request_with_cred: 4, validate_cred: 1]
alias Plaid.Utils
@derive Jason.Encoder
defstruct accounts: [],
item: nil,
securities: [],
investment_transactions: [],
total_investment_transactions: nil,
request_id: nil
@type t :: %__MODULE__{
accounts: [Plaid.Accounts.Account.t()],
item: Plaid.Item.t(),
securities: [Plaid.Investments.Security.t()],
investment_transactions: [Plaid.Investments.Transactions.Transaction.t()],
total_investment_transactions: integer,
request_id: String.t()
}
@type params :: %{required(atom) => String.t() | map}
@type config :: %{required(atom) => String.t()}
@endpoint :"investments/transactions"
defmodule Transaction do
@moduledoc """
Plaid Investments Transaction data structure.
"""
@derive Jason.Encoder
defstruct investment_transaction_id: nil,
account_id: nil,
security_id: nil,
date: nil,
name: nil,
quantity: nil,
amount: nil,
price: nil,
fees: nil,
type: nil,
iso_currency_code: nil,
unofficial_currency_code: nil,
cancel_transaction_id: nil
@type t :: %__MODULE__{
investment_transaction_id: String.t(),
account_id: String.t(),
security_id: String.t() | nil,
date: String.t(),
name: String.t(),
quantity: float,
amount: float,
price: float,
fees: float | nil,
type: String.t(),
iso_currency_code: String.t() | nil,
unofficial_currency_code: String.t() | nil,
cancel_transaction_id: String.t() | nil
}
end
@doc """
Gets user-authorized transaction data for investment accounts.
Parameters
```
%{
access_token: "<PASSWORD>",
start_date: "2017-01-01",
end_date: "2017-03-31",
options: %{
account_ids: ["<KEY>"],
count: 20,
offset: 0
}
}
```
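A hypothetical call, assuming `params` is built as above with real credentials:
```
{:ok, %Plaid.Investments.Transactions{}} = Plaid.Investments.Transactions.get(params)
```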
"""
@spec get(params, config | nil) ::
{:ok, Plaid.Investments.Transactions.t()} | {:error, Plaid.Error.t()}
def get(params, config \\ %{}) do
config = validate_cred(config)
endpoint = "#{@endpoint}/get"
make_request_with_cred(:post, endpoint, config, params)
|> Utils.handle_resp(@endpoint)
end
end
|
lib/plaid/investments/transactions.ex
| 0.800926 | 0.611164 |
transactions.ex
|
starcoder
|