| code (string, 114–1.05M chars) | path (string, 3–312 chars) | quality_prob (float64, 0.5–0.99) | learning_prob (float64, 0.2–1) | filename (string, 3–168 chars) | kind (string, 1 class: starcoder) |
|---|---|---|---|---|---|
defmodule Marbles7 do
@behaviour Cadex.Behaviour
@initial_conditions %{
box_A: 11,
box_B: 0
}
@partial_state_update_blocks [
%Cadex.Types.PartialStateUpdateBlock{
policies: [
:robot_1,
:robot_2
],
variables: [
{:box_A, :a},
{:box_B, :a}
]
},
%Cadex.Types.PartialStateUpdateBlock{
policies: [
:robot_1,
:robot_2
],
variables: [
{:box_A, :b},
{:box_B, :b}
]
}
]
@simulation_parameters %Cadex.Types.SimulationParameters{
T: 0..30,
N: 50
}
@impl true
def config do
%Cadex.Types.State{
sim: %{
simulation_parameters: @simulation_parameters,
partial_state_update_blocks: @partial_state_update_blocks
},
current_state: @initial_conditions
}
end
@robots_probabilities [0.5, 1/3]
def policy_helper(_params, _substep, _previous_states, current_state, capacity \\ 1) do
add_to_A = cond do
current_state[:box_A] > current_state[:box_B] -> -capacity
current_state[:box_A] < current_state[:box_B] -> capacity
true -> 0
end
{:ok, %{add_to_A: add_to_A, add_to_B: -add_to_A}}
end
@impl true
def policy(:robot_1, params, substep, previous_states, current_state) do
  robot_id = 1
  if :rand.uniform() < Enum.at(@robots_probabilities, robot_id - 1) do
    policy_helper(params, substep, previous_states, current_state)
  else
    {:ok, %{add_to_A: 0, add_to_B: 0}}
  end
end
@impl true
def policy(:robot_2, params, substep, previous_states, current_state) do
  robot_id = 2
  if :rand.uniform() < Enum.at(@robots_probabilities, robot_id - 1) do
    policy_helper(params, substep, previous_states, current_state)
  else
    {:ok, %{add_to_A: 0, add_to_B: 0}}
  end
end
@impl true
def update(_var = {:box_A, :a}, _params, _substep, _previous_states, _current_state, input) do
%{add_to_A: add_to_A} = input
increment = &(&1 + add_to_A)
{:ok, increment}
end
@impl true
def update(_var = {:box_B, :a}, _params, _substep, _previous_states, _current_state, input) do
%{add_to_B: add_to_B} = input
increment = &(&1 + add_to_B)
{:ok, increment}
end
@impl true
def update(_var = {:box_A, :b}, _params, _substep, _previous_states, _current_state, input) do
%{add_to_A: add_to_A} = input
increment = &(&1 + add_to_A)
{:ok, increment}
end
@impl true
def update(_var = {:box_B, :b}, _params, _substep, _previous_states, _current_state, input) do
%{add_to_B: add_to_B} = input
increment = &(&1 + add_to_B)
{:ok, increment}
end
end
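Because the policies and updates are pure, the model can be sanity-checked without a Cadex runner. A minimal sketch, using only functions defined above and the `%Cadex.Types.State{}` struct as constructed in `config/0`:

# With @initial_conditions, box_A (11) > box_B (0), so one marble moves from A to B.
%Cadex.Types.State{current_state: state} = Marbles7.config()
{:ok, %{add_to_A: -1, add_to_B: 1}} = Marbles7.policy_helper(nil, nil, [], state)
# update/6 returns a closure that folds the signal into a box count: 11 - 1 = 10.
{:ok, increment} = Marbles7.update({:box_A, :a}, nil, nil, nil, nil, %{add_to_A: -1})
10 = increment.(state[:box_A])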
| path: lib/examples/marbles_7.ex | quality_prob: 0.722429 | learning_prob: 0.614192 | filename: marbles_7.ex | kind: starcoder |
defmodule ControlNode.Namespace.Initialize do
@moduledoc false
@state_name :initialize
# `Initialize` is the first state of the namespace FSM
require Logger
alias ControlNode.{Release, Namespace}
alias Namespace.Workflow
def callback_mode, do: :handle_event_function
def handle_event(:internal, current_state, :initialize, data)
when current_state in [:observe_release_state, :connect_release_state] do
%Workflow.Data{
namespace_spec: namespace_spec,
release_spec: release_spec,
release_state: release_state
} = data
release_state =
Release.initialize_state(
release_spec,
release_state.host,
namespace_spec.release_cookie
)
data = %Workflow.Data{data | release_state: release_state}
{next_state, actions} = Namespace.Workflow.next(@state_name, current_state, nil)
{:next_state, next_state, data, actions}
end
def handle_event(:internal, :load_release_state, :initialize, data) do
Logger.info("Loading release state")
%Workflow.Data{namespace_spec: namespace_spec, release_spec: release_spec} = data
%Release.State{} =
release_state =
Release.initialize_state(
release_spec,
data.release_state.host,
namespace_spec.release_cookie
)
Logger.info("Loaded release state", release_state: inspect(release_state))
data = %Workflow.Data{data | release_state: release_state}
if is_running?(release_state) do
%Release.State{version: version} = release_state
Logger.info("Release version #{version} running")
{state, actions} = Namespace.Workflow.next(@state_name, :running, version)
{:next_state, state, data, actions}
else
Logger.info("Release not running")
{state, actions} = Namespace.Workflow.next(@state_name, :not_running, :ignore)
{:next_state, state, data, actions}
end
end
def handle_event(
:internal,
{:load_release_state, version},
:initialize,
%Workflow.Data{deploy_attempts: deploy_attempts} = data
)
when deploy_attempts >= 5 do
Logger.error("Depoyment attempts exhausted, failed to deploy release version #{version}")
{state, actions} = Namespace.Workflow.next(@state_name, :not_running, :ignore)
data = %Workflow.Data{data | deploy_attempts: 0}
{:next_state, state, data, actions}
end
def handle_event(:internal, {:load_release_state, version}, :initialize, data) do
Logger.info("Loading release state, expected version #{version}")
%Workflow.Data{
namespace_spec: %Namespace.Spec{release_cookie: cookie},
release_spec: release_spec,
release_state: %Release.State{host: host_spec} = curr_release_state
} = data
# Flush the current release state
Release.terminate_state(release_spec, curr_release_state)
# Build a new release state view
%Release.State{version: current_version} =
release_state = Release.initialize_state(release_spec, host_spec, cookie)
{namespace_status, new_deploy_attempts} =
if is_current_version?(release_state, version) do
Logger.info("Release state loaded, current version #{version}")
{:running, 0}
else
Logger.warn("Release state loaded, expected version #{version} found #{current_version}")
{:partially_running, data.deploy_attempts}
end
data = %Workflow.Data{
data
| release_state: release_state,
deploy_attempts: new_deploy_attempts
}
{state, actions} = Namespace.Workflow.next(@state_name, namespace_status, version)
{:next_state, state, data, actions}
end
def handle_event({_call, from}, _event, _state, _data),
do: {:keep_state_and_data, [{:reply, from, :busy}]}
def handle_event(_any, _event, _state, _data), do: {:keep_state_and_data, []}
defp is_current_version?(%Release.State{version: version}, new_version) do
version == new_version
end
defp is_running?(%Release.State{status: status}) do
status == :running
end
end
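The clauses above follow `:gen_statem`'s `:handle_event_function` callback mode: each step returns `{:next_state, state, data, actions}`, and the `actions` list is how the next internal event is queued. A hedged sketch of the return shape only (standard `:gen_statem` actions; the version string is hypothetical):

# Hypothetical return value that would re-enter this module's
# {:load_release_state, version} clause as the next internal event.
{:next_state, :initialize, data, [{:next_event, :internal, {:load_release_state, "1.2.0"}}]}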
| path: lib/control_node/namespace/initialize.ex | quality_prob: 0.694613 | learning_prob: 0.569075 | filename: initialize.ex | kind: starcoder |
defmodule AWS.KinesisAnalyticsV2 do
@moduledoc """
Amazon Kinesis Data Analytics is a fully managed service that you can use to
process and analyze streaming data using Java, SQL, or Scala.
The service enables you to quickly author and run Java, SQL, or Scala code
against streaming sources to perform time series analytics, feed real-time
dashboards, and create real-time metrics.
"""
@doc """
Adds an Amazon CloudWatch log stream to monitor application configuration
errors.
"""
def add_application_cloud_watch_logging_option(client, input, options \\ []) do
request(client, "AddApplicationCloudWatchLoggingOption", input, options)
end
@doc """
Adds a streaming source to your SQL-based Kinesis Data Analytics application.
You can add a streaming source when you create an application, or you can use
this operation to add a streaming source after you create an application. For
more information, see `CreateApplication`.
Any configuration update, including adding a streaming source using this
operation, results in a new version of the application. You can use the
`DescribeApplication` operation to find the current application version.
"""
def add_application_input(client, input, options \\ []) do
request(client, "AddApplicationInput", input, options)
end
@doc """
Adds an `InputProcessingConfiguration` to a SQL-based Kinesis Data Analytics
application.
An input processor pre-processes records on the input stream before the
application's SQL code executes. Currently, the only input processor available
is [AWS Lambda](https://docs.aws.amazon.com/lambda/).
"""
def add_application_input_processing_configuration(client, input, options \\ []) do
request(client, "AddApplicationInputProcessingConfiguration", input, options)
end
@doc """
Adds an external destination to your SQL-based Kinesis Data Analytics
application.
If you want Kinesis Data Analytics to deliver data from an in-application stream
within your application to an external destination (such as a Kinesis data
stream, a Kinesis Data Firehose delivery stream, or an AWS Lambda function), you
add the relevant configuration to your application using this operation. You can
configure one or more outputs for your application. Each output configuration
maps an in-application stream and an external destination.
You can use one of the output configurations to deliver data from your
in-application error stream to an external destination so that you can analyze
the errors.
Any configuration update, including adding a streaming source using this
operation, results in a new version of the application. You can use the
`DescribeApplication` operation to find the current application version.
"""
def add_application_output(client, input, options \\ []) do
request(client, "AddApplicationOutput", input, options)
end
@doc """
Adds a reference data source to an existing SQL-based Kinesis Data Analytics
application.
Kinesis Data Analytics reads reference data (that is, an Amazon S3 object) and
creates an in-application table within your application. In the request, you
provide the source (S3 bucket name and object key name), name of the
in-application table to create, and the necessary mapping information that
describes how data in an Amazon S3 object maps to columns in the resulting
in-application table.
"""
def add_application_reference_data_source(client, input, options \\ []) do
request(client, "AddApplicationReferenceDataSource", input, options)
end
@doc """
Adds a Virtual Private Cloud (VPC) configuration to the application.
Applications can use VPCs to store and access resources securely.
Note the following about VPC configurations for Kinesis Data Analytics
applications:
* VPC configurations are not supported for SQL applications.
* When a VPC is added to a Kinesis Data Analytics application, the
application can no longer be accessed from the Internet directly. To enable
Internet access to the application, add an Internet gateway to your VPC.
"""
def add_application_vpc_configuration(client, input, options \\ []) do
request(client, "AddApplicationVpcConfiguration", input, options)
end
@doc """
Creates a Kinesis Data Analytics application.
For information about creating a Kinesis Data Analytics application, see
[Creating an Application](https://docs.aws.amazon.com/kinesisanalytics/latest/java/getting-started.html).
"""
def create_application(client, input, options \\ []) do
request(client, "CreateApplication", input, options)
end
@doc """
Creates a snapshot of the application's state data.
"""
def create_application_snapshot(client, input, options \\ []) do
request(client, "CreateApplicationSnapshot", input, options)
end
@doc """
Deletes the specified application.
Kinesis Data Analytics halts application execution and deletes the application.
"""
def delete_application(client, input, options \\ []) do
request(client, "DeleteApplication", input, options)
end
@doc """
Deletes an Amazon CloudWatch log stream from a Kinesis Data Analytics
application.
"""
def delete_application_cloud_watch_logging_option(client, input, options \\ []) do
request(client, "DeleteApplicationCloudWatchLoggingOption", input, options)
end
@doc """
Deletes an `InputProcessingConfiguration` from an input.
"""
def delete_application_input_processing_configuration(client, input, options \\ []) do
request(client, "DeleteApplicationInputProcessingConfiguration", input, options)
end
@doc """
Deletes the output destination configuration from your SQL-based Kinesis Data
Analytics application's configuration.
Kinesis Data Analytics will no longer write data from the corresponding
in-application stream to the external output destination.
"""
def delete_application_output(client, input, options \\ []) do
request(client, "DeleteApplicationOutput", input, options)
end
@doc """
Deletes a reference data source configuration from the specified SQL-based
Kinesis Data Analytics application's configuration.
If the application is running, Kinesis Data Analytics immediately removes the
in-application table that you created using the
`AddApplicationReferenceDataSource` operation.
"""
def delete_application_reference_data_source(client, input, options \\ []) do
request(client, "DeleteApplicationReferenceDataSource", input, options)
end
@doc """
Deletes a snapshot of application state.
"""
def delete_application_snapshot(client, input, options \\ []) do
request(client, "DeleteApplicationSnapshot", input, options)
end
@doc """
Removes a VPC configuration from a Kinesis Data Analytics application.
"""
def delete_application_vpc_configuration(client, input, options \\ []) do
request(client, "DeleteApplicationVpcConfiguration", input, options)
end
@doc """
Returns information about a specific Kinesis Data Analytics application.
If you want to retrieve a list of all applications in your account, use the
`ListApplications` operation.
"""
def describe_application(client, input, options \\ []) do
request(client, "DescribeApplication", input, options)
end
@doc """
Returns information about a snapshot of application state data.
"""
def describe_application_snapshot(client, input, options \\ []) do
request(client, "DescribeApplicationSnapshot", input, options)
end
@doc """
Infers a schema for a SQL-based Kinesis Data Analytics application by evaluating
sample records on the specified streaming source (Kinesis data stream or Kinesis
Data Firehose delivery stream) or Amazon S3 object.
In the response, the operation returns the inferred schema and also the sample
records that the operation used to infer the schema.
You can use the inferred schema when configuring a streaming source for your
application. When you create an application using the Kinesis Data Analytics
console, the console uses this operation to infer a schema and show it in the
console user interface.
"""
def discover_input_schema(client, input, options \\ []) do
request(client, "DiscoverInputSchema", input, options)
end
@doc """
Lists information about the current application snapshots.
"""
def list_application_snapshots(client, input, options \\ []) do
request(client, "ListApplicationSnapshots", input, options)
end
@doc """
Returns a list of Kinesis Data Analytics applications in your account.
For each application, the response includes the application name, Amazon
Resource Name (ARN), and status.
If you want detailed information about a specific application, use
`DescribeApplication`.
"""
def list_applications(client, input, options \\ []) do
request(client, "ListApplications", input, options)
end
@doc """
Retrieves the list of key-value tags assigned to the application.
For more information, see [Using Tagging](https://docs.aws.amazon.com/kinesisanalytics/latest/java/how-tagging.html).
"""
def list_tags_for_resource(client, input, options \\ []) do
request(client, "ListTagsForResource", input, options)
end
@doc """
Starts the specified Kinesis Data Analytics application.
After creating an application, you must exclusively call this operation to start
your application.
"""
def start_application(client, input, options \\ []) do
request(client, "StartApplication", input, options)
end
@doc """
Stops the application from processing data.
You can stop an application only if it is in the running state. You can use the
`DescribeApplication` operation to find the application state.
"""
def stop_application(client, input, options \\ []) do
request(client, "StopApplication", input, options)
end
@doc """
Adds one or more key-value tags to a Kinesis Data Analytics application.
Note that the maximum number of application tags includes system tags. The
maximum number of user-defined application tags is 50. For more information, see
[Using Tagging](https://docs.aws.amazon.com/kinesisanalytics/latest/java/how-tagging.html).
"""
def tag_resource(client, input, options \\ []) do
request(client, "TagResource", input, options)
end
@doc """
Removes one or more tags from a Kinesis Data Analytics application.
For more information, see [Using Tagging](https://docs.aws.amazon.com/kinesisanalytics/latest/java/how-tagging.html).
"""
def untag_resource(client, input, options \\ []) do
request(client, "UntagResource", input, options)
end
@doc """
Updates an existing Kinesis Data Analytics application.
Using this operation, you can update application code, input configuration, and
output configuration.
Kinesis Data Analytics updates the `ApplicationVersionId` each time you update
your application.
You cannot update the `RuntimeEnvironment` of an existing application. If you
need to update an application's `RuntimeEnvironment`, you must delete the
application and create it again.
"""
def update_application(client, input, options \\ []) do
request(client, "UpdateApplication", input, options)
end
@spec request(AWS.Client.t(), binary(), map(), list()) ::
{:ok, map() | nil, map()}
| {:error, term()}
defp request(client, action, input, options) do
client = %{client | service: "kinesisanalytics"}
host = build_host("kinesisanalytics", client)
url = build_url(host, client)
headers = [
{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "KinesisAnalytics_20180523.#{action}"}
]
payload = encode!(client, input)
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
post(client, url, payload, headers, options)
end
defp post(client, url, payload, headers, options) do
case AWS.Client.request(client, :post, url, payload, headers, options) do
{:ok, %{status_code: 200, body: body} = response} ->
body = if body != "", do: decode!(client, body)
{:ok, body, response}
{:ok, response} ->
{:error, {:unexpected_response, response}}
error = {:error, _reason} -> error
end
end
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
endpoint
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
defp encode!(client, payload) do
AWS.Client.encode!(client, payload, :json)
end
defp decode!(client, payload) do
AWS.Client.decode!(client, payload, :json)
end
end
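Every public function above delegates to `request/4`, so calls share one shape: a client plus the action's input map. A hedged sketch (assumes aws-elixir's `AWS.Client.create/3` constructor; the credentials and application name are placeholders):

client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")
# Input keys follow the KinesisAnalytics_20180523 JSON API.
case AWS.KinesisAnalyticsV2.describe_application(client, %{"ApplicationName" => "my-app"}) do
  {:ok, body, _response} -> body
  {:error, reason} -> reason
end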
| path: lib/aws/generated/kinesis_analytics_v2.ex | quality_prob: 0.91006 | learning_prob: 0.694575 | filename: kinesis_analytics_v2.ex | kind: starcoder |
defmodule TzWorld do
@moduledoc """
Resolve a timezone name from coordinates.
"""
alias Geo.{Point, PointZ}
import TzWorld.Guards
@type backend :: module()
@reload_backends [
TzWorld.Backend.Memory,
TzWorld.Backend.Dets,
TzWorld.Backend.DetsWithIndexCache,
TzWorld.Backend.Ets,
TzWorld.Backend.EtsWithIndexCache,
]
@doc """
Returns the installed version of time
zone data
## Example
iex> TzWorld.version
{:ok, "2020a"}
"""
@spec version :: {:ok, String.t()} | {:error, :enoent}
def version do
fetch_backend().version()
end
@doc """
Reload the timezone geometry data.
This allows for the data to be reloaded,
typically with a new release, without
restarting the application.
"""
def reload_timezone_data do
Enum.map(@reload_backends, fn backend -> apply(backend, :reload_timezone_data, []) end)
end
@doc """
Returns the *first* timezone name found for the given
coordinates specified as either a `Geo.Point`,
a `Geo.PointZ` or a tuple `{lng, lat}`
## Arguments
* `point` is a `Geo.Point.t()` a `Geo.PointZ.t()` or
a tuple `{lng, lat}`
* `backend` is any backend access module.
## Returns
* `{:ok, timezone}` or
* `{:error, :time_zone_not_found}`
## Notes
Note that the point is always expressed as
`lng` followed by `lat`.
## Examples
iex> TzWorld.timezone_at(%Geo.Point{coordinates: {3.2, 45.32}})
{:ok, "Europe/Paris"}
iex> TzWorld.timezone_at({3.2, 45.32})
{:ok, "Europe/Paris"}
iex> TzWorld.timezone_at({0.0, 0.0})
{:error, :time_zone_not_found}
The algorithm starts by filtering out timezones whose bounding
box does not contain the given point.
Once filtered, the *first* timezone which contains the given
point is returned, or an error tuple if none of the
timezones match.
In rare cases, typically due to territorial disputes,
more than one timezone may apply to a given location.
This function returns the first time zone that matches.
"""
@spec timezone_at(Geo.Point.t(), backend) ::
{:ok, String.t()} | {:error, atom}
def timezone_at(point, backend \\ fetch_backend())
def timezone_at(%Point{} = point, backend) when is_atom(backend) do
backend.timezone_at(point)
end
@spec timezone_at(Geo.PointZ.t(), backend) ::
{:ok, String.t()} | {:error, atom}
def timezone_at(%PointZ{coordinates: {lng, lat, _alt}}, backend) when is_atom(backend) do
point = %Point{coordinates: {lng, lat}}
backend.timezone_at(point)
end
@spec timezone_at({lng :: number, lat :: number}, backend) ::
{:ok, String.t()} | {:error, atom}
def timezone_at({lng, lat}, backend) when is_lng(lng) and is_lat(lat) do
point = %Geo.Point{coordinates: {lng, lat}}
backend.timezone_at(point)
end
@doc """
Returns all timezone names found for the given
coordinates specified as either a `Geo.Point`,
a `Geo.PointZ` or a tuple `{lng, lat}`
## Arguments
* `point` is a `Geo.Point.t()` a `Geo.PointZ.t()` or
a tuple `{lng, lat}`
* `backend` is any backend access module.
## Returns
* `{:ok, timezones}` where `timezones` is a
(possibly empty) list of time zone names
## Notes
Note that the point is always expressed as
`lng` followed by `lat`.
## Examples
iex> TzWorld.all_timezones_at(%Geo.Point{coordinates: {3.2, 45.32}})
{:ok, ["Europe/Paris"]}
iex> TzWorld.all_timezones_at({3.2, 45.32})
{:ok, ["Europe/Paris"]}
iex> TzWorld.all_timezones_at({0.0, 0.0})
{:ok, []}
The algorithm starts by filtering out timezones whose bounding
box does not contain the given point.
Once filtered, all timezones that contain the given
point are returned; the list is empty if none of the
timezones match.
In rare cases, typically due to territorial disputes,
more than one timezone may apply to a given location.
This function returns all time zones that match.
"""
@spec all_timezones_at(Geo.Point.t(), backend) ::
{:ok, [String.t()]}
def all_timezones_at(point, backend \\ fetch_backend())
def all_timezones_at(%Point{} = point, backend) when is_atom(backend) do
backend.all_timezones_at(point)
end
@spec all_timezones_at(Geo.PointZ.t(), backend) ::
{:ok, [String.t()]}
def all_timezones_at(%PointZ{coordinates: {lng, lat, _alt}}, backend) when is_atom(backend) do
point = %Point{coordinates: {lng, lat}}
backend.all_timezones_at(point)
end
@spec all_timezones_at({lng :: number, lat :: number}, backend) ::
{:ok, [String.t()]}
def all_timezones_at({lng, lat}, backend) when is_lng(lng) and is_lat(lat) do
point = %Geo.Point{coordinates: {lng, lat}}
backend.all_timezones_at(point)
end
@doc false
def contains?(%Geo.MultiPolygon{} = multi_polygon, %Geo.Point{} = point) do
multi_polygon.coordinates
|> Enum.any?(fn polygon -> contains?(%Geo.Polygon{coordinates: polygon}, point) end)
end
def contains?(%Geo.Polygon{coordinates: [envelope | holes]}, %Geo.Point{coordinates: point}) do
interior?(envelope, point) && disjoint?(holes, point)
end
def contains?(bounding_boxes, point) when is_list(bounding_boxes) do
Enum.any?(bounding_boxes, &contains?(&1, point))
end
defp interior?(ring, {px, py}) do
ring = for {x, y} <- ring, do: {x - px, y - py}
crosses = count_crossing(ring)
rem(crosses, 2) == 1
end
defp disjoint?(rings, point) do
Enum.all?(rings, fn ring -> !interior?(ring, point) end)
end
defp count_crossing([_]), do: 0
defp count_crossing([{ax, ay}, {bx, by} | rest]) do
crosses = count_crossing([{bx, by} | rest])
if ay > 0 != by > 0 && (ax * by - bx * ay) / (by - ay) > 0 do
crosses + 1
else
crosses
end
end
@backend_precedence [
TzWorld.Backend.EtsWithIndexCache,
TzWorld.Backend.Memory,
TzWorld.Backend.DetsWithIndexCache,
TzWorld.Backend.Ets,
TzWorld.Backend.Dets
]
def fetch_backend do
Enum.find(@backend_precedence, &Process.whereis/1) ||
raise(RuntimeError,
"No TzWorld backend appears to be running. " <>
"please add one of #{inspect @backend_precedence} to your supervision tree"
)
end
end
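`interior?/2` and `count_crossing/1` implement ray casting: the ring is translated so the query point sits at the origin, crossings of the positive x-axis are counted, and an odd count means the point is inside. Since `contains?/2` is public (though `@doc false`), the geometry can be checked in isolation with a hand-built polygon:

# A 4x4 square with a 1x1 hole; rings close on their first vertex as in GeoJSON.
ring = [{0.0, 0.0}, {4.0, 0.0}, {4.0, 4.0}, {0.0, 4.0}, {0.0, 0.0}]
hole = [{1.0, 1.0}, {2.0, 1.0}, {2.0, 2.0}, {1.0, 2.0}, {1.0, 1.0}]
square = %Geo.Polygon{coordinates: [ring, hole]}
true = TzWorld.contains?(square, %Geo.Point{coordinates: {3.0, 3.0}})
false = TzWorld.contains?(square, %Geo.Point{coordinates: {1.5, 1.5}}) # inside the hole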
| path: lib/tz_world.ex | quality_prob: 0.92607 | learning_prob: 0.640833 | filename: tz_world.ex | kind: starcoder |
defmodule PublicSuffix do
import PublicSuffix.{RemoteFileFetcher, RulesParser}
@moduledoc """
Implements the publicsuffix algorithm described at https://publicsuffix.org/list/.
Comments throughout this module are direct quotes from https://publicsuffix.org/list/,
showing how individual lines of code relate to the specification.
"""
@type options :: [ignore_private: boolean]
@doc """
Extracts the public suffix from the provided domain based on the publicsuffix.org rules.
## Examples
iex> public_suffix("foo.bar.com")
"com"
You can use the `ignore_private` keyword to exclude private (non-ICANN) domains.
iex> public_suffix("foo.github.io", ignore_private: false)
"github.io"
iex> public_suffix("foo.github.io", ignore_private: true)
"io"
iex> public_suffix("foo.github.io")
"github.io"
"""
@spec public_suffix(String.t, options) :: nil | String.t
def public_suffix(domain, options \\ []) when is_binary(domain) do
parse_domain(domain, options, 0)
end
@doc """
Extracts the _registrable_ part of the provided domain. The registrable
part is the public suffix plus one additional domain part. For example,
given a public suffix of `co.uk`, `example.co.uk` would be the registrable
domain part. If the domain does not contain a registrable part (for example,
if the domain is itself a public suffix), this function will return `nil`.
## Examples
iex> registrable_domain("foo.bar.com")
"bar.com"
iex> registrable_domain("com")
nil
You can use the `ignore_private` keyword to exclude private (non-ICANN) domains.
iex> registrable_domain("foo.github.io", ignore_private: false)
"foo.github.io"
iex> registrable_domain("foo.github.io", ignore_private: true)
"github.io"
iex> registrable_domain("foo.github.io")
"foo.github.io"
"""
@spec registrable_domain(String.t, options) :: nil | String.t
def registrable_domain(domain, options \\ []) when is_binary(domain) do
# "The registered or registrable domain is the public suffix plus one additional label."
parse_domain(domain, options, 1)
end
@doc """
Parses the provided domain and returns the prevailing rule based on the
publicsuffix.org rules. If no rules match, the prevailing rule is "*",
unless the provided domain has a leading dot, in which case the input is
invalid and the function returns `nil`.
## Examples
iex> prevailing_rule("foo.bar.com")
"com"
iex> prevailing_rule("co.uk")
"co.uk"
iex> prevailing_rule("foo.ck")
"*.ck"
iex> prevailing_rule("foobar.example")
"*"
You can use the `ignore_private` keyword to exclude private (non-ICANN) domains.
iex> prevailing_rule("foo.github.io", ignore_private: false)
"github.io"
iex> prevailing_rule("foo.github.io", ignore_private: true)
"io"
iex> prevailing_rule("foo.github.io")
"github.io"
"""
@spec prevailing_rule(String.t, options) :: nil | String.t
def prevailing_rule(domain, options \\ [])
def prevailing_rule("." <> _domain, _), do: nil
def prevailing_rule(domain, options) when is_binary(domain) do
domain
|> String.downcase
|> String.split(".")
|> find_prevailing_rule(options)
|> case do
{:exception, rule} -> "!" <> Enum.join(rule, ".")
{:normal, rule} -> Enum.join(rule, ".")
end
end
@doc """
Checks whether the provided domain matches an explicit rule in the
publicsuffix.org rules.
## Examples
iex> matches_explicit_rule?("foo.bar.com")
true
iex> matches_explicit_rule?("com")
true
iex> matches_explicit_rule?("foobar.example")
false
You can use the `ignore_private` keyword to exclude private (non-ICANN) domains.
"""
@spec matches_explicit_rule?(String.t | nil, options) :: boolean
def matches_explicit_rule?(domain, options \\ [])
def matches_explicit_rule?(nil, _options), do: false
def matches_explicit_rule?(domain, options) when is_binary(domain) do
!(prevailing_rule(domain, options) in [nil, "*"])
end
# Inputs with a leading dot should be treated as a special case.
# see https://github.com/publicsuffix/list/issues/208
defp parse_domain("." <> _domain, _, _), do: nil
defp parse_domain(domain, options, extra_label_parts) do
domain
# "The domain...must be canonicalized in the normal way for hostnames - lower-case"
|> String.downcase
# "A domain or rule can be split into a list of labels using the separator "." (dot)."
|> String.split(".")
|> extract_labels_using_rules(extra_label_parts, options)
|> case do
nil -> nil
labels -> Enum.join(labels, ".")
end
end
defp extract_labels_using_rules(labels, extra_label_parts, options) do
num_labels =
labels
|> find_prevailing_rule(options)
|> case do
# "If the prevailing rule is a exception rule, modify it by removing the leftmost label."
{:exception, labels} -> tl(labels)
{:normal, labels} -> labels
end
|> length
|> Kernel.+(extra_label_parts)
if length(labels) >= num_labels do
take_last_n(labels, num_labels)
else
nil
end
end
defp find_prevailing_rule(labels, options) do
allowed_rule_types = allowed_rule_types_for(options)
# "If more than one rule matches, the prevailing rule is the one which is an exception rule."
find_prevailing_exception_rule(labels, allowed_rule_types) ||
find_prevailing_normal_rule(labels, allowed_rule_types) ||
# "If no rules match, the prevailing rule is "*"."
{:normal, ["*"]}
end
data_file = Path.expand("../data/public_suffix_list.dat", __DIR__)
@external_resource data_file
raw_data = if Application.get_env(:public_suffix, :download_data_on_compile, false) do
case fetch_remote_file("https://publicsuffix.org/list/public_suffix_list.dat") do
{:ok, data} ->
IO.puts "PublicSuffix: fetched fresh data file for compilation."
data
{:error, error} ->
raise """
PublicSuffix: failed to fetch fresh data file for compilation:
#{inspect error}
Try again or change `download_data_on_compile` config to `false` to use the cached copy of the rules file.
"""
end
else
File.read!(data_file)
end
rule_maps = parse_rules(raw_data)
@exception_rules rule_maps.exception_rules
defp find_prevailing_exception_rule([], _allowed_rule_types), do: nil
defp find_prevailing_exception_rule([_ | suffix] = domain_labels, allowed_rule_types) do
if @exception_rules[domain_labels] in allowed_rule_types do
{:exception, domain_labels}
else
find_prevailing_exception_rule(suffix, allowed_rule_types)
end
end
@exact_match_rules rule_maps.exact_match_rules
@wild_card_rules rule_maps.wild_card_rules
defp find_prevailing_normal_rule([], _allowed_rule_types), do: nil
defp find_prevailing_normal_rule([_ | suffix] = domain_labels, allowed_rule_types) do
cond do
@exact_match_rules[domain_labels] in allowed_rule_types -> {:normal, domain_labels}
# TODO: "Wildcards are not restricted to appear only in the leftmost position"
@wild_card_rules[["*" | suffix]] in allowed_rule_types -> {:normal, ["*" | suffix]}
true -> find_prevailing_normal_rule(suffix, allowed_rule_types)
end
end
defp allowed_rule_types_for(options) do
if Keyword.get(options, :ignore_private, false) do
[:icann]
else
[:icann, :private]
end
end
defp take_last_n(list, n) do
list
|> Enum.reverse
|> Enum.take(n)
|> Enum.reverse
end
end
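The exception-rule branch is easiest to see on the `*.ck` / `!www.ck` pair from the public suffix list. A sketch, assuming the bundled data file still carries those rules:

# "!www.ck" overrides the "*.ck" wildcard, so "www.ck" itself is registrable.
"!www.ck" = PublicSuffix.prevailing_rule("www.ck")
"ck" = PublicSuffix.public_suffix("www.ck")     # the exception rule drops its leftmost label
"www.ck" = PublicSuffix.registrable_domain("www.ck")
"*.ck" = PublicSuffix.prevailing_rule("foo.ck") # any other label matches the wildcard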
| path: lib/public_suffix.ex | quality_prob: 0.866571 | learning_prob: 0.429429 | filename: public_suffix.ex | kind: starcoder |
defmodule EllipticCurve.Utils.Der do
@moduledoc false
use Bitwise
@hexAt "\x00"
@hexB "\x02"
@hexC "\x03"
@hexD "\x04"
@hexF "\x06"
@hex0 "\x30"
@hex31 0x1F
@hex127 0x7F
@hex129 0xA0
@hex160 0x80
@hex224 0xE0
alias EllipticCurve.Utils.{BinaryAscii, Base64}
def encodeSequence(encodedPieces) do
Enum.sum(for piece <- encodedPieces, do: byte_size(piece))
|> (fn totalLength -> <<@hex0>> <> encodeLength(totalLength) <> Enum.join(encodedPieces) end).()
end
def encodeInteger(x) when x >= 0 do
bin =
x
|> Integer.to_string(16)
|> complementIntegerString()
|> BinaryAscii.binaryFromHex()
if getFirstByte(bin) <= @hex127 do
@hexB <> <<byte_size(bin)>> <> bin
else
@hexB <> <<byte_size(bin) + 1>> <> @hexAt <> bin
end
end
defp complementIntegerString(x) when rem(byte_size(x), 2) == 1 do
"0" <> x
end
defp complementIntegerString(x) do
x
end
def encodeOid([first | [second | pieces]]) when first <= 2 and second <= 39 do
([<<40 * first + second>>] ++ for(piece <- pieces, do: encodeNumber(piece)))
|> Enum.join()
|> (fn body -> @hexF <> encodeLength(byte_size(body)) <> body end).()
end
def encodeBitString(t) do
@hexC <> encodeLength(byte_size(t)) <> t
end
def encodeOctetString(t) do
@hexD <> encodeLength(byte_size(t)) <> t
end
def encodeConstructed(tag, value) do
<<@hex129 + tag>> <> encodeLength(byte_size(value)) <> value
end
def removeSequence(string) do
trimmedString = checkSequenceError(string, @hex0)
splitOnLength(trimmedString)
end
def removeInteger(string) do
trimmedString = checkSequenceError(string, @hexB)
{numberBytes, rest} = splitOnLength(trimmedString)
if getFirstByte(numberBytes) >= @hex160 do
throw("nBytes #{getFirstByte(numberBytes)} >= #{@hex160}")
end
{parsed, ""} =
Integer.parse(
BinaryAscii.hexFromBinary(numberBytes),
16
)
{
parsed,
rest
}
end
def removeObject(string) do
trimmedString = checkSequenceError(string, @hexF)
{body, rest} = splitOnLength(trimmedString)
[n0 | numbers] = removeObjectRecursion(body)
first = div(n0, 40)
second = n0 - 40 * first
{[first, second] ++ numbers, rest}
end
defp removeObjectRecursion(body) when byte_size(body) == 0 do
[]
end
defp removeObjectRecursion(body) do
{n, lengthLength} = readNumber(body)
numbers =
binary_part(body, lengthLength, byte_size(body) - lengthLength)
|> removeObjectRecursion()
[n | numbers]
end
def removeBitString(string) do
trimmedString = checkSequenceError(string, @hexC)
splitOnLength(trimmedString)
end
def removeOctetString(string) do
trimmedString = checkSequenceError(string, @hexD)
splitOnLength(trimmedString)
end
def removeConstructed(<<s0>> <> trimmedString) do
if (s0 &&& @hex224) != @hex129 do
throw("wanted constructed tag (0xa0-0xbf), got #{Integer.to_string(s0, 16)}")
end
{body, rest} = splitOnLength(trimmedString)
{
s0 &&& @hex31,
body,
rest
}
end
def fromPem(pem) do
pem
|> :binary.split(["\r", "\n", "\r\n"], [:global])
|> filterPemLine()
|> Enum.join()
|> Base64.decode()
end
defp filterPemLine([line | rest]) do
lines = filterPemLine(rest)
cleanLine = line |> String.trim()
if byte_size(cleanLine) == 0 or String.starts_with?(cleanLine, "-----") do
lines
else
[cleanLine | lines]
end
end
defp filterPemLine([]) do
[]
end
def toPem(der, name) do
b64 =
der
|> Base64.encode()
|> makeLines()
(["-----BEGIN #{name}-----\n"] ++ b64 ++ ["-----END #{name}-----\n"])
|> Enum.join()
end
defp makeLines(content) when byte_size(content) > 64 do
[
binary_part(content, 0, 64) <> "\n"
| makeLines(binary_part(content, 64, byte_size(content) - 64))
]
end
defp makeLines(content) do
[content <> "\n"]
end
def encodeLength(lengthValue) when lengthValue > 0 and lengthValue < @hex160 do
<<lengthValue>>
end
def encodeLength(lengthValue) when lengthValue > 0 do
lengthValue
|> Integer.to_string(16)
|> checkOddity()
|> BinaryAscii.binaryFromHex()
|> (fn s -> <<@hex160 ||| byte_size(s)>> <> s end).()
end
defp checkOddity(s) when rem(byte_size(s), 2) == 1 do
"0" <> s
end
defp checkOddity(s) do
s
end
def encodeNumber(n) do
encodeNumberRecursive(n)
|> finishEncoding()
end
defp encodeNumberRecursive(n) when n > 0 do
encodeNumberRecursive(n >>> 7) <> <<(n &&& @hex127) ||| @hex160>>
end
defp encodeNumberRecursive(_n) do
<<>>
end
defp finishEncoding(<<>>) do
<<0>>
end
defp finishEncoding(<<first>> <> rest) when byte_size(rest) > 0 do
<<first>> <> finishEncoding(rest)
end
defp finishEncoding(<<lastDigit>>) do
<<lastDigit &&& @hex127>>
end
defp readNumber(string, number \\ 0, lengthLength \\ 0) do
if lengthLength > byte_size(string) do
throw("ran out of length bytes")
end
if lengthLength > 0 and
(getFirstByte(binary_part(string, lengthLength - 1, 1)) &&& @hex160) == 0 do
{number, lengthLength}
else
readNumber(
string,
(number <<< 7) + (getFirstByte(binary_part(string, lengthLength, 1)) &&& @hex127),
lengthLength + 1
)
end
end
defp splitOnLength(string) do
{bodyLength, lengthLength} = readLength(string)
{
binary_part(string, lengthLength, bodyLength),
binary_part(
string,
bodyLength + lengthLength,
byte_size(string) - lengthLength - bodyLength
)
}
end
defp readLength(string) do
num = getFirstByte(string)
if (num &&& @hex160) == 0 do
{num &&& @hex127, 1}
else
lengthLength = num &&& @hex127
if lengthLength > byte_size(string) - 1 do
throw("ran out of length bytes")
end
{parsed, ""} =
Integer.parse(
BinaryAscii.hexFromBinary(binary_part(string, 1, lengthLength)),
16
)
{
parsed,
1 + lengthLength
}
end
end
defp checkSequenceError(<<first>> <> rest, start) do
if <<first>> != start do
throw("wanted sequence #{Base.encode16(start)}, got #{Base.encode16(<<first>>)}")
end
rest
end
defp getFirstByte(<<first>> <> _rest) do
first
end
end
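The encoders and parsers above are inverses, so round-trip checks are cheap. A sketch using only functions from this module (which is `@moduledoc false`, so this is illustrative rather than public API):

alias EllipticCurve.Utils.Der
# INTEGER round trip: 123_456 encodes as <<0x02, 0x03, 0x01, 0xE2, 0x40>>.
{123_456, ""} = Der.removeInteger(Der.encodeInteger(123_456))
# A SEQUENCE of two INTEGERs, the layout used for ECDSA {r, s} signatures.
sequence = Der.encodeSequence([Der.encodeInteger(1), Der.encodeInteger(2)])
{body, ""} = Der.removeSequence(sequence)
{1, rest} = Der.removeInteger(body)
{2, ""} = Der.removeInteger(rest)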
| path: lib/utils/der.ex | quality_prob: 0.629775 | learning_prob: 0.436922 | filename: der.ex | kind: starcoder |
defmodule Remedy.ISO8601 do
@moduledoc """
`Ecto.Type` compatible limited ISO8601 type.
ISO8601 is an international standard covering the worldwide exchange and communication of date and time related data. It is maintained by the Geneva-based International Organization for Standardization (ISO) and was first published in 1988, with updates in 1991, 2000, 2004, and 2019. The standard aims to provide a well-defined, unambiguous method of representing calendar dates and times in worldwide communications, especially to avoid misinterpreting numeric dates and times when such data is transferred between countries with different conventions for writing numeric dates and times.
This implementation is limited in that it does not handle ranges of dates, and it does not handle time zones. It provides conveniences for conversion of Snowflakes, Unix Timestamps and DateTime structs. A field that accepts an ISO8601 date can therefore accept any of the mentioned formats. The data will be parsed and cast automatically.
## Casting
The following are examples of valid inputs for casting. Regardless of the format provided, values will be cast to a `t:binary/0` value for storage.
#### ISO8601 Timestamp
"2015-01-01T01:02:52.735Z"
#### Unix Timestamp
1420070400
#### DateTime Struct
%DateTime{year: 2015, month: 1, day: 1, hour: 1, minute: 2, second: 52,
microsecond: {735_000, 3}, time_zone: "Etc/UTC", zone_abbr: "UTC",
utc_offset: 0, std_offset: 0}
#### Discord Snowflake
"12345678901234567"
"""
import Remedy.TimeHelpers
use Ecto.Type
@typedoc "An ISO8601 Type."
@type t() :: String.t()
@typedoc "Castable to ISO8601 Type."
@type c() :: DateTime.t() | String.t() | integer() | nil
@doc false
@impl true
@spec type :: :string
def type, do: :string
@doc false
@impl true
@spec cast(any) :: :error | {:ok, nil | binary}
def cast(value)
def cast(nil), do: {:ok, nil}
def cast(value) do
case to_iso8601(value) do
:error -> :error
value -> {:ok, value}
end
end
@doc false
@impl true
@spec dump(any) :: :error | {:ok, nil | binary}
def dump(nil), do: {:ok, nil}
def dump(value), do: {:ok, value}
@doc false
@impl true
@spec load(any) :: {:ok, t() | nil}
def load(value), do: {:ok, to_iso8601(value)}
@doc false
@impl true
def equal?(term1, term2), do: to_iso8601(term1) == to_iso8601(term2)
@doc false
@impl true
def embed_as(_value), do: :dump
end
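As an `Ecto.Type`, the module is meant to back schema fields, but the clauses above can be exercised directly. Only the `nil` pass-through and the storage type are guaranteed by this file; what `cast/1` yields for other inputs depends on `Remedy.TimeHelpers.to_iso8601/1`, which is not shown:

:string = Remedy.ISO8601.type()
{:ok, nil} = Remedy.ISO8601.cast(nil)
# Hypothetical schema usage:
#   field :joined_at, Remedy.ISO8601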
| path: lib/remedy/types/iso8601.ex | quality_prob: 0.813016 | learning_prob: 0.712107 | filename: iso8601.ex | kind: starcoder |
defmodule MyList do
@moduledoc"""
Module that emulates functionality in Elixir's Enum module.
Created for Learn Elixir screencast series episode 06 - Recursion
"""
@doc"""
Calculates the length of a given `list`.
Returns the length of the `list` as an integer.
## Examples:
iex> MyList.length([])
0
iex> MyList.length([1, 2, 3])
3
"""
def length(list), do: do_length(list, 0)
@doc"""
Iterates over the contents of a given `list` and performs a
function on each element of the `list`.
Returns `:ok`.
## Examples:
iex> MyList.each([], fn(x) -> x end)
:ok
iex> MyList.each([1, 2, 3], fn(num) ->
...> IO.puts to_string(num)
...> end)
#=> "1"
#=> "2"
#=> "3"
:ok
"""
def each(list, fun), do: do_each(list, fun)
@doc"""
Iterates over the contents of a given `list`, performs a
function on each element of the `list`, and returns a new
list with the results.
Returns a list of results based on calling function `fun`
on each member of the given `list`.
## Examples:
iex> MyList.map([], fn(x) -> x end)
[]
iex> MyList.map([1, 2, 3], fn(n) -> n * 2 end)
[2, 4, 6]
"""
def map(list, fun), do: do_map(list, fun, [])
@doc"""
Sums the elements of a given `list`.
Returns the sum total of the `list` elements as an integer.
## Examples:
iex> MyList.sum([])
0
iex> MyList.sum([1, 2, 3])
6
"""
def sum(list), do: do_sum(list, 0)
@doc"""
Iterates over the contents of a given `list`, performs a
function `fun` on each element of the `list`, and returns the
result of recursively processing the return value of each of the
function calls.
Returns a result based on calling function `fun` on each member
of the given `list` as an integer.
## Examples:
iex> MyList.reduce([], fn(n, acc) -> acc + n end)
nil
iex> MyList.reduce([2], fn(_x, acc) -> acc + 5 end)
2
iex> MyList.reduce([1, 2, 3, 4], fn(n, acc) -> acc + n end)
10
iex> MyList.reduce([1, 2, 3, 4], fn(n, acc) -> acc * n end)
24
"""
def reduce(list, fun), do: do_reduce(list, fun)
@doc"""
Reverses the contents of a given `list`.
Returns a list.
## Examples:
iex> MyList.reverse([])
[]
iex> MyList.reverse([1])
[1]
iex> MyList.reverse([1, 2, 3])
[3, 2, 1]
iex> MyList.reverse(["a", "b", "c"])
["c", "b", "a"]
"""
def reverse(list), do: do_reverse(list)
# private
defp do_length([], count), do: count
defp do_length([_head | tail], count), do: do_length(tail, count + 1)
defp do_each([], _fun), do: :ok
defp do_each([head | tail], fun) do
fun.(head)
do_each(tail, fun)
end
defp do_map([], _fun, acc), do: :lists.reverse(acc)
defp do_map([head | tail], fun, acc) do
result = fun.(head)
acc = [result | acc]
do_map(tail, fun, acc)
end
defp do_sum([], count), do: count
defp do_sum([head | tail], count) do
count = count + head
do_sum(tail, count)
end
defp do_reduce([], _fun), do: nil
defp do_reduce([head], _fun), do: head
defp do_reduce([head | tail], fun), do: do_reduce(tail, head, fun)
defp do_reduce([], acc, _fun), do: acc
defp do_reduce([head | tail], acc, fun) do
acc = fun.(head, acc)
do_reduce(tail, acc, fun)
end
defp do_reverse([]), do: []
defp do_reverse([head | tail]), do: do_reverse(tail, [head])
defp do_reverse([], acc), do: acc
defp do_reverse([head | tail], acc) do
acc = [head | acc]
do_reverse(tail, acc)
end
end
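Because `map/2` returns a list and `reduce/2` consumes one, the functions compose like their `Enum` counterparts:

# Sum of squares with the hand-rolled functions above: [1, 4, 9] reduces to 14.
14 =
  [1, 2, 3]
  |> MyList.map(&(&1 * &1))
  |> MyList.reduce(fn n, acc -> acc + n end)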
| path: episode06/my_list/lib/my_list.ex | quality_prob: 0.841028 | learning_prob: 0.636297 | filename: my_list.ex | kind: starcoder |
defmodule WithRetry.BackOff do
@moduledoc ~S"""
Helper module with different back off strategies and functionality.
"""
@doc ~S"""
A constant stream of timeouts.
## Example
```
iex> constant(2_000) |> Enum.take(3)
[2_000, 2_000, 2_000]
```
"""
@spec constant(pos_integer) :: Enumerable.t()
def constant(timeout \\ 1_000), do: Stream.unfold(0, &{timeout, &1 + 1})
@doc ~S"""
A linearly increasing stream of timeouts.
## Example
```
iex> linear(1_000, 1_500) |> Enum.take(3)
[1_000, 2_500, 4_000]
```
"""
@spec linear(pos_integer, pos_integer) :: Enumerable.t()
def linear(base \\ 1_000, addition \\ 1_000), do: Stream.unfold(base, &{&1, &1 + addition})
@doc ~S"""
An exponentially increasing stream of timeouts.
## Example
```
iex> exponential(1_000, 2) |> Enum.take(3)
[1_000, 2_000, 4_000]
```
"""
@spec exponential(pos_integer, pos_integer | float) :: Enumerable.t()
def exponential(base \\ 1_000, factor \\ 2), do: Stream.unfold(base, &{round(&1), &1 * factor})
@doc ~S"""
Caps a stream of timeouts to the given value.
## Example
```
iex> exponential(1_000, 2) |> cap(3_500) |> Enum.take(3)
[1_000, 2_000, 3_500]
```
"""
@spec cap(Enumerable.t(), pos_integer) :: Enumerable.t()
def cap(back_off, cap), do: Stream.map(back_off, &if(&1 < cap, do: &1, else: cap))
@doc ~S"""
Caps a stream to the given maximum number of tries.
(Including the first attempt.)
See: `max_retry/2` to cap to any number of retries.
## Example
```
iex> exponential(1_000, 2) |> max_try(3) |> Enum.to_list()
[1_000, 2_000]
```
"""
@spec max_try(Enumerable.t(), pos_integer) :: Enumerable.t()
def max_try(back_off \\ constant(), max), do: max_retry(back_off, max - 1)
@doc ~S"""
Caps a stream to the given maximum number of retries.
(Excluding the first attempt.)
See: `max_try/2` to cap to any number of tries.
## Example
```
iex> exponential(1_000, 2) |> max_retry(3) |> Enum.to_list()
[1_000, 2_000, 4_000]
```
"""
@spec max_retry(Enumerable.t(), pos_integer) :: Enumerable.t()
def max_retry(back_off \\ constant(), max),
do: Stream.transform(back_off, 0, &if(&2 < max, do: {[&1], &2 + 1}, else: {:halt, &2}))
@doc ~S"""
Limits a stream of timeouts to a maximum duration.
This includes the time spent executing the actual `with` body.
See: `limit_wait/2` to limit only the time spent waiting, excluding execution time.
The limit is best effort: a long execution time might still
bring the total time spent executing the `with_try` over the set limit.
## Example
```
iex> exponential(1_000, 2) |> limit(7_000) |> Enum.to_list()
[1_000, 2_000]
```
*Note:*
You would expect `[1_000, 2_000, 4_000]` (sum: `7_000`),
but assuming a non-zero execution time, `3_000 + execution time + 4_000`
would bring the total over the set `7_000` limit.
"""
@spec limit(Enumerable.t(), pos_integer) :: Enumerable.t()
def limit(back_off \\ constant(), limit) do
Stream.transform(
back_off,
:os.system_time(:milli_seconds),
&if(:os.system_time(:milli_seconds) - &2 + &1 <= limit, do: {[&1], &2}, else: {:halt, &2})
)
end
@doc ~S"""
Limits a stream of timeouts to a maximum duration.
This excludes the time spent executing the actual `with` body.
See: `limit/2` to limit the total time, including execution time.
## Example
```
iex> exponential(1_000, 2) |> limit_wait(7_000) |> Enum.to_list()
[1_000, 2_000, 4_000]
```
"""
@spec limit_wait(Enumerable.t(), pos_integer) :: Enumerable.t()
def limit_wait(back_off \\ constant(), limit) do
Stream.transform(
back_off,
0,
&if(&2 + &1 <= limit, do: {[&1], &2 + &1}, else: {:halt, &2})
)
end
end
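Because every strategy returns a `Stream`, back offs compose by piping, with caps and limits applied in the order given:

import WithRetry.BackOff
# Exponential growth, capped at 2 seconds, at most 4 retries.
[500, 1_000, 2_000, 2_000] =
  exponential(500, 2)
  |> cap(2_000)
  |> max_retry(4)
  |> Enum.to_list()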
| path: lib/with_retry/back_off.ex | quality_prob: 0.947745 | learning_prob: 0.919823 | filename: back_off.ex | kind: starcoder |
defmodule Exfpm.Encoders.Pairs do
use Bitwise
def decode(_, pairs \\ [])
def decode(<<>>, pairs) do
pairs
end
def decode(data, pairs) do
  {:ok, name_size, value_size, data1} = pair_length!(data)
  case data1 do
    # Lengths are byte counts, so match on binary sizes rather than String.length/1.
    <<name::binary-size(name_size), value::binary-size(value_size), data2::binary>> ->
      decode(data2, pairs ++ [{name, value}])
    _ ->
      {:error, :truncated_pair}
  end
end
def pair_length!(data) do
case packet_length(data) do
{:error, reason1} -> raise reason1
{:ok, name_size, data1} -> case packet_length(data1) do
{:error, reason2} -> raise reason2
{:ok, value_size, data2} -> {:ok, name_size, value_size, data2}
end
end
end
def packet_length(<<size, data::binary>>) when size < 128 do
{:ok, size, data}
end
def packet_length(<<size, a1, a2, a3, data::binary>>) when size >= 128 do
  # FastCGI 4-byte length: clear the flag bit on the first byte, then combine big-endian.
  {:ok, ((size &&& 0x7F) <<< 24) ||| (a1 <<< 16) ||| (a2 <<< 8) ||| a3, data}
end
def packet_length(data) do
{:error, :"Invalid packet length", data}
end
def encode(term) when is_binary(term) and byte_size(term) >= 128 do
  # The high bit of the first byte flags a 4-byte big-endian length (FastCGI spec);
  # lengths are byte counts, so use byte_size/1 rather than String.length/1.
  <<(0x80000000 ||| byte_size(term))::32>>
end
@doc ~S"""
iex> Exfpm.Encoders.Pairs.encode(:foo)
<<3>>
"""
def encode(term) when is_binary(term) and byte_size(term) < 128 do
  <<byte_size(term)>>
end
@doc ~S"""
iex> Exfpm.Encoders.Pairs.encode(:foo)
<<3>>
iex> Exfpm.Encoders.Pairs.encode(:a_larger_atom)
<<13>>
"""
def encode(term) when is_atom(term), do: encode(Atom.to_string(term))
def encode(term) when is_number(term), do: encode(to_string(term))
@doc ~S"""
iex> Exfpm.Encoders.Pairs.encode({:foo, "sample"})
<<3, 6, 102, 111, 111, 115, 97, 109, 112, 108, 101>>
"""
def encode({name, value}) do
encode(name) <> encode(value) <> to_string(name) <> to_string(value)
end
@doc ~S"""
iex> Exfpm.Encoders.Pairs.encode([foo: "sample", bar: "eggs"])
<<3, 6, 102, 111, 111, 115, 97, 109, 112, 108, 101, 3, 4, 98, 97, 114, 101, 103, 103, 115>>
"""
def encode([{name, value}|t]) do
encode({name, value}) <> encode(t)
end
def encode(pairs) when is_map(pairs), do: encode(Map.to_list(pairs))
def encode([]) do
""
end
end
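Given the byte-size fixes above, `encode/1` and `decode/1` round-trip a FastCGI name-value list:

alias Exfpm.Encoders.Pairs
# <<6, 8>> length prefixes, then the "SCRIPT" and "index.ex" payload bytes.
encoded = Pairs.encode([{"SCRIPT", "index.ex"}])
[{"SCRIPT", "index.ex"}] = Pairs.decode(encoded)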
| path: lib/encoders/pairs.ex | quality_prob: 0.5083 | learning_prob: 0.648202 | filename: pairs.ex | kind: starcoder |
defmodule Chatter.Gossip do
require Record
require Chatter.BroadcastID
require Chatter.NetID
alias Chatter.BroadcastID
alias Chatter.NetID
Record.defrecord :gossip,
current_id: nil,
seen_ids: [],
other_ids: [],
distribution_list: [],
payload: nil
@type t :: record( :gossip,
current_id: BroadcastID.t,
seen_ids: list(BroadcastID.t),
other_ids: list(NetID.t),
distribution_list: list(NetID.t),
payload: tuple )
@spec new(NetID.t, tuple) :: t
def new(my_id, data)
when NetID.is_valid(my_id) and
is_tuple(data) and
tuple_size(data) > 1
do
gossip(current_id: BroadcastID.new(my_id)) |> gossip(payload: data)
end
@spec new(NetID.t, integer, tuple) :: t
def new(my_id, seqno, data)
when NetID.is_valid(my_id) and
is_integer(seqno) and
seqno >= 0 and
is_tuple(data) and
tuple_size(data) > 1
do
gossip(current_id: BroadcastID.new(my_id) |> BroadcastID.seqno(seqno))
|> gossip(payload: data)
end
defmacro is_valid(data) do
case Macro.Env.in_guard?(__CALLER__) do
true ->
quote do
is_tuple(unquote(data)) and tuple_size(unquote(data)) == 6 and
:erlang.element(1, unquote(data)) == :gossip and
# broadcast id
BroadcastID.is_valid(:erlang.element(2, unquote(data))) and
# seen ids
is_list(:erlang.element(3, unquote(data))) and
# other ids
is_list(:erlang.element(4, unquote(data))) and
# distribution list
is_list(:erlang.element(5, unquote(data))) and
# payload
is_tuple(:erlang.element(6, unquote(data))) and
tuple_size(:erlang.element(6, unquote(data))) > 1
end
false ->
quote bind_quoted: binding() do
is_tuple(data) and tuple_size(data) == 6 and
:erlang.element(1, data) == :gossip and
# broadcast id
BroadcastID.is_valid(:erlang.element(2, data)) and
# seen ids
is_list(:erlang.element(3, data)) and
# other ids
is_list(:erlang.element(4, data)) and
# distribution list
is_list(:erlang.element(5, data)) and
# payload
is_tuple(:erlang.element(6, data)) and
tuple_size(:erlang.element(6, data)) > 1
end
end
end
defmacro is_valid_relaxed(data) do
case Macro.Env.in_guard?(__CALLER__) do
true ->
quote do
is_tuple(unquote(data)) and tuple_size(unquote(data)) == 6 and
:erlang.element(1, unquote(data)) == :gossip and
# broadcast id
BroadcastID.is_valid(:erlang.element(2, unquote(data))) and
# seen ids
is_list(:erlang.element(3, unquote(data))) and
# other ids
is_list(:erlang.element(4, unquote(data))) and
# distribution list
is_list(:erlang.element(5, unquote(data)))
end
false ->
quote bind_quoted: binding() do
is_tuple(data) and tuple_size(data) == 6 and
:erlang.element(1, data) == :gossip and
# broadcast id
BroadcastID.is_valid(:erlang.element(2, data)) and
# seen ids
is_list(:erlang.element(3, data)) and
# other ids
is_list(:erlang.element(4, data)) and
# distribution list
is_list(:erlang.element(5, data))
end
end
end
@spec valid?(t) :: boolean
def valid?(data)
when is_valid(data)
do
true
end
def valid?(_), do: false
@spec current_id(t) :: BroadcastID.t
def current_id(g)
when is_valid(g)
do
gossip(g, :current_id)
end
@spec seen_ids(t, list(BroadcastID.t)) :: t
def seen_ids(g, ids)
when is_valid(g) and
is_list(ids)
do
:ok = BroadcastID.validate_list!(ids)
gossip(g, seen_ids: ids)
end
@spec seen_ids(t) :: list(BroadcastID.t)
def seen_ids(g)
when is_valid(g)
do
gossip(g, :seen_ids)
end
@spec other_ids(t, list(NetID.t)) :: t
def other_ids(g, ids)
when is_valid(g) and
is_list(ids)
do
:ok = NetID.validate_list!(ids)
gossip(g, other_ids: ids)
end
@spec other_ids(t) :: list(NetID.t)
def other_ids(g)
when is_valid(g)
do
gossip(g, :other_ids)
end
@spec payload(t) :: tuple
def payload(g)
when is_valid(g)
do
gossip(g, :payload)
end
@spec payload(t, tuple) :: t
def payload(g, pl)
when is_valid(g) and
is_tuple(pl) and
tuple_size(pl) > 1
do
gossip(g, payload: pl)
end
@spec payload_relaxed(t, tuple) :: t
def payload_relaxed(g, pl)
when is_valid_relaxed(g) and
is_tuple(pl) and
tuple_size(pl) > 1
do
gossip(g, payload: pl)
end
@spec seen_netids(t) :: list(NetID.t)
def seen_netids(g)
when is_valid(g)
do
Enum.reduce(gossip(g, :seen_ids), [], fn(x, acc) ->
[Chatter.BroadcastID.origin(x)|acc]
end)
end
@spec distribution_list(t, list(NetID.t)) :: t
def distribution_list(g, ids)
when is_valid(g) and
is_list(ids)
do
:ok = NetID.validate_list!(ids)
gossip(g, distribution_list: ids)
end
@spec distribution_list(t) :: list(NetID.t)
def distribution_list(g)
when is_valid(g)
do
gossip(g, :distribution_list)
end
@spec remove_from_distribution_list(t, list(NetID.t)) :: t
def remove_from_distribution_list(g, [])
when is_valid(g)
do
g
end
def remove_from_distribution_list(g, to_remove)
when is_valid(g)
do
:ok = NetID.validate_list!(to_remove)
old_list = gossip(g, :distribution_list)
# MapSet replaces the long-deprecated HashSet module.
old_set = MapSet.new(old_list)
remove_set = MapSet.new(to_remove)
new_set = MapSet.difference(old_set, remove_set)
gossip(g, distribution_list: MapSet.to_list(new_set))
end
@spec add_to_distribution_list(t, list(NetID.t)) :: t
def add_to_distribution_list(g, [])
when is_valid(g)
do
g
end
def add_to_distribution_list(g, to_add)
when is_valid(g)
do
:ok = NetID.validate_list!(to_add)
old_list = gossip(g, :distribution_list)
old_set = MapSet.new(old_list)
add_set = MapSet.new(to_add)
new_set = MapSet.union(old_set, add_set)
gossip(g, distribution_list: MapSet.to_list(new_set))
end
@spec extract_netids(t) :: list(NetID.t)
def extract_netids(g)
when is_valid(g)
do
[gossip(g, :current_id) |> BroadcastID.origin | gossip(g, :distribution_list)] ++
Enum.map(gossip(g, :seen_ids), fn(x) -> BroadcastID.origin(x) end) ++
gossip(g, :other_ids)
|> Enum.uniq
end
@spec encode_with(t, map) :: binary
def encode_with(g, id_map)
when is_valid(g) and
is_map(id_map) # TODO: check map too ...
do
bin_current_id = gossip(g, :current_id) |> BroadcastID.encode_with(id_map)
bin_seen_ids = gossip(g, :seen_ids) |> BroadcastID.encode_list_with(id_map)
bin_other_ids = gossip(g, :other_ids) |> NetID.encode_list_with(id_map)
bin_distrib = gossip(g, :distribution_list) |> NetID.encode_list_with(id_map)
<< bin_current_id :: binary,
bin_seen_ids :: binary,
bin_other_ids :: binary,
bin_distrib :: binary >>
end
@spec decode_with(binary, map) :: {t, binary}
def decode_with(bin, id_map)
when is_binary(bin) and
byte_size(bin) > 0 and
is_map(id_map)
do
{decoded_current_id, remaining} = BroadcastID.decode_with(bin, id_map)
{decoded_seen_ids, remaining} = BroadcastID.decode_list_with(remaining, id_map)
{decoded_other_ids, remaining} = NetID.decode_list_with(remaining, id_map)
{decoded_distrib, remaining} = NetID.decode_list_with(remaining, id_map)
{ gossip([current_id: decoded_current_id,
seen_ids: decoded_seen_ids,
other_ids: decoded_other_ids,
distribution_list: decoded_distrib,
payload: :empty]),
remaining }
end
end
| path: lib/gossip.ex | quality_prob: 0.608827 | learning_prob: 0.443902 | filename: gossip.ex | kind: starcoder |
defmodule Cldr.Calendar.Preference do
alias Cldr.LanguageTag
@territory_preferences Cldr.Config.calendar_preferences()
@doc false
def territory_preferences do
@territory_preferences
end
@doc false
def preferences_for_territory(territory) do
with {:ok, territory} <- Cldr.validate_territory(territory) do
territory_preferences = territory_preferences()
default_territory = Cldr.default_territory()
the_world = Cldr.the_world()
preferences =
Map.get(territory_preferences, territory) ||
Map.get(territory_preferences, default_territory) ||
Map.get(territory_preferences, the_world)
{:ok, preferences}
end
end
@doc """
Returns the calendar module preferred for
a territory.
## Arguments
* `territory` is any valid ISO3166-2 code as
an `String.t` or upcased `atom()`
## Returns
* `{:ok, calendar_module}` or
* `{:error, {exception, reason}}`
## Examples
iex> Cldr.Calendar.Preference.calendar_from_territory :US
{:ok, Cldr.Calendar.US}
iex> Cldr.Calendar.Preference.calendar_from_territory :YY
{:error, {Cldr.UnknownTerritoryError, "The territory :YY is unknown"}}
## Notes
The overwhelming majority of territories have
`:gregorian` as their first preferred calendar
and therefore `Cldr.Calendar.Gregorian`
will be returned for most territories.
Returning any other calendar module would require:
1. That another calendar is preferred over `:gregorian`
for a territory
2. That a calendar module is available to support
that calendar.
As an example, Iran (territory `:IR`) prefers the
`:persian` calendar. If the optional library
[ex_cldr_calendars_persian](https://hex.pm/packages/ex_cldr_calendars_persian)
is installed, the calendar module `Cldr.Calendar.Persian` will
be returned. If it is not installed, `Cldr.Calendar.Gregorian`
will be returned as `:gregorian` is the second preference
for `:IR`.
"""
def calendar_from_territory(territory) when is_atom(territory) do
with {:ok, preferences} <- preferences_for_territory(territory),
{:ok, calendar_module} <- find_calendar(preferences) do
if calendar_module == Cldr.Calendar.default_calendar() do
Cldr.Calendar.calendar_for_territory(territory)
else
{:ok, calendar_module}
end
end
end
def calendar_from_territory(territory, calendar) when is_atom(territory) do
with {:ok, preferences} <- preferences_for_territory(territory),
{:ok, calendar_module} <- find_calendar(preferences, calendar) do
if calendar_module == Cldr.Calendar.default_calendar() do
Cldr.Calendar.calendar_for_territory(territory)
else
{:ok, calendar_module}
end
end
end
@deprecated "Use calendar_from_territory/1"
defdelegate calendar_for_territory(territory), to: __MODULE__, as: :calendar_from_territory
defp find_calendar(preferences) do
error = {:error, Cldr.unknown_calendar_error(preferences)}
Enum.reduce_while(preferences, error, fn calendar, acc ->
module = calendar_module(calendar)
if Code.ensure_loaded?(module) do
{:halt, {:ok, module}}
else
{:cont, acc}
end
end)
end
defp find_calendar(preferences, calendar) do
if preferred = Enum.find(preferences, &(&1 == calendar)) do
find_calendar([preferred])
else
find_calendar(preferences)
end
end
@doc """
Return the calendar module for a locale.
## Arguments
* `:locale` is any locale or locale name validated
by `Cldr.validate_locale/2`. The default is
`Cldr.get_locale()` which returns the locale
set for the current process
## Returns
* `{:ok, calendar_module}` or
* `{:error, {exception, reason}}`
## Examples
iex> Cldr.Calendar.Preference.calendar_from_locale "en-GB"
{:ok, Cldr.Calendar.GB}
iex> Cldr.Calendar.Preference.calendar_from_locale "en-GB-u-ca-gregory"
{:ok, Cldr.Calendar.GB}
iex> Cldr.Calendar.Preference.calendar_from_locale "en"
{:ok, Cldr.Calendar.US}
iex> Cldr.Calendar.Preference.calendar_from_locale "fa-IR"
{:ok, Cldr.Calendar.Persian}
iex> Cldr.Calendar.Preference.calendar_from_locale "fa-IR-u-ca-gregory"
{:ok, Cldr.Calendar.IR}
"""
def calendar_from_locale(locale \\ Cldr.get_locale())
def calendar_from_locale(%LanguageTag{locale: %{calendar: nil}} = locale) do
locale
|> Cldr.Locale.territory_from_locale()
|> calendar_from_territory
end
def calendar_from_locale(%LanguageTag{locale: %{calendar: calendar}} = locale) do
locale
|> Cldr.Locale.territory_from_locale()
|> calendar_from_territory(calendar)
end
def calendar_from_locale(%LanguageTag{} = locale) do
locale
|> Cldr.Locale.territory_from_locale()
|> calendar_from_territory
end
def calendar_from_locale(locale) when is_binary(locale) do
calendar_from_locale(locale, Cldr.default_backend!())
end
def calendar_from_locale(other) do
{:error, Cldr.Locale.locale_error(other)}
end
def calendar_from_locale(locale, backend) when is_binary(locale) do
with {:ok, locale} <- Cldr.validate_locale(locale, backend) do
calendar_from_locale(locale)
end
end
@deprecated "Use calendar_from_locale/1"
defdelegate calendar_for_locale(locale), to: __MODULE__, as: :calendar_from_locale
@base_calendar Cldr.Calendar
@known_calendars Cldr.known_calendars()
@calendar_modules @known_calendars
|> Enum.map(fn c ->
{c,
Module.concat(@base_calendar, c |> Atom.to_string() |> Macro.camelize())}
end)
|> Map.new()
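  # e.g. :gregorian maps to Cldr.Calendar.Gregorian and :persian to
  # Cldr.Calendar.Persian; whether a module is actually loadable is checked
  # separately in find_calendar/1.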
def calendar_modules do
@calendar_modules
end
def calendar_module(calendar) when calendar in @known_calendars do
Map.fetch!(calendar_modules(), calendar)
end
def calendar_module(other) do
{:error, Cldr.unknown_calendar_error(other)}
end
  def calendar_from_name(name) do
    # calendar_module/1 returns an {:error, ...} tuple for unknown names;
    # guard against passing that tuple to Code.ensure_loaded?/1, which only
    # accepts atoms.
    with module when is_atom(module) <- calendar_module(name),
         true <- Code.ensure_loaded?(module) do
      module
    else
      _ -> nil
    end
  end
end
|
lib/cldr/calendar/preference.ex
| 0.867864 | 0.499329 |
preference.ex
|
starcoder
|
defmodule Transformers do
@moduledoc """
Transforms nested maps and list of maps
"""
@doc """
Returns map (or list of maps) with keys deeply-transformed with the provided function.
## Examples
Camelize keys:
iex> data = %{"key_one" => 1, "key_two" => 2}
iex> Transformers.transform_keys(data, &Macro.camelize/1)
%{"KeyOne" => 1, "KeyTwo" => 2}
Lists and nested maps are traversed and transformed as well:
iex> data = %{"the_list" => [%{"map_one" => 1}, %{"map_two" => 2}]}
iex> Transformers.transform_keys(data, &Macro.camelize/1)
%{"TheList" => [%{"MapOne" => 1}, %{"MapTwo" => 2}]}
"""
def transform_keys(map, func) when is_map(map) do
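    # Structs do not implement Enumerable, so the comprehension below raises
    # Protocol.UndefinedError for them; rescuing returns the struct untouched.
    # The same applies to transform_values/2 further down.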
try do
for {key, val} <- map,
into: %{},
do: {func.(key), transform_keys(val, func)}
rescue
Protocol.UndefinedError -> map
end
end
def transform_keys(list, func) when is_list(list) do
list
|> Enum.map(&transform_keys(&1, func))
end
def transform_keys(any, _), do: any
@doc """
Returns map (or list of maps) with values deeply-transformed with the provided function.
## Examples
Upcase values:
iex> data = %{"one" => "One", "two" => "Two"}
iex> Transformers.transform_values(data, &String.upcase/1)
%{"one" => "ONE", "two" => "TWO"}
Lists and nested maps are traversed and transformed as well:
iex> data = %{"list" => [%{"one" => "One"}, %{"two" => "Two"}]}
iex> Transformers.transform_values(data, &String.upcase/1)
%{"list" => [%{"one" => "ONE"}, %{"two" => "TWO"}]}
"""
def transform_values(map, func) when is_map(map) do
try do
for {key, val} <- map,
into: %{},
do: {key, transform_values(val, func)}
rescue
Protocol.UndefinedError -> map
end
end
def transform_values(list, func) when is_list(list) do
list
|> Enum.map(&transform_values(&1, func))
end
def transform_values(any, func) do
func.(any)
end
end
|
lib/transformers.ex
| 0.696991 | 0.568895 |
transformers.ex
|
starcoder
|
defmodule Tensorex do
@moduledoc """
Functions to operate basic commands with tensors.
"""
@typedoc """
Represents a tensor.
  The data structure is a map whose keys are lists of indices and whose values are numbers. Zero
  values are omitted. The shape is a list of the dimensions, one per order.
This module implements the `Access` behaviour. So that you can access elements of the tensor via
`tensor[indices]` syntax, where `indices` must be a list of `t:integer/0`s or `t:Range.t/0`s. See
`fetch/2` for concrete examples.
"""
@type t :: %Tensorex{
data: %{optional([non_neg_integer, ...]) => number},
shape: [pos_integer, ...]
}
defstruct [:data, :shape]
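  # For example, the 2x2 matrix [[0.0, 2.0], [0.0, 0.0]] is stored sparsely as
  # %Tensorex{data: %{[0, 1] => 2.0}, shape: [2, 2]}.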
@doc """
Creates a new tensor from a list (of lists (of lists of ...)).
iex> Tensorex.from_list([1.1, 2.1, -5.3, 4])
%Tensorex{data: %{[0] => 1.1, [1] => 2.1, [2] => -5.3, [3] => 4}, shape: [4]}
iex> Tensorex.from_list([[1.1, 2.1, -5.3, 4 ],
...> [0.8, -8, 21.4, 3.3]])
%Tensorex{data: %{[0, 0] => 1.1, [0, 1] => 2.1, [0, 2] => -5.3, [0, 3] => 4 ,
[1, 0] => 0.8, [1, 1] => -8, [1, 2] => 21.4, [1, 3] => 3.3}, shape: [2, 4]}
iex> Tensorex.from_list([[[0.0, 0.0, 0.0],
...> [0.0, 0.0, 0.0]],
...> [[0.0, 0.0, 0.0],
...> [0.0, 0.0, 0.0]]])
%Tensorex{data: %{}, shape: [2, 2, 3]}
"""
@spec from_list(Enum.t()) :: t
def from_list(data), do: %Tensorex{data: build(data) |> Enum.into(%{}), shape: count(data)}
@spec count(Enum.t()) :: [pos_integer, ...]
defp count(data) do
[nested_shape] =
data
|> Stream.map(fn
value when is_number(value) -> []
nested_data -> count(nested_data)
end)
|> Enum.uniq()
[Enum.count(data) | nested_shape]
end
@spec build(Enum.t()) :: Enum.t()
defp build(data) do
data
|> Stream.with_index()
|> Stream.map(fn
{value, _} when value == 0 ->
[]
{value, index} when is_number(value) ->
[{[index], value}]
{nested_data, index} ->
build(nested_data)
|> Stream.map(fn {nested_indices, value} -> {[index | nested_indices], value} end)
end)
|> Stream.concat()
end
defguardp is_indices(indices, shape) when is_list(indices) and length(indices) <= length(shape)
@behaviour Access
@doc """
Returns a tensor or a number stored at the index.
The key can be a list of indices or ranges. If integer indices are given, it returns a tensor
  or a numeric value specified by the index. If ranges are given, it returns a tensor consisting
  of the selected elements.
Negative indices are counted from the end.
iex> Tensorex.from_list([[[ 1 , -3.1, 2 ],
...> [ 4 , 5 , -6.1],
...> [ 0.9 , -91.2, 11 ]],
...> [[10 , -30.1, 20 ],
...> [40 , 50 , -60.1],
...> [ 0.09, -910.2, 110 ]]])[[0, 0, 0]]
1
iex> Tensorex.from_list([[[ 1 , -3.1, 2 ],
...> [ 4 , 5 , -6.1],
...> [ 0.9 , -91.2, 11 ]],
...> [[10 , -30.1, 20 ],
...> [40 , 50 , -60.1],
...> [ 0.09, -910.2, 110 ]]])[[0, 0]]
%Tensorex{data: %{[0] => 1, [1] => -3.1, [2] => 2}, shape: [3]}
iex> Tensorex.from_list([[[ 1 , -3.1, 2 ],
...> [ 4 , 5 , -6.1],
...> [ 0.9 , -91.2, 11 ]],
...> [[10 , -30.1, 20 ],
...> [40 , 50 , -60.1],
...> [ 0.09, -910.2, 110 ]]])[[0]]
%Tensorex{data: %{[0, 0] => 1 , [0, 1] => -3.1, [0, 2] => 2 ,
[1, 0] => 4 , [1, 1] => 5 , [1, 2] => -6.1,
[2, 0] => 0.9, [2, 1] => -91.2, [2, 2] => 11 }, shape: [3, 3]}
iex> Tensorex.from_list([[[ 1 , -3.1, 2 ],
...> [ 4 , 5 , -6.1],
...> [ 0.9 , -91.2, 11 ]],
...> [[10 , -30.1, 20 ],
...> [40 , 50 , -60.1],
...> [ 0.09, -910.2, 110 ]]])[[2]]
nil
iex> Tensorex.from_list([[ 1, 2, 3],
...> [ 4, 5, 6],
...> [ 7, 8, 9],
...> [10, 11, 12]])[[1..2]]
%Tensorex{data: %{[0, 0] => 4, [0, 1] => 5, [0, 2] => 6,
[1, 0] => 7, [1, 1] => 8, [1, 2] => 9}, shape: [2, 3]}
iex> Tensorex.from_list([[ 1, 2, 3],
...> [ 4, 5, 6],
...> [ 7, 8, 9],
...> [10, 11, 12]])[[-2..-1]]
%Tensorex{data: %{[0, 0] => 7, [0, 1] => 8, [0, 2] => 9,
[1, 0] => 10, [1, 1] => 11, [1, 2] => 12}, shape: [2, 3]}
iex> Tensorex.from_list([[ 1, 2, 3],
...> [ 4, 5, 6],
...> [ 7, 8, 9],
...> [10, 11, 12]])[[1..2, 1..-1]]
%Tensorex{data: %{[0, 0] => 5, [0, 1] => 6,
[1, 0] => 8, [1, 1] => 9}, shape: [2, 2]}
iex> Tensorex.from_list([[ 1, 2, 3],
...> [ 4, 0, 6],
...> [ 7, 8, 9],
...> [10, 11, 12]])[[1, 1]]
0.0
"""
@spec fetch(t, [integer | Range.t(), ...]) :: {:ok, t | number} | :error
@impl true
def fetch(%Tensorex{data: store, shape: shape}, indices) when is_indices(indices, shape) do
case normalize_indices(indices, shape) do
{new_indices, false} -> {:ok, Map.get(store, new_indices, 0.0)}
{new_indices, true} -> {:ok, slice(store, shape, new_indices)}
:error -> :error
end
end
@spec slice(
%{optional([non_neg_integer, ...]) => number},
[pos_integer, ...],
[non_neg_integer | Range.t(non_neg_integer, non_neg_integer), ...]
) :: t
defp slice(store, shape, indices) do
indices_length = length(indices)
tail_length = length(shape) - indices_length
new_store =
store
|> Stream.filter(fn {index, _} -> index_in_range?(index, indices) end)
|> Stream.map(fn {index, value} ->
new_index =
Stream.zip(index, indices)
|> Stream.reject(&is_integer(elem(&1, 1)))
|> Stream.map(fn {element, range} -> Enum.find_index(range, &(&1 === element)) end)
|> Enum.concat(Enum.slice(index, indices_length, tail_length))
{new_index, value}
end)
|> Enum.into(%{})
new_shape =
indices
|> Stream.reject(&is_integer/1)
|> Stream.map(&Enum.count/1)
|> Enum.concat(Enum.slice(shape, indices_length, tail_length))
%Tensorex{data: new_store, shape: new_shape}
end
@spec normalize_indices([integer | Range.t(), ...], [pos_integer, ...]) ::
{[non_neg_integer | Range.t(non_neg_integer, non_neg_integer), ...],
slice_mode :: boolean}
| :error
defp normalize_indices(indices, shape) do
try do
Stream.zip(indices, shape)
|> Enum.map_reduce(length(indices) < length(shape), fn
{index, dimension}, acc when is_integer(index) and index < 0 and -dimension <= index ->
{index + dimension, acc}
{index, dimension}, acc when is_integer(index) and 0 <= index and index < dimension ->
{index, acc}
{index_start..index_end, dimension}, _
when index_start < 0 and -dimension <= index_start and
index_end < 0 and -dimension <= index_end ->
{(index_start + dimension)..(index_end + dimension), true}
{index_start..index_end, dimension}, _
when index_start < 0 and -dimension <= index_start and
0 <= index_end and index_end < dimension ->
{(index_start + dimension)..index_end, true}
{index_start..index_end, dimension}, _
when index_end < 0 and -dimension <= index_end and
0 <= index_start and index_start < dimension ->
{index_start..(index_end + dimension), true}
{index_start..index_end = index, dimension}, _
when 0 <= index_start and index_start < dimension and
0 <= index_end and index_end < dimension ->
{index, true}
end)
rescue
FunctionClauseError -> :error
end
end
@spec index_in_range?(
[non_neg_integer, ...],
[non_neg_integer | Range.t(non_neg_integer, non_neg_integer), ...]
) :: boolean
defp index_in_range?(index, indices) do
Stream.zip(index, indices)
|> Enum.all?(fn
{element, element} -> true
{element, _.._ = range} -> element in range
_ -> false
end)
end
@doc """
Returns a tensor or a number stored at the index and update it at the same time.
iex> get_and_update_in(
...> Tensorex.from_list([[[ 1 , -3.1, 2 ],
...> [ 4 , 5 , -6.1],
...> [ 0.9 , -91.2, 11 ]],
...> [[10 , -30.1, 20 ],
...> [40 , 50 , -60.1],
...> [ 0.09, -910.2, 110 ]]])[[0, 1, 0]], &{&1, &1 * 3.5})
{4, %Tensorex{data: %{[0, 0, 0] => 1 , [0, 0, 1] => -3.1, [0, 0, 2] => 2 ,
[0, 1, 0] => 14.0 , [0, 1, 1] => 5 , [0, 1, 2] => -6.1,
[0, 2, 0] => 0.9 , [0, 2, 1] => -91.2, [0, 2, 2] => 11 ,
[1, 0, 0] => 10 , [1, 0, 1] => -30.1, [1, 0, 2] => 20 ,
[1, 1, 0] => 40 , [1, 1, 1] => 50 , [1, 1, 2] => -60.1,
[1, 2, 0] => 0.09, [1, 2, 1] => -910.2, [1, 2, 2] => 110 }, shape: [2, 3, 3]}}
iex> get_and_update_in(
...> Tensorex.from_list([[ 1, 2, 3],
...> [ 4, 5, 6],
...> [ 7, 8, 9],
...> [10, 11, 12]])[[1..2, 1..2]],
...> &{&1, Tensorex.from_list([[13, 14],
...> [15, 16]])})
{%Tensorex{data: %{[0, 0] => 5, [0, 1] => 6,
[1, 0] => 8, [1, 1] => 9}, shape: [2, 2]},
%Tensorex{data: %{[0, 0] => 1, [0, 1] => 2, [0, 2] => 3,
[1, 0] => 4, [1, 1] => 13, [1, 2] => 14,
[2, 0] => 7, [2, 1] => 15, [2, 2] => 16,
[3, 0] => 10, [3, 1] => 11, [3, 2] => 12}, shape: [4, 3]}}
iex> get_and_update_in(
...> Tensorex.from_list([[ 1, 2, 3],
...> [ 4, 5, 6],
...> [ 7, 8, 9],
...> [10, 11, 12]])[[2]],
...> &{&1, Tensorex.from_list([0, 0, 16])})
{%Tensorex{data: %{[0] => 7, [1] => 8, [2] => 9}, shape: [3]},
%Tensorex{data: %{[0, 0] => 1, [0, 1] => 2, [0, 2] => 3,
[1, 0] => 4, [1, 1] => 5, [1, 2] => 6,
[2, 2] => 16,
[3, 0] => 10, [3, 1] => 11, [3, 2] => 12}, shape: [4, 3]}}
iex> get_and_update_in(
...> Tensorex.from_list([[ 1, 2],
...> [ 3, 4]])[[0..-1, 0..-1]],
...> &{&1, Tensorex.from_list([[-2, 0],
...> [ 0, -3]])})
{%Tensorex{data: %{[0, 0] => 1, [0, 1] => 2,
[1, 0] => 3, [1, 1] => 4}, shape: [2, 2]},
%Tensorex{data: %{[0, 0] => -2,
[1, 1] => -3}, shape: [2, 2]}}
iex> get_and_update_in(
...> Tensorex.zero([3, 2])[[1..-1, 1..-1]],
...> &{&1, Tensorex.from_list([[ 1],
...> [-1]])})
{%Tensorex{data: %{}, shape: [2, 1]},
%Tensorex{data: %{[1, 1] => 1,
[2, 1] => -1}, shape: [3, 2]}}
"""
@spec get_and_update(
t,
[integer | Range.t(), ...],
(t -> :pop | {any, t}) | (number -> :pop | {any, number})
) :: {any, t}
@impl true
def get_and_update(%Tensorex{data: store, shape: shape} = tensor, indices, fun)
when is_indices(indices, shape) and is_function(fun, 1) do
case normalize_indices(indices, shape) do
{new_indices, true} ->
case fun.(%{shape: partial_shape} = partial_tensor = slice(store, shape, new_indices)) do
:pop ->
{partial_tensor, %{tensor | data: drop(store, new_indices)}}
{get_value, %Tensorex{shape: ^partial_shape, data: updated_store}} ->
new_store =
updated_store
|> Enum.into(drop(store, new_indices), fn {index, value} ->
{mapped_indices, remaining_partial_indices} =
Enum.map_reduce(new_indices, index, fn
element, acc when is_integer(element) -> {element, acc}
range, [partial_index | acc] -> {Enum.fetch!(range, partial_index), acc}
end)
{mapped_indices ++ remaining_partial_indices, value}
end)
{get_value, %{tensor | data: new_store}}
end
{new_indices, false} ->
case fun.(value = Map.get(store, new_indices, 0.0)) do
:pop ->
{value, %{tensor | data: Map.delete(store, new_indices)}}
{get_value, updated_value} when updated_value == 0 ->
{get_value, %{tensor | data: Map.delete(store, new_indices)}}
{get_value, updated_value} when is_number(updated_value) ->
{get_value, %{tensor | data: Map.put(store, new_indices, updated_value)}}
end
end
end
@doc """
Pops the tensor or the number stored at the index out of the tensor.
iex> pop_in(
...> Tensorex.from_list([[[ 1 , -3.1, 2 ],
...> [ 4 , 5 , -6.1],
...> [ 0.9 , -91.2, 11 ]],
...> [[10 , -30.1, 20 ],
...> [40 , 50 , -60.1],
...> [ 0.09, -910.2, 110 ]]])[[0]])
{%Tensorex{data: %{[0, 0] => 1, [0, 1] => -3.1 , [0, 2] => 2 ,
[1, 0] => 4, [1, 1] => 5 , [1, 2] => -6.1,
[2, 0] => 0.9, [2, 1] => -91.2, [2, 2] => 11 }, shape: [3, 3]},
%Tensorex{data: %{[1, 0, 0] => 10 , [1, 0, 1] => -30.1, [1, 0, 2] => 20 ,
[1, 1, 0] => 40 , [1, 1, 1] => 50 , [1, 1, 2] => -60.1,
[1, 2, 0] => 0.09, [1, 2, 1] => -910.2, [1, 2, 2] => 110 }, shape: [2, 3, 3]}}
iex> pop_in(
...> Tensorex.from_list([[[ 1 , -3.1, 2 ],
...> [ 4 , 5 , -6.1],
...> [ 0.9 , -91.2, 11 ]],
...> [[10 , -30.1, 20 ],
...> [40 , 50 , -60.1],
...> [ 0.09, -910.2, 110 ]]])[[0, 1, 2]])
{-6.1, %Tensorex{data: %{[0, 0, 0] => 1 , [0, 0, 1] => -3.1, [0, 0, 2] => 2 ,
[0, 1, 0] => 4 , [0, 1, 1] => 5 ,
[0, 2, 0] => 0.9 , [0, 2, 1] => -91.2, [0, 2, 2] => 11 ,
[1, 0, 0] => 10 , [1, 0, 1] => -30.1, [1, 0, 2] => 20 ,
[1, 1, 0] => 40 , [1, 1, 1] => 50 , [1, 1, 2] => -60.1,
[1, 2, 0] => 0.09, [1, 2, 1] => -910.2, [1, 2, 2] => 110 }, shape: [2, 3, 3]}}
"""
@spec pop(t, [integer | Range.t(), ...]) :: {t | number, t}
@impl true
def pop(%Tensorex{data: store, shape: shape} = tensor, indices) do
case normalize_indices(indices, shape) do
{new_indices, true} ->
{slice(store, shape, new_indices), %{tensor | data: drop(store, new_indices)}}
{new_indices, false} ->
{Map.get(store, new_indices, 0.0), %{tensor | data: Map.delete(store, new_indices)}}
end
end
@spec drop(
%{optional([non_neg_integer, ...]) => number},
[non_neg_integer | Range.t(non_neg_integer, non_neg_integer)]
) :: %{optional([non_neg_integer, ...]) => number}
defp drop(store, indices) do
Stream.filter(store, fn {index, _} ->
Enum.any?(Stream.zip(index, indices), fn
{element, element} -> false
{element, _.._ = range} -> element not in range
_ -> true
end)
end)
|> Enum.into(%{})
end
defguardp is_positive_integer(number) when is_integer(number) and number > 0
@doc """
  Returns a rank-2 tensor whose diagonal elements are all 1.
iex> Tensorex.kronecker_delta(3)
%Tensorex{data: %{[0, 0] => 1,
[1, 1] => 1,
[2, 2] => 1}, shape: [3, 3]}
"""
@spec kronecker_delta(pos_integer) :: t
def kronecker_delta(dimension) when is_positive_integer(dimension) do
store = 0..(dimension - 1) |> Enum.into(%{}, &{[&1, &1], 1})
%Tensorex{data: store, shape: [dimension, dimension]}
end
@doc """
  Returns a tensor whose elements are all zero.
iex> Tensorex.zero([4, 4, 2])
%Tensorex{data: %{}, shape: [4, 4, 2]}
iex> Tensorex.zero([-5])
** (ArgumentError) expected a list of positive integers, got: [-5]
"""
@spec zero([pos_integer, ...]) :: t
def zero(shape) when is_list(shape) and length(shape) > 0 do
%Tensorex{data: %{}, shape: validate_shape!(shape)}
end
@spec validate_shape!([pos_integer, ...]) :: [pos_integer, ...]
defp validate_shape!(shape) do
if Enum.all?(shape, &is_positive_integer/1) do
shape
else
raise ArgumentError, "expected a list of positive integers, got: #{inspect(shape)}"
end
end
@doc """
Checks if the given tensor is upper triangular or not.
iex> Tensorex.triangular?(Tensorex.from_list([[2, 1, 3],
...> [0, 3, 6],
...> [0, 0, -9]]))
true
iex> Tensorex.triangular?(Tensorex.from_list([[2, 0, 0],
...> [0, 3, 0],
...> [3, 0, -9]]))
false
iex> Tensorex.triangular?(Tensorex.from_list([[[2, 5], [0, 1]],
...> [[0, 0], [0, -2]],
...> [[0, 0], [0, 0]]]))
true
iex> Tensorex.triangular?(Tensorex.from_list([[[2, 5], [0, 1]],
...> [[6, 0], [0, -2]],
...> [[0, 0], [0, 0]]]))
false
"""
@spec triangular?(t) :: boolean
def triangular?(%Tensorex{shape: [_]}), do: false
def triangular?(%Tensorex{data: store}) do
Enum.all?(store, fn {index, _} -> Enum.sort(index) == index end)
end
@doc """
Checks if the given tensor is diagonal or not.
iex> Tensorex.diagonal?(Tensorex.from_list([[2, 0, 0],
...> [0, 3, 0],
...> [0, 0, -9]]))
true
iex> Tensorex.diagonal?(Tensorex.from_list([[ 2 , 0, 0],
...> [ 0 , 3, 0],
...> [-5.3, 0, -9]]))
false
"""
@spec diagonal?(t) :: boolean
def diagonal?(%Tensorex{shape: [_]}), do: false
def diagonal?(%Tensorex{data: store}) do
Enum.all?(store, fn {index, _} -> Enum.count(Stream.uniq(index)) === 1 end)
end
@doc """
Returns a tensor where each element is the result of invoking `mapper` on each corresponding
element of the given tensor.
iex> Tensorex.map(Tensorex.from_list([[[ 0, 1, 2], [-3, -1, 1]],
...> [[-4, -2, 0], [ 1, 0, -1]]]), &(&1 * &1))
%Tensorex{data: %{ [0, 0, 1] => 1, [0, 0, 2] => 4, [0, 1, 0] => 9, [0, 1, 1] => 1, [0, 1, 2] => 1,
[1, 0, 0] => 16, [1, 0, 1] => 4, [1, 1, 0] => 1, [1, 1, 2] => 1}, shape: [2, 2, 3]}
iex> Tensorex.map(Tensorex.from_list([[[ 0, 1, 2], [-3, -1, 1]],
...> [[-4, -2, 0], [ 1, 0, -1]]]), &(&1 + 3))
%Tensorex{data: %{[0, 0, 0] => 3.0, [0, 0, 1] => 4, [0, 0, 2] => 5 , [0, 1, 1] => 2 , [0, 1, 2] => 4,
[1, 0, 0] => -1 , [1, 0, 1] => 1, [1, 0, 2] => 3.0, [1, 1, 0] => 4, [1, 1, 1] => 3.0, [1, 1, 2] => 2}, shape: [2, 2, 3]}
iex> Tensorex.map(Tensorex.from_list([[-3, -1, 1],
...> [-4, -2, 0],
...> [ 1, 0, -1]]),
...> fn
...> value, [index, index] -> value * value
...> value, _ -> value
...> end)
%Tensorex{data: %{[0, 0] => 9, [0, 1] => -1, [0, 2] => 1,
[1, 0] => -4, [1, 1] => 4,
[2, 0] => 1, [2, 2] => 1}, shape: [3, 3]}
"""
@spec map(t, ([pos_integer, ...], number -> number) | (number -> number)) :: t
def map(%Tensorex{data: store, shape: shape} = tensor, mapper) when is_function(mapper, 2) do
mapped_store =
all_indices(shape)
|> Stream.flat_map(fn index ->
case mapper.(Map.get(store, index, 0.0), index) do
value when value == 0 -> []
value -> [{index, value}]
end
end)
|> Enum.into(%{})
%{tensor | data: mapped_store}
end
def map(%Tensorex{} = tensor, mapper) when is_function(mapper, 1) do
map(tensor, fn value, _ -> mapper.(value) end)
end
@spec all_indices([pos_integer, ...]) :: Enum.t()
defp all_indices([dimension]), do: Stream.map(0..(dimension - 1), &[&1])
defp all_indices([dimension | shape]) do
all_indices(shape)
|> Stream.map(fn indices -> Stream.map(0..(dimension - 1), &[&1 | indices]) end)
|> Stream.concat()
end
@doc """
  Returns a tensor where all elements are the given value.
iex> Tensorex.fill([3, 4, 2], 2)
%Tensorex{data: %{[0, 0, 0] => 2, [0, 0, 1] => 2,
[0, 1, 0] => 2, [0, 1, 1] => 2,
[0, 2, 0] => 2, [0, 2, 1] => 2,
[0, 3, 0] => 2, [0, 3, 1] => 2,
[1, 0, 0] => 2, [1, 0, 1] => 2,
[1, 1, 0] => 2, [1, 1, 1] => 2,
[1, 2, 0] => 2, [1, 2, 1] => 2,
[1, 3, 0] => 2, [1, 3, 1] => 2,
[2, 0, 0] => 2, [2, 0, 1] => 2,
[2, 1, 0] => 2, [2, 1, 1] => 2,
[2, 2, 0] => 2, [2, 2, 1] => 2,
[2, 3, 0] => 2, [2, 3, 1] => 2}, shape: [3, 4, 2]}
iex> Tensorex.fill([2, 2, 5], 0.0)
%Tensorex{data: %{}, shape: [2, 2, 5]}
"""
@spec fill([pos_integer, ...], number) :: t
def fill(shape, value) when is_list(shape) and length(shape) > 0 and value == 0, do: zero(shape)
def fill(shape, value) when is_list(shape) and length(shape) > 0 and is_number(value) do
store = shape |> validate_shape!() |> all_indices() |> Enum.into(%{}, &{&1, value})
%Tensorex{data: store, shape: shape}
end
@doc """
  Updates the dimension of each order.
  If the new shape has a larger dimension than the previous one, values at the added indices are
  considered to be zero. If the new shape has a smaller dimension, values at the removed indices
  are discarded.
iex> Tensorex.reshape(Tensorex.from_list([[[ 1, 2, 3],
...> [ 4, 5, 6],
...> [ 7, 8, 9]],
...> [[-1, -2, -3],
...> [-4, -5, -6],
...> [-7, -8, -9]]]), [2, 2, 3])
%Tensorex{data: %{[0, 0, 0] => 1, [0, 0, 1] => 2, [0, 0, 2] => 3,
[0, 1, 0] => 4, [0, 1, 1] => 5, [0, 1, 2] => 6,
[1, 0, 0] => -1, [1, 0, 1] => -2, [1, 0, 2] => -3,
[1, 1, 0] => -4, [1, 1, 1] => -5, [1, 1, 2] => -6}, shape: [2, 2, 3]}
iex> Tensorex.reshape(Tensorex.from_list([[[ 1, 2, 3],
...> [ 4, 5, 6],
...> [ 7, 8, 9]],
...> [[-1, -2, -3],
...> [-4, -5, -6],
...> [-7, -8, -9]]]), [3, 3, 3])
%Tensorex{data: %{[0, 0, 0] => 1, [0, 0, 1] => 2, [0, 0, 2] => 3,
[0, 1, 0] => 4, [0, 1, 1] => 5, [0, 1, 2] => 6,
[0, 2, 0] => 7, [0, 2, 1] => 8, [0, 2, 2] => 9,
[1, 0, 0] => -1, [1, 0, 1] => -2, [1, 0, 2] => -3,
[1, 1, 0] => -4, [1, 1, 1] => -5, [1, 1, 2] => -6,
[1, 2, 0] => -7, [1, 2, 1] => -8, [1, 2, 2] => -9}, shape: [3, 3, 3]}
iex> Tensorex.reshape(Tensorex.from_list([[[ 1, 2, 3],
...> [ 4, 5, 6],
...> [ 7, 8, 9]],
...> [[-1, -2, -3],
...> [-4, -5, -6],
...> [-7, -8, -9]]]), [3, 2, 2])
%Tensorex{data: %{[0, 0, 0] => 1, [0, 0, 1] => 2,
[0, 1, 0] => 4, [0, 1, 1] => 5,
[1, 0, 0] => -1, [1, 0, 1] => -2,
[1, 1, 0] => -4, [1, 1, 1] => -5}, shape: [3, 2, 2]}
"""
@spec reshape(t, [pos_integer, ...]) :: t
def reshape(%Tensorex{data: store, shape: prev_shape}, shape)
when is_list(shape) and length(prev_shape) === length(shape) do
new_store =
store
|> Stream.filter(fn {indices, _} ->
Stream.zip(indices, shape) |> Enum.all?(&(elem(&1, 0) < elem(&1, 1)))
end)
|> Enum.into(%{})
%Tensorex{data: new_store, shape: shape}
end
@doc """
  Returns whether all corresponding elements are within the given (relative) tolerance.
iex> Tensorex.in_tolerance?(Tensorex.from_list([[0.0000001, 0.9999998],
...> [2.0 , -0.0000003]]),
...> Tensorex.from_list([[0 , 1 ],
...> [2 , 0 ]]), 1.0e-6)
true
iex> Tensorex.in_tolerance?(Tensorex.from_list([[0, 1],
...> [2, -1],
...> [3, 2]]),
...> Tensorex.from_list([[0, 1],
...> [2, -1]]), 1.0e-8)
false
iex> Tensorex.in_tolerance?(Tensorex.from_list([[0 , 1],
...> [2.0 , 0]]),
...> Tensorex.from_list([[0 , 1],
...> [2.000003 , 0]]), 1.0e-6)
false
iex> Tensorex.in_tolerance?(Tensorex.from_list([[1.8200340109e62, 1.0e-52 ],
...> [2.335142153e-41, 0 ]]),
...> Tensorex.from_list([[1.8200338243e62, 1.0000009e-52],
...> [2.335142296e-41, 3.242e-7 ]]), 1.0e-6)
true
iex> Tensorex.in_tolerance?(Tensorex.from_list([[1.8200440109e62, 1.0e-52 ],
...> [2.335142296e-41, 0 ]]),
...> Tensorex.from_list([[1.8200440109e62, 1.000002e-52],
...> [2.335142296e-41, 0 ]]), 1.0e-6)
false
iex> Tensorex.in_tolerance?(Tensorex.from_list([[1, 3],
...> [2, 0]]),
...> Tensorex.from_list([[1, 3],
...> [2, -5]]), 1.0e-6)
false
"""
@spec in_tolerance?(t, t, number) :: boolean
def in_tolerance?(
%Tensorex{data: store1, shape: shape},
%Tensorex{data: store2, shape: shape},
tolerance
)
when is_number(tolerance) and tolerance >= 0 do
Map.merge(store1, store2, fn _, value1, value2 ->
(value1 - value2) / max(abs(value1), abs(value2))
end)
|> Enum.all?(&(abs(elem(&1, 1)) <= tolerance))
end
def in_tolerance?(%Tensorex{}, %Tensorex{}, tolerance)
when is_number(tolerance) and tolerance >= 0 do
false
end
@doc """
Returns a permutation tensor (also called Levi-Civita or Eddington tensor).
iex> Tensorex.permutation(3)
%Tensorex{data: %{[0, 1, 2] => 1, [0, 2, 1] => -1,
[1, 0, 2] => -1, [1, 2, 0] => 1,
[2, 0, 1] => 1, [2, 1, 0] => -1}, shape: [3, 3, 3]}
"""
@spec permutation(pos_integer) :: t
def permutation(dimension) when is_integer(dimension) and dimension >= 2 do
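    # Builds every permutation of 0..dimension-1 while counting inversions;
    # an element's sign is +1 for an even inversion count and -1 for an odd one.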
store =
Stream.iterate([{[], 0}], fn acc ->
Stream.map(acc, fn {index, inversions} ->
Stream.iterate(0, &(&1 + 1))
|> Stream.take(dimension)
|> Stream.reject(&(&1 in index))
|> Stream.map(fn i -> {[i | index], Enum.count(index, &(&1 < i)) + inversions} end)
end)
|> Stream.concat()
end)
|> Enum.at(dimension)
|> Enum.into(%{}, fn
{index, inversions} when rem(inversions, 2) > 0 -> {index, -1}
{index, _} -> {index, 1}
end)
%Tensorex{data: store, shape: List.duplicate(dimension, dimension)}
end
@doc """
  Inserts elements at the given index along the given axis, shifting subsequent indices up by one.
iex> Tensorex.insert_at(
...> Tensorex.from_list([[ 1, 2, 3],
...> [ 4, 5, 6],
...> [ 7, 8, 9],
...> [10, 11, 12]]),
...> 1,
...> 0,
...> Tensorex.from_list([13, 14, 15])
...> )
%Tensorex{data: %{[0, 0] => 1, [0, 1] => 2, [0, 2] => 3,
[1, 0] => 13, [1, 1] => 14, [1, 2] => 15,
[2, 0] => 4, [2, 1] => 5, [2, 2] => 6,
[3, 0] => 7, [3, 1] => 8, [3, 2] => 9,
[4, 0] => 10, [4, 1] => 11, [4, 2] => 12}, shape: [5, 3]}
iex> Tensorex.insert_at(
...> Tensorex.from_list([[ 1, 2, 3],
...> [ 4, 5, 6],
...> [ 7, 8, 9],
...> [10, 11, 12]]),
...> 2,
...> 1,
...> Tensorex.from_list([13, 14, 15, 16])
...> )
%Tensorex{data: %{[0, 0] => 1, [0, 1] => 2, [0, 2] => 13, [0, 3] => 3,
[1, 0] => 4, [1, 1] => 5, [1, 2] => 14, [1, 3] => 6,
[2, 0] => 7, [2, 1] => 8, [2, 2] => 15, [2, 3] => 9,
[3, 0] => 10, [3, 1] => 11, [3, 2] => 16, [3, 3] => 12}, shape: [4, 4]}
iex> Tensorex.insert_at(
...> Tensorex.from_list([10, 11, 12]),
...> 1,
...> 0,
...> 13
...> )
%Tensorex{data: %{[0] => 10, [1] => 13, [2] => 11, [3] => 12}, shape: [4]}
iex> Tensorex.insert_at(
...> Tensorex.from_list([10, 11, 12]),
...> 1,
...> 0,
...> 0
...> )
%Tensorex{data: %{[0] => 10, [2] => 11, [3] => 12}, shape: [4]}
iex> Tensorex.insert_at(
...> Tensorex.from_list([[ 1, 2, 3],
...> [ 4, 5, 6],
...> [ 7, 8, 9],
...> [10, 11, 12]]),
...> 2,
...> 1,
...> Tensorex.from_list([13, 14, 15, 16, 17])
...> )
** (RuntimeError) expected the shape of the inserted tensor to be [4], got: [5]
"""
@spec insert_at(t, non_neg_integer, non_neg_integer, t | number) :: t
def insert_at(%Tensorex{data: store, shape: [dimension]}, index, 0, value)
when is_integer(index) and index >= 0 and index < dimension and is_number(value) do
keys = Map.keys(store) |> Enum.filter(fn [i] -> i >= index end)
{tail, head} = Map.split(store, keys)
shifted = Enum.into(tail, head, fn {[i], v} -> {[i + 1], v} end)
new_store = if value == 0, do: shifted, else: Map.put(shifted, [index], value)
%Tensorex{data: new_store, shape: [dimension + 1]}
end
def insert_at(
%Tensorex{data: store, shape: shape},
index,
axis,
%Tensorex{data: sub_store, shape: sub_shape}
)
when is_integer(index) and index >= 0 and
is_integer(axis) and axis >= 0 and
length(shape) > 1 and axis < length(shape) do
unless (expected_shape = List.delete_at(shape, axis)) == sub_shape do
raise "expected the shape of the inserted tensor to be " <>
"#{inspect(expected_shape)}, got: #{inspect(sub_shape)}"
end
keys = Map.keys(store) |> Enum.filter(&(Enum.at(&1, axis) >= index))
{tail, head} = Map.split(store, keys)
new_store =
Enum.into(tail, head, fn {i, v} -> {List.update_at(i, axis, &(&1 + 1)), v} end)
|> Map.merge(
Enum.into(sub_store, %{}, fn {i, v} -> {List.insert_at(i, axis, index), v} end)
)
{dimension, ^sub_shape} = List.pop_at(shape, axis)
%Tensorex{data: new_store, shape: List.insert_at(sub_shape, axis, dimension + 1)}
end
@doc """
  Removes elements at the given index along the given axis and closes up the remaining indices.
iex> Tensorex.delete_at(Tensorex.from_list([[ 1, 2, 3],
...> [ 4, 5, 6],
...> [ 7, 8, 9],
...> [10, 11, 12]]), 2, 0)
%Tensorex{data: %{[0, 0] => 1, [0, 1] => 2, [0, 2] => 3,
[1, 0] => 4, [1, 1] => 5, [1, 2] => 6,
[2, 0] => 10, [2, 1] => 11, [2, 2] => 12}, shape: [3, 3]}
iex> Tensorex.delete_at(Tensorex.from_list([[ 1, 2, 3],
...> [ 4, 5, 6],
...> [ 7, 8, 9],
...> [10, 11, 12]]), 1, 1)
%Tensorex{data: %{[0, 0] => 1, [0, 1] => 3,
[1, 0] => 4, [1, 1] => 6,
[2, 0] => 7, [2, 1] => 9,
[3, 0] => 10, [3, 1] => 12}, shape: [4, 2]}
iex> Tensorex.delete_at(Tensorex.from_list([1, 2, 3, 4, 5, 6]), 2, 0)
%Tensorex{data: %{[0] => 1, [1] => 2, [2] => 4, [3] => 5, [4] => 6}, shape: [5]}
"""
@spec delete_at(t, non_neg_integer, non_neg_integer) :: t
def delete_at(%Tensorex{data: store, shape: shape}, index, axis)
when is_integer(index) and index >= 0 and
is_integer(axis) and axis >= 0 and axis < length(shape) do
%{shift: shift_keys, drop: drop_keys} =
Map.keys(store)
|> Enum.group_by(fn key ->
case Enum.at(key, axis) do
^index -> :drop
i when i < index -> :leave
_ -> :shift
end
end)
{tail, head} = Map.drop(store, drop_keys) |> Map.split(shift_keys)
new_store = Enum.into(tail, head, fn {i, v} -> {List.update_at(i, axis, &(&1 - 1)), v} end)
%Tensorex{data: new_store, shape: List.update_at(shape, axis, &(&1 - 1))}
end
@doc """
  Concatenates two tensors; `axes` lists the orders along which the second tensor is offset.
iex> Tensorex.concat(
...> Tensorex.from_list([[ 1, 2, 3],
...> [ 4, 5, 6]]),
...> Tensorex.from_list([[-1, -2, -3],
...> [-4, -5, -6],
...> [-7, -8, -9]]),
...> [0]
...> )
%Tensorex{data: %{[0, 0] => 1, [0, 1] => 2, [0, 2] => 3,
[1, 0] => 4, [1, 1] => 5, [1, 2] => 6,
[2, 0] => -1, [2, 1] => -2, [2, 2] => -3,
[3, 0] => -4, [3, 1] => -5, [3, 2] => -6,
[4, 0] => -7, [4, 1] => -8, [4, 2] => -9}, shape: [5, 3]}
iex> Tensorex.concat(
...> Tensorex.from_list([[ 1, 2, 3],
...> [ 4, 5, 6]]),
...> Tensorex.from_list([[-1, -2, -3],
...> [-4, -5, -6],
...> [-7, -8, -9]]),
...> [0, 1]
...> )
%Tensorex{data: %{[0, 0] => 1, [0, 1] => 2, [0, 2] => 3,
[1, 0] => 4, [1, 1] => 5, [1, 2] => 6,
[2, 3] => -1, [2, 4] => -2, [2, 5] => -3,
[3, 3] => -4, [3, 4] => -5, [3, 5] => -6,
[4, 3] => -7, [4, 4] => -8, [4, 5] => -9}, shape: [5, 6]}
"""
@spec concat(t, t, Enum.t()) :: t
def concat(%Tensorex{data: store1, shape: shape1}, %Tensorex{data: store2, shape: shape2}, axes)
when length(shape1) === length(shape2) do
{shape, diff} =
Enum.reduce(axes, shape2, fn axis, acc -> List.update_at(acc, axis, &(-&1)) end)
|> Stream.zip(shape1)
|> Stream.map(fn
{dimension2, dimension1} when dimension2 < 0 -> {dimension1 - dimension2, dimension1}
{dimension, dimension} -> {dimension, nil}
end)
|> Enum.unzip()
store =
Enum.into(store2, store1, fn {index, value} ->
new_index =
Enum.map(Stream.zip(index, diff), fn
{i, nil} -> i
{i, a} -> i + a
end)
{new_index, value}
end)
%Tensorex{data: store, shape: shape}
end
end
|
lib/tensorex.ex
| 0.842734 | 0.732329 |
tensorex.ex
|
starcoder
|
defprotocol ShEx.TripleExpression do
@moduledoc !"""
Triple expressions are used for defining patterns composed of triple constraints.
"""
@doc """
Asserts that a triple expression is matched by a set of triples that come from the neighbourhood of a node in an RDF graph.
"""
def matches(triple_expression, triples, graph, schema, association, state)
def min_cardinality(triple_expression)
def max_cardinality(triple_expression)
def predicates(triple_expression, state)
def triple_constraints(triple_expression, state)
def required_arcs(triple_expression, state)
end
defmodule ShEx.TripleExpression.Shared do
@moduledoc false
def min_cardinality(triple_expression), do: triple_expression.min || 1
def max_cardinality(triple_expression), do: triple_expression.max || 1
def check_cardinality(count, min, triple_expression, violations) when count < min do
%ShEx.Violation.MinCardinality{
triple_expression: triple_expression,
triple_expression_violations: violations,
cardinality: count
}
end
def check_cardinality(_, _, _, _), do: :ok
def triple_constraints_of_group(group, state) do
group.expressions
|> Enum.flat_map(&ShEx.TripleExpression.triple_constraints(&1, state))
|> MapSet.new()
|> MapSet.to_list()
end
def predicates_of_group(group, state) do
group.expressions
|> Enum.flat_map(&ShEx.TripleExpression.predicates(&1, state))
|> MapSet.new()
end
  def required_arcs_of_group(group, state) do
    # The accumulator carries the arcs type seen so far ({:ok, nil} before the
    # first expression); a group that mixes different arcs types requires both
    # kinds of arcs.
    Enum.reduce_while(group.expressions, {:ok, nil}, fn expression, {:ok, arcs_type} ->
      expression
      |> ShEx.TripleExpression.required_arcs(state)
      |> case do
        {:ok, first_arcs_type} when is_nil(arcs_type) ->
          {:cont, {:ok, first_arcs_type}}
        {:ok, ^arcs_type} ->
          {:cont, {:ok, arcs_type}}
        {:ok, _} ->
          {:halt, {:ok, {:arcs_in, :arcs_out}}}
        {:error, _} = error ->
          {:halt, error}
      end
    end)
end
end
|
lib/shex/shape_expressions/triple_expression.ex
| 0.869742 | 0.604778 |
triple_expression.ex
|
starcoder
|
defmodule Muster.Game.Grid do
alias Muster.Game
alias Muster.Game.Tile
@type t :: [Tile.t()]
@grid_size 6
@spec new() :: t()
def new() do
[]
end
@spec put_tile_in_random_space(t(), Tile.value()) :: t()
def put_tile_in_random_space(tiles, value) do
{row, column} = tiles |> spaces |> Enum.random()
tile = %Tile{row: row, column: column, value: value}
sort([tile | tiles])
end
@spec put_ids(t(), next_id :: Tile.id()) :: {t(), next_id :: Tile.id()}
def put_ids(tiles, next_id) do
Enum.map_reduce(tiles, next_id, fn tile, next_id ->
if tile.id do
{tile, next_id}
else
{%{tile | id: next_id}, next_id + 1}
end
end)
end
defp spaces(tiles) do
indices = 0..(@grid_size - 1)
positions =
Enum.flat_map(indices, fn row ->
Enum.map(indices, fn column ->
{row, column}
end)
end)
tile_positions = Enum.map(tiles, fn tile -> {tile.row, tile.column} end)
positions -- tile_positions
end
defp sort(tiles) do
Enum.sort(tiles, Tile)
end
@spec move_tiles(t(), Game.direction()) :: t()
def move_tiles(tiles, :left) do
tiles
|> rows()
|> Enum.map(&move_tiles_in_row/1)
|> List.flatten()
end
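  # Moves in the other three directions are reduced to a left move by
  # reflecting and/or transposing the grid first and undoing the
  # transformation afterwards.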
def move_tiles(tiles, :right) do
tiles
|> reverse_columns()
|> move_tiles(:left)
|> reverse_columns()
end
def move_tiles(tiles, :up) do
tiles
|> transpose()
|> move_tiles(:left)
|> transpose()
end
def move_tiles(tiles, :down) do
tiles
|> transpose()
|> reverse_columns()
|> move_tiles(:left)
|> reverse_columns()
|> transpose()
end
defp rows(tiles) do
Enum.map(0..(@grid_size - 1), fn row_index ->
Enum.filter(tiles, fn tile -> tile.row == row_index end)
end)
end
@spec move_tiles_in_row([Tile.t()], Tile.index()) :: [Tile.t()]
def move_tiles_in_row(row, current_column \\ 0) do
case row do
[] ->
[]
[%{value: value} = tile | [%{value: value} | rest]] ->
[
%Tile{row: tile.row, column: current_column, value: value * 2}
| move_tiles_in_row(rest, current_column + 1)
]
[tile | rest] ->
[%{tile | column: current_column} | move_tiles_in_row(rest, current_column + 1)]
end
end
defp reverse_columns(tiles) do
tiles
|> Enum.map(&Tile.reverse_column(&1, @grid_size - 1))
|> sort()
end
defp transpose(tiles) do
tiles
|> Enum.map(&Tile.transpose/1)
|> sort()
end
@spec tile_present?(t(), Tile.value()) :: boolean()
def tile_present?(tiles, value) do
Enum.any?(tiles, fn tile -> tile.value == value end)
end
@spec count_spaces(t()) :: integer()
def count_spaces(tiles) do
tiles
|> spaces
|> length
end
end
|
apps/muster/lib/muster/game/grid.ex
| 0.775647 | 0.551513 |
grid.ex
|
starcoder
|
defmodule IBU.Code do
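  # The <<66, 84>> bytes in the patterns below are the ASCII prefix "BT".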
  @spec parse(binary) :: {:ok, atom, map} | {:error, :unknown_format, binary}
def parse(
<<66, 84, season::binary-size(4), 83, level::binary-size(5), 95, 95,
gender::binary-size(2), event::binary-size(2)>>
) do
{:ok, :cup,
%{
season: String.to_integer(season),
gender: parse_gender(gender),
event: parse_event_code(event),
level: parse_level(level)
}}
end
def parse(<<66, 84, country_code::binary-size(3), gender::binary-size(1)>>)
when gender in ["1", "2", "9"] do
{:ok, :team, %{country_code: country_code, gender: parse_gender(gender)}}
end
def parse(
<<66, 84, country_code::binary-size(3), gender::binary-size(1), birth_day::binary-size(2),
birth_month::binary-size(2), birth_year::binary-size(4), number::binary-size(2)>>
)
when gender in ["1", "2"] do
{:ok, :individual,
%{
country_code: country_code,
gender: parse_gender(gender),
birth_day: String.to_integer(birth_day),
birth_month: String.to_integer(birth_month),
birth_year: String.to_integer(birth_year),
number: String.to_integer(number)
}}
end
def parse(
<<66, 84, season::binary-size(4), 83, level::binary-size(5), race_number::binary-size(2),
gender::binary-size(2), event::binary-size(2)>>
)
when gender in ["SM", "SW", "MX"] and level in ["WRLCP", "WRLCH", "WRLOG"] do
{:ok, :race,
%{
season: String.to_integer(season),
level: parse_level(level),
race_number: parse_race_number(race_number),
gender: parse_gender(gender),
event: parse_event_code(event)
}}
end
def parse(
<<66, 84, season::binary-size(4), 83, level::binary-size(5),
competition_number::binary-size(2)>>
)
when level in ["WRLCP", "WRLCH", "WRLOG"] do
{:ok, :competition,
%{
season: String.to_integer(season),
level: parse_level(level),
competition_number: parse_race_number(competition_number)
}}
end
def parse(<<country_code::binary-size(3)>>) do
{:ok, :nation, %{country_code: country_code, gender: :mixed}}
end
def parse("MKD_FYROM") do
{:ok, :nation, %{country_code: "MKD", gender: :mixed}}
end
def parse(str), do: {:error, :unknown_format, str}
@spec parse_gender(binary) :: atom
defp parse_gender("SM"), do: :male
defp parse_gender("SW"), do: :female
defp parse_gender("MX"), do: :mixed
defp parse_gender("1"), do: :male
defp parse_gender("2"), do: :female
defp parse_gender("9"), do: :mixed
@spec parse_level(binary) :: atom
defp parse_level("WRLCH"), do: :world_championship
defp parse_level("WRLCP"), do: :world_cup
defp parse_level("WRLOG"), do: :olympics
@spec parse_event_code(binary) :: atom
defp parse_event_code("MS"), do: :mass_start
defp parse_event_code("RL"), do: :relay
defp parse_event_code("PU"), do: :pursuit
defp parse_event_code("SR"), do: :single_relay
defp parse_event_code("SP"), do: :sprint
defp parse_event_code("IN"), do: :individual
defp parse_event_code("NC"), do: :nations
defp parse_event_code("TS"), do: :overall
  @spec parse_race_number(binary) :: integer | nil
defp parse_race_number("__"), do: nil
defp parse_race_number(string), do: String.to_integer(string)
end
|
lib/ibu/code.ex
| 0.583203 | 0.580114 |
code.ex
|
starcoder
|
defmodule AOC.Day3 do
@moduledoc """
Solution to Day 3 of the Advent of code 2021
https://adventofcode.com/2021/day/3
"""
@doc """
Read the input file
  Return the result as a list of lists
[[0,1,0,1], [1,1,1,1]]
"""
  @spec get_inputs(String.t()) :: [[integer()]]
def get_inputs(f \\ "lib/inputs/day3.txt") do
input =
File.read!(f)
|> String.trim()
|> String.split("\n")
input
|> Enum.map(&(String.codepoints(&1) |> Enum.map(fn s -> String.to_integer(s) end)))
end
@doc """
Count the sum of bits in each position
"""
def count_bits(arr \\ get_inputs()) do
Enum.reduce(arr, fn b, counts ->
Enum.with_index(counts) |> Enum.map(fn {v, i} -> v + Enum.at(b, i) end)
end)
end
@doc """
Find the most common bit given the count and number of entries.
Expects output of count_bits as the first arg and length of inputs as second arg.
  Since only 0s and 1s are allowed, if the sum of bits in a position is greater than or equal to
  half the count, the most common bit is 1, else 0
"""
def most_common_bit(arr, c) do
arr |> Enum.map(&if &1 >= c / 2, do: 1, else: 0)
end
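  # e.g. most_common_bit([3, 1, 2], 4) => [1, 0, 1]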
@doc """
Find the least common bit given the count and number of entries.
Expects output of count_bits as the first arg and length of inputs as second arg.
  Since only 0s and 1s are allowed, if the sum of bits in a position is less than half
  the count, the least common bit is 1, else 0
"""
def least_common_bit(arr, c) do
arr |> Enum.map(&if &1 < c / 2, do: 1, else: 0)
end
@doc """
Convert an array like [1,0,0,1] into an integer 9
"""
def bit_arr_to_int(arr) do
{i, _s} = arr |> Enum.join() |> Integer.parse(2)
i
end
@doc """
Use the binary numbers in your diagnostic report to calculate the gamma rate
and epsilon rate, then multiply them together. What is the power consumption of the submarine?
"""
  def part1(f \\ "lib/inputs/day3.txt") do
    inputs = get_inputs(f)
    cnt = length(inputs)
    counts = count_bits(inputs)
    bit_arr_to_int(most_common_bit(counts, cnt)) * bit_arr_to_int(least_common_bit(counts, cnt))
  end
@doc """
Given an array of bit inputs (as returned by get_inputs/1) and a criteria function to determine
which elements to select, return the winning element from the inputs.
crfn can be either most_common_bit or least_common_bit
"""
def rating(_a, _b, _c)
def rating([], _crfn, _i), do: :fail
def rating([a], _crfn, _i) do
a
end
def rating(arr, crfn, i) do
criteria = arr |> count_bits |> crfn.(length(arr))
rating(
arr
|> Enum.filter(&(Enum.at(&1, i) == Enum.at(criteria, i))),
crfn,
i + 1
)
end
@doc """
To find oxygen generator rating, determine the most common value (0 or 1) in the current bit position,
and keep only numbers with that bit in that position. If 0 and 1 are equally common,
keep values with a 1 in the position being considered.
"""
def o2_rating(arr) do
rating(arr, &most_common_bit/2, 0)
end
@doc """
To find CO2 scrubber rating, determine the least common value (0 or 1) in the current bit position,
and keep only numbers with that bit in that position. If 0 and 1 are equally common,
keep values with a 0 in the position being considered.
"""
def co2_rating(arr) do
rating(arr, &least_common_bit/2, 0)
end
@doc """
Use the binary numbers in your diagnostic report to calculate the oxygen generator rating and CO2 scrubber rating,
then multiply them together. What is the life support rating of the submarine?
"""
def part2(f \\ "lib/inputs/day3.txt") do
inputs = get_inputs(f)
bit_arr_to_int(co2_rating(inputs)) * bit_arr_to_int(o2_rating(inputs))
end
end
|
elixir/advent_of_code/lib/2021/day3.ex
| 0.793826 | 0.772574 |
day3.ex
|
starcoder
|
defmodule Cforum.Cites do
@moduledoc """
The Cites context.
"""
import Ecto.Query, warn: false
alias Cforum.Repo
alias Cforum.Cites.Cite
alias Cforum.Cites.Vote
alias Cforum.System
@doc """
Returns the list of cites.
## Examples
iex> list_cites()
[%Cite{}, ...]
"""
def list_cites(archived, query_params \\ [order: nil, limit: nil, search: nil]) do
from(
cite in Cite,
where: cite.archived == ^archived,
preload: [:votes, :user, :creator_user, message: [:forum, :thread]]
)
|> Cforum.PagingApi.set_limit(query_params[:limit])
|> Cforum.OrderApi.set_ordering(query_params[:order], desc: :cite_id)
|> Repo.all()
end
@doc """
Counts all archived (with `archived=true`) or unarchived (with
`archived=false`) cites.
## Examples
iex> count_cites()
0
"""
@spec count_cites(boolean()) :: integer()
def count_cites(archived \\ true) do
from(
cite in Cite,
select: count("*"),
where: cite.archived == ^archived
)
|> Repo.one!()
end
@doc """
Counts votable cites the user has not voted for, yet.
## Examples
iex> count_undecided_cites(%User{})
0
"""
@spec count_undecided_cites(%Cforum.Users.User{}) :: integer()
def count_undecided_cites(user) do
from(
cite in Cite,
where:
cite.archived == false and
fragment(
"NOT EXISTS (SELECT cite_id FROM cites_votes WHERE cite_id = ? AND user_id = ?)",
cite.cite_id,
^user.user_id
),
select: count("*")
)
|> Repo.one!()
end
@doc """
Lists the cites which are ready to be archived.
## Example
iex> list_cites_to_archive(2)
[%Cite{}]
"""
@spec list_cites_to_archive(integer()) :: [%Cite{}]
def list_cites_to_archive(min_age) do
from(
cite in Cite,
where: cite.archived == false and datetime_add(cite.created_at, ^min_age, "week") < ^NaiveDateTime.utc_now(),
preload: [:votes]
)
|> Repo.all()
end
@doc """
Gets a single cite.
Raises `Ecto.NoResultsError` if the Cite does not exist.
## Examples
iex> get_cite!(123)
%Cite{}
iex> get_cite!(456)
** (Ecto.NoResultsError)
"""
def get_cite!(id) do
Cite
|> Repo.get!(id)
|> Repo.preload([:votes, :user, :creator_user, message: [:forum, :thread]])
end
@doc """
Creates a cite.
## Examples
iex> create_cite(%{field: value})
{:ok, %Cite{}}
iex> create_cite(%{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def create_cite(current_user, attrs) do
System.audited("create", current_user, fn ->
%Cite{}
|> Cite.changeset(attrs, current_user)
|> Repo.insert()
end)
|> maybe_index_cite()
|> maybe_upvote_cite(current_user)
end
defp maybe_upvote_cite(val, nil), do: val
defp maybe_upvote_cite({:ok, cite}, user) do
vote(cite, user, :up)
{:ok, cite}
end
defp maybe_upvote_cite(val, _), do: val
@doc """
Updates a cite.
## Examples
iex> update_cite(cite, %{field: new_value})
{:ok, %Cite{}}
iex> update_cite(cite, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def update_cite(current_user, %Cite{} = cite, attrs) do
System.audited("update", current_user, fn ->
cite
|> Cite.changeset(attrs)
|> Repo.update()
end)
|> maybe_index_cite()
end
@doc """
Archives a cite (sets `archived` to `true`)
## Examples
iex> archive_cite(%Cite{})
{:ok, %Cite{}}
"""
@spec archive_cite(%Cite{}) :: {:ok, %Cite{}} | {:error, any()}
def archive_cite(%Cite{} = cite) do
System.audited("archive", nil, fn ->
cite
|> Ecto.Changeset.change(%{archived: true})
|> Repo.update()
end)
|> maybe_index_cite()
end
defp maybe_index_cite({:ok, cite}) do
Cforum.Jobs.CiteIndexerJob.enqueue(cite)
{:ok, cite}
end
defp maybe_index_cite(val), do: val
@doc """
Deletes a Cite.
## Examples
iex> delete_cite(cite)
{:ok, %Cite{}}
iex> delete_cite(cite)
{:error, %Ecto.Changeset{}}
"""
@spec delete_cite(%Cforum.Users.User{} | nil, %Cite{}) :: {:ok, %Cite{}} | {:error, any()}
def delete_cite(current_user, %Cite{} = cite) do
System.audited("destroy", current_user, fn ->
Repo.delete(cite)
end)
|> maybe_unindex_cite()
end
def archive_delete_cite(%Cite{} = cite) do
System.audited("archive-del", nil, fn -> Repo.delete(cite) end)
|> maybe_unindex_cite()
end
defp maybe_unindex_cite({:ok, cite}) do
Cforum.Jobs.CiteUnindexerJob.enqueue(cite)
{:ok, cite}
end
defp maybe_unindex_cite(val), do: val
@doc """
Returns an `%Ecto.Changeset{}` for tracking cite changes.
## Examples
iex> change_cite(cite)
%Ecto.Changeset{source: %Cite{}}
"""
def change_cite(%Cite{} = cite, attrs \\ %{}) do
Cite.changeset(cite, attrs)
end
@doc """
Calculates the score of a cite (as in upvotes - downvotes)
## Examples
iex> score(%Cite{})
1
"""
@spec score(%Cite{}) :: integer()
def score(cite) do
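    # A vote_type of 0 is a downvote and 1 an upvote (see Vote.vtype/1).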
Enum.reduce(cite.votes, 0, fn
%Vote{vote_type: 0}, acc -> acc - 1
%Vote{vote_type: 1}, acc -> acc + 1
end)
end
@doc """
Counts the number of votes for a cite
## Examples
iex> no_votes(%Cite{})
0
"""
@spec no_votes(%Cite{}) :: non_neg_integer()
def no_votes(cite), do: length(cite.votes)
@doc """
Generates a score string for a cite
## Examples
iex> score_str(%Cite{})
"+1"
"""
@spec score_str(%Cite{}) :: String.t()
def score_str(cite), do: Cforum.Helpers.score_str(no_votes(cite), score(cite))
@doc """
Return true if the `user` has voted for `cite`
## Examples
iex> voted?(%Cite{}, %User{})
true
"""
@spec voted?(%Cite{}, %Cforum.Users.User{}) :: boolean()
def voted?(cite, user) when not is_nil(user),
do: Enum.find(cite.votes, fn vote -> vote.user_id == user.user_id end) != nil
@doc """
Return true if the `user` has voted for `cite` with vote `type` `:up` or `:down`
## Examples
iex> voted?(%Cite{}, %User{}, :up)
true
"""
@spec voted?(%Cite{}, %Cforum.Users.User{}, :up | :down | String.t()) :: boolean()
def voted?(cite, user, type) when not is_nil(user) and type in [:up, :down],
do: Enum.find(cite.votes, fn vote -> vote.user_id == user.user_id && vote.vote_type == Vote.vtype(type) end) != nil
def voted?(cite, user, "up"), do: voted?(cite, user, :up)
def voted?(cite, user, "down"), do: voted?(cite, user, :down)
def voted?(_, _, _), do: false
@doc """
Return true if the `user` has downvoted `cite`
## Examples
iex> downvoted?(%Cite{}, %User{})
false
"""
@spec downvoted?(%Cite{}, %Cforum.Users.User{} | nil) :: boolean()
def downvoted?(cite, user) when not is_nil(user), do: voted?(cite, user, :down)
def downvoted?(_, _), do: false
@doc """
Return true if the `user` has upvoted `cite`
## Examples
iex> upvoted?(%Cite{}, %User{})
true
"""
@spec upvoted?(%Cite{}, %Cforum.Users.User{} | nil) :: boolean()
def upvoted?(cite, user) when not is_nil(user), do: voted?(cite, user, :up)
def upvoted?(_, _), do: false
@doc """
Take back a vote of a `user` for a `cite`
## Examples
iex> take_back_vote(%Cite{}, %User{})
%Vote{}
"""
  @spec take_back_vote(%Cite{}, %Cforum.Users.User{}) :: %Vote{} | nil
def take_back_vote(cite, user) do
v = Enum.find(cite.votes, fn vote -> vote.user_id == user.user_id end)
if v, do: Repo.delete(v)
v
end
@doc """
Vote as `user` for a `cite` with the type `type`
## Examples
iex> vote(%Cite{}, %User{}, "up")
{:ok, %Vote{}}
"""
  @spec vote(%Cite{}, %Cforum.Users.User{}, :up | :down | String.t()) :: {:ok, %Vote{}} | {:error, %Ecto.Changeset{}}
def vote(cite, user, type) when type in [:up, :down, "up", "down"] do
%Vote{}
|> Vote.changeset(%{cite_id: cite.cite_id, user_id: user.user_id, vote_type: Vote.vtype(type)})
|> Repo.insert()
end
@doc """
Creates a `%Cite{}` struct from the map `object`
"""
@spec cite_from_json(map()) :: %Cite{}
def cite_from_json(object) do
%Cforum.Cites.Cite{}
|> Cite.json_changeset(object)
|> Ecto.Changeset.apply_changes()
|> Repo.preload([:user, :creator_user, message: [:thread]])
end
def cites_stats(months, :months) do
from(cite in Cforum.Cites.Cite,
select: {fragment("date_trunc('month', ?) AS created_at", cite.created_at), count("*")},
where: cite.created_at >= ago(^months, "month"),
group_by: fragment("1"),
order_by: fragment("1")
)
|> Repo.all()
|> Enum.map(fn {date, cnt} ->
%{cnt: cnt, date: date}
end)
end
end
|
lib/cforum/cites.ex
| 0.78403 | 0.405625 |
cites.ex
|
starcoder
|
defprotocol Membrane.Payload do
@moduledoc """
This protocol describes actions common to all payload types.
The most basic payload type is simply a binary for which `#{__MODULE__}`
is implemented by the Membrane Core.
"""
defmodule Behaviour do
@moduledoc """
Behaviour that should be implemented by every module that has
`Membrane.Payload` protocol implementation.
"""
@doc """
Creates an empty payload
"""
@callback empty() :: Membrane.Payload.t()
@doc """
Creates a new payload initialized with the given binary
"""
@callback new(binary()) :: Membrane.Payload.t()
end
@type t :: any()
@doc """
Returns total size of payload in bytes
"""
@spec size(payload :: t()) :: non_neg_integer()
def size(payload)
@doc """
Splits the payload at given position (1st part has the size equal to `at_pos` argument)
`at_pos` has to be greater than 0 and smaller than the size of payload, otherwise
an error is raised. This guarantees returned payloads are never empty.
"""
@spec split_at(payload :: t(), at_pos :: pos_integer()) :: {t(), t()}
def split_at(payload, at_pos)
@doc """
Concatenates the contents of two payloads.
"""
@spec concat(left :: t(), right :: t()) :: t()
def concat(left, right)
@doc """
Drops first `n` bytes of payload.
"""
@spec drop(payload :: t(), n :: non_neg_integer()) :: t()
def drop(payload, n)
@doc """
Converts payload into binary
"""
@spec to_binary(t()) :: binary()
def to_binary(payload)
@doc """
Returns a module responsible for this type of payload
and implementing `Membrane.Payload.Behaviour`
"""
@spec module(t()) :: module()
def module(payload)
end
defmodule Membrane.Payload.Binary do
@moduledoc """
`Membrane.Payload.Behaviour` implementation for binary payload.
Complements `Membrane.Payload` protocol implementation.
"""
@behaviour Membrane.Payload.Behaviour
@impl true
def empty(), do: <<>>
@impl true
def new(data) when is_binary(data) do
data
end
end
defimpl Membrane.Payload, for: BitString do
alias Membrane.Payload
@compile {:inline, module: 1}
@impl true
@spec size(payload :: binary()) :: pos_integer
def size(payload) when is_binary(payload) do
payload |> byte_size()
end
@impl true
@spec split_at(binary(), pos_integer) :: {binary(), binary()}
def split_at(payload, at_pos)
when is_binary(payload) and 0 < at_pos and at_pos < byte_size(payload) do
<<part1::binary-size(at_pos), part2::binary>> = payload
{part1, part2}
end
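  # e.g. split_at(<<1, 2, 3>>, 1) #=> {<<1>>, <<2, 3>>}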
@impl true
@spec concat(left :: binary(), right :: binary()) :: binary()
def concat(left, right) when is_binary(left) and is_binary(right) do
left <> right
end
@impl true
@spec drop(payload :: binary(), bytes :: non_neg_integer()) :: binary()
def drop(payload, bytes) when is_binary(payload) do
<<_dropped::binary-size(bytes), rest::binary>> = payload
rest
end
@impl true
@spec to_binary(binary()) :: binary()
def to_binary(payload) when is_binary(payload) do
payload
end
@impl true
@spec module(binary()) :: module()
def module(_payload), do: Payload.Binary
end
|
lib/membrane/payload.ex
| 0.928813 | 0.533701 |
payload.ex
|
starcoder
|
defmodule ChallengeGov.LoginGov do
@moduledoc """
Helper functions to sign in with LoginGov
"""
use HTTPoison.Base
alias ChallengeGov.LoginGov.Token
def get_well_known_configuration(idp_authorize_url) do
idp_authorize_url
|> uri_join("/.well-known/openid-configuration")
|> get()
|> handle_response("Sorry, could not fetch well known configuration")
end
def get_public_key(jwks_uri) do
jwks_uri
|> get()
|> handle_response("Sorry, could not fetch public key")
|> case do
{:ok, body} -> {:ok, body |> Map.fetch!("keys") |> List.first()}
      error -> error
end
end
def exchange_code_for_token(code, token_endpoint, jwt) do
body = %{
grant_type: "authorization_code",
code: code,
client_assertion_type: "urn:ietf:params:oauth:client-assertion-type:jwt-bearer",
client_assertion: jwt
}
token_endpoint
|> post(body, [{"Content-Type", "application/json"}])
|> handle_response("Sorry, could not exchange code")
end
def get_user_info(userinfo_endpoint, access_token) do
userinfo_endpoint
|> get([{"Authorization", "Bearer " <> access_token}])
|> handle_response("Sorry, could not fetch userinfo")
end
def build_authorization_url(client_id, acr_values, redirect_uri, idp_authorize_url) do
query = [
client_id: client_id,
response_type: "code",
acr_values: acr_values,
scope: "openid email profile:name",
redirect_uri: uri_join(redirect_uri, "/auth/result"),
state: random_value(),
nonce: random_value(),
prompt: "select_account"
]
idp_authorize_url <> "?" <> URI.encode_query(query)
end
def build_client_assertion(client_id, token_endpoint, private_key) do
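    # Login.gov authenticates clients with the private_key_jwt scheme: the
    # assertion below is a JWT signed with the app's registered private key.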
claims = %{
iss: client_id,
sub: client_id,
aud: token_endpoint,
jti: random_value(),
nonce: random_value(),
exp: DateTime.to_unix(DateTime.utc_now()) + 1000
}
Token.generate_and_sign!(claims, Token.signer(private_key))
end
def load_private_key(nil, private_key_path) do
JOSE.JWK.from_pem_file(private_key_path)
end
def load_private_key(password, private_key_path) do
JOSE.JWK.from_pem_file(password, private_key_path)
end
def decode_jwt(id_token, public_key) do
Token.verify(id_token, Token.signer(public_key))
end
def build_logout_uri(id_token, end_session_endpoint, redirect_uri) do
end_session_endpoint <>
"?" <>
URI.encode_query(
id_token_hint: id_token,
post_logout_redirect_uri: redirect_uri,
state: random_value()
)
end
defp handle_response(response, msg) do
case response do
{:ok, %HTTPoison.Response{status_code: 200, body: body}} ->
{:ok, body}
{:ok, %HTTPoison.Response{status_code: status_code}} ->
{:error, "#{msg}: #{status_code}"}
{:error, %HTTPoison.Error{reason: reason}} ->
{:error, "#{msg}: #{reason}"}
end
end
defp random_value do
:crypto.strong_rand_bytes(16)
|> Base.encode16(case: :lower)
end
defp uri_join(uri, path) do
uri
|> URI.merge(path)
|> URI.to_string()
end
def process_request_body(body) do
Poison.encode!(body)
end
def process_response_body(body) do
Poison.decode!(body)
end
end
|
lib/challenge_gov/login_gov.ex
| 0.602296 | 0.407127 |
login_gov.ex
|
starcoder
|
defmodule ShopifyAPI.Plugs.AdminAuthenticator do
@moduledoc """
The ShopifyAPI.Plugs.AdminAuthenticator plug allows for easy admin authentication. When included
in your route, the plug verifies the Shopify signatures that are added to the iframe call on admin page load and
sets a session cookie for the duration of the session.
The plug will assign the Shop, App and AuthToken to the Conn for easy access in your admin controller.
Make sure to include the App name in the path, in our example it is included directly in the path `"/shop-admin/:app"`.
## Example Usage
```elixir
# Router
pipeline :shop_admin do
plug ShopifyAPI.Plugs.AdminAuthenticator
end
scope "/shop-admin/:app", YourAppWeb do
pipe_through :browser
pipe_through :shop_admin
get "/", SomeAdminController, :index
end
```
"""
alias Plug.Conn
require Logger
@shopify_shop_header "x-shopify-shop-domain"
@session_key :shopify_api_admin_authenticated
@defaults [shopify_mount_path: "/shop"]
def init(opts), do: Keyword.merge(opts, @defaults)
def call(conn, options) do
if Conn.get_session(conn, @session_key) do
rehydrate_session(conn)
else
do_authentication(conn, options)
end
end
defp rehydrate_session(conn) do
with app_name <- Conn.get_session(conn, :app_name),
myshopify_domain <- Conn.get_session(conn, :shop_domain),
{:ok, app} <- ShopifyAPI.AppServer.get(app_name),
{:ok, shop} <- ShopifyAPI.ShopServer.get(myshopify_domain),
{:ok, auth_token} <- ShopifyAPI.AuthTokenServer.get(myshopify_domain, app_name) do
conn
|> assign_app(app)
|> assign_shop(shop)
|> assign_auth_token(auth_token)
else
error ->
Logger.debug("Failed to rehydrate session #{inspect(error)}")
conn
|> Conn.delete_session(@session_key)
|> Conn.resp(401, "Not Authorized.")
|> Conn.halt()
end
end
defp do_authentication(conn, options) do
with app_name <- conn.params["app"] || List.last(conn.path_info),
{:ok, app} <- ShopifyAPI.AppServer.get(app_name),
true <- valid_hmac?(app, conn.query_params),
myshopify_domain <- shop_domain_from_conn(conn),
{:ok, shop} <- ShopifyAPI.ShopServer.get(myshopify_domain),
{:ok, auth_token} <- ShopifyAPI.AuthTokenServer.get(myshopify_domain, app_name) do
# store the App and Shop name in the session for use on other page views
conn
|> assign_app(app)
|> assign_shop(shop)
|> assign_auth_token(auth_token)
|> Conn.put_session(:app_name, app_name)
|> Conn.put_session(:shop_domain, myshopify_domain)
|> Conn.put_session(@session_key, true)
else
false ->
Logger.info("#{__MODULE__} failed hmac validation")
conn
|> Conn.delete_session(@session_key)
|> Conn.resp(401, "Not Authorized.")
|> Conn.halt()
_ ->
# resolve the app name the same way do_authentication/2 does,
# instead of hardcoding a specific app name
app_name = conn.params["app"] || List.last(conn.path_info)
install_url =
options[:shopify_mount_path] <>
"/install?app=" <> app_name <> "&shop=" <> shop_domain_from_conn(conn)
conn
|> Conn.resp(:found, "")
|> Conn.put_resp_header("location", install_url)
|> Conn.halt()
end
end
defp assign_app(conn, app), do: Conn.assign(conn, :app, app)
defp assign_shop(conn, shop), do: Conn.assign(conn, :shop, shop)
defp assign_auth_token(conn, auth_token), do: Conn.assign(conn, :auth_token, auth_token)
defp shop_domain_from_conn(conn), do: shop_domain_from_header(conn) || conn.params["shop"]
defp shop_domain_from_header(conn),
do: conn |> Conn.get_req_header(@shopify_shop_header) |> List.first()
defp valid_hmac?(%ShopifyAPI.App{client_secret: secret}, params) do
params["hmac"] ==
params
|> Enum.reject(fn {key, _} -> key == "hmac" or key == "signature" end)
|> Enum.sort_by(&elem(&1, 0))
|> Enum.map_join("&", fn {key, value} -> key <> "=" <> value end)
|> ShopifyAPI.Security.base16_sha256_hmac(secret)
end
end
|
lib/shopify_api/plugs/admin_authenticator.ex
| 0.739705 | 0.493164 |
admin_authenticator.ex
|
starcoder
|
defmodule Liblink.Socket.Recvmsg.Impl do
use Liblink.Logger
alias Liblink.Socket.Device
alias Liblink.Socket.Recvmsg.Fsm
@moduledoc false
@opaque state_t :: map()
@type call_mode :: :sync | :async
@type consumer_t :: {atom, atom, list} | {atom, atom} | atom | pid | (iodata -> term)
@spec init() :: {:ok, state_t}
def init() do
{:ok, %{fsm: Fsm.new()}}
end
@spec halt(call_mode, state_t) :: {:reply, :ok, state_t}
def halt(mode, state) do
{fsm, data} = state.fsm
call_fsm(fn -> fsm.halt(data) end, mode, state)
end
@spec attach(Device.t(), call_mode, state_t) ::
{:reply, :ok, state_t} | {:reply, {:error, :badstate}, state_t}
def attach(device, mode, state) do
{fsm, data} = state.fsm
call_fsm(fn -> fsm.attach(device, data) end, mode, state)
end
@spec recvmsg(:sync, state_t) ::
{:reply, {:ok, iodata}, state_t}
| {:reply, {:error, :timeout}, state_t}
| {:reply, {:error, :empty}, state_t}
| {:reply, {:ok, :badstate}, state_t}
def recvmsg(:sync, state) do
{fsm, data} = state.fsm
call_fsm(fn -> fsm.recvmsg(data) end, :sync, state)
end
@spec poll(timeout, pid, :sync, state_t) ::
{:reply, {:ok, reference}, state_t}
| {:reply, {:error, :badstate}, state_t}
def poll(timeout, pid, :sync, state) do
{fsm, data} = state.fsm
with reply = {:reply, {:ok, tag}, _data} when is_reference(tag) <-
call_fsm(fn -> fsm.poll(pid, data) end, :sync, state) do
_ =
unless timeout == :infinity do
Process.send_after(self(), {:halt, :poll, tag}, timeout)
end
reply
end
end
@spec halt_poll(reference, call_mode, state_t) ::
{:reply, :ok, state_t}
| {:reply, {:error, :badstate}, state_t}
| {:noreply, state_t}
def halt_poll(tag, mode, state) do
{fsm, data} = state.fsm
call_fsm(fn -> fsm.halt_poll(tag, data) end, mode, state)
end
@spec consume(consumer_t, call_mode, state_t) ::
{:reply, :ok, state_t}
| {:reply, {:error, :badstate}, state_t}
| {:noreply, state_t}
def consume(consumer, mode, state) do
{fsm, data} = state.fsm
call_fsm(fn -> fsm.consume(consumer, data) end, mode, state)
end
@spec halt_consumer(call_mode, state_t) ::
{:reply, :ok, state_t}
| {:reply, {:error, :badstate}, state_t}
| {:noreply, state_t}
def halt_consumer(mode, state) do
{fsm, data} = state.fsm
call_fsm(fn -> fsm.halt_consumer(data) end, mode, state)
end
@spec on_liblink_message(iodata, :async, state_t) :: {:noreply, state_t}
def on_liblink_message(message, :async, state) do
{fsm, data} = state.fsm
call_fsm(fn -> fsm.on_liblink_message(message, data) end, :async, state)
end
@spec on_monitor_message(
{:DOWN, reference, :process, pid | {atom, atom}, atom},
:async,
state_t
) :: {:noreply, state_t}
def on_monitor_message(message, :async, state) do
{fsm, data} = state.fsm
call_fsm(fn -> fsm.on_monitor_message(message, data) end, :async, state)
end
@spec call_fsm((() -> Fsm.fsm_return()), call_mode, state_t) ::
{:noreply, state_t}
| {:reply, term, state_t}
| {:stop, :normal, state_t}
| {:stop, :normal, term, state_t}
defp call_fsm(event_fn, :async, state) do
case event_fn.() do
{:cont, next_state} ->
{:noreply, %{state | fsm: next_state}}
{:cont, _term, next_state} ->
{:noreply, %{state | fsm: next_state}}
{:halt, next_state} ->
{:stop, :normal, %{state | fsm: next_state}}
{:halt, _term, next_state} ->
{:stop, :normal, %{state | fsm: next_state}}
end
end
defp call_fsm(event_fn, :sync, state) do
case event_fn.() do
{:cont, next_state} ->
{:noreply, nil, %{state | fsm: next_state}}
{:cont, term, next_state} ->
{:reply, term, %{state | fsm: next_state}}
{:halt, next_state} ->
{:stop, :normal, nil, %{state | fsm: next_state}}
{:halt, term, next_state} ->
{:stop, :normal, term, %{state | fsm: next_state}}
end
end
end
|
lib/liblink/socket/recvmsg/impl.ex
| 0.757974 | 0.417628 |
impl.ex
|
starcoder
|
if Code.ensure_loaded?(Plug) do
defmodule PrimaAuth0Ex.Plug.VerifyAndValidateToken do
@moduledoc """
Plug to verify and validate bearer tokens
Usage:
plug PrimaAuth0Ex.Plug.VerifyAndValidateToken, required_permissions: ["some:permission"]
## Options
The following options can be set to customize the behavior of this plug:
* `audience: "my-audience"` sets the expected audience. Defaults to the audience set in `config.exs`.
* `required_permissions: ["p1", "p2"]` sets the set of permissions that clients are required to have.
Clients who do not have **all** the required permissions are forbidden from accessing the API.
Default is `[]`, ie. no permissions required, overridable from `config.exs`.
* `dry_run: false` when true allows clients to access the API even when their token is missing/invalid.
Mostly useful for testing purposes. Default is `false`, overridable from `config.exs`.
* `ignore_signature: false` when true, validates claims found in a token without verifying its signature.
Should only be enabled in dev/test environments, as it allows anyone to forge valid tokens.
Default is `false`, overridable from `config.exs`.
"""
import Plug.Conn
require Logger
def init(opts) do
if !Keyword.has_key?(opts, :required_permissions) do
raise "required_permissions configuration must be set"
end
opts
end
def call(%Plug.Conn{} = conn, opts) do
audience = Keyword.get(opts, :audience, global_audience())
dry_run? = Keyword.get(opts, :dry_run, global_dry_run())
ignore_signature = Keyword.get(opts, :ignore_signature, global_ignore_signature())
required_permissions = Keyword.get(opts, :required_permissions)
missing_auth_header_log_level =
Keyword.get(opts, :missing_auth_header_log_level, global_missing_auth_header_log_level())
if authorized?(conn, audience, required_permissions, ignore_signature, missing_auth_header_log_level),
do: conn,
else: forbidden(conn, dry_run?)
end
defp authorized?(conn, audience, required_permissions, ignore_signature, missing_auth_header_log_level) do
case get_req_header(conn, "authorization") do
[] ->
Logger.log(missing_auth_header_log_level, "Authorization header not found")
false
["Bearer " <> token] ->
valid_token?(token, audience, required_permissions, ignore_signature)
_other ->
Logger.warn("Authorization header malformed")
false
end
end
defp valid_token?(token, audience, required_permissions, ignore_signature) do
case PrimaAuth0Ex.verify_and_validate(token, audience, required_permissions, ignore_signature) do
{:ok, _} ->
true
{:error, error} ->
Logger.warn("Received invalid token",
audience: audience,
required_permissions: required_permissions,
error: inspect(error)
)
false
end
end
defp forbidden(conn, true = _dry_run?), do: conn
defp forbidden(conn, false = _dry_run?) do
conn
|> send_resp(:unauthorized, "Forbidden.")
|> halt()
end
defp global_audience,
do: :prima_auth0_ex |> Application.get_env(:server, []) |> Keyword.get(:audience)
defp global_dry_run,
do: :prima_auth0_ex |> Application.get_env(:server, []) |> Keyword.get(:dry_run, false)
defp global_ignore_signature,
do:
:prima_auth0_ex
|> Application.get_env(:server, [])
|> Keyword.get(:ignore_signature, false)
defp global_missing_auth_header_log_level,
do:
:prima_auth0_ex
|> Application.get_env(:server, [])
|> Keyword.get(:missing_auth_header_log_level, :warn)
end
end
|
lib/prima_auth0_ex/plug/verify_and_validate_token.ex
| 0.667039 | 0.435841 |
verify_and_validate_token.ex
|
starcoder
|
defmodule Tempus.Slots do
@moduledoc """
The ordered collection of slots, backed up by `AVLTree`.
This module implements `Enumerable` and `Collectable` interfaces.
### Examples
iex> slots = [
...> Tempus.Slot.wrap(~D|2020-08-07|),
...> Tempus.Slot.wrap(~D|2020-08-10|),
...> %Tempus.Slot{
...> from: ~U|2020-08-07 01:00:00Z|, to: ~U|2020-08-08 01:00:00Z|}]
...> Enum.into(slots, %Tempus.Slots{})
#Slots<[#Slot<[from: ~U[2020-08-07 00:00:00.000000Z], to: ~U[2020-08-08 01:00:00Z]]>, #Slot<[from: ~U[2020-08-10 00:00:00.000000Z], to: ~U[2020-08-10 23:59:59.999999Z]]>]>
iex> Enum.map(slots, & &1.from)
[~U[2020-08-07 00:00:00.000000Z], ~U[2020-08-10 00:00:00.000000Z], ~U[2020-08-07 01:00:00Z]]
"""
alias Tempus.{Slot, Slots}
use Tempus.Telemetria, action: :import
@empty AVLTree.new(&Slots.less/2)
defstruct slots: []
@typedoc "AVL Tree specialized for `Tempus` slots type"
@type avl_tree :: %AVLTree{
root: nil | Slot.t(),
size: non_neg_integer(),
less: (Slot.t(), Slot.t() -> boolean())
}
@type t :: %Slots{slots: [Slot.t()]}
@spec size(t()) :: integer()
@doc "Returns the number of slots"
def size(%Slots{slots: slots}), do: length(slots)
@spec avl_tree(t()) :: avl_tree()
@doc "Returns the AVL Tree instance of slots"
def avl_tree(%Slots{slots: slots}), do: Enum.into(slots, @empty)
@spec merge(this :: t(), other :: Enumerable.t()) :: t()
@doc """
Merges `other` into `this` slots instance. `other` might be `Enum` _or_ `Stream`.
When `other` is a stream, it gets terminated immediately after the last element
in `this`.
### Examples
iex> slots = [
...> Tempus.Slot.wrap(~D|2020-08-07|),
...> Tempus.Slot.wrap(~D|2020-08-10|)
...> ] |> Enum.into(%Tempus.Slots{})
iex> other = [
...> %Tempus.Slot{from: ~U|2020-08-07 23:00:00Z|, to: ~U|2020-08-08 12:00:00Z|},
...> %Tempus.Slot{from: ~U|2020-08-12 23:00:00Z|, to: ~U|2020-08-12 23:30:00Z|}
...> ]
iex> Tempus.Slots.merge(slots, other)
#Slots<[#Slot<[from: ~U[2020-08-07 00:00:00.000000Z], to: ~U[2020-08-08 12:00:00Z]]>, #Slot<[from: ~U[2020-08-10 00:00:00.000000Z], to: ~U[2020-08-10 23:59:59.999999Z]]>, #Slot<[from: ~U[2020-08-12 23:00:00Z], to: ~U[2020-08-12 23:30:00Z]]>]>
iex> Tempus.Slots.merge(slots, Stream.map(other, & &1))
#Slots<[#Slot<[from: ~U[2020-08-07 00:00:00.000000Z], to: ~U[2020-08-08 12:00:00Z]]>, #Slot<[from: ~U[2020-08-10 00:00:00.000000Z], to: ~U[2020-08-10 23:59:59.999999Z]]>]>
"""
@telemetria level: :info
def merge(%Slots{} = this, %Stream{} = other),
do: do_merge_stream(this, other)
@telemetria level: :info
def merge(%Slots{} = this, other) when is_function(other),
do: do_merge_stream(this, other)
@telemetria level: :info
def merge(%Slots{} = this, %Slot{} = slot),
do: add(this, slot)
@telemetria level: :info
def merge(%Slots{} = this, other) do
if is_nil(Enumerable.impl_for(other)) do
raise Tempus.ArgumentError, expected: Enum, passed: other
end
Enum.reduce(other, this, &add(&2, &1))
end
@spec do_merge_stream(this :: t(), other :: Enumerable.t()) :: t()
defp do_merge_stream(%Slots{slots: []}, other),
do: %Slots{slots: Enum.take(other, 1)}
defp do_merge_stream(%Slots{slots: slots} = this, other) do
other =
other
|> Stream.take_while(&(&1 |> Slot.wrap() |> less(List.last(slots))))
|> Enum.to_list()
merge(this, other)
end
@spec add(t(), Slot.origin()) :: t()
@doc """
Adds another slot to the slots collection.
Joins slots intersecting with the new one, if any.
### Example
iex> Tempus.Slots.add(%Tempus.Slots{}, Tempus.Slot.wrap(~D|2020-08-07|))
#Slots<[#Slot<[from: ~U[2020-08-07 00:00:00.000000Z], to: ~U[2020-08-07 23:59:59.999999Z]]>]>
iex> %Tempus.Slots{}
...> |> Tempus.Slots.add(Tempus.Slot.wrap(~D|2020-08-07|))
...> |> Tempus.Slots.add(Tempus.Slot.wrap(~D|2020-08-10|))
...> |> Tempus.Slots.add(%Tempus.Slot{
...> from: ~U|2020-08-07 01:00:00Z|, to: ~U|2020-08-08 01:00:00Z|})
#Slots<[#Slot<[from: ~U[2020-08-07 00:00:00.000000Z], to: ~U[2020-08-08 01:00:00Z]]>, #Slot<[from: ~U[2020-08-10 00:00:00.000000Z], to: ~U[2020-08-10 23:59:59.999999Z]]>]>
"""
@telemetria level: :debug
def add(%Slots{slots: []}, slot),
do: %Slots{slots: [Slot.wrap(slot)]}
@telemetria level: :debug
def add(%Slots{slots: slots}, slot) do
slot = Slot.wrap(slot)
case Enum.split_with(slots, &(Slot.strict_compare(&1, slot) == :lt)) do
{^slots, []} ->
%Slots{slots: slots ++ [slot]}
{head, slots} ->
tail =
case Enum.split_with(slots, &(Slot.strict_compare(&1, slot) == :gt)) do
{^slots, []} ->
[slot | slots]
{tail, joint} ->
[Enum.reduce(joint, slot, &Slot.join([&1, &2])) | tail]
end
%Slots{slots: head ++ tail}
end
end
@spec inverse(slots :: Slots.t(), tails :: :keep | :discard) :: Slots.t()
@doc """
Inverses `Slots` returning the new `Slots` instance with slots set where
there were blanks.
### Example
iex> [
...> Tempus.Slot.wrap(~D|2020-08-07|),
...> Tempus.Slot.wrap(~D|2020-08-08|),
...> Tempus.Slot.wrap(~D|2020-08-10|),
...> Tempus.Slot.wrap(~D|2020-08-12|)
...> ] |> Enum.into(%Tempus.Slots{})
...> |> Tempus.Slots.inverse()
%Tempus.Slots{slots: [
%Tempus.Slot{from: nil, to: ~U[2020-08-06 23:59:59.999999Z]},
%Tempus.Slot{from: ~U[2020-08-09 00:00:00.000000Z], to: ~U[2020-08-09 23:59:59.999999Z]},
%Tempus.Slot{from: ~U[2020-08-11 00:00:00.000000Z], to: ~U[2020-08-11 23:59:59.999999Z]},
%Tempus.Slot{from: ~U[2020-08-13 00:00:00.000000Z], to: nil}]}
iex> [
...> %Tempus.Slot{to: ~U[2020-08-08 23:59:59.999999Z]},
...> Tempus.Slot.wrap(~D|2020-08-10|),
...> %Tempus.Slot{from: ~U[2020-08-12 00:00:00.000000Z]}
...> ] |> Enum.into(%Tempus.Slots{})
...> |> Tempus.Slots.inverse()
%Tempus.Slots{slots: [
%Tempus.Slot{from: ~U[2020-08-09 00:00:00.000000Z], to: ~U[2020-08-09 23:59:59.999999Z]},
%Tempus.Slot{from: ~U[2020-08-11 00:00:00.000000Z], to: ~U[2020-08-11 23:59:59.999999Z]}
]}
"""
def inverse(slots, tails \\ :keep)
@telemetria level: :info
def inverse(%Slots{slots: []} = slots, _), do: slots
@telemetria level: :info
def inverse(%Slots{slots: slots}, tails) do
tail =
slots
|> Enum.chunk_every(2, 1)
|> Enum.reduce([], fn
[%Slot{to: from}, %Slot{from: to}], acc ->
slot = Slot.shift(%Slot{from: from, to: to}, from: 1, to: -1)
if Slot.valid?(slot), do: [slot | acc], else: acc
[%Slot{to: from}], acc ->
if tails == :keep and not is_nil(from),
do: [Slot.shift(%Slot{from: from}, from: 1) | acc],
else: acc
end)
|> Enum.sort({:asc, Slot})
slots =
if tails == :keep and not is_nil(hd(slots).from),
do: [Slot.shift(%Slot{to: hd(slots).from}, to: -1) | tail],
else: tail
%Slots{slots: slots}
end
@spec wrap(Slot.t()) :: Slots.t()
@doc since: "0.3.0"
@doc """
Wraps the argument into a slots instance. For `nil` it’d be an empty slots.
For everything else it’d call `Slot.wrap/1` on an argument and add it to empty slots.
## Examples
iex> Tempus.Slots.wrap(~D|2020-08-06|)
#Slots<[#Slot<[from: ~U[2020-08-06 00:00:00.000000Z], to: ~U[2020-08-06 23:59:59.999999Z]]>]>
"""
def wrap(nil), do: %Slots{}
def wrap(slot), do: Slots.add(%Slots{}, Slot.wrap(slot))
@spec less(s1 :: Slot.t(), s2 :: Slot.t()) :: boolean()
@doc false
def less(%Slot{} = s1, %Slot{} = s2),
do: Slot.strict_compare(s1, s2) == :lt
defimpl Enumerable do
@moduledoc false
def reduce(%Slots{slots: slots}, {state, acc}, fun),
do: Enumerable.reduce(slots, {state, acc}, fun)
def member?(%Slots{slots: slots}, value),
do: Enumerable.member?(slots, value)
def count(%Slots{slots: slots}), do: {:ok, length(slots)}
def slice(_) do
{:error, __MODULE__}
end
end
defimpl Collectable do
@moduledoc false
alias Tempus.Slots
def into(original) do
{
original,
fn
slots, {:cont, value} -> Slots.add(slots, value)
slots, :done -> slots
_, :halt -> :ok
end
}
end
end
defimpl Inspect do
@moduledoc false
import Inspect.Algebra
def inspect(%Tempus.Slots{slots: slots}, opts) do
concat(["#Slots<", to_doc(Enum.to_list(slots), opts), ">"])
end
end
end
|
lib/slots.ex
| 0.873059 | 0.5425 |
slots.ex
|
starcoder
|
defmodule Weaver.Step do
@moduledoc """
Core processing logic for each chunk of streamed data.
"""
alias Weaver.{Marker, Resolvers}
def process_resolved(resolved, step, cache, parent_ref, field) do
case analyze_resolved(resolved, step) do
{:entire_data, []} ->
meta = meta_delete_all(cache, parent_ref, field)
{[], meta, nil}
{:entire_data, objs} ->
meta =
[{:add, parent_ref, field, Resolvers.start_marker(objs)}] ++
meta_delete_all(cache, parent_ref, field)
{objs, meta, nil}
{:last_data, objs} ->
meta =
[{:del, parent_ref, field, step.prev_chunk_end}] ++
meta_delete_all(cache, parent_ref, field, less_than: step.prev_chunk_end.val)
{objs, meta, nil}
# no gap or gap not closed -> continue with this marker
{:continue, objs, new_chunk_end} ->
meta = [
first_meta(step, resolved, parent_ref, field),
{:add, parent_ref, field, new_chunk_end}
]
next = %{step | prev_chunk_end: new_chunk_end, count: step.count + length(objs)}
{objs, meta, next}
# gap closed -> look up the next chunk start in next iteration
{:gap_closed, objs} ->
meta = [
first_meta(step, resolved, parent_ref, field),
{:del, parent_ref, field, step.next_chunk_start}
]
next = %{
step
| prev_chunk_end: :not_loaded,
next_chunk_start: :not_loaded,
refreshed: true,
count: step.count + length(objs)
}
{objs, meta, next}
end
end
defp first_meta(step = %{prev_chunk_end: %Marker{}}, _resolved, parent_ref, field) do
{:del, parent_ref, field, step.prev_chunk_end}
end
defp first_meta(_step, {:continue, objs, _marker}, parent_ref, field) do
{:add, parent_ref, field, Resolvers.start_marker(objs)}
end
defp meta_delete_all(cache, parent_ref, field, opts \\ []) do
cache
|> markers!(parent_ref, field, opts)
|> Enum.map(&{:del, parent_ref, field, &1})
end
defp analyze_resolved({:done, objs}, %{prev_chunk_end: %Marker{}}) do
{:last_data, objs}
end
defp analyze_resolved({:done, objs}, _) do
{:entire_data, objs}
end
# no gap
defp analyze_resolved({:continue, objs, new_chunk_end}, %{next_chunk_start: nil}) do
{:continue, objs, new_chunk_end}
end
# gap closed?
defp analyze_resolved({:continue, objs, new_chunk_end}, step = %{}) do
case Enum.split_while(objs, &before_marker?(&1, step.next_chunk_start)) do
{objs, []} -> {:continue, objs, new_chunk_end}
{objs, __} -> {:gap_closed, objs}
end
end
defp before_marker?(obj, marker) do
Resolvers.marker_val(obj) > marker.val &&
Resolvers.id_for(obj) != marker.ref.id
end
def load_markers(step = %{next_chunk_start: val}, _opts, _cache, _parent_ref, _field)
when val != :not_loaded do
step
end
def load_markers(step, _opts, nil, _parent_ref, _field) do
%{step | next_chunk_start: nil}
end
def load_markers(step, _opts, _cache, nil, _field) do
%{step | next_chunk_start: nil}
end
def load_markers(step = %{refreshed: false}, %{refresh: true}, cache, parent_ref, field) do
next_chunk_start =
markers!(cache, parent_ref, field, limit: 1)
|> List.first()
%{step | next_chunk_start: next_chunk_start}
end
def load_markers(step, %{backfill: true}, cache, parent_ref, field) do
markers!(cache, parent_ref, field, limit: 3)
|> Enum.split_while(&(&1.type != :chunk_end))
|> case do
{_refresh_end, [prev_chunk_end | rest]} ->
%{step | prev_chunk_end: prev_chunk_end, next_chunk_start: List.first(rest)}
_else ->
%{step | next_chunk_start: nil}
end
end
defp markers!(nil, _parent_ref, _field, _opts), do: []
defp markers!({mod, cache_opts}, parent_ref, field, opts) do
mod.markers!(parent_ref, field, Keyword.merge(cache_opts, opts))
end
defp markers!(mod, parent_ref, field, opts) do
mod.markers!(parent_ref, field, opts)
end
end
|
lib/weaver/step.ex
| 0.667473 | 0.420124 |
step.ex
|
starcoder
|
defmodule ExDag.DAG do
@moduledoc """
Represents a DAG of tasks.
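
## Example

A minimal sketch of building a DAG. `MyTaskHandler` stands in for a module
implementing the task handler behaviour, and the options are assumed to
satisfy `DAGTask.validate/1`:

    dag =
      ExDag.DAG.new("my_dag")
      |> ExDag.DAG.add_task!(id: :a, handler: MyTaskHandler)
      |> ExDag.DAG.add_task!(id: :b, handler: MyTaskHandler, parent: :a)

    ExDag.DAG.get_deps(dag, :a)
    #=> [:b]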
"""
alias ExDag.DAG.DAGTask
alias ExDag.DAG.DAGTaskRun
require Logger
@derive {Inspect, only: [:dag_id, :status]}
@derive {Jason.Encoder, only: [:dag_id, :status]}
@status_init :init
@enforce_keys [:dag_id, :g]
defstruct dag_id: nil,
status: :init,
g: nil,
completed: nil,
running: nil,
failed: nil,
timer: nil,
tasks: nil,
task_runs: nil,
task_deps: nil,
handler: nil,
task_handler: nil
@status_running :running
@status_done :done
@status_init :init
@root :__root
@type t :: %__MODULE__{
dag_id: String.t(),
status: atom(),
g: Graph.t(),
completed: map(),
running: map(),
failed: map(),
tasks: map(),
task_runs: map(),
task_deps: map(),
handler: atom() | nil
}
@doc """
Creates a new DAG struct.
"""
def new(dag_id) when is_binary(dag_id) and byte_size(dag_id) > 0 do
new(dag_id, nil, nil)
end
def new(_dag_id) do
{:error, :invalid_dag_id}
end
def new(dag_id, handler, task_handler)
when is_binary(dag_id) and byte_size(dag_id) > 0 and is_atom(handler) do
g = Graph.new(type: :directed)
# |> Graph.add_vertex(@root)
# root_task = DAGTask.new(id: @root, handler: :none)
running = %{}
failed = %{}
completed = %{}
tasks = %{}
task_runs = %{}
task_deps = %{}
struct!(__MODULE__,
dag_id: dag_id,
g: g,
failed: failed,
running: running,
completed: completed,
tasks: tasks,
task_runs: task_runs,
task_deps: task_deps,
handler: handler,
status: :init,
task_handler: task_handler
)
end
def get_tasks(%__MODULE__{} = dag) do
Map.keys(dag.tasks) -- [@root]
end
@spec set_handler(ExDag.DAG.t(), atom) :: ExDag.DAG.t()
def set_handler(%__MODULE__{} = dag, handler) when is_atom(handler) do
%__MODULE__{dag | handler: handler}
end
def set_default_task_handler(%__MODULE__{} = dag, handler) when is_atom(handler) do
%__MODULE__{dag | task_handler: handler}
end
def set_tasks_handler(%__MODULE__{} = dag, handler) when is_atom(handler) do
tasks =
Enum.map(dag.tasks, fn {key, %DAGTask{} = t} ->
{key, %DAGTask{t | handler: handler}}
end)
|> Map.new()
%__MODULE__{dag | tasks: tasks}
end
@doc """
Returns `true` if the DAG has a valid tree structure for running, `false` otherwise.
"""
def validate_for_run(%__MODULE__{g: g}) do
Graph.is_tree?(g)
end
def validate_for_run(_dag) do
false
end
@spec get_task(ExDag.DAG.t(), any) :: any
def get_task(%__MODULE__{} = dag, task_id) do
Map.get(dag.tasks, task_id)
end
@spec add_task(ExDag.DAG.t(), keyword | ExDag.DAG.DAGTask.t()) ::
{:error, :invalid_task | :no_parent_task | :task_exists} | {:ok, ExDag.DAG.t()}
def add_task(dag, task_or_opts) do
do_add_task(dag, task_or_opts)
end
@spec add_task!(ExDag.DAG.t(), keyword | ExDag.DAG.DAGTask.t()) :: ExDag.DAG.t()
def add_task!(dag, task_or_opts) do
case do_add_task(dag, task_or_opts) do
{:ok, %__MODULE__{} = new_dag} ->
new_dag
error ->
throw(error)
end
end
@spec add_task!(ExDag.DAG.t(), ExDag.DAG.DAGTask.t(), any) :: ExDag.DAG.t()
def add_task!(dag, task_or_opts, parent_task_id) do
case add_task(dag, task_or_opts, parent_task_id) do
{:ok, %__MODULE__{} = new_dag} ->
new_dag
error ->
throw(error)
end
end
@spec add_task(ExDag.DAG.t(), ExDag.DAG.DAGTask.t(), any) ::
{:error, :invalid_task | :no_parent_task | :task_exists} | {:ok, ExDag.DAG.t()}
def add_task(
%__MODULE__{status: @status_init, task_handler: default_handler} = dag,
%DAGTask{handler: handler} = task,
parent_task_id
) do
case get_task(dag, parent_task_id) do
%DAGTask{} = parent_task ->
task =
if is_nil(handler) do
%DAGTask{task | handler: default_handler}
else
task
end
add_task_with_parent(dag, task, parent_task)
_ ->
{:error, :no_parent_task}
end
end
defp do_add_task(%__MODULE__{status: @status_init, task_handler: task_handler} = dag, opts)
when is_list(opts) do
parent = Keyword.get(opts, :parent, nil)
opts =
case Keyword.get(opts, :handler, nil) do
nil ->
Keyword.merge(opts, handler: task_handler)
_handler ->
opts
end
if is_nil(parent) do
task = DAGTask.new(opts)
if DAGTask.validate(task) do
do_add_task(dag, task)
else
{:error, :invalid_dag_task}
end
else
opts = Keyword.delete(opts, :parent)
task = DAGTask.new(opts)
if DAGTask.validate(task) do
add_task(dag, task, parent)
else
{:error, :invalid_dag_task}
end
end
end
defp do_add_task(
%__MODULE__{task_handler: default_handler} = dag,
%DAGTask{handler: handler} = task
)
when is_atom(default_handler) or is_atom(handler) do
task =
if is_nil(handler) do
%DAGTask{task | handler: default_handler}
else
task
end
Logger.info("Adding new task: #{inspect(Map.from_struct(task))}")
case DAGTask.validate(task) do
true ->
case Map.has_key?(dag.tasks, task.id) do
false ->
tasks = Map.put(dag.tasks, task.id, task)
dag = %__MODULE__{dag | tasks: tasks}
{:ok, update_graph(dag, Graph.add_vertex(dag.g, task.id, {:info, task}))}
_ ->
{:error, :task_exists}
end
_ ->
{:error, :invalid_task}
end
end
defp add_task_with_parent(dag, task, parent_task) do
case DAGTask.validate(task) do
true ->
case do_add_task(dag, task) do
{:ok, dag} ->
add_dependency(dag, parent_task.id, task.id)
error ->
error
end
_any ->
{:error, :invalid_task}
end
end
defp add_dependency(%__MODULE__{status: @status_init} = dag, %DAGTask{id: task1_id}, %DAGTask{
id: task2_id
}) do
add_dependency(dag, task1_id, task2_id)
end
defp add_dependency(%__MODULE__{status: @status_init} = dag, task1_id, task2_id) do
# add edge and update label with deps
if Map.has_key?(dag.tasks, task1_id) and Map.has_key?(dag.tasks, task2_id) do
edge = Graph.Edge.new(task1_id, task2_id)
updated_g =
dag.g
|> Graph.add_edge(edge)
|> Graph.label_vertex(task1_id, {:deps, task2_id})
dag = update_graph(dag, updated_g)
{:ok, %__MODULE__{dag | task_deps: build_task_deps(dag)}}
else
{:error, :invalid_task}
end
end
@doc """
Returns a list of tasks that the given task depends on
"""
@spec get_deps(ExDag.DAG.t(), task_id :: any) :: list()
def get_deps(%__MODULE__{} = dag, task_id) do
Map.get(dag.task_deps, task_id, [])
end
@spec get_deps_map(ExDag.DAG.t()) :: map()
def get_deps_map(%__MODULE__{} = dag) do
dag.task_deps
end
@doc """
Returns all the runs for a DAG task
"""
@spec get_runs(ExDag.DAG.t(), task_id :: any) :: list()
def get_runs(%__MODULE__{} = dag, task_id) do
dag.task_runs
|> Map.get(task_id, [])
end
defp update_graph(dag, g) do
%__MODULE__{dag | g: g}
end
defp build_task_deps(%__MODULE__{} = dag) do
dag.g
|> Graph.edges()
|> Enum.group_by(fn %Graph.Edge{v1: task1_id} ->
task1_id
end)
|> Enum.map(fn {task_id, deps} ->
{task_id, Enum.map(deps, & &1.v2)}
end)
|> Map.new()
end
def status_running() do
@status_running
end
def status_done() do
@status_done
end
def status_init() do
@status_init
end
@doc """
Returns `true` if the last task (or tasks) in the DAG is completed.
"""
@spec completed?(ExDag.DAG.t()) :: boolean
def completed?(%__MODULE__{} = dag) do
tasks = get_last_tasks(dag)
Enum.all?(tasks, fn
{_task_id, %DAGTask{} = task} ->
DAGTask.is_completed(task)
task_id ->
DAGTask.is_completed(get_task(dag, task_id))
end)
end
@spec get_last_tasks(ExDag.DAG.t()) :: list
def get_last_tasks(%__MODULE__{} = dag) do
for v <- Graph.vertices(dag.g), Graph.in_degree(dag.g, v) == 0 do
v
end
end
def sorted_tasks(%__MODULE__{tasks: tasks} = dag) do
task_ids = Map.keys(tasks)
# Deps: %{a: [:b, :c], c: [:d, :e], d: [:f, :g], e: [:h, :i]}
# Sorted Ids: [:a, :c, :e, :i, :h, :d, :g, :f, :b]
# Deps: %{a: [:b, :c], c: [:d, :e], d: [:f, :g], e: [:h, :i]}
# Sorted Ids: [:a, :b, :c, :d, :f, :g, :e, :h, :i]
g =
Graph.new(type: :directed)
|> Graph.add_vertices(task_ids)
edges =
Enum.map(dag.task_deps, fn {task_id, deps} ->
Enum.map(deps, fn dep -> {task_id, dep} end)
end)
|> List.flatten()
g = Graph.add_edges(g, edges)
sorted_ids = Graph.postorder(g)
sorted_ids
|> Enum.reverse()
|> Enum.map(fn task_id ->
{task_id, Map.from_struct(get_task(dag, task_id))}
end)
|> Map.new()
end
# dag.tasks is a map, so enumerating it yields {task_id, task} pairs
def get_completed_tasks(%__MODULE__{} = dag) do
Enum.filter(dag.tasks, fn {_task_id, task} ->
DAGTask.is_completed(task)
end)
end
def get_pending_tasks(%__MODULE__{} = dag) do
Enum.filter(dag.tasks, fn {_task_id, task} ->
DAGTask.is_pending(task)
end)
end
def get_running_tasks(%__MODULE__{} = dag) do
Enum.filter(dag.tasks, fn {_task_id, task} ->
DAGTask.is_running(task)
end)
end
@doc """
Clears failed task runs. This is necessary for resuming DAGs.
"""
@spec clear_failed_tasks_runs(ExDag.DAG.t()) :: ExDag.DAG.t()
def clear_failed_tasks_runs(%__MODULE__{tasks: tasks} = dag) do
failed_ids =
tasks
|> Map.keys()
|> Enum.filter(fn t_id ->
!should_run_task(dag, t_id)
end)
Logger.info("Failed tasks: #{inspect(failed_ids)}")
tasks =
Enum.reduce(tasks, tasks, fn {task_id, task}, t ->
if Enum.member?(failed_ids, task_id) do
Map.put(t, task_id, %DAGTask{task | last_run: nil})
else
t
end
end)
%__MODULE__{dag | tasks: tasks}
end
def should_run_task(%__MODULE__{} = dag, task_id) do
%DAGTask{last_run: last_run} = task = Map.get(dag.tasks, task_id)
failed = DAGTask.status_failed()
case last_run do
%DAGTaskRun{status: ^failed} ->
if task.stop_on_failure do
false
else
task_runs = Map.get(dag.task_runs, task_id)
task.retries && Enum.count(task_runs) < task.retries
end
_ ->
true
end
end
defimpl String.Chars, for: __MODULE__ do
def to_string(dag) do
"#DAG{tasks: #{inspect(dag.tasks)}}"
end
end
end
|
lib/ex_dag/dag/dag.ex
| 0.771628 | 0.476153 |
dag.ex
|
starcoder
|
defmodule Helper.Converter.EditorToHTML.Frags.Image do
@moduledoc """
Parses editor.js image block fragments into HTML; also used in tests.
see https://editorjs.io/
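
## Example

A sketch of rendering a single image block; the CSS class names come from
`Helper.Converter.EditorToHTML.Class` and are elided here:

    get_item(:single, %{"src" => "https://example.com/cat.png", "caption" => "a cat"})
    #=> "<div class=...single_image_wrapper...> <a href=... class=...glightbox...> <img ... /> </a> </div>"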
"""
import Helper.Validator.Guards, only: [g_none_empty_str: 1]
alias Helper.Converter.EditorToHTML.Class
alias Helper.Types, as: T
@class get_in(Class.article(), ["image"])
@spec get_item(:single | :gallery | :jiugongge, T.editor_image_item()) :: T.html()
def get_item(
:single,
%{
"src" => src,
"width" => width,
"height" => height
} = data
)
when g_none_empty_str(width) and g_none_empty_str(height) do
caption = get_caption(data)
~s(<div class="#{@class["single_image_wrapper"]}">
<a href=#{src} class="glightbox" data-glightbox="type:image;description: #{caption}">
<img class="#{@class["single_image"]}" style="width:#{width}; height:#{height}" src="#{
src
}" alt="image" />
</a>
</div>)
end
def get_item(:single, %{"src" => src} = data) do
caption = get_caption(data)
~s(<div class="#{@class["single_image_wrapper"]}">
<a href=#{src} class="glightbox" data-glightbox="type:image;description: #{caption}">
<img class="#{@class["single_image"]}" src="#{src}" alt="image" />
</a>
</div>)
end
def get_item(:jiugongge, %{"src" => src} = data) do
caption = get_caption(data)
# image_wrapper_class = @class["jiugongge-image"]
~s(<div class="#{@class["jiugongge_image_block"]}">
<a href=#{src} class="glightbox" data-glightbox="type:image;description: #{caption}">
<img class="#{@class["jiugongge_image"]}" src="#{src}" alt="image" />
</a>
</div>)
end
def get_item(:gallery, %{"src" => src, "index" => index} = data) do
caption = get_caption(data)
# IO.inspect(index, label: "index -> ")
~s(<div class="#{@class["gallery_image_block"]}">
<a href=#{src} class="glightbox" data-glightbox="type:image;description: #{caption}">
<img class="#{@class["gallery_image"]}" src="#{src}" alt="image" data-index="#{index}" />
</a>
</div>)
end
@spec get_minimap([T.editor_image_item()]) :: T.html()
def get_minimap(items) do
items_content =
Enum.reduce(items, "", fn item, acc ->
acc <> frag(:minimap_image, item)
end)
~s(<div class="#{@class["gallery_minimap"]}">
#{items_content}
</div>)
end
defp frag(:minimap_image, %{"src" => src, "index" => index}) do
~s(<img class="#{@class["gallery_minimap_image"]}" src="#{src}" data-index="#{index}"/>)
end
def get_caption(%{"caption" => caption}) when g_none_empty_str(caption), do: caption
def get_caption(_), do: ""
def get_caption(:html, %{"caption" => caption}) when g_none_empty_str(caption) do
~s(<div class="#{@class["image_caption"]}">#{caption}</div>)
end
def get_caption(:html, _), do: ""
end
|
lib/helper/converter/editor_to_html/frags/image.ex
| 0.673514 | 0.404449 |
image.ex
|
starcoder
|
defmodule Ipfinder.Validation.Firewallvalidation do
@moduledoc """
# IPFinder elixir Client Library
The official elixir client library for the [IPFinder.io](https://ipfinder.io) get details for :
- IP address details (city, region, country, postal code, latitude and more ..)
- ASN details (Organization name, registry, domain, company_type, and more ..)
- Firewall by supported formats details (apache_allow, nginx_deny, CIDR, and more ..)
- IP Address Ranges by the Organization name details (list_asn, list_prefixes, and more ..)
- service status details (queriesPerDay, queriesLeft, key_type, key_info)
- Get Domain IP (asn, organization, country_code ....)
- Get Domain IP history (total_ip, list_ip, organization, asn ....)
- Get list of domains by ASN, Country, Ranges (select_by, total_domain, list_domain ....)
- [GitHub ipfinder elixir](https://github.com/ipfinder-io/ip-finder-elixir)
- [ipfinder](https://ipfinder.io/)
## Documentation for Firewall validation.
"""
@moduledoc since: "1.0.0"
@doc """
Helper function for validating the IPFinder firewall `by` and `format` parameters.
## Parameters
* `by` - AS number as (e.g. AS1) or country ISO 3166-1 alpha-2 country code (e.g. US)
* `format` - formats supported are apache_allow, apache_deny,nginx_allow,nginx_deny, CIDR, linux_iptables, netmask, inverse_netmask, web_config_allow, web_config_deny, cisco_acl, peer_guardian_2, network_object, cisco_bit_bucket, juniper_junos, microtik
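
## Example

Both arguments are checked against the patterns above; the call returns
`nil` when they pass and raises a `RuntimeError` otherwise:

    Ipfinder.Validation.Firewallvalidation.validate("AS1", "nginx_deny")
    #=> nil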
"""
@doc since: "1.0.0"
def validate(by, format) do
regexa =
~r/^((as|AS)(\d+)|(A(D|E|F|G|I|L|M|N|O|R|S|T|Q|U|W|X|Z)|B(A|B|D|E|F|G|H|I|J|L|M|N|O|R|S|T|V|W|Y|Z)|C(A|C|D|F|G|H|I|K|L|M|N|O|R|U|V|X|Y|Z)|D(E|J|K|M|O|Z)|E(C|E|G|H|R|S|T)|F(I|J|K|M|O|R)|G(A|B|D|E|F|G|H|I|L|M|N|P|Q|R|S|T|U|W|Y)|H(K|M|N|R|T|U)|I(D|E|Q|L|M|N|O|R|S|T)|J(E|M|O|P)|K(E|G|H|I|M|N|P|R|W|Y|Z)|L(A|B|C|I|K|R|S|T|U|V|Y)|M(A|C|D|E|F|G|H|K|L|M|N|O|Q|P|R|S|T|U|V|W|X|Y|Z)|N(A|C|E|F|G|I|L|O|P|R|U|Z)|OM|P(A|E|F|G|H|K|L|M|N|R|S|T|W|Y)|QA|R(E|O|S|U|W)|S(A|B|C|D|E|G|H|I|J|K|L|M|N|O|R|T|V|Y|Z)|T(C|D|F|G|H|J|K|L|M|N|O|R|T|V|W|Z)|U(A|G|M|S|Y|Z)|V(A|C|E|G|I|N|U)|W(F|S)|Y(E|T)|Z(A|M|W)))$/
if String.match?(by, regexa) == false do
raise "Invalid Firewall string please use AS number or ISO 3166-1 alpha-2 country"
end
regexf =
~r/^(apache_allow|apache_deny|nginx_allow|nginx_deny|CIDR|linux_iptables|netmask|inverse_netmask|web_config_allow|web_config_deny|cisco_acl|peer_guardian_2|network_object|cisco_bit_bucket|juniper_junos|microtik)$/
if String.match?(format, regexf) == false do
raise "Invalid Format supported format https://ipfinder.io/docs/?shell#firewall"
end
end
end
|
lib/ipfinder/Validation/Firewallvalidation.ex
| 0.710025 | 0.607809 |
Firewallvalidation.ex
|
starcoder
|
defmodule Brahman.Dns.Zones do
@moduledoc """
Converts DNS zone maps into erldns records and back.
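
## Example

A minimal sketch of storing and reading back a zone (assumes an erldns
zone cache is running; record maps use the atom keys consumed by
`to_records/3`):

    Brahman.Dns.Zones.put("example.com", [
      %{type: "A", ttl: 3600, data: %{ip: "192.0.2.1"}}
    ])

    Brahman.Dns.Zones.get("example.com")
    #=> [%{type: "A", ttl: 3600, data: %{ip: "192.0.2.1"}}]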
"""
require Record
require Logger
@dns_header "erldns/include/erldns.hrl"
for {name, field} <- Record.extract_all(from_lib: @dns_header) do
Record.defrecord(name, field)
end
# API functions
@spec put(name :: String.t(), [map()]) :: :ok | {:error, term()}
def put(name, records) do
rr_record = to_records(name, records, [])
zone = zone(
name: name,
version: :crypto.hash(:sha, :erlang.term_to_binary(rr_record)),
authority: rr_record,
records_by_name: %{name => rr_record},
keysets: []
)
:erldns_zone_cache.put_zone(name, zone)
end
@spec get(String.t()) :: [map()]
def get(name) do
case :erldns_zone_cache.get_authority(name) do
{:ok, records} ->
to_map(records, [])
_ ->
[]
end
end
@spec delete(String.t()) :: :ok
def delete(name), do: :erldns_zone_cache.delete_zone(name)
# private functions
@spec to_map([tuple()], [map()]) :: [map()]
defp to_map([], acc), do: Enum.reverse(acc)
defp to_map([dns_rr(type: type, ttl: ttl, data: data0) | rest], acc) do
case to_map(data0) do
data when is_map(data) ->
rr_map = %{type: int_to_type(type), ttl: ttl, data: data}
to_map(rest, [rr_map | acc])
end
catch
:throw, reason ->
:ok = Logger.warn(fn -> "to_map error: reason = #{inspect(reason)}" end)
to_map(rest, acc)
end
@spec to_map(tuple()) :: map()
defp to_map(dns_rrdata_a(ip: ip)), do: %{ip: "#{:inet.ntoa(ip)}"}
defp to_map(dns_rrdata_aaaa(ip: ip)), do: %{ip: "#{:inet.ntoa(ip)}"}
defp to_map(dns_rrdata_cname(dname: dname)), do: %{dname: dname}
defp to_map(dns_rrdata_rp(mbox: mbox, txt: txt)), do: %{mbox: mbox, txt: txt}
defp to_map(dns_rrdata_txt(txt: txt)), do: %{txt: txt}
defp to_map(dns_rrdata_spf(spf: spf)), do: %{spf: spf}
defp to_map(dns_rrdata_ns(dname: name)), do: %{dname: name}
defp to_map(
dns_rrdata_srv(
priority: priority,
weight: weight,
port: port,
target: target
)
) do
%{priority: priority, weight: weight, port: port, target: target}
end
defp to_map(
dns_rrdata_sshfp(
alg: alg,
fp_type: fp_type,
fp: fp
)
) do
%{alg: alg, fp_type: fp_type, fp: fp}
end
defp to_map(
dns_rrdata_mx(
exchange: exchange,
preference: preference
)
) do
%{exchange: exchange, preference: preference}
end
defp to_map(
dns_rrdata_naptr(
order: order,
preference: preference,
flags: flags,
services: services,
regexp: regexp
)
) do
%{order: order, preference: preference, flags: flags, services: services, regexp: regexp}
end
defp to_map(
dns_rrdata_soa(
mname: mname,
rname: rname,
serial: serial,
refresh: refresh,
retry: retry,
expire: expire,
minimum: minimum
)
) do
%{
mname: mname,
rname: rname,
serial: serial,
refresh: refresh,
retry: retry,
expire: expire,
minimum: minimum
}
end
defp to_map(_undefined), do: throw(:unknown)
@spec to_records(String.t(), [map()], tuple()) :: tuple()
defp to_records(_name, [], acc), do: Enum.reverse(acc)
defp to_records(name, [%{type: type, ttl: ttl, data: data0} = rr | rest], acc) do
case to_record(type, data0) do
data when is_tuple(data) ->
rr_name = if rr[:name], do: rr[:name], else: name
rr = dns_rr(name: rr_name, type: type_to_int(type), ttl: ttl, data: data)
to_records(name, rest, [rr | acc])
end
catch
:throw, reason ->
:ok = Logger.warn(fn -> "to_records error: reason = #{inspect(reason)}" end)
to_records(name, rest, acc)
end
@spec to_record(String.t(), map()) :: tuple()
defp to_record("A", data) do
case :inet.parse_address(~c"#{data.ip}") do
{:ok, ip} ->
dns_rrdata_a(ip: ip)
{:error, _} ->
throw(:ip4_address)
end
end
defp to_record("AAAA", data) do
case :inet.parse_address(~c"#{data.ip}") do
{:ok, ip} ->
dns_rrdata_aaaa(ip: ip)
{:error, _} ->
throw(:ip6_address)
end
end
defp to_record("CNAME", data),
do: dns_rrdata_cname(dname: data.dname)
defp to_record("NS", data),
do: dns_rrdata_ns(dname: data.dname)
defp to_record("RP", data),
do: dns_rrdata_rp(mbox: data.mbox, txt: data.txt)
defp to_record("TXT", data),
do: dns_rrdata_txt(txt: data.txt)
defp to_record("SPF", data),
do: dns_rrdata_spf(spf: data.spf)
defp to_record("SRV", data),
do:
dns_rrdata_srv(
priority: data.priority,
weight: data.weight,
port: data.port,
target: data.target
)
defp to_record("SSHFP", data),
do:
dns_rrdata_sshfp(
alg: data.alg,
fp_type: data.fp_type,
fp: Base.decode16!(data.fp, case: :mixed)
)
defp to_record("MX", data),
do:
dns_rrdata_mx(
exchange: data.exchange,
preference: data.preference
)
defp to_record("NAPTR", data),
do:
dns_rrdata_naptr(
order: data.order,
preference: data.preference,
flags: data.flags,
services: data.services,
regexp: data.regexp
)
defp to_record("SOA", data),
do:
dns_rrdata_soa(
mname: data.mname,
rname: data.rname,
serial: data.serial,
refresh: data.refresh,
retry: data.retry,
expire: data.expire,
minimum: data.minimum
)
# throw so the catch clause in to_records/3 skips unknown types,
# mirroring to_map/1 above; returning an {:error, _} tuple here would
# wrongly pass the is_tuple guard and be stored as record data
defp to_record(_type, _data), do: throw(:unknown)
@spec type_to_int(String.t()) :: non_neg_integer()
defp type_to_int("A"), do: 1
defp type_to_int("NS"), do: 2
defp type_to_int("MD"), do: 3
defp type_to_int("MF"), do: 4
defp type_to_int("CNAME"), do: 5
defp type_to_int("SOA"), do: 6
defp type_to_int("MB"), do: 7
defp type_to_int("MG"), do: 8
defp type_to_int("MR"), do: 9
defp type_to_int("NULL"), do: 10
defp type_to_int("WKS"), do: 11
defp type_to_int("PTR"), do: 12
defp type_to_int("HINFO"), do: 13
defp type_to_int("MINFO"), do: 14
defp type_to_int("MX"), do: 15
defp type_to_int("TXT"), do: 16
defp type_to_int("RP"), do: 17
defp type_to_int("AFSDB"), do: 18
defp type_to_int("X25"), do: 19
defp type_to_int("ISDN"), do: 20
defp type_to_int("RT"), do: 21
defp type_to_int("NSAP"), do: 22
defp type_to_int("SIG"), do: 24
defp type_to_int("KEY"), do: 25
defp type_to_int("PX"), do: 26
defp type_to_int("GPOS"), do: 27
defp type_to_int("AAAA"), do: 28
defp type_to_int("LOC"), do: 29
defp type_to_int("NXT"), do: 30
defp type_to_int("EID"), do: 31
defp type_to_int("NIMLOC"), do: 32
defp type_to_int("SRV"), do: 33
defp type_to_int("ATMA"), do: 34
defp type_to_int("NAPTR"), do: 35
defp type_to_int("KX"), do: 36
defp type_to_int("CERT"), do: 37
defp type_to_int("DNAME"), do: 39
defp type_to_int("SINK"), do: 40
defp type_to_int("OPT"), do: 41
defp type_to_int("APL"), do: 42
defp type_to_int("DS"), do: 43
defp type_to_int("SSHFP"), do: 44
defp type_to_int("IPSECKEY"), do: 45
defp type_to_int("RRSIG"), do: 46
defp type_to_int("NSEC"), do: 47
defp type_to_int("DNSKEY"), do: 48
defp type_to_int("NSEC3"), do: 50
defp type_to_int("NSEC3PARAM"), do: 51
defp type_to_int("DHCID"), do: 49
defp type_to_int("HIP"), do: 55
defp type_to_int("NINFO"), do: 56
defp type_to_int("RKEY"), do: 57
defp type_to_int("TALINK"), do: 58
defp type_to_int("SPF"), do: 99
defp type_to_int("UINFO"), do: 100
defp type_to_int("UID"), do: 101
defp type_to_int("GID"), do: 102
defp type_to_int("UNSPEC"), do: 103
defp type_to_int("TKEY"), do: 249
defp type_to_int("TSIG"), do: 250
defp type_to_int("IXFR"), do: 251
defp type_to_int("AXFR"), do: 252
defp type_to_int("MAILB"), do: 253
defp type_to_int("MAILA"), do: 254
defp type_to_int("ANY"), do: 255
defp type_to_int("CAA"), do: 257
defp type_to_int("DLV"), do: 32769
@spec int_to_type(non_neg_integer()) :: String.t()
defp int_to_type(1), do: "A"
defp int_to_type(2), do: "NS"
defp int_to_type(3), do: "MD"
defp int_to_type(4), do: "MF"
defp int_to_type(5), do: "CNAME"
defp int_to_type(6), do: "SOA"
defp int_to_type(7), do: "MB"
defp int_to_type(8), do: "MG"
defp int_to_type(9), do: "MR"
defp int_to_type(10), do: "NULL"
defp int_to_type(11), do: "WKS"
defp int_to_type(12), do: "PTR"
defp int_to_type(13), do: "HINFO"
defp int_to_type(14), do: "MINFO"
defp int_to_type(15), do: "MX"
defp int_to_type(16), do: "TXT"
defp int_to_type(17), do: "RP"
defp int_to_type(18), do: "AFSDB"
defp int_to_type(19), do: "X25"
defp int_to_type(20), do: "ISDN"
defp int_to_type(21), do: "RT"
defp int_to_type(22), do: "NSAP"
defp int_to_type(24), do: "SIG"
defp int_to_type(25), do: "KEY"
defp int_to_type(26), do: "PX"
defp int_to_type(27), do: "GPOS"
defp int_to_type(28), do: "AAAA"
defp int_to_type(29), do: "LOC"
defp int_to_type(30), do: "NXT"
defp int_to_type(31), do: "EID"
defp int_to_type(32), do: "NIMLOC"
defp int_to_type(33), do: "SRV"
defp int_to_type(34), do: "ATMA"
defp int_to_type(35), do: "NAPTR"
defp int_to_type(36), do: "KX"
defp int_to_type(37), do: "CERT"
defp int_to_type(39), do: "DNAME"
defp int_to_type(40), do: "SINK"
defp int_to_type(41), do: "OPT"
defp int_to_type(42), do: "APL"
defp int_to_type(43), do: "DS"
defp int_to_type(44), do: "SSHFP"
defp int_to_type(45), do: "IPSECKEY"
defp int_to_type(46), do: "RRSIG"
defp int_to_type(47), do: "NSEC"
defp int_to_type(48), do: "DNSKEY"
defp int_to_type(50), do: "NSEC3"
defp int_to_type(51), do: "NSEC3PARAM"
defp int_to_type(49), do: "DHCID"
defp int_to_type(55), do: "HIP"
defp int_to_type(56), do: "NINFO"
defp int_to_type(57), do: "RKEY"
defp int_to_type(58), do: "TALINK"
defp int_to_type(99), do: "SPF"
defp int_to_type(100), do: "UINFO"
defp int_to_type(101), do: "UID"
defp int_to_type(102), do: "GID"
defp int_to_type(103), do: "UNSPEC"
defp int_to_type(249), do: "TKEY"
defp int_to_type(250), do: "TSIG"
defp int_to_type(251), do: "IXFR"
defp int_to_type(252), do: "AXFR"
defp int_to_type(253), do: "MAILB"
defp int_to_type(254), do: "MAILA"
defp int_to_type(255), do: "ANY"
defp int_to_type(257), do: "CAA"
defp int_to_type(32769), do: "DLV"
end
|
lib/brahman/dns/zones.ex
| 0.725162 | 0.504883 |
zones.ex
|
starcoder
|
defmodule LearnKit.Knn do
@moduledoc """
Module for k-nearest neighbours (knn) algorithm
"""
defstruct data_set: []
alias LearnKit.Knn
use Knn.Classify
@type label :: atom
@type feature :: [integer]
@type point :: {label, feature}
@type features :: [feature]
@type data_set :: [{label, features}]
@doc """
Creates classifier with empty data_set
## Examples
iex> classifier = LearnKit.Knn.new
%LearnKit.Knn{data_set: []}
"""
@spec new() :: %Knn{data_set: []}
def new, do: Knn.new([])
@doc """
Creates classifier with data_set
## Parameters
- data_set: Keyword list with labels and features in tuples
## Examples
iex> classifier = LearnKit.Knn.new([{:a1, [[1, 2], [2, 3]]}, {:b1, [[-1, -2]]}])
%LearnKit.Knn{data_set: [a1: [[1, 2], [2, 3]], b1: [[-1, -2]]]}
"""
@spec new(data_set) :: %Knn{data_set: data_set}
def new(data_set) when is_list(data_set), do: %Knn{data_set: data_set}
@doc """
Add train data to classifier
## Parameters
- classifier: %LearnKit.Knn{}
- train data: tuple with label and feature
## Examples
iex> classifier = classifier |> LearnKit.Knn.add_train_data({:a1, [-1, -1]})
%LearnKit.Knn{data_set: [a1: [[-1, -1]]]}
"""
@spec add_train_data(%Knn{data_set: data_set}, point) :: %Knn{data_set: data_set}
def add_train_data(%Knn{data_set: data_set}, {key, value}) when is_atom(key) and is_list(value) do
features = if Keyword.has_key?(data_set, key), do: data_set[key], else: []
data_set = Keyword.put(data_set, key, [value | features])
%Knn{data_set: data_set}
end
@doc """
Classify label of the new feature
## Parameters
- classifier: %LearnKit.Knn{}
- options: keyword list with options
## Options
- feature: feature for classification, required, example: [1, 2, 3]
- k: number of nearest neighbours, default is 3, optional
- algorithm: brute, optional
- weight: uniform/distance, default is uniform, optional
- normalization: none/minimax/z_normalization, default is none, optional
## Examples
iex> classifier |> LearnKit.Knn.classify([feature: [-1, -2], k: 3, weight: "distance"])
{:ok, :a1}
"""
@spec classify(%Knn{data_set: data_set}, [tuple]) :: {:ok, label}
def classify(%Knn{data_set: data_set}, options) when is_list(options) do
cond do
!Keyword.has_key?(options, :feature) ->
{:error, "Feature option is required"}
!is_list(options[:feature]) ->
{:error, "Feature option must be presented as array"}
Keyword.has_key?(options, :k) && (!is_integer(options[:k]) || options[:k] <= 0) ->
{:error, "K option must be positive integer"}
true ->
options = Keyword.merge([k: 3, algorithm: "brute", weight: "uniform", normalization: "none"], options)
{label, _} = prediction(data_set, options)
{:ok, label}
end
end
end
|
lib/learn_kit/knn.ex
| 0.897544 | 0.52543 |
knn.ex
|
starcoder
|
defmodule Mix.Tasks.Shipit do
use Mix.Task
@shortdoc "Publishes new Hex package version"
@moduledoc """
ShipIt automates Hex package publishing to avoid common mistakes.
mix shipit BRANCH VERSION
It automates these steps:
* ensure there are no uncommitted changes in the working tree
* ensure current branch matches the given branch
* ensure local branch is in sync with remote branch
* ensure project version in mix.exs matches the given version
* ensure CHANGELOG.md contains an entry for the version
* ensure LICENSE.md file is present
* create a git tag and push it
* publish to Hex.pm and HexDocs.pm
A `--dry-run` option might be given to only perform local checks.
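
If a `.shipit.exs` file exists in the project root it is evaluated with
`branch` and `version` bound, so extra project-specific checks can be
added. A hypothetical example:

    # .shipit.exs
    unless branch == "master" do
      Mix.raise("releases may only be shipped from master")
    end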
"""
@changelog "CHANGELOG.md"
@licenses ["LICENSE.md", "LICENSE"]
@switches [dry_run: :boolean]
def run(args) do
case OptionParser.parse(args, strict: @switches) do
{opts, [branch, version], []} ->
project = Mix.Project.config()
version = normalize_version(project, version)
check_working_tree()
check_branch(branch)
check_changelog(version)
check_license()
check_dot_shipit(branch, version)
check_remote_branch(branch)
unless opts[:dry_run] do
publish()
create_and_push_tag(version)
end
_ ->
Mix.raise("Usage: mix shipit BRANCH VERSION [--dry-run]")
end
end
defp check_working_tree() do
{out, 0} = System.cmd("git", ["status", "--porcelain"])
if out != "" do
Mix.raise("Found uncommitted changes in the working tree")
end
end
defp check_branch(expected) do
current = current_branch()
if expected != current do
Mix.raise("Expected branch #{inspect(expected)} does not match current #{inspect(current)}")
end
end
defp check_remote_branch(local_branch) do
{_, 0} = System.cmd("git", ["fetch"])
case System.cmd("git", [
"rev-parse",
"--symbolic-full-name",
"--abbrev-ref",
"#{local_branch}@{upstream}"
]) do
{_out, 0} ->
true
{_, _} ->
Mix.raise("Aborting due to git error")
end
{out, 0} = System.cmd("git", ["status", "--branch", local_branch, "--porcelain"])
if String.contains?(out, "ahead") do
Mix.raise("Local branch is ahead of the remote branch, aborting")
end
if String.contains?(out, "behind") do
Mix.raise("Local branch is behind the remote branch, aborting")
end
end
defp current_branch() do
{branch, 0} = System.cmd("git", ["rev-parse", "--abbrev-ref", "HEAD"])
String.trim(branch)
end
defp normalize_version(project, "v" <> rest), do: normalize_version(project, rest)
defp normalize_version(project, version) do
check_version(version, project[:version])
"v#{version}"
end
defp check_version(version, mix_version) do
if version != mix_version do
Mix.raise("Expected #{inspect(version)} to match mix.exs version #{inspect(mix_version)}")
end
end
defp check_changelog(version) do
unless File.exists?(@changelog) do
Mix.raise("#{@changelog} is missing")
end
unless File.read!(@changelog) |> String.contains?(version) do
Mix.raise("#{@changelog} does not include an entry for #{version}")
end
end
defp check_license do
unless Enum.any?(@licenses, &File.exists?(&1)) do
Mix.raise("LICENSE file is missing, add LICENSE.md or LICENSE")
end
end
defp check_dot_shipit(branch, version) do
dot_shipit = ".shipit.exs"
if File.exists?(dot_shipit) do
binding = [branch: branch, version: version]
File.read!(dot_shipit) |> Code.eval_string(binding, file: dot_shipit)
end
end
defp create_and_push_tag(version) do
Mix.shell().info("Creating tag #{version}...")
{_, 0} = System.cmd("git", ["tag", version, "-a", "-m", version])
Mix.shell().info("done\n")
Mix.shell().info("Pushing tag #{version}...")
{_, 0} = System.cmd("git", ["push", "origin", version])
Mix.shell().info("done\n")
end
defp publish do
Mix.Tasks.Hex.Publish.run([])
end
end
|
lib/mix/tasks/shipit.ex
| 0.664105 | 0.444685 |
shipit.ex
|
starcoder
|
defmodule Operate.Cache do
@moduledoc """
Operate cache specification.
A cache is responsible for storing and retrieving tapes and ops from a
cache, and if necessary instructing an adapter to fetch items from a data
source.
Operate comes bundled with a `ConCache` ETS cache, although by default runs
without any caching.
## Creating a cache
A cache must implement both of the following callbacks:
* `c:fetch_tx/3` - function that takes a txid and returns a `t:Operate.BPU.Transaction.t/0`
* `c:fetch_ops/3` - function that takes a list of Op references and returns a list of `t:Operate.Op.t/0` functions.
The third argument in both functions is a tuple containing the adapter module
and a keyword list of options to pass to the adapter.
defmodule MyCache do
use Operate.Cache
def fetch_tx(txid, opts, {adapter, adapter_opts}) do
ttl = Keyword.get(opts, :ttl, 3600)
Cache.fetch_or_store(txid, [ttl: ttl], fn ->
adapter.fetch_tx(txid, adapter_opts)
end)
end
end
Using the above example, Operate can be configured with:
{Operate, [
cache: {MyCache, [ttl: 3600]}
]}
"""
defmacro __using__(opts \\ []) do
quote bind_quoted: [opts: opts] do
@behaviour Operate.Cache
def fetch_tx(txid, _options \\ [], {adapter, adapter_opts}),
do: adapter.fetch_tx(txid, adapter_opts)
def fetch_tx!(txid, options \\ [], {adapter, adapter_opts}) do
case fetch_tx(txid, options, {adapter, adapter_opts}) do
{:ok, tape} -> tape
{:error, err} -> raise err
end
end
def fetch_tx_by(query, _options \\ [], {adapter, adapter_opts}),
do: adapter.fetch_tx_by(query, adapter_opts)
def fetch_tx_by!(txid, options \\ [], {adapter, adapter_opts}) do
case fetch_tx_by(txid, options, {adapter, adapter_opts}) do
{:ok, tape} -> tape
{:error, err} -> raise err
end
end
def fetch_ops(refs, _options \\ [], {adapter, adapter_opts}),
do: adapter.fetch_ops(refs, adapter_opts)
def fetch_ops!(refs, options \\ [], {adapter, adapter_opts}) do
case fetch_ops(refs, options, {adapter, adapter_opts}) do
{:ok, result} -> result
{:error, err} -> raise err
end
end
defoverridable fetch_tx: 2, fetch_tx: 3,
fetch_tx!: 2, fetch_tx!: 3,
fetch_tx_by: 2, fetch_tx_by: 3,
fetch_tx_by!: 2, fetch_tx_by!: 3,
fetch_ops: 2, fetch_ops: 3,
fetch_ops!: 2, fetch_ops!: 3
end
end
@doc """
Loads a transaction from the cache by the given txid, or delegates the job to
the specified adapter. Returns the result in an `:ok` / `:error` tuple pair.
"""
@callback fetch_tx(String.t, keyword, {module, keyword}) ::
{:ok, Operate.Tape.t} |
{:error, String.t}
@doc """
As `c:fetch_tx/3`, but returns the transaction or raises an exception.
"""
@callback fetch_tx!(String.t, keyword, {module, keyword}) :: Operate.Tape.t
@doc """
Loads a list of transactions from the cache by the given query map, or
delegates the job to the specified adapter. Returns the result in an
`:ok` / `:error` tuple pair.
"""
@callback fetch_tx_by(map, keyword, {module, keyword}) ::
{:ok, [Operate.Tape.t, ...]} |
{:error, String.t}
@doc """
As `c:fetch_tx_by/3`, but returns the result or raises an exception.
"""
@callback fetch_tx_by!(map, keyword, {module, keyword}) ::
[Operate.Tape.t, ...]
@doc """
Loads Ops from the cache by the given procedure references, or delegates
the job to the specified adapter. Returns the result in an `:ok` / `:error`
tuple pair.
"""
@callback fetch_ops(list, keyword, {module, keyword}) ::
{:ok, [Operate.Op.t, ...]} |
{:error, String.t}
@doc """
As `c:fetch_ops/3`, but returns the result or raises an exception.
"""
@callback fetch_ops!(list, keyword, {module, keyword}) ::
[Operate.Op.t, ...]
end
|
lib/operate/cache.ex
| 0.782288 | 0.672708 |
cache.ex
|
starcoder
|
defmodule Andy.BrickPi.LegoSensor do
@moduledoc "Lego sensor access."
require Logger
import Andy.BrickPi.Sysfs
alias Andy.Device
alias Andy.BrickPi.{
ColorSensor,
TouchSensor,
InfraredSensor,
UltrasonicSensor,
GyroSensor,
IRSeekerSensor
}
@sys_path "/sys/class/lego-sensor"
@prefix "sensor"
@driver_regex ~r/(lego-ev3|ht-nxt)-(?<sensor>.+)/i
@mode_switch_delay 100
@doc "Get the currently connected lego sensors"
def sensors() do
files =
case File.ls(@sys_path) do
{:ok, files} ->
files
{:error, reason} ->
Logger.warn("Failed getting sensor files: #{inspect(reason)}")
[]
end
files
|> Enum.filter(&String.starts_with?(&1, @prefix))
|> Enum.map(&init_sensor("#{@sys_path}/#{&1}"))
end
@doc "Is this type of device a sensor?"
def sensor?(device_type) do
device_type in [:touch, :infrared, :color, :ultrasonic, :gyro, :ir_seeker]
end
@doc "Get the list of senses from a sensor"
def senses(sensor) do
apply(module_for(sensor), :senses, [sensor])
end
@doc "Read the value of a sense from a sensor"
# {value, updated_sensor} - value can be nil
def read(sensor, sense) do
try do
apply(module_for(sensor), :read, [sensor, sense])
rescue
error ->
Logger.warn("#{inspect(error)} when reading #{inspect(sense)} from #{inspect(sensor)}")
{nil, sensor}
end
end
@doc "Get how long to pause between reading a sense from a sensor. In msecs"
def pause(sensor) do
apply(module_for(sensor), :pause, [sensor])
end
@doc "Get the resolution of a sensor (the delta between essentially identical readings). Nil or an integer."
def sensitivity(sensor, sense) do
apply(module_for(sensor), :sensitivity, [sensor, sense])
end
@doc "Is this the ultrasonic sensor?"
def ultrasonic?(sensor) do
sensor.type == :ultrasonic
end
@doc "Is this the gyro sensor?"
def gyro?(sensor) do
sensor.type == :gyro
end
@doc "Is this the color sensor?"
def color?(sensor) do
sensor.type == :color
end
@doc "Is this the touch sensor?"
def touch?(sensor) do
sensor.type == :touch
end
@doc "Is this the infrared sensor?"
def infrared?(sensor) do
sensor.type == :infrared
end
@doc "Is this the IR seeker sensor?"
def ir_seeker?(sensor) do
sensor.type == :ir_seeker
end
@doc "Set the sensor's mode"
def set_mode(sensor, mode) do
    current_mode = get_attribute(sensor, "mode", :string)
    if current_mode != mode do
Logger.info(
"Switching mode of #{sensor.path} to #{inspect(mode)} from #{inspect(current_mode)}"
)
set_attribute(sensor, "mode", mode)
# Give time for the mode switch
:timer.sleep(@mode_switch_delay)
case get_attribute(sensor, "mode", :string) do
same_mode when same_mode == mode ->
%Device{sensor | props: %{sensor.props | mode: mode}}
other ->
Logger.warn("Mode is still #{other}. Retrying to set mode to #{mode}")
:timer.sleep(@mode_switch_delay)
set_mode(sensor, mode)
end
else
sensor
end
end
@doc "Get the sensor mode"
def mode(sensor) do
sensor.props.mode
end
#### PRIVATE
defp module_for(sensor) do
module_for_type(sensor.type)
end
defp module_for_type(type) do
case type do
:touch -> TouchSensor
:color -> ColorSensor
:infrared -> InfraredSensor
:ultrasonic -> UltrasonicSensor
:ir_seeker -> IRSeekerSensor
:gyro -> GyroSensor
end
end
defp init_sensor(path) do
port_name = read_sys(path, "address")
driver_name = read_sys(path, "driver_name")
%{"sensor" => type_name} = Regex.named_captures(@driver_regex, driver_name)
type =
case type_name do
"us" -> :ultrasonic
"gyro" -> :gyro
"color" -> :color
"touch" -> :touch
"ir" -> :infrared
"ir-seek-v2" -> :ir_seeker
end
sensor = %Device{
mod: module_for_type(type),
class: :sensor,
path: path,
port: port_name,
type: type
}
mode = get_attribute(sensor, "mode", :string)
%Device{sensor | props: %{mode: mode}}
end
end
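# --- Usage sketch (not part of the original source) ---
# Enumerating connected sensors and reading each of their senses once.
# Assumes a BrickPi with at least one supported sensor attached; elsewhere
# `sensors/0` logs a warning and returns [].
for sensor <- Andy.BrickPi.LegoSensor.sensors(),
    sense <- Andy.BrickPi.LegoSensor.senses(sensor) do
  {value, _sensor} = Andy.BrickPi.LegoSensor.read(sensor, sense)
  IO.puts("#{sensor.type}/#{inspect(sense)} -> #{inspect(value)}")
end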
|
lib/andy/systems/brickpi/sensors/lego_sensor.ex
| 0.50708 | 0.429968 |
lego_sensor.ex
|
starcoder
|
defmodule AssertHTML.Matcher do
@moduledoc false
alias AssertHTML
alias AssertHTML.{Parser, Selector}
@compile {:inline, raise_match: 3}
@typep assert_or_refute :: :assert | :refute
## ----------------------------------------------------
## Collection
@doc """
  Gets HTML by selector and raises an error if it doesn't exist.
# Options
* `once` - only one element
* `skip_refute` - do not raise error if element exists for refute
"""
@spec selector(AssertHTML.context(), binary(), list()) :: AssertHTML.html()
def selector({matcher, html}, selector, options \\ []) when is_binary(html) and is_binary(selector) do
docs = Parser.find(html, selector)
# found more than one element
if options[:once] && length(docs) > 1 do
raise_match(matcher, matcher == :assert, fn
:assert ->
"Found more than one element by `#{selector}` selector.\nPlease use `#{selector}:first-child`, `#{selector}:nth-child(n)` for limiting search area.\n\n\t#{
html
}\n"
:refute ->
"Selector `#{selector}` succeeded, but should have failed.\n\n\t#{html}\n"
end)
end
raise_match(matcher, docs == [], fn
:assert ->
"Element `#{selector}` not found.\n\n\t#{html}\n"
:refute ->
if options[:skip_refute],
do: nil,
else: "Selector `#{selector}` succeeded, but should have failed.\n\n\t#{html}\n"
end)
Parser.to_html(docs)
end
@doc """
  Check that the number of elements matching the selector equals the given value
"""
@spec count(AssertHTML.context(), binary(), integer()) :: any()
def count({matcher, html}, selector, check_value) do
count_elements = Parser.count(html, selector)
raise_match(matcher, count_elements != check_value, fn
:assert ->
[
message: "Expected #{check_value} element(s). Got #{count_elements} element(s).",
left: count_elements,
right: check_value
]
:refute ->
[
message: "Expected different number of element(s), but received equal",
left: count_elements,
right: check_value
]
end)
end
@doc """
  Check that the number of elements matching the selector is at least the given minimum
"""
@spec min(AssertHTML.context(), binary(), integer()) :: any()
def min({matcher, html}, selector, min_value) do
count_elements = Parser.count(html, selector)
raise_match(matcher, count_elements < min_value, fn
:assert ->
[
message: "Expected at least #{min_value} element(s). Got #{count_elements} element(s).",
left: count_elements,
right: min_value
]
:refute ->
[
message: "Expected at most #{min_value} element(s). Got #{count_elements} element(s).",
left: count_elements,
right: min_value
]
end)
end
@doc """
  Check that the number of elements matching the selector is at most the given maximum
"""
@spec max(AssertHTML.context(), binary(), integer()) :: any()
def max({matcher, html}, selector, max_value) do
count_elements = Parser.count(html, selector)
raise_match(matcher, count_elements > max_value, fn
:assert ->
[
message: "Expected at most #{max_value} element(s). Got #{count_elements} element(s).",
left: count_elements,
right: max_value
]
:refute ->
[
message: "Expected at least #{max_value} element(s). Got #{count_elements} element(s).",
left: count_elements,
right: max_value
]
end)
end
## ----------------------------------------------------
## Element
@spec attributes(AssertHTML.context(), AssertHTML.attributes()) :: any()
def attributes({matcher, html}, attributes) when is_list(attributes) do
attributes
|> Enum.into(%{}, fn {k, v} -> {to_string(k), v} end)
|> Enum.each(fn {attribute, check_value} ->
attr_value = Selector.attribute(html, attribute)
match_attribute(matcher, attribute, check_value, attr_value, html)
end)
end
@spec contain(AssertHTML.context(), Regex.t()) :: any()
def contain({matcher, html}, %Regex{} = value) when is_binary(html) do
raise_match(matcher, !Regex.match?(value, html), fn
:assert ->
[
message: "Value not matched.",
left: value,
right: html,
expr: "assert_html(#{inspect(value)})"
]
:refute ->
[
message: "Value `#{inspect(value)}` matched, but shouldn't.",
left: value,
right: html,
expr: "assert_html(#{inspect(value)})"
]
end)
end
@spec contain(AssertHTML.context(), AssertHTML.html()) :: any()
def contain({matcher, html}, value) when is_binary(html) and is_binary(value) do
raise_match(matcher, !String.contains?(html, value), fn
:assert ->
[
message: "Value not found.",
left: value,
right: html,
expr: "assert_html(#{inspect(value)})"
]
:refute ->
[
message: "Value `#{inspect(value)}` found, but shouldn't.",
left: value,
right: html,
expr: "assert_html(#{inspect(value)})"
]
end)
end
@spec match_attribute(
assert_or_refute,
AssertHTML.attribute_name(),
AssertHTML.value(),
binary() | nil,
AssertHTML.html()
) :: no_return
defp match_attribute(matcher, attribute, check_value, attr_value, html)
# attribute should exists
defp match_attribute(matcher, attribute, check_value, attr_value, html) when check_value in [nil, true, false] do
raise_match(matcher, if(check_value, do: attr_value == nil, else: attr_value != nil), fn
:assert ->
if check_value,
do: "Attribute `#{attribute}` should exists.\n\n\t#{html}\n",
else: "Attribute `#{attribute}` shouldn't exists.\n\n\t#{html}\n"
:refute ->
if check_value,
do: "Attribute `#{attribute}` shouldn't exists.\n\n\t#{html}\n",
else: "Attribute `#{attribute}` should exists.\n\n\t#{html}\n"
end)
end
# attribute should not exists
defp match_attribute(matcher, attribute, _check_value, nil = _attr_value, html) do
raise_match(matcher, matcher == :assert, fn
_ -> "Attribute `#{attribute}` not found.\n\n\t#{html}\n"
end)
end
defp match_attribute(matcher, attribute, %Regex{} = check_value, attr_value, html) do
raise_match(matcher, !Regex.match?(check_value, attr_value), fn _ ->
[
message: "Matching `#{attribute}` attribute failed.\n\n\t#{html}.\n",
left: check_value,
right: attr_value
]
end)
end
defp match_attribute(matcher, "class", check_value, attr_value, html) do
for check_class <- String.split(to_string(check_value), " ") do
raise_match(matcher, !String.contains?(attr_value, check_class), fn
:assert -> "Class `#{check_class}` not found in `#{attr_value}` class attribute\n\n\t#{html}\n"
:refute -> "Class `#{check_class}` found in `#{attr_value}` class attribute\n\n\t#{html}\n"
end)
end
end
defp match_attribute(matcher, attribute, check_value, attr_value, html) do
str_check_value = to_string(check_value)
raise_match(matcher, str_check_value != attr_value, fn _ ->
[
message: "Comparison `#{attribute}` attribute failed.\n\n\t#{html}.\n",
left: str_check_value,
right: attr_value
]
end)
end
defp raise_match(check, condition, message_fn) when check in [:assert, :refute] do
cond do
check == :assert -> condition
check == :refute -> !condition
true -> false
end
|> if do
message_or_args = message_fn.(check)
if message_or_args do
args = (is_list(message_or_args) && message_or_args) || [message: message_or_args]
raise ExUnit.AssertionError, args
end
end
end
end
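# --- Usage sketch (not part of the original source) ---
# How the {matcher, html} context flips the meaning of a check: the same
# condition passes under :assert and raises under :refute.
#   html = ~S(<ul><li>a</li><li>b</li></ul>)
#   AssertHTML.Matcher.count({:assert, html}, "li", 2) # passes
#   AssertHTML.Matcher.count({:refute, html}, "li", 2) # raises ExUnit.AssertionError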
|
lib/assert_html/matcher.ex
| 0.849488 | 0.603727 |
matcher.ex
|
starcoder
|
defmodule OMG.Eth.Blockchain.Transaction.Signature do
@moduledoc """
Defines helper functions for signing and getting the signature
of a transaction, as defined in Appendix F of the Yellow Paper.
For any of the following functions, if chain_id is specified,
it's assumed that we're post-fork and we should follow the
specification EIP-155 from:
https://github.com/ethereum/EIPs/blob/master/EIPS/eip-155.md
Extracted from: https://github.com/exthereum/blockchain
"""
require Integer
alias OMG.Eth.Blockchain.Transaction
alias OMG.Eth.Blockchain.Transaction.Hash
@type private_key :: <<_::256>>
@doc """
Takes a given transaction and returns a version signed
with the given private key. This is defined in Eq.(216) and
Eq.(217) of the Yellow Paper.
## Examples
iex> OMG.Eth.Blockchain.Transaction.Signature.sign_transaction(%OMG.Eth.Blockchain.Transaction{nonce: 5, gas_price: 6, gas_limit: 7, to: <<>>, value: 5, init: <<1>>}, <<1::256>>)
%OMG.Eth.Blockchain.Transaction{data: <<>>, gas_limit: 7, gas_price: 6, init: <<1>>, nonce: 5, r: 97037709922803580267279977200525583527127616719646548867384185721164615918250, s: 31446571475787755537574189222065166628755695553801403547291726929250860527755, to: "", v: 27, value: 5}
iex> OMG.Eth.Blockchain.Transaction.Signature.sign_transaction(%OMG.Eth.Blockchain.Transaction{nonce: 5, gas_price: 6, gas_limit: 7, to: <<>>, value: 5, init: <<1>>}, <<1::256>>, 1)
%OMG.Eth.Blockchain.Transaction{data: <<>>, gas_limit: 7, gas_price: 6, init: <<1>>, nonce: 5, r: 25739987953128435966549144317523422635562973654702886626580606913510283002553, s: 41423569377768420285000144846773344478964141018753766296386430811329935846420, to: "", v: 38, value: 5}
"""
@spec sign_transaction(Transaction.t(), private_key, integer() | nil) :: Transaction.t()
def sign_transaction(trx, private_key, chain_id \\ nil) do
{v, r, s} =
trx
|> Hash.transaction_hash(chain_id)
|> Hash.sign_hash(private_key, chain_id)
%{trx | v: v, r: r, s: s}
end
end
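# --- Reference sketch (not part of the original source) ---
# The `v` values in the doctests above (27 without a chain id, 38 with
# chain_id 1) follow the EIP-155 encoding, roughly:
#   v = if chain_id, do: chain_id * 2 + 35 + recovery_id, else: 27 + recovery_id
# where recovery_id is 0 or 1 as produced by the recoverable ECDSA signature.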
|
apps/omg_eth/lib/omg_eth/blockchain/transaction/signature.ex
| 0.858378 | 0.699306 |
signature.ex
|
starcoder
|
defmodule SMSFactor.SendSMS do
@moduledoc """
Wrappers around **Send SMS** section of SMSFactor API.
"""
@typedoc """
Params for sending SMS campaign.
- `text` **(required)** : Your message
- `value` **(required)** : The recipients' numbers
- `pushtype` : The push type (alert or marketing)
- `delay` : Sending date Y-m-d H:i:s
- `sender` : Allows you to customize the sender
- `gsmsmsid` : An id of your choice to link it to its delivery report
## Example
```elixir
{
"sms": {
"message": {
"text": "Message via API",
"pushtype": "alert",
"sender": "Illidan",
"delay": "2021-06-06 10:28:32"
},
"recipients": {
"gsm": [
{
"gsmsmsid": "100",
"value": "33612345678"
},
{
"gsmsmsid": "101",
"value": "33612345677"
}
]
}
}
}
```
"""
@type send_campaign_params() :: %{sms: %{atom() => any()}}
@typedoc """
Params for sending SMS.
- `token` **(required if not in headers)** : Your token
- `text` **(required)** : Your message
- `to` **(required)** : Your destination
- `pushtype` : The push type (alert or marketing)
- `delay` : Sending date Y-m-d H:i:s
- `sender` : Allows you to customize the sender
- `gsmsmsid` : An id of your choice to link it to its delivery report
## Example
```elixir
{
"text": "Message via API",
"to": ""33612345678"
"pushtype": "alert",
"sender": "Illidan",
"delay": "2021-06-06 10:28:32",
"gsmsmsid": "100"
}
```
"""
@type send_sms_params() :: %{atom() => any()}
@spec single_message(Tesla.Client.t(), send_sms_params()) :: Tesla.Env.result()
def single_message(client, params), do: Tesla.get(client, "/send", query: params)
@spec campaign(Tesla.Client.t(), send_campaign_params()) :: Tesla.Env.result()
def campaign(client, params), do: Tesla.post(client, "/send", params)
@spec campaign_with_list(Tesla.Client.t(), send_campaign_params()) :: Tesla.Env.result()
def campaign_with_list(client, params), do: Tesla.post(client, "/send/lists", params)
@spec simulate_single_message(Tesla.Client.t(), send_sms_params()) :: Tesla.Env.result()
def simulate_single_message(client, params) do
Tesla.get(client, "/send/simulate", query: params)
end
@spec simulate_campaign(Tesla.Client.t(), send_campaign_params()) :: Tesla.Env.result()
def simulate_campaign(client, params), do: Tesla.post(client, "/send/lists/simulate", params)
@spec cancel_campaign(Tesla.Client.t(), String.t()) :: Tesla.Env.result()
def cancel_campaign(client, campaign_id), do: Tesla.delete(client, "/send/#{campaign_id}")
end
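# --- Usage sketch (not part of the original source) ---
# Building a Tesla client for these helpers; the base URL and middleware
# stack shown here are assumptions, not necessarily what SMSFactor ships.
defmodule SMSFactor.ClientSketch do
  def new(token) do
    Tesla.client([
      {Tesla.Middleware.BaseUrl, "https://api.smsfactor.com"},
      {Tesla.Middleware.BearerAuth, token: token},
      Tesla.Middleware.JSON
    ])
  end
end
# SMSFactor.ClientSketch.new("my-token")
# |> SMSFactor.SendSMS.single_message(%{text: "Hello", to: "33612345678"})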
|
lib/sms_factor/send_sms.ex
| 0.855157 | 0.751967 |
send_sms.ex
|
starcoder
|
defmodule AlphaVantage.StockTimeSeries do
@moduledoc """
A set of functions for fetching realtime and historical global equity data from [Alpha Vantage](www.alphavantage.co/documentation/#time-series-data) in 4 different temporal resolutions:
(1) daily, (2) weekly, (3) monthly, and (4) intraday.
Daily, weekly, and monthly time series contain 20+ years of historical data.
"""
alias AlphaVantage.Gateway
@doc """
Returns intraday time series (timestamp, open, high, low, close, volume) of the equity specified.
Please reference https://www.alphavantage.co/documentation/#intraday for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
- `:outputsize`
- `"compact"` returns only the latest 100 data points in the intraday time series (default);
- `"full"` returns the full-length intraday time series.
The `"compact"` option is recommended if you would like to reduce the data size of each API call.
"""
@spec intraday(String.t(), String.t(), Keyword.t()) :: Gateway.response()
def intraday(symbol, interval, opts \\ []) do
params = [function: "TIME_SERIES_INTRADAY", symbol: symbol, interval: interval]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns historical intraday time series for the trailing 2 years, covering over 2 million data points per ticker.
The intraday data is derived from the Securities Information Processor (SIP) market-aggregated data.
You can query both raw (as-traded) and split/dividend-adjusted intraday data from this endpoint.
Common use cases for this API include data visualization, trading simulation/backtesting, and machine learning and deep learning applications with a longer horizon.
Please reference https://www.alphavantage.co/documentation/#intraday-extended for more detail.
Note: To ensure optimal API response time, this endpoint uses the CSV format which is more memory-efficient than JSON.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`
- `:slice`
Two years of minute-level intraday data contains over 2 million data points, which can take up to Gigabytes of memory.
To ensure optimal API response speed, the trailing 2 years of intraday data is evenly divided into 24 "slices" - `"year1month1"`, `"year1month2"`, `"year1month3"`, ..., `"year1month11"`, `"year1month12"`, `"year2month1"`, `"year2month2"`, `"year2month3"`, ..., `"year2month11"`, `"year2month12"`.
Each slice is a 30-day window, with `"year1month1"` being the most recent and `"year2month12"` being the farthest from today.
By default, `slice: "year1month1"`.
_Optional_ (accepted as a keyword list)
- `:adjusted`
By default, `adjusted: true` and the output time series is adjusted by historical split and dividend events.
Set `adjusted: false` to query raw (as-traded) intraday values.
"""
@spec intraday_extended_history(String.t(), String.t(), String.t(), Keyword.t()) ::
Gateway.response()
def intraday_extended_history(symbol, interval, slice, opts \\ []) do
params = [
function: "TIME_SERIES_INTRADAY_EXTENDED",
symbol: symbol,
interval: interval,
slice: slice,
datatype: "csv"
]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns daily time series (date, daily open, daily high, daily low, daily close, daily volume) of the global equity specified, covering 20+ years of historical data.
The most recent data point is the prices and volume information of the current trading day, updated realtime.
Please reference https://www.alphavantage.co/documentation/#daily for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
- `:outputsize`
- `"compact"` returns only the latest 100 data points in the intraday time series (default);
- `"full"` returns the full-length intraday time series.
The `"compact"` option is recommended if you would like to reduce the data size of each API call.
"""
@spec daily(String.t(), Keyword.t()) :: Gateway.response()
def daily(symbol, opts \\ []) do
params = [function: "TIME_SERIES_DAILY", symbol: symbol]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns daily time series (date, daily open, daily high, daily low, daily close, daily volume, daily adjusted close, and split/dividend events) of the global equity specified, covering 20+ years of historical data.
The most recent data point is the prices and volume information of the current trading day, updated realtime.
Please reference https://www.alphavantage.co/documentation/#dailyadj for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
- `:outputsize`
- `"compact"` returns only the latest 100 data points in the intraday time series (default);
- `"full"` returns the full-length intraday time series.
The `"compact"` option is recommended if you would like to reduce the data size of each API call.
"""
@spec daily_adjusted(String.t(), Keyword.t()) :: Gateway.response()
def daily_adjusted(symbol, opts \\ []) do
params = [function: "TIME_SERIES_DAILY_ADJUSTED", symbol: symbol]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns weekly time series (last trading day of each week, weekly open, weekly high, weekly low, weekly close, weekly volume) of the global equity specified, covering 20+ years of historical data.
The latest data point is the prices and volume information for the week (or partial week) that contains the current trading day, updated realtime.
Please reference https://www.alphavantage.co/documentation/#weekly for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
- `:outputsize`
- `"compact"` returns only the latest 100 data points in the intraday time series (default);
- `"full"` returns the full-length intraday time series.
The `"compact"` option is recommended if you would like to reduce the data size of each API call.
"""
@spec weekly(String.t(), Keyword.t()) :: Gateway.response()
def weekly(symbol, opts \\ []) do
params = [function: "TIME_SERIES_WEEKLY", symbol: symbol]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns weekly adjusted time series (last trading day of each week, weekly open, weekly high, weekly low, weekly close, weekly adjusted close, weekly volume, weekly dividend) of the global equity specified, covering 20+ years of historical data.
The latest data point is the prices and volume information for the week (or partial week) that contains the current trading day, updated realtime.
Please reference https://www.alphavantage.co/documentation/#weeklyadj for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec weekly_adjusted(String.t(), Keyword.t()) :: Gateway.response()
def weekly_adjusted(symbol, opts \\ []) do
params = [function: "TIME_SERIES_WEEKLY_ADJUSTED", symbol: symbol]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns monthly time series (last trading day of each month, monthly open, monthly high, monthly low, monthly close, monthly volume) of the global equity specified, covering 20+ years of historical data.
The latest data point is the prices and volume information for the month (or partial month) that contains the current trading day, updated realtime.
Please reference https://www.alphavantage.co/documentation/#monthly for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec monthly(String.t(), Keyword.t()) :: Gateway.response()
def monthly(symbol, opts \\ []) do
params = [function: "TIME_SERIES_MONTHLY", symbol: symbol]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns monthly adjusted time series (last trading day of each month, monthly open, monthly high, monthly low, monthly close, monthly adjusted close, monthly volume, monthly dividend) of the equity specified, covering 20+ years of historical data.
The latest data point is the prices and volume information for the month (or partial month) that contains the current trading day, updated realtime.
Please reference https://www.alphavantage.co/documentation/#monthlyadj for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec monthly_adjusted(String.t(), Keyword.t()) :: Gateway.response()
def monthly_adjusted(symbol, opts \\ []) do
params = [function: "TIME_SERIES_MONTHLY_ADJUSTED", symbol: symbol]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the latest price and volume information for a security of your choice.
Please reference https://www.alphavantage.co/documentation/#latestprice for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec quote(String.t(), Keyword.t()) :: Gateway.response()
def quote(symbol, opts \\ []) do
params = [function: "GLOBAL_QUOTE", symbol: symbol]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the best-matching symbols and market information based on keywords of your choice.
The search results also contain match scores that provide you with the full flexibility to develop your own search and filtering logic.
Please reference https://www.alphavantage.co/documentation/#symbolsearch for more detail.
## Parameters
**Required**
- `:keywords`
A text string of your choice.
For example: keywords=`"microsoft"`
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec search(String.t(), Keyword.t()) :: Gateway.response()
def search(keywords, opts \\ []) do
params = [function: "SYMBOL_SEARCH", keywords: keywords]
AlphaVantage.query(Keyword.merge(params, opts))
end
end
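# --- Usage sketch (not part of the original source) ---
# Assumes an Alpha Vantage API key is configured as `AlphaVantage.query/1`
# expects:
#   {:ok, latest} = AlphaVantage.StockTimeSeries.quote("MSFT")
#   {:ok, daily} = AlphaVantage.StockTimeSeries.daily("MSFT", outputsize: "compact")
#   {:ok, csv} = AlphaVantage.StockTimeSeries.intraday_extended_history("MSFT", "5min", "year1month1")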
|
lib/alpha_vantage/stock_time_series.ex
| 0.914825 | 0.889912 |
stock_time_series.ex
|
starcoder
|
defmodule Kitt.Message.TIM do
@moduledoc """
Defines the structure and instantiation function
for creating a J2735-compliant Traveler Information message
A `TIM` defines a generic message type used to relay various advisory
notifications to equipped devices, including but not limited to vehicles
"""
@typedoc "Defines the structure of a TravelerInformation message and the data elements comprising its fields"
@type t :: %__MODULE__{
msgCnt: non_neg_integer(),
timeStamp: Kitt.Types.minute_of_year(),
packetID: non_neg_integer(),
urlB: String.t(),
dataFrames: [traveler_data_frame()],
regional: [Kitt.Types.regional_extension()]
}
@type traveler_data_frame :: %{
sspTimRights: non_neg_integer(),
frameType:
:unknown
| :advisory
| :roadSignage
| :commercialSignage,
          msgId: msg_id(),
startYear: non_neg_integer(),
startTime: non_neg_integer(),
durationTime: non_neg_integer(),
priority: non_neg_integer(),
sspLocationRights: non_neg_integer(),
regions: [geographical_path()],
sspMsgRights1: non_neg_integer(),
sspMsgRights2: non_neg_integer(),
content: content(),
url: String.t()
}
@type msg_id ::
{:furtherInfoID, non_neg_integer()}
| {:roadSignID, road_sign_id()}
@type road_sign_id :: %{
position: Kitt.Types.position_3d(),
viewAngle: Kitt.Types.angle(),
mutcdCode:
:none
| :regulatory
| :warning
| :maintenance
| :motoristService
| :guide
| :rec,
crc: non_neg_integer()
}
@type geographical_path :: %{
name: String.t(),
id: Kitt.Types.road_segment_reference_id(),
anchor: Kitt.Types.position_3d(),
laneWidth: non_neg_integer(),
directionality: direction_of_use(),
closedPath: boolean(),
direction: Kitt.Types.angle(),
description: description(),
regional: [Kitt.Types.regional_extension()]
}
@type direction_of_use ::
:unavailable
| :forward
| :reverse
| :both
@type description ::
{:path, Kitt.Types.offset_system()}
| {:geometry, geometric_projection()}
| {:oldRegion, valid_region()}
| {:regional, [Kitt.Types.regional_extension()]}
@type valid_region :: %{
direction: Kitt.Types.angle(),
extent: Kitt.Types.extent(),
area: area()
}
@type area ::
{:shapePointSet, shape_point_set()}
| {:circle, circle()}
| {:regionPointSet, region_point_set()}
@type shape_point_set :: %{
          anchor: Kitt.Types.position_3d(),
laneWidth: non_neg_integer(),
directionality: direction_of_use()
}
@type region_point_set :: %{
anchor: Kitt.Types.position_3d(),
scale: non_neg_integer(),
nodeList: [region_offset()]
}
@type region_offset :: %{
xOffset: integer(),
yOffset: integer(),
zOffset: integer()
}
@type geometric_projection :: %{
direction: Kitt.Types.angle(),
extent: Kitt.Types.extent(),
laneWidth: non_neg_integer(),
circle: circle(),
regional: [Kitt.Types.regional_extension()]
}
@type circle :: %{
center: Kitt.Types.position_3d(),
radius: non_neg_integer(),
units:
:centimeter
| :"cm2-5"
| :decimeter
| :meter
| :kilometer
| :foot
| :yard
| :mile
}
@type content ::
{:advisory, [itis_item()]}
| {:workZone, itis_item()}
| {:genericSign, itis_item()}
| {:speedLimit, itis_item()}
| {:exitService, itis_item()}
@type itis_item ::
{:itis, non_neg_integer()}
| {:text, String.t()}
@derive Jason.Encoder
@enforce_keys [:msgCnt, :dataFrames]
defstruct [:dataFrames, :msgCnt, :packetID, :regional, :timeStamp, :urlB]
@doc """
Produces a `TIM` message struct from an equivalent map or keyword input
"""
@spec new(map() | keyword()) :: t()
def new(message), do: struct(__MODULE__, message)
@doc """
Returns the `TIM` identifying integer
"""
@spec type_id() :: non_neg_integer()
def type_id(), do: :DSRC.travelerInformation()
@doc """
Returns the `TIM` identifying atom recognized by the ASN1 spec
"""
@spec type() :: atom()
def type(), do: :TravelerInformation
end
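# --- Construction sketch (not part of the original source) ---
# Field values below are illustrative placeholders, not a complete J2735
# payload; only :msgCnt and :dataFrames are enforced keys on the struct.
tim =
  Kitt.Message.TIM.new(%{
    msgCnt: 1,
    dataFrames: [
      %{frameType: :advisory, priority: 5, content: {:advisory, [itis: 268]}}
    ]
  })
# tim.msgCnt #=> 1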
|
lib/kitt/message/tim.ex
| 0.832747 | 0.537102 |
tim.ex
|
starcoder
|
defmodule Tracer do
alias Tracer.Pattern
alias Tracer.Collector
alias Tracer.Utils
@moduledoc """
"""
@doc """
## Options
The following options are available:
  * `:node` - specifies the node on which the trace should be started.
  * `:limit` - specifies the limits that should be used by the collectable process.
  Limit options are merged with those currently set. It is possible to specify them
  per configuration as env `:limit` for application `:exrun`.
  The following limit options are available:
* `:time` - specifies the time in milliseconds, where should the rate be
applied. Default specified by environments. (Default: 1000)
* `:rate` - specifies the limit of trace messages per time, if trace messages
will be over this limit, the collectable process will stop and clear all traces.
Default specified by environments. (Default: 250)
    * `:overall` - sets the absolute limit for messages. After reaching this limit, the
    collectable process will clear all traces and stop. Default specified by environments.
    (Default: nil)
Additionally limit can be specified as `limit: 5`, than it equivalent to `limit: %{overall: 5}`
  * `:formatter_local` - flag that sets where the formatter process should be started.
  If set to `false`, the formatter process will be started on the remote node; if set
  to `true`, on the local machine. Defaults to `false`. Tracer can trace nodes
  where Elixir is not installed. If `formatter_local` is set to `true`, only 2
  modules are loaded on the remote Erlang node (Tracer.Utils and Tracer.Collector), which
  forward messages to the connected node. If `formatter_local` is set to `false`, the
  formatter is started on the remote node and all modules from the Elixir application are
  loaded, because formatting traces requires at least all Inspect modules to be loaded.
  * `:formatter` - a custom formatter function, for example if you want to trace with a
  different inspect function. The formatter is either a fun or a tuple `{module, function}`.
  * `:stack` - print the stacktrace for each traced call
  * `:exported` - print only exported functions
  * `:no_return` - do not print returns for calls
  * `:pid` - specify the pid you want to trace, otherwise all processes are traced
  * `:io` - specify the io process which should handle output from the tracer
## Examples
iex> import Tracer # should be to simplify using of trace
nil
iex> trace :lists.seq
{:ok, 2}
iex> trace :lists.seq/2
{:ok, 1}
iex> trace :lists.seq(1, 10)
{:ok, 2}
iex> trace :lists.seq(a, b) when a < 10 and b > 25
{:ok, 2}
iex> trace :maps.get(:undefined, _), [:stack]
{:ok, 2}
iex> trace :maps.get/2, [limit: %{overall: 100, rate: 50, time: 50}]
{:ok, 2}
"""
defmacro trace(to_trace, options \\ []) do
pattern = Pattern.compile(to_trace, options) |> Macro.escape(unquote: true)
quote do
Tracer.trace_run(unquote(pattern), unquote(options))
end
end
def trace_run(compiled_pattern, options \\ []) do
node = options[:node] || node()
limit = limit_opts(options)
formatter_options = Keyword.put_new(options, :formatter_local, false)
unless Process.get(:__tracer__), do: Process.put(:__tracer__, %{})
check_node(node, formatter_options)
Collector.ensure_started(node)
{:group_leader, group_leader} = Process.info(self(), :group_leader)
io = options[:io] || group_leader
process_spec = options[:pid] || :all
trace_opts = [:call, :timestamp]
Collector.enable(node, io, limit, process_spec, trace_opts, formatter_options)
Collector.trace_pattern(node, compiled_pattern)
end
defp limit_opts(options) do
case options[:limit] do
nil -> default_limit()
limit when is_integer(limit) -> Map.merge(default_limit(), %{overall: limit})
limit when is_map(limit) -> limit
end
end
defp default_limit() do
Application.get_env(:exrun, :limit, %{rate: 250, time: 1000}) |> Enum.into(%{})
end
defp bootstrap(node, formatter_options) do
applications = :rpc.call(node, :application, :loaded_applications, [])
Utils.load_modules(node, [Utils, Collector])
unless formatter_options[:formatter_local] do
modules =
case :lists.keyfind(:elixir, 1, applications) do
{:elixir, _, _} ->
[]
_ ->
{:ok, modules} = :application.get_key(:elixir, :modules)
modules
end
Utils.load_modules(node, [Tracer.Formatter | modules])
end
end
defp check_node(node, formatter_options) do
if node() == node do
:ok
else
tracer_conf = Process.get(:__tracer__)
node_conf =
case :maps.get(node, tracer_conf, nil) do
nil -> %{loaded: false}
node_conf -> node_conf
end
unless node_conf.loaded do
bootstrap(node, formatter_options)
Process.put(:__tracer__, :maps.put(node, %{node_conf | loaded: true}, tracer_conf))
end
end
end
def get_config(key), do: Process.get(:__tracer__) |> get_in([key])
@doc """
Stop tracing
"""
def trace_off(options \\ []) do
Collector.stop(options[:node] || node())
end
end
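# --- Usage sketch (not part of the original source) ---
# Combining the options documented above; the node name and limit values are
# placeholders.
#   import Tracer
#   trace :maps.get/2, [:stack]
#   trace :maps.get/2, [node: :"app@remote-host", limit: %{rate: 50, time: 1_000}]
#   trace_off(node: :"app@remote-host")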
|
lib/tracer.ex
| 0.885384 | 0.500366 |
tracer.ex
|
starcoder
|
defmodule Akin.SubstringSort do
@moduledoc """
Use Chunk Sorting to compare two strings using substrings.
Ratio is based on difference in string length
* if words are of similar in length according to Akin.Strategy.determine/2
* ratio is String.jaro_distance
* if words are of dissimilar in length according to Akin.Strategy.determine/2
* ratio is Akin.SubstringComparison.similarity/2 * @ratio * scale (determined by Akin.Strategy)
"""
@behaviour Akin.Task
alias Akin.{Corpus, Strategy, Helper.SubstringComparison}
@bias 0.95
@spec compare(%Corpus{}, %Corpus{}, Keyword.t()) :: float()
@doc """
This strategy splits the strings on spaces, sorts the list of strings, joins them
together again, and then compares them by applying the Jaro-Winkler distance metric.
## Examples
iex> Akin.SubstringSort.compare(Akin.Util.compose("Alice in Wonderland"), Akin.Util.compose("Alice's Adventures in Wonderland"))
0.63
      iex> Akin.SubstringSort.compare(Akin.Util.compose("Oscar-<NAME>"), Akin.Util.compose("<NAME>"))
1.0
"""
def compare(%Corpus{} = left, %Corpus{} = right, _opts \\ []) do
case Strategy.determine(left.string, right.string) do
:standard ->
similarity(left.list, right.list)
{:substring, scale} ->
substring_similarity(left.list, right.list) * @bias * scale
{:error, _} ->
0.0
end
end
defp substring_similarity(left, right) do
similarity(left, right, SubstringComparison)
end
defp similarity(left, right) do
left =
left
|> Enum.sort()
|> Enum.join()
right =
right
|> Enum.sort()
|> Enum.join()
String.jaro_distance(left, right)
end
defp similarity(left, right, ratio_mod) do
left =
left
|> Enum.sort()
|> Enum.join()
right =
right
|> Enum.sort()
|> Enum.join()
ratio_mod.similarity(left, right)
end
end
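# --- Standalone sketch (not part of the original source) ---
# The heart of the :standard path above: token-sort both strings, then apply
# the Jaro distance.
defmodule TokenSortSketch do
  def similarity(left, right) when is_binary(left) and is_binary(right) do
    String.jaro_distance(token_sort(left), token_sort(right))
  end
  defp token_sort(string) do
    string
    |> String.downcase()
    |> String.split()
    |> Enum.sort()
    |> Enum.join()
  end
end
# TokenSortSketch.similarity("Alice in Wonderland", "Wonderland in Alice") #=> 1.0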
|
lib/akin/algorithms/substring_sort.ex
| 0.89647 | 0.510985 |
substring_sort.ex
|
starcoder
|
defmodule Muscat.Fraction do
@moduledoc """
This module provides some simple operations for fraction.
"""
@type fraction_tuple :: {numerator :: integer(), denominator :: neg_integer() | pos_integer()}
@type t :: %__MODULE__{
numerator: integer(),
denominator: integer() | :any,
sign: :positive | :negative
}
defstruct [:numerator, :denominator, :sign]
defguard is_zero_fraction(fraction)
when is_struct(fraction, __MODULE__) and fraction.numerator == 0
@doc """
Creates a fraction from integer value or tuple.
```
Fraction.new(2)
#=> %{numerator: 2, denominator: 1, sign: :positive}
Fraction.new(0)
#=> %{numerator: 0, denominator: :any, sign: :positive}
Fraction.new({1, 2})
#=> %{numerator: 1, denominator: 2, sign: :positive}
```
"""
@spec new(integer() | fraction_tuple()) :: __MODULE__.t()
def new(value) when is_integer(value), do: new(value, 1)
def new({numerator, denominator}), do: new(numerator, denominator)
@doc """
Creates a fraction with numerator and denominator.
Both numerator and denominator are integers.(and the denominator can't be `0`).
It doesn't matter whether the sign of the fraction is at the numerator or denominator.
## About 0
- If numerator is `0`, the denominator in result is :any and sign is positive.
- If denominator is `0`, it will raise.
```
Fraction.new(0, 1)
#=> %{numerator: 0, denominator: :any, sign: :positive}
Fraction.new(1, 2)
#=> %{numerator: 1, denominator: 2, sign: :positive}
Fraction.new(-1, -2)
#=> %{numerator: 1, denominator: 2, sign: :positive}
Fraction.new(-1, 2)
#=> %{numerator: 1, denominator: 2, sign: :negative}
Fraction.new(1, -2)
#=> %{numerator: 1, denominator: 2, sign: :negative}
```
"""
@spec new(numerator :: integer(), denominator :: neg_integer() | pos_integer()) ::
__MODULE__.t()
def new(_numerator, 0) do
raise ArgumentError, "The denominator can't be 0."
end
def new(0, denominator) when is_integer(denominator) do
%__MODULE__{numerator: 0, denominator: :any, sign: :positive}
end
def new(numerator, denominator) when is_integer(numerator) and is_integer(denominator) do
sign =
cond do
numerator < 0 and denominator < 0 -> :positive
numerator > 0 and denominator > 0 -> :positive
true -> :negative
end
%__MODULE__{
numerator: Kernel.abs(numerator),
denominator: Kernel.abs(denominator),
sign: sign
}
end
def new(_, _) do
raise ArgumentError, "Both numerator and denominator are integers."
end
@doc """
Compare two fractions and returns `true` if they are equal, otherwise `false`.
Fractions will be reduced first and then compared. It means `1/2` is equal to `2/4`.
```
fraction1 = Fraction.new(1280, 2560)
fraction2 = Fraction.new(1, 2)
Fraction.equal?(fraction1, fraction2)
#=> true
```
"""
@spec equal?(__MODULE__.t(), __MODULE__.t()) :: boolean()
def equal?(%__MODULE__{} = fraction1, %__MODULE__{} = fraction2) do
fraction1 = reduce(fraction1)
fraction2 = reduce(fraction2)
compare(fraction1, fraction2) == :eq
end
@doc """
  Compare two fractions, returning `:eq`, `:lt` or `:gt`.
  ```
  fraction1 = Fraction.new(1280, 2560)
  fraction2 = Fraction.new(1, 2)
  Fraction.compare(fraction1, fraction2)
#=> :eq
```
"""
@spec compare(__MODULE__.t(), __MODULE__.t()) :: :gt | :lt | :eq
def compare(%{sign: :positive}, %{sign: :negative}), do: :gt
def compare(%{sign: :negative}, %{sign: :positive}), do: :lt
def compare(
%{numerator: numerator, denominator: denominator, sign: sign},
%{numerator: numerator, denominator: denominator, sign: sign}
),
do: :eq
def compare(fraction1, fraction2) do
fraction1 = reduce(fraction1)
fraction2 = reduce(fraction2)
case minus(fraction1, fraction2) do
fraction when is_zero_fraction(fraction) -> :eq
%{sign: :positive} -> :gt
%{sign: :negative} -> :lt
end
end
@doc """
Reduce the fraction to the simplest.
```
Fraction.new(1280, 2560)
|> Fraction.reduce()
#=> %{numerator: 1, denominator: 2, sign: :positive}
```
"""
@spec reduce(__MODULE__.t()) :: __MODULE__.t()
def reduce(fraction) when is_zero_fraction(fraction), do: fraction
def reduce(%__MODULE__{numerator: numerator, denominator: denominator} = fraction) do
max_common_divisor = Integer.gcd(numerator, denominator)
%{
fraction
| numerator: div(numerator, max_common_divisor),
denominator: div(denominator, max_common_divisor)
}
end
@doc """
Fraction `+` operation without reduction.
```
Fraction.new(1, 2)
|> Fraction.add(Fraction.new(1, 3))
#=> %{numerator: 5, denominator: 6, sign: :positive}
Fraction.new(2, 4)
|> Fraction.add(Fraction.new(1, 3))
#=> %{numerator: 10, denominator: 12, sign: :positive}
```
"""
@spec add(__MODULE__.t(), __MODULE__.t()) :: __MODULE__.t()
def add(fraction1, fraction2) when is_zero_fraction(fraction1), do: fraction2
def add(fraction1, fraction2) when is_zero_fraction(fraction2), do: fraction1
def add(
%__MODULE__{denominator: denominator} = fraction1,
%__MODULE__{denominator: denominator} = fraction2
) do
numerator =
signed_number(fraction1.sign).(fraction1.numerator) +
signed_number(fraction2.sign).(fraction2.numerator)
new(numerator, denominator)
end
def add(%__MODULE__{} = fraction1, %__MODULE__{} = fraction2) do
numerator =
signed_number(fraction1.sign).(fraction1.numerator * fraction2.denominator) +
signed_number(fraction2.sign).(fraction2.numerator * fraction1.denominator)
new(numerator, fraction1.denominator * fraction2.denominator)
end
defp signed_number(:positive), do: &Kernel.+/1
defp signed_number(:negative), do: &Kernel.-/1
@doc """
Fraction `-` operation without reduction.
```
Fraction.new(1, 3)
|> Fraction.minus(Fraction.new(1, 2))
#=> %{numerator: 1, denominator: 6, sign: :negative}
Fraction.new(5, 6)
|> Fraction.minus(Fraction.new(1, 6))
#=> %{numerator: 4, denominator: 6, sign: :positive}
```
"""
@spec minus(__MODULE__.t(), __MODULE__.t()) :: __MODULE__.t()
def minus(fraction, fraction), do: new(0)
def minus(fraction1, fraction2) do
fraction2 |> opposite() |> add(fraction1)
end
@doc """
Fraction `*` operation without reduction.
```
Fraction.new(1, 3)
|> Fraction.multi(Fraction.new(1, 2))
#=> %{numerator: 1, denominator: 6, sign: :positive}
Fraction.new(2, 3)
|> Fraction.multi(Fraction.new(1, 6))
#=> %{numerator: 2, denominator: 18, sign: :positive}
```
"""
@spec multi(__MODULE__.t(), __MODULE__.t()) :: __MODULE__.t()
def multi(fraction, _fraction2) when is_zero_fraction(fraction), do: new(0)
def multi(_fraction1, fraction) when is_zero_fraction(fraction), do: new(0)
def multi(fraction1, fraction2) do
new(
signed_number(fraction1.sign).(fraction1.numerator) *
signed_number(fraction2.sign).(fraction2.numerator),
fraction1.denominator * fraction2.denominator
)
end
@doc """
Fraction `/` operation without reduction.
```
Fraction.new(1, 3)
|> Fraction.divide(Fraction.new(1, 2))
#=> %{numerator: 2, denominator: 3, sign: :positive}
Fraction.new(2, 4)
|> Fraction.divide(Fraction.new(1, 2))
#=> %{numerator: 4, denominator: 4, sign: :positive}
```
"""
@spec divide(__MODULE__.t(), __MODULE__.t()) :: __MODULE__.t()
def divide(fraction, _fraction) when is_zero_fraction(fraction), do: fraction
def divide(_fraction, fraction) when is_zero_fraction(fraction), do: raise(ArithmeticError)
def divide(fraction1, fraction2) do
fraction2 |> inverse() |> multi(fraction1)
end
@doc "Same to `inverse/1`"
@spec reciprocal(__MODULE__.t()) :: __MODULE__.t()
def reciprocal(fraction), do: inverse(fraction)
@doc """
Give the fraction reciprocal.
If the given numerator is `0`, then raise `ArithmeticError`.
```
Fraction.new(1, 2)
|> Fraction.inverse()
#=> %{numerator: 2, denominator: 1, sign: :positive}
```
"""
@spec inverse(__MODULE__.t()) :: __MODULE__.t()
def inverse(fraction) when is_zero_fraction(fraction),
do: raise(ArithmeticError)
def inverse(%__MODULE__{numerator: numerator, denominator: denominator} = fraction) do
%{fraction | numerator: denominator, denominator: numerator}
end
@doc """
Give the opposite fraction
If the given numerator is `0`, returns fraction itself.
```
Fraction.new(1, 2)
|> Fraction.opposite()
#=> %{numerator: 1, denominator: 2, sign: :negative}
Fraction.new(0, 2)
|> Fraction.opposite()
#=> %{numerator: 0, denominator: :any, sign: :positive}
```
"""
@spec opposite(__MODULE__.t()) :: __MODULE__.t()
def opposite(fraction) when is_zero_fraction(fraction), do: fraction
def opposite(%__MODULE__{sign: sign} = fraction) do
%{fraction | sign: opposite_sign(sign)}
end
defp opposite_sign(:positive), do: :negative
defp opposite_sign(:negative), do: :positive
@doc "Same to `opposite/1`"
@spec negate(__MODULE__.t()) :: __MODULE__.t()
def negate(fraction), do: opposite(fraction)
@doc "Return the absolute value of fraction."
@spec abs(__MODULE__.t()) :: __MODULE__.t()
def abs(%__MODULE__{sign: :positive} = fraction), do: fraction
def abs(%__MODULE__{sign: :negative} = fraction), do: %{fraction | sign: :positive}
@doc """
Round a fraction to an arbitrary number of fractional digits.
### Options
- `:precision` - between `0` and `15` . It uses `Float.round/2` to round.
"""
@spec to_float(__MODULE__.t()) :: float()
@spec to_float(__MODULE__.t(), opts :: [precision: non_neg_integer()]) :: float()
def to_float(fraction, opts \\ [])
def to_float(%__MODULE__{numerator: 0}, _opts), do: 0.0
def to_float(%__MODULE__{numerator: numerator, denominator: denominator, sign: sign}, opts) do
value = signed_number(sign).(numerator / denominator)
case opts[:precision] do
nil -> value
precision when precision in 0..15 -> Float.round(value, precision)
_ -> raise ArgumentError, "precision should be in 0..15"
end
end
end
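# --- Usage sketch (not part of the original source) ---
# Chaining the operations defined above:
#   alias Muscat.Fraction
#   Fraction.new(1, 2)
#   |> Fraction.add(Fraction.new(1, 3))   # 5/6
#   |> Fraction.multi(Fraction.new(3, 5)) # 15/30
#   |> Fraction.reduce()                  # 1/2
#   |> Fraction.to_float(precision: 2)    #=> 0.5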
|
lib/muscat/fraction.ex
| 0.937868 | 0.880797 |
fraction.ex
|
starcoder
|
defmodule Surgex.Guide.SoftwareDesign do
@moduledoc """
Higher level application design and engineering guidelines.
"""
@doc """
An `else` block should be provided for `with` when it forwards cases from external files.
## Reasoning
  The `with` clause allows omitting `else` entirely if its only purpose is to amend the specific
  series of matches filled between `with` and `do`. In such cases, all non-matching outputs are
  forwarded (or "bubbled up") by `with`. This is a cool feature that reduces the amount of
  redundant negative matches when there's no need to amend them.
  It may, however, become a readability and maintenance problem when `with` calls complex, external
  code from separate files, which makes it hard to reason about the complete set of possible
  outcomes of the whole `with` block. Therefore, it's encouraged to provide an `else` which lists
  the complete set of possible negative scenarios, even if they are not mapped to a different output.
## Examples
Preferred:
defmodule RegistrationService do
def call(attrs) do
with {:ok, user} <- CreateUserFromAttributesService.call(attrs),
:ok <- SendUserWelcomeEmailService.call(user)
do
{:ok, user}
else
{:error, changeset = %Ecto.Changeset{}} -> {:error, changeset}
{:error, :not_available} -> {:error, :not_available}
end
end
end
Unclear cross-module flow:
defmodule RegistrationService do
def call(attrs) do
with {:ok, user} <- CreateUserFromAttributesService.call(attrs),
:ok <- SendUserWelcomeEmailService.call(user)
do
{:ok, user}
end
end
end
"""
def with_else_usage, do: nil
@doc """
Redundant `else` block should not be provided for the `with` directive.
## Reasoning
  In cases when all the code called in `with` resides in the same file (or in a standard library)
  and when none of the `else` clauses would override the negative path's output, it's more semantic
  and descriptive to simply drop the `else` entirely. It's worth noting that `else` blocks in `with`
  bring an additional maintenance cost, so they should be justified by either of the conditions
  mentioned above.
## Examples
Preferred:
defmodule RegistrationService do
def call(attrs) do
with {:ok, user} <- insert_user(attrs),
:ok <- send_welcome_email(user)
do
{:ok, user}
end
end
defp insert_user(attrs), do: # ...
defp send_welcome_email(user), do: # ...
end
Redundant and hard to maintain `else`:
defmodule RegistrationService do
def call(attrs) do
with {:ok, user} <- insert_user(attrs),
:ok <- send_welcome_email(user)
do
{:ok, user}
else
{:error, :insertion_error_a} -> {:error, :insertion_error_a}
{:error, :insertion_error_b} -> {:error, :insertion_error_b}
{:error, :insertion_error_c} -> {:error, :insertion_error_c}
{:error, :mailing_service_error_a} -> {:error, :mailing_service_error_a}
{:error, :mailing_service_error_b} -> {:error, :mailing_service_error_b}
{:error, :mailing_service_error_c} -> {:error, :mailing_service_error_c}
end
end
defp insert_user(attrs), do: # ...
defp send_welcome_email(user), do: # ...
end
"""
def with_else_redundancy, do: nil
@doc """
Matches in a `with-else` block should be placed in occurrence order.
## Reasoning
  Doing this will make it much easier to reason about the whole flow of the `with` block, which
  tends to be quite complex and a core piece of flow control.
## Examples
Preferred:
defmodule RegistrationService do
def call(attrs) do
with {:ok, user} <- CreateUserFromAttributesService.call(attrs),
:ok <- SendUserWelcomeEmailService.call(user)
do
{:ok, user}
else
{:error, changeset = %Ecto.Changeset{}} -> {:error, changeset}
{:error, :not_available} -> {:error, :not_available}
end
end
end
Unclear flow:
defmodule RegistrationService do
def call(attrs) do
with {:ok, user} <- CreateUserFromAttributesService.call(attrs),
:ok <- SendUserWelcomeEmailService.call(user)
do
{:ok, user}
else
{:error, :not_available} -> {:error, :not_available}
{:error, changeset = %Ecto.Changeset{}} -> {:error, changeset}
end
end
end
"""
def with_else_order, do: nil
@doc """
Errors from external contexts should be mapped to have a meaning in the current context.
## Reasoning
  Elixir allows matching and forwarding everything in `case` and `with-else` match clauses (which are
  often used to control the high-level application flow) or simply omitting `else` for `with`. This
  often results in bubbling up errors, such as those in `{:error, reason}` tuples, to the next
  context, in which those errors are ambiguous or ill-fitting.
  For instance, `{:error, :forbidden}` returned from an HTTP client is ambiguous and doesn't fit the
  context of the service or controller that calls it. The following questions are unanswered:
- what exactly is forbidden?
- why would I care if it's forbidden and not, for instance, temporarily unavailable?
- what actually went wrong?
- how does it map to actual input args?
A reverse case is also possible when errors in lower contexts are intentionally named to match
upper context expectations, breaking the separation of concerns. For instance, a service may
  return `{:error, :not_found}` or `{:error, :forbidden}` in order to implicitly match a fallback
  controller's expectations, even though more descriptive error naming could've been invented.
  Therefore, care should be put into naming errors in a way that matters in the contexts where
  they're born, and into leveraging `case` and `with-else` constructs to re-map ambiguous or
  ill-fitting errors into meaningful, fitting ones when they travel across context bounds.
## Examples
Preferred:
defmodule RegistrationService do
def call(attrs) do
with {:ok, user} <- CreateUserFromAttributesService.call(attrs),
:ok <- SendUserWelcomeEmailService.call(user)
do
{:ok, user}
else
{:error, changeset = %Ecto.Changeset{}} -> {:error, :invalid_attributes, changeset}
{:error, :not_available} -> {:error, :mailing_service_not_available}
end
end
end
Ambiguous and "out of context" errors:
defmodule RegistrationService do
def call(attrs) do
with {:ok, user} <- CreateUserFromAttributesService.call(attrs),
:ok <- SendUserWelcomeEmailService.call(user)
do
{:ok, user}
else
{:error, changeset = %Ecto.Changeset{}} -> {:error, changeset}
{:error, :not_available} -> {:error, :not_available}
end
end
end
"""
def error_mapping, do: nil
@doc """
Non-false moduledoc should be filled only for global, context-external app modules.
## Reasoning
  Filling in moduledoc adds the module to the module list in the documentation. Therefore, it
  makes little sense to use it only to leave a comment about the internal mechanics of a specific
  module or its meaning in the context of a closed application domain. For such cases, regular
  comments should be used. This will yield clean documentation with an eagle-eye overview of the
  system and its parts that can be directly used from a global or external perspective.
## Example
Preferred:
defmodule MyProject.Accounts do
@moduledoc \"""
Account management system.
\"""
@doc \"""
    Registers a user account.
\"""
def register(attrs) do
MyProject.Accounts.RegistrationService.call(attrs)
end
end
defmodule MyProject.Accounts.RegistrationService do
@moduledoc false
# Fails on occasion due to Postgres connection issue.
# Works best on Fridays.
def call(attrs) do
# ...
end
end
  Unnecessary externalization and comment duplication:
defmodule MyProject.Accounts do
@moduledoc \"""
Account management system.
\"""
@doc \"""
    Registers a user account.
\"""
def register(attrs) do
MyProject.Accounts.RegistrationService.call(attrs)
end
end
defmodule MyProject.Accounts.RegistrationService do
@moduledoc \"""
    Registers a user account.
Fails on occasion due to Postgres connection issue.
Works best on Fridays.
\"""
def call(attrs) do
# ...
end
end
"""
def moduledoc_usage, do: nil
@doc """
Usage of `import` directive at module level or without the `only` option should be avoided.
## Reasoning
When importing at module level, one adds a set of foreign functions to the module that may
conflict with existing ones. This gets worse when multiple modules are imported and their names
start to clash with each other. When project complexity increases over time and the preference for
imports over aliases grows, the developer will sooner or later be forced to name functions in a
custom to-be-imported module in a way that scopes them in a target module and/or avoids naming
conflicts with other to-be-imported modules. This results in bad function naming - names start to
be unnecessarily long or to repeat the module name in a function name.
  When importing without the `only` option, it's unclear, without visiting the source of the
  imported module, which exact function names and arities come from the external module. This
  makes the code harder to reason about.
## Examples
Preferred:
defmodule User do
def full_name(%{first_name: first_name, last_name: last_name}) do
import Enum, only: [join: 2]
join([first_name, last_name])
end
end
Too wide scope:
defmodule User do
import Enum, only: [join: 2]
def full_name(%{first_name: first_name, last_name: last_name}) do
join([first_name, last_name])
end
end
Unknown imports:
defmodule User do
def full_name(%{first_name: first_name, last_name: last_name}) do
import Enum
join([first_name, last_name])
end
end
"""
def import_usage, do: nil
@doc """
Tests should only `use` support test case modules that they need.
## Reasoning
If specific test only unit tests a module without using a web request, it shouldn't use `ConnCase`
and if it doesn't create records, it shouldn't use `DataCase`. For many tests, `ExUnit.Case` will
be enough of a support.
This yields more semantic test headers and avoids needlessly importing and abusing of more complex
support files.
## Examples
Preferred:
defmodule MyProject.Web.MyControllerTest do
use MyProject.Web.ConnCase
end
defmodule MyProject.MyServiceTest do
use MyProject.DataCase
end
defmodule NeitherControllerNorDatabaseTest do
use ExUnit.Case
end
Test support file abuse:
defmodule MyProject.MyServiceTest do
use MyProject.Web.ConnCase
end
defmodule NeitherControllerNorDatabaseTest do
use MyProject.DataCase
end
"""
def test_case_usage, do: nil
@doc """
Sequential variable names, like `user1`, should respect underscore naming (and be avoided).
## Reasoning
Sequential variable names should be picked only as a last resort, since they're hard to express
  in underscore notation and are non-descriptive. For instance, in a comparison function,
  `compare(integer_1, integer_2)` can be replaced with `compare(integer, other_integer)`.
  A sequence number added as a suffix without the underscore breaks underscore naming and
  looks especially bad when the name consists of more than one word, like `user_location1`.
## Examples
Preferred:
def compare(integer, other_integer), do: # ...
Preferred as last resort:
def add_three_nums(integer_1, integer_2, integer_3), do: # ...
Plain ugly:
def concat(file_name1, file_name2), do: # ...
"""
def sequential_variable_naming, do: nil
@doc """
Predicate function names shouldn't start with `is_` and should end with `?`.
## Reasoning
It's an Elixir convention to name predicate functions with a `?` suffix. It leverages the fact
that this character can appear as function name suffix to make it easier to differentiate such
functions from others.
It's also an Elixir convention not to name predicate functions with a `is_` prefix, since that
prefix is reserved for guard-enabled predicate macros.
> Note that this rule doesn't apply to service functions that return success tuples instead of
plain boolean values.
## Examples
Preferred:
def active?(user), do: true
Function that pretends to be a guard:
def is_active?(user), do: true
Function that pretends not to be a predicate:
def active(user), do: true
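Acceptable service function that returns a result tuple instead of a plain boolean (a sketch;
the function name is made up):
    def check_activity(user) do
      if user.confirmed_at, do: {:ok, :active}, else: {:error, :inactive}
    end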
"""
def predicate_function_naming, do: nil
@doc """
Function clauses should be grouped together, ie. without a blank line between them.
## Reasoning
This makes it easy to read the whole set of a function's clauses and to spot where the story
of that particular function starts and ends.
## Examples
Preferred:
    def active?(%User{confirmed_at: nil}), do: false
    def active?(%User{}), do: true

    def deleted?(%User{deleted_at: nil}), do: false
    def deleted?(%User{}), do: true

No obvious visual bounds for each function:

    def active?(%User{confirmed_at: nil}), do: false

    def active?(%User{}), do: true

    def deleted?(%User{deleted_at: nil}), do: false

    def deleted?(%User{}), do: true
"""
def function_clause_grouping, do: nil
@doc """
Functions should be grouped by their relationship rather than by "public then private".
## Reasoning
The existence of the `def` + `defp` directive pair makes it possible to leave behind the old
habit of defining all public functions before private ones. Keeping related functions next to
each other makes the code faster to read and the whole module flow easier to grasp.
The best rule of thumb is to place every private function directly below the first function
that calls it.
## Examples
Preferred:
    def a, do: a_helper()
    defp a_helper, do: nil

    def b, do: b_helper()
    defp b_helper, do: nil

Harder to read:

    def a, do: a_helper()
    def b, do: b_helper()

    defp a_helper, do: nil
    defp b_helper, do: nil
"""
def function_order, do: nil
@doc """
Functions should not include more than one level of block nesting.
## Reasoning
Constructs like `with`, `case`, `cond`, `if` or `fn` often need their own vertical space in order
to make them readable, avoid cluttering and explicitly express dependencies needed by each block.
Therefore, if they appear within each other, it's preferable to extract the nested logic into a
separate function. This will often yield a good chance to replace some of these constructs with
the preferred solution of pattern matching on function arguments.
## Examples
Preferred:
def calculate_total_cart_price(cart, items_key \\\\ :items, omit_below \\\\ 0) do
      sum_cart_items_price(cart[items_key], omit_below)
end
defp sum_cart_items_price(nil, _omit_below), do: 0
defp sum_cart_items_price(items, omit_below) do
Enum.reduce(items, 0, &reduce_cart_item_price(&1, &2, omit_below))
end
defp reduce_cart_item_price(%{price: price}, total, omit_below) when price < omit_below do
total
end
defp reduce_cart_item_price(%{price: price}, total, _omit_below) do
total + price
end
Cluttered and without obvious variable dependencies (`items_key` is not used in the deepest block
while `omit_below` is):
def calculate_total_cart_price(cart, items_key \\\\ :items, omit_below \\\\ 0) do
if cart[items_key] do
Enum.reduce(cart[items_key], 0, fn %{price: price}, total ->
if price < omit_below do
total
else
total + price
end
end)
else
0
end
end
"""
def nesting_depth, do: nil
@doc """
Flow control directives should be leveraged to yield compact and readable code.
## Reasoning
Each of flow control directives (`if`, `cond`, `case`, `with`) has its own purpose, but sometimes
more than one of them can be used to achieve the same goal. In such cases, the one that yields the
most compact and readable code should be picked.
## Examples
Preferred:
with {:ok, user} <- load_user(id),
{:ok, avatar} <- load_user_avatar(user)
do
{:ok, user, avatar}
end
Redundant `case` equivalent of the above:
case load_user(id) do
{:ok, user} ->
case load_user_avatar(user) do
{:ok, avatar} ->
{:ok, user, avatar}
error -> error
end
error -> error
end
"""
def flow_directive_usage, do: nil
@doc """
The `unless` directive should never be used with an `else` block or with logical operators.
## Reasoning
The `unless` directive is confusing and hard to reason about when used with more complex
conditions or an alternative code path (which could be read as "unless unless"). Therefore, in
such cases it should be rewritten as an `if`.
## Examples
Preferred:
unless user.confirmed, do: raise("user is not confirmed")
if user.banned and not(user.vip) do
raise("user is banned")
else
confirm_action(user)
end
Too hard to read:
unless not(user.banned) or user.vip do
confirm_action(user)
else
raise("user is banned")
end
"""
def unless_usage, do: nil
@doc """
Pattern matching should be preferred over line-by-line destructuring of maps and structs.
## Reasoning
Pattern matching can be used to vastly simplify destructuring of complicated structures, so it
should be used whenever possible, instead of extracting fields one by one via the struct
getter (`.`) or the access operator (`[]`).
It's supported in function clauses, so extensive use of the feature will also encourage writing
more pattern-matched functions, which should in turn yield a code easier to parse for Elixir
developers. Function headers with long matches can be easily broken into multiple lines and
indented in a clean way, so the length of a match should not be the factor for making a decision
about using or not using it.
Even outside of function clauses, pattern matching is a blazing fast VM-supported feature that,
combined with guards unwrapped at compilation time, should yield the best possible code
performance.
It's also worth mentioning that pattern matching can also be done inside of the `assert` macro
in `ExUnit` in order to write selective, nicely diffed assertions on maps and structs.
Pattern matching should not be preferred over functions from `Keyword` module for destructuring
option lists, even if they can hold only one possible option at a time.
## Examples
Preferred in function clauses:
def create_user_from_json_api_document(%{
"data" => %{
"id" => id,
"attributes" => %{
"name" => name,
"email" => email,
"phone" => phone
}
}
}, mailing_enabled) do
user = insert_user(id, name, email, phone)
if mailing_enabled, do: send_welcome_email(user)
end
Preferred in tests:
assert %User{
name: "John",
phone: "+48 600 700 800"
    } = CreateUserAction.call(name: "John", email: email_sequence(), phone: "+48 600 700 800")
Cluttered:
id = doc["data"]["id"]
name = doc["data"]["attributes"]["name"]
email = doc["data"]["attributes"]["email"]
phone = doc["data"]["attributes"]["phone"]
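Option lists, by contrast, are still best destructured with `Keyword` functions (a sketch):
    def create_user(attrs, opts \\\\ []) do
      send_welcome_email = Keyword.get(opts, :send_welcome_email, false)
      # ...
    end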
"""
def pattern_matching_usage, do: nil
@doc """
Kernel macros for working with nested structures should be preferred over manual assembly.
This is about macros from the `*_in` family in the `Elixir.Kernel` module, like `pop_in`,
`put_in` or `update_in`.
## Reasoning
Using these macros can vastly reduce the amount of code and ensure that the complexity of
digging into and modifying nested structures is handled in the fastest way possible, as
guaranteed by relying on the standard library. Implementing these flows manually leads to
repetitive code and an open door for extra bugs.
## Examples
Preferred:
opts = [
user: [
name: "John",
email: "<EMAIL>"
]
]
opts_with_phone = put_in opts[:user][:phone], "+48 600 700 800"
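The same nested structure can be modified in place with `update_in` (a sketch):
    opts_with_upcased_name = update_in opts[:user][:name], &String.upcase/1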
Unneeded complexity:
opts = [
user: [
name: "John",
email: "<EMAIL>"
]
]
user_with_phone = Keyword.put(opts[:user], :phone, "+48 600 700 800")
opts_with_phone = Keyword.put(opts, :user, user_with_phone)
"""
def nested_struct_macro_usage, do: nil
@doc """
Keyword lists and tuples should be preferred over maps and lists for passing options.
## Reasoning
Keyword lists and tuples are a standard, conventional means for passing internal information
between Elixir modules.
Keyword lists enforce the usage of atoms for keys and allow a single key to be passed more than
once, in a specific order, when that's desired (and provide a `merge` function for when it's not).
The price for the last two feats is that they are not pattern-matchable (and should never be
pattern matched) in cases when order and duplication are not important - functions from the
`Elixir.Keyword` module should be used in those cases. On the other hand, pattern matching may
come in handy when parsing options with a significant order of keys.
Tuples declare a syntax for short, efficient, predefined lists and are useful in simpler and
convention-driven cases, in which key naming is not needed. For instance, there's an established
convention to return `{:ok, result}`/`{:error, reason}` tuples from actions that can succeed or
fail without throwing.
## Examples
Preferred:
defp create_user(attrs, opts \\\\ []) do
# required option
      auth_scope = Keyword.fetch!(opts, :auth_scope)
# options with defaults
send_welcome_email = Keyword.get(opts, :send_welcome_email, false)
mark_as_confirmed = Keyword.get(opts, :mark_as_confirmed, true)
case Repo.insert(%User{}, attrs) do
{:ok, user} ->
final_user =
user
|> send_email(send_welcome_email)
|> confirm(mark_as_confirmed)
{:ok, final_user}
{:error, changeset} ->
{:error, map_changeset_errors_to_error_reason(changeset.errors)}
end
end
Invalid usage of maps over keyword lists:
defp create_user(attrs, opts = %{}) do
# ...
end
Invalid usage of lists over tuples:
defp create_user(attrs) do
# ...
[:ok, user]
end
"""
def option_format, do: nil
@doc ~S"""
Errors should be thrown as close to the spot of failure and unhandled unless required.
## Reasoning
Throwing an exception (or using a throwing equivalent of a standard library function) allows to
avoid spending an additional time on inventing failure paths in the code and handling them higher
in the call stack.
It may be tempting to go with an error return value, such as the `{:error, ...}` tuple, in order
to let the code higher in the call stack decide what to do in a specific situation, but that
only pays off when there actually exists a valid case higher in the call stack that would want
to do something other than throwing or returning a meaningless, generic error.
Otherwise, when a hard system-wide failure ends up not being an exception, it may look like a step
towards reusability, but it's really anti-semantic and the specific code unit (function or module)
stops telling the whole story, ie. multiple files must be read only to reach the simple
conclusion that we really end up with an exception anyway.
This may be extra important during a debugging session, since the closer an exception happens to
the spot of failure, the easier it is for developer to understand the real reason behind it. For
the same reason, it's always better to use a throwing equivalent of a standard library function
(with the `!` suffix) in places which don't handle the negative scenario anyway further down the
pipe.
## Examples
Preferred:
def do_something_external(params) do
required = Keyword.fetch!(params, :required)
optional = Keyword.get(params, :optional)
integer =
params
|> Keyword.fetch!(:integer)
|> String.to_integer
case external_api_call(required, optional, integer) do
%{status: 200, body: body} ->
body["result"]
%{status: error_status, body: error_body} ->
raise("External API error #{error_status}: #{inspect error_body}")
end
end
Bad code (read the explanation below):
def do_something_external(params) do
required = Keyword.get(params, :required)
optional = Keyword.get(params, :optional)
{integer, _} =
params
|> Keyword.fetch!(:integer)
|> Integer.parse
case external_api_call(required, optional, integer) do
%{status: 200, body: body} ->
{:ok, body["result"]}
_ ->
{:error, :external_api_failed}
end
end
The code above has the following problems:
- not throwing on a forgotten `:required` option as early as possible will yield problems further
down the pipe that are hard to debug, since the debugging session will have to track the issue
back to the original spot that we could've thrown at from the beginning
- not using the optimal standard library means of throwing a descriptive error for a failed
string-to-integer conversion (`String.to_integer`) will yield a less descriptive match error (and
the match-all on the 2nd element of the tuple from `Integer.parse` may produce bugs)
- returning `{:error, :external_api_failed}` on failure from the external API forces the caller of
`do_something_external` to handle this case, so it's justified only if the caller can actually do
something meaningful (other than raising, silencing the issue or making it ambiguous)
"""
def error_handling, do: nil
@doc ~S"""
Functions should return `:ok`/`:error` when both success and failure paths are present.
## Reasoning
First of all, we do want to adhere to the long-standing Elixir convention of returning
`:ok`/`:error` atoms from functions. They may either be stand-alone (simple `:ok`/`:error` when
there's nothing more to add) or wrapped in a tuple with extra contextual info, such as `{:ok,
fetched_data}` or `{:error, reason}`. Tuples may be mixed with stand-alone atoms, eg. the same
function may return `:ok` upon success (since there's nothing more to add upon success) while
multiple distinct error paths may return `{:error, reason}` to make them distinct to the caller.
That said, there's a case when usage of this pattern may make the code more confusing. It's when
specific code simply cannot fail. If it cannot fail, then it doesn't make sense to make it tell
its caller that something went ok. In such cases, the function should simply return the value that
was asked for (`fetched_data` in example above) or `nil` if there's nothing to return (eg. when a
non-failing function only creates side effects).
This fits nicely into the way the Elixir standard library is designed (eg. `Map.get/2` never fails
so it only returns the value but `Map.fetch/2` does fail so it returns `{:ok, value}` or
`:error`). As such, this rule makes our code consistent with Elixir conventions and community code
that's supposed to follow them.
> Refer to the `Surgex.Guide.SoftwareDesign.error_handling/0` rule in order to learn when to
actually implement the failure path.
## Examples
Preferred:
def print_debug_info(message) do
IO.puts(message)
nil
end
def remove_file(path) do
      if File.exists?(path) do
:ok = File.rm(path)
else
{:error, :file_not_found}
end
end
Confusing `:ok` when there's no failure path (`IO.puts/1` returns `:ok`):
def print_debug_info(message) do
IO.puts(message)
end
def remove_file(path) do
      if File.exists?(path) do
:ok = File.rm(path)
else
raise("No such file: #{inspect path}")
end
end
Lack of `:ok` when there's a failure path (`File.read!/1` returns the file content):
def read_file(path) do
      if File.exists?(path) do
File.read!(path)
else
{:error, :file_not_found}
end
end
"""
def return_ok_error_usage, do: nil
end
|
lib/surgex/guide/software_design.ex
| 0.832407 | 0.632616 |
software_design.ex
|
starcoder
|
defmodule AdventOfCode.Day09 do
@moduledoc false
use AdventOfCode
defmodule Point, do: defstruct(value: nil, coordinates: nil, neighbors: [])
def part1(input) do
grid = preprocess_input(input)
Enum.reduce(grid, [], fn {_, %Point{neighbors: neighbors, value: value} = point}, acc ->
neighbor_points = Enum.map(neighbors, &Map.get(grid, &1))
if Enum.all?(neighbor_points, fn %Point{value: neighbor_value} -> value < neighbor_value end),
do: [point | acc],
else: acc
end)
|> Enum.map(fn %Point{value: value} -> value end)
|> Enum.map(&(&1 + 1))
|> Enum.sum()
end
def part2(input) do
grid = preprocess_input(input)
Enum.reduce(grid, [], fn {_, %Point{neighbors: neighbors, value: value} = point}, acc ->
neighbor_points = Enum.map(neighbors, &Map.get(grid, &1))
if Enum.all?(neighbor_points, fn %Point{value: neighbor_value} -> value < neighbor_value end),
do: [point | acc],
else: acc
end)
|> Enum.map(fn %Point{} = point -> find_basin_points(grid, point, []) end)
|> Enum.map(&Enum.count/1)
|> Enum.sort(:desc)
|> Enum.take(3)
|> Enum.product()
end
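  # Flood fill outward from a low point: recursively visit strictly higher
  # neighbors (height 9 never belongs to a basin), collect every visited
  # point and deduplicate at the end.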
defp find_basin_points(grid, %Point{} = low_point, all_basin_points) do
Enum.reduce(low_point.neighbors, all_basin_points, fn neighbor_coordinates, acc ->
%Point{value: neighbor_value} = neighbor_point = Map.get(grid, neighbor_coordinates)
if low_point.value < neighbor_value && neighbor_value != 9,
do: find_basin_points(grid, neighbor_point, [low_point | acc]),
else: [low_point | acc]
end)
|> Enum.uniq()
end
defp preprocess_input(input) do
grid =
input
|> String.trim()
|> String.split("\n")
|> Enum.map(fn line ->
line
|> String.trim()
|> String.split("")
|> Enum.reject(&(&1 == ""))
|> Enum.map(&String.to_integer/1)
end)
|> Enum.with_index()
|> Enum.reduce(%{}, fn {values, y}, acc ->
values
|> Enum.with_index()
|> Enum.map(fn {value, x} ->
{{x, y}, %Point{value: value, coordinates: {x, y}}}
end)
|> Map.new()
|> Map.merge(acc)
end)
Enum.map(grid, fn {coordinates, %Point{} = point} ->
{coordinates, Map.put(point, :neighbors, find_neighbors(grid, point))}
end)
|> Map.new()
end
defp find_neighbors(grid, %Point{coordinates: {x, y}}) do
neighbor_coordinates = [
{x + 1, y},
{x - 1, y},
{x, y + 1},
{x, y - 1}
]
Enum.filter(grid, fn {coordinates, _} -> Enum.member?(neighbor_coordinates, coordinates) end)
|> Enum.map(fn {coordinates, _} -> coordinates end)
end
end
|
lib/day09.ex
| 0.661486 | 0.602032 |
day09.ex
|
starcoder
|
defmodule ID3 do
@moduledoc """
Read/Write ID3 tags. All tags will be written as `ID3v2.4`.
This module uses [`rust-id3`](https://github.com/jameshurst/rust-id3/) inside, so it follows the restrictions there.
(Currently ID3v1/v2 reading is supported, and all writing will be done as ID3v2.4)
### Examples
Modifying a tag.
iex> {:ok, tag} = ID3.get_tag("audio.mp3")
iex> new_tag = %{tag | year: 2018}
iex> :ok = ID3.write_tag(new_tag, "audio.mp3")
### Why read/write a struct?
Since this is implemented as a NIF, read/writes will open and close the files every call.
We could directly map functions in the `id3::Tag` crate, but it will lack performance, and is also unsafe to do that.
Handling major frames combined as a `Tag` struct will let us allow to edit or copy them in Elixir/Erlang worlds, which is more pleasant for OTP users.
"""
alias ID3.{Native, Tag}
@doc """
Reads a set of major frames from the given mp3 file, as a `Tag` struct.
### Examples
iex> {:ok, tag} = ID3.get_tag("audio.mp3")
iex> tag
%ID3.Tag{
album_artist: "Queen",
artist: "Queen",
album: "News of the World",
date_recorded: ~N[1977-10-07 00:00:00],
date_released: ~N[1981-11-02 00:00:00],
disc: 1,
duration: nil,
genre: "Rock",
pictures: [
%ID3.Picture{
data: <<255, 216, 255, 224, 0, 16, 74, 70, 73, 70, 0, 1, 1, 0, 0, 72, 0,
72, 0, 0, 255, 225, 3, 88, 69, 120, 105, 102, 0, 0, 77, 77, 0, 42, 0, 0,
0, 8, 0, 9, ...>>,
description: "",
mime_type: "image/jpeg",
picture_type: :CoverFront
}
],
title: "We Will Rock You",
total_discs: 1,
total_tracks: 17,
track: 16,
year: 1981
}
iex> ID3.get_tag("not_an_mp3.some")
{:error, :file_open_error}
"""
@spec get_tag(Path.t()) :: {:ok, Tag.t()} | {:error, :file_open_error}
defdelegate get_tag(path), to: Native, as: :get_major_frames
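  # Bang variant of `get_tag/1`: returns the tag directly or raises `ID3.TagIOError`.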
def get_tag!(path), do: get_tag(path) |> bangify!
@doc """
Writes a set of major tags to the given mp3 file.
Setting `nil` for a certain key will remove the previously set value rather than leave it untouched. Be careful!
### Examples
iex> ID3.write_tag(%{ID3.Tag.new() | year: 2016}, "audio.mp3") # removes other tags of "audio.mp3" too.
:ok
"""
@spec write_tag(Tag.t(), Path.t()) :: :ok | {:error, :file_open_error | :tag_write_error}
defdelegate write_tag(tag, path), to: Native, as: :write_major_frames
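  # Bang variant of `write_tag/2`: returns `:ok` or raises `ID3.TagIOError`.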
def write_tag!(tag, path), do: write_tag(tag, path) |> bangify!
defmodule TagIOError do
defexception [:message]
def exception(value) do
msg = "Panic reading tags. Rust ID3 error: #{inspect(value)}"
%__MODULE__{message: msg}
end
end
  defp bangify!(:ok), do: :ok
  defp bangify!({:ok, term}), do: term
  defp bangify!({:error, msg}), do: raise(TagIOError, msg)
end
|
lib/id3.ex
| 0.860911 | 0.482612 |
id3.ex
|
starcoder
|
defmodule ModuleDependencyVisualizer do
alias ModuleDependencyVisualizer.AnalyzeCode
@moduledoc """
This is the public interface for this simple little tool to parse a file or
list of files for dependencies between modules. It will use the `dot` command
to generate a graph PNG for us thanks to graphviz.
"""
@doc """
Analyzes a given list of file paths (absolute or relative), creates the
necessary Graphviz file, and then creates the graph and opens it.
"""
@spec run(list, list) :: :ok
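  # Example invocation (file paths and option values are hypothetical):
  #
  #   ModuleDependencyVisualizer.run(
  #     ["lib/a.ex", "lib/b.ex"],
  #     include: ["MyApp"],
  #     exclude: ["Logger"],
  #     colors: [{~r/Web/, "lightblue"}]
  #   )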
def run(file_paths, options) do
dependency_list =
file_paths
|> analyze()
|> filter(options)
|> reverse_edges(options)
nodes_with_attributes = add_colors(dependency_list, Keyword.get(options, :colors, []))
dependency_list
|> create_gv_file(nodes_with_attributes)
|> create_and_open_graph()
:ok
end
def analyze(file_paths) do
AnalyzeCode.analyze(file_paths)
end
@doc """
Keeps only edges whose source matches one of the `:include` names, drops edges whose target
matches `:exclude`, and optionally removes subgraphs not connected to a given node.
"""
  @spec filter([{String.t(), String.t()}], Keyword.t()) :: [{String.t(), String.t()}]
def filter(deps_graph, opts \\ []) when is_list(opts) do
include_from = Keyword.get(opts, :include, [])
exclude_to = Keyword.get(opts, :exclude, [])
remove_all_graphs_not_connected_to =
Keyword.get(opts, :remove_all_graphs_not_connected_to, :keep_all_graphs)
deps_graph
|> Enum.filter(fn {from, _to} -> contains_include_from(include_from, from) end)
|> Enum.reject(fn {_from, to} -> exclude_to_contains?(exclude_to, to) end)
|> RemoveUnconnectedGraphs.remove_unconnected_graphs(remove_all_graphs_not_connected_to)
end
@doc """
Reverses the direction of the edges matching the `:edges_to_reverse` option.
"""
  @spec reverse_edges([{String.t(), String.t()}], Keyword.t()) :: [{String.t(), String.t()}]
def reverse_edges(deps_graph, opts) do
edges_to_reverse = Keyword.get(opts, :edges_to_reverse, [])
deps_graph
|> Enum.map(fn {from, to} = deps_edge ->
case edge_matches?(edges_to_reverse, deps_edge) do
true ->
{to, from}
false ->
deps_edge
end
end)
end
defp edge_matches?(edges_to_reverse, {dep_from, dep_to}) when is_list(edges_to_reverse) do
Enum.any?(edges_to_reverse, fn {remove_from, remove_to} ->
matches?(dep_from, remove_from) && matches?(dep_to, remove_to)
end)
end
defp contains_include_from([], _from), do: true
defp contains_include_from(include_to, from) do
Enum.any?(include_to, fn list_elem when is_binary(list_elem) ->
String.contains?(from, list_elem)
end)
end
defp exclude_to_contains?(list, value) when is_binary(value) and is_list(list) do
Enum.any?(list, fn list_elem ->
matches?(value, list_elem)
end)
end
defp matches?(value, pattern) when is_binary(pattern) do
String.contains?(value, pattern)
end
defp matches?(value, pattern) do
Regex.match?(pattern, value)
end
@doc """
Creates nodes associated with the given color. Used for rendering of the graph with graphviz.
"""
def add_colors(dependency_list, color_definitions) do
dependency_list
|> distinct_nodes()
|> Enum.map(fn node ->
Enum.find_value(color_definitions, fn {pattern, color} ->
if Regex.match?(pattern, node) do
node_with_color(node, color)
end
end)
end)
|> Enum.reject(&is_nil/1)
end
defp distinct_nodes(dependency_list) do
dependency_list
|> Enum.reduce(MapSet.new(), fn {from, to}, set ->
MapSet.put(set, from) |> MapSet.put(to)
end)
|> MapSet.to_list()
end
defp node_with_color(name, color) do
{name, [fillcolor: color, style: "filled"]}
end
@doc """
Takes a list of dependencies and returns a string that is a valid `dot` file.
"""
@spec create_gv_file(list, list) :: String.t()
def create_gv_file(dependency_list, nodes_with_attributes) do
nodes =
Enum.map(nodes_with_attributes, fn {node, attributes} ->
serialized_attributes = Enum.map(attributes, fn {name, value} -> "#{name}=#{value}" end)
~s| "#{node}" [#{Enum.join(serialized_attributes, ", ")}];|
end)
body = Enum.map(dependency_list, fn {mod1, mod2} -> " \"#{mod1}\" -> \"#{mod2}\";" end)
"digraph G {\n#{Enum.join(body, "\n")}\n#{Enum.join(nodes, "\n")}\n}\n"
end
@doc """
This creates the graphviz file on disk, then runs the `dot` command to
generate the graph as a PNG, and opens that PNG for you.
"""
@spec create_and_open_graph(String.t()) :: {Collectable.t(), exit_status :: non_neg_integer}
def create_and_open_graph(gv_file) do
gv_file_path = "./output.gv"
graph_path = "./graph.png"
File.write(gv_file_path, gv_file)
System.cmd("dot", ["-Tpng", gv_file_path, "-o", graph_path])
System.cmd("open", [graph_path])
end
end
|
lib/module_dependency_visualizer.ex
| 0.778018 | 0.477859 |
module_dependency_visualizer.ex
|
starcoder
|
defmodule DataQuacker.Builder do
@moduledoc false
alias DataQuacker.Context
alias DataQuacker.Matcher
alias DataQuacker.Skipper
alias DataQuacker.Sourcer
alias DataQuacker.Transformer
alias DataQuacker.Validator
def call(
source,
%{__name__: schema_name, matchers: matchers, rows: schema_rows} = _schema,
support_data,
adapter
) do
with {:ok, headers} <- adapter.get_headers(source),
{:ok, source_rows} <- adapter.get_rows(source),
context <-
support_data
|> Context.new()
|> Context.update_metadata(:schema, schema_name),
{:ok, column_mappings} <- Matcher.call(headers, matchers, context) do
build_source_rows(source_rows, schema_rows, column_mappings, context, adapter)
end
end
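  # Recursively walks the source rows, accumulating per-row results and an
  # `all_ok?` flag; the flag decides whether the final reply is {:ok, acc}
  # or {:error, acc}.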
defp build_source_rows(
_source_rows,
_schema_rows,
_column_mappings,
_context,
_adapter,
_acc \\ [],
_all_ok? \\ true
)
defp build_source_rows(
[source_row | rest],
schema_rows,
column_mappings,
context,
adapter,
acc,
all_ok?
) do
context = Context.increment_row(context)
source_row = adapter.get_row(source_row)
{result, context} = do_build_source_row(source_row, schema_rows, column_mappings, context)
build_source_rows(
rest,
schema_rows,
column_mappings,
context,
adapter,
result ++ acc,
all_ok? and
Enum.all?(result, fn
{:ok, _res} -> true
_el -> false
end)
)
end
defp build_source_rows([], _schema_rows, _column_mappings, _context, _adapter, acc, true),
do: {:ok, acc}
defp build_source_rows([], _schema_rows, _column_mappings, _context, _adapter, acc, false),
do: {:error, acc}
defp do_build_source_row({:ok, source_row}, schema_rows, column_mappings, context) do
values = parse_row_values(source_row, column_mappings)
build_schema_rows(schema_rows, values, context)
end
defp do_build_source_row(error, _schema_rows, _column_mappings, _context), do: error
defp build_schema_rows(_schema_rows, _values, _context, acc \\ [])
defp build_schema_rows([row | rest], values, context, acc) do
case do_build_schema_row(row, values, context) do
:skip -> build_schema_rows(rest, values, context, acc)
{:ok, fields, context} -> build_schema_rows(rest, values, context, [{:ok, fields} | acc])
error -> build_schema_rows(rest, values, context, [error | acc])
end
end
defp build_schema_rows([], _values, context, acc), do: {acc, context}
defp do_build_schema_row(
%{
__index__: row_index,
fields: fields,
validators: validators,
transformers: transformers,
skip_if: skip_if
},
values,
context
) do
with context <- Context.update_metadata(context, :row, row_index),
{:ok, fields, context} <- fields |> Enum.into([]) |> build_fields(values, context),
{:ok, fields, context} <- Transformer.call(fields, transformers, context),
:ok <- Validator.call(fields, validators, context),
false <- Skipper.call(fields, skip_if, context) do
{:ok, fields, context}
else
true -> :skip
error -> error
end
end
defp build_fields(_fields, _values, _context, _acc \\ %{})
defp build_fields([{field_name, field} | fields], values, context, acc) do
case do_build_field(field, values, context) do
:skip ->
build_fields(fields, values, context, acc)
{:ok, field, context} ->
build_fields(fields, values, context, Map.put(acc, field_name, field))
error ->
error
end
end
defp build_fields([], _values, context, acc), do: {:ok, acc, context}
defp do_build_field(
%{
__name__: field_name,
validators: validators,
transformers: transformers,
skip_if: skip_if
} = field,
values,
context
) do
with context <- Context.update_metadata(context, :field, field_name),
{:ok, value, context} <- do_build_field_value(field, values, context),
{:ok, value, context} <- Transformer.call(value, transformers, context),
:ok <- Validator.call(value, validators, context),
false <- Skipper.call(value, skip_if, context) do
{:ok, value, context}
else
true -> :skip
error -> error
end
end
defp do_build_field_value(%{__type__: :sourced, source: source}, values, context) do
{:ok, Sourcer.call(source, values, context), context}
end
defp do_build_field_value(%{__type__: :wrapper, subfields: subfields}, values, context) do
subfields
|> Enum.into([])
|> build_fields(values, context)
end
defp parse_row_values(row, column_mappings) do
column_mappings
|> Enum.map(fn {target, index} -> {target, Enum.at(row, index)} end)
|> Enum.into(%{})
end
end
|
lib/data_quacker/builder.ex
| 0.699973 | 0.50415 |
builder.ex
|
starcoder
|
defmodule Sigaws.Provider do
@moduledoc """
This behavior defines the callbacks expected from an implementation needed
for signature verification.
The `pre_verification` callback is expected to use the context data to
verify/validate the request. All the information available for verification
are passed in `Sigaws.Ctxt`. This callback should return `:ok` when
verification passes or return `{:error, atom, binary}` when it fails.
At the minimum return an error when:
- region is not one of supported regions
- service is not one of supported services
- request expired (based on `signed_at_amz_dt` and `expires_in`)
The `signing_key` callback is called only when
`pre_verification` succeeds without any error. This key should be generated
as outlined
[here](http://docs.aws.amazon.com/general/latest/gr/signature-v4-examples.html).
The returned key is used to recompute the signature to verify against.
A helper function to generate this (`Sigaws.Util.signing_key/4`) is provided
for convenience. This approach of relying on a callback to get signing key
instead of requiring the secret enables better key managment if desired.
"""
alias Sigaws.Ctxt
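  # A minimal sketch of an implementation (module name, supported region and
  # key lookup are hypothetical; the `Ctxt` field names and the argument
  # order of `Sigaws.Util.signing_key/4` are assumptions):
  #
  #   defmodule MyApp.SigProvider do
  #     @behaviour Sigaws.Provider
  #
  #     def pre_verification(%Sigaws.Ctxt{region: "us-east-1"} = ctxt) do
  #       Sigaws.Util.check_expiration(ctxt)
  #     end
  #
  #     def pre_verification(_ctxt), do: {:error, :unknown, "region"}
  #
  #     def signing_key(%Sigaws.Ctxt{access_key: "AKIAEXAMPLE"} = ctxt) do
  #       Sigaws.Util.signing_key("secret", ctxt.signed_at_amz_dt, ctxt.region, ctxt.service)
  #     end
  #
  #     def signing_key(_ctxt), do: {:error, :unknown, "access_key"}
  #   end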
@doc """
Validate signature info in the signed request.
Use this to validate that only supported regions/services are accepted.
Expiration check should be performed if the corresponding attribute is set.
Sigaws will halt the verification process when this returns an errror. That
same error is returned to the caller.
| Returns | When |
|:------- |:---- |
| `{:error, :expired, ""}` | Check `Sigaws.Util.check_expiration/1` |
| `{:error, :unknown, "region"}` | Region not supported |
| `{:error, :unknown, "service"}` | Service not supported |
| `{:error, atom, binary}` | For other errors as defined by the implementation |
| `:ok` | Verification passes |
"""
@callback pre_verification(ctxt :: Ctxt.t()) :: :ok | {:error, reason :: atom, info :: binary}
@doc """
Return the signing key to be used for verification based on access key ID
provided in the signature verification context.
Return an error if there is no valid secret for the information provided.
This will in turn halt the verification process resulting in signature
verification failure.
| Returns | When |
|:------- |:---- |
| `{:error, :unknown, "access_key"}` | Access key is unknown |
| `{:error, atom, binary}` | For other errors as defined by the implementation |
| `{:ok, binary}` | Valid signing key is generated |
"""
@callback signing_key(ctxt :: Ctxt.t()) ::
{:ok, key :: binary} | {:error, reason :: atom, info :: binary}
end
|
lib/sigaws/provider.ex
| 0.869202 | 0.610294 |
provider.ex
|
starcoder
|
defmodule GraphQL.Query do
@moduledoc """
Functions to create and modify query representations.
"""
alias GraphQL.{Node, Variable}
@enforce_keys [:operation, :name, :fields]
defstruct [:operation, :name, :fields, :fragments, :variables]
@typedoc """
A struct that represents a GraphQL query or mutation.
The `:operation` field can be `:query`, for a query operation, or `:mutation`,
for a mutation operation.
The `:name` field is the name of the query or mutation. GraphQL does not
require a name for operations, but this struct will enforce its presence in
order to enrich trace and logging information.
The `:fields` field is a list of `GraphQL.Node` structs. This is the
list of root fields of a query or mutation.
The `:fragments` field is also a list of `GraphQL.Node` structs,
but intended to only keep fragment nodes, as they are usually placed after
the root fields in a typical GraphQL query/mutation.
The `:variables` field is a list of `GraphQL.Variable` structs,
that represents the expected variables during the request. Note that this list
is the _definition_ of variables, not the _values_ of them.
"""
@type t :: %__MODULE__{
operation: :query | :mutation,
name: String.t(),
fields: [Node.t()],
fragments: [Node.t()] | nil,
variables: [Variable.t()] | nil
}
@doc """
Creates a new query struct for a 'query' operation from a keyword list.
"""
@spec query(Keyword.t()) :: t()
def query(options) do
options = Keyword.put(options, :operation, :query)
struct(__MODULE__, options)
end
@doc """
Creates a new query struct for a 'mutation' operation from a keyword list.
"""
@spec mutation(Keyword.t()) :: t()
def mutation(options) do
options = Keyword.put(options, :operation, :mutation)
struct(__MODULE__, options)
end
@doc """
Adds a field to a query.
The `field` argument must be a `GraphQL.Node` struct and its
`:node_type` must be `:field`.
## Examples
iex> f1 = GraphQL.Node.field(:field)
%GraphQL.Node{node_type: :field, name: :field}
iex> f2 = GraphQL.Node.field(:other_field)
%GraphQL.Node{node_type: :field, name: :other_field}
iex> q = %GraphQL.Query{operation: :query, name: "MyQuery", fields: [f1]}
%GraphQL.Query{operation: :query, name: "MyQuery", fields: [f1]}
iex> add_field(q, f2)
%GraphQL.Query{name: "MyQuery", operation: :query, fields: [f2, f1]}
"""
@spec add_field(t(), Node.t()) :: t()
def add_field(%__MODULE__{fields: fields} = query, %Node{node_type: :field} = field) do
fields = if(fields == nil, do: [], else: fields)
%__MODULE__{query | fields: [field | fields]}
end
@doc """
Adds a fragment to a query.
The `fragment` argument must be a `GraphQL.Node` struct and its
`:node_type` must be `:fragment`.
## Examples
iex> f1 = GraphQL.Node.fragment("personFields", "Person", [GraphQL.Node.field(:field)])
%GraphQL.Node{node_type: :fragment, name: "personFields", type: "Person", nodes: [%GraphQL.Node{node_type: :field, name: :field}]}
iex> f2 = GraphQL.Node.fragment("userFields", "User", [GraphQL.Node.field(:another_field)])
%GraphQL.Node{node_type: :fragment, name: "userFields", type: "User", nodes: [%GraphQL.Node{node_type: :field, name: :another_field}]}
iex> q = %GraphQL.Query{operation: :query, name: "MyQuery", fields: [], fragments: [f1]}
%GraphQL.Query{operation: :query, name: "MyQuery", fields: [], fragments: [f1]}
iex> add_fragment(q, f2)
%GraphQL.Query{name: "MyQuery", operation: :query, fields: [], fragments: [f2, f1]}
"""
@spec add_fragment(t(), Node.t()) :: t()
def add_fragment(
%__MODULE__{fragments: fragments} = query,
%Node{node_type: :fragment} = fragment
) do
fragments = if(fragments == nil, do: [], else: fragments)
%__MODULE__{query | fragments: [fragment | fragments]}
end
@doc """
Add a new variable to an existing query
## Examples
iex> v1 = %GraphQL.Variable{name: "id", type: "Integer"}
%GraphQL.Variable{name: "id", type: "Integer"}
iex> v2 = %GraphQL.Variable{name: "slug", type: "String"}
%GraphQL.Variable{name: "slug", type: "String"}
iex> q = %GraphQL.Query{operation: :query, name: "MyQuery", fields: [], variables: [v1]}
%GraphQL.Query{operation: :query, name: "MyQuery", fields: [], variables: [v1]}
iex> add_variable(q, v2)
%GraphQL.Query{operation: :query, name: "MyQuery", fields: [], variables: [v2, v1]}
"""
@spec add_variable(t(), Variable.t()) :: t()
def add_variable(%__MODULE__{variables: variables} = query, %Variable{} = variable) do
variables = if(variables == nil, do: [], else: variables)
%__MODULE__{query | variables: [variable | variables]}
end
@doc """
Combine two queries into one query, merging fields, variables and fragments.
The two queries must have the same operation.
"""
@spec merge(t(), t(), String.t()) :: {:ok, t()} | {:error, any()}
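  # For instance (names and fields are illustrative):
  #
  #   q1 = GraphQL.Query.query(name: "A", fields: [GraphQL.Node.field(:a)])
  #   q2 = GraphQL.Query.query(name: "B", fields: [GraphQL.Node.field(:b)])
  #   {:ok, merged} = GraphQL.Query.merge(q1, q2, "AB")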
def merge(
%__MODULE__{operation: operation} = query_a,
%__MODULE__{operation: operation} = query_b,
name
) do
with {:ok, variables} <- merge_variables(query_a.variables || [], query_b.variables || []) do
{:ok,
%__MODULE__{
name: name,
operation: operation,
fields: (query_a.fields || []) ++ (query_b.fields || []),
fragments: (query_a.fragments || []) ++ (query_b.fragments || []),
variables: variables
}}
else
error -> error
end
end
defp merge_variables(set_a, set_b) do
repeated_vars =
for v_a <- set_a, v_b <- set_b, reduce: [] do
acc ->
if GraphQL.Variable.same?(v_a, v_b) do
[v_a | acc]
else
acc
end
end
case repeated_vars do
[] ->
{:ok, set_a ++ set_b}
_ ->
var_names =
repeated_vars
|> Enum.map(&"\"#{&1.name}\"")
|> Enum.join(", ")
{:error, "variables declared twice: #{var_names}"}
end
end
@doc """
Combines a list of queries into one query, merging fields, variables and fragments.
All queries must have the same operation.
"""
@spec merge_many([t()], String.t()) :: {:ok, t()} | {:error, any()}
def merge_many(queries, name \\ nil)
def merge_many([%__MODULE__{} = query], name) do
if name != nil do
{:ok, %__MODULE__{query | name: name}}
else
{:ok, query}
end
end
def merge_many([first_query | remaining_queries], name) do
result =
Enum.reduce_while(remaining_queries, first_query, fn query, result ->
case merge(query, result, name) do
{:ok, merged_query} ->
{:cont, merged_query}
{:error, error} ->
{:halt, {:error, error}}
end
end)
case result do
%__MODULE__{} = query -> {:ok, query}
error -> error
end
end
end
|
lib/graphql/query.ex
| 0.921816 | 0.66943 |
query.ex
|
starcoder
|
defmodule Caylir.Graph do
@moduledoc """
Defines a connection to a Cayley instance.
## Graph Definition
To start connecting to a Cayley instance you need to define a connection
module:
defmodule MyGraph do
use Caylir.Graph, otp_app: :my_app
end
If you intend to use initializer modules this module then needs to be
inserted into the supervision tree of your application:
children = [
# ...
MyGraph,
# ...
]
## Graph Configuration
Referring back to the previously mentioned module:
defmodule MyGraph do
use Caylir.Graph, otp_app: :my_app
end
This connection will fetch it's configuration from the
application environment as defined by `:otp_app`:
config :my_app, MyGraph,
host: "cayley.example.com"
As an alternative you can define the configuration
in the module definition itself:
defmodule MyGraph do
use Caylir.Graph,
config: [
host: "cayley.example.com"
]
Both inline and `:otp_app` configuration can be mixed. In this case the
application configuration will overwrite any inline values.
For more information on how (and what) to configure your connection please
refer to the documentation of `Caylir.Graph.Config`.
## Graph Usage
### Querying Data
By default a query is sent to the Gizmo language endpoint:
MyGraph.query("graph.Vertex('graph').Out('connection').All()")
If you want to send a query to a language specific endpoint not configured
as your default (e.g. if you are mixing Gizmo and GraphQL queries) you can
pass the `:language` option:
MyGraph.query(query, language: :gizmo)
MyGraph.query(query, language: :graphql)
If you want to limit the number of results you can pass the `:limit` option:
MyGraph.query(query, limit: 3)
MyGraph.query(query, limit: -1)
Passing `-1` will return "unlimited" results while also deactivating any
potential default limits implied by Cayley itself.
By default a query has a timeout of 5 seconds (5000ms). If you want to change
that timeout to a lower or higher value you can pass the `:timeout` option:
MyGraph.query(query, timeout: 250)
### Writing Data
You can write a single quad:
MyGraph.write(%{
subject: "graph",
predicate: "connection",
object: "target"
})
Or multiple quads at once:
MyGraph.write([quad_1, quad_2])
### Deleting Data
You can delete a single quad:
MyGraph.delete(%{
subject: "graph",
predicate: "connection",
object: "target"
})
Or multiple quads at once:
MyGraph.delete([quad_1, quad_2])
"""
defmacro __using__(opts) do
quote do
alias Caylir.Graph.Config
alias Caylir.Graph.Initializer
alias Caylir.Graph.Request
@behaviour unquote(__MODULE__)
@otp_app unquote(opts[:otp_app])
@config unquote(opts[:config] || [])
def child_spec(_) do
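      # Derives a per-graph initializer name (<GraphModule>.Initializer) so
      # multiple graphs can be supervised side by side.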
initializer = Module.concat(__MODULE__, Initializer)
spec = %{graph: __MODULE__, initializer: initializer}
%{
id: initializer,
start: {Initializer, :start_link, [spec]}
}
end
def config, do: Config.config(@otp_app, __MODULE__, @config)
def delete(quad, opts \\ []), do: Request.delete(quad, __MODULE__, opts)
def query(query, opts \\ []), do: Request.query(query, __MODULE__, opts)
def shape(query, opts \\ []), do: Request.shape(query, __MODULE__, opts)
def write(quad, opts \\ []), do: Request.write(quad, __MODULE__, opts)
end
end
@type t_quad :: %{
:subject => binary,
:predicate => binary,
:object => binary,
optional(:label) => binary
}
@type t_delete :: :ok | {:error, String.t()}
@type t_query :: any | {:error, String.t()}
@type t_write :: :ok | {:error, String.t()}
@doc """
Returns a supervisable graph child_spec.
"""
@callback child_spec(_ignored :: term) :: Supervisor.child_spec()
@doc """
Returns the graph configuration.
"""
@callback config() :: Keyword.t()
@doc """
Deletes a quad from the graph.
"""
@callback delete(quad :: t_quad | [t_quad], opts :: Keyword.t()) :: t_delete
@doc """
Queries the graph.
"""
@callback query(query :: String.t(), opts :: Keyword.t()) :: t_query
@doc """
Gets the shape of a query.
"""
@callback shape(query :: String.t(), opts :: Keyword.t()) :: t_query
@doc """
Writes a quad to the graph.
"""
@callback write(quad :: t_quad | [t_quad], opts :: Keyword.t()) :: t_write
end
|
lib/caylir/graph.ex
| 0.831006 | 0.506713 |
graph.ex
|
starcoder
|
defmodule Kiq.Pool do
@moduledoc """
Kiq maintains a fixed pool of Redix connections that are used by processes to
communicate with Redis.
The pool is a fixed set of supervised connections. Calling `checkout/1` will
return a random connection pid. The Redis connection is fully duplexed,
making it safe for multiple processes to use the same connection
simultaneously. Connections never need to be checked back in.
## Ad-Hoc Usage
Each supervised Kiq instance will have its own pool. The pool name is derived
from the module name, i.e. the module `MyApp.Kiq` would have a supervised
pool registered as `MyApp.Kiq.Pool`. The name can be used to checkout
connections and execute commands in the console.
For example, to get a list of the queues that are currently active:
MyApp.Kiq.Pool
|> Kiq.Pool.checkout()
|> Redix.command(["SMEMBERS", "queues"])
"""
use GenServer
alias Kiq.Config
@type options :: [config: Config.t(), name: GenServer.name()]
defmodule State do
@moduledoc false
defstruct [:pool_name, :pool_size]
end
@doc false
@spec start_link(opts :: options()) :: GenServer.on_start()
def start_link(opts) do
{name, opts} = Keyword.pop(opts, :name)
GenServer.start_link(__MODULE__, opts, name: name)
end
@doc """
Get the pid of a supervised Redix connection.
Connections are randomly allocated and don't need to be checked back in.
"""
@spec checkout(GenServer.server()) :: pid()
def checkout(server \\ __MODULE__) do
GenServer.call(server, :checkout)
end
@doc false
@spec worker_name(binary() | atom(), non_neg_integer()) :: module()
def worker_name(pool_name, index) do
Module.concat([pool_name, "N#{index}"])
end
# Server
@impl GenServer
def init(config: %Config{pool_name: pool_name, pool_size: pool_size}) do
{:ok, %State{pool_name: pool_name, pool_size: pool_size}}
end
@impl GenServer
def handle_call(:checkout, _from, state) do
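    # A positive unique integer modulo the pool size picks a worker index,
    # spreading checkouts across connections without tracking extra state.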
index = rem(System.unique_integer([:positive]), state.pool_size)
pid =
state.pool_name
|> worker_name(index)
|> Process.whereis()
{:reply, pid, state}
end
end
|
lib/kiq/pool.ex
| 0.82887 | 0.401981 |
pool.ex
|
starcoder
|
defmodule Plymio.Funcio.Enum.Map.Collate do
@moduledoc ~S"""
Map and Collate Patterns for Enumerables.
These functions map the elements of an *enum* and then collates the
results according to one of the defined *patterns*.
See `Plymio.Funcio` for overview and documentation terms.
"""
use Plymio.Funcio.Attribute
@type error :: Plymio.Funcio.error()
@type opts :: Plymio.Funcio.opts()
@type opzioni :: Plymio.Funcio.opzioni()
import Plymio.Funcio.Error,
only: [
new_error_result: 1
]
import Plymio.Fontais.Guard,
only: [
is_value_unset_or_nil: 1
]
import Plymio.Fontais.Option,
only: [
opts_merge: 1,
opzioni_merge: 1
]
import Plymio.Funcio.Map.Utility,
only: [
reduce_map1_funs: 1
]
import Plymio.Funcio.Enum.Map,
only: [
map_concurrent_enum: 2
]
import Plymio.Funcio.Enum.Collate,
only: [
collate0_enum: 1,
collate1_enum: 1,
collate2_enum: 1
]
@doc ~S"""
`map_collate0_enum/2` takes an *enum* and a *map/1*, applies the
*map/1* to each element of the *enum* and collates the results
according to *pattern 0*.
## Examples
iex> fun = fn v -> {:ok, v} end
...> [1,2,3] |> map_collate0_enum(fun)
{:ok, [1,2,3]}
iex> fun = fn
...> 3 -> {:error, %ArgumentError{message: "argument is 3"}}
...> v -> {:ok, v}
...> end
...> {:error, error} = [1,2,3] |> map_collate0_enum(fun)
...> error |> Exception.message
"argument is 3"
iex> fun = :not_a_fun
...> {:error, error} = [1,2,3] |> map_collate0_enum(fun)
...> error |> Exception.message
"map/1 function invalid, got: :not_a_fun"
iex> fun = fn v -> {:ok, v} end
...> {:error, error} = 42 |> map_collate0_enum(fun)
...> error |> Exception.message
...> |> String.starts_with?("protocol Enumerable not implemented for 42")
true
"""
@since "0.1.0"
@spec map_collate0_enum(any, any) :: {:ok, list} | {:error, error}
def map_collate0_enum(enum, fun) do
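    # Halt the reduce on the first {:error, exception} result; otherwise
    # accumulate unwrapped values in reverse order and flip them at the end.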
with {:ok, fun} <- fun |> reduce_map1_funs do
try do
enum
|> Enum.reduce_while(
[],
fn value, values ->
value
|> fun.()
|> case do
{:error, %{__struct__: _}} = result ->
{:halt, result}
{:ok, value} ->
{:cont, [value | values]}
value ->
{:halt, new_error_result(m: "pattern0 result invalid", v: value)}
end
end
)
|> case do
{:error, %{__exception__: true}} = result -> result
values -> {:ok, values |> Enum.reverse()}
end
rescue
error ->
{:error, error}
end
else
{:error, %{__exception__: true}} = result -> result
end
end
@doc ~S"""
`map_concurrent_collate0_enum/2` works like `map_collate0_enum/2`
except that the *map/1* function is applied to each element of the
*enum* concurrently.
## Examples
iex> fun = fn v -> {:ok, v} end
...> [1,2,3] |> map_concurrent_collate0_enum(fun)
{:ok, [1,2,3]}
iex> fun = fn
...> 3 -> {:error, %ArgumentError{message: "argument is 3"}}
...> v -> {:ok, v}
...> end
...> {:error, error} = [1,2,3] |> map_concurrent_collate0_enum(fun)
...> error |> Exception.message
"argument is 3"
iex> fun = :not_a_fun
...> {:error, error} = [1,2,3] |> map_concurrent_collate0_enum(fun)
...> error |> Exception.message
"map/1 function invalid, got: :not_a_fun"
iex> fun = fn v -> {:ok, v} end
...> {:error, error} = 42 |> map_concurrent_collate0_enum(fun)
...> error |> Exception.message
...> |> String.starts_with?("protocol Enumerable not implemented for 42")
true
"""
@since "0.1.0"
@spec map_concurrent_collate0_enum(any, any) :: {:ok, list} | {:error, error}
def map_concurrent_collate0_enum(enum, fun) do
try do
with {:ok, results} <- enum |> map_concurrent_enum(fun),
{:ok, _} = result <- results |> collate0_enum do
result
else
{:error, %{__exception__: true}} = result -> result
end
rescue
error ->
{:error, error}
end
end
@doc ~S"""
`map_collate0_opts_enum/2` works like `map_collate0_enum/2` but
assumes each `value` in the `{:ok, collated_values}` result is an
*opts* and calls `Plymio.Fontais.Option.opts_merge/1` with
`collated_values` returning, on success, `{:ok, opts}`.
## Examples
iex> fun = fn v -> {:ok, v} end
...> [a: 1, b: 2, c: 3] |> map_collate0_opts_enum(fun)
{:ok, [a: 1, b: 2, c: 3]}
iex> fun = fn v -> {:ok, v} end
...> [[a: 1], [b: 2], [c: 3]] |> map_collate0_opts_enum(fun)
{:ok, [a: 1, b: 2, c: 3]}
iex> fun = fn v -> {:ok, [d: v]} end
...> [1,2,3] |> map_collate0_opts_enum(fun)
{:ok, [d: 1, d: 2, d: 3]}
iex> fun = fn
...> {k,3} -> {:error, %ArgumentError{message: "argument for #{inspect k} is 3"}}
...> v -> {:ok, v}
...> end
...> {:error, error} = [a: 1, b: 2, c: 3] |> map_collate0_opts_enum(fun)
...> error |> Exception.message
"argument for :c is 3"
"""
@since "0.1.0"
@spec map_collate0_opts_enum(any, any) :: {:ok, opts} | {:error, error}
def map_collate0_opts_enum(enum, fun) do
with {:ok, values} <- enum |> map_collate0_enum(fun),
{:ok, _opts} = result <- values |> opts_merge do
result
else
{:error, %{__exception__: true}} = result -> result
end
end
@doc ~S"""
`map_collate0_opzioni_enum/2` works like `map_collate0_enum/2` but
assumes each `value` in the `{:ok, collated_values}` result is an
*opzioni* and calls `Plymio.Fontais.Option.opzioni_merge/1` with
`collated_values` returning, on success, `{:ok, opzioni}`.
## Examples
iex> fun = fn v -> {:ok, v} end
...> [a: 1, b: 2, c: 3] |> map_collate0_opzioni_enum(fun)
{:ok, [[a: 1, b: 2, c: 3]]}
iex> fun = fn v -> {:ok, v} end
...> [[a: 1], [b: 2], [c: 3]] |> map_collate0_opzioni_enum(fun)
{:ok, [[a: 1], [b: 2], [c: 3]]}
iex> fun = fn v -> {:ok, v} end
...> [[[a: 1], [b: 2]], [c: 3], [[d: 4]]] |> map_collate0_opzioni_enum(fun)
{:ok, [[a: 1], [b: 2], [c: 3], [d: 4]]}
iex> fun = fn v -> {:ok, [[d: v]]} end
...> [1,2,3] |> map_collate0_opzioni_enum(fun)
{:ok, [[d: 1], [d: 2], [d: 3]]}
iex> fun = fn
...> [{k,3}] -> {:error, %ArgumentError{message: "argument for #{inspect k} is 3"}}
...> v -> {:ok, v}
...> end
...> {:error, error} = [[[a: 1], [b: 2]], [c: 3], [[d: 4]]] |> map_collate0_opzioni_enum(fun)
...> error |> Exception.message
"argument for :c is 3"
"""
@since "0.1.0"
@spec map_collate0_opzioni_enum(any, any) :: {:ok, opzioni} | {:error, error}
def map_collate0_opzioni_enum(enum, fun) do
with {:ok, values} <- enum |> map_collate0_enum(fun),
{:ok, _opts} = result <- values |> opzioni_merge do
result
else
{:error, %{__exception__: true}} = result -> result
end
end
@doc ~S"""
`map_concurrent_collate0_opts_enum/2` works like
`map_collate0_opts_enum/2` but maps each element of the *enum*
concurrently.
## Examples
iex> fun = fn v -> {:ok, v} end
...> [a: 1, b: 2, c: 3] |> map_concurrent_collate0_opts_enum(fun)
{:ok, [a: 1, b: 2, c: 3]}
iex> fun = fn v -> {:ok, v} end
...> [[a: 1], [b: 2], [c: 3]] |> map_concurrent_collate0_opts_enum(fun)
{:ok, [a: 1, b: 2, c: 3]}
iex> fun = fn v -> {:ok, [d: v]} end
...> [1,2,3] |> map_concurrent_collate0_opts_enum(fun)
{:ok, [d: 1, d: 2, d: 3]}
iex> fun = fn
...> {k,3} -> {:error, %ArgumentError{message: "argument for #{inspect k} is 3"}}
...> v -> {:ok, v}
...> end
...> {:error, error} = [a: 1, b: 2, c: 3] |> map_concurrent_collate0_opts_enum(fun)
...> error |> Exception.message
"argument for :c is 3"
"""
@since "0.1.0"
@spec map_concurrent_collate0_opts_enum(any, any) :: {:ok, opts} | {:error, error}
def map_concurrent_collate0_opts_enum(enum, fun) do
with {:ok, values} <- enum |> map_concurrent_collate0_enum(fun),
{:ok, _opts} = result <- values |> opts_merge do
result
else
{:error, %{__exception__: true}} = result -> result
end
end
@doc ~S"""
`map_concurrent_collate0_opzioni_enum/2` works like
`map_collate0_opzioni_enum/2` but maps each element of the *enum*
concurrently.
## Examples
iex> fun = fn v -> {:ok, v} end
...> [a: 1, b: 2, c: 3] |> map_concurrent_collate0_opzioni_enum(fun)
{:ok, [[a: 1, b: 2, c: 3]]}
iex> fun = fn v -> {:ok, v} end
...> [[a: 1], [b: 2], [c: 3]] |> map_concurrent_collate0_opzioni_enum(fun)
{:ok, [[a: 1], [b: 2], [c: 3]]}
iex> fun = fn v -> {:ok, v} end
...> [[[a: 1], [b: 2]], [c: 3], [[d: 4]]] |> map_concurrent_collate0_opzioni_enum(fun)
{:ok, [[a: 1], [b: 2], [c: 3], [d: 4]]}
iex> fun = fn v -> {:ok, [[d: v]]} end
...> [1,2,3] |> map_concurrent_collate0_opzioni_enum(fun)
{:ok, [[d: 1], [d: 2], [d: 3]]}
iex> fun = fn
...> [{k,3}] -> {:error, %ArgumentError{message: "argument for #{inspect k} is 3"}}
...> v -> {:ok, v}
...> end
...> {:error, error} = [[[a: 1], [b: 2]], [c: 3], [[d: 4]]] |> map_concurrent_collate0_opzioni_enum(fun)
...> error |> Exception.message
"argument for :c is 3"
"""
@since "0.1.0"
@spec map_concurrent_collate0_opzioni_enum(any, any) :: {:ok, opzioni} | {:error, error}
def map_concurrent_collate0_opzioni_enum(enum, fun) do
    with {:ok, values} <- enum |> map_concurrent_collate0_enum(fun),
{:ok, _opts} = result <- values |> opzioni_merge do
result
else
{:error, %{__exception__: true}} = result -> result
end
end
@doc ~S"""
`map_collate1_enum/2` takes an *enum* and a *map/1*, applies the
*map/1* to each element of the *enum* and collates the results
according to *pattern 1*.
## Examples
iex> fun = fn v -> {:ok, v} end
...> [1,2,3] |> map_collate1_enum(fun)
{:ok, [1,2,3]}
iex> fun = fn
...> 3 -> 3
...> v -> {:ok, v}
...> end
...> [1,2,3] |> map_collate1_enum(fun)
{:ok, [1,2,3]}
iex> fun = fn
...> 3 -> {:error, %ArgumentError{message: "argument is 3"}}
...> v -> {:ok, v}
...> end
...> {:error, error} = [1,2,3] |> map_collate1_enum(fun)
...> error |> Exception.message
"argument is 3"
iex> fun = :not_a_fun
...> {:error, error} = [1,2,3] |> map_collate1_enum(fun)
...> error |> Exception.message
"map/1 function invalid, got: :not_a_fun"
iex> fun = fn v -> {:ok, v} end
...> {:error, error} = 42 |> map_collate1_enum(fun)
...> error |> Exception.message
...> |> String.starts_with?("protocol Enumerable not implemented for 42")
true
"""
@since "0.1.0"
@spec map_collate1_enum(any, any) :: {:ok, list} | {:error, error}
def map_collate1_enum(enum, fun) do
with {:ok, fun} <- fun |> reduce_map1_funs do
try do
enum
|> Enum.reduce_while(
[],
fn value, values ->
value
|> fun.()
|> case do
{:error, %{__struct__: _}} = result -> {:halt, result}
{:ok, value} -> {:cont, [value | values]}
value -> {:cont, [value | values]}
end
end
)
|> case do
{:error, %{__exception__: true}} = result -> result
values -> {:ok, values |> Enum.reverse()}
end
rescue
error ->
{:error, error}
end
else
{:error, %{__exception__: true}} = result -> result
end
end
@doc ~S"""
`map_concurrent_collate1_enum/2` works like `map_collate1_enum/2`
but maps each element of the *enum* concurrently.
## Examples
iex> fun = fn v -> {:ok, v} end
...> [1,2,3] |> map_concurrent_collate1_enum(fun)
{:ok, [1,2,3]}
iex> fun = fn
...> 3 -> 3
...> v -> {:ok, v}
...> end
...> [1,2,3] |> map_concurrent_collate1_enum(fun)
{:ok, [1,2,3]}
iex> fun = fn
...> 3 -> {:error, %ArgumentError{message: "argument is 3"}}
...> v -> {:ok, v}
...> end
...> {:error, error} = [1,2,3] |> map_concurrent_collate1_enum(fun)
...> error |> Exception.message
"argument is 3"
iex> fun = :not_a_fun
...> {:error, error} = [1,2,3] |> map_concurrent_collate1_enum(fun)
...> error |> Exception.message
"map/1 function invalid, got: :not_a_fun"
iex> fun = fn v -> {:ok, v} end
...> {:error, error} = 42 |> map_concurrent_collate1_enum(fun)
...> error |> Exception.message
...> |> String.starts_with?("protocol Enumerable not implemented for 42")
true
"""
@since "0.1.0"
@spec map_concurrent_collate1_enum(any, any) :: {:ok, list} | {:error, error}
def map_concurrent_collate1_enum(enum, fun) do
try do
with {:ok, results} <- enum |> map_concurrent_enum(fun),
{:ok, _} = result <- results |> collate1_enum do
result
else
{:error, %{__exception__: true}} = result -> result
end
rescue
error ->
{:error, error}
end
end
@doc ~S"""
`map_collate2_enum/2` takes an *enum* and a *map/1*, applies the
*map/1* to each element of the *enum* and collates the results
according to *pattern 2* but dropping results that are either `nil` or [*the unset value*](https://hexdocs.pm/plymio_fontais/Plymio.Fontais.html#module-the-unset-value).
## Examples
iex> fun = fn v -> {:ok, v} end
...> [1,2,3] |> map_collate2_enum(fun)
{:ok, [1,2,3]}
iex> fun = fn
...> 3 -> {:error, %ArgumentError{message: "argument is 3"}}
...> v -> {:ok, v}
...> end
...> {:error, error} = [1,2,3] |> map_collate2_enum(fun)
...> error |> Exception.message
"argument is 3"
iex> fun = fn
...> 1 -> nil
...> 3 -> nil
...> 5 -> Plymio.Fontais.Guard.the_unset_value()
...> v -> {:ok, v}
...> end
...> [1,2,3,4,5] |> map_collate2_enum(fun)
{:ok, [2,4]}
iex> fun1 = fn
...> 1 -> nil
...> 3 -> nil
...> 5 -> Plymio.Fontais.Guard.the_unset_value()
...> v -> {:ok, v}
...> end
...> fun2 = fn
...> v when Plymio.Fontais.Guard.is_value_unset_or_nil(v) -> 42
...> {:ok, v} -> {:ok, v * v * v}
...> end
...> [1,2,3,4,5] |> map_collate2_enum([fun1, fun2])
{:ok, [42,8,42,64,42]}
iex> fun = :not_a_fun
...> {:error, error} = [1,2,3] |> map_collate2_enum(fun)
...> error |> Exception.message
"map/1 function invalid, got: :not_a_fun"
iex> fun = fn v -> {:ok, v} end
...> {:error, error} = 42 |> map_collate2_enum(fun)
...> error |> Exception.message
...> |> String.starts_with?("protocol Enumerable not implemented for 42")
true
"""
@since "0.1.0"
@spec map_collate2_enum(any, any) :: {:ok, list} | {:error, error}
def map_collate2_enum(enum, fun) do
with {:ok, fun} <- fun |> reduce_map1_funs do
try do
enum
|> Enum.reduce_while(
[],
fn value, values ->
value
|> fun.()
|> case do
x when is_value_unset_or_nil(x) -> {:cont, values}
{:error, %{__struct__: _}} = result -> {:halt, result}
{:ok, value} -> {:cont, [value | values]}
value -> {:cont, [value | values]}
end
end
)
|> case do
{:error, %{__exception__: true}} = result -> result
values -> {:ok, values |> Enum.reverse()}
end
rescue
error ->
{:error, error}
end
else
{:error, %{__exception__: true}} = result -> result
end
end
@doc ~S"""
`map_concurrent_collate2_enum/2` works like `map_collate2_enum/2`
but maps each element of the *enum* concurrently.
## Examples
iex> fun = fn v -> {:ok, v} end
...> [1,2,3] |> map_concurrent_collate2_enum(fun)
{:ok, [1,2,3]}
iex> fun = fn
...> 3 -> {:error, %ArgumentError{message: "argument is 3"}}
...> v -> {:ok, v}
...> end
...> {:error, error} = [1,2,3] |> map_concurrent_collate2_enum(fun)
...> error |> Exception.message
"argument is 3"
iex> fun = fn
...> 1 -> nil
...> 3 -> nil
...> 5 -> Plymio.Fontais.Guard.the_unset_value()
...> v -> {:ok, v}
...> end
...> [1,2,3,4,5] |> map_concurrent_collate2_enum(fun)
{:ok, [2,4]}
iex> fun1 = fn
...> 1 -> nil
...> 3 -> nil
...> 5 -> Plymio.Fontais.Guard.the_unset_value()
...> v -> {:ok, v}
...> end
...> fun2 = fn
...> v when Plymio.Fontais.Guard.is_value_unset_or_nil(v) -> 42
...> {:ok, v} -> {:ok, v * v * v}
...> end
...> [1,2,3,4,5] |> map_concurrent_collate2_enum([fun1, fun2])
{:ok, [42,8,42,64,42]}
iex> fun = :not_a_fun
...> {:error, error} = [1,2,3] |> map_concurrent_collate2_enum(fun)
...> error |> Exception.message
"map/1 function invalid, got: :not_a_fun"
iex> fun = fn v -> {:ok, v} end
...> {:error, error} = 42 |> map_concurrent_collate2_enum(fun)
...> error |> Exception.message
...> |> String.starts_with?("protocol Enumerable not implemented for 42")
true
"""
@since "0.1.0"
@spec map_concurrent_collate2_enum(any, any) :: {:ok, list} | {:error, error}
def map_concurrent_collate2_enum(enum, fun) do
try do
with {:ok, results} <- enum |> map_concurrent_enum(fun),
{:ok, _} = result <- results |> collate2_enum do
result
else
{:error, %{__exception__: true}} = result -> result
end
rescue
error ->
{:error, error}
end
end
end
|
lib/funcio/enum/map/collate.ex
| 0.834576 | 0.429429 |
collate.ex
|
starcoder
|
defmodule Matrix.ConnectionManager do
@moduledoc """
Handles all communication between agent centers in the cluster.
This module is used for registration / unregistration of new agent centers.
It also provides the necessary functions for the handshake mechanism between a new
agent center and the master node.
"""
import Matrix.Retry
require Logger
alias Matrix.{Env, AgentCenter, Cluster, Agents, AgentManager}
@doc """
Registers non-master node to cluster.
"""
@spec register_self :: {:ok | :error}
def register_self do
register_self(master_node: Env.is_master_node?)
end
defp register_self(master_node: false) do
url = "#{Env.master_node_url}/node"
body = Poison.encode! %{data: [Env.this]}
headers = [
{"Content-Type", "application/json"}
]
case HTTPoison.post(url, body, headers) do
{:ok, %HTTPoison.Response{status_code: 200}} ->
Logger.info "Node registered successfully"
:ok
{:ok, response} ->
body = Poison.decode! response.body
Logger.error "Node registration failed: #{body["error"]}"
:error
_ ->
Logger.error "Node registration failed: Unknown error"
:error
end
end
defp register_self(master_node: true), do: nil
@doc """
Registers the given agent center in the cluster.
If this node is the master, the rest of the cluster is updated with the new agent center.
A successful handshake between the new agent center and the master is required
before the new node is added to the cluster.
## Example
ConnectionManager.register_agent_center(%AgentCenter{aliaz: "Mars", address: "MilkyWay"})
"""
@spec register_agent_center(agent_center :: Matrix.AgentCenter.t) :: {{:ok, String.t} | {:error, String.t}}
def register_agent_center(agent_center) do
register_agent_center(agent_center, master_node: Env.is_master_node?)
end
defp register_agent_center(agent_center, master_node: true) do
case handshake(agent_center) do
{:ok, _} ->
add_agent_center(agent_center)
result = {:error, _} ->
Logger.error "Agent center '#{agent_center.aliaz}' handshake failed"
remove_agent_center(agent_center.aliaz)
result
end
end
defp register_agent_center(agent_center, master_node: false) do
add_agent_center(agent_center)
end
defp add_agent_center(agent_center) do
case Cluster.register_node(agent_center) do
:ok ->
Logger.warn "Agent center '#{agent_center.aliaz}' registered successfully"
{:ok, ""}
:exists ->
{:error, "Node already exists"}
end
end
@doc """
Clears agent-center-related data from this node and
sends a message to the other agent centers to do the same.
## Example
ConnectionManager.remove_agent_center("Mars")
"""
@spec remove_agent_center(aliaz :: String.t) :: :ok
def remove_agent_center(aliaz) do
clear_agent_center_data(aliaz)
agent_centers()
|> Enum.each(fn %AgentCenter{address: address} ->
url = "#{address}/node/#{aliaz}"
HTTPoison.delete(url)
end)
end
@doc """
Deletes all data related to the given agent center:
* agent types
* running agents
The agent center is removed from the cluster as well.
## Example
ConnectionManager.clear_agent_center_data("Mars")
"""
@spec clear_agent_center_data(agent_center :: String.t) :: any
def clear_agent_center_data(agent_center) do
Cluster.unregister_node(agent_center)
Agents.delete_types_for(agent_center)
Agents.delete_running_for(agent_center)
Logger.warn "'#{agent_center}' removed from cluster"
# Update clients via web sockets
Matrix.Endpoint.broadcast! "agents", "types:update", Agents.types
Matrix.Endpoint.broadcast! "agents", "running:update", %{agents: Agents.running}
end
@doc """
Returns list of all agent centers in cluster except this one.
"""
@spec agent_centers :: list(Matrix.AgentCenter.t)
def agent_centers do
Cluster.nodes
|> Enum.reject(fn %AgentCenter{aliaz: aliaz} ->
Env.this_aliaz == aliaz
end)
end
defp handshake(agent_center) do
agent_center
|> get_new_agent_center_agent_types
|> update_other_agent_centers
|> update_new_agent_center
end
defp update_other_agent_centers({:ok, agent_center}) do
agent_center
|> send_new_agent_center
|> send_new_agent_center_agent_types
end
defp update_other_agent_centers({:error, reason}), do: {:error, reason}
defp update_new_agent_center({:ok, agent_center}) do
agent_center
|> send_agent_centers
|> send_agent_types
|> send_running_agents
end
defp update_new_agent_center({:error, reason}), do: {:error, reason}
defp get_new_agent_center_agent_types(agent_center) do
url = "#{agent_center.address}/agents/classes"
retry delay: 500, count: 1 do
case HTTPoison.get(url) do
{:ok, %HTTPoison.Response{status_code: 200, body: body}} ->
Poison.decode!(body)
|> AgentManager.add_agent_types
true
_ ->
Logger.warn "[RETRY] GET #{url}"
false
end
after
{:ok, agent_center}
else
{:error, :handshake_failed}
end
end
defp send_new_agent_center(agent_center) do
agent_centers()
|> Enum.map(fn %AgentCenter{address: address} ->
url = "#{address}/node"
body = Poison.encode! %{data: [agent_center]}
headers = [
{"Content-Type", "application/json"}
]
retry_post url: url, body: body, headers: headers
end)
|> Enum.any?(fn {status, _} -> status == :error end)
|> case do
false -> {:ok, agent_center}
true -> {:error, :handshake_failed}
end
end
defp send_new_agent_center_agent_types({:ok, agent_center}) do
agent_centers()
|> Enum.map(fn %AgentCenter{address: address} ->
url = "#{address}/agents/classes"
body = Poison.encode! %{data: %{agent_center.aliaz => Agents.types_for(agent_center.aliaz)}}
headers = [
{"Content-Type", "application/json"}
]
retry_post url: url, body: body, headers: headers
end)
|> Enum.any?(fn {status, _} -> status == :error end)
|> case do
false -> {:ok, agent_center}
true -> {:error, :handshake_failed}
end
end
defp send_new_agent_center_agent_types(error), do: error
defp send_agent_centers(agent_center) do
url = "#{agent_center.address}/node"
body = Poison.encode! %{data: Cluster.nodes}
headers = [
{"Content-Type", "application/json"}
]
case retry_post url: url, body: body, headers: headers do
{:ok, _} -> {:ok, agent_center}
error -> error
end
end
defp send_agent_types({:ok, agent_center}) do
url = "#{agent_center.address}/agents/classes"
body = Poison.encode! %{data: Agents.types}
headers = [
{"Content-Type", "application/json"}
]
case retry_post url: url, body: body, headers: headers do
{:ok, _} -> {:ok, agent_center}
error -> error
end
end
defp send_agent_types(error), do: error
defp send_running_agents({:ok, agent_center}) do
url = "#{agent_center.address}/agents/running"
body = Poison.encode! %{data: Agents.running_per_agent_center}
headers = [
{"Content-Type", "application/json"}
]
case retry_post url: url, body: body, headers: headers do
{:ok, _} -> {:ok, agent_center}
error -> error
end
end
defp send_running_agents(error), do: error
end
|
lib/matrix/connection_manager.ex
| 0.868172 | 0.489626 |
connection_manager.ex
|
starcoder
|
defmodule Comeonin do
@moduledoc """
Comeonin is a password hashing library that aims to make the
secure validation of passwords as straightforward as possible.
It also provides extensive documentation to help
developers keep their apps secure.
Comeonin supports bcrypt and pbkdf2_sha512.
## Use
This module offers the following convenience functions:
* create_hash -- check password strength before hashing it
* create_user -- update a map with the password hash
See the documentation for each function for more details.
If you do not need this extra functionality, you can hash a password
by using the `hashpwsalt` function -- using either Comeonin.Bcrypt or
Comeonin.Pbkdf2.
See each module's documentation for more information about
all the available options.
If you want more control over the generation of the salt, and, in
the case of pbkdf2, the length of salt, you can use the `gen_salt`
function and then pass the output to the `hashpass` function.
To check a password against the stored hash, use the `checkpw`
function. This takes two arguments: the plaintext password and
the stored hash.
There is also a `dummy_checkpw` function, which takes no arguments
and is to be used when the username cannot be found. It performs a hash,
but then returns false. This can be used to make user enumeration more
difficult. If an attacker already knows, or can guess, the username,
this function will not be of any use, and so if you are going to use
this function, it should be used with a policy of creating usernames
that are not made public and are difficult to guess.
## Choosing an algorithm
Bcrypt and pbkdf2_sha512 are both highly secure key derivation functions.
They have no known vulnerabilities and their algorithms have been used
and widely reviewed for at least 10 years. They are also designed
to be `future-adaptable` (see the section below about speed / complexity
for more details), and so we do not recommend one over the other.
However, if your application needs to use a hashing function that has been
recommended by a recognized standards body, then you will need to
use pbkdf2_sha512, which has been recommended by NIST.
## Adjusting the speed / complexity of bcrypt and pbkdf2
Both bcrypt and pbkdf2 are designed to be computationally intensive and
slow. This limits the number of attempts an attacker can make within a
certain time frame. In addition, they can be configured to run slower,
which can help offset some of the hardware improvements made over time.
It is recommended to make the key derivation function as slow as the
user can tolerate. The actual recommended time for the function will vary
depending on the nature of the application. According to the following NIST
recommendations (http://csrc.nist.gov/publications/nistpubs/800-132/nist-sp800-132.pdf),
having the function take several seconds might be acceptable if the user
only has to login once every session. However, if an application requires
the user to login several times an hour, it would probably be better to
limit the hashing function to about 250 milliseconds.
To help you decide how slow to make the function, this module provides
convenience timing functions for bcrypt and pbkdf2.
## Further information
Visit our wiki (https://github.com/elixircnx/comeonin/wiki)
for links to further information about these and related issues.
"""
alias Comeonin.Config
alias Comeonin.Password
@doc """
A function to help the developer decide how many log_rounds to use
when using bcrypt.
The number of log_rounds can be increased to make the bcrypt hashing
function more complex, and slower. The minimum number is 4 and the maximum is 31.
The default is 12, but this is not necessarily the recommended number.
The ideal number of log_rounds will depend on the nature of your application
and the hardware being used.
The `bcrypt_log_rounds` value can be set in the config file. See the
documentation for `Comeonin.Config` for more details.
"""
def time_bcrypt(log_rounds \\ 12) do
salt = Comeonin.Bcrypt.gen_salt(log_rounds)
{time, _} = :timer.tc(Comeonin.Bcrypt, :hashpass, ["password", salt])
Mix.shell.info "Log rounds: #{log_rounds}, Time: #{div(time, 1000)} ms"
end
@doc """
A function to help the developer decide how many rounds to use
when using pbkdf2.
The number of rounds can be increased to make the pbkdf2 hashing function slower.
The maximum number of rounds is 4294967295. The default is 60_000, but this
is not necessarily the recommended number. The ideal number of log_rounds
will depend on the nature of your application and the hardware being used.
The `pbkdf2_rounds` value can be set in the config file. See the
documentation for `Comeonin.Config` for more details.
"""
def time_pbkdf2(rounds \\ 60_000) do
salt = Comeonin.Pbkdf2.gen_salt
{time, _} = :timer.tc(Comeonin.Pbkdf2, :hashpass, ["password", salt, rounds])
Mix.shell.info "Rounds: #{rounds}, Time: #{div(time, 1000)} ms"
end
@doc """
A function that provides options to check the strength of a password
before hashing it. The password is then hashed only if the password is
considered strong enough. For more details about password strength,
read the documentation for the Comeonin.Password module.
The default hashing algorithm is bcrypt, but this can be changed by
setting the value of `crypto_mod` to `:pbkdf2` in the config file.
## Options
There are two options:
* min_length -- minimum allowable length of the password
* extra_chars -- check for punctuation characters and digits
The default value for min_length is 8 characters if extra_chars is true,
but 12 characters if extra_chars is false. extra_chars is true by default.
## Examples
The following examples will produce password hashes:
Comeonin.create_hash("longpassword", [extra_chars: false])
Comeonin.create_hash("passwordwithjustletters", [min_length: 16, extra_chars: false])
This example will return an error because the password is not long enough for a password
with no punctuation characters or digits:
iex> Comeonin.create_hash("password", [extra_chars: false])
{:error, "The password should be at least 12 characters long."}
This last example will return an error because there are no punctuation characters or
digits in it:
iex> Comeonin.create_hash("password")
{:error, "The password should contain at least one number and one punctuation character."}
"""
def create_hash(password, opts \\ []) do
crypto_mod = Config.get_crypto_mod
# strong_password?/2 returns true or a descriptive error message;
# hash only when the strength check has passed.
case Password.strong_password?(password, opts) do
true -> {:ok, crypto_mod.hashpwsalt(password)}
message -> {:error, message}
end
end
@doc """
This function takes a map with a password in it, removes the password
and adds an entry for the password hash. This can be used after collecting
user data and before adding it to the database.
This uses the `create_hash` function, which can be used to check password
strength before hashing it.
When looking for the password, this function looks for a key which is either
named "password" (a string) or :password (an atom). If it does not find
either key, it will return an error.
As with the `create_hash` function, the password strength checks can be
adjusted through the options passed as the second argument.
## Examples
All of the following will work ok:
%{"name" => "fred", "password" => <PASSWORD>"} |> Comeonin.create_user
%{name: "fred", password: <PASSWORD>"} |> Comeonin.create_user
%{name: "fred", password: "<PASSWORD>"} |> Comeonin.create_user([extra_chars: false])
The next example will return an error because the key "password" or :password
could not be found:
iex> %{["name"] => "fred", ["password", "password_admin"] => <PASSWORD>"} |> Comeonin.create_user
{:error, ~s(We could not find the password. The password key should be either :password or "password".)}
This example will return an error because the password is not long enough:
iex> %{name: "fred", password: "<PASSWORD>"} |> Comeonin.create_user
{:error, "The password should be at least 8 characters long."}
"""
def create_user(user_params, opts \\ [])
def create_user(%{password: password} = user_params, opts) do
Map.delete(user_params, :password) |> create_map(password, :password_hash, opts)
end
def create_user(%{"password" => password} = user_params, opts) do
Map.delete(user_params, "password") |> create_map(password, "password_hash", opts)
end
def create_user(_, _) do
{:error, ~s(We could not find the password. The password key should be either :password or "password".)}
end
defp create_map(user_params, password, hash_key, opts) do
case create_hash(password, opts) do
{:ok, password_hash} -> {:ok, Map.put_new(user_params, hash_key, password_hash)}
{:error, message} -> {:error, message}
end
end
end
|
deps/comeonin/lib/comeonin.ex
| 0.86626 | 0.780537 |
comeonin.ex
|
starcoder
|
defmodule Ecto.Adapters.SQL.Sandbox do
@moduledoc ~S"""
A pool for concurrent transactional tests.
The sandbox pool is implemented on top of an ownership mechanism.
When started, the pool is in automatic mode, which means using
the repository will automatically check connections out as with
any other pool. The only difference is that connections are not
checked back in automatically but by explicitly calling `checkin/2`.
The `mode/2` function can be used to change the pool mode to
`:manual`. In this case, each connection must be explicitly
checked out before use. This is useful when paired with
`checkout/2` which by default wraps the connection in a transaction.
This means developers have a safe mechanism for running concurrent
tests against the database.
## Example
The first step is to configure your database to use the
`Ecto.Adapters.SQL.Sandbox` pool. You set those options in your
`config/config.exs`:
config :my_app, Repo,
pool: Ecto.Adapters.SQL.Sandbox
Since you don't want those options in your production database, we
typically recommend creating a `config/test.exs` and adding the
following to the bottom of your `config/config.exs` file:
import_config "config/#{Mix.env}.exs"
Now with the test database properly configured, you can write
transactional tests:
# At the end of your test_helper.exs
# Set the pool mode to manual for explicit checkouts
Ecto.Adapters.SQL.Sandbox.mode(TestRepo, :manual)
defmodule PostTest do
# Once the mode is manual, tests can also be async
use ExUnit.Case, async: true
setup do
# Explicitly get a connection before each test
:ok = Ecto.Adapters.SQL.Sandbox.checkout(TestRepo)
end
test "create comment" do
# Use the repository as usual
assert %Post{} = TestRepo.insert!(%Post{})
end
end
## Options
Because the sandbox is implemented on top of the
`DBConnection.Ownership` module, you can check the module
documentation to see which options are available to configure
the ownership mode when desired.
"""
@doc """
Returns the begin transaction query for sandbox.
"""
@callback begin_sandbox :: term
@doc """
Returns the rollback transaction query for sandbox.
"""
@callback rollback_sandbox :: term
defmodule Connection do
@moduledoc false
@behaviour DBConnection
def connect({conn_mod, state}) do
case conn_mod.init(state) do
{:ok, state} -> {:ok, {conn_mod, state}}
{:error, _} = err -> err
end
end
def disconnect(err, {conn_mod, state}) do
conn_mod.disconnect(err, state)
end
def checkout(state), do: proxy(:checkout, state, [])
def checkin(state), do: proxy(:checkin, state, [])
def ping(state), do: proxy(:ping, state, [])
def handle_begin(opts, state) do
opts = [mode: :savepoint] ++ opts
proxy(:handle_begin, state, [opts])
end
def handle_commit(opts, state) do
opts = [mode: :savepoint] ++ opts
proxy(:handle_commit, state, [opts])
end
def handle_rollback(opts, state) do
opts = [mode: :savepoint] ++ opts
proxy(:handle_rollback, state, [opts])
end
def handle_prepare(query, opts, state),
do: proxy(:handle_prepare, state, [query, opts])
def handle_execute(query, params, opts, state),
do: proxy(:handle_execute, state, [query, params, opts])
def handle_execute_close(query, params, opts, state),
do: proxy(:handle_execute_close, state, [query, params, opts])
def handle_close(query, opts, state),
do: proxy(:handle_close, state, [query, opts])
def handle_info(msg, state),
do: proxy(:handle_info, state, [msg])
defp proxy(fun, {conn_mod, state}, args) do
result = apply(conn_mod, fun, args ++ [state])
pos = :erlang.tuple_size(result)
:erlang.setelement(pos, result, {conn_mod, :erlang.element(pos, result)})
end
end
defmodule Pool do
@moduledoc false
@behaviour DBConnection.Pool
def start_link(_module, _opts) do
raise "should never be invoked"
end
def child_spec(_module, _opts, _child_opts) do
raise "should never be invoked"
end
def checkout(pool, opts) do
pool_mod = opts[:sandbox_pool]
case pool_mod.checkout(pool, opts) do
{:ok, pool_ref, conn_mod, conn_state} ->
query = opts[:repo].__sql__.begin_sandbox
case sandbox_query(query, opts, conn_mod, conn_state) do
{:ok, _, conn_state} ->
{:ok, pool_ref, Connection, {conn_mod, conn_state}}
{_error_or_disconnect, err, conn_state} ->
pool_mod.disconnect(pool_ref, err, conn_state, opts)
end
error ->
error
end
end
def checkin(pool_ref, {conn_mod, conn_state}, opts) do
pool_mod = opts[:sandbox_pool]
query = opts[:repo].__sql__.rollback_sandbox
case sandbox_query(query, opts, conn_mod, conn_state) do
{:ok, _, conn_state} ->
pool_mod.checkin(pool_ref, conn_state, opts)
{_error_or_disconnect, err, conn_state} ->
pool_mod.disconnect(pool_ref, err, conn_state, opts)
end
end
def disconnect(owner, exception, state, opts) do
opts[:sandbox_pool].disconnect(owner, exception, state, opts)
end
def stop(owner, reason, state, opts) do
opts[:sandbox_pool].stop(owner, reason, state, opts)
end
defp sandbox_query(query, opts, conn_mod, conn_state) do
query = DBConnection.Query.parse(query, opts)
case conn_mod.handle_prepare(query, opts, conn_state) do
{:ok, query, conn_state} ->
query = DBConnection.Query.describe(query, opts)
sandbox_execute(query, opts, conn_mod, conn_state)
other ->
other
end
end
defp sandbox_execute(query, opts, conn_mod, conn_state) do
params = DBConnection.Query.encode(query, [], opts)
conn_mod.handle_execute_close(query, params, opts, conn_state)
end
end
@doc """
Sets the mode for the `repo` pool.
The mode can be `:auto` or `:manual`.
"""
def mode(repo, mode) when mode in [:auto, :manual] do
{name, opts} = repo.__pool__
if opts[:pool] != DBConnection.Ownership do
raise """
cannot configure sandbox with pool #{inspect opts[:pool]}.
To use the SQL Sandbox, configure your repository pool as:
pool: #{inspect __MODULE__}
"""
end
DBConnection.Ownership.ownership_mode(name, mode, opts)
end
@doc """
Checks a connection out for the given `repo`.
The process calling `checkout/2` will own the connection
until it calls `checkin/2` or until it crashes when then
the connection will be automatically reclaimed by the pool.
## Options
* `:sandbox` - when true the connection is wrapped in
a transaction. Defaults to true.
"""
def checkout(repo, opts \\ []) do
{name, opts} =
if Keyword.get(opts, :sandbox, true) do
proxy_pool(repo)
else
repo.__pool__
end
DBConnection.Ownership.ownership_checkout(name, opts)
end
@doc """
Checks in the connection back into the sandbox pool.
"""
def checkin(repo, _opts \\ []) do
{name, opts} = repo.__pool__
DBConnection.Ownership.ownership_checkin(name, opts)
end
@doc """
Allows the `allow` process to use the connection owned by `owner`.
"""
def allow(repo, owner, allow, _opts \\ []) do
{name, opts} = repo.__pool__
DBConnection.Ownership.ownership_allow(name, owner, allow, opts)
end
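# For example, to let a Task spawned inside a test reuse the test process's
# sandboxed connection (repo and schema names are illustrative):
#
#     parent = self()
#     task = Task.async(fn ->
#       Ecto.Adapters.SQL.Sandbox.allow(TestRepo, parent, self())
#       TestRepo.all(Post)
#     end)
#     Task.await(task)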
defp proxy_pool(repo) do
{name, opts} = repo.__pool__
{pool, opts} = Keyword.pop(opts, :ownership_pool, DBConnection.Poolboy)
{name, [repo: repo, sandbox_pool: pool, ownership_pool: Pool] ++ opts}
end
end
|
lib/ecto/adapters/sql/sandbox.ex
| 0.902079 | 0.629917 |
sandbox.ex
|
starcoder
|
defmodule RefData do
@moduledoc """
RefData is a library for Phoenix projects that lets you provide reference data
for your forms (e.g. Gender) without using a database table. It has been written
as tool for POC development but can be used in PROD for fields that are common
and do not form part of complex queries.
"""
defmacro __using__(_opts) do
quote do
def list_all_keys() do
GenServer.call(RefData.Server, {:get_all_keys})
end
def get(key) do
GenServer.call(RefData.Server, {key})
end
def get(key, disabled: list) do
GenServer.call(RefData.Server, {key, disabled: list})
end
end
end
@doc """
Returns a list of all key values from the underlying data store.
It will match the keys used in the individual json files
## Examples
iex> RefData.list_all_keys()
["key1", "key2"]
"""
@callback list_all_keys() :: List
@doc """
Returns a list of data for the provided key. If the json defines
grouped data it will return grouped data.
## Examples
iex(1)> MyRefData.get("gender")
[
[key: "Male", value: "Male"],
[key: "Female", value: "Female"],
[key: "Non-binary", value: "non-binary"]
]
iex(1)> MyRefData.get("countries")
[
Asia: [
[key: "Australia", value: "Australia"],
[key: "New Zealand", value: "New Zealand"]
],
Americas: [
[key: "Canada", value: "Canada"],
[key: "USA", value: "USA"]]
]
"""
@callback get(key :: String) :: List
@doc """
You can pass params to the get function. Keywords available
- disabled: [] - Will return the data with the listed fields disabled
## Example
iex(1)> MyRefData.get("gender", disabled: ["Female"])
[
[key: "Male", value: "Male"],
[key: "Female", value: "Female", disabled: true],
[key: "Non-binary", value: "Non-binary"]
]
"""
@callback get(key :: String, {:disabled, []}) :: List
end
|
lib/ref_data.ex
| 0.805556 | 0.400368 |
ref_data.ex
|
starcoder
|
defmodule Integer do
@moduledoc """
Functions for working with integers.
Some functions that work on integers are found in `Kernel`:
* `abs/1`
* `div/2`
* `max/2`
* `min/2`
* `rem/2`
"""
import Bitwise
@doc """
Determines if `integer` is odd.
Returns `true` if the given `integer` is an odd number,
otherwise it returns `false`.
Allowed in guard clauses.
## Examples
iex> Integer.is_odd(5)
true
iex> Integer.is_odd(6)
false
iex> Integer.is_odd(-5)
true
iex> Integer.is_odd(0)
false
"""
defguard is_odd(integer) when is_integer(integer) and (integer &&& 1) == 1
@doc """
Determines if an `integer` is even.
Returns `true` if the given `integer` is an even number,
otherwise it returns `false`.
Allowed in guard clauses.
## Examples
iex> Integer.is_even(10)
true
iex> Integer.is_even(5)
false
iex> Integer.is_even(-10)
true
iex> Integer.is_even(0)
true
"""
defguard is_even(integer) when is_integer(integer) and (integer &&& 1) == 0
@doc """
Computes `base` raised to power of `exponent`.
Both `base` and `exponent` must be integers.
The exponent must be zero or positive.
See `Float.pow/2` for exponentiation of negative
exponents as well as floats.
## Examples
iex> Integer.pow(2, 0)
1
iex> Integer.pow(2, 1)
2
iex> Integer.pow(2, 10)
1024
iex> Integer.pow(2, 11)
2048
iex> Integer.pow(2, 64)
0x10000000000000000
iex> Integer.pow(3, 4)
81
iex> Integer.pow(4, 3)
64
iex> Integer.pow(-2, 3)
-8
iex> Integer.pow(-2, 4)
16
iex> Integer.pow(2, -2)
** (ArithmeticError) bad argument in arithmetic expression
"""
@doc since: "1.12.0"
@spec pow(integer, non_neg_integer) :: integer
def pow(base, exponent) when is_integer(base) and is_integer(exponent) do
if exponent < 0, do: :erlang.error(:badarith, [base, exponent])
guarded_pow(base, exponent)
end
# https://en.wikipedia.org/wiki/Exponentiation_by_squaring
defp guarded_pow(_, 0), do: 1
defp guarded_pow(b, 1), do: b
defp guarded_pow(b, e) when (e &&& 1) == 0, do: guarded_pow(b * b, e >>> 1)
defp guarded_pow(b, e), do: b * guarded_pow(b * b, e >>> 1)
@doc """
Computes the modulo remainder of an integer division.
`Integer.mod/2` uses floored division, which means that
the result will always have the sign of the `divisor`.
Raises an `ArithmeticError` exception if one of the arguments is not an
integer, or when the `divisor` is `0`.
## Examples
iex> Integer.mod(5, 2)
1
iex> Integer.mod(6, -4)
-2
"""
@doc since: "1.4.0"
@spec mod(integer, neg_integer | pos_integer) :: integer
def mod(dividend, divisor) do
remainder = rem(dividend, divisor)
if remainder * divisor < 0 do
remainder + divisor
else
remainder
end
end
@doc """
Performs a floored integer division.
Raises an `ArithmeticError` exception if one of the arguments is not an
integer, or when the `divisor` is `0`.
`Integer.floor_div/2` performs *floored* integer division. This means that
the result is always rounded towards negative infinity.
If you want to perform truncated integer division (rounding towards zero),
use `Kernel.div/2` instead.
## Examples
iex> Integer.floor_div(5, 2)
2
iex> Integer.floor_div(6, -4)
-2
iex> Integer.floor_div(-99, 2)
-50
"""
@doc since: "1.4.0"
@spec floor_div(integer, neg_integer | pos_integer) :: integer
def floor_div(dividend, divisor) do
if dividend * divisor < 0 and rem(dividend, divisor) != 0 do
div(dividend, divisor) - 1
else
div(dividend, divisor)
end
end
@doc """
Returns the ordered digits for the given `integer`.
An optional `base` value may be provided representing the radix for the returned
digits. This one must be an integer >= 2.
## Examples
iex> Integer.digits(123)
[1, 2, 3]
iex> Integer.digits(170, 2)
[1, 0, 1, 0, 1, 0, 1, 0]
iex> Integer.digits(-170, 2)
[-1, 0, -1, 0, -1, 0, -1, 0]
"""
@spec digits(integer, pos_integer) :: [integer, ...]
def digits(integer, base \\ 10)
when is_integer(integer) and is_integer(base) and base >= 2 do
do_digits(integer, base, [])
end
defp do_digits(integer, base, acc) when abs(integer) < base, do: [integer | acc]
defp do_digits(integer, base, acc),
do: do_digits(div(integer, base), base, [rem(integer, base) | acc])
@doc """
Returns the integer represented by the ordered `digits`.
An optional `base` value may be provided representing the radix for the `digits`.
Base has to be an integer greater than or equal to `2`.
## Examples
iex> Integer.undigits([1, 2, 3])
123
iex> Integer.undigits([1, 4], 16)
20
iex> Integer.undigits([])
0
"""
@spec undigits([integer], pos_integer) :: integer
def undigits(digits, base \\ 10) when is_list(digits) and is_integer(base) and base >= 2 do
do_undigits(digits, base, 0)
end
defp do_undigits([], _base, acc), do: acc
defp do_undigits([digit | _], base, _) when is_integer(digit) and digit >= base,
do: raise(ArgumentError, "invalid digit #{digit} in base #{base}")
defp do_undigits([digit | tail], base, acc) when is_integer(digit),
do: do_undigits(tail, base, acc * base + digit)
@doc """
Parses a text representation of an integer.
An optional `base` to the corresponding integer can be provided.
If `base` is not given, 10 will be used.
If successful, returns a tuple in the form of `{integer, remainder_of_binary}`.
Otherwise `:error`.
Raises an error if `base` is less than 2 or more than 36.
If you want to convert a string-formatted integer directly to an integer,
`String.to_integer/1` or `String.to_integer/2` can be used instead.
## Examples
iex> Integer.parse("34")
{34, ""}
iex> Integer.parse("34.5")
{34, ".5"}
iex> Integer.parse("three")
:error
iex> Integer.parse("34", 10)
{34, ""}
iex> Integer.parse("f4", 16)
{244, ""}
iex> Integer.parse("Awww++", 36)
{509216, "++"}
iex> Integer.parse("fab", 10)
:error
iex> Integer.parse("a2", 38)
** (ArgumentError) invalid base 38
"""
@spec parse(binary, 2..36) :: {integer, binary} | :error
def parse(binary, base \\ 10)
def parse(_binary, base) when base not in 2..36 do
raise ArgumentError, "invalid base #{inspect(base)}"
end
def parse(binary, base) when is_binary(binary) do
case count_digits(binary, base) do
0 ->
:error
count ->
{digits, rem} = :erlang.split_binary(binary, count)
{:erlang.binary_to_integer(digits, base), rem}
end
end
defp count_digits(<<sign, rest::bits>>, base) when sign in '+-' do
case count_digits_nosign(rest, base, 1) do
1 -> 0
count -> count
end
end
defp count_digits(<<rest::bits>>, base) do
count_digits_nosign(rest, base, 0)
end
digits = [{?0..?9, -?0}, {?A..?Z, 10 - ?A}, {?a..?z, 10 - ?a}]
for {chars, diff} <- digits,
char <- chars do
digit = char + diff
defp count_digits_nosign(<<unquote(char), rest::bits>>, base, count)
when base > unquote(digit) do
count_digits_nosign(rest, base, count + 1)
end
end
defp count_digits_nosign(<<_::bits>>, _, count), do: count
# TODO: Remove Integer.to_string/1 once the minimum supported version is
# Erlang/OTP 22, since it is covered by the now BIF Integer.to_string/2.
# Please reapply commit 2622fd6b0aa419a983a899a1fbdb5deefba3d85d.
@doc """
Returns a binary which corresponds to the text representation
of `integer`.
Inlined by the compiler.
## Examples
iex> Integer.to_string(123)
"123"
iex> Integer.to_string(+456)
"456"
iex> Integer.to_string(-789)
"-789"
iex> Integer.to_string(0123)
"123"
"""
@spec to_string(integer) :: String.t()
def to_string(integer) do
:erlang.integer_to_binary(integer)
end
@doc """
Returns a binary which corresponds to the text representation
of `integer` in the given `base`.
`base` can be an integer between 2 and 36.
Inlined by the compiler.
## Examples
iex> Integer.to_string(100, 16)
"64"
iex> Integer.to_string(-100, 16)
"-64"
iex> Integer.to_string(882_681_651, 36)
"ELIXIR"
"""
@spec to_string(integer, 2..36) :: String.t()
def to_string(integer, base) do
:erlang.integer_to_binary(integer, base)
end
# TODO: Remove Integer.to_charlist/1 once the minimum supported version is
# Erlang/OTP 22, since it is covered by the now BIF Integer.to_charlist/2.
# Please reapply commit 2622fd6b0aa419a983a899a1fbdb5deefba3d85d.
@doc """
Returns a charlist which corresponds to the text representation of the given `integer`.
Inlined by the compiler.
## Examples
iex> Integer.to_charlist(123)
'123'
iex> Integer.to_charlist(+456)
'456'
iex> Integer.to_charlist(-789)
'-789'
iex> Integer.to_charlist(0123)
'123'
"""
@spec to_charlist(integer) :: charlist
def to_charlist(integer) do
:erlang.integer_to_list(integer)
end
@doc """
Returns a charlist which corresponds to the text representation of `integer` in the given `base`.
`base` can be an integer between 2 and 36.
Inlined by the compiler.
## Examples
iex> Integer.to_charlist(100, 16)
'64'
iex> Integer.to_charlist(-100, 16)
'-64'
iex> Integer.to_charlist(882_681_651, 36)
'ELIXIR'
"""
@spec to_charlist(integer, 2..36) :: charlist
def to_charlist(integer, base) do
:erlang.integer_to_list(integer, base)
end
@doc """
Returns the greatest common divisor of the two given integers.
The greatest common divisor (GCD) of `integer1` and `integer2` is the largest positive
integer that divides both `integer1` and `integer2` without leaving a remainder.
By convention, `gcd(0, 0)` returns `0`.
## Examples
iex> Integer.gcd(2, 3)
1
iex> Integer.gcd(8, 12)
4
iex> Integer.gcd(8, -12)
4
iex> Integer.gcd(10, 0)
10
iex> Integer.gcd(7, 7)
7
iex> Integer.gcd(0, 0)
0
"""
@doc since: "1.5.0"
@spec gcd(integer, integer) :: non_neg_integer
def gcd(integer1, integer2) when is_integer(integer1) and is_integer(integer2) do
gcd_positive(abs(integer1), abs(integer2))
end
defp gcd_positive(0, integer2), do: integer2
defp gcd_positive(integer1, 0), do: integer1
defp gcd_positive(integer1, integer2), do: gcd_positive(integer2, rem(integer1, integer2))
@doc false
@deprecated "Use Integer.to_charlist/1 instead"
def to_char_list(integer), do: Integer.to_charlist(integer)
@doc false
@deprecated "Use Integer.to_charlist/2 instead"
def to_char_list(integer, base), do: Integer.to_charlist(integer, base)
end
|
lib/elixir/lib/integer.ex
| 0.956033 | 0.637341 |
integer.ex
|
starcoder
|
defmodule Plug.Parsers.JSON do
@moduledoc """
Parses JSON request body.
JSON arrays are parsed into a `"_json"` key to allow
proper param merging.
An empty request body is parsed as an empty map.
## Options
All options supported by `Plug.Conn.read_body/2` are also supported here.
They are repeated here for convenience:
* `:length` - sets the maximum number of bytes to read from the request,
defaults to 8_000_000 bytes
* `:read_length` - sets the amount of bytes to read at one time from the
underlying socket to fill the chunk, defaults to 1_000_000 bytes
* `:read_timeout` - sets the timeout for each socket read, defaults to
15_000ms
So by default, `Plug.Parsers` will read 1_000_000 bytes at a time from the
socket with an overall limit of 8_000_000 bytes.
"""
@behaviour Plug.Parsers
def init(opts) do
{decoder, opts} = Keyword.pop(opts, :json_decoder)
{body_reader, opts} = Keyword.pop(opts, :body_reader, {Plug.Conn, :read_body, []})
validate_decoder!(decoder)
{body_reader, decoder, opts}
end
defp validate_decoder!(nil) do
raise ArgumentError, "JSON parser expects a :json_decoder option"
end
defp validate_decoder!({module, fun, args})
when is_atom(module) and is_atom(fun) and is_list(args) do
arity = length(args) + 1
unless Code.ensure_compiled?(module) and function_exported?(module, fun, arity) do
raise ArgumentError,
"invalid :json_decoder option. Undefined function " <>
Exception.format_mfa(module, fun, arity)
end
end
defp validate_decoder!(decoder) when is_atom(decoder) do
unless Code.ensure_compiled?(decoder) do
raise ArgumentError,
"invalid :json_decoder option. The module #{inspect(decoder)} is not " <>
"loaded and could not be found"
end
unless function_exported?(decoder, :decode!, 1) do
raise ArgumentError,
"invalid :json_decoder option. The module #{inspect(decoder)} must " <>
"implement decode!/1"
end
end
defp validate_decoder!(decoder) do
raise ArgumentError,
"the :json_decoder option expects a module, or a three-element " <>
"tuple in the form of {module, function, extra_args}, got: #{inspect(decoder)}"
end
def parse(conn, "application", subtype, _headers, {{mod, fun, args}, decoder, opts}) do
if subtype == "json" or String.ends_with?(subtype, "+json") do
apply(mod, fun, [conn, opts | args]) |> decode(decoder)
else
{:next, conn}
end
end
def parse(conn, _type, _subtype, _headers, _opts) do
{:next, conn}
end
defp decode({:ok, "", conn}, _decoder) do
{:ok, %{}, conn}
end
defp decode({:ok, body, conn}, decoder) do
case apply_mfa_or_module(body, decoder) do
terms when is_map(terms) ->
{:ok, terms, conn}
terms ->
{:ok, %{"_json" => terms}, conn}
end
rescue
e -> raise Plug.Parsers.ParseError, exception: e
end
defp decode({:more, _, conn}, _decoder) do
{:error, :too_large, conn}
end
defp decode({:error, :timeout}, _decoder) do
raise Plug.TimeoutError
end
defp decode({:error, _}, _decoder) do
raise Plug.BadRequestError
end
defp apply_mfa_or_module(body, decoder) when is_atom(decoder) do
decoder.decode!(body)
end
defp apply_mfa_or_module(body, {module_name, function_name, extra_args}) do
apply(module_name, function_name, [body | extra_args])
end
end
|
deps/plug/lib/plug/parsers/json.ex
| 0.806662 | 0.437403 |
json.ex
|
starcoder
|
defmodule AdventOfCode.Day10 do
def test_adapters(adapters, current_joltage) do
test_adapters(adapters, current_joltage, 0, 0)
end
def test_adapters([], _, num_ones, num_threes) do
{num_ones, num_threes}
end
def test_adapters([adapter|adapters], current_joltage, num_ones, num_threes) do
{num_ones, num_threes} =
case adapter - current_joltage do
1 -> {num_ones + 1, num_threes}
3 -> {num_ones, num_threes + 1}
end
test_adapters(adapters, adapter, num_ones, num_threes)
end
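# test_adapters/4 walks the sorted adapter list and tallies the 1-jolt and
# 3-jolt gaps between consecutive adapters; part 1 is the product of the two
# tallies.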
@spec fill_row(integer, [integer], integer, integer, Matrex.t()) :: Matrex.t()
def fill_row(_, [], _row, _col, mat) do
mat
end
def fill_row(adapter, [other_adapter | rest], row, col, mat) do
diff = abs(adapter - other_adapter)
value = AdventOfCode.boolean_to_integer(diff <= 3)
new_mat = Matrex.set(mat, row, col, value)
fill_row(adapter, rest, row, col + 1, new_mat)
end
@spec build_adjacency_matrix([integer], integer, integer, Matrex.t()) :: Matrex.t()
def build_adjacency_matrix([], _row, _col, mat) do
mat
end
def build_adjacency_matrix([adapter | adapters], row, col, mat) do
mat = fill_row(adapter, adapters, row, col, mat)
build_adjacency_matrix(adapters, row + 1, col + 1, mat)
end
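# Path counting via matrix powers: entry (1, n) of A^k is the number of
# k-step chains from the first adapter to the last, so summing that entry
# over successive powers of the adjacency matrix A counts every valid
# arrangement.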
@spec find_adapter_combination([integer]) :: {Matrex.t(), float}
def find_adapter_combination(adapters) do
mat = Matrex.zeros(length(adapters))
adj = build_adjacency_matrix(adapters, 1, 2, mat)
{_, count} = Enum.reduce(1..length(adapters) + 1, {adj, 0}, fn _, {acc, count} ->
acc = Matrex.dot(acc, adj)
count = count + Matrex.at(acc, 1, length(adapters))
{acc, count}
end)
{adj, count}
end
def day10() do
adapters =
"day10_input"
|> AdventOfCode.read_file()
|> Enum.map(fn x ->
{value, _} = Integer.parse(x)
value
end)
additional_adapter = Enum.max(adapters) + 3
adapters = [additional_adapter|adapters]
adapters = Enum.sort(adapters)
{num_ones, num_threes} = test_adapters(adapters, 0)
part1 = num_ones * num_threes
{_adj, part2} = find_adapter_combination([0|adapters])
{adapters, part1, part2}
end
end
|
lib/day10.ex
| 0.698227 | 0.646865 |
day10.ex
|
starcoder
|
defmodule Pummpcomm.Session.Exchange.ReadCarbRatios do
@moduledoc """
Read open intervals for carb ratios throughout the day
"""
alias Pummpcomm.{Carbohydrates, Insulin}
alias Pummpcomm.Session.{Command, Response}
# Constants
@opcode 0x8A
# Functions
@doc """
Decodes a `Pummpcomm.Session.Response.t` to a schedule of `Pummpcomm.Insulin.carbohydrates_per_unit` ratios,
one for each open interval beginning at `start`.
"""
@spec decode(Response.t(), Pummpcomm.PumpModel.pump_model()) :: {
:ok,
%{
units: Carbohydrates.units(),
schedule: [
%{
ratio: Insulin.carbohydrates_per_unit(),
start: NaiveDateTime.t()
}
]
}
}
def decode(%Response{opcode: @opcode, data: <<units::8, count::8, rest::binary>>}, model_number)
when rem(model_number, 100) >= 23 do
{:ok, %{units: decode_units(units), schedule: decode_larger_carb_ratio(rest, [], count)}}
end
def decode(%Response{opcode: @opcode, data: <<units::8, rest::binary>>}, _) do
{:ok, %{units: decode_units(units), schedule: decode_smaller_carb_ratio(rest, [], 8)}}
end
@doc """
Makes `Pummpcomm.Session.Command.t` to read carb ratios from pump with `pump_serial`
"""
@spec make(Command.pump_serial()) :: Command.t()
def make(pump_serial) do
%Command{opcode: @opcode, pump_serial: pump_serial}
end
## Private Functions
defp basal_time(raw_time) do
Timex.now()
|> Timex.beginning_of_day()
|> Timex.shift(minutes: 30 * raw_time)
|> DateTime.to_time()
end
defp decode_larger_carb_ratio(_, carb_ratios, count) when count == 0,
do: Enum.reverse(carb_ratios)
defp decode_larger_carb_ratio(
<<raw_time::8, raw_ratio::binary-size(2), rest::binary>>,
carb_ratios,
count
) do
carb_ratio = %{ratio: decode_ratio(raw_ratio), start: basal_time(raw_time)}
decode_larger_carb_ratio(rest, [carb_ratio | carb_ratios], count - 1)
end
defp decode_ratio(<<raw_ratio::8>>), do: raw_ratio / 1
defp decode_ratio(<<0x00::8, raw_ratio::8>>), do: raw_ratio / 10
defp decode_ratio(<<raw_ratio::16>>), do: raw_ratio / 1000
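# Ratio encoding, as inferred from the clauses above: one byte holds a whole
# ratio, two bytes with a 0x00 lead byte hold tenths, and any other two-byte
# value holds thousandths.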
defp decode_smaller_carb_ratio(_, carb_ratios, count) when count == 0,
do: Enum.reverse(carb_ratios)
defp decode_smaller_carb_ratio(<<_::8, raw_ratio::8, _::binary>>, carb_ratios, _)
when raw_ratio == 0 and length(carb_ratios) > 0,
do: Enum.reverse(carb_ratios)
defp decode_smaller_carb_ratio(
<<raw_time::8, raw_ratio::binary-size(1), rest::binary>>,
carb_ratios,
count
) do
carb_ratio = %{ratio: decode_ratio(raw_ratio), start: basal_time(raw_time)}
decode_smaller_carb_ratio(rest, [carb_ratio | carb_ratios], count - 1)
end
defp decode_units(0x01), do: :grams
defp decode_units(0x02), do: :exchanges
end
|
lib/pummpcomm/session/exchange/read_carb_ratios.ex
| 0.876911 | 0.485661 |
read_carb_ratios.ex
|
starcoder
|
defmodule AstraeaVirgoWeb.LanguageView do
use AstraeaVirgoWeb, :view
@moduledoc """
Response for Language API
"""
@doc """
Response
## index.json
Response for index Langauges API: `GET /api/languages`
Response: list of Object
| field | type | required | descript |
|-----------------|----------|----------|------------------------------------------------|
| id | ID | yes | |
| name | string | yes | |
| extensions | string[] | yes | language extension list, example ["cpp", "cc"] |
| time_multiplier | double | yes | ratio of language time to topic requirements |
| mem_multiplier | double | yes | ratio of language memory to topic requirements |
Example:
```json
[
{
"id": "Cpp",
"name": "C++ 11 (GCC v4.9)",
"extensions": ["cpp", "cc"],
"time_multiplier": 1.0,
"mem_multiplier": 1.0
},
{
"id": "Python3",
"name": "Python 3 (v3.8)",
"extensions": ["py"],
"time_multiplier": 3.0,
"mem_multiplier": 3.0
}
]
```
## show.json
Response for show Language API:
- `GET /api/languages/<language_id>`
- `PUT /api/languages/<language_id>`
Response: Object
| field | type | required | descript |
|-----------------|----------|----------|------------------------------------------------|
| id | ID | yes | |
| name | string | yes | |
| extensions | string[] | yes | language extension list, example ["cpp", "cc"] |
| time_multiplier | double | yes | ratio of language time to topic requirements |
| mem_multiplier | double | yes | ratio of language memory to topic requirements |
Example:
```json
{
"id": "cpp",
"name": "C++ 11 (GCC v4.9)",
"extensions": ["cpp", "cc"],
"time_multiplier": 1.0,
"mem_multiplier": 1.0
}
```
## create.json
Response for create Language API: `POST /api/languages`
Response: Object
| field | type | required | null | descript |
|-------------|------|----------|------|---------------|
| language_id | ID | yes | no | ID of the programming language |
Example:
```json
{"language_id": "cpp"}
```
"""
def render("index.json", assigns), do: assigns.data
def render("show.json", assigns), do: assigns.data
def render("create.json", assigns) do
%{
language_id: assigns.language_id
}
end
end
|
lib/virgo_web/views/language_view.ex
| 0.871502 | 0.614727 |
language_view.ex
|
starcoder
|
defmodule EDS.Fixtures.FunctionCalls.SharedFunctions do
alias EDS.Fixtures.FunctionCalls
defmacro __using__(_opts) do
quote do
def public_call(), do: public_stub()
def private_call(), do: private_stub()
def public_stub(), do: true
def public_recursion(), do: public_recursive_stub(0)
def private_recursion(), do: private_recursive_stub(0)
def public_recursive_stub(2), do: true
def public_recursive_stub(call_count),
do: public_recursive_stub(call_count + 1)
def interpreted_call(),
do: FunctionCalls.Interpreted.public_stub()
def non_interpreted_call(),
do: FunctionCalls.NonInterpreted.public_stub()
def interpreted_reentry_call(),
do: FunctionCalls.Interpreted.reentry()
def non_interpreted_reentry_call(),
do: FunctionCalls.NonInterpreted.reentry()
def reentry(), do: FunctionCalls.public_stub()
defp private_stub(), do: true
defp private_recursive_stub(2), do: true
defp private_recursive_stub(call_count),
do: private_recursive_stub(call_count + 1)
end
end
end
defmodule EDS.Fixtures.FunctionCalls.NonInterpreted do
use EDS.Fixtures.FunctionCalls.SharedFunctions
end
defmodule EDS.Fixtures.FunctionCalls.Interpreted do
use EDS.Fixtures.FunctionCalls.SharedFunctions
end
defmodule EDS.Fixtures.FunctionCalls do
use EDS.Fixtures.FunctionCalls.SharedFunctions
alias __MODULE__.{
Interpreted,
NonInterpreted
}
def internal_public_call(), do: public_stub()
def internal_private_call(), do: private_stub()
def internal_public_recursion(), do: public_recursive_stub(0)
def internal_private_recursion(), do: private_recursive_stub(0)
def external_non_interpreted_public_call(),
do: NonInterpreted.public_call()
def external_non_interpreted_private_call(),
do: NonInterpreted.private_call()
def external_non_interpreted_public_recursion(),
do: NonInterpreted.public_recursion()
def external_non_interpreted_private_recursion(),
do: NonInterpreted.private_recursion()
def external_non_interpreted_to_interpreted_call(),
do: NonInterpreted.interpreted_call()
def external_non_interpreted_to_interpreted_reentry_call(),
do: NonInterpreted.non_interpreted_reentry_call()
def external_interpreted_public_call(),
do: Interpreted.public_call()
def external_interpreted_private_call(),
do: Interpreted.private_call()
def external_interpreted_public_recursion(),
do: Interpreted.public_recursion()
def external_interpreted_private_recursion(),
do: Interpreted.private_recursion()
def external_interpreted_to_non_interpreted_call(),
do: Interpreted.non_interpreted_call()
def external_interpreted_to_non_interpreted_reentry_call(),
do: Interpreted.non_interpreted_reentry_call()
end
|
test/fixtures/function_calls.ex
| 0.569374 | 0.403949 |
function_calls.ex
|
starcoder
|
defmodule HLL do
@moduledoc """
Default HyperLogLog module.
Note that this module is not Redis compatible. Use the alternative `HLL.Redis` module
if you need to interact with Redis.
This module uses `:erlang.phash2` as hash function.
## Example
iex> hll = HLL.new(14)
iex> hll = Enum.reduce(1..2000, hll, fn i, acc -> HLL.add(acc, i) end)
iex> HLL.cardinality(hll)
1998
## Serialization
It has two representations, sparse (space-efficient for low cardinality) and dense
(space-efficient for high cardinality). When encode HyperLogLog with `HLL.encode`,
this module would automatically choose the representation with smaller encoded size.
# sparse representation:
<<0::4, (p - 8)::4, index0::p, count0::6, index1::p, count1::6 ..., padding::xx>>
# dense representation:
<<1::4, (p - 8)::4, count0::6, count1::6, count2::6 ...>>
"""
use Bitwise
alias HLL.Helper
@type t :: {__MODULE__, 8..16, map()}
# "New cardinality estimation algorithms for HyperLogLog sketches" paper
# suggests min precision to be 8. (page 8, formula 13)
@p_min 8
@p_max 16
@p_range @p_min..@p_max
@doc """
Create a HyperLogLog instance with specified precision in range from 8 to 16.
## Example
iex> HLL.new(12)
{HLL, 12, %{}}
iex> HLL.new(14)
{HLL, 14, %{}}
"""
@spec new(8..16) :: t()
def new(p) when p in @p_range do
{__MODULE__, p, %{}}
end
@doc """
Add a value to HyperLogLog instance.
## Example
iex> h = HLL.new(12)
{HLL, 12, %{}}
iex> HLL.add(h, "hello")
{HLL, 12, %{1581 => 2}}
"""
@spec add(t(), any()) :: t()
def add({__MODULE__, p, map} = hll, item) do
{index, count} = hash(p, item)
case map do
%{^index => value} when value >= count ->
hll
_ ->
{__MODULE__, p, Map.put(map, index, count)}
end
end
@doc """
Merge multiple HyperLogLog instances into one.
## Example
iex> h1 = HLL.new(12) |> HLL.add("foo")
iex> h2 = HLL.new(12) |> HLL.add("bar")
iex> h3 = HLL.new(12) |> HLL.add("foo") |> HLL.add("bar")
iex> h_merged = HLL.merge([h1, h2])
iex> h3 == h_merged
true
"""
@spec merge([t()]) :: t()
def merge([{_, p, _} | _] = list_of_hll) do
result =
list_of_hll
|> Enum.map(fn {__MODULE__, ^p, map} -> map end)
|> Helper.merge_hll_maps()
{__MODULE__, p, result}
end
@doc """
Estimate cardinality of HyperLogLog instance.
## Example
iex> h = HLL.new(14)
iex> HLL.cardinality(h)
0
iex> h = HLL.add(h, "foo")
iex> HLL.cardinality(h)
1
iex> h = HLL.add(h, "bar")
iex> HLL.cardinality(h)
2
"""
@spec cardinality(t()) :: non_neg_integer()
def cardinality({__MODULE__, p, map} = _hll) do
Helper.estimate_cardinality(p, map_size(map), Map.values(map))
end
@doc """
Encode HyperLogLog instance to HLL binary format.
## Example
iex> HLL.new(14) |> HLL.encode()
<<6>>
iex> HLL.new(14) |> HLL.add("foo") |> HLL.encode()
<<6, 9, 164, 16>>
iex> HLL.new(14) |> HLL.add("foo") |> HLL.add("bar") |> HLL.encode()
<<6, 9, 164, 16, 219, 129, 0>>
"""
@spec encode(t()) :: binary()
def encode(hll)
@doc """
Decode HLL binary format to HyperLogLog instance.
## Example
iex> h = HLL.new(14) |> HLL.add("foo")
{HLL, 14, %{617 => 1}}
iex> encoded = HLL.encode(h)
<<6, 9, 164, 16>>
iex> HLL.decode(encoded)
{HLL, 14, %{617 => 1}}
"""
@spec decode(binary()) :: t()
def decode(hll_binary)
# <<format::4, p_code::4, entries, padding>>
for p <- @p_range do
m = 1 <<< p
dense_size = m * 6
encode_sparse = String.to_atom("encode_sparse_p#{p}")
def encode({__MODULE__, unquote(p), map}) do
sparse_size = unquote(p + 6) * map_size(map)
if sparse_size < unquote(dense_size) do
# encode sparse
[<<0::4, unquote(p - @p_min)::4>> | unquote(encode_sparse)(Map.to_list(map), [])]
else
# encode dense
entries =
Enum.reduce(unquote(div(m, 8) - 1)..0, [], fn i, acc ->
index = i * 8
b0 = Map.get(map, index, 0)
b1 = Map.get(map, index + 1, 0)
b2 = Map.get(map, index + 2, 0)
b3 = Map.get(map, index + 3, 0)
b4 = Map.get(map, index + 4, 0)
b5 = Map.get(map, index + 5, 0)
b6 = Map.get(map, index + 6, 0)
b7 = Map.get(map, index + 7, 0)
[<<b0::6, b1::6, b2::6, b3::6, b4::6, b5::6, b6::6, b7::6>> | acc]
end)
[<<1::4, unquote(p - @p_min)::4>>, entries]
end
|> IO.iodata_to_binary()
end
compute_sparse_padding_size = fn n ->
8 - rem(n * (p + 6), 8)
end
defp unquote(encode_sparse)(
[
{i1, c1},
{i2, c2},
{i3, c3},
{i4, c4},
{i5, c5},
{i6, c6},
{i7, c7},
{i8, c8} | rest
],
acc
) do
unquote(encode_sparse)(rest, [
<<i1::unquote(p), c1::6, i2::unquote(p), c2::6, i3::unquote(p), c3::6, i4::unquote(p),
c4::6, i5::unquote(p), c5::6, i6::unquote(p), c6::6, i7::unquote(p), c7::6,
i8::unquote(p), c8::6>>
| acc
])
end
defp unquote(encode_sparse)(
[{i1, c1}, {i2, c2}, {i3, c3}, {i4, c4}, {i5, c5}, {i6, c6}, {i7, c7}],
acc
) do
[
acc,
<<i1::unquote(p), c1::6, i2::unquote(p), c2::6, i3::unquote(p), c3::6, i4::unquote(p),
c4::6, i5::unquote(p), c5::6, i6::unquote(p), c6::6, i7::unquote(p), c7::6,
0::unquote(compute_sparse_padding_size.(7))>>
]
end
defp unquote(encode_sparse)([{i1, c1}, {i2, c2}, {i3, c3}, {i4, c4}, {i5, c5}, {i6, c6}], acc) do
[
acc,
<<i1::unquote(p), c1::6, i2::unquote(p), c2::6, i3::unquote(p), c3::6, i4::unquote(p),
c4::6, i5::unquote(p), c5::6, i6::unquote(p), c6::6,
0::unquote(compute_sparse_padding_size.(6))>>
]
end
defp unquote(encode_sparse)([{i1, c1}, {i2, c2}, {i3, c3}, {i4, c4}, {i5, c5}], acc) do
[
acc,
<<i1::unquote(p), c1::6, i2::unquote(p), c2::6, i3::unquote(p), c3::6, i4::unquote(p),
c4::6, i5::unquote(p), c5::6, 0::unquote(compute_sparse_padding_size.(5))>>
]
end
defp unquote(encode_sparse)([{i1, c1}, {i2, c2}, {i3, c3}, {i4, c4}], acc) do
[
acc,
<<i1::unquote(p), c1::6, i2::unquote(p), c2::6, i3::unquote(p), c3::6, i4::unquote(p),
c4::6, 0::unquote(compute_sparse_padding_size.(4))>>
]
end
defp unquote(encode_sparse)([{i1, c1}, {i2, c2}, {i3, c3}], acc) do
[
acc,
<<i1::unquote(p), c1::6, i2::unquote(p), c2::6, i3::unquote(p), c3::6,
0::unquote(compute_sparse_padding_size.(3))>>
]
end
defp unquote(encode_sparse)([{i1, c1}, {i2, c2}], acc) do
[
acc,
<<i1::unquote(p), c1::6, i2::unquote(p), c2::6,
0::unquote(compute_sparse_padding_size.(2))>>
]
end
defp unquote(encode_sparse)([{i1, c1}], acc) do
[acc, <<i1::unquote(p), c1::6, 0::unquote(compute_sparse_padding_size.(1))>>]
end
defp unquote(encode_sparse)([], acc) do
acc
end
decode_sparse = String.to_atom("decode_sparse_p#{p}")
def decode(<<0::4, unquote(p - @p_min)::4, rest::bits>>) do
map = unquote(decode_sparse)(rest, []) |> Map.new()
{__MODULE__, unquote(p), map}
end
defp unquote(decode_sparse)(<<index::unquote(p), count::6, rest::bits>>, acc) do
unquote(decode_sparse)(rest, [{index, count} | acc])
end
defp unquote(decode_sparse)(<<_padding::bits>>, acc) do
acc
end
decode_dense = String.to_atom("decode_dense_p#{p}")
def decode(<<1::4, unquote(p - @p_min)::4, rest::bits>>) do
map = unquote(decode_dense)(rest, 0, []) |> Map.new()
{__MODULE__, unquote(p), map}
end
defp unquote(decode_dense)(<<0::6, rest::bits>>, index, acc) do
unquote(decode_dense)(rest, index + 1, acc)
end
defp unquote(decode_dense)(<<value::6, rest::bits>>, index, acc) do
unquote(decode_dense)(rest, index + 1, [{index, value} | acc])
end
defp unquote(decode_dense)(<<>>, unquote(1 <<< p), acc) do
acc
end
end
@range_32 1 <<< 32
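# hash/2 splits a 32-bit phash2 of the item into a p-bit bucket index and the
# 1-based position of the first 1 bit in the remaining bits; when the
# remainder is all zeros, the item is rehashed (phash2 of [item]) so the
# zero-run count can keep growing.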
defp hash(p, item) do
<<index::size(p), rest::bits>> = <<:erlang.phash2(item, @range_32)::32>>
count_zeros(rest, index, 1, item)
end
defp count_zeros(<<1::1, _::bits>>, index, count, _item) do
{index, count}
end
defp count_zeros(<<0::1, rest::bits>>, index, count, item) do
count_zeros(rest, index, count + 1, item)
end
defp count_zeros(<<>>, index, count, item) do
count_zeros2(<<:erlang.phash2([item], @range_32)::32>>, index, count)
end
  defp count_zeros2(<<1::1, _::bits>>, index, count) do
{index, count}
end
defp count_zeros2(<<0::1, rest::bits>>, index, count) do
count_zeros2(rest, index, count + 1)
end
defp count_zeros2(<<>>, index, count) do
{index, count}
end
end
|
lib/hll.ex
| 0.895626 | 0.442034 |
hll.ex
|
starcoder
|
defmodule Adventofcode.Day01BlocksAway do
def blocks_away(instructions),
do: instructions |> decode |> get_end_pos |> get_distance
def first_revisit(instructions),
do: instructions |> decode |> get_first_revisit_pos |> get_distance
def get_distance({x, y}), do: abs(x) + abs(y)
def get_end_pos(
instructions,
direction \\ :north,
positions \\ [{0, 0}])
def get_end_pos([], _dir, [{x, y} | _tail]), do: {x, y}
def get_end_pos([{turn, steps} | tail], dir, positions),
do: get_end_pos([{steps} | tail], update_direction(turn, dir), positions)
def get_end_pos([{0} | tail], dir, positions),
do: get_end_pos(tail, dir, positions)
def get_end_pos([{steps} | tail], dir, positions),
do: get_end_pos([{steps - 1} | tail], dir, positions |> move(dir))
def get_first_revisit_pos(
instructions,
direction \\ :north,
positions \\ [{0, 0}])
def get_first_revisit_pos([], _dir, [{x, y} | _tail]), do: {x, y}
def get_first_revisit_pos([{turn, steps} | tail], dir, positions),
do: get_first_revisit_pos([{steps} | tail], update_direction(turn, dir), positions)
def get_first_revisit_pos([{0} | tail], dir, [last | rest] = positions) do
if rest |> Enum.member?(last) do
get_first_revisit_pos([], dir, positions)
else
get_first_revisit_pos(tail, dir, positions)
end
end
def get_first_revisit_pos([{steps} | tail], dir, [last | rest] = positions) do
if rest |> Enum.member?(last) do
get_first_revisit_pos([], dir, positions)
else
get_first_revisit_pos([{steps - 1} | tail], dir, positions |> move(dir))
end
end
def move([{x, y} | tail], :north), do: [{x, y - 1}, {x, y} | tail]
def move([{x, y} | tail], :east), do: [{x + 1, y}, {x, y} | tail]
def move([{x, y} | tail], :south), do: [{x, y + 1}, {x, y} | tail]
def move([{x, y} | tail], :west), do: [{x - 1, y}, {x, y} | tail]
def update_direction(:R, :north), do: :east
def update_direction(:R, :east), do: :south
def update_direction(:R, :south), do: :west
def update_direction(:R, :west), do: :north
def update_direction(:L, :north), do: :west
def update_direction(:L, :east), do: :north
def update_direction(:L, :south), do: :east
def update_direction(:L, :west), do: :south
def decode(instructions) when is_binary(instructions),
do: do_decode(instructions |> String.split(", "))
def decode(instructions) when is_list(instructions),
do: do_decode(instructions)
defp do_decode(instructions, result \\ [])
defp do_decode([], result), do: result |> Enum.reverse
defp do_decode(["R" <> num | tail], result),
do: do_decode(tail, [{:R, num |> String.to_integer } | result])
defp do_decode(["L" <> num | tail], result),
do: do_decode(tail, [{:L, num |> String.to_integer } | result])
end
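# Usage sketch, using the official AoC 2016 day 1 examples:
#
#   Adventofcode.Day01BlocksAway.blocks_away("R2, L3")
#   #=> 5
#
#   Adventofcode.Day01BlocksAway.first_revisit("R8, R4, R4, R8")
#   #=> 4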
|
lib/day_01_blocks_away.ex
| 0.600891 | 0.770939 |
day_01_blocks_away.ex
|
starcoder
|
defmodule Mola do
@moduledoc """
Compare various poker hand strengths
No validation is done on the "sanity" of any combination of cards.
Card rank should be "2", "3", "4", "5", "6", "7", "8", "9", "T", "J", "Q", "K", "A"
Suits should be "c", "d", "h", "s"
Cards should be provided as a string:
- "Ac Kc Qc Jc Tc"
- "AcKcQcJcTc"
- "🃑🃞🃝🃛🃚"
or a list of tuples: [{"A", "c"}, {"K", "c"}, {"Q", "c"}, {"J", "c"}, {"T", "c"}]
Hands which cannot be evaluated are silently stripped from the results
As long as the cards to hand selection rules are the same, the evaluators should work for
less popular variants.
As such, wider boards (with 6 community cards), Pineapple-style (3 personal card
hold 'em), 8-card stud, and Big-O-ish high (5 personal card Omaha) are all supported.
Community card (board) games can even vary both.
Please note that compilation can be very slow while providing very fast hand evaluation.
"""
@doc """
Compare 5 card high poker hands
Selects best 5 cards for each player and then orders players
Supply:
- a list of {description, cards} tuples for comparison
- a list of community cards, if applicable.
- an options keyword list:
- hand_selection: (:any, :omaha), defaults to :any
- deck: (:standard, :short), defaults to :standard
Returns a sorted list of tuples: [{description, rank, :hand_descriptor}]
## Examples
iex> Mola.ranked_high_hands([{"P1", "2c 3c 4c 5c 7s"}, {"P2", "2s 3s 4s 5s 6c"}, {"P3", "Ac As 7h 7c Kc"}])
[ {"P2", 1608, :six_high_straight}, {"P3", 2534, :aces_and_sevens}, {"P1", 7462, :seven_high} ]
iex> Mola.ranked_high_hands([{"P1", "2c 3c 4c 5c 7s 5d"}, {"P2", "2s 3s 4s 5s 6c Ks"}, {"P3", "Ac As 7h 7c Kc 7d"}])
[ {"P3", 251, :sevens_full_over_aces}, {"P2", 1144, :king_high_flush}, {"P1", 5519, :pair_of_fives} ]
iex> Mola.ranked_high_hands([{"BB", "🃔🃕"}, {"UTG", "AdAh"}, {"CO", "3d 3s"}], "Ac 2c 3h Td 3c")
[ {"BB", 10, :five_high_straight_flush}, {"CO", 143, :four_treys}, {"UTG", 177, :aces_full_over_treys} ]
iex> Mola.ranked_high_hands([{"BB", "4c 5d As Tc"}, {"UTG", "Ad Ah Th Ts"}, {"CO", "9c 3s Jc 8d"}], "Ac 2c Td Jd 3c", hand_selection: :omaha)
[ {"CO", 655, :ace_high_flush}, {"BB", 746, :ace_high_flush}, {"UTG", 1631, :three_aces} ]
iex> Mola.ranked_high_hands([{"BB", "7c 9c"}, {"UTG", "🃁🂱"}, {"CO", "8d 8s"}], "Ac 6c 8h Td 8c", deck: :short)
[ {"BB", 6, :nine_high_straight_flush}, {"CO", 55, :four_eights}, {"UTG", 204, :aces_full_over_eights} ]
"""
def ranked_high_hands(hands, community \\ [], opts \\ [])
def ranked_high_hands(hands, community, opts) do
{select, deck, _, _} = parse_opts(opts)
do_ranking(hands, community, select, deck)
end
defp parse_opts(opts) do
{hs, d, tbd} =
{Keyword.get(opts, :hand_selection, :any), Keyword.get(opts, :deck, :standard),
Keyword.get(opts, :deal, community: 0, personal: 0)}
{c, p} = {Keyword.get(tbd, :community, 0), Keyword.get(tbd, :personal, 0)}
{hs, d, c, p}
end
@doc """
  Enumerates possible outcomes going forward and returns a win percentage for each supplied hand
Supply `community` for board games and `seen` for any additional exposed cards
This does not enforce any rules on board or hand size.
  Options are as per `ranked_high_hands`, with an additional `:deal` keyword list.
Defaults to:
- deal: [community: 0, personal: 0]
Note that dealing additional personal cards is not yet implemented.
## Examples
iex> Mola.equity([{"BB", "Ah Kh"}, {"CO", "Jd Td"}], "Ad Kd Ts", [], deal: [community: 2])
[{"BB", 51.92}, {"CO", 47.17}, {"BB=CO", 0.91}]
"""
def equity(hands, community \\ [], seen \\ [], opts \\ [])
def equity(hands, community, seen, opts) do
{_, deck, _, tbdp} = parsed = parse_opts(opts)
nhands = Enum.map(hands, &normalize_hand/1)
ncomm = normalize_hand(community)
nseen = normalize_hand(seen)
remain =
[ncomm, nseen | nhands]
|> Enum.reduce(Mola.Poker5High.full_deck(deck), fn {_, c}, d -> d -- c end)
case tbdp do
0 -> board_winners(nhands, ncomm, remain, parsed)
_ -> :unimplemented
end
end
defp board_winners(hands, ncomm, remain, {selection, deck, tbdc, _}) do
{cd, common} = ncomm
tbdc
|> comb(remain)
|> Flow.from_enumerable()
|> Flow.map(fn dealt -> hands |> do_ranking({cd, common ++ dealt}, selection, deck) end)
|> Enum.to_list()
|> tabulate_results
end
defp tabulate_results(winners, acc \\ %{})
defp tabulate_results([], acc) do
ways = acc |> Map.values() |> Enum.sum()
acc
|> Enum.reduce([], fn {k, v}, a -> [{k, Float.round(100 * v / ways, 2)} | a] end)
|> Enum.sort_by(&elem(&1, 1), &>=/2)
end
defp tabulate_results([[] | t], acc), do: tabulate_results(t, acc)
defp tabulate_results([h | t], acc) do
{_, top_score, _} = h |> List.first()
winner_key =
h
|> Enum.filter(fn {_, s, _} -> s == top_score end)
|> Enum.map(fn {d, _, _} -> d end)
|> Enum.join("=")
tabulate_results(t, Map.update(acc, winner_key, 1, fn s -> s + 1 end))
end
defp do_ranking(hands, [], _, deck) do
hands
|> Enum.map(&normalize_hand/1)
|> Enum.map(fn h -> best5ofpile(h, deck) end)
|> Enum.reject(fn {_, _, hd} -> hd == :error end)
|> Enum.sort_by(&elem(&1, 1))
end
defp do_ranking(hands, community, :any, deck) do
{_, common} = normalize_hand(community)
hands
|> Enum.map(fn h ->
{desc, cards} = normalize_hand(h)
{desc, cards ++ common}
end)
|> do_ranking([], :any, deck)
end
defp do_ranking(hands, community, :omaha, deck) do
{_, common} = normalize_hand(community)
common_poss = comb(3, common)
hands
|> Enum.map(fn h ->
{desc, cards} = normalize_hand(h)
[best | _] =
common_poss
|> build_full(comb(2, cards))
|> Enum.map(fn p -> {desc, p} end)
|> do_ranking([], :omaha, deck)
best
end)
|> Enum.sort_by(&elem(&1, 1))
end
defp best5ofpile({desc, pile}, which) do
res =
comb(5, pile)
|> Enum.map(fn h -> Mola.Poker5High.rank_tuple({desc, h}, which) end)
|> Enum.reject(fn h -> h == :error end)
|> Enum.sort_by(&elem(&1, 1))
case res do
[best | _] -> best
[] -> {desc, 1_000_000, :error}
end
end
defp normalize_hand(full) when not is_tuple(full), do: normalize_hand({"placeholder", full})
defp normalize_hand({_, hand} = full) when is_list(hand), do: full
defp normalize_hand({desc, hand}) when is_binary(hand) do
{desc, read_cards(String.graphemes(hand), [])}
end
defp read_cards(cards, acc)
defp read_cards([], acc), do: Enum.reverse(acc)
defp read_cards([" " | t], acc), do: read_cards(t, acc)
defp read_cards([c | t], acc) when byte_size(c) > 1,
do: read_cards(t, [Mola.Unicard.tomola(c) | acc])
defp read_cards([r | t], acc) do
[s | rest] = t
read_cards(rest, [{r, s} | acc])
end
defp comb(0, _), do: [[]]
defp comb(_, []), do: []
defp comb(m, [h | t]) do
for(l <- comb(m - 1, t), do: [h | l]) ++ comb(m, t)
end
defp build_full(first, second, acc \\ [])
defp build_full([], _, acc), do: acc
defp build_full(_, [], acc), do: acc
defp build_full([h | t], all, acc), do: build_full(t, all, acc ++ build_item(all, h, []))
defp build_item([], _, acc), do: acc
defp build_item(_, [], acc), do: acc
defp build_item([h | t], i, acc), do: build_item(t, i, acc ++ [h ++ i])
end
|
lib/mola.ex
| 0.736401 | 0.630514 |
mola.ex
|
starcoder
|
defmodule Ace.HTTP.Channel do
@moduledoc """
Reference to a single HTTP request/response exchange, within the context of a connection.
- With HTTP/2 a channel corresponds to a single stream.
- With HTTP/1.1 pipelining a single connection can support multiple channels.
The channel struct provides a normalised structure regardless of which version of the protocol is used.
A channel struct also contains all information about the connection.
- TODO consider calling this exchange instead of channel.
- TODO add functions like `cleartext?` `http_version?` `transport_version` that pull information from socket.
"""
@type t :: %__MODULE__{
endpoint: pid,
id: integer,
socket: Ace.Socket.t()
}
@enforce_keys [
:endpoint,
:id,
:socket
]
defstruct @enforce_keys
@doc """
Monitor the process managing the local endpoint of the connection containing a channel.
"""
@spec monitor_endpoint(t()) :: reference()
def monitor_endpoint(%__MODULE__{endpoint: endpoint}) do
Process.monitor(endpoint)
end
@doc """
Send a list of message parts over a HTTP channel.
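  ## Example
      # Illustrative sketch: `channel` as received by an `Ace.HTTP.Worker`
      # callback and `response` a complete `Raxx.Response`.
      {:ok, channel} = Ace.HTTP.Channel.send(channel, [response])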
"""
@spec send(t(), [Raxx.part()]) :: {:ok, t()} | {:error, :connection_closed}
def send(channel, parts)
def send(channel, []) do
{:ok, channel}
end
def send(channel = %__MODULE__{}, parts) do
GenServer.call(channel.endpoint, {:send, channel, parts})
catch
# NOTE `GenServer.call` exits if the target process has already exited.
# A connection closing will also stop workers (this process).
# However this case can still occur due to race conditions.
:exit, {:noproc, _} ->
{:error, :connection_closed}
    # A timeout will occur when the endpoint process has become inactive.
    # In that case the worker assumes the connection has closed.
    # If the endpoint process returns to normal working
    # it will have a DOWN message from the worker and will handle that appropriately.
:exit, {:timeout, _} ->
{:error, :connection_closed}
end
@doc """
Send an acknowledgement that the `Ace.HTTP.Worker` has received a request part
"""
@spec ack(t()) :: :ok
def ack(channel = %__MODULE__{}) do
Kernel.send(channel.endpoint, :ack)
:ok
end
end
|
lib/ace/http/channel.ex
| 0.732496 | 0.434821 |
channel.ex
|
starcoder
|
defmodule ActivestorageEx.Variant do
@moduledoc """
Image blobs can have variants that are the result of a set of transformations
applied to the original. These variants are used to create thumbnails,
fixed-size avatars, or any other derivative image from the original.
Variants rely on ImageMagick for the actual transformations.
## Examples
Variants are a struct with the following fields:
```
%Variant{
key: String,
content_type: String,
filename: String,
transformations: [Map]
}
```
"""
@enforce_keys [:key, :content_type, :filename, :transformations]
defstruct key: nil,
content_type: nil,
filename: nil,
transformations: nil
@web_image_content_types ["image/png", "image/jpeg", "image/jpg", "image/gif"]
alias ActivestorageEx.Blob
alias ActivestorageEx.Variant
@doc """
Returns an identifying key for a given `%Blob{}` and set of transformations
## Parameters
- `blob`: A `%Blob{}` representing a root image. Presumably from the database
- `transformations`: An ordered list of maps that represent valid ImageMagick commands
## Examples
Generating a key from a blob and list of transformations
```
blob = %Blob{}
transformations = [%{resize: "50x50^"}, %{extent: "50x50"}]
Variant.key(blob, transformations) # variant/blob_key/variant_key
```
"""
def key(%Blob{} = blob, transformations) do
variant_key = ActivestorageEx.Variation.key(transformations)
hashed_variant_key = :crypto.hash(:sha256, variant_key) |> Base.encode16(case: :lower)
"variants/#{blob.key}/#{hashed_variant_key}"
end
@doc """
Returns an identifying key for a given `%Variant{}`.
Delegates to `Variant.key(%Blob{}, transformations)`,
handling the transformations automatically
## Parameters
- `variant`: A `%Variant{}` created from a blob and list of transformations
## Examples
Generating a key automatically from a variant
```
variant = %Variant{}
Variant.key(variant) # variant/blob_key/variant_key
```
"""
def key(%Variant{} = variant) do
blob = struct(Blob, Map.from_struct(variant))
key(blob, variant.transformations)
end
@doc """
Returns a variant matching `blob` and `transformations`
or creates one if it doesn't exist
## Parameters
- `blob`: A `%Blob{}` representing a root image. Presumably from the database
- `transformations`: An ordered list of maps that represent valid ImageMagick commands
## Examples
Retrieve a variant from a blob and list of transformations
```
blob = %Blob{}
transformations = [%{resize: "50x50^"}, %{extent: "50x50"}]
Variant.processed(blob, transformations) # %Variant{}
```
"""
def processed(%Blob{} = blob, transformations) do
variant = struct(Variant, Map.put(Map.from_struct(blob), :transformations, transformations))
case processed?(variant) do
true -> variant
_ -> process(variant)
end
end
@doc """
Returns a URL with the information required to represent a variant,
taking the current file service into account
## Parameters
- `blob`: A `%Blob{}` representing a root image. Presumably from the database
- `transformations`: An ordered list of maps that represent valid ImageMagick commands
## Examples
Retrieve a service URL from a blob and list of transformations
```
blob = %Blob{}
transformations = [%{resize: "50x50^"}, %{extent: "50x50"}]
Variant.service_url(blob, transformations) # /active_storage/...
```
"""
def service_url(%Blob{} = blob, transformations) do
key(blob, transformations)
|> ActivestorageEx.service().url(%{
content_type: content_type(blob),
filename: filename(blob),
token_duration: Application.get_env(:activestorage_ex, :link_expiration)
})
end
@doc """
Returns a URL with the information required to represent a variant,
taking the current file service into account.
  Delegates to `Variant.service_url(%Blob{}, transformations)`
## Parameters
- `variant`: A `%Variant{}` created from a blob and list of transformations
## Examples
Retrieve a service URL from a variant directly
```
variant = %Variant{}
Variant.service_url(variant) # /active_storage/...
```
"""
def service_url(%Variant{} = variant) do
blob = struct(Blob, Map.from_struct(variant))
service_url(blob, variant.transformations)
end
defp content_type(%Blob{} = blob) do
cond do
invalid_image_content_type(blob) -> "image/png"
true -> blob.content_type
end
end
defp filename(%Blob{} = blob) do
cond do
invalid_image_content_type(blob) -> Path.basename(blob.filename) <> ".png"
true -> blob.filename
end
end
defp processed?(%Variant{} = variant) do
key(variant) |> ActivestorageEx.service().exist?()
end
defp process(%Variant{} = variant) do
key = variant.key
filepath = key <> Path.extname(variant.filename)
image = %Mogrify.Image{path: filepath} |> Mogrify.create()
tempfile_location = image.path
with {:ok, _} <- download_image(key, tempfile_location) do
image
|> transform(variant)
|> format(variant)
|> upload(variant)
end
remove_temp_file(tempfile_location)
variant
end
defp download_image(key, filepath) do
ActivestorageEx.service().stream_download(key, filepath)
end
defp transform(image, variant) do
ActivestorageEx.Variation.transform(variant.transformations, image.path)
end
defp format(image, variant) do
cond do
invalid_image_content_type(variant) -> image |> Mogrify.format("png")
true -> image
end
end
defp upload(image, variant) do
ActivestorageEx.service().upload(image, key(variant))
end
defp remove_temp_file(filepath) do
File.rm(filepath)
end
defp invalid_image_content_type(variant) do
!Enum.member?(@web_image_content_types, variant.content_type)
end
end
|
lib/variant.ex
| 0.932176 | 0.9463 |
variant.ex
|
starcoder
|
defmodule Hypex.Register do
@moduledoc """
This module defines the behaviour required by all internal Hypex register
structures.
Assuming all of the function callbacks defined in this module are implemented
correctly, it should be possible to use as an implementation inside a Hypex.
This makes it possible to provide custom implementations of the underlying
register without having to modify the actual source of Hypex.
"""
@doc """
Invoked to initialize a set of registers.
`width` is the desired width of the registers and should be used to determine
how large the register set should be. Calls to `init/1` should always return
a new register set.
"""
@callback init(width :: number) :: register :: Register.t
@doc """
Invoked after operating on registers on a bit level.
This function will receive a list of bits as created by the `to_list/1` callback.
The result of calling this should return a register set in the same form as when
first being initialized.
"""
@callback from_list([ bit :: number ]) :: register :: Register.t
@doc """
Invoked when operating on registers on a bit level.
This function should operate in tandem with `from_list/1` to convert between
a register set and a list of bits.
"""
@callback to_list(register :: Register.t) :: [ bit :: number ]
@doc """
Invoked to retrieve a specific bit register.
`idx` refers to the head of the hashes value, and `width` refers to the width
of the register. The `get_value/3` callback should use these values when finding
the required register.
"""
@callback get_value(register :: Register.t, idx :: number, width :: number) :: result :: number
@doc """
Invoked to set a bit register with a given value.
Similar to the `get_value/3` callback, we supply `idx` and `width` to allow the
callback to determine where the value should be written.
"""
@callback set_value(register :: Register.t, idx :: number, width :: number, value :: number) :: register :: Register.t
@doc """
Invoked when there's a need to iterate/accumulate a register.
"""
@callback reduce(register :: Register.t, width :: number, acc :: any, (number, any -> any)) :: acc :: any
@typedoc """
Register implementations currently available
"""
@opaque t :: :array.array(number) | bitstring
end
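# A minimal sketch of a conforming implementation, backed by a map from
# register index to value. The module name Hypex.MapRegister is hypothetical
# (not part of Hypex), and to_list/1 / from_list/1 are simplified here to
# round-trip stored values rather than raw bits.
defmodule Hypex.MapRegister do
  @behaviour Hypex.Register
  # registers default to 0 on read, so an empty map suffices
  def init(_width), do: %{}
  def from_list(values) do
    values
    |> Enum.with_index()
    |> Map.new(fn {value, idx} -> {idx, value} end)
  end
  def to_list(register) do
    register
    |> Enum.sort()
    |> Enum.map(fn {_idx, value} -> value end)
  end
  def get_value(register, idx, _width), do: Map.get(register, idx, 0)
  def set_value(register, idx, _width, value), do: Map.put(register, idx, value)
  def reduce(register, _width, acc, fun) do
    Enum.reduce(register, acc, fn {_idx, value}, acc -> fun.(value, acc) end)
  end
end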
|
lib/hypex/register.ex
| 0.847353 | 0.628094 |
register.ex
|
starcoder
|
defmodule Ports.Rumble.Board.Common do
alias Harbor.Utils.SimpleId
alias Ports.Rumble.Tile
alias Ports.Rumble.Group
@tile_width 100
def tile_width(), do: @tile_width
@tile_height 130
def tile_height(), do: @tile_height
@spec can_snap_to(Tile.t(), Tile.t(), 0 | 1, any()) :: boolean()
def can_snap_to(tile, snap_to, snap_side, state) do
tiles = state.milestone.tiles
groups = state.milestone.groups
group = Map.get(groups, snap_to.group_id)
case group.group_type do
:set ->
colors =
Enum.reduce(group.children, [], fn x, acc ->
tile = Map.get(tiles, x)
[tile.data.color | acc]
end)
tile.data.value == snap_to.data.value and not Enum.member?(colors, tile.data.color)
:run ->
if tile.data.color == snap_to.data.color do
if snap_side == 0,
do: tile.data.value == snap_to.data.value - 1,
else: tile.data.value == snap_to.data.value + 1
else
false
end
end
end
@spec can_create_group(Tile.t(), Tile.t()) :: {Group.group_type(), boolean()}
def can_create_group(tile, snap_to) do
if tile.data.color == snap_to.data.color do
# Only snap if the tiles are consecutive values
{:run, abs(tile.data.value - snap_to.data.value) == 1}
else
# They must be the same exact number
{:set, tile.data.value == snap_to.data.value}
end
end
def create_group(id, children, type) do
%Group{
id: id,
children: children,
group_type: type
}
end
def set_iter_tiles(tiles, func) do
Enum.reduce(Map.values(tiles), %{}, fn x, acc ->
new = func.(x)
Map.put(acc, x.id, new)
end)
end
def get_group_influencer(group) do
count = Enum.count(group.children)
    case rem(count, 2) do
0 -> {:even, Enum.at(group.children, trunc(count / 2 - 1))}
_ -> {:odd, Enum.at(group.children, trunc((count + 1) / 2 - 1))}
end
end
def get_group_center(group, tiles) do
case get_group_influencer(group) do
{:even, id} ->
tile = Map.get(tiles, id)
{tile.x + @tile_width, tile.y + @tile_height}
{:odd, id} ->
tile = Map.get(tiles, id)
{tile.x + @tile_width / 2, tile.y + @tile_height}
end
end
end
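# Quick illustration of the grouping rules above (tile shapes are the minimal
# maps the functions actually touch; real tiles are %Ports.Rumble.Tile{}):
#
#   red5  = %{data: %{color: :red, value: 5}}
#   red6  = %{data: %{color: :red, value: 6}}
#   blue5 = %{data: %{color: :blue, value: 5}}
#   Ports.Rumble.Board.Common.can_create_group(red5, red6)   #=> {:run, true}
#   Ports.Rumble.Board.Common.can_create_group(red5, blue5)  #=> {:set, true}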
|
harbor/lib/ports/rumble/board/common.ex
| 0.721645 | 0.427636 |
common.ex
|
starcoder
|
defmodule Dynamo.Loader do
@moduledoc """
This module is responsible for managing code reloading used
in development environments in Dynamo.
The reloader is enabled per Elixir process, so each process
which requires reloading semantics must explicitly enabled
it with `Dynamo.Loader.enable`.
The `Dynamo.Loader` is part of the Dynamo OTP application
and one is started per node, regardless the number of
dynamos in the node.
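  ## Example
      # Illustrative development-time flow; enable/0 returns :error when the
      # loader server is not running.
      Dynamo.Loader.enable
      Dynamo.Loader.conditional_purge  #=> :ok or :purged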
"""
use GenServer.Behaviour
@doc """
Enables the reloader in the given process and returns
`:ok`. In case the reloader server is disabled, it works
as noop and returns `:error`.
"""
def enable do
if Process.whereis(__MODULE__) do
Process.put(:elixir_ensure_compiled, true)
Process.flag(:error_handler, Dynamo.Loader.ErrorHandler)
:ok
else
:error
end
end
@doc """
  Appends the given `paths` to the loader, starting the `Dynamo.Loader`
  server if it is not yet running. Usually called internally by Dynamo.
  The given `paths` must be expanded.
"""
def append_paths(paths) do
unless Process.whereis(__MODULE__) do
{ :module, _ } = Code.ensure_loaded(Dynamo.Loader.ErrorHandler)
Dynamo.Supervisor.start_child(Dynamo.Supervisor, __MODULE__, [])
end
:gen_server.cast(__MODULE__, { :append_paths, paths })
end
@doc """
Register a callback that is invoked every time modules are purged.
"""
def on_purge(fun) when is_function(fun) do
:gen_server.cast(__MODULE__, { :on_purge, fun })
end
@doc """
  Tries to load the missing module. It returns `:ok` if a file
  for the given module could be found and `:notfound` otherwise.
Note it does not actually ensure the module was loaded (only
that the related file was required).
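  ## Example
      # Illustrative; MyApp.User stands in for any module under a
      # registered path.
      Dynamo.Loader.load_missing(MyApp.User)  #=> :ok or :notfound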
"""
def load_missing(module) do
case atom_to_binary(module) do
"Elixir." <> _ ->
path = Mix.Utils.underscore(module) <> ".ex"
dirs = :gen_server.call(__MODULE__, :paths)
dir = Enum.find dirs, fn(dir) -> File.regular?(Path.join(dir, path)) end
if dir do
file = Path.join(dir, path)
tuples =
try do
Code.require_file(file) || []
catch
kind, reason ->
stacktrace = System.stacktrace
Code.unload_files [file]
:erlang.raise(kind, reason, stacktrace)
end
modules = for { mod, _ } <- tuples, do: mod
:gen_server.cast(__MODULE__, { :loaded, file, modules })
:ok
else
:notfound
end
_ ->
:notfound
end
end
@doc """
Checks if any of the `.ex` files in the registered paths
were updated and if so, purges all automatically compiled
and loaded modules, and "unrequire" the relevant files.
"""
def conditional_purge do
case :gen_server.call(__MODULE__, :conditional_purge) do
:ok -> :ok
{ :purged, callbacks } ->
for callback <- callbacks, do: callback.()
:purged
end
end
## Backend
defmodule Config do
defstruct [loaded_modules: [], loaded_files: [], paths: nil, updated_at: { { 1970, 1, 1 }, { 0, 0, 0 } }, on_purge: []]
end
@doc false
def start_link do
:gen_server.start({ :local, __MODULE__ }, __MODULE__, [], [])
end
@doc false
def stop do
:gen_server.call(__MODULE__, :stop)
end
@doc false
def init(paths) do
{ :ok, %Config{paths: paths} }
end
@doc false
def handle_call(:paths, _from, %Config{paths: paths} = config) do
{ :reply, paths, config }
end
def handle_call(:conditional_purge, _from, %Config{paths: paths, updated_at: updated_at} = config) do
last_modified = last_modified(paths, updated_at)
if last_modified == updated_at do
{ :reply, :ok, config }
else
purge_all(config)
unload_all(config)
{ :reply, { :purged, Enum.reverse(config.on_purge) },
%Config{config | loaded_modules: [], loaded_files: [], updated_at: last_modified} }
end
end
def handle_call(:stop, _from, config) do
{ :stop, :normal, :ok, config }
end
def handle_call(arg, from, config) do
super(arg, from, config)
end
@doc false
def handle_cast({ :loaded, file, modules }, %Config{loaded_modules: loaded_modules, loaded_files: loaded_files} = config) do
{ :noreply, %Config{config | loaded_modules: modules ++ loaded_modules, loaded_files: [file|loaded_files]} }
end
def handle_cast({ :on_purge, fun }, %Config{on_purge: on_purge} = config) do
{ :noreply, %Config{config | on_purge: [fun|on_purge] } }
end
def handle_cast({ :append_paths, paths }, %Config{} = config) do
updated_at = last_modified(paths, config.updated_at)
{ :noreply, %Config{config | paths: config.paths ++ paths, updated_at: updated_at} }
end
def handle_cast(arg, config) do
super(arg, config)
end
## Helpers
defp purge_all(config) do
Enum.each config.loaded_modules, fn(mod) ->
:code.purge(mod)
:code.delete(mod)
end
end
defp unload_all(config) do
Code.unload_files config.loaded_files
end
defp last_modified(paths, updated_at) do
Enum.reduce paths, updated_at, fn(path, acc) ->
Enum.reduce Path.wildcard("#{path}/**/*.ex"), acc, &max_last_modified(&1, &2)
end
end
defp max_last_modified(path, latest) do
case File.stat(path) do
{ :ok, %File.Stat{mtime: mtime} } -> max(latest, mtime)
{ :error, _ } -> latest
end
end
end
|
lib/dynamo/loader.ex
| 0.783864 | 0.424859 |
loader.ex
|
starcoder
|
defmodule AdventOfCode2019.SpacePolice do
@moduledoc """
Day 11 — https://adventofcode.com/2019/day/11
"""
@spec part1(Enumerable.t()) :: integer
def part1(in_stream) do
in_stream
|> paint(0)
|> map_size()
end
@spec part2(Enumerable.t()) :: String.t()
def part2(in_stream) do
in_stream
|> paint(1)
|> draw()
|> String.trim_trailing()
end
@spec paint(Enumerable.t(), integer) :: map
def paint(in_stream, start) do
in_stream
|> Stream.map(&AdventOfCode2019.IntcodeComputer.load_program/1)
|> Enum.take(1)
|> List.first()
|> AdventOfCode2019.PaintingRobot.paint(start)
end
@spec draw(map) :: String.t()
defp draw(hull) do
{{min_x, max_x}, {min_y, max_y}} =
Map.keys(hull)
|> Enum.reduce({{0, 0}, {0, 0}}, &find_limits/2)
draw(min_x, max_y, {{min_x, max_x}, {min_y - 1, max_y}}, hull, "")
end
@type pos :: {integer, integer}
@spec find_limits(pos, {pos, pos}) :: {pos, pos}
defp find_limits({x, y}, {{min_x, max_x}, {min_y, max_y}}),
do: {{min(min_x, x), max(max_x, x)}, {min(min_y, y), max(max_y, y)}}
@spec draw(integer, integer, {pos, pos}, map, String.t()) :: String.t()
defp draw(_x, min_y, {_limits_x, {min_y, _max_y}}, _hull, display), do: display
defp draw(max_x, y, {{min_x, max_x}, _limits_y} = limits, hull, display) do
panel = draw_panel(Map.get(hull, {max_x, y}, 0))
draw(min_x, y - 1, limits, hull, "#{display}#{panel}\n")
end
defp draw(x, y, limits, hull, display) do
panel = draw_panel(Map.get(hull, {x, y}, 0))
draw(x + 1, y, limits, hull, "#{display}#{panel}")
end
@spec draw_panel(integer) :: String.t()
defp draw_panel(0), do: "░░"
defp draw_panel(1), do: "▓▓"
end
defmodule AdventOfCode2019.PaintingRobot do
@moduledoc """
Day 11 — Painting robot — https://adventofcode.com/2019/day/11
"""
require AdventOfCode2019.IntcodeComputer
@spec paint(map, integer) :: map
def paint(program, start),
do: paint(:noop, {program, 0, 0}, start, start, {0, 0}, :up, %{}, :paint)
@type pos :: {integer, integer}
@type state :: {map, integer, integer}
@spec paint(atom, state, integer, integer, pos, atom, map, atom) :: map
defp paint(:done, _state, _input, _output, _pos, _dir, hull, _action), do: hull
defp paint(:output, state, input, output, pos, dir, hull, :paint) do
hull = Map.put(hull, pos, output)
{result, state, output} = AdventOfCode2019.IntcodeComputer.step(state, [input])
paint(result, state, input, output, pos, dir, hull, :move)
end
defp paint(:output, state, _input, output, pos, dir, hull, :move) do
{pos, dir} = move(output, pos, dir)
input = Map.get(hull, pos, 0)
{result, state, output} = AdventOfCode2019.IntcodeComputer.step(state, [input])
paint(result, state, input, output, pos, dir, hull, :paint)
end
defp paint(_result, state, input, _output, pos, dir, hull, action) do
{result, state, output} = AdventOfCode2019.IntcodeComputer.step(state, [input])
paint(result, state, input, output, pos, dir, hull, action)
end
@spec move(integer, pos, atom) :: {pos, atom}
defp move(0, {x, y}, :up), do: {{x - 1, y}, :left}
defp move(0, {x, y}, :left), do: {{x, y - 1}, :down}
defp move(0, {x, y}, :down), do: {{x + 1, y}, :right}
defp move(0, {x, y}, :right), do: {{x, y + 1}, :up}
defp move(1, {x, y}, :up), do: {{x + 1, y}, :right}
defp move(1, {x, y}, :right), do: {{x, y - 1}, :down}
defp move(1, {x, y}, :down), do: {{x - 1, y}, :left}
defp move(1, {x, y}, :left), do: {{x, y + 1}, :up}
end
|
lib/advent_of_code_2019/day11.ex
| 0.772574 | 0.539105 |
day11.ex
|
starcoder
|
defmodule Figlet.Linebreaker do
@moduledoc """
This module contains the logic that determines where to put linebreaks when
converting a charlist to a specific FIGlet font representation.
It relies on lists of integer codepoints (a.k.a. charlists) instead of utf8
encoded string binaries (a.k.a. strings) to bypass any encoding related
confusion.
"""
alias Figlet.Font
# # these characters may be _replaced_ by a line break
# @vanishing_breakables [?\s]
# # a line break may follow these characters, but they are not replaced
# @breakables [?-]
@breakables %{
?\s => :replace,
?- => :keep
}
@doc """
Any newlines included in the input `charlist` will cause a hard break.
- `charlist` is a character list containing integer codepoints
- `font` a `%Figlet.Font{}` struct
- `width` an integer representing the character width of the terminal
- `opts` is a keyword list of options.
## Options
`:overflow` - `:trim`, `:break`
## Examples
iex> Figlet.Linebreaker.split('this is a test', font, 4)
['this', 'is a', 'test']
"""
@spec split(charlist(), font :: Font.t(), width :: integer(), opts :: keyword()) ::
{:ok, list()} | {:error, any()}
  def split(charlist, font, width, opts \\ [])
  def split(charlist, _font, width, _opts)
      when is_list(charlist) and is_integer(width) and width > 0 do
    # TODO: measure the charlist against the font (e.g. via measure/2) and
    # break it into lines; stubbed to return no lines for now.
    {:ok, []}
  end
  defp parse([], acc), do: acc
  # TODO: not yet implemented; passes the accumulator through so the
  # function stays total.
  defp parse([_h | _tail], acc), do: acc
# turn the charlist into tuples: {codepoint, length}
def measure(charlist, font) do
charlist
|> Enum.map(fn codepoint ->
      # NOTE: the original looked up char_map[84] (?T) for every codepoint,
      # which appears to be leftover debugging; use the actual codepoint.
      {codepoint, font.char_map[codepoint].width}
end)
end
@doc """
  Splits a charlist into word chunks at breakable codepoints. Breakables
  mapped to `:replace` (spaces) are dropped at the break; breakables mapped
  to `:keep` (hyphens) stay attached to the preceding chunk:
      'this is a test'         -> ['this', 'is', 'a', 'test']
      'this is a big-old test' -> ['this', 'is', 'a', 'big-', 'old', 'test']
"""
  def chunkify(charlist, _breakables) do
    # TODO: split at breakables as described above; this stub only collects
    # plain codepoints and ignores measured {codepoint, length} tuples.
    charlist
    |> Enum.reduce([], fn
      {_codepoint, _length}, acc -> acc
      codepoint, acc -> [codepoint | acc]
    end)
  end
end
|
lib/figlet/linebreaker.ex
| 0.73782 | 0.486392 |
linebreaker.ex
|
starcoder
|
defmodule Behaviour do
@moduledoc """
Mechanism for handling behaviours.
This module is deprecated. Instead of `defcallback/1` and
`defmacrocallback/1`, the `@callback` and `@macrocallback`
module attributes can be used (respectively). See the
documentation for `Module` for more information on these
attributes.
Instead of `MyModule.__behaviour__(:callbacks)`,
`MyModule.behaviour_info(:callbacks)` can be used.
"""
@moduledoc deprecated: "Use @callback and @macrocallback attributes instead"
@doc """
Defines a function callback according to the given type specification.
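  For example (illustrative):
      defcallback init(args :: term) :: {:ok, state :: term}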
"""
defmacro defcallback(spec) do
do_defcallback(:def, split_spec(spec, quote(do: term)))
end
@doc """
Defines a macro callback according to the given type specification.
"""
defmacro defmacrocallback(spec) do
do_defcallback(:defmacro, split_spec(spec, quote(do: Macro.t())))
end
defp split_spec({:when, _, [{:::, _, [spec, return]}, guard]}, _default) do
{spec, return, guard}
end
defp split_spec({:when, _, [spec, guard]}, default) do
{spec, default, guard}
end
defp split_spec({:::, _, [spec, return]}, _default) do
{spec, return, []}
end
defp split_spec(spec, default) do
{spec, default, []}
end
defp do_defcallback(kind, {spec, return, guards}) do
case Macro.decompose_call(spec) do
{name, args} ->
do_callback(kind, name, args, return, guards)
_ ->
raise ArgumentError, "invalid syntax in #{kind}callback #{Macro.to_string(spec)}"
end
end
defp do_callback(kind, name, args, return, guards) do
fun = fn
{:::, _, [left, right]} ->
ensure_not_default(left)
ensure_not_default(right)
left
other ->
ensure_not_default(other)
other
end
:lists.foreach(fun, args)
spec =
quote do
unquote(name)(unquote_splicing(args)) :: unquote(return) when unquote(guards)
end
case kind do
:def -> quote(do: @callback(unquote(spec)))
:defmacro -> quote(do: @macrocallback(unquote(spec)))
end
end
defp ensure_not_default({:\\, _, [_, _]}) do
raise ArgumentError, "default arguments \\\\ not supported in defcallback/defmacrocallback"
end
defp ensure_not_default(_), do: :ok
@doc false
defmacro __using__(_) do
quote do
warning =
"the Behaviour module is deprecated. Instead of using this module, " <>
"use the @callback and @macrocallback module attributes. See the " <>
"documentation for Module for more information on these attributes"
IO.warn(warning)
@doc false
def __behaviour__(:callbacks) do
__MODULE__.behaviour_info(:callbacks)
end
def __behaviour__(:docs) do
{:docs_v1, _, :elixir, _, _, _, docs} = Code.fetch_docs(__MODULE__)
for {{kind, name, arity}, line, _, doc, _} <- docs, kind in [:callback, :macrocallback] do
case kind do
:callback -> {{name, arity}, line, :def, __behaviour__doc_value(doc)}
:macrocallback -> {{name, arity}, line, :defmacro, __behaviour__doc_value(doc)}
end
end
end
defp __behaviour__doc_value(:none), do: nil
defp __behaviour__doc_value(:hidden), do: false
defp __behaviour__doc_value(%{"en" => doc}), do: doc
import unquote(__MODULE__)
end
end
end
|
lib/elixir/lib/behaviour.ex
| 0.775009 | 0.40751 |
behaviour.ex
|
starcoder
|
defmodule Mix.Dep do
@moduledoc false
@doc """
  The Mix.Dep struct keeps information about your project dependencies.
It contains:
* `scm` - a module representing the source code management tool (SCM)
operations
* `app` - the application name as an atom
* `requirement` - a binary or regex with the dependency's requirement
* `status` - the current status of the dependency, check
`Mix.Dep.format_status/1` for more info
* `opts` - the options given by the developer
* `deps` - dependencies of this dependency
* `top_level` - true if dependency was defined in the top-level project
* `manager` - the project management, possible values:
`:rebar` | `:mix` | `:make` | `nil`
* `from` - path to the file where the dependency was defined
* `extra` - a slot for adding extra configuration based on the manager;
the information on this field is private to the manager and should not be
relied on
  A dependency is in one of two states: loaded or unloaded.
When a dependency is unloaded, it means Mix only parsed its specification
and made no attempt to actually load the dependency or validate its
status. When the dependency is loaded, it means Mix attempted to fetch,
load and validate it, the status is set in the status field.
Furthermore, in the `opts` fields, Mix keeps some internal options, which
can be accessed by SCMs:
* `:app` - the application name
* `:dest` - the destination path for the dependency
* `:lock` - the lock information retrieved from mix.lock
* `:build` - the build path for the dependency
"""
defstruct scm: nil, app: nil, requirement: nil, status: nil, opts: [],
deps: [], top_level: false, extra: [], manager: nil, from: nil
@type t :: %__MODULE__{
scm: module,
app: atom,
requirement: String.t | Regex.t,
status: atom,
opts: Keyword.t,
top_level: boolean,
manager: :rebar | :mix | :make | nil,
from: String.t,
extra: term}
@doc """
Returns all children dependencies for the current project,
as well as the defined apps in case of umbrella projects.
The children dependencies returned by this function were
not loaded yet.
## Exceptions
This function raises an exception if any of the dependencies
provided in the project are in the wrong format.
"""
defdelegate children(), to: Mix.Dep.Loader
@doc """
Returns loaded dependencies recursively as a `Mix.Dep` struct.
## Exceptions
This function raises an exception if any of the dependencies
provided in the project are in the wrong format.
"""
def loaded(opts) do
Mix.Dep.Converger.converge(nil, nil, opts, &{&1, &2, &3}) |> elem(0)
end
@doc """
Receives a list of dependency names and returns loaded `Mix.Dep`s.
Logs a message if the dependency could not be found.
## Exceptions
This function raises an exception if any of the dependencies
provided in the project are in the wrong format.
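  ## Example
      # Illustrative; :ecto stands in for any declared dependency and the
      # opts are forwarded to the loader.
      Mix.Dep.loaded_by_name([:ecto], env: Mix.env)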
"""
def loaded_by_name(given, all_deps \\ nil, opts) do
all_deps = all_deps || loaded(opts)
# Ensure all apps are atoms
apps = to_app_names(given)
# We need to keep the order of deps, loaded/1 properly orders them
deps = Enum.filter(all_deps, &(&1.app in apps))
Enum.each apps, fn(app) ->
unless Enum.any?(all_deps, &(&1.app == app)) do
Mix.raise "Unknown dependency #{app} for environment #{Mix.env}"
end
end
deps
end
@doc """
Runs the given `fun` inside the given dependency project by
changing the current working directory and loading the given
project onto the project stack.
  It expects a loaded dependency as argument.
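  ## Example
      # Illustrative; :my_dep is a placeholder for a declared dependency.
      [dep | _] = Mix.Dep.loaded_by_name([:my_dep], env: Mix.env)
      Mix.Dep.in_dependency(dep, fn _mixfile ->
        Mix.Task.run "compile"
      end)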
"""
def in_dependency(dep, post_config \\ [], fun)
def in_dependency(%Mix.Dep{app: app, opts: opts, scm: scm}, config, fun) do
# Set the app_path to be the one stored in the dependency.
# This is important because the name of application in the
# mix.exs file can be different than the actual name and we
# choose to respect the one in the mix.exs
config =
Keyword.merge(Mix.Project.deps_config, config)
|> Keyword.put(:app_path, opts[:build])
|> Keyword.put(:build_scm, scm)
env = opts[:env] || :prod
old_env = Mix.env
try do
Mix.env(env)
Mix.Project.in_project(app, opts[:dest], config, fun)
after
Mix.env(old_env)
end
end
@doc """
Formats the status of a dependency.
"""
def format_status(%Mix.Dep{status: {:ok, _vsn}}),
do: "ok"
def format_status(%Mix.Dep{status: {:noappfile, path}}),
do: "could not find an app file at #{Path.relative_to_cwd(path)}. " <>
"This may happen if the dependency was not yet compiled, " <>
"or you specified the wrong application name in your deps, " <>
"or the dependency indeed has no app file (then you can pass app: false as option)"
def format_status(%Mix.Dep{status: {:invalidapp, path}}),
do: "the app file at #{Path.relative_to_cwd(path)} is invalid"
def format_status(%Mix.Dep{status: {:invalidvsn, vsn}}),
do: "the app file contains an invalid version: #{inspect vsn}"
def format_status(%Mix.Dep{status: {:nomatchvsn, vsn}, requirement: req}),
do: "the dependency does not match the requirement #{inspect req}, got #{inspect vsn}"
def format_status(%Mix.Dep{status: {:lockmismatch, _}}),
do: "lock mismatch: the dependency is out of date (run `mix deps.get` to fetch locked version)"
def format_status(%Mix.Dep{status: :lockoutdated}),
do: "lock outdated: the lock is outdated compared to the options in your mixfile"
def format_status(%Mix.Dep{status: :nolock}),
do: "the dependency is not locked"
def format_status(%Mix.Dep{status: :compile}),
do: "the dependency build is outdated, please run `#{mix_env_var}mix deps.compile`"
def format_status(%Mix.Dep{app: app, status: {:divergedreq, other}} = dep) do
"the dependency #{app}\n" <>
"#{dep_status(dep)}" <>
"\n does not match the requirement specified\n" <>
"#{dep_status(other)}" <>
"\n Ensure they match or specify one of the above in your deps and set `override: true`"
end
def format_status(%Mix.Dep{app: app, status: {:divergedonly, other}} = dep) do
recommendation =
if Keyword.has_key?(other.opts, :only) do
"Ensure the parent dependency specifies a superset of the child one in"
else
"Remove the :only restriction from"
end
"the dependency #{app}\n" <>
"#{dep_status(dep)}" <>
"\n does not match the environments calculated for\n" <>
"#{dep_status(other)}" <>
"\n #{recommendation} your dep"
end
def format_status(%Mix.Dep{app: app, status: {:diverged, other}} = dep) do
"different specs were given for the #{app} app:\n" <>
"#{dep_status(dep)}#{dep_status(other)}" <>
"\n Ensure they match or specify one of the above in your deps and set `override: true`"
end
def format_status(%Mix.Dep{app: app, status: {:overridden, other}} = dep) do
"the dependency #{app} in #{Path.relative_to_cwd(dep.from)} is overriding a child dependency:\n" <>
"#{dep_status(dep)}#{dep_status(other)}" <>
"\n Ensure they match or specify one of the above in your deps and set `override: true`"
end
def format_status(%Mix.Dep{status: {:unavailable, _}, scm: scm}) do
if scm.fetchable? do
"the dependency is not available, run `mix deps.get`"
else
"the dependency is not available"
end
end
def format_status(%Mix.Dep{status: {:elixirlock, _}}),
do: "the dependency was built with an out-of-date elixir version, run `#{mix_env_var}mix deps.compile`"
def format_status(%Mix.Dep{status: {:scmlock, _}}),
do: "the dependency was built with another SCM, run `#{mix_env_var}mix deps.compile`"
defp dep_status(%Mix.Dep{app: app, requirement: req, opts: opts, from: from}) do
info = {app, req, Dict.drop(opts, [:dest, :lock, :env, :build])}
"\n > In #{Path.relative_to_cwd(from)}:\n #{inspect info}\n"
end
@doc """
Checks the lock for the given dependency and update its status accordingly.
"""
def check_lock(%Mix.Dep{scm: scm, app: app, opts: opts} = dep, lock) do
if rev = lock[app] do
opts = Keyword.put(opts, :lock, rev)
end
if available?(dep) do
case scm.lock_status(opts) do
:mismatch ->
status = if rev, do: {:lockmismatch, rev}, else: :nolock
%{dep | status: status, opts: opts}
:outdated ->
# Don't include the lock in the dependency if it is outdated
%{dep | status: :lockoutdated}
:ok ->
check_manifest(%{dep | opts: opts}, opts[:build])
end
else
%{dep | opts: opts}
end
end
defp check_manifest(%{scm: scm} = dep, build_path) do
vsn = System.version
case Mix.Dep.Lock.status(build_path) do
{:ok, old_vsn, _} when old_vsn != vsn ->
%{dep | status: {:elixirlock, old_vsn}}
{:ok, _, old_scm} when old_scm != scm ->
%{dep | status: {:scmlock, old_scm}}
_ ->
dep
end
end
@doc """
Returns `true` if the dependency is ok.
"""
def ok?(%Mix.Dep{status: {:ok, _}}), do: true
def ok?(%Mix.Dep{}), do: false
@doc """
Checks if a dependency is available. Available dependencies
are the ones that can be loaded.
"""
def available?(%Mix.Dep{status: {:unavailable, _}}), do: false
def available?(%Mix.Dep{status: {:overridden, _}}), do: false
def available?(%Mix.Dep{status: {:diverged, _}}), do: false
def available?(%Mix.Dep{status: {:divergedreq, _}}), do: false
def available?(%Mix.Dep{status: {:divergedonly, _}}), do: false
def available?(%Mix.Dep{}), do: true
@doc """
Formats a dependency for printing.
"""
def format_dep(%Mix.Dep{scm: scm, app: app, status: status, opts: opts}) do
version =
case status do
{:ok, vsn} when vsn != nil -> "#{vsn} "
_ -> ""
end
"#{app} #{version}(#{scm.format(opts)})"
end
@doc """
Returns all load paths for the given dependency.
Automatically derived from source paths.
"""
def load_paths(%Mix.Dep{opts: opts} = dep) do
build_path = Path.dirname(opts[:build])
Enum.map source_paths(dep), fn path ->
Path.join [build_path, Path.basename(path), "ebin"]
end
end
@doc """
Returns all source paths.
Source paths are the directories that contains ebin files for a given
dependency. All managers, except rebar, have only one source path.
"""
def source_paths(%Mix.Dep{manager: :rebar, opts: opts, extra: extra}) do
# Add root dir and all sub dirs with ebin/ directory
sub_dirs = Enum.map(extra[:sub_dirs] || [], fn path ->
Path.join(opts[:dest], path)
end)
[opts[:dest] | sub_dirs]
|> Enum.map(&Path.wildcard(&1))
|> Enum.concat
|> Enum.filter(fn p -> p |> Path.join("ebin") |> File.dir? end)
end
def source_paths(%Mix.Dep{opts: opts}) do
[opts[:dest]]
end
@doc """
Return `true` if dependency is a mix project.
"""
def mix?(%Mix.Dep{manager: manager}) do
manager == :mix
end
@doc """
Return `true` if dependency is a rebar project.
"""
def rebar?(%Mix.Dep{manager: manager}) do
manager == :rebar
end
@doc """
Return `true` if dependency is a make project.
"""
def make?(%Mix.Dep{manager: manager}) do
manager == :make
end
## Helpers
defp mix_env_var do
if Mix.env == :dev do
""
else
"MIX_ENV=#{Mix.env} "
end
end
defp to_app_names(given) do
Enum.map given, fn(app) ->
if is_binary(app), do: String.to_atom(app), else: app
end
end
end
|
lib/mix/lib/mix/dep.ex
| 0.84626 | 0.596815 |
dep.ex
|
starcoder
|
defmodule GenServer do
@moduledoc """
A behaviour module for implementing the server of a client-server relation.
A GenServer is a process like any other Elixir process and it can be used
to keep state, execute code asynchronously and so on. The advantage of using
a generic server process (GenServer) implemented using this module is that it
will have a standard set of interface functions and include functionality for
tracing and error reporting. It will also fit into a supervision tree.
## Example
The GenServer behaviour abstracts the common client-server interaction.
Developers are only required to implement the callbacks and functionality
they are interested in.
Let's start with a code example and then explore the available callbacks.
Imagine we want a GenServer that works like a stack, allowing us to push
and pop items:
defmodule Stack do
use GenServer
# Callbacks
def handle_call(:pop, _from, [h | t]) do
{:reply, h, t}
end
def handle_cast({:push, item}, state) do
{:noreply, [item | state]}
end
end
# Start the server
{:ok, pid} = GenServer.start_link(Stack, [:hello])
# This is the client
GenServer.call(pid, :pop)
#=> :hello
GenServer.cast(pid, {:push, :world})
#=> :ok
GenServer.call(pid, :pop)
#=> :world
We start our `Stack` by calling `start_link/3`, passing the module
with the server implementation and its initial argument (a list
representing the stack containing the item `:hello`). We can primarily
interact with the server by sending two types of messages. **call**
messages expect a reply from the server (and are therefore synchronous)
while **cast** messages do not.
Every time you do a `GenServer.call/3`, the client will send a message
that must be handled by the `c:handle_call/3` callback in the GenServer.
A `cast/2` message must be handled by `c:handle_cast/2`.
## use GenServer and callbacks
There are 6 callbacks required to be implemented in a `GenServer`. By
adding `use GenServer` to your module, Elixir will automatically define
all 6 callbacks for you, leaving it up to you to implement the ones
you want to customize.
`use GenServer` also defines a `child_spec/1` function, allowing the
defined module to be put under a supervision tree. The generated
`child_spec/1` can be customized with the following options:
* `:id` - the child specification id, defaults to the current module
* `:start` - how to start the child process (defaults to calling `__MODULE__.start_link/1`)
* `:restart` - when the child should be restarted, defaults to `:permanent`
* `:shutdown` - how to shut down the child
For example:
use GenServer, restart: :transient, shutdown: 10_000
See the `Supervisor` docs for more information.
## Name Registration
Both `start_link/3` and `start/3` support the `GenServer` to register
a name on start via the `:name` option. Registered names are also
automatically cleaned up on termination. The supported values are:
* an atom - the GenServer is registered locally with the given name
using `Process.register/2`.
* `{:global, term}`- the GenServer is registered globally with the given
term using the functions in the [`:global` module](http://www.erlang.org/doc/man/global.html).
* `{:via, module, term}` - the GenServer is registered with the given
mechanism and name. The `:via` option expects a module that exports
`register_name/2`, `unregister_name/1`, `whereis_name/1` and `send/2`.
One such example is the [`:global` module](http://www.erlang.org/doc/man/global.html) which uses these functions
for keeping the list of names of processes and their associated PIDs
that are available globally for a network of Elixir nodes. Elixir also
ships with a local, decentralized and scalable registry called `Registry`
for locally storing names that are generated dynamically.
For example, we could start and register our `Stack` server locally as follows:
# Start the server and register it locally with name MyStack
{:ok, _} = GenServer.start_link(Stack, [:hello], name: MyStack)
# Now messages can be sent directly to MyStack
GenServer.call(MyStack, :pop) #=> :hello
Once the server is started, the remaining functions in this module (`call/3`,
`cast/2`, and friends) will also accept an atom, or any `:global` or `:via`
tuples. In general, the following formats are supported:
* a `pid`
* an `atom` if the server is locally registered
* `{atom, node}` if the server is locally registered at another node
* `{:global, term}` if the server is globally registered
* `{:via, module, name}` if the server is registered through an alternative
registry
If there is an interest to register dynamic names locally, do not use
atoms, as atoms are never garbage collected and therefore dynamically
generated atoms won't be garbage collected. For such cases, you can
set up your own local registry by using the `Registry` module.
## Client / Server APIs
Although in the example above we have used `GenServer.start_link/3` and
friends to directly start and communicate with the server, most of the
time we don't call the `GenServer` functions directly. Instead, we wrap
the calls in new functions representing the public API of the server.
Here is a better implementation of our Stack module:
defmodule Stack do
use GenServer
# Client
def start_link(default) do
GenServer.start_link(__MODULE__, default)
end
def push(pid, item) do
GenServer.cast(pid, {:push, item})
end
def pop(pid) do
GenServer.call(pid, :pop)
end
# Server (callbacks)
def handle_call(:pop, _from, [h | t]) do
{:reply, h, t}
end
def handle_call(request, from, state) do
# Call the default implementation from GenServer
super(request, from, state)
end
def handle_cast({:push, item}, state) do
{:noreply, [item | state]}
end
def handle_cast(request, state) do
super(request, state)
end
end
In practice, it is common to have both server and client functions in
the same module. If the server and/or client implementations are growing
complex, you may want to have them in different modules.
## Receiving "regular" messages
The goal of a `GenServer` is to abstract the "receive" loop for developers,
automatically handling system messages, support code change, synchronous
calls and more. Therefore, you should never call your own "receive" inside
the GenServer callbacks as doing so will cause the GenServer to misbehave.
Besides the synchronous and asynchronous communication provided by `call/3`
  and `cast/2`, "regular" messages sent by functions such as `Kernel.send/2`,
`Process.send_after/4` and similar, can be handled inside the `c:handle_info/2`
callback.
`c:handle_info/2` can be used in many situations, such as handling monitor
DOWN messages sent by `Process.monitor/1`. Another use case for `c:handle_info/2`
is to perform periodic work, with the help of `Process.send_after/4`:
defmodule MyApp.Periodically do
use GenServer
def start_link do
GenServer.start_link(__MODULE__, %{})
end
def init(state) do
schedule_work() # Schedule work to be performed on start
{:ok, state}
end
def handle_info(:work, state) do
# Do the desired work here
schedule_work() # Reschedule once more
{:noreply, state}
end
defp schedule_work() do
Process.send_after(self(), :work, 2 * 60 * 60 * 1000) # In 2 hours
end
end
## Debugging with the :sys module
GenServers, as [special processes](http://erlang.org/doc/design_principles/spec_proc.html),
can be debugged using the [`:sys` module](http://www.erlang.org/doc/man/sys.html). Through various hooks, this module
allows developers to introspect the state of the process and trace
system events that happen during its execution, such as received messages,
sent replies and state changes.
Let's explore the basic functions from the
[`:sys` module](http://www.erlang.org/doc/man/sys.html) used for debugging:
* `:sys.get_state/2` - allows retrieval of the state of the process.
In the case of a GenServer process, it will be the callback module state,
as passed into the callback functions as last argument.
* `:sys.get_status/2` - allows retrieval of the status of the process.
This status includes the process dictionary, if the process is running
or is suspended, the parent PID, the debugger state, and the state of
the behaviour module, which includes the callback module state
(as returned by `:sys.get_state/2`). It's possible to change how this
status is represented by defining the optional `c:GenServer.format_status/2`
callback.
* `:sys.trace/3` - prints all the system events to `:stdio`.
* `:sys.statistics/3` - manages collection of process statistics.
* `:sys.no_debug/2` - turns off all debug handlers for the given process.
It is very important to switch off debugging once we're done. Excessive
debug handlers or those that should be turned off, but weren't, can
seriously damage the performance of the system.
* `:sys.suspend/2` - allows to suspend a process so that it only
replies to system messages but no other messages. A suspended process
can be reactivated via `:sys.resume/2`.
Let's see how we could use those functions for debugging the stack server
we defined earlier.
iex> {:ok, pid} = Stack.start_link([])
iex> :sys.statistics(pid, true) # turn on collecting process statistics
iex> :sys.trace(pid, true) # turn on event printing
iex> Stack.push(pid, 1)
*DBG* <0.122.0> got cast {push,1}
*DBG* <0.122.0> new state [1]
:ok
iex> :sys.get_state(pid)
[1]
iex> Stack.pop(pid)
*DBG* <0.122.0> got call pop from <0.80.0>
*DBG* <0.122.0> sent 1 to <0.80.0>, new state []
1
iex> :sys.statistics(pid, :get)
{:ok,
[start_time: {{2016, 7, 16}, {12, 29, 41}},
current_time: {{2016, 7, 16}, {12, 29, 50}},
reductions: 117, messages_in: 2, messages_out: 0]}
iex> :sys.no_debug(pid) # turn off all debug handlers
:ok
iex> :sys.get_status(pid)
{:status, #PID<0.122.0>, {:module, :gen_server},
[["$initial_call": {Stack, :init, 1}, # pdict
"$ancestors": [#PID<0.80.0>, #PID<0.51.0>]],
:running, # :running | :suspended
#PID<0.80.0>, # parent
[], # debugger state
[header: 'Status for generic server <0.122.0>', # module status
data: [{'Status', :running}, {'Parent', #PID<0.80.0>},
{'Logged events', []}], data: [{'State', [1]}]]]}
## Learn more
If you wish to find out more about gen servers, the Elixir Getting Started
guide provides a tutorial-like introduction. The documentation and links
in Erlang can also provide extra insight.
* [GenServer – Elixir's Getting Started Guide](http://elixir-lang.org/getting-started/mix-otp/genserver.html)
* [`:gen_server` module documentation](http://www.erlang.org/doc/man/gen_server.html)
* [gen_server Behaviour – OTP Design Principles](http://www.erlang.org/doc/design_principles/gen_server_concepts.html)
* [Clients and Servers – Learn You Some Erlang for Great Good!](http://learnyousomeerlang.com/clients-and-servers)
"""
@doc """
Invoked when the server is started. `start_link/3` or `start/3` will
block until it returns.
`args` is the argument term (second argument) passed to `start_link/3`.
Returning `{:ok, state}` will cause `start_link/3` to return
`{:ok, pid}` and the process to enter its loop.
Returning `{:ok, state, timeout}` is similar to `{:ok, state}`
except `handle_info(:timeout, state)` will be called after `timeout`
milliseconds if no messages are received within the timeout.
Returning `{:ok, state, :hibernate}` is similar to
`{:ok, state}` except the process is hibernated before entering the loop. See
`c:handle_call/3` for more information on hibernation.
Returning `:ignore` will cause `start_link/3` to return `:ignore` and the
process will exit normally without entering the loop or calling `c:terminate/2`.
If used when part of a supervision tree the parent supervisor will not fail
to start nor immediately try to restart the `GenServer`. The remainder of the
supervision tree will be (re)started and so the `GenServer` should not be
required by other processes. It can be started later with
`Supervisor.restart_child/2` as the child specification is saved in the parent
supervisor. The main use cases for this are:
* The `GenServer` is disabled by configuration but might be enabled later.
* An error occurred and it will be handled by a different mechanism than the
`Supervisor`. Likely this approach involves calling `Supervisor.restart_child/2`
after a delay to attempt a restart.
Returning `{:stop, reason}` will cause `start_link/3` to return
`{:error, reason}` and the process to exit with reason `reason` without
entering the loop or calling `c:terminate/2`.
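
For illustration, a minimal `c:init/1` sketch (the `:interval` option and the
timeout-driven tick are hypothetical):

    def init(opts) do
      interval = Keyword.get(opts, :interval, 5_000)
      # Ask for a :timeout message if nothing arrives within `interval` ms.
      {:ok, %{interval: interval}, interval}
    end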
"""
@callback init(args :: term) ::
{:ok, state}
| {:ok, state, timeout | :hibernate}
| :ignore
| {:stop, reason :: any}
when state: any
@doc """
Invoked to handle synchronous `call/3` messages. `call/3` will block until a
reply is received (unless the call times out or nodes are disconnected).
`request` is the request message sent by a `call/3`, `from` is a 2-tuple
containing the caller's PID and a term that uniquely identifies the call, and
`state` is the current state of the `GenServer`.
Returning `{:reply, reply, new_state}` sends the response `reply` to the
caller and continues the loop with new state `new_state`.
Returning `{:reply, reply, new_state, timeout}` is similar to
`{:reply, reply, new_state}` except `handle_info(:timeout, new_state)` will be
called after `timeout` milliseconds if no messages are received.
Returning `{:reply, reply, new_state, :hibernate}` is similar to
`{:reply, reply, new_state}` except the process is hibernated and will
continue the loop once a message is in its message queue. If a message is
already in the message queue this will happen immediately. Hibernating a
`GenServer` causes garbage collection and leaves a continuous heap that
minimises the memory used by the process.
Hibernating should not be used aggressively as too much time could be spent
garbage collecting. Normally it should only be used when a message is not
expected soon and minimising the memory of the process is shown to be
beneficial.
Returning `{:noreply, new_state}` does not send a response to the caller and
continues the loop with new state `new_state`. The response must be sent with
`reply/2`.
There are three main use cases for not replying using the return value:
* To reply before returning from the callback because the response is known
before calling a slow function.
* To reply after returning from the callback because the response is not yet
available.
* To reply from another process, such as a task.
When replying from another process the `GenServer` should exit if the other
process exits without replying as the caller will be blocking awaiting a
reply.
Returning `{:noreply, new_state, timeout | :hibernate}` is similar to
`{:noreply, new_state}` except a timeout or hibernation occurs as with a
`:reply` tuple.
Returning `{:stop, reason, reply, new_state}` stops the loop and `c:terminate/2`
is called with reason `reason` and state `new_state`. Then the `reply` is sent
as the response to the call and the process exits with reason `reason`.
Returning `{:stop, reason, new_state}` is similar to
`{:stop, reason, reply, new_state}` except a reply is not sent.
If this callback is not implemented, the default implementation by
`use GenServer` will fail with a `RuntimeError` exception with a message:
attempted to call `GenServer` but no `handle_call/3` clause was provided.
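
As a sketch, a synchronous `:pop` handler in the spirit of the stack server
used in these docs:

    def handle_call(:pop, _from, [head | tail]) do
      # The reply is sent to the caller; `tail` becomes the new state.
      {:reply, head, tail}
    end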
"""
@callback handle_call(request :: term, from, state :: term) ::
{:reply, reply, new_state}
| {:reply, reply, new_state, timeout | :hibernate}
| {:noreply, new_state}
| {:noreply, new_state, timeout | :hibernate}
| {:stop, reason, reply, new_state}
| {:stop, reason, new_state}
when reply: term, new_state: term, reason: term
@doc """
Invoked to handle asynchronous `cast/2` messages.
`request` is the request message sent by a `cast/2` and `state` is the current
state of the `GenServer`.
Returning `{:noreply, new_state}` continues the loop with new state `new_state`.
Returning `{:noreply, new_state, timeout}` is similar to
`{:noreply, new_state}` except `handle_info(:timeout, new_state)` will be
called after `timeout` milliseconds if no messages are received.
Returning `{:noreply, new_state, :hibernate}` is similar to
`{:noreply, new_state}` except the process is hibernated before continuing the
loop. See `c:handle_call/3` for more information.
Returning `{:stop, reason, new_state}` stops the loop and `c:terminate/2` is
called with the reason `reason` and state `new_state`. The process exits with
reason `reason`.
If this callback is not implemented, the default implementation by
`use GenServer` will fail with a `RuntimeError` exception with a message:
attempted to call `GenServer` but no `handle_cast/2` clause was provided.
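
As a sketch, an asynchronous `:push` handler matching the stack example:

    def handle_cast({:push, item}, state) do
      # No reply is sent; the caller already got :ok back from cast/2.
      {:noreply, [item | state]}
    end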
"""
@callback handle_cast(request :: term, state :: term) ::
{:noreply, new_state}
| {:noreply, new_state, timeout | :hibernate}
| {:stop, reason :: term, new_state}
when new_state: term
@doc """
Invoked to handle all other messages.
`msg` is the message and `state` is the current state of the `GenServer`. When
a timeout occurs the message is `:timeout`.
Return values are the same as `c:handle_cast/2`.
If this callback is not implemented, the default implementation by
`use GenServer` will return `{:noreply, state}`.
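
As a sketch, handling the `:timeout` message plus a catch-all clause (the
log-and-ignore fallback is just one common choice):

    def handle_info(:timeout, state) do
      # A timeout returned from a previous callback elapsed with no messages.
      {:noreply, state}
    end

    def handle_info(msg, state) do
      require Logger
      Logger.warn(inspect(msg))
      {:noreply, state}
    end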
"""
@callback handle_info(msg :: :timeout | term, state :: term) ::
{:noreply, new_state}
| {:noreply, new_state, timeout | :hibernate}
| {:stop, reason :: term, new_state}
when new_state: term
@doc """
Invoked when the server is about to exit. It should do any cleanup required.
`reason` is exit reason and `state` is the current state of the `GenServer`.
The return value is ignored.
`c:terminate/2` is called if a callback (except `c:init/1`) does one of the
following:
* returns a `:stop` tuple
* raises
* calls `Kernel.exit/1`
* returns an invalid value
It is also called if the `GenServer` traps exits (using `Process.flag/2`)
*and* the parent process sends an exit signal.
If part of a supervision tree, a `GenServer`'s `Supervisor` will send an exit
signal when shutting it down. The exit signal is based on the shutdown
strategy in the child's specification. If it is `:brutal_kill` the `GenServer`
is killed and so `c:terminate/2` is not called. However if it is a timeout the
`Supervisor` will send the exit signal `:shutdown` and the `GenServer` will
have the duration of the timeout to call `c:terminate/2` - if the process is
still alive after the timeout it is killed.
If the `GenServer` receives an exit signal (that is not `:normal`) from any
process when it is not trapping exits it will exit abruptly with the same
reason and so not call `c:terminate/2`. Note that a process does *NOT* trap
exits by default and an exit signal is sent when a linked process exits or its
node is disconnected.
Therefore it is not guaranteed that `c:terminate/2` is called when a `GenServer`
exits. For such reasons, we usually recommend important clean-up rules to
happen in separate processes, either by use of monitoring or by links
themselves. For example if the `GenServer` controls a `port` (e.g.
`:gen_tcp.socket`) or `t:File.io_device/0`, they will be closed on receiving a
`GenServer`'s exit signal and do not need to be closed in `c:terminate/2`.
If `reason` is not `:normal`, `:shutdown`, nor `{:shutdown, term}` an error is
logged.
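
As a sketch, trapping exits in `c:init/1` so that `c:terminate/2` runs on a
supervisor-initiated `:shutdown` (`persist/1` is a hypothetical cleanup
function):

    def init(state) do
      # Trap exits so the :shutdown signal triggers terminate/2.
      Process.flag(:trap_exit, true)
      {:ok, state}
    end

    def terminate(_reason, state) do
      persist(state)
    end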
"""
@callback terminate(reason, state :: term) :: term
when reason: :normal | :shutdown | {:shutdown, term}
@doc """
Invoked to change the state of the `GenServer` when a different version of a
module is loaded (hot code swapping) and the state's term structure should be
changed.
`old_vsn` is the previous version of the module (defined by the `@vsn`
attribute) when upgrading. When downgrading the previous version is wrapped in
a 2-tuple with first element `:down`. `state` is the current state of the
`GenServer` and `extra` is any extra data required to change the state.
Returning `{:ok, new_state}` changes the state to `new_state` and the code
change is successful.
Returning `{:error, reason}` fails the code change with reason `reason` and
the state remains as the previous state.
If `c:code_change/3` raises the code change fails and the loop will continue
with its previous state. Therefore this callback does not usually contain side effects.
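
As a sketch, a hypothetical migration from a version that kept a bare list
to one that wraps it in a map:

    def code_change(_old_vsn, state, _extra) when is_list(state) do
      {:ok, %{items: state}}
    end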
"""
@callback code_change(old_vsn, state :: term, extra :: term) ::
{:ok, new_state :: term}
| {:error, reason :: term}
| {:down, term}
when old_vsn: term
@doc """
Invoked in some cases to retrieve a formatted version of the `GenServer` status.
This callback can be useful to control the *appearance* of the status of the
`GenServer`. For example, it can be used to return a compact representation of
the `GenServer`'s state to avoid having large state terms printed.
This callback is invoked in the following situations:
* one of `:sys.get_status/1` or `:sys.get_status/2` is invoked to get the
status of the `GenServer`; in such cases, `reason` is `:normal`
* the `GenServer` terminates abnormally and logs an error; in such cases,
`reason` is `:terminate`
`pdict_and_state` is a two-element list `[pdict, state]` where `pdict` is a
list of `{key, value}` tuples representing the current process dictionary of
the `GenServer` and `state` is the current state of the `GenServer`.
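
As a sketch, returning a compact representation instead of a possibly huge
state term (assuming the stack server's list state):

    def format_status(_reason, [_pdict, state]) do
      [data: [{'State', {:stack_size, length(state)}}]]
    end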
"""
@callback format_status(reason, pdict_and_state :: list) :: term
when reason: :normal | :terminate
@optional_callbacks format_status: 2
@typedoc "Return values of `start*` functions"
@type on_start :: {:ok, pid} | :ignore | {:error, {:already_started, pid} | term}
@typedoc "The GenServer name"
@type name :: atom | {:global, term} | {:via, module, term}
@typedoc "Options used by the `start*` functions"
@type options :: [option]
@typedoc "Option values used by the `start*` functions"
@type option ::
{:debug, debug}
| {:name, name}
| {:timeout, timeout}
| {:spawn_opt, Process.spawn_opt()}
@typedoc "Debug options supported by the `start*` functions"
@type debug :: [:trace | :log | :statistics | {:log_to_file, Path.t()}]
@typedoc "The server reference"
@type server :: pid | name | {atom, node}
@typedoc """
Tuple describing the client of a call request.
`pid` is the PID of the caller and `tag` is a unique term used to identify the
call.
"""
@type from :: {pid, tag :: term}
@doc false
defmacro __using__(opts) do
quote location: :keep do
@behaviour GenServer
@opts unquote(opts)
@doc false
def child_spec(arg) do
default = %{
id: __MODULE__,
start: {__MODULE__, :start_link, [arg]}
}
Supervisor.child_spec(default, @opts)
end
defoverridable child_spec: 1
@doc false
def init(args) do
{:ok, args}
end
@doc false
def handle_call(msg, _from, state) do
proc =
case Process.info(self(), :registered_name) do
{_, []} -> self()
{_, name} -> name
end
# We do this to trick Dialyzer to not complain about non-local returns.
case :erlang.phash2(1, 1) do
0 ->
raise "attempted to call GenServer #{inspect(proc)} but no handle_call/3 clause was provided"
1 ->
{:stop, {:bad_call, msg}, state}
end
end
@doc false
def handle_info(msg, state) do
proc =
case Process.info(self(), :registered_name) do
{_, []} -> self()
{_, name} -> name
end
pattern = '~p ~p received unexpected message in handle_info/2: ~p~n'
:error_logger.error_msg(pattern, [__MODULE__, proc, msg])
{:noreply, state}
end
@doc false
def handle_cast(msg, state) do
proc =
case Process.info(self(), :registered_name) do
{_, []} -> self()
{_, name} -> name
end
# We do this to trick Dialyzer to not complain about non-local returns.
case :erlang.phash2(1, 1) do
0 ->
raise "attempted to cast GenServer #{inspect(proc)} but no handle_cast/2 clause was provided"
1 ->
{:stop, {:bad_cast, msg}, state}
end
end
@doc false
def terminate(_reason, _state) do
:ok
end
@doc false
def code_change(_old, state, _extra) do
{:ok, state}
end
defoverridable GenServer
end
end
@doc """
Starts a `GenServer` process linked to the current process.
This is often used to start the `GenServer` as part of a supervision tree.
Once the server is started, the `c:init/1` function of the given `module` is
called with `args` as its arguments to initialize the server. To ensure a
synchronized start-up procedure, this function does not return until `c:init/1`
has returned.
Note that a `GenServer` started with `start_link/3` is linked to the
parent process and will exit in case of crashes from the parent. The GenServer
will also exit for `:normal` exit reasons if it is configured to trap
exits in the `c:init/1` callback.
## Options
* `:name` - used for name registration as described in the "Name
registration" section of the module documentation
* `:timeout` - if present, the server is allowed to spend the given number of
milliseconds initializing or it will be terminated and the start function
will return `{:error, :timeout}`
* `:debug` - if present, the corresponding function in the [`:sys` module](http://www.erlang.org/doc/man/sys.html) is invoked
* `:spawn_opt` - if present, its value is passed as options to the
underlying process as in `Process.spawn/4`
## Return values
If the server is successfully created and initialized, this function returns
`{:ok, pid}`, where `pid` is the PID of the server. If a process with the
specified server name already exists, this function returns
`{:error, {:already_started, pid}}` with the PID of that process.
If the `c:init/1` callback fails with `reason`, this function returns
`{:error, reason}`. Otherwise, if it returns `{:stop, reason}`
or `:ignore`, the process is terminated and this function returns
`{:error, reason}` or `:ignore`, respectively.
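
For example, starting the stack server from the module docs and registering
it locally under its module name:

    {:ok, pid} = GenServer.start_link(Stack, [:hello], name: Stack)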
"""
@spec start_link(module, any, options) :: on_start
def start_link(module, args, options \\ []) when is_atom(module) and is_list(options) do
do_start(:link, module, args, options)
end
@doc """
Starts a `GenServer` process without links (outside of a supervision tree).
See `start_link/3` for more information.
"""
@spec start(module, any, options) :: on_start
def start(module, args, options \\ []) when is_atom(module) and is_list(options) do
do_start(:nolink, module, args, options)
end
defp do_start(link, module, args, options) do
case Keyword.pop(options, :name) do
{nil, opts} ->
:gen.start(:gen_server, link, module, args, opts)
{atom, opts} when is_atom(atom) ->
:gen.start(:gen_server, link, {:local, atom}, module, args, opts)
{{:global, _term} = tuple, opts} ->
:gen.start(:gen_server, link, tuple, module, args, opts)
{{:via, via_module, _term} = tuple, opts} when is_atom(via_module) ->
:gen.start(:gen_server, link, tuple, module, args, opts)
{other, _} ->
raise ArgumentError, """
expected :name option to be one of:
* nil
* atom
* {:global, term}
* {:via, module, term}
Got: #{inspect(other)}
"""
end
end
@doc """
Synchronously stops the server with the given `reason`.
The `c:terminate/2` callback of the given `server` will be invoked before
exiting. This function returns `:ok` if the server terminates with the
given reason; if it terminates with another reason, the call exits.
This function keeps OTP semantics regarding error reporting.
If the reason is any other than `:normal`, `:shutdown` or
`{:shutdown, _}`, an error report is logged.
"""
@spec stop(server, reason :: term, timeout) :: :ok
def stop(server, reason \\ :normal, timeout \\ :infinity) do
:gen.stop(server, reason, timeout)
end
@doc """
Makes a synchronous call to the `server` and waits for its reply.
The client sends the given `request` to the server and waits until a reply
arrives or a timeout occurs. `c:handle_call/3` will be called on the server
to handle the request.
`server` can be any of the values described in the "Name registration"
section of the documentation for this module.
## Timeouts
`timeout` is an integer greater than zero which specifies how many
milliseconds to wait for a reply, or the atom `:infinity` to wait
indefinitely. The default value is `5000`. If no reply is received within
the specified time, the function call fails and the caller exits. If the
caller catches the failure and continues running, and the server is just late
with the reply, it may arrive at any time later into the caller's message
queue. The caller must in this case be prepared for this and discard any such
garbage messages that are two-element tuples with a reference as the first
element.
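
As a sketch, catching the exit raised on timeout (note that a late reply may
still arrive in the caller's mailbox afterwards):

    try do
      GenServer.call(server, :pop, 1_000)
    catch
      :exit, {:timeout, _} -> :timed_out
    end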
"""
@spec call(server, term, timeout) :: term
def call(server, request, timeout \\ 5000) do
case whereis(server) do
nil ->
exit({:noproc, {__MODULE__, :call, [server, request, timeout]}})
pid when pid == self() ->
exit({:calling_self, {__MODULE__, :call, [server, request, timeout]}})
pid ->
try do
:gen.call(pid, :"$gen_call", request, timeout)
catch
:exit, reason ->
exit({reason, {__MODULE__, :call, [server, request, timeout]}})
else
{:ok, res} -> res
end
end
end
@doc """
Sends an asynchronous request to the `server`.
This function always returns `:ok` regardless of whether
the destination `server` (or node) exists. Therefore it
is unknown whether the destination `server` successfully
handled the message.
`c:handle_cast/2` will be called on the server to handle
the request. In case the `server` is on a node which is
not yet connected to the caller's node, the call is going to
block until a connection happens. This is different than
the behaviour in OTP's `:gen_server` where the message
is sent by another process in this case, which could cause
messages to other nodes to arrive out of order.
"""
@spec cast(server, term) :: :ok
def cast(server, request)
def cast({:global, name}, request) do
try do
:global.send(name, cast_msg(request))
:ok
catch
_, _ -> :ok
end
end
def cast({:via, mod, name}, request) do
try do
mod.send(name, cast_msg(request))
:ok
catch
_, _ -> :ok
end
end
def cast({name, node}, request) when is_atom(name) and is_atom(node),
do: do_send({name, node}, cast_msg(request))
def cast(dest, request) when is_atom(dest) or is_pid(dest), do: do_send(dest, cast_msg(request))
@doc """
Casts all servers locally registered as `name` at the specified nodes.
This function returns immediately and ignores nodes that do not exist, or where the
server name does not exist.
See `multi_call/4` for more information.
"""
@spec abcast([node], name :: atom, term) :: :abcast
def abcast(nodes \\ [node() | Node.list()], name, request) when is_list(nodes) and is_atom(name) do
msg = cast_msg(request)
_ = for node <- nodes, do: do_send({name, node}, msg)
:abcast
end
defp cast_msg(req) do
{:"$gen_cast", req}
end
defp do_send(dest, msg) do
try do
send(dest, msg)
:ok
catch
_, _ -> :ok
end
end
@doc """
Calls all servers locally registered as `name` at the specified `nodes`.
First, the `request` is sent to every node in `nodes`; then, the caller waits
for the replies. This function returns a two-element tuple `{replies,
bad_nodes}` where:
* `replies` - is a list of `{node, reply}` tuples where `node` is the node
that replied and `reply` is its reply
* `bad_nodes` - is a list of nodes that either did not exist or where a
server with the given `name` did not exist or did not reply
`nodes` is a list of node names to which the request is sent. The default
value is the list of all known nodes (including this node).
To avoid that late answers (after the timeout) pollute the caller's message
queue, a middleman process is used to do the actual calls. Late answers will
then be discarded when they arrive at a terminated process.
## Examples
Assuming the `Stack` GenServer mentioned in the docs for the `GenServer`
module is registered as `Stack` in the `:"foo@my-machine"` and
`:"bar@my-machine"` nodes:
GenServer.multi_call(Stack, :pop)
#=> {[{:"foo@my-machine", :hello}, {:"bar@my-machine", :world}], []}
"""
@spec multi_call([node], name :: atom, term, timeout) ::
{replies :: [{node, term}], bad_nodes :: [node]}
def multi_call(nodes \\ [node() | Node.list()], name, request, timeout \\ :infinity) do
:gen_server.multi_call(nodes, name, request, timeout)
end
@doc """
Replies to a client.
This function can be used to explicitly send a reply to a client that called
`call/3` or `multi_call/4` when the reply cannot be specified in the return
value of `c:handle_call/3`.
`client` must be the `from` argument (the second argument) accepted by
`c:handle_call/3` callbacks. `reply` is an arbitrary term which will be given
back to the client as the return value of the call.
Note that `reply/2` can be called from any process, not just the GenServer
that originally received the call (as long as that GenServer communicated the
`from` argument somehow).
This function always returns `:ok`.
## Examples
def handle_call(:reply_in_one_second, from, state) do
Process.send_after(self(), {:reply, from}, 1_000)
{:noreply, state}
end
def handle_info({:reply, from}, state) do
GenServer.reply(from, :one_second_has_passed)
{:noreply, state}
end
"""
@spec reply(from, term) :: :ok
def reply(client, reply)
def reply({to, tag}, reply) when is_pid(to) do
try do
send(to, {tag, reply})
:ok
catch
_, _ -> :ok
end
end
@doc """
Returns the `pid` or `{name, node}` of a GenServer process, or `nil` if
no process is associated with the given `server`.
## Examples
For example, to look up a server process, monitor it, and send a cast to it:
process = GenServer.whereis(server)
monitor = Process.monitor(process)
GenServer.cast(process, :hello)
"""
@spec whereis(server) :: pid | {atom, node} | nil
def whereis(server)
def whereis(pid) when is_pid(pid), do: pid
def whereis(name) when is_atom(name) do
Process.whereis(name)
end
def whereis({:global, name}) do
case :global.whereis_name(name) do
pid when is_pid(pid) -> pid
:undefined -> nil
end
end
def whereis({:via, mod, name}) do
case apply(mod, :whereis_name, [name]) do
pid when is_pid(pid) -> pid
:undefined -> nil
end
end
def whereis({name, local}) when is_atom(name) and local == node() do
Process.whereis(name)
end
def whereis({name, node} = server) when is_atom(name) and is_atom(node) do
server
end
end
# source: lib/elixir/lib/gen_server.ex
defmodule LoggerJSON.JasonSafeFormatter do
@moduledoc """
Utilities for converting metadata into data structures that can be safely passed to `Jason.encode!/1`.
"""
@doc """
Produces metadata that is "safe" for calling Jason.encode!/1 on without errors.
This means that unexpected Logger metadata won't cause logging crashes.
Current formatting is...
* Maps: as is
* Printable binaries: as is
* Numbers: as is
* Structs that don't implement Jason.Encoder: converted to maps
* Tuples: converted to lists
* Keyword lists: converted to Maps
* everything else: inspected
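
For example (a sketch of the expected output):

    iex> LoggerJSON.JasonSafeFormatter.format(%{tuple: {1, 2}, opts: [a: 1]})
    %{tuple: [1, 2], opts: %{a: 1}}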
"""
@spec format(any()) :: any()
def format(%Jason.Fragment{} = data) do
data
end
def format(nil), do: nil
def format(true), do: true
def format(false), do: false
def format(data) when is_atom(data), do: data
def format(%_struct{} = data) do
if jason_implemented?(data) do
data
else
data
|> Map.from_struct()
|> format()
end
end
def format(%{} = data) do
for {key, value} <- data, into: %{}, do: {format_map_key(key), format(value)}
end
def format([{key, _} | _] = data) when is_atom(key) do
Enum.into(data, %{}, fn
{key, value} -> {format_map_key(key), format(value)}
end)
rescue
_ -> for(d <- data, do: format(d))
end
def format({key, data}) when is_binary(key) or is_atom(key), do: %{format_map_key(key) => format(data)}
def format(data) when is_tuple(data), do: data |> Tuple.to_list() |> format()
def format(data) when is_number(data), do: data
def format(data) when is_binary(data), do: format_binary(data)
def format(data) when is_list(data), do: for(d <- data, do: format(d))
def format(data), do: inspect(data, pretty: true, width: 80)
defp format_map_key(key) when is_binary(key), do: format_binary(key)
defp format_map_key(key) when is_atom(key) or is_number(key), do: key
defp format_map_key(key), do: inspect(key)
defp format_binary(data) when is_binary(data) do
if String.valid?(data) && String.printable?(data) do
data
else
inspect(data)
end
end
def jason_implemented?(data) do
impl = Jason.Encoder.impl_for(data)
impl && impl != Jason.Encoder.Any
end
end
# source: lib/logger_json/jason_safe_formatter.ex
defmodule Bitcoin.Protocol.Types.BlockHeader do
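  @moduledoc """
  The block header structure of the Bitcoin P2P protocol, with parse and
  serialize helpers.

  A sketch of a round trip (unspecified fields keep their zeroed defaults):

      header = %Bitcoin.Protocol.Types.BlockHeader{version: 2, timestamp: 1_500_000_000}
      ^header = header |> Bitcoin.Protocol.Types.BlockHeader.serialize()
                       |> Bitcoin.Protocol.Types.BlockHeader.parse()
  """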
alias Bitcoin.Protocol.Types.VarInteger
# Block version information, based upon the software version creating this block
defstruct version: 0,
# char[32], The hash value of the previous block this particular block references
previous_block:
<<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0>>,
# char[32], The reference to a Merkle tree collection which is a hash of all transactions related to this block
merkle_root:
<<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0>>,
# uint32_t, A Unix timestamp recording when this block was created (Currently limited to dates before the year 2106!)
timestamp: 0,
# uint32_t, The calculated difficulty target being used for this block
bits: 0,
# uint32_t, The nonce used to generate this block… to allow variations of the header and compute different hashes
nonce: 0,
# count - Bitcoin.Protocol.Types.Integer, number of transaction entries in this block
transaction_count: 0
@type t :: %__MODULE__{
version: integer,
previous_block: Bitcoin.Block.t_hash(),
merkle_root: Bitcoin.t_hash(),
timestamp: non_neg_integer,
bits: non_neg_integer,
nonce: non_neg_integer,
transaction_count: non_neg_integer
}
@spec parse(binary) :: t
def parse(payload) do
{data, <<>>} = parse_stream(payload)
data
end
@spec parse_stream(binary) :: {t, binary}
def parse_stream(data) do
<<version::little-integer-size(32), previous_block::bytes-size(32),
merkle_root::bytes-size(32), timestamp::unsigned-little-integer-size(32),
bits::unsigned-little-integer-size(32), nonce::unsigned-little-integer-size(32),
payload::binary>> = data
{transaction_count, payload} = VarInteger.parse_stream(payload)
{%__MODULE__{
version: version,
previous_block: previous_block,
merkle_root: merkle_root,
timestamp: timestamp,
bits: bits,
nonce: nonce,
transaction_count: transaction_count
}, payload}
end
@spec serialize(t) :: binary
def serialize(%__MODULE__{} = s) do
# https://en.bitcoin.it/wiki/Protocol_documentation#headers says tx_count can be > 0
# https://bitcoin.org/en/developer-reference#headers says it's always 0x00
# ¯\_(ツ)_/¯
<<
s.version::little-integer-size(32),
s.previous_block::bytes-size(32),
s.merkle_root::bytes-size(32),
s.timestamp::unsigned-little-integer-size(32),
s.bits::unsigned-little-integer-size(32),
s.nonce::unsigned-little-integer-size(32)
>> <>
VarInteger.serialize(s.transaction_count)
end
end
# source: lib/bitcoin/protocol/types/block_header.ex
defmodule Jason.DecodeError do
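  @moduledoc """
  Exception describing why decoding failed; also returned as `{:error, t}`
  from `Jason.decode/2`.

  An illustrative failure (matching only a subset of the struct fields):

      {:error, %Jason.DecodeError{position: 1, data: "{"}} = Jason.decode("{")
  """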
@type t :: %__MODULE__{position: integer, data: String.t}
defexception [:position, :token, :data]
def message(%{position: position, token: token}) when is_binary(token) do
"unexpected sequence at position #{position}: #{inspect token}"
end
def message(%{position: position, data: data}) when position == byte_size(data) do
"unexpected end of input at position #{position}"
end
def message(%{position: position, data: data}) do
byte = :binary.at(data, position)
str = <<byte>>
if String.printable?(str) do
"unexpected byte at position #{position}: " <>
"#{inspect byte, base: :hex} (#{inspect str})"
else
"unexpected byte at position #{position}: " <>
"#{inspect byte, base: :hex}"
end
end
end
defmodule Jason.Decoder do
@moduledoc false
import Bitwise
alias Jason.{DecodeError, Codegen}
import Codegen, only: [bytecase: 2, bytecase: 3]
# @compile :native
# We use integers instead of atoms to take advantage of the jump table
# optimization
@terminate 0
@array 1
@key 2
@object 3
def parse(data, opts) when is_binary(data) do
key_decode = key_decode_function(opts)
string_decode = string_decode_function(opts)
try do
value(data, data, 0, [@terminate], key_decode, string_decode)
catch
{:position, position} ->
{:error, %DecodeError{position: position, data: data}}
{:token, token, position} ->
{:error, %DecodeError{token: token, position: position, data: data}}
else
value ->
{:ok, value}
end
end
defp key_decode_function(%{keys: :atoms}), do: &String.to_atom/1
defp key_decode_function(%{keys: :atoms!}), do: &String.to_existing_atom/1
defp key_decode_function(%{keys: :strings}), do: &(&1)
defp key_decode_function(%{keys: fun}) when is_function(fun, 1), do: fun
defp string_decode_function(%{strings: :copy}), do: &:binary.copy/1
defp string_decode_function(%{strings: :reference}), do: &(&1)
defp value(data, original, skip, stack, key_decode, string_decode) do
bytecase data do
_ in '\s\n\t\r', rest ->
value(rest, original, skip + 1, stack, key_decode, string_decode)
_ in '0', rest ->
number_zero(rest, original, skip, stack, key_decode, string_decode, 1)
_ in '123456789', rest ->
number(rest, original, skip, stack, key_decode, string_decode, 1)
_ in '-', rest ->
number_minus(rest, original, skip, stack, key_decode, string_decode)
_ in '"', rest ->
string(rest, original, skip + 1, stack, key_decode, string_decode, 0)
_ in '[', rest ->
array(rest, original, skip + 1, stack, key_decode, string_decode)
_ in '{', rest ->
object(rest, original, skip + 1, stack, key_decode, string_decode)
_ in ']', rest ->
empty_array(rest, original, skip + 1, stack, key_decode, string_decode)
_ in 't', rest ->
case rest do
<<"rue", rest::bits>> ->
continue(rest, original, skip + 4, stack, key_decode, string_decode, true)
<<_::bits>> ->
error(original, skip)
end
_ in 'f', rest ->
case rest do
<<"alse", rest::bits>> ->
continue(rest, original, skip + 5, stack, key_decode, string_decode, false)
<<_::bits>> ->
error(original, skip)
end
_ in 'n', rest ->
case rest do
<<"ull", rest::bits>> ->
continue(rest, original, skip + 4, stack, key_decode, string_decode, nil)
<<_::bits>> ->
error(original, skip)
end
_, rest ->
error(rest, original, skip + 1, stack, key_decode, string_decode)
<<_::bits>> ->
error(original, skip)
end
end
defp number_minus(<<?0, rest::bits>>, original, skip, stack, key_decode, string_decode) do
number_zero(rest, original, skip, stack, key_decode, string_decode, 2)
end
defp number_minus(<<byte, rest::bits>>, original, skip, stack, key_decode, string_decode)
when byte in '123456789' do
number(rest, original, skip, stack, key_decode, string_decode, 2)
end
defp number_minus(<<_rest::bits>>, original, skip, _stack, _key_decode, _string_decode) do
error(original, skip + 1)
end
defp number(<<byte, rest::bits>>, original, skip, stack, key_decode, string_decode, len)
when byte in '0123456789' do
number(rest, original, skip, stack, key_decode, string_decode, len + 1)
end
defp number(<<?., rest::bits>>, original, skip, stack, key_decode, string_decode, len) do
number_frac(rest, original, skip, stack, key_decode, string_decode, len + 1)
end
defp number(<<e, rest::bits>>, original, skip, stack, key_decode, string_decode, len) when e in 'eE' do
prefix = binary_part(original, skip, len)
number_exp_copy(rest, original, skip + len + 1, stack, key_decode, string_decode, prefix)
end
defp number(<<rest::bits>>, original, skip, stack, key_decode, string_decode, len) do
int = String.to_integer(binary_part(original, skip, len))
continue(rest, original, skip + len, stack, key_decode, string_decode, int)
end
defp number_frac(<<byte, rest::bits>>, original, skip, stack, key_decode, string_decode, len)
when byte in '0123456789' do
number_frac_cont(rest, original, skip, stack, key_decode, string_decode, len + 1)
end
defp number_frac(<<_rest::bits>>, original, skip, _stack, _key_decode, _string_decode, len) do
error(original, skip + len)
end
defp number_frac_cont(<<byte, rest::bits>>, original, skip, stack, key_decode, string_decode, len)
when byte in '0123456789' do
number_frac_cont(rest, original, skip, stack, key_decode, string_decode, len + 1)
end
defp number_frac_cont(<<e, rest::bits>>, original, skip, stack, key_decode, string_decode, len)
when e in 'eE' do
number_exp(rest, original, skip, stack, key_decode, string_decode, len + 1)
end
defp number_frac_cont(<<rest::bits>>, original, skip, stack, key_decode, string_decode, len) do
token = binary_part(original, skip, len)
float = try_parse_float(token, token, skip)
continue(rest, original, skip + len, stack, key_decode, string_decode, float)
end
defp number_exp(<<byte, rest::bits>>, original, skip, stack, key_decode, string_decode, len)
when byte in '0123456789' do
number_exp_cont(rest, original, skip, stack, key_decode, string_decode, len + 1)
end
defp number_exp(<<byte, rest::bits>>, original, skip, stack, key_decode, string_decode, len)
when byte in '+-' do
number_exp_sign(rest, original, skip, stack, key_decode, string_decode, len + 1)
end
defp number_exp(<<_rest::bits>>, original, skip, _stack, _key_decode, _string_decode, len) do
error(original, skip + len)
end
defp number_exp_sign(<<byte, rest::bits>>, original, skip, stack, key_decode, string_decode, len)
when byte in '0123456789' do
number_exp_cont(rest, original, skip, stack, key_decode, string_decode, len + 1)
end
defp number_exp_sign(<<_rest::bits>>, original, skip, _stack, _key_decode, _string_decode, len) do
error(original, skip + len)
end
defp number_exp_cont(<<byte, rest::bits>>, original, skip, stack, key_decode, string_decode, len)
when byte in '0123456789' do
number_exp_cont(rest, original, skip, stack, key_decode, string_decode, len + 1)
end
defp number_exp_cont(<<rest::bits>>, original, skip, stack, key_decode, string_decode, len) do
token = binary_part(original, skip, len)
float = try_parse_float(token, token, skip)
continue(rest, original, skip + len, stack, key_decode, string_decode, float)
end
defp number_exp_copy(<<byte, rest::bits>>, original, skip, stack, key_decode, string_decode, prefix)
when byte in '0123456789' do
number_exp_cont(rest, original, skip, stack, key_decode, string_decode, prefix, 1)
end
defp number_exp_copy(<<byte, rest::bits>>, original, skip, stack, key_decode, string_decode, prefix)
when byte in '+-' do
number_exp_sign(rest, original, skip, stack, key_decode, string_decode, prefix, 1)
end
defp number_exp_copy(<<_rest::bits>>, original, skip, _stack, _key_decode, _string_decode, _prefix) do
error(original, skip)
end
defp number_exp_sign(<<byte, rest::bits>>, original, skip, stack, key_decode, string_decode, prefix, len)
when byte in '0123456789' do
number_exp_cont(rest, original, skip, stack, key_decode, string_decode, prefix, len + 1)
end
defp number_exp_sign(<<_rest::bits>>, original, skip, _stack, _key_decode, _string_decode, _prefix, len) do
error(original, skip + len)
end
defp number_exp_cont(<<byte, rest::bits>>, original, skip, stack, key_decode, string_decode, prefix, len)
when byte in '0123456789' do
number_exp_cont(rest, original, skip, stack, key_decode, string_decode, prefix, len + 1)
end
defp number_exp_cont(<<rest::bits>>, original, skip, stack, key_decode, string_decode, prefix, len) do
suffix = binary_part(original, skip, len)
string = prefix <> ".0e" <> suffix
prefix_size = byte_size(prefix)
initial_skip = skip - prefix_size - 1
final_skip = skip + len
token = binary_part(original, initial_skip, prefix_size + len + 1)
float = try_parse_float(string, token, initial_skip)
continue(rest, original, final_skip, stack, key_decode, string_decode, float)
end
defp number_zero(<<?., rest::bits>>, original, skip, stack, key_decode, string_decode, len) do
number_frac(rest, original, skip, stack, key_decode, string_decode, len + 1)
end
defp number_zero(<<e, rest::bits>>, original, skip, stack, key_decode, string_decode, len) when e in 'eE' do
number_exp_copy(rest, original, skip + len + 1, stack, key_decode, string_decode, "0")
end
defp number_zero(<<rest::bits>>, original, skip, stack, key_decode, string_decode, len) do
continue(rest, original, skip + len, stack, key_decode, string_decode, 0)
end
@compile {:inline, array: 6}
defp array(rest, original, skip, stack, key_decode, string_decode) do
value(rest, original, skip, [@array, [] | stack], key_decode, string_decode)
end
defp empty_array(<<rest::bits>>, original, skip, stack, key_decode, string_decode) do
case stack do
[@array, [] | stack] ->
continue(rest, original, skip, stack, key_decode, string_decode, [])
_ ->
error(original, skip - 1)
end
end
defp array(data, original, skip, stack, key_decode, string_decode, value) do
bytecase data do
_ in '\s\n\t\r', rest ->
array(rest, original, skip + 1, stack, key_decode, string_decode, value)
_ in ']', rest ->
[acc | stack] = stack
value = :lists.reverse(acc, [value])
continue(rest, original, skip + 1, stack, key_decode, string_decode, value)
_ in ',', rest ->
[acc | stack] = stack
value(rest, original, skip + 1, [@array, [value | acc] | stack], key_decode, string_decode)
_, _rest ->
error(original, skip)
<<_::bits>> ->
empty_error(original, skip)
end
end
@compile {:inline, object: 6}
defp object(rest, original, skip, stack, key_decode, string_decode) do
key(rest, original, skip, [[] | stack], key_decode, string_decode)
end
defp object(data, original, skip, stack, key_decode, string_decode, value) do
bytecase data do
_ in '\s\n\t\r', rest ->
object(rest, original, skip + 1, stack, key_decode, string_decode, value)
_ in '}', rest ->
skip = skip + 1
[key, acc | stack] = stack
final = [{key_decode.(key), value} | acc]
continue(rest, original, skip, stack, key_decode, string_decode, :maps.from_list(final))
_ in ',', rest ->
skip = skip + 1
[key, acc | stack] = stack
acc = [{key_decode.(key), value} | acc]
key(rest, original, skip, [acc | stack], key_decode, string_decode)
_, _rest ->
error(original, skip)
<<_::bits>> ->
empty_error(original, skip)
end
end
defp key(data, original, skip, stack, key_decode, string_decode) do
bytecase data do
_ in '\s\n\t\r', rest ->
key(rest, original, skip + 1, stack, key_decode, string_decode)
_ in '}', rest ->
case stack do
[[] | stack] ->
continue(rest, original, skip + 1, stack, key_decode, string_decode, %{})
_ ->
error(original, skip)
end
_ in '"', rest ->
string(rest, original, skip + 1, [@key | stack], key_decode, string_decode, 0)
_, _rest ->
error(original, skip)
<<_::bits>> ->
empty_error(original, skip)
end
end
defp key(data, original, skip, stack, key_decode, string_decode, value) do
bytecase data do
_ in '\s\n\t\r', rest ->
key(rest, original, skip + 1, stack, key_decode, string_decode, value)
_ in ':', rest ->
value(rest, original, skip + 1, [@object, value | stack], key_decode, string_decode)
_, _rest ->
error(original, skip)
<<_::bits>> ->
empty_error(original, skip)
end
end
# TODO: check if this approach would be faster:
# https://git.ninenines.eu/cowlib.git/tree/src/cow_ws.erl#n469
# http://bjoern.hoehrmann.de/utf-8/decoder/dfa/
defp string(data, original, skip, stack, key_decode, string_decode, len) do
bytecase data, 128 do
_ in '"', rest ->
string = string_decode.(binary_part(original, skip, len))
continue(rest, original, skip + len + 1, stack, key_decode, string_decode, string)
_ in '\\', rest ->
part = binary_part(original, skip, len)
escape(rest, original, skip + len, stack, key_decode, string_decode, part)
_ in unquote(0x00..0x1F), _rest ->
error(original, skip + len)
_, rest ->
string(rest, original, skip, stack, key_decode, string_decode, len + 1)
<<char::utf8, rest::bits>> when char <= 0x7FF ->
string(rest, original, skip, stack, key_decode, string_decode, len + 2)
<<char::utf8, rest::bits>> when char <= 0xFFFF ->
string(rest, original, skip, stack, key_decode, string_decode, len + 3)
<<_char::utf8, rest::bits>> ->
string(rest, original, skip, stack, key_decode, string_decode, len + 4)
<<_::bits>> ->
empty_error(original, skip + len)
end
end
defp string(data, original, skip, stack, key_decode, string_decode, acc, len) do
bytecase data, 128 do
_ in '"', rest ->
last = binary_part(original, skip, len)
string = IO.iodata_to_binary([acc | last])
continue(rest, original, skip + len + 1, stack, key_decode, string_decode, string)
_ in '\\', rest ->
part = binary_part(original, skip, len)
escape(rest, original, skip + len, stack, key_decode, string_decode, [acc | part])
_ in unquote(0x00..0x1F), _rest ->
error(original, skip + len)
_, rest ->
string(rest, original, skip, stack, key_decode, string_decode, acc, len + 1)
<<char::utf8, rest::bits>> when char <= 0x7FF ->
string(rest, original, skip, stack, key_decode, string_decode, acc, len + 2)
<<char::utf8, rest::bits>> when char <= 0xFFFF ->
string(rest, original, skip, stack, key_decode, string_decode, acc, len + 3)
<<_char::utf8, rest::bits>> ->
string(rest, original, skip, stack, key_decode, string_decode, acc, len + 4)
<<_::bits>> ->
empty_error(original, skip + len)
end
end
defp escape(data, original, skip, stack, key_decode, string_decode, acc) do
bytecase data do
_ in 'b', rest ->
string(rest, original, skip + 2, stack, key_decode, string_decode, [acc | '\b'], 0)
_ in 't', rest ->
string(rest, original, skip + 2, stack, key_decode, string_decode, [acc | '\t'], 0)
_ in 'n', rest ->
string(rest, original, skip + 2, stack, key_decode, string_decode, [acc | '\n'], 0)
_ in 'f', rest ->
string(rest, original, skip + 2, stack, key_decode, string_decode, [acc | '\f'], 0)
_ in 'r', rest ->
string(rest, original, skip + 2, stack, key_decode, string_decode, [acc | '\r'], 0)
_ in '"', rest ->
string(rest, original, skip + 2, stack, key_decode, string_decode, [acc | '\"'], 0)
_ in '/', rest ->
string(rest, original, skip + 2, stack, key_decode, string_decode, [acc | '/'], 0)
_ in '\\', rest ->
string(rest, original, skip + 2, stack, key_decode, string_decode, [acc | '\\'], 0)
_ in 'u', rest ->
escapeu(rest, original, skip, stack, key_decode, string_decode, acc)
_, _rest ->
error(original, skip + 1)
<<_::bits>> ->
empty_error(original, skip)
end
end
defmodule Unescape do
@moduledoc false
import Bitwise
@digits Enum.concat([?0..?9, ?A..?F, ?a..?f])
def unicode_escapes(chars1 \\ @digits, chars2 \\ @digits) do
for char1 <- chars1, char2 <- chars2 do
{(char1 <<< 8) + char2, integer8(char1, char2)}
end
end
defp integer8(char1, char2) do
(integer4(char1) <<< 4) + integer4(char2)
end
defp integer4(char) when char in ?0..?9, do: char - ?0
defp integer4(char) when char in ?A..?F, do: char - ?A + 10
defp integer4(char) when char in ?a..?f, do: char - ?a + 10
defp token_error_clause(original, skip, len) do
quote do
_ ->
token_error(unquote_splicing([original, skip, len]))
end
end
defmacro escapeu_first(int, last, rest, original, skip, stack, key_decode, string_decode, acc) do
clauses = escapeu_first_clauses(last, rest, original, skip, stack, key_decode, string_decode, acc)
quote location: :keep do
case unquote(int) do
unquote(clauses ++ token_error_clause(original, skip, 6))
end
end
end
defp escapeu_first_clauses(last, rest, original, skip, stack, key_decode, string_decode, acc) do
for {int, first} <- unicode_escapes(),
not (first in 0xDC..0xDF) do
escapeu_first_clause(int, first, last, rest, original, skip, stack, key_decode, string_decode, acc)
end
end
defp escapeu_first_clause(int, first, last, rest, original, skip, stack, key_decode, string_decode, acc)
when first in 0xD8..0xDB do
hi =
quote bind_quoted: [first: first, last: last] do
0x10000 + ((((first &&& 0x03) <<< 8) + last) <<< 10)
end
args = [rest, original, skip, stack, key_decode, string_decode, acc, hi]
[clause] =
quote location: :keep do
unquote(int) -> escape_surrogate(unquote_splicing(args))
end
clause
end
defp escapeu_first_clause(int, first, last, rest, original, skip, stack, key_decode, string_decode, acc)
when first <= 0x00 do
skip = quote do: (unquote(skip) + 6)
acc =
quote bind_quoted: [acc: acc, first: first, last: last] do
if last <= 0x7F do
# 0?????
[acc, last]
else
# 110xxxx?? 10?????
byte1 = ((0b110 <<< 5) + (first <<< 2)) + (last >>> 6)
byte2 = (0b10 <<< 6) + (last &&& 0b111111)
[acc, byte1, byte2]
end
end
args = [rest, original, skip, stack, key_decode, string_decode, acc, 0]
[clause] =
quote location: :keep do
unquote(int) -> string(unquote_splicing(args))
end
clause
end
defp escapeu_first_clause(int, first, last, rest, original, skip, stack, key_decode, string_decode, acc)
when first <= 0x07 do
skip = quote do: (unquote(skip) + 6)
acc =
quote bind_quoted: [acc: acc, first: first, last: last] do
# 110xxx?? 10??????
byte1 = ((0b110 <<< 5) + (first <<< 2)) + (last >>> 6)
byte2 = (0b10 <<< 6) + (last &&& 0b111111)
[acc, byte1, byte2]
end
args = [rest, original, skip, stack, key_decode, string_decode, acc, 0]
[clause] =
quote location: :keep do
unquote(int) -> string(unquote_splicing(args))
end
clause
end
defp escapeu_first_clause(int, first, last, rest, original, skip, stack, key_decode, string_decode, acc)
when first <= 0xFF do
skip = quote do: (unquote(skip) + 6)
acc =
quote bind_quoted: [acc: acc, first: first, last: last] do
# 1110xxxx 10xxxx?? 10??????
byte1 = (0b1110 <<< 4) + (first >>> 4)
byte2 = ((0b10 <<< 6) + ((first &&& 0b1111) <<< 2)) + (last >>> 6)
byte3 = (0b10 <<< 6) + (last &&& 0b111111)
[acc, byte1, byte2, byte3]
end
args = [rest, original, skip, stack, key_decode, string_decode, acc, 0]
[clause] =
quote location: :keep do
unquote(int) -> string(unquote_splicing(args))
end
clause
end
defmacro escapeu_last(int, original, skip) do
clauses = escapeu_last_clauses()
quote location: :keep do
case unquote(int) do
unquote(clauses ++ token_error_clause(original, skip, 6))
end
end
end
defp escapeu_last_clauses() do
for {int, last} <- unicode_escapes() do
[clause] =
quote do
unquote(int) -> unquote(last)
end
clause
end
end
defmacro escapeu_surrogate(int, last, rest, original, skip, stack, key_decode, string_decode, acc,
hi) do
clauses = escapeu_surrogate_clauses(last, rest, original, skip, stack, key_decode, string_decode, acc, hi)
quote location: :keep do
case unquote(int) do
unquote(clauses ++ token_error_clause(original, skip, 12))
end
end
end
defp escapeu_surrogate_clauses(last, rest, original, skip, stack, key_decode, string_decode, acc, hi) do
digits1 = 'Dd'
digits2 = Stream.concat([?C..?F, ?c..?f])
for {int, first} <- unicode_escapes(digits1, digits2) do
escapeu_surrogate_clause(int, first, last, rest, original, skip, stack, key_decode, string_decode, acc, hi)
end
end
defp escapeu_surrogate_clause(int, first, last, rest, original, skip, stack, key_decode, string_decode, acc, hi) do
skip = quote do: unquote(skip) + 12
acc =
quote bind_quoted: [acc: acc, first: first, last: last, hi: hi] do
lo = ((first &&& 0x03) <<< 8) + last
[acc | <<(hi + lo)::utf8>>]
end
args = [rest, original, skip, stack, key_decode, string_decode, acc, 0]
[clause] =
quote do
unquote(int) ->
string(unquote_splicing(args))
end
clause
end
end
defp escapeu(<<int1::16, int2::16, rest::bits>>, original, skip, stack, key_decode, string_decode, acc) do
require Unescape
last = escapeu_last(int2, original, skip)
Unescape.escapeu_first(int1, last, rest, original, skip, stack, key_decode, string_decode, acc)
end
defp escapeu(<<_rest::bits>>, original, skip, _stack, _key_decode, _string_decode, _acc) do
empty_error(original, skip)
end
# @compile {:inline, escapeu_last: 3}
defp escapeu_last(int, original, skip) do
require Unescape
Unescape.escapeu_last(int, original, skip)
end
defp escape_surrogate(<<?\\, ?u, int1::16, int2::16, rest::bits>>, original,
skip, stack, key_decode, string_decode, acc, hi) do
require Unescape
last = escapeu_last(int2, original, skip + 6)
Unescape.escapeu_surrogate(int1, last, rest, original, skip, stack, key_decode, string_decode, acc, hi)
end
defp escape_surrogate(<<_rest::bits>>, original, skip, _stack, _key_decode, _string_decode, _acc, _hi) do
error(original, skip + 6)
end
defp try_parse_float(string, token, skip) do
:erlang.binary_to_float(string)
catch
:error, :badarg ->
token_error(token, skip)
end
defp error(<<_rest::bits>>, _original, skip, _stack, _key_decode, _string_decode) do
throw {:position, skip - 1}
end
defp empty_error(_original, skip) do
throw {:position, skip}
end
@compile {:inline, error: 2, token_error: 2, token_error: 3}
defp error(_original, skip) do
throw {:position, skip}
end
defp token_error(token, position) do
throw {:token, token, position}
end
defp token_error(token, position, len) do
throw {:token, binary_part(token, position, len), position}
end
@compile {:inline, continue: 7}
defp continue(rest, original, skip, stack, key_decode, string_decode, value) do
case stack do
[@terminate | stack] ->
terminate(rest, original, skip, stack, key_decode, string_decode, value)
[@array | stack] ->
array(rest, original, skip, stack, key_decode, string_decode, value)
[@key | stack] ->
key(rest, original, skip, stack, key_decode, string_decode, value)
[@object | stack] ->
object(rest, original, skip, stack, key_decode, string_decode, value)
end
end
defp terminate(<<byte, rest::bits>>, original, skip, stack, key_decode, string_decode, value)
when byte in '\s\n\r\t' do
terminate(rest, original, skip + 1, stack, key_decode, string_decode, value)
end
defp terminate(<<>>, _original, _skip, _stack, _key_decode, _string_decode, value) do
value
end
defp terminate(<<_rest::bits>>, original, skip, _stack, _key_decode, _string_decode, _value) do
error(original, skip)
end
end
# source: lib/decoder.ex
defmodule Sanbase.Balance do
import __MODULE__.SqlQuery
import Sanbase.Utils.Transform, only: [maybe_unwrap_ok_value: 1]
import Sanbase.Clickhouse.HistoricalBalance.Utils,
only: [maybe_update_first_balance: 2, maybe_fill_gaps_last_seen_balance: 1]
alias Sanbase.ClickhouseRepo
alias Sanbase.Model.Project
@type slug :: String.t()
@type address :: String.t()
@type interval :: String.t()
@type operator :: Sanbase.Metric.SqlQuery.Helper.operator()
@doc ~s"""
Return timeseries OHLC data for balances. For every point in time
return the first, max, min and last balances for an `interval` period
of time starting with that datetime.
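
For example (the address and interval are illustrative):

    Sanbase.Balance.historical_balance_ohlc(
      ["0x..."],
      "ethereum",
      ~U[2021-01-01 00:00:00Z],
      ~U[2021-01-02 00:00:00Z],
      "1h"
    )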
"""
@spec historical_balance_ohlc(list(address), slug, DateTime.t(), DateTime.t(), interval) ::
{:ok,
list(%{
datetime: DateTime.t(),
open_balance: number(),
high_balance: number(),
low_balance: number(),
close_balance: number()
})}
| {:error, String.t()}
def historical_balance_ohlc([], _slug, _from, _to, _interval), do: {:ok, []}
def historical_balance_ohlc(address, slug, from, to, interval) do
with {:ok, {decimals, _infr, blockchain}} <- info_by_slug(slug) do
address = transform_address(address, blockchain)
do_historical_balance_ohlc(
address,
slug,
decimals,
blockchain,
from,
to,
interval
)
end
end
@doc ~s"""
Return timeseries data for balances. For every point in time
return the last balance that is associated with that datetime.
"""
@spec historical_balance(address, slug, DateTime.t(), DateTime.t(), interval) ::
{:ok, list(%{datetime: DateTime.t(), balance: number()})} | {:error, String.t()}
def historical_balance(address, slug, from, to, interval)
when is_binary(address) do
with {:ok, {decimals, _infr, blockchain}} <- info_by_slug(slug) do
address = transform_address(address, blockchain)
do_historical_balance(
address,
slug,
decimals,
blockchain,
from,
to,
interval
)
end
end
@doc ~s"""
Return the balance changes data for every address in the list in the specified
time range.
"""
@spec balance_change(list(), String.t(), DateTime.t(), DateTime.t()) ::
{:ok,
 list(%{
   address: String.t(),
   balance_start: number(),
   balance_end: number(),
   balance_change_amount: number(),
   balance_change_percent: number()
 })}
| {:error, String.t()}
def balance_change([], _slug, _from, _to), do: {:ok, []}
def balance_change(address_or_addresses, slug, from, to) do
with {:ok, {decimals, _infr, blockchain}} <- info_by_slug(slug) do
addresses = List.wrap(address_or_addresses) |> transform_address(blockchain)
do_balance_change(addresses, slug, decimals, blockchain, from, to)
end
end
@doc ~s"""
Return the combined balance changes over time (one for every time bucket). This
does not return the balance changes for every address separately, but sums all
the changes for a given date so it must be used with addresses that belong
to the same entity such as the wallets of a given crypto project. This the
transfers between those wallets can be ignored and only transfers going outside
the set or coming in are counted.
"""
@spec historical_balance_changes(list(address), slug, DateTime.t(), DateTime.t(), interval) ::
{:ok,
list(%{
datetime: DateTime.t(),
balance_change_amount: number(),
balance_change_percent: number()
})}
| {:error, String.t()}
def historical_balance_changes([], _slug, _from, _to, _interval),
do: {:ok, []}
def historical_balance_changes(address_or_addresses, slug, from, to, interval) do
with {:ok, {decimals, _infr, blockchain}} <- info_by_slug(slug) do
addresses = List.wrap(address_or_addresses) |> transform_address(blockchain)
do_historical_balance_changes(
addresses,
slug,
decimals,
blockchain,
from,
to,
interval
)
end
end
@doc ~s"""
Return the last known balance at or before `datetime` for every address
provided as the first argument.
"""
@spec last_balance_before(address | list(address), slug, DateTime.t()) ::
{:ok, %{address => number()}} | {:error, String.t()}
def last_balance_before(address_or_addresses, slug, datetime) do
with {:ok, {decimals, _infr, blockchain}} <- info_by_slug(slug) do
addresses = List.wrap(address_or_addresses) |> transform_address(blockchain)
do_last_balance_before(addresses, slug, decimals, blockchain, datetime)
end
end
@doc ~s"""
Return a list of all the assets that a given address holds. For every
such asset return the slug and current balance. If some project is not
in Santiment's database it is not shown.
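
For example (the address and balances are illustrative):

    {:ok, held} = Sanbase.Balance.assets_held_by_address("0x...")
    # held => [%{slug: "ethereum", balance: 10.2}, ...]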
"""
@spec assets_held_by_address(address) ::
{:ok, list(%{slug: slug, balance: number()})} | {:error, String.t()}
def assets_held_by_address(address) do
address = transform_address(address, :unknown)
{query, args} = assets_held_by_address_query(address)
ClickhouseRepo.query_transform(query, args, fn [slug, balance] ->
%{
slug: slug,
balance: balance
}
end)
end
@doc ~s"""
Return all addresses that have balance that matches a set of filters.
The operator shows how the comparison must be done (`:greater_than`, `:less_than`, etc.)
and the balance is compared against the `threshold`. The addresses
that match this filter are returned.
Note that filters like `greater_than 0` or `less_than 100000` can return
many addresses. Because of this there is a built-in limit of 10000.
"""
@spec addresses_by_filter(slug, operator, number(), Keyword.t()) ::
{:ok, list(%{address: address, balance: number()})} | {:error, String.t()}
def addresses_by_filter(slug, operator, threshold, opts) do
with {:ok, {decimals, infr, _blockchain}} <- info_by_slug(slug),
{:ok, table} <- realtime_balances_table(slug, infr) do
{query, args} =
addresses_by_filter_query(
slug,
decimals,
operator,
threshold,
table,
opts
)
ClickhouseRepo.query_transform(query, args, fn [address, balance] ->
%{
address: address,
balance: balance
}
end)
end
end
@doc ~s"""
Return the first datetime for which there is a balance record for a
given address/slug pair.
"""
@spec first_datetime(address, slug) :: {:ok, DateTime.t()} | {:error, String.t()}
def first_datetime(address, slug) do
with {:ok, {_decimals, _infr, blockchain}} <- info_by_slug(slug) do
address = transform_address(address, blockchain)
{query, args} = first_datetime_query(address, slug, blockchain)
ClickhouseRepo.query_transform(query, args, fn [unix] ->
DateTime.from_unix!(unix)
end)
|> maybe_unwrap_ok_value()
end
end
@doc ~s"""
Return the current balance for every address provided and a given slug.
"""
@spec current_balance(address | list(address), slug) ::
{:ok, [%{address: address, balance: number()}]} | {:error, String.t()}
def current_balance(address_or_addresses, slug) do
with {:ok, {decimals, infr, blockchain}} <- info_by_slug(slug),
{:ok, table} <- realtime_balances_table_or_nil(slug, infr) do
addresses = List.wrap(address_or_addresses) |> transform_address(blockchain)
do_current_balance(addresses, slug, decimals, blockchain, table)
end
end
def current_balance_top_addresses(slug, opts) do
with {:ok, {decimals, infrastructure, blockchain}} <- info_by_slug(slug),
{:ok, table} <- realtime_balances_table(slug, infrastructure) do
{query, args} = top_addresses_query(slug, decimals, blockchain, table, opts)
ClickhouseRepo.query_transform(query, args, fn [address, balance] ->
%{
address: address,
infrastructure: infrastructure,
balance: balance
}
end)
end
end
def realtime_balances_table_or_nil(slug, infr) do
case realtime_balances_table(slug, infr) do
{:ok, table} -> {:ok, table}
_ -> {:ok, nil}
end
end
def realtime_balances_table("ethereum", "ETH"),
do: {:ok, "eth_balances_realtime"}
def realtime_balances_table(_, "ETH"), do: {:ok, "erc20_balances_realtime"}
def realtime_balances_table(slug, _infrastructure),
do: {:error, "The slug #{slug} does not have support for realtime balances"}
def supported_infrastructures(),
do: ["ETH", "BTC", "BCH", "LTC", "BNB", "BEP2", "XRP"]
def blockchain_from_infrastructure("ETH"), do: "ethereum"
def blockchain_from_infrastructure("BTC"), do: "bitcoin"
def blockchain_from_infrastructure("BCH"), do: "bitcoin-cash"
def blockchain_from_infrastructure("LTC"), do: "litecoin"
def blockchain_from_infrastructure("BNB"), do: "binance"
def blockchain_from_infrastructure("BEP2"), do: "binance"
def blockchain_from_infrastructure("XRP"), do: "ripple"
def blockchain_from_infrastructure(_), do: :unsupported_blockchain
# Private functions
defp do_current_balance(addresses, slug, decimals, blockchain, table) do
{query, args} = current_balance_query(addresses, slug, decimals, blockchain, table)
ClickhouseRepo.query_transform(query, args, fn [address, balance] ->
%{
address: address,
balance: balance
}
end)
end
defp do_balance_change(addresses, slug, decimals, blockchain, from, to) do
{query, args} = balance_change_query(addresses, slug, decimals, blockchain, from, to)
ClickhouseRepo.query_transform(query, args, fn
[address, balance_start, balance_end, balance_change] ->
%{
address: address,
balance_start: balance_start,
balance_end: balance_end,
balance_change_amount: balance_change,
balance_change_percent: Sanbase.Math.percent_change(balance_start, balance_end)
}
end)
end
defp do_historical_balance_changes(
addresses,
slug,
decimals,
blockchain,
from,
to,
interval
) do
{query, args} =
historical_balance_changes_query(
addresses,
slug,
decimals,
blockchain,
from,
to,
interval
)
ClickhouseRepo.query_transform(query, args, fn [unix, balance_change] ->
%{
datetime: DateTime.from_unix!(unix),
balance_change_amount: balance_change
}
end)
end
defp do_last_balance_before(
address_or_addresses,
slug,
decimals,
blockchain,
datetime
) do
addresses =
address_or_addresses
|> List.wrap()
|> Enum.map(&transform_address(&1, blockchain))
{query, args} = last_balance_before_query(addresses, slug, decimals, blockchain, datetime)
case ClickhouseRepo.query_transform(query, args, & &1) do
{:ok, list} ->
# If an address does not own the given coin/token, it will be missing from the
# result. Iterate it like this in order to fill the missing values with 0
map = Map.new(list, fn [address, balance] -> {address, balance} end)
result = Enum.into(addresses, %{}, &{&1, Map.get(map, &1, 0)})
{:ok, result}
{:error, error} ->
{:error, error}
end
end
defp do_historical_balance(
address,
slug,
decimals,
blockchain,
from,
to,
interval
) do
{query, args} =
historical_balance_query(
address,
slug,
decimals,
blockchain,
from,
to,
interval
)
ClickhouseRepo.query_transform(query, args, fn [unix, value, has_changed] ->
%{
datetime: DateTime.from_unix!(unix),
balance: value,
has_changed: has_changed
}
end)
|> maybe_update_first_balance(fn ->
case do_last_balance_before(address, slug, decimals, blockchain, from) do
{:ok, %{^address => balance}} -> {:ok, balance}
{:error, error} -> {:error, error}
end
end)
|> maybe_fill_gaps_last_seen_balance()
end
defp do_historical_balance_ohlc(
address,
slug,
decimals,
blockchain,
from,
to,
interval
) do
{query, args} =
historical_balance_ohlc_query(
address,
slug,
decimals,
blockchain,
from,
to,
interval
)
ClickhouseRepo.query_transform(
query,
args,
fn [unix, open, high, low, close, has_changed] ->
%{
datetime: DateTime.from_unix!(unix),
open_balance: open,
high_balance: high,
low_balance: low,
close_balance: close,
has_changed: has_changed
}
end
)
|> maybe_update_first_balance(fn ->
case do_last_balance_before(address, slug, decimals, blockchain, from) do
{:ok, %{^address => balance}} -> {:ok, balance}
{:error, error} -> {:error, error}
end
end)
|> maybe_fill_gaps_last_seen_balance()
end
defp transform_address("0x" <> _rest = address, :unknown),
do: String.downcase(address)
defp transform_address(address, :unknown) when is_binary(address), do: address
defp transform_address(addresses, :unknown) when is_list(addresses),
do: addresses |> List.flatten() |> Enum.map(&transform_address(&1, :unknown))
defp transform_address(address, "ethereum") when is_binary(address),
do: String.downcase(address)
defp transform_address(addresses, "ethereum") when is_list(addresses),
do: addresses |> List.flatten() |> Enum.map(&String.downcase/1)
defp transform_address(address, _) when is_binary(address), do: address
defp transform_address(addresses, _) when is_list(addresses),
do: List.flatten(addresses)
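# Behavior sketch for the clauses above: Ethereum addresses are normalized to
# lowercase, while other blockchains keep addresses untouched (values are
# illustrative):
#
#     transform_address("0xAbCdEf", "ethereum") #=> "0xabcdef"
#     transform_address("some-address", "bitcoin") #=> "some-address"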
defp info_by_slug(slug) do
case Project.contract_info_infrastructure_by_slug(slug) do
{:ok, _contract, decimals, infr} ->
case blockchain_from_infrastructure(infr) do
:unsupported_blockchain ->
{:error,
"""
Project with slug #{slug} has #{infr} infrastructure which does not \
have support for historical balances.
"""}
blockchain ->
decimals = maybe_override_decimals(blockchain, decimals)
{:ok, {decimals, infr, blockchain}}
end
{:error, {:missing_contract, error}} ->
{:error, error}
end
end
# The values for all other chains except ethereum (ethereum itself and all ERC20 assets)
# are stored already divided by the decimals. In these cases replace decimals with 0
# so the division of 10^0 will do nothing.
defp maybe_override_decimals("ethereum", decimals), do: decimals
defp maybe_override_decimals(_blockchain, _decimal), do: 0
end
|
lib/sanbase/balances/balance.ex
| 0.867766 | 0.412412 |
balance.ex
|
starcoder
|
defmodule ABNF.Interpreter do
@moduledoc """
This modules implements the Grammar.t interpreter. Applying a Grammar.t to the
given input will result in a CaptureResult.t or an exception.
Copyright 2015 <NAME> <<EMAIL>>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
alias ABNF.Util, as: Util
alias ABNF.Grammar, as: Grammar
alias ABNF.CaptureResult, as: Res
require Logger
@doc """
Parses the given input using the given grammar.
"""
@spec apply(
Grammar.t(),
String.t(),
charlist,
term
) :: ABNF.CaptureResult.t() | no_return
def apply(grammar, rule_str, input, state \\ nil) do
rule_str = Util.rulename(rule_str)
case parse_real(grammar, %{element: :rulename, value: rule_str}, input, state) do
nil ->
nil
{
r_string_text,
r_string_tokens,
r_values,
r_state,
r_rest
} ->
%Res{
string_text: r_string_text,
string_tokens: r_string_tokens,
values: r_values,
state: r_state,
input: input,
rest: r_rest
}
end
end
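# A hedged usage sketch: `grammar` is assumed to be a Grammar.t produced
# elsewhere in this library, and the rule name and input are illustrative:
#
#     case ABNF.Interpreter.apply(grammar, "ipv4address", '192.168.0.1', %{}) do
#       nil -> :no_match
#       %ABNF.CaptureResult{string_text: text, rest: rest} -> {text, rest}
#     end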
defp parse_real(
grammar,
e = %{element: :rule, value: a, code: c},
input,
state
) do
case parse_real(grammar, a, input, state) do
nil ->
nil
r = {
r_string_text,
r_string_tokens,
r_values,
r_state,
r_rest
} ->
if is_nil(c) do
r
else
try do
{m, f, _} = c
case :erlang.apply(m, f, [
r_state,
r_string_text,
r_string_tokens,
r_values
]) do
{:ok, state} ->
{
r_string_text,
r_string_tokens,
r_values,
state,
r_rest
}
{:ok, state, val} ->
{
r_string_text,
r_string_tokens,
[val],
state,
r_rest
}
r ->
raise ArgumentError,
"Unexpected result for rule #{inspect(e)} #{inspect(r)}"
end
rescue
ex ->
Logger.error(
"Unexpected result for rule #{inspect(e)}" <>
" when running code #{inspect(e.code)}"
)
stacktrace = __STACKTRACE__
reraise ex, stacktrace
end
end
end
end
defp parse_real(
grammar,
%{element: :prose_val, value: v},
input,
state
) do
parse_real(grammar, %{element: :rulename, value: v}, input, state)
end
defp parse_real(
grammar,
%{element: :alternation, value: alternation},
input,
state
) do
run_concs(grammar, alternation, input, state, nil)
end
defp parse_real(
_grammar,
%{element: :num_range, value: %{from: from, to: to}},
input,
state
) do
case input do
[char | rest] ->
if(char >= from and char <= to) do
result = [char]
{
result,
[result],
[result],
state,
rest
}
else
nil
end
_ ->
nil
end
end
defp parse_real(
_grammar,
%{element: :char_val, value: %{regex: r, length: l}},
input,
state
) do
case :re.run(input, r) do
{:match, _} ->
{s1, rest} = :lists.split(l, input)
{
s1,
[s1],
[s1],
state,
rest
}
_ ->
nil
end
end
defp parse_real(_grammar, %{element: :num_concat, value: list}, input, state) do
case num_concat(list, input) do
nil ->
nil
{match, rest} ->
{
match,
[match],
[match],
state,
rest
}
end
end
defp parse_real(grammar, %{element: :rulename, value: e}, input, state) do
value =
case :maps.find(e, grammar) do
{:ok, value} -> value
_ -> raise ArgumentError, "Rule #{e} not found in #{inspect(Map.keys(grammar))}"
end
parse_real(grammar, value, input, state)
end
defp parse_real(grammar, %{element: :group, value: e}, input, state) do
parse_real(grammar, e, input, state)
end
defp parse_real(grammar, %{element: :option, value: e}, input, state) do
case parse_real(grammar, e, input, state) do
nil ->
{
'',
[''],
[],
state,
input
}
r ->
r
end
end
defp num_concat(list, input, acc \\ [])
defp num_concat([], input, acc) do
match = :lists.reverse(acc)
{match, input}
end
defp num_concat([char1 | rest_list], [char2 | rest_input], acc) do
if char1 === char2 do
num_concat(rest_list, rest_input, [char1 | acc])
else
nil
end
end
defp repetition(
grammar,
e = %{element: :repetition, value: %{from: from, to: to, value: v}},
input,
state,
acc = {
acc_string_text,
acc_string_tokens,
acc_values,
_acc_state,
_acc_rest
}
) do
case parse_real(grammar, v, input, state) do
nil ->
if length(acc_values) >= from do
acc
else
nil
end
{
r_string_text,
_r_string_tokens,
r_values,
r_state,
r_rest
} ->
{
_acc_string_text,
_acc_string_tokens,
acc_values,
_acc_state,
_acc_rest
} =
acc = {
[r_string_text | acc_string_text],
[r_string_text | acc_string_tokens],
[r_values | acc_values],
r_state,
r_rest
}
if length(acc_values) === to do
acc
else
# Check for from:0 to: :infinity and empty match
if r_string_text === '' do
acc
else
repetition(grammar, e, r_rest, r_state, acc)
end
end
end
end
defp concatenation(
grammar,
[c = %{value: value = %{from: from}} | cs],
input,
state,
acc,
next_match \\ nil
) do
r =
if is_nil(next_match) do
repetition(grammar, c, input, state, {
[],
[],
[],
state,
input
})
else
next_match
end
if is_nil(r) do
nil
else
# This one matches, but we need to check if the next one also matches
# and try with one less repetition if not (backtracking)
{
_r_string_text,
r_string_tokens,
r_values,
r_state,
r_rest
} = r
case cs do
[next_c | _next_cs] ->
case repetition(grammar, next_c, r_rest, r_state, {
[],
[],
[],
r_state,
r_rest
}) do
nil ->
match_length = length(r_string_tokens)
to = match_length - 1
if to > 0 and to >= from do
c_val = :maps.put(:to, to, value)
c = :maps.put(:value, c_val, c)
[h_string_tokens | t_string_tokens] = r_string_tokens
[_h_values | t_values] = r_values
rest = :lists.append(h_string_tokens, r_rest)
r = {
t_string_tokens,
t_string_tokens,
t_values,
r_state,
rest
}
concatenation(grammar, [c | cs], input, state, acc, r)
else
if from === 0 do
r = {
'',
[],
[],
state,
input
}
acc =
{
_acc_string_text,
_acc_string_tokens,
_acc_values,
acc_state,
acc_rest
} = conc_result(r, acc)
concatenation(grammar, cs, acc_rest, acc_state, acc)
else
nil
end
end
next_r ->
# Next one matches, we're cool. Go on, and pass on the next match
# so it's not parsed again.
acc =
{
_acc_string_text,
_acc_string_tokens,
_acc_values,
acc_state,
acc_rest
} = conc_result(r, acc)
concatenation(grammar, cs, acc_rest, acc_state, acc, next_r)
end
[] ->
acc = conc_result(r, acc)
prep_result(acc)
end
end
end
defp run_concs(_grammar, [], _input, _state, acc) do
case acc do
nil -> nil
{_, r} -> r
end
end
defp run_concs(grammar, [%{value: value} | concs], input, state, acc) do
case concatenation(grammar, value, input, state, {
[],
[],
[],
state,
input
}) do
nil ->
run_concs(grammar, concs, input, state, acc)
r = {
r_string_text,
_r_string_tokens,
_r_values,
_r_state,
_r_rest
} ->
case acc do
nil ->
l = :erlang.iolist_size(r_string_text)
run_concs(grammar, concs, input, state, {l, r})
{last_l, _last_r} ->
l = :erlang.iolist_size(r_string_text)
if last_l >= l do
run_concs(grammar, concs, input, state, acc)
else
run_concs(grammar, concs, input, state, {l, r})
end
end
end
end
defp prep_result({
r_string_text,
r_string_tokens,
r_values,
r_state,
r_rest
}) do
{
:lists.flatten(:lists.reverse(r_string_text)),
:lists.map(&:lists.flatten/1, :lists.reverse(r_string_tokens)),
:lists.reverse(:lists.map(&:lists.reverse/1, r_values)),
r_state,
r_rest
}
end
defp conc_result(
{
r_string_text,
_r_string_tokens,
r_values,
r_state,
r_rest
},
{
acc_string_text,
acc_string_tokens,
acc_values,
_acc_state,
_acc_rest
}
) do
m = :lists.reverse(r_string_text)
{
[m | acc_string_text],
[m | acc_string_tokens],
[r_values | acc_values],
r_state,
r_rest
}
end
end
|
lib/ex_abnf/interpreter.ex
| 0.743168 | 0.567337 |
interpreter.ex
|
starcoder
|
defmodule Bme680 do
@moduledoc """
Provides a high level abstraction to interface with the
BME680 environmental sensor on Linux platforms.
"""
use Bitwise
use GenServer
defmodule State do
@moduledoc false
defstruct port: nil, subscribers: [], async_subscribers: [], measuring: false
end
defmodule Measurement do
@moduledoc false
defstruct temperature: nil, pressure: nil, humidity: nil, gas_resistance: nil
end
@doc """
Starts and links the `Bme680` GenServer.
Options:
- `i2c_device_number` is the number of the i2c device, e.g. 1 for `/dev/i2c-1`
- `i2c_address` i2c address of the sensor. It can be only `0x76` or `0x77`
- `temperature_offset` is an offset, in degrees Celsius, that will be
subtracted to temperature measurements in order to compensate for the internal
heating of the device. It's typically around 4 or 5 degrees, and also
affects relative humidity calculations
"""
@spec start_link(
[
i2c_device_number: integer,
i2c_address: 0x76 | 0x77,
temperature_offset: non_neg_integer
],
[term]
) :: GenServer.on_start()
def start_link(bme_opts \\ [], opts \\ []) do
i2c_device_number = Keyword.get(bme_opts, :i2c_device_number, 1)
i2c_address = Keyword.get(bme_opts, :i2c_address, 0x76)
temperature_offset = Keyword.get(bme_opts, :temperature_offset, 0)
if Enum.member?([0x76, 0x77], i2c_address) do
arg = [i2c_device_number, i2c_address, temperature_offset]
GenServer.start_link(__MODULE__, arg, opts)
else
{:error, "invalid i2c address #{i2c_address}. Valid values are 0x76 and 0x77"}
end
end
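# A minimal usage sketch (device number and offset are illustrative):
#
#     {:ok, pid} = Bme680.start_link(i2c_device_number: 1, temperature_offset: 4)
#     measurement = Bme680.measure(pid)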
@doc """
Perform a measurement on the BME680 sensor and synchronously return it
Measurements are structs like:
```
%Bme680.Measurement{
temperature: 21.74,
pressure: 1090.52,
humidity: 45.32,
gas_resistance: 10235
}
```
"""
@spec measure(GenServer.server()) :: %Measurement{
temperature: float,
pressure: float,
humidity: float,
gas_resistance: integer | nil
}
def measure(pid) do
GenServer.call(pid, :measure)
end
@doc """
Perform a measurement on the BME680 sensor and asynchronously send the result
as a message to the pid in `send_to`
"""
@spec measure_async(GenServer.server(), pid) :: :ok
def measure_async(pid, send_to) do
GenServer.cast(pid, {:measure_async, send_to})
end
@doc """
Gracefully stops the `Bme680` GenServer.
"""
@spec stop(GenServer.server()) :: :ok
def stop(pid) do
GenServer.cast(pid, :stop)
end
# GenServer callbacks
def init([i2c_device_number, i2c_address, temperature_offset]) do
executable_dir =
Application.get_env(:elixir_bme680, :executable_dir, :code.priv_dir(:elixir_bme680))
port =
Port.open({:spawn_executable, executable_dir ++ '/bme680'}, [
{:args, ["#{i2c_device_number}", "#{i2c_address}", "#{temperature_offset}"]},
{:line, 64},
:use_stdio,
:binary,
:exit_status
])
{:ok, %State{port: port, measuring: false, subscribers: []}}
end
def handle_call(
:measure,
from,
state = %State{port: port, subscribers: subscribers, measuring: measuring}
) do
unless measuring, do: Port.command(port, "measure\n")
{:noreply, %State{state | measuring: true, subscribers: [from | subscribers]}}
end
def handle_cast(
{:measure_async, pid},
state = %State{port: port, async_subscribers: subs, measuring: measuring}
) do
unless measuring, do: Port.command(port, "measure\n")
{:noreply, %State{state | measuring: true, async_subscribers: [pid | subs]}}
end
def handle_cast(:stop, state) do
{:stop, :normal, state}
end
def handle_info({p, {:data, {:eol, line}}}, %State{
port: p,
subscribers: subs,
async_subscribers: async_subs
}) do
measurement = decode_measurement(line)
for pid <- subs, do: GenServer.reply(pid, measurement)
for pid <- async_subs, do: send(pid, measurement)
{:noreply, %State{port: p, measuring: false, subscribers: [], async_subscribers: []}}
end
def handle_info({port, {:exit_status, exit_status}}, state = %State{port: port}) do
{:stop, exit_status, state}
end
# Private helper functions
defp decode_measurement(line) do
case line |> String.trim() |> String.split(",", trim: true) do
["T:" <> t, "P:" <> p, "H:" <> h, "G:" <> g] ->
%Measurement{
temperature: String.to_float(t),
pressure: String.to_float(p),
humidity: String.to_float(h),
gas_resistance: String.to_integer(g)
}
["T:" <> t, "P:" <> p, "H:" <> h] ->
%Measurement{
temperature: String.to_float(t),
pressure: String.to_float(p),
humidity: String.to_float(h)
}
_ ->
{:error, "Measurement failed"}
end
end
end
|
lib/elixir_bme680.ex
| 0.866683 | 0.852629 |
elixir_bme680.ex
|
starcoder
|
defmodule ArangoXEcto.Schema do
@moduledoc """
This module is a helper to automatically specify the primary key.
The primary key is the Arango `_key` field but the _id field is also provided.
Schema modules should use this module by adding `use ArangoXEcto.Schema` to the module. The only
exception to this is if the collection is an edge collection; in that case, refer to ArangoXEcto.Edge.
## Example
defmodule MyProject.Accounts.User do
use ArangoXEcto.Schema
import Ecto.Changeset
schema "users" do
field :first_name, :string
field :last_name, :string
timestamps()
end
@doc false
def changeset(app, attrs) do
app
|> cast(attrs, [:first_name, :last_name])
|> validate_required([:first_name, :last_name])
end
end
"""
defmacro __using__(_) do
quote do
use Ecto.Schema
import unquote(__MODULE__)
@primary_key {:id, :binary_id, autogenerate: true, source: :_key}
@foreign_key_type :binary_id
end
end
@doc """
Defines an outgoing relationship of many objects
Behind the scenes this defines a many-to-many relationship so that Ecto can load the relationship
using the built-in functions.
The use of this function **MUST** be accompanied by an `incoming/3` definition in the other target node.
This will also define the `__id__` field if it is not already defined so that Ecto can map the relationship.
## Example
defmodule MyProject.User do
use ArangoXEcto.Schema
schema "users" do
field :name, :string
# Will use the automatically generated edge
outgoing :posts, MyProject.Post
# Will use the UserPosts edge
outgoing :posts, MyProject.Post, edge: MyProject.UserPosts
end
end
"""
defmacro outgoing(name, target, opts \\ []) do
quote do
opts = unquote(opts)
try do
field(:__id__, :binary_id, source: :_id, read_after_writes: true)
rescue
ArgumentError -> :ok
end
many_to_many(unquote(name), unquote(target),
join_through:
Keyword.get(opts, :edge, ArangoXEcto.edge_module(__MODULE__, unquote(target))),
join_keys: [_from: :__id__, _to: :__id__],
on_replace: :delete
)
end
end
@doc """
Defines an outgoing relationship of one object
Unlike `outgoing/3`, this does not create a graph relation and instead places the `_id` in a field in the incoming
schema. This **MUST** be accompanied by a `one_incoming/3` definition in the other target schema.
Behind the scenes this injects the `__id__` field to store the `_id` value and uses the built-in Ecto `has_one/3`
function.
Options passed to the `opts` attribute are passed to the `has_one/3` definition. Refrain from overriding the
`:references` and `:foreign_key` attributes unless you know what you are doing.
## Example
defmodule MyProject.User do
use ArangoXEcto.Schema
schema "users" do
field :name, :string
one_outgoing :best_post, MyProject.Post
end
end
"""
defmacro one_outgoing(name, target, opts \\ []) do
quote do
opts = unquote(opts)
try do
field(:__id__, :binary_id, source: :_id, read_after_writes: true)
rescue
ArgumentError -> :ok
end
has_one(unquote(name), unquote(target),
references: :__id__,
foreign_key: Ecto.Association.association_key(__MODULE__, "id"),
on_replace: :delete
)
end
end
@doc """
Defines an incoming relationship
Behind the scenes this defines a many-to-many relationship so that Ecto can load the relationship
using the built-in functions.
The use of this function **MUST** be accompanied by an `outgoing/3` definition in the other target node.
This will also define the `__id__` field if it is not already defined so that Ecto can map the relationship.
## Example
defmodule MyProject.Post do
use ArangoXEcto.Schema
schema "posts" do
field :title, :string
# Will use the automatically generated edge
incoming :users, MyProject.User
# Will use the UserPosts edge
incoming :users, MyProject.User, edge: MyProject.UserPosts
end
end
"""
defmacro incoming(name, source, opts \\ []) do
quote do
opts = unquote(opts)
try do
field(:__id__, :binary_id, source: :_id, read_after_writes: true)
rescue
ArgumentError -> :ok
end
many_to_many(unquote(name), unquote(source),
join_through:
Keyword.get(
opts,
:edge,
ArangoXEcto.edge_module(__MODULE__, unquote(source), create: false)
),
join_keys: [_to: :__id__, _from: :__id__],
on_replace: :delete
)
end
end
@doc """
Defines an incoming relationship of one object
Unlike `incoming/3`, this does not create a graph relation and instead places the `_id` in a field. If the value
passed to the name attribute is `:user` then the foreign key created on this schema will be `:user_id` and will
store the full `_id` of that user. By storing the full `_id`, you are still able to perform full AQL queries.
This **MUST** be accompanied by a `one_outgoing/3` definition in the other target schema.
Behind the scenes this injects the `__id__` field to store the `_id` value and uses the built-in Ecto `belongs_to/3`
function.
Options passed to the `opts` attribute are passed to the `belongs_to/3` definition. Refrain from overriding the
`:references` and `:foreign_key` attributes unless you know what you are doing.
## Example
defmodule MyProject.Post do
use ArangoXEcto.Schema
schema "posts" do
field :title, :string
one_incoming :user, MyProject.User
end
end
"""
defmacro one_incoming(name, source, opts \\ []) do
quote do
opts = unquote(opts)
try do
field(:__id__, :binary_id, source: :_id, read_after_writes: true)
rescue
ArgumentError -> :ok
end
belongs_to(unquote(name), unquote(source),
references: :__id__,
foreign_key: unquote(name) |> build_foreign_key(),
on_replace: :delete
)
end
end
@spec build_foreign_key(atom()) :: atom()
def build_foreign_key(name) do
name
|> Atom.to_string()
|> Kernel.<>("_id")
|> String.to_atom()
end
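# For example: build_foreign_key(:user) #=> :user_id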
end
|
lib/arangox_ecto/schema.ex
| 0.777933 | 0.428323 |
schema.ex
|
starcoder
|
defprotocol ExAws.Dynamo.Encodable do
@type t :: any
@doc "Converts an elixir value into a map tagging the value with its dynamodb type"
def encode(value, options)
end
defimpl ExAws.Dynamo.Encodable, for: Atom do
def encode(true, _), do: %{"BOOL" => "true"}
def encode(false, _), do: %{"BOOL" => "false"}
def encode(nil, _), do: %{"NULL" => "true"}
def encode(value, _), do: %{"S" => value |> Atom.to_string()}
end
defimpl ExAws.Dynamo.Encodable, for: Integer do
def encode(val, _) do
%{"N" => val |> Integer.to_string()}
end
end
defimpl ExAws.Dynamo.Encodable, for: Float do
def encode(val, _) do
%{"N" => String.Chars.Float.to_string(val)}
end
end
defimpl ExAws.Dynamo.Encodable, for: HashDict do
def encode(hashdict, _) do
%{"M" => ExAws.Dynamo.Encodable.Map.do_encode(hashdict)}
end
end
defimpl ExAws.Dynamo.Encodable, for: Any do
defmacro __deriving__(module, struct, options) do
deriving(module, struct, options)
end
def deriving(module, _struct, options) do
extractor =
if only = options[:only] do
quote(do: Map.take(struct, unquote(only)))
else
quote(do: :maps.remove(:__struct__, struct))
end
quote do
defimpl ExAws.Dynamo.Encodable, for: unquote(module) do
def encode(struct, options) do
ExAws.Dynamo.Encodable.Map.encode(unquote(extractor), options)
end
end
end
end
def encode(_, _), do: raise("ExAws.Dynamo.Encodable does not fallback to any")
end
defimpl ExAws.Dynamo.Encodable, for: Map do
defmacro __deriving__(module, struct, options) do
ExAws.Dynamo.Encodable.Any.deriving(module, struct, options)
end
def encode(map, options) do
%{"M" => do_encode(map, options)}
end
def do_encode(map, only: only) do
map
|> Map.take(only)
|> do_encode
end
def do_encode(map, except: except) do
:maps.without(except, map)
|> do_encode
end
def do_encode(map, _), do: do_encode(map)
def do_encode(map) do
Enum.reduce(map, %{}, fn
{_, ""}, map ->
map
{k, v}, map when is_binary(k) ->
Map.put(map, k, ExAws.Dynamo.Encodable.encode(v, []))
{k, v}, map ->
key = String.Chars.to_string(k)
Map.put(map, key, ExAws.Dynamo.Encodable.encode(v, []))
end)
end
end
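# An illustrative encoding, derived from the impls in this file:
#
#     ExAws.Dynamo.Encodable.encode(%{"name" => "spot", "age" => 4}, [])
#     #=> %{"M" => %{"age" => %{"N" => "4"}, "name" => %{"S" => "spot"}}}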
defimpl ExAws.Dynamo.Encodable, for: BitString do
def encode(val, _) do
%{"S" => val}
end
end
defimpl ExAws.Dynamo.Encodable, for: List do
alias ExAws.Dynamo.Encodable
def encode([], _), do: %{"L" => []}
@doc """
DynamoDB offers typed sets and L, a generic list of typed attributes.
"""
def encode(list, _) do
typed_values =
for value <- list do
Encodable.encode(value, [])
end
%{"L" => typed_values}
end
end
defimpl ExAws.Dynamo.Encodable, for: MapSet do
def encode(mapset, _) do
cond do
MapSet.size(mapset) == 0 ->
raise "Cannot determine a proper data type for an empty MapSet"
Enum.all?(mapset, &is_number/1) ->
%{"NS" => number_set_to_list(mapset)}
Enum.all?(mapset, &is_binary/1) ->
%{"SS" => MapSet.to_list(mapset)}
true ->
raise "All elements in a MapSet must be only numbers or only strings"
end
end
defp number_set_to_list(number_mapset) do
number_mapset
|> MapSet.to_list()
|> Enum.map(fn
n when is_integer(n) -> Integer.to_string(n)
n when is_float(n) -> Float.to_string(n)
end)
end
end
|
lib/ex_aws/dynamo/encodable.ex
| 0.672762 | 0.436082 |
encodable.ex
|
starcoder
|
defmodule Faker.Date do
import Faker.Util, only: [pick: 1]
@moduledoc """
Functions for generating dates
"""
@doc """
Returns a random date of birth for a person with an age specified by a number or range
"""
@spec date_of_birth(integer | Range.t()) :: Date.t()
def date_of_birth(age_or_range \\ 18..99)
def date_of_birth(age) when is_integer(age) do
{{year_now, month_now, day_now}, _time} = :calendar.local_time()
earliest_year = year_now - (age + 1)
potential_earliest_date = {earliest_year, month_now, day_now + 1}
potential_latest_date = {earliest_year + 1, month_now, day_now}
earliest_date =
if :calendar.valid_date(potential_earliest_date),
do: {earliest_year, month_now, day_now + 1},
else: {earliest_year, 3, 1}
latest_date =
if :calendar.valid_date(potential_latest_date),
do: {earliest_year + 1, month_now, day_now},
else: {earliest_year + 1, 2, 28}
earliest_as_seconds = :calendar.datetime_to_gregorian_seconds({earliest_date, {0, 0, 0}})
latest_as_seconds = :calendar.datetime_to_gregorian_seconds({latest_date, {23, 59, 59}})
{chosen_date, _time} =
earliest_as_seconds..latest_as_seconds
|> pick()
|> :calendar.gregorian_seconds_to_datetime()
{:ok, result} = Date.from_erl(chosen_date)
result
end
def date_of_birth(age_range) do
age_range
|> pick()
|> date_of_birth()
end
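# Example (results vary with the random seed and the current date):
#
#     Faker.Date.date_of_birth(21)     #=> a date making the person exactly 21
#     Faker.Date.date_of_birth(18..30) #=> a date of birth for an 18-30 year old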
@doc """
Returns a random date in the past up to N days, today not included
"""
@spec backward(integer) :: Date.t()
def backward(days) do
forward(-days)
end
@doc """
Returns a random date in the future up to N days, today not included
"""
@spec forward(integer) :: Date.t()
def forward(days) do
days
|> Faker.DateTime.forward()
|> DateTime.to_date()
end
@doc """
Returns a random date between two dates
## Examples
iex> Faker.Date.between(~D[2010-12-10], ~D[2016-12-25])
~D[2013-06-07]
iex> Faker.Date.between(~D[2000-12-20], ~D[2000-12-25])
~D[2000-12-20]
iex> Faker.Date.between(~D[2000-02-02], ~D[2016-02-05])
~D[2014-10-23]
iex> Faker.Date.between(~D[2010-12-20], ~D[2010-12-25])
~D[2010-12-21]
"""
@spec between(Date.t(), Date.t()) :: Date.t()
def between(from, to) do
from
|> Faker.DateTime.between(to)
|> DateTime.to_date()
end
end
|
lib/faker/date.ex
| 0.866345 | 0.528959 |
date.ex
|
starcoder
|
defmodule Gringotts.Response do
@moduledoc """
Defines the Response `struct` and some utilities.
All `Gringotts` public API calls will return a `Response.t` wrapped in an
`:ok` or `:error` `tuple`. It is guaranteed that an `:ok` will be returned
only when the request succeeds at the gateway, ie, no error occurs.
"""
defstruct [
:success, :id, :token, :status_code, :gateway_code, :reason, :message,
:avs_result, :cvc_result, :raw, :fraud_review
]
@typedoc """
The standard Response from `Gringotts`.
| Field | Type | Description |
|----------------|-------------------|---------------------------------------|
| `success` | `boolean` | Indicates the status of the\
transaction. |
| `id` | `String.t` | Gateway supplied identifier of the\
transaction. |
| `token` | `String.t` | Gateway supplied `token`. _This is\
different from `Response.id`_. |
| `status_code` | `non_neg_integer` | `HTTP` response code. |
| `gateway_code` | `String.t` | Gateway's response code "as-is". |
| `message` | `String.t` | String describing the response status.|
| `avs_result` | `map` | Address Verification Result.\
Schema: `%{street: String.t,\
zip_code: String.t}` |
| `cvc_result` | `String.t` | Result of the [CVC][cvc] validation. |
| `reason` | `String.t` | Explain the `reason` of error, in\
case of error. `nil` otherwise. |
| `raw` | `String.t` | Raw response from the gateway. |
| `fraud_review` | `term` | Gateway's risk assessment of the\
transaction. |
## Notes
1. It is not guaranteed that all fields will be populated for all calls, and
some gateways might insert non-standard fields. Please refer the Gateways'
docs for that information.
2. `success` is deprecated in `v1.1.0` and will be removed in `v1.2.0`.
3. For some actions the Gateway returns an additional token, say as response to
a customer tokenization/registration. In such cases the `id` is not
usable because it refers to the transaction; the `token` is.
> On the other hand for authorizations or captures, there's no `token`.
4. The schema of `fraud_review` is Gateway specific.
[cvc]: https://en.wikipedia.org/wiki/Card_security_code
"""
@type t :: %__MODULE__{
success: boolean,
id: String.t,
token: String.t,
status_code: non_neg_integer,
gateway_code: String.t,
reason: String.t,
message: String.t,
avs_result: %{street: String.t, zip_code: String.t},
cvc_result: String.t,
raw: String.t,
fraud_review: term
}
def success(opts \\ []) do
new(true, opts)
end
def error(opts \\ []) do
new(false, opts)
end
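# A minimal construction sketch (field values are illustrative):
#
#     Gringotts.Response.success(id: "txn_123", status_code: 200)
#     #=> %Gringotts.Response{success: true, id: "txn_123", status_code: 200, ...}
#
#     Gringotts.Response.error(reason: "card declined")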
defp new(success, opts) do
Map.merge(%__MODULE__{success: success}, Enum.into(opts, %{}))
end
end
|
lib/gringotts/response.ex
| 0.883044 | 0.778565 |
response.ex
|
starcoder
|
defmodule Absinthe do
@moduledoc """
Documentation for the Absinthe package, a toolkit for building GraphQL
APIs with Elixir.
For usage information, see [the documentation](http://hexdocs.pm/absinthe), which
includes guides, API information for important modules, and links to useful resources.
"""
defmodule ExecutionError do
@moduledoc """
An error during execution.
"""
defexception message: "execution failed"
end
defmodule AnalysisError do
@moduledoc """
An error during analysis.
"""
defexception message: "analysis failed"
end
@type result_selection_t :: %{
String.t() =>
nil
| integer
| float
| boolean
| binary
| atom
| result_selection_t
| [result_selection_t]
}
@type result_error_t ::
%{message: String.t()}
| %{message: String.t(), locations: [%{line: pos_integer, column: integer}]}
@type result_t ::
%{data: nil | result_selection_t}
| %{data: nil | result_selection_t, errors: [result_error_t]}
| %{errors: [result_error_t]}
@doc """
Evaluates a query document against a schema, with options.
## Options
* `:adapter` - The name of the adapter to use. See the `Absinthe.Adapter`
behaviour and the `Absinthe.Adapter.Passthrough` and
`Absinthe.Adapter.LanguageConventions` modules that implement it.
(`Absinthe.Adapter.LanguageConventions` is the default value for this option.)
* `:operation_name` - If more than one operation is present in the provided
query document, this must be provided to select which operation to execute.
* `:variables` - A map of provided variable values to be used when filling in
arguments in the provided query document.
* `:context` - A map of the execution context.
* `:root_value` - A root value to use as the source for toplevel fields.
* `:analyze_complexity` - Whether to analyze the complexity before
executing an operation.
* `:max_complexity` - An integer (or `:infinity`) for the maximum allowed
complexity for the operation being executed.
## Examples
```
\"""
query GetItemById($id: ID) {
item(id: $id) {
name
}
}
\"""
|> Absinthe.run(App.Schema, variables: %{"id" => params[:item_id]})
```
See the `Absinthe` module documentation for more examples.
"""
@type run_opts :: [
context: %{},
adapter: Absinthe.Adapter.t(),
root_value: term,
operation_name: String.t(),
analyze_complexity: boolean,
variables: %{optional(String.t()) => any()},
max_complexity: non_neg_integer | :infinity
]
@type run_result :: {:ok, result_t} | {:error, String.t()}
@spec run(
binary | Absinthe.Language.Source.t() | Absinthe.Language.Document.t(),
Absinthe.Schema.t(),
run_opts
) :: run_result
def run(document, schema, options \\ []) do
pipeline =
schema
|> Absinthe.Pipeline.for_document(options)
case Absinthe.Pipeline.run(document, pipeline) do
{:ok, %{result: result}, _phases} ->
{:ok, result}
{:error, msg, _phases} ->
{:error, msg}
end
end
@doc """
Evaluates a query document against a schema, raising an `ExecutionError` if an error occurs.
## Options
See `run/3` for the available options.
"""
@spec run!(
binary | Absinthe.Language.Source.t() | Absinthe.Language.Document.t(),
Absinthe.Schema.t(),
Keyword.t()
) :: result_t | no_return
def run!(input, schema, options \\ []) do
case run(input, schema, options) do
{:ok, result} -> result
{:error, err} -> raise ExecutionError, message: err
end
end
end
|
lib/absinthe.ex
| 0.881341 | 0.866246 |
absinthe.ex
|
starcoder
|
defmodule Conversion.Distance do
@moduledoc """
Converts from one distance to another, e.g. feet to inches
"""
@typedoc """
Distance measurement, e.g. :feet, inches
"""
@type measurement ::
:miles
| :yards
| :feet
| :inches
| :kilometers
| :meters
| :centimeters
| :millimeters
| :micrometers
| :nanometers
@spec convert(measurement_value :: float(), from :: measurement(), to :: measurement()) ::
float()
def convert(measurement_value, :miles, :miles), do: measurement_value
def convert(measurement_value, :miles, :yards), do: measurement_value * 1_760
def convert(measurement_value, :miles, :feet), do: measurement_value * 5_280
def convert(measurement_value, :miles, :inches), do: measurement_value * 63_360
def convert(measurement_value, :miles, :kilometers), do: measurement_value * 1.609344
def convert(measurement_value, :miles, :meters), do: measurement_value * 1_609.344
def convert(measurement_value, :miles, :centimeters), do: measurement_value * 160_934.4
def convert(measurement_value, :miles, :millimeters), do: measurement_value * 1_609_344
def convert(measurement_value, :miles, :micrometers), do: measurement_value * 1_609_344_000
def convert(measurement_value, :miles, :nanometers), do: measurement_value * 1_609_344_000_000
def convert(measurement_value, :yards, :yards), do: measurement_value
def convert(measurement_value, :yards, :miles), do: measurement_value / 1_760
def convert(measurement_value, :yards, :feet), do: measurement_value * 3
def convert(measurement_value, :yards, :inches), do: measurement_value * 36
def convert(measurement_value, :yards, :kilometers), do: measurement_value * 0.000914
def convert(measurement_value, :yards, :meters), do: measurement_value * 0.9144
def convert(measurement_value, :yards, :centimeters), do: measurement_value * 91.44
def convert(measurement_value, :yards, :millimeters), do: measurement_value * 914.4
def convert(measurement_value, :yards, :micrometers), do: measurement_value * 914_400
def convert(measurement_value, :yards, :nanometers), do: measurement_value * 914_400_000
def convert(measurement_value, :feet, :feet), do: measurement_value
def convert(measurement_value, :feet, :miles), do: measurement_value / 5_280
def convert(measurement_value, :feet, :yards), do: measurement_value / 3
def convert(measurement_value, :feet, :inches), do: measurement_value * 12
def convert(measurement_value, :feet, :kilometers), do: measurement_value * 0.000305
def convert(measurement_value, :feet, :meters), do: measurement_value * 0.3048
def convert(measurement_value, :feet, :centimeters), do: measurement_value * 30.48
def convert(measurement_value, :feet, :millimeters), do: measurement_value * 304.8
def convert(measurement_value, :feet, :micrometers), do: measurement_value * 304_800
def convert(measurement_value, :feet, :nanometers), do: measurement_value * 304_800_000
def convert(measurement_value, :inches, :inches), do: measurement_value
def convert(measurement_value, :inches, :miles), do: measurement_value / 63_360
def convert(measurement_value, :inches, :yards), do: measurement_value / 36
def convert(measurement_value, :inches, :feet), do: measurement_value / 12
def convert(measurement_value, :inches, :kilometers), do: measurement_value * 2.54e-5
def convert(measurement_value, :inches, :meters), do: measurement_value * 0.0254
def convert(measurement_value, :inches, :centimeters), do: measurement_value * 2.54
def convert(measurement_value, :inches, :millimeters), do: measurement_value * 25.4
def convert(measurement_value, :inches, :micrometers), do: measurement_value * 25_400
def convert(measurement_value, :inches, :nanometers), do: measurement_value * 25_400_000
def convert(measurement_value, :kilometers, :kilometers), do: measurement_value
def convert(measurement_value, :kilometers, :miles), do: measurement_value * 0.621371
def convert(measurement_value, :kilometers, :yards), do: measurement_value * 1_093.613298
def convert(measurement_value, :kilometers, :feet), do: measurement_value * 3_280.839895
def convert(measurement_value, :kilometers, :inches), do: measurement_value * 39_370.07874
def convert(measurement_value, :kilometers, :meters), do: measurement_value * 1_000
def convert(measurement_value, :kilometers, :centimeters), do: measurement_value * 100_000
def convert(measurement_value, :kilometers, :millimeters), do: measurement_value * 1_000_000
def convert(measurement_value, :kilometers, :micrometers), do: measurement_value * 1_000_000_000
def convert(measurement_value, :kilometers, :nanometers),
do: measurement_value * 1_000_000_000_000
def convert(measurement_value, :meters, :meters), do: measurement_value
def convert(measurement_value, :meters, :miles), do: measurement_value * 0.000621
def convert(measurement_value, :meters, :yards), do: measurement_value * 1.093613
def convert(measurement_value, :meters, :feet), do: measurement_value * 3.28084
def convert(measurement_value, :meters, :inches), do: measurement_value * 39.370079
def convert(measurement_value, :meters, :kilometers), do: measurement_value / 1000
def convert(measurement_value, :meters, :centimeters), do: measurement_value * 100
def convert(measurement_value, :meters, :millimeters), do: measurement_value * 1000
def convert(measurement_value, :meters, :micrometers), do: measurement_value * 1_000_000
def convert(measurement_value, :meters, :nanometers), do: measurement_value * 1_000_000_000
def convert(measurement_value, :centimeters, :centimeters), do: measurement_value
def convert(measurement_value, :centimeters, :miles), do: measurement_value * 6.2137e-6
def convert(measurement_value, :centimeters, :yards), do: measurement_value * 0.010936
def convert(measurement_value, :centimeters, :feet), do: measurement_value / 30.48
def convert(measurement_value, :centimeters, :inches), do: measurement_value / 2.54
def convert(measurement_value, :centimeters, :kilometers), do: measurement_value / 100_000
def convert(measurement_value, :centimeters, :meters), do: measurement_value / 100
def convert(measurement_value, :centimeters, :millimeters), do: measurement_value * 10
def convert(measurement_value, :centimeters, :micrometers), do: measurement_value * 10_000
def convert(measurement_value, :centimeters, :nanometers), do: measurement_value * 10_000_000
def convert(measurement_value, :millimeters, :millimeters), do: measurement_value
def convert(measurement_value, :millimeters, :miles), do: measurement_value / 1_609_344
def convert(measurement_value, :millimeters, :yards), do: measurement_value * 0.001094
def convert(measurement_value, :millimeters, :feet), do: measurement_value * 0.003281
def convert(measurement_value, :millimeters, :inches), do: measurement_value / 25.4
def convert(measurement_value, :millimeters, :kilometers), do: measurement_value / 1_000_000
def convert(measurement_value, :millimeters, :meters), do: measurement_value / 1_000
def convert(measurement_value, :millimeters, :centimeters), do: measurement_value / 10
def convert(measurement_value, :millimeters, :micrometers), do: measurement_value * 1_000
def convert(measurement_value, :millimeters, :nanometers), do: measurement_value * 1_000_000
def convert(measurement_value, :micrometers, :micrometers), do: measurement_value
def convert(measurement_value, :micrometers, :miles), do: measurement_value / 1_609_344_000
def convert(measurement_value, :micrometers, :yards), do: measurement_value / 914_400
def convert(measurement_value, :micrometers, :feet), do: measurement_value / 304_800
def convert(measurement_value, :micrometers, :inches), do: measurement_value / 25_400
def convert(measurement_value, :micrometers, :kilometers), do: measurement_value / 1_000_000_000
def convert(measurement_value, :micrometers, :meters), do: measurement_value / 1_000_000
def convert(measurement_value, :micrometers, :centimeters), do: measurement_value / 10_000
def convert(measurement_value, :micrometers, :millimeters), do: measurement_value / 1_000
def convert(measurement_value, :micrometers, :nanometers), do: measurement_value * 1_000
def convert(measurement_value, :nanometers, :nanometers), do: measurement_value
def convert(measurement_value, :nanometers, :miles), do: measurement_value / 1_609_344_000_000
def convert(measurement_value, :nanometers, :yards), do: measurement_value / 914_400_000
def convert(measurement_value, :nanometers, :feet), do: measurement_value / 304_800_000
def convert(measurement_value, :nanometers, :inches), do: measurement_value / 25_400_000
def convert(measurement_value, :nanometers, :kilometers),
do: measurement_value / 1_000_000_000_000
def convert(measurement_value, :nanometers, :meters), do: measurement_value / 1_000_000_000
def convert(measurement_value, :nanometers, :centimeters), do: measurement_value / 10_000_000
def convert(measurement_value, :nanometers, :millimeters), do: measurement_value / 1_000_000
def convert(measurement_value, :nanometers, :micrometers), do: measurement_value / 1_000
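# Spot checks for the clauses above:
#
#     Conversion.Distance.convert(1, :miles, :feet)            #=> 5280
#     Conversion.Distance.convert(36, :inches, :yards)         #=> 1.0
#     Conversion.Distance.convert(2.5, :meters, :centimeters)  #=> 250.0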
end
|
lib/conversion/distance.ex
| 0.873835 | 0.928603 |
distance.ex
|
starcoder
|
defmodule Calendar.TimeZoneDatabase do
@moduledoc """
This module defines a behaviour for providing time zone data.
IANA provides time zone data that includes data about different
UTC offsets and standard offsets for time zones.
"""
@typedoc """
A period where a certain combination of UTC offset, standard offset and zone
abbreviation is in effect.
For instance one period could be the summer of 2018 in "Europe/London" where summer time /
daylight saving time is in effect and lasts from spring to autumn. At autumn the `std_offset`
changes along with the `zone_abbr` so a different period is needed during winter.
"""
@type time_zone_period :: %{
optional(any) => any,
utc_offset: Calendar.utc_offset(),
std_offset: Calendar.std_offset(),
zone_abbr: Calendar.zone_abbr()
}
@typedoc """
Limit for when a certain time zone period begins or ends.
A beginning is inclusive. An ending is exclusive. Eg. if a period is from
2015-03-29 01:00:00 and until 2015-10-25 01:00:00, the period includes and
begins from the begining of 2015-03-29 01:00:00 and lasts until just before
2015-10-25 01:00:00.
A beginning or end for certain periods are infinite. For instance the latest
period for time zones without DST or plans to change. However for the purpose
of this behaviour they are only used for gaps in wall time where the needed
period limits are at a certain time.
"""
@type time_zone_period_limit :: Calendar.naive_datetime()
@doc """
Time zone period for a point in time in UTC for a specific time zone.
Takes a time zone name and a point in time for UTC and returns a
`time_zone_period` for that point in time.
"""
@doc since: "1.8.0"
@callback time_zone_period_from_utc_iso_days(Calendar.iso_days(), Calendar.time_zone()) ::
{:ok, time_zone_period}
| {:error, :time_zone_not_found | :utc_only_time_zone_database}
@doc """
Possible time zone periods for a certain time zone and wall clock date and time.
When the provided `datetime` is ambiguous a tuple with `:ambiguous` and two possible
periods. The periods in the list are sorted with the first element being the one that begins first.
When the provided `datetime` is in a gap - for instance during the "spring forward" when going
from winter time to summer time, a tuple with `:gap` and two periods with limits are returned
in a nested tuple. The first nested two-tuple is the period before the gap and a naive datetime
with a limit for when the period ends (wall time). The second nested two-tuple is the period
just after the gap and a datetime (wall time) for when the period begins just after the gap.
If there is only a single possible period for the provided `datetime`, a tuple with `:ok`
and the `time_zone_period` is returned.
"""
@doc since: "1.8.0"
@callback time_zone_periods_from_wall_datetime(Calendar.naive_datetime(), Calendar.time_zone()) ::
{:ok, time_zone_period}
| {:ambiguous, time_zone_period, time_zone_period}
| {:gap, {time_zone_period, time_zone_period_limit},
{time_zone_period, time_zone_period_limit}}
| {:error, :time_zone_not_found | :utc_only_time_zone_database}
end
defmodule Calendar.UTCOnlyTimeZoneDatabase do
@moduledoc """
Built-in time zone database that works only in Etc/UTC.
For all other time zones, it returns `{:error, :utc_only_time_zone_database}`.
"""
@behaviour Calendar.TimeZoneDatabase
@impl true
def time_zone_period_from_utc_iso_days(_, "Etc/UTC"),
do: {:ok, %{std_offset: 0, utc_offset: 0, zone_abbr: "UTC"}}
def time_zone_period_from_utc_iso_days(_, _),
do: {:error, :utc_only_time_zone_database}
@impl true
def time_zone_periods_from_wall_datetime(_, "Etc/UTC"),
do: {:ok, %{std_offset: 0, utc_offset: 0, zone_abbr: "UTC"}}
def time_zone_periods_from_wall_datetime(_, _),
do: {:error, :utc_only_time_zone_database}
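# Sketch: "Etc/UTC" is the only zone this database resolves; every other zone
# yields the :utc_only_time_zone_database error:
#
#     Calendar.UTCOnlyTimeZoneDatabase.time_zone_periods_from_wall_datetime(
#       ~N[2019-01-01 00:00:00], "Etc/UTC"
#     )
#     #=> {:ok, %{std_offset: 0, utc_offset: 0, zone_abbr: "UTC"}}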
end
|
lib/elixir/lib/calendar/time_zone_database.ex
| 0.926404 | 0.84124 |
time_zone_database.ex
|
starcoder
|
defmodule AWS.Codeartifact do
@moduledoc """
AWS CodeArtifact is a fully managed artifact repository compatible with
language-native package managers and build tools such as npm, Apache Maven,
and pip. You can use CodeArtifact to share packages with development teams
and pull packages. Packages can be pulled from both public and CodeArtifact
repositories. You can also create an upstream relationship between a
CodeArtifact repository and another repository, which effectively merges
their contents from the point of view of a package manager client.
**AWS CodeArtifact Components**
Use the information in this guide to help you work with the following
CodeArtifact components:
<ul> <li> **Repository**: A CodeArtifact repository contains a set of
[package
versions](https://docs.aws.amazon.com/codeartifact/latest/ug/welcome.html#welcome-concepts-package-version),
each of which maps to a set of assets, or files. Repositories are polyglot,
so a single repository can contain packages of any supported type. Each
repository exposes endpoints for fetching and publishing packages using
tools like the ** `npm` ** CLI, the Maven CLI (** `mvn` **), and ** `pip`
**. You can create up to 100 repositories per AWS account.
</li> <li> **Domain**: Repositories are aggregated into a higher-level
entity known as a *domain*. All package assets and metadata are stored in
the domain, but are consumed through repositories. A given package asset,
such as a Maven JAR file, is stored once per domain, no matter how many
repositories it's present in. All of the assets and metadata in a domain
are encrypted with the same customer master key (CMK) stored in AWS Key
Management Service (AWS KMS).
Each repository is a member of a single domain and can't be moved to a
different domain.
The domain allows organizational policy to be applied across multiple
repositories, such as which accounts can access repositories in the domain,
and which public repositories can be used as sources of packages.
Although an organization can have multiple domains, we recommend a single
production domain that contains all published artifacts so that teams can
find and share packages across their organization.
</li> <li> **Package**: A *package* is a bundle of software and the
metadata required to resolve dependencies and install the software.
CodeArtifact supports
[npm](https://docs.aws.amazon.com/codeartifact/latest/ug/using-npm.html),
[PyPI](https://docs.aws.amazon.com/codeartifact/latest/ug/using-python.html),
and [Maven](https://docs.aws.amazon.com/codeartifact/latest/ug/using-maven)
package formats.
In CodeArtifact, a package consists of:
<ul> <li> A *name* (for example, `webpack` is the name of a popular npm
package)
</li> <li> An optional namespace (for example, `@types` in `@types/node`)
</li> <li> A set of versions (for example, `1.0.0`, `1.0.1`, `1.0.2`, etc.)
</li> <li> Package-level metadata (for example, npm tags)
</li> </ul> </li> <li> **Package version**: A version of a package, such as
`@types/node 12.6.9`. The version number format and semantics vary for
different package formats. For example, npm package versions must conform
to the [Semantic Versioning specification](https://semver.org/). In
CodeArtifact, a package version consists of the version identifier,
metadata at the package version level, and a set of assets.
</li> <li> **Upstream repository**: One repository is *upstream* of another
when the package versions in it can be accessed from the repository
endpoint of the downstream repository, effectively merging the contents of
the two repositories from the point of view of a client. CodeArtifact
allows creating an upstream relationship between two repositories.
</li> <li> **Asset**: An individual file stored in CodeArtifact associated
with a package version, such as an npm `.tgz` file or Maven POM and JAR
files.
</li> </ul> CodeArtifact supports these operations:
<ul> <li> `AssociateExternalConnection`: Adds an existing external
connection to a repository.
</li> <li> `CopyPackageVersions`: Copies package versions from one
repository to another repository in the same domain.
</li> <li> `CreateDomain`: Creates a domain
</li> <li> `CreateRepository`: Creates a CodeArtifact repository in a
domain.
</li> <li> `DeleteDomain`: Deletes a domain. You cannot delete a domain
that contains repositories.
</li> <li> `DeleteDomainPermissionsPolicy`: Deletes the resource policy
that is set on a domain.
</li> <li> `DeletePackageVersions`: Deletes versions of a package. After a
package has been deleted, it can be republished, but its assets and
metadata cannot be restored because they have been permanently removed from
storage.
</li> <li> `DeleteRepository`: Deletes a repository.
</li> <li> `DeleteRepositoryPermissionsPolicy`: Deletes the resource policy
that is set on a repository.
</li> <li> `DescribeDomain`: Returns a `DomainDescription` object that
contains information about the requested domain.
</li> <li> `DescribePackageVersion`: Returns a `
[PackageVersionDescription](https://docs.aws.amazon.com/codeartifact/latest/APIReference/API_PackageVersionDescription.html)
` object that contains details about a package version.
</li> <li> `DescribeRepository`: Returns a `RepositoryDescription` object
that contains detailed information about the requested repository.
</li> <li> `DisposePackageVersions`: Disposes versions of a package. A
package version with the status `Disposed` cannot be restored because they
have been permanently removed from storage.
</li> <li> `DisassociateExternalConnection`: Removes an existing external
connection from a repository.
</li> <li> `GetAuthorizationToken`: Generates a temporary authorization
token for accessing repositories in the domain. The token expires after the
authorization period has passed. The default authorization period is 12
hours and can be customized to any length with a maximum of 12 hours.
</li> <li> `GetDomainPermissionsPolicy`: Returns the policy of a resource
that is attached to the specified domain.
</li> <li> `GetPackageVersionAsset`: Returns the contents of an asset that
is in a package version.
</li> <li> `GetPackageVersionReadme`: Gets the readme file or descriptive
text for a package version.
</li> <li> `GetRepositoryEndpoint`: Returns the endpoint of a repository
for a specific package format. A repository has one endpoint for each
package format:
<ul> <li> `npm`
</li> <li> `pypi`
</li> <li> `maven`
</li> </ul> </li> <li> `GetRepositoryPermissionsPolicy`: Returns the
resource policy that is set on a repository.
</li> <li> `ListDomains`: Returns a list of `DomainSummary` objects. Each
returned `DomainSummary` object contains information about a domain.
</li> <li> `ListPackages`: Lists the packages in a repository.
</li> <li> `ListPackageVersionAssets`: Lists the assets for a given package
version.
</li> <li> `ListPackageVersionDependencies`: Returns a list of the direct
dependencies for a package version.
</li> <li> `ListPackageVersions`: Returns a list of package versions for a
specified package in a repository.
</li> <li> `ListRepositories`: Returns a list of repositories owned by the
AWS account that called this method.
</li> <li> `ListRepositoriesInDomain`: Returns a list of the repositories
in a domain.
</li> <li> `PutDomainPermissionsPolicy`: Attaches a resource policy to a
domain.
</li> <li> `PutRepositoryPermissionsPolicy`: Sets the resource policy on a
repository that specifies permissions to access it.
</li> <li> `UpdatePackageVersionsStatus`: Updates the status of one or more
versions of a package.
</li> <li> `UpdateRepository`: Updates the properties of a repository.
</li> </ul>
"""
@doc """
Adds an existing external connection to a repository. One external
connection is allowed per repository.
<note> A repository can have one or more upstream repositories, or an
external connection.
</note>
"""
def associate_external_connection(client, input, options \\ []) do
path_ = "/v1/repository/external-connection"
headers = []
{query_, input} =
[
{"domain", "domain"},
{"domainOwner", "domain-owner"},
{"externalConnection", "external-connection"},
{"repository", "repository"},
]
|> AWS.Request.build_params(input)
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Copies package versions from one repository to another repository in the
same domain.
<note> You must specify `versions` or `versionRevisions`. You cannot
specify both.
</note>
"""
def copy_package_versions(client, input, options \\ []) do
path_ = "/v1/package/versions/copy"
headers = []
{query_, input} =
[
{"destinationRepository", "destination-repository"},
{"domain", "domain"},
{"domainOwner", "domain-owner"},
{"format", "format"},
{"namespace", "namespace"},
{"package", "package"},
{"sourceRepository", "source-repository"},
]
|> AWS.Request.build_params(input)
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Creates a domain. CodeArtifact *domains* make it easier to manage multiple
repositories across an organization. You can use a domain to apply
permissions across many repositories owned by different AWS accounts. An
asset is stored only once in a domain, even if it's in multiple
repositories.
Although you can have multiple domains, we recommend a single production
domain that contains all published artifacts so that your development teams
can find and share packages. You can use a second pre-production domain to
test changes to the production domain configuration.
"""
def create_domain(client, input, options \\ []) do
path_ = "/v1/domain"
headers = []
{query_, input} =
[
{"domain", "domain"},
]
|> AWS.Request.build_params(input)
request(client, :post, path_, query_, headers, input, options, nil)
end
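# A hedged usage sketch; the client construction is illustrative and depends
# on how the surrounding aws-elixir application builds its %AWS.Client{}:
#
#     client = %AWS.Client{access_key_id: "...", secret_access_key: "...", region: "us-east-1"}
#     AWS.Codeartifact.create_domain(client, %{"domain" => "my-domain"})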
@doc """
Creates a repository.
"""
def create_repository(client, input, options \\ []) do
path_ = "/v1/repository"
headers = []
{query_, input} =
[
{"domain", "domain"},
{"domainOwner", "domain-owner"},
{"repository", "repository"},
]
|> AWS.Request.build_params(input)
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Deletes a domain. You cannot delete a domain that contains repositories. If
you want to delete a domain with repositories, first delete its
repositories.
"""
def delete_domain(client, input, options \\ []) do
path_ = "/v1/domain"
headers = []
{query_, input} =
[
{"domain", "domain"},
{"domainOwner", "domain-owner"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Deletes the resource policy set on a domain.
"""
def delete_domain_permissions_policy(client, input, options \\ []) do
path_ = "/v1/domain/permissions/policy"
headers = []
{query_, input} =
[
{"domain", "domain"},
{"domainOwner", "domain-owner"},
{"policyRevision", "policy-revision"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Deletes one or more versions of a package. A deleted package version cannot
be restored in your repository. If you want to remove a package version
from your repository and be able to restore it later, set its status to
`Archived`. Archived packages cannot be downloaded from a repository and
don't show up with list package APIs (for example, `
[ListPackageVersions](https://docs.aws.amazon.com/codeartifact/latest/APIReference/API_ListPackageVersions.html)
`), but you can restore them using `
[UpdatePackageVersionsStatus](https://docs.aws.amazon.com/codeartifact/latest/APIReference/API_UpdatePackageVersionsStatus.html)
`.
"""
def delete_package_versions(client, input, options \\ []) do
path_ = "/v1/package/versions/delete"
headers = []
{query_, input} =
[
{"domain", "domain"},
{"domainOwner", "domain-owner"},
{"format", "format"},
{"namespace", "namespace"},
{"package", "package"},
{"repository", "repository"},
]
|> AWS.Request.build_params(input)
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Deletes a repository.
"""
def delete_repository(client, input, options \\ []) do
path_ = "/v1/repository"
headers = []
{query_, input} =
[
{"domain", "domain"},
{"domainOwner", "domain-owner"},
{"repository", "repository"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Deletes the resource policy that is set on a repository. After a resource
policy is deleted, the permissions allowed and denied by the deleted policy
are removed. The effect of deleting a resource policy might not be
immediate.
<important> Use `DeleteRepositoryPermissionsPolicy` with caution. After a
policy is deleted, AWS users, roles, and accounts lose permissions to
perform the repository actions granted by the deleted policy.
</important>
"""
def delete_repository_permissions_policy(client, input, options \\ []) do
path_ = "/v1/repository/permissions/policies"
headers = []
{query_, input} =
[
{"domain", "domain"},
{"domainOwner", "domain-owner"},
{"policyRevision", "policy-revision"},
{"repository", "repository"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Returns a [ `DomainDescription`
](https://docs.aws.amazon.com/codeartifact/latest/APIReference/API_DomainDescription.html)
object that contains information about the requested domain.
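A minimal sketch (the `AWS.CodeArtifact` module name is assumed;
`domain_owner` defaults to `nil`):
{:ok, body, _response} = AWS.CodeArtifact.describe_domain(client, "my-domain")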
"""
def describe_domain(client, domain, domain_owner \\ nil, options \\ []) do
path_ = "/v1/domain"
headers = []
query_ = []
query_ = if !is_nil(domain_owner) do
[{"domain-owner", domain_owner} | query_]
else
query_
end
query_ = if !is_nil(domain) do
[{"domain", domain} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Returns a [ `PackageVersionDescription`
](https://docs.aws.amazon.com/codeartifact/latest/APIReference/API_PackageVersionDescription.html)
object that contains information about the requested package version.
"""
def describe_package_version(client, domain, domain_owner \\ nil, format, namespace \\ nil, package, package_version, repository, options \\ []) do
path_ = "/v1/package/version"
headers = []
query_ = []
query_ = if !is_nil(repository) do
[{"repository", repository} | query_]
else
query_
end
query_ = if !is_nil(package_version) do
[{"version", package_version} | query_]
else
query_
end
query_ = if !is_nil(package) do
[{"package", package} | query_]
else
query_
end
query_ = if !is_nil(namespace) do
[{"namespace", namespace} | query_]
else
query_
end
query_ = if !is_nil(format) do
[{"format", format} | query_]
else
query_
end
query_ = if !is_nil(domain_owner) do
[{"domain-owner", domain_owner} | query_]
else
query_
end
query_ = if !is_nil(domain) do
[{"domain", domain} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Returns a `RepositoryDescription` object that contains detailed information
about the requested repository.
"""
def describe_repository(client, domain, domain_owner \\ nil, repository, options \\ []) do
path_ = "/v1/repository"
headers = []
query_ = []
query_ = if !is_nil(repository) do
[{"repository", repository} | query_]
else
query_
end
query_ = if !is_nil(domain_owner) do
[{"domain-owner", domain_owner} | query_]
else
query_
end
query_ = if !is_nil(domain) do
[{"domain", domain} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Removes an existing external connection from a repository.
"""
def disassociate_external_connection(client, input, options \\ []) do
path_ = "/v1/repository/external-connection"
headers = []
{query_, input} =
[
{"domain", "domain"},
{"domainOwner", "domain-owner"},
{"externalConnection", "external-connection"},
{"repository", "repository"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Deletes the assets in package versions and sets the package versions'
status to `Disposed`. A disposed package version cannot be restored in your
repository because its assets are deleted.
To view all disposed package versions in a repository, use `
[ListPackageVersions](https://docs.aws.amazon.com/codeartifact/latest/APIReference/API_ListPackageVersions.html)
` and set the `
[status](https://docs.aws.amazon.com/codeartifact/latest/APIReference/API_ListPackageVersions.html#API_ListPackageVersions_RequestSyntax)
` parameter to `Disposed`.
To view information about a disposed package version, use `
[ListPackageVersions](https://docs.aws.amazon.com/codeartifact/latest/APIReference/API_ListPackageVersions.html)
` and set the `
[status](https://docs.aws.amazon.com/codeartifact/latest/APIReference/API_ListPackageVersions.html#codeartifact-ListPackageVersions-response-status)
` parameter to `Disposed`.
"""
def dispose_package_versions(client, input, options \\ []) do
path_ = "/v1/package/versions/dispose"
headers = []
{query_, input} =
[
{"domain", "domain"},
{"domainOwner", "domain-owner"},
{"format", "format"},
{"namespace", "namespace"},
{"package", "package"},
{"repository", "repository"},
]
|> AWS.Request.build_params(input)
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Generates a temporary authentication token for accessing repositories in
the domain. This API requires the `codeartifact:GetAuthorizationToken` and
`sts:GetServiceBearerToken` permissions.
<note> CodeArtifact authorization tokens are valid for a period of 12 hours
when created with the `login` command. You can call `login` periodically to
refresh the token. When you create an authorization token with the
`GetAuthorizationToken` API, you can set a custom authorization period, up
to a maximum of 12 hours, with the `durationSeconds` parameter.
The authorization period begins after `login` or `GetAuthorizationToken` is
called. If `login` or `GetAuthorizationToken` is called while assuming a
role, the token lifetime is independent of the maximum session duration of
the role. For example, if you call `sts assume-role` and specify a session
duration of 15 minutes, then generate a CodeArtifact authorization token,
the token will be valid for the full authorization period even though this
is longer than the 15-minute session duration.
See [Using IAM
Roles](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use.html)
for more information on controlling session duration.
</note>
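As an illustration (the `AWS.CodeArtifact` module name and the values are
assumed), requesting a 30-minute token:
{:ok, body, _response} =
AWS.CodeArtifact.get_authorization_token(client, %{
"domain" => "my-domain",
"durationSeconds" => 1800
})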
"""
def get_authorization_token(client, input, options \\ []) do
path_ = "/v1/authorization-token"
headers = []
{query_, input} =
[
{"domain", "domain"},
{"domainOwner", "domain-owner"},
{"durationSeconds", "duration"},
]
|> AWS.Request.build_params(input)
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Returns the resource policy attached to the specified domain.
<note> The policy is a resource-based policy, not an identity-based policy.
For more information, see [Identity-based policies and resource-based
policies
](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies_identity-vs-resource.html)
in the *AWS Identity and Access Management User Guide*.
</note>
"""
def get_domain_permissions_policy(client, domain, domain_owner \\ nil, options \\ []) do
path_ = "/v1/domain/permissions/policy"
headers = []
query_ = []
query_ = if !is_nil(domain_owner) do
[{"domain-owner", domain_owner} | query_]
else
query_
end
query_ = if !is_nil(domain) do
[{"domain", domain} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Returns an asset (or file) that is in a package. For example, for a Maven
package version, use `GetPackageVersionAsset` to download a `JAR` file, a
`POM` file, or any other assets in the package version.
"""
def get_package_version_asset(client, asset, domain, domain_owner \\ nil, format, namespace \\ nil, package, package_version, package_version_revision \\ nil, repository, options \\ []) do
path_ = "/v1/package/version/asset"
headers = []
query_ = []
query_ = if !is_nil(repository) do
[{"repository", repository} | query_]
else
query_
end
query_ = if !is_nil(package_version_revision) do
[{"revision", package_version_revision} | query_]
else
query_
end
query_ = if !is_nil(package_version) do
[{"version", package_version} | query_]
else
query_
end
query_ = if !is_nil(package) do
[{"package", package} | query_]
else
query_
end
query_ = if !is_nil(namespace) do
[{"namespace", namespace} | query_]
else
query_
end
query_ = if !is_nil(format) do
[{"format", format} | query_]
else
query_
end
query_ = if !is_nil(domain_owner) do
[{"domain-owner", domain_owner} | query_]
else
query_
end
query_ = if !is_nil(domain) do
[{"domain", domain} | query_]
else
query_
end
query_ = if !is_nil(asset) do
[{"asset", asset} | query_]
else
query_
end
case request(client, :get, path_, query_, headers, nil, options, nil) do
{:ok, body, response} when not is_nil(body) ->
body =
[
{"X-AssetName", "assetName"},
{"X-PackageVersion", "packageVersion"},
{"X-PackageVersionRevision", "packageVersionRevision"},
]
|> Enum.reduce(body, fn {header_name, key}, acc ->
case List.keyfind(response.headers, header_name, 0) do
nil -> acc
{_header_name, value} -> Map.put(acc, key, value)
end
end)
{:ok, body, response}
result ->
result
end
end
@doc """
Gets the readme file or descriptive text for a package version. For
packages that do not contain a readme file, CodeArtifact extracts a
description from a metadata file. For example, from the
`<description>` element in the `pom.xml` file of a Maven package.
The returned text might contain formatting. For example, it might contain
formatting for Markdown or reStructuredText.
"""
def get_package_version_readme(client, domain, domain_owner \\ nil, format, namespace \\ nil, package, package_version, repository, options \\ []) do
path_ = "/v1/package/version/readme"
headers = []
query_ = []
query_ = if !is_nil(repository) do
[{"repository", repository} | query_]
else
query_
end
query_ = if !is_nil(package_version) do
[{"version", package_version} | query_]
else
query_
end
query_ = if !is_nil(package) do
[{"package", package} | query_]
else
query_
end
query_ = if !is_nil(namespace) do
[{"namespace", namespace} | query_]
else
query_
end
query_ = if !is_nil(format) do
[{"format", format} | query_]
else
query_
end
query_ = if !is_nil(domain_owner) do
[{"domain-owner", domain_owner} | query_]
else
query_
end
query_ = if !is_nil(domain) do
[{"domain", domain} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Returns the endpoint of a repository for a specific package format. A
repository has one endpoint for each package format:
<ul> <li> `npm`
</li> <li> `pypi`
</li> <li> `maven`
</li> </ul>
"""
def get_repository_endpoint(client, domain, domain_owner \\ nil, format, repository, options \\ []) do
path_ = "/v1/repository/endpoint"
headers = []
query_ = []
query_ = if !is_nil(repository) do
[{"repository", repository} | query_]
else
query_
end
query_ = if !is_nil(format) do
[{"format", format} | query_]
else
query_
end
query_ = if !is_nil(domain_owner) do
[{"domain-owner", domain_owner} | query_]
else
query_
end
query_ = if !is_nil(domain) do
[{"domain", domain} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Returns the resource policy that is set on a repository.
"""
def get_repository_permissions_policy(client, domain, domain_owner \\ nil, repository, options \\ []) do
path_ = "/v1/repository/permissions/policy"
headers = []
query_ = []
query_ = if !is_nil(repository) do
[{"repository", repository} | query_]
else
query_
end
query_ = if !is_nil(domain_owner) do
[{"domain-owner", domain_owner} | query_]
else
query_
end
query_ = if !is_nil(domain) do
[{"domain", domain} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Returns a list of `
[DomainSummary](https://docs.aws.amazon.com/codeartifact/latest/APIReference/API_DomainSummary.html)
` objects for all domains owned by the AWS account that makes this call.
Each returned `DomainSummary` object contains information about a domain.
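A minimal sketch (the `AWS.CodeArtifact` module name is assumed; pagination
fields may be passed in the input map):
{:ok, body, _response} = AWS.CodeArtifact.list_domains(client, %{})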
"""
def list_domains(client, input, options \\ []) do
path_ = "/v1/domains"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Returns a list of [ `AssetSummary`
](https://docs.aws.amazon.com/codeartifact/latest/APIReference/API_AssetSummary.html)
objects for assets in a package version.
"""
def list_package_version_assets(client, input, options \\ []) do
path_ = "/v1/package/version/assets"
headers = []
{query_, input} =
[
{"domain", "domain"},
{"domainOwner", "domain-owner"},
{"format", "format"},
{"maxResults", "max-results"},
{"namespace", "namespace"},
{"nextToken", "next-token"},
{"package", "package"},
{"packageVersion", "version"},
{"repository", "repository"},
]
|> AWS.Request.build_params(input)
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Returns the direct dependencies for a package version. The dependencies are
returned as [ `PackageDependency`
](https://docs.aws.amazon.com/codeartifact/latest/APIReference/API_PackageDependency.html)
objects. CodeArtifact extracts the dependencies for a package version from
the metadata file for the package format (for example, the `package.json`
file for npm packages and the `pom.xml` file for Maven). Any package
version dependencies that are not listed in the configuration file are not
returned.
"""
def list_package_version_dependencies(client, input, options \\ []) do
path_ = "/v1/package/version/dependencies"
headers = []
{query_, input} =
[
{"domain", "domain"},
{"domainOwner", "domain-owner"},
{"format", "format"},
{"namespace", "namespace"},
{"nextToken", "next-token"},
{"package", "package"},
{"packageVersion", "version"},
{"repository", "repository"},
]
|> AWS.Request.build_params(input)
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Returns a list of [ `PackageVersionSummary`
](https://docs.aws.amazon.com/codeartifact/latest/APIReference/API_PackageVersionSummary.html)
objects for package versions in a repository that match the request
parameters.
"""
def list_package_versions(client, input, options \\ []) do
path_ = "/v1/package/versions"
headers = []
{query_, input} =
[
{"domain", "domain"},
{"domainOwner", "domain-owner"},
{"format", "format"},
{"maxResults", "max-results"},
{"namespace", "namespace"},
{"nextToken", "next-token"},
{"package", "package"},
{"repository", "repository"},
{"sortBy", "sortBy"},
{"status", "status"},
]
|> AWS.Request.build_params(input)
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Returns a list of [ `PackageSummary`
](https://docs.aws.amazon.com/codeartifact/latest/APIReference/API_PackageSummary.html)
objects for packages in a repository that match the request parameters.
"""
def list_packages(client, input, options \\ []) do
path_ = "/v1/packages"
headers = []
{query_, input} =
[
{"domain", "domain"},
{"domainOwner", "domain-owner"},
{"format", "format"},
{"maxResults", "max-results"},
{"namespace", "namespace"},
{"nextToken", "next-token"},
{"packagePrefix", "package-prefix"},
{"repository", "repository"},
]
|> AWS.Request.build_params(input)
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Returns a list of [ `RepositorySummary`
](https://docs.aws.amazon.com/codeartifact/latest/APIReference/API_RepositorySummary.html)
objects. Each `RepositorySummary` contains information about a repository
in the specified AWS account and that matches the input parameters.
"""
def list_repositories(client, input, options \\ []) do
path_ = "/v1/repositories"
headers = []
{query_, input} =
[
{"maxResults", "max-results"},
{"nextToken", "next-token"},
{"repositoryPrefix", "repository-prefix"},
]
|> AWS.Request.build_params(input)
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Returns a list of [ `RepositorySummary`
](https://docs.aws.amazon.com/codeartifact/latest/APIReference/API_RepositorySummary.html)
objects. Each `RepositorySummary` contains information about a repository
in the specified domain and that matches the input parameters.
"""
def list_repositories_in_domain(client, input, options \\ []) do
path_ = "/v1/domain/repositories"
headers = []
{query_, input} =
[
{"administratorAccount", "administrator-account"},
{"domain", "domain"},
{"domainOwner", "domain-owner"},
{"maxResults", "max-results"},
{"nextToken", "next-token"},
{"repositoryPrefix", "repository-prefix"},
]
|> AWS.Request.build_params(input)
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Sets a resource policy on a domain that specifies permissions to access it.
"""
def put_domain_permissions_policy(client, input, options \\ []) do
path_ = "/v1/domain/permissions/policy"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Sets the resource policy on a repository that specifies permissions to
access it.
"""
def put_repository_permissions_policy(client, input, options \\ []) do
path_ = "/v1/repository/permissions/policy"
headers = []
{query_, input} =
[
{"domain", "domain"},
{"domainOwner", "domain-owner"},
{"repository", "repository"},
]
|> AWS.Request.build_params(input)
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Updates the status of one or more versions of a package.
"""
def update_package_versions_status(client, input, options \\ []) do
path_ = "/v1/package/versions/update_status"
headers = []
{query_, input} =
[
{"domain", "domain"},
{"domainOwner", "domain-owner"},
{"format", "format"},
{"namespace", "namespace"},
{"package", "package"},
{"repository", "repository"},
]
|> AWS.Request.build_params(input)
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Updates the properties of a repository.
"""
def update_repository(client, input, options \\ []) do
path_ = "/v1/repository"
headers = []
{query_, input} =
[
{"domain", "domain"},
{"domainOwner", "domain-owner"},
{"repository", "repository"},
]
|> AWS.Request.build_params(input)
request(client, :put, path_, query_, headers, input, options, nil)
end
@spec request(AWS.Client.t(), binary(), binary(), list(), list(), map(), list(), pos_integer()) ::
{:ok, map() | nil, map()}
| {:error, term()}
defp request(client, method, path, query, headers, input, options, success_status_code) do
client = %{client | service: "codeartifact"}
host = build_host("codeartifact", client)
url = host
|> build_url(path, client)
|> add_query(query, client)
additional_headers = [{"Host", host}, {"Content-Type", "application/x-amz-json-1.1"}]
headers = AWS.Request.add_headers(additional_headers, headers)
payload = encode!(client, input)
headers = AWS.Request.sign_v4(client, method, url, headers, payload)
perform_request(client, method, url, payload, headers, options, success_status_code)
end
defp perform_request(client, method, url, payload, headers, options, success_status_code) do
case AWS.Client.request(client, method, url, payload, headers, options) do
{:ok, %{status_code: status_code, body: body} = response}
when is_nil(success_status_code) and status_code in [200, 202, 204]
when status_code == success_status_code ->
body = if(body != "", do: decode!(client, body))
{:ok, body, response}
{:ok, response} ->
{:error, {:unexpected_response, response}}
error = {:error, _reason} -> error
end
end
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
endpoint
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, path, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}#{path}"
end
defp add_query(url, [], _client) do
url
end
defp add_query(url, query, client) do
querystring = encode!(client, query, :query)
"#{url}?#{querystring}"
end
defp encode!(client, payload, format \\ :json) do
AWS.Client.encode!(client, payload, format)
end
defp decode!(client, payload) do
AWS.Client.decode!(client, payload, :json)
end
end
|
lib/aws/generated/codeartifact.ex
| 0.909394 | 0.645329 |
codeartifact.ex
|
starcoder
|
defprotocol Buildable.Collectable do
@moduledoc """
Collectable protocol used by [`buildables`](`t:Buildable.t/0`) to collect elements into a data structure.
The `Build.into/2` and `Build.into/3` functions use this protocol to insert a [`buildable`](`t:Buildable.t/0`) or a [range](`t:Range.t/0`) into a [`buildable`](`t:Buildable.t/0`).
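For illustration, the `List` implementation can be driven by hand (a sketch;
the final order assumes the default `:into_position` for lists is `:last`):
{acc, collector} = Buildable.Collectable.into([])
acc = collector.(acc, {:cont, 1})
acc = collector.(acc, {:cont, 2})
collector.(acc, :done)
#=> [1, 2]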
"""
@type t :: Buildable.t()
@type collector :: (term, command -> Buildable.t() | term)
@type command :: {:cont, term()} | :done | :halt
@fallback_to_any true
@spec into(Buildable.t()) ::
{initial_acc :: term, collector}
def into(buildable)
end
defimpl Buildable.Collectable, for: [BitString, Map, MapSet] do
@impl true
defdelegate into(buildable), to: Collectable
end
defimpl Buildable.Collectable, for: List do
@impl true
def into(list) do
buildable_module = Buildable.impl_for(list)
fun = fn
list_acc, {:cont, elem} ->
[elem | list_acc]
list_acc, :done ->
# This implementation is different from the one in Collectable.List.
# Here we do allow inserting into non-empty lists.
if buildable_module.default(:into_position) == :last do
:lists.reverse(list_acc) ++ list
else
list_acc ++ list
end
_list_acc, :halt ->
:ok
end
{[], fun}
end
end
defimpl Buildable.Collectable, for: Any do
import Build.Util, only: [invert_position: 1]
@impl true
def into(buildable) do
buildable_module = Buildable.impl_for(buildable)
reverse_result? =
buildable_module.default(:reversible?) == true and
buildable_module.default(:extract_position) ==
buildable_module.default(:into_position)
into_any(buildable, buildable_module, reverse_result?)
end
defp into_any(buildable, buildable_module, reverse_result?)
defp into_any(buildable, buildable_module, false) do
fun = fn
acc, {:cont, elem} ->
buildable_module.insert(acc, elem, buildable_module.default(:into_position))
acc, :done ->
acc
_acc, :halt ->
:ok
end
{buildable, fun}
end
defp into_any(buildable, buildable_module, true) do
inverted_into_position = invert_position(buildable_module.default(:into_position))
fun = fn
acc, {:cont, elem} ->
buildable_module.insert(acc, elem, inverted_into_position)
acc, :done ->
buildable_module.reverse(acc)
_acc, :halt ->
:ok
end
{buildable_module.reverse(buildable), fun}
end
end
|
lib/buildable/collectable.ex
| 0.886605 | 0.691497 |
collectable.ex
|
starcoder
|
defmodule Playwright.Runner.Catalog do
@moduledoc false
# `Catalog` provides storage and management of known resources. It implements
# `GenServer` in order to maintain state, while domain logic is handled within
# caller modules such as `Connection`, `Event`, and `Response`.
use GenServer
require Logger
@enforce_keys [:callers, :storage]
defstruct [:callers, :storage]
def start_link(arg) do
GenServer.start_link(__MODULE__, arg)
end
# Retrieve an entry from the `Catalog` storage. In this case (without a
# `caller` provided), the entry is expected to exist. `nil` will be
# returned if it does not.
def get(pid, guid) do
GenServer.call(pid, {:get, guid})
end
# Retrieves an entry from the `Catalog` storage. In this case, a `caller` is
# provided. If the entry does not exist at the time of the call, a `reply`
# will be sent to the caller when the entry arrives. The caller will block
# until the reply is received (or the default timeout is reached).
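#
# A hypothetical caller (names assumed), forwarding its own `from` tuple so
# the Catalog can reply once the entry arrives:
#
#   def handle_call({:fetch, guid}, from, %{catalog: catalog} = state) do
#     Catalog.get(catalog, guid, from)
#     {:noreply, state}
#   end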
def get(pid, guid, caller) do
case get(pid, guid) do
nil ->
await!(pid, {guid, caller})
item ->
found!(pid, {item, caller})
end
end
# Adds an entry to the `Catalog` storage. Currently, the creation of the entry
# to add is left to the caller. At some future point, that responsibility
# might move here.
def put(pid, item) do
GenServer.call(pid, {:put, item})
end
# Removes an entry from the `Catalog` storage.
def rm(pid, guid) do
GenServer.call(pid, {:rm, guid})
end
# Recursive removal: given a "parent" entry, removes it and all of its
# descendants.
def rm_r(pid, guid) do
children = filter(pid, %{parent: get(pid, guid)}, [])
children |> Enum.each(fn child -> rm_r(pid, child.guid) end)
rm(pid, guid)
end
# Retrieves a list of entries, filtered by some attributes.
def filter(pid, filter, default \\ nil) do
GenServer.call(pid, {:filter, {filter, default}})
end
# @impl
# ----------------------------------------------------------------------------
@impl GenServer
def init(root) do
{:ok, %__MODULE__{callers: %{}, storage: %{"Root" => root}}}
end
@impl GenServer
def handle_call({:get, guid}, _, %{storage: storage} = state) do
{:reply, storage[guid], state}
end
@impl GenServer
def handle_call({:put, item}, _, %{callers: callers, storage: storage} = state) do
with updated <- Map.put(storage, item.guid, item) do
caller = Map.get(callers, item.guid)
if caller do
handle_call({:found, {item, caller}}, nil, state)
end
{:reply, updated, %{state | storage: updated}}
end
end
@impl GenServer
def handle_call({:rm, guid}, _, %{storage: storage} = state) do
with updated <- Map.delete(storage, guid) do
{:reply, updated, %{state | storage: updated}}
end
end
@impl GenServer
def handle_call({:await, {guid, caller}}, _, %{callers: callers} = state) do
with updated <- Map.put(callers, guid, caller) do
{:reply, updated, %{state | callers: updated}}
end
end
@impl GenServer
def handle_call({:found, {item, caller}}, _, state) do
{:reply, GenServer.reply(caller, item), state}
end
@impl GenServer
def handle_call({:values}, _, %{storage: storage} = state) do
{:reply, Map.values(storage), state}
end
@impl GenServer
def handle_call({:filter, {filter, default}}, _, %{storage: storage} = state) do
case select(Map.values(storage), filter, []) do
[] ->
{:reply, default, state}
result ->
{:reply, result, state}
end
end
# private
# ----------------------------------------------------------------------------
def await!(pid, {guid, caller}) do
GenServer.call(pid, {:await, {guid, caller}})
end
def found!(pid, {item, caller}) do
GenServer.call(pid, {:found, {item, caller}})
end
defp select([], _attrs, result) do
result
end
defp select([head | tail], attrs, result) when head.type == "" do
select(tail, attrs, result)
end
defp select([head | tail], %{parent: parent, type: type} = attrs, result)
when head.parent.guid == parent.guid and head.type == type do
select(tail, attrs, result ++ [head])
end
defp select([head | tail], %{parent: parent} = attrs, result)
when head.parent.guid == parent.guid do
select(tail, attrs, result ++ [head])
end
defp select([head | tail], %{type: type} = attrs, result)
when head.type == type do
select(tail, attrs, result ++ [head])
end
defp select([head | tail], %{guid: guid} = attrs, result)
when head.guid == guid do
select(tail, attrs, result ++ [head])
end
defp select([_head | tail], attrs, result) do
select(tail, attrs, result)
end
end
|
lib/playwright/runner/catalog.ex
| 0.810591 | 0.568715 |
catalog.ex
|
starcoder
|
defmodule Braintree.Address do
@moduledoc """
You can create an address for a customer only although the structure
is also used for a merchant account.
For additional reference see:
https://developers.braintreepayments.com/reference/request/address/create/ruby
"""
use Braintree.Construction
alias Braintree.HTTP
alias Braintree.ErrorResponse, as: Error
@type t :: %__MODULE__{
id: String.t,
company: String.t,
created_at: String.t,
updated_at: String.t,
first_name: String.t,
last_name: String.t,
locality: String.t,
postal_code: String.t,
region: String.t,
street_address: String.t,
country_code_alpha2: String.t,
country_code_alpha3: String.t,
country_code_numeric: String.t,
country_name: String.t,
customer_id: String.t,
extended_address: String.t
}
defstruct id: nil,
company: nil,
created_at: nil,
updated_at: nil,
first_name: nil,
last_name: nil,
locality: nil,
postal_code: nil,
region: nil,
street_address: nil,
country_code_alpha2: nil,
country_code_alpha3: nil,
country_code_numeric: nil,
country_name: nil,
customer_id: nil,
extended_address: nil
@doc """
Create an address record, or return an error response after failed validation.
## Example
{:ok, address} = Braintree.Address.create("customer_id", %{
first_name: "Jenna"
})
address.first_name # "Jenna"
"""
@spec create(binary, map, Keyword.t) :: {:ok, t} | {:error, Error.t}
def create(customer_id, params \\ %{}, opts \\ []) when is_binary(customer_id) do
with {:ok, payload} <- HTTP.post("customers/#{customer_id}/addresses/", %{address: params}, opts) do
{:ok, new(payload)}
end
end
@doc """
You can delete an address using its customer ID and address ID.
## Example
:ok = Braintree.Address.delete("customer_id", "address_id")
"""
@spec delete(binary, binary, Keyword.t) :: :ok | {:error, Error.t}
def delete(customer_id, id, opts \\ []) when is_binary(customer_id) and is_binary(id) do
with {:ok, _response} <- HTTP.delete("customers/#{customer_id}/addresses/" <> id, opts) do
:ok
end
end
@doc """
To update an address, use a customer's ID with an address's ID along with
new attributes. The same validations apply as when creating an address.
Any attribute not passed will remain unchanged.
## Example
{:ok, address} = Braintree.Address.update("customer_id", "address_id", %{
company: "New Company Name"
})
address.company # "New Company Name"
"""
@spec update(binary, binary, map, Keyword.t) :: {:ok, t} | {:error, Error.t}
def update(customer_id, id, params, opts \\ []) when is_binary(customer_id) and is_binary(id) do
with {:ok, payload} <- HTTP.put("customers/#{customer_id}/addresses/" <> id, %{address: params}, opts) do
{:ok, new(payload)}
end
end
@doc """
If you want to look up a single address for a customer using the customer ID and
the address ID, use the find method.
## Example
address = Braintree.Address.find("customer_id", "address_id")
"""
@spec find(binary, binary, Keyword.t) :: {:ok, t} | {:error, Error.t}
def find(customer_id, id, opts \\ []) when is_binary(customer_id) and is_binary(id) do
with {:ok, payload} <- HTTP.get("customers/#{customer_id}/addresses/" <> id, opts) do
{:ok, new(payload)}
end
end
@doc """
Convert a map into a Address struct.
## Example
address = Braintree.Address.new(%{"company" => "Braintree"})
"""
def new(%{"address" => map}), do: super(map)
end
|
lib/address.ex
| 0.891676 | 0.520862 |
address.ex
|
starcoder
|
defmodule Oban.Notifier do
@moduledoc """
The `Notifier` coordinates listening for and publishing notifications for events in predefined
channels.
Every Oban supervision tree contains a notifier process, registered as `Oban.Notifier`, which
can be any implementation of the `Oban.Notifier` behaviour. The default is
`Oban.Notifiers.Postgres`, which relies on Postgres `LISTEN/NOTIFY`. All incoming notifications
are relayed through the notifier to other processes.
## Channels
The notifier recognizes three predefined channels, each with a distinct responsibility:
* `gossip` — arbitrary communication between nodes or jobs are sent on the `gossip` channel
* `insert` — as jobs are inserted into the database an event is published on the `insert`
channel. Processes such as queue producers use this as a signal to dispatch new jobs.
* `signal` — instructions to take action, such as scale a queue or kill a running job, are sent
through the `signal` channel.
The `insert` and `signal` channels are primarily for internal use. Use the `gossip` channel to
send notifications between jobs or processes in your application.
## Examples
Broadcasting after a job is completed:
defmodule MyApp.Worker do
use Oban.Worker
@impl Oban.Worker
def perform(job) do
:ok = MyApp.do_work(job.args)
Oban.Notifier.notify(Oban, :gossip, %{complete: job.id})
:ok
end
end
Listening for job complete events from another process:
def insert_and_listen(args) do
:ok = Oban.Notifier.listen([:gossip])
{:ok, job} =
args
|> MyApp.Worker.new()
|> Oban.insert()
job_id = job.id
receive do
{:notification, :gossip, %{"complete" => ^job_id}} ->
IO.puts("Other job complete!")
after
30_000 ->
IO.puts("Other job didn't finish in 30 seconds!")
end
end
"""
alias Oban.{Config, Registry}
@type server :: GenServer.server()
@type option :: {:name, module()} | {:conf, Config.t()}
@type channel :: :gossip | :insert | :signal
@doc "Starts a notifier"
@callback start_link([option]) :: GenServer.on_start()
@doc "Register current process to receive messages from some channels"
@callback listen(server(), channels :: list(channel())) :: :ok
@doc "Unregister current process from channels"
@callback unlisten(server(), channels :: list(channel())) :: :ok
@doc "Broadcast a notification in a channel"
@callback notify(server(), channel :: channel(), payload :: [map()]) :: :ok
defguardp is_channel(channel) when channel in [:gossip, :insert, :signal]
@doc false
def child_spec(opts) do
conf = Keyword.fetch!(opts, :conf)
opts = Keyword.put_new(opts, :name, conf.notifier)
%{id: opts[:name], start: {conf.notifier, :start_link, [opts]}}
end
@doc """
Register the current process to receive relayed messages for the provided channels.
All messages are received as `JSON` and decoded _before_ they are relayed to registered
processes. Each registered process receives a three element notification tuple in the following
format:
{:notification, channel :: channel(), decoded :: map()}
## Example
Register to listen for all `:gossip` channel messages:
Oban.Notifier.listen([:gossip])
Listen for messages on all channels:
Oban.Notifier.listen([:gossip, :insert, :signal])
Listen for messages when using a custom Oban name:
Oban.Notifier.listen(MyApp.MyOban, [:gossip, :insert, :signal])
"""
@spec listen(server(), [channel]) :: :ok
def listen(server \\ Oban, channels) when is_list(channels) do
:ok = validate_channels!(channels)
conf = Oban.config(server)
server
|> Registry.whereis(Oban.Notifier)
|> conf.notifier.listen(channels)
end
@doc """
Unregister the current process from receiving relayed messages on provided channels.
## Example
Stop listening for messages on the `:gossip` channel:
Oban.Notifier.unlisten([:gossip])
Stop listening for messages when using a custom Oban name:
Oban.Notifier.unlisten(MyApp.MyOban, [:gossip])
"""
@spec unlisten(server(), [channel]) :: :ok
def unlisten(server \\ Oban, channels) when is_list(channels) do
conf = Oban.config(server)
server
|> Registry.whereis(Oban.Notifier)
|> conf.notifier.unlisten(channels)
end
@doc """
Broadcast a notification to listeners on all nodes.
Notifications are scoped to the configured `prefix`. For example, if there are instances running
with the `public` and `private` prefixes, a notification published in the `public` prefix won't
be picked up by processes listening with the `private` prefix.
Using `notify/3` with a config is soft-deprecated. Pass a server as the first argument instead.
## Example
Broadcast a gossip message:
Oban.Notifier.notify(:gossip, %{message: "hi!"})
"""
@spec notify(Config.t() | server(), channel :: channel(), payload :: map() | [map()]) :: :ok
def notify(conf_or_server \\ Oban, channel, payload)
def notify(%Config{} = conf, channel, payload) when is_channel(channel) do
with_span(conf, channel, payload, fn ->
conf.name
|> Registry.whereis(Oban.Notifier)
|> conf.notifier.notify(channel, normalize_payload(payload))
end)
end
def notify(server, channel, payload) when is_channel(channel) do
server
|> Oban.config()
|> notify(channel, payload)
end
defp with_span(conf, channel, payload, fun) do
tele_meta = %{conf: conf, channel: channel, payload: payload}
:telemetry.span([:oban, :notifier, :notify], tele_meta, fn ->
{fun.(), tele_meta}
end)
end
defp validate_channels!([]), do: :ok
defp validate_channels!([head | tail]) when is_channel(head), do: validate_channels!(tail)
defp validate_channels!([head | _]), do: raise(ArgumentError, "unexpected channel: #{head}")
defp normalize_payload(payload) do
payload
|> List.wrap()
|> Enum.map(&encode/1)
end
defp encode(payload) do
payload
|> to_encodable()
|> Jason.encode!()
end
defp to_encodable(%_{} = term), do: term
defp to_encodable(map) when is_map(map) do
for {key, val} <- map, into: %{}, do: {key, to_encodable(val)}
end
defp to_encodable(list) when is_list(list) do
for element <- list, do: to_encodable(element)
end
defp to_encodable(tuple) when is_tuple(tuple) do
tuple
|> Tuple.to_list()
|> to_encodable()
end
defp to_encodable(term), do: term
end
|
lib/oban/notifier.ex
| 0.878223 | 0.501587 |
notifier.ex
|
starcoder
|
defmodule ReWeb.Types.Listing do
@moduledoc """
GraphQL types for listings
"""
use Absinthe.Schema.Notation
import Absinthe.Resolution.Helpers, only: [dataloader: 2]
alias ReWeb.Resolvers
enum :deactivation_reason, values: ~w(duplicated gave_up left_emcasa publication_mistake rented
rejected sold sold_by_emcasa temporarily_suspended to_be_published
went_exclusive)
enum :status, values: ~w(active inactive)
enum :garage_type, values: ~w(contract condominium)
enum :orientation_type, values: ~w(frontside backside lateral inside)
enum :sun_period_type, values: ~w(morning evening)
object :listing do
field :id, :id
field :uuid, :uuid, resolve: &Resolvers.Listings.get_uuid/3
field :type, :string
field :complement, :string
field :description, :string
field :price, :integer
field :property_tax, :float
field :maintenance_fee, :float
field :floor, :string
field :rooms, :integer
field :bathrooms, :integer
field :restrooms, :integer
field :area, :integer
field :garage_spots, :integer
field :garage_type, :garage_type
field :suites, :integer
field :dependencies, :integer
field :balconies, :integer
field :has_elevator, :boolean
field :matterport_code, :string
field :is_active, :boolean, resolve: &Resolvers.Listings.is_active/3
field :is_exclusive, :boolean
field :is_release, :boolean
field :is_exportable, :boolean
field :orientation, :orientation_type
field :floor_count, :integer
field :unit_per_floor, :integer
field :sun_period, :sun_period_type
field :elevators, :integer
field :construction_year, :integer
field :price_per_area, :float
field :inserted_at, :naive_datetime
field :deactivation_reason, :deactivation_reason
field :sold_price, :integer
field :score, :integer do
deprecate("Use normalized_liquidity_ratio instead")
resolve &Resolvers.Listings.score/3
end
field :normalized_liquidity_ratio, :integer,
resolve: &Resolvers.Listings.normalized_liquidity_ratio/3
field :address, :address,
resolve: dataloader(Re.Addresses, &Resolvers.Addresses.per_listing/3)
field :images, list_of(:image) do
arg :is_active, :boolean
arg :limit, :integer
resolve &Resolvers.Images.per_listing/3
end
field :owner, :user, resolve: &Resolvers.Accounts.owner/3
field :interest_count, :integer, resolve: &Resolvers.Statistics.interest_count/3
field :in_person_visit_count, :integer, resolve: &Resolvers.Statistics.in_person_visit_count/3
field :listing_favorite_count, :integer,
resolve: &Resolvers.Statistics.listings_favorite_count/3
field :tour_visualisation_count, :integer,
resolve: &Resolvers.Statistics.tour_visualisation_count/3
field :listing_visualisation_count, :integer,
resolve: &Resolvers.Statistics.listing_visualisation_count/3
field :previous_prices, list_of(:price_history), resolve: &Resolvers.Listings.price_history/3
field :suggested_price, :float, resolve: &Resolvers.Listings.suggested_price/3
field :price_recently_reduced, :boolean, resolve: &Resolvers.Listings.price_recently_reduced/3
field :related, :listing_index do
arg :pagination, non_null(:listing_pagination)
arg :filters, non_null(:listing_filter_input)
resolve &Resolvers.Listings.related/3
end
field :units, list_of(:unit), resolve: &Resolvers.Units.per_listing/3
field :development, :development, resolve: &Resolvers.Developments.per_listing/3
field :tags, list_of(:tag), resolve: &Resolvers.Tags.per_listing/3
field :owner_contact, :owner_contact, resolve: &Resolvers.OwnerContacts.per_listing/3
end
input_object :listing_input do
field :type, non_null(:string)
field :complement, :string
field :description, :string
field :price, :integer
field :property_tax, :float
field :maintenance_fee, :float
field :floor, :string
field :rooms, :integer
field :bathrooms, :integer
field :restrooms, :integer
field :area, :integer
field :garage_spots, :integer
field :garage_type, :garage_type
field :suites, :integer
field :dependencies, :integer
field :balconies, :integer
field :has_elevator, :boolean
field :matterport_code, :string
field :is_exclusive, :boolean
field :is_release, :boolean
field :is_exportable, :boolean
field :orientation, :orientation_type
field :floor_count, :integer
field :unit_per_floor, :integer
field :sun_period, :sun_period_type
field :elevators, :integer
field :construction_year, :integer
field :phone, :string
field :address, :address_input
field :address_id, :id
field :development_uuid, :uuid
field :tags, list_of(non_null(:uuid))
field :owner_contact, :owner_contact_input
field :deactivation_reason, :deactivation_reason
field :status, :status
end
input_object :deactivation_options_input do
field :deactivation_reason, non_null(:deactivation_reason)
field :sold_price, :integer
end
object :listing_user do
field :listing, :listing
field :user, :user
end
object :listing_index do
field :listings, list_of(:listing)
field :remaining_count, :integer
field :filters, :listing_filter
end
input_object :listing_pagination do
field :page_size, :integer
field :excluded_listing_ids, list_of(:id)
end
input_object :order_by do
field :field, :orderable_field
field :type, :order_type
end
enum :orderable_field,
values:
~w(id price property_tax maintenance_fee rooms bathrooms restrooms area garage_spots suites dependencies balconies
price_per_area inserted_at floor)a
enum :order_type, values: ~w(desc asc desc_nulls_last asc_nulls_last)a
input_object :listing_filter_input do
field :max_price, :integer
field :min_price, :integer
field :max_rooms, :integer
field :min_rooms, :integer
field :max_suites, :integer
field :min_suites, :integer
field :max_bathrooms, :integer
field :min_bathrooms, :integer
field :min_area, :integer
field :max_area, :integer
field :neighborhoods, list_of(:string)
field :types, list_of(:string)
field :max_lat, :float
field :min_lat, :float
field :max_lng, :float
field :min_lng, :float
field :neighborhoods_slugs, list_of(:string)
field :max_garage_spots, :integer
field :min_garage_spots, :integer
field :garage_types, list_of(:garage_type)
field :cities, list_of(:string)
field :cities_slug, list_of(:string)
field :statuses, list_of(non_null(:string))
field :tags_slug, list_of(non_null(:string))
field :tags_uuid, list_of(non_null(:uuid))
field :min_floor_count, :integer
field :max_floor_count, :integer
field :min_unit_per_floor, :integer
field :max_unit_per_floor, :integer
field :has_elevator, :boolean
field :orientations, list_of(non_null(:orientation_type))
field :sun_periods, list_of(non_null(:sun_period_type))
field :min_age, :integer
field :max_age, :integer
field :min_price_per_area, :float
field :max_price_per_area, :float
field :min_maintenance_fee, :float
field :max_maintenance_fee, :float
field :is_release, :boolean
field :is_exportable, :boolean
field :exclude_similar_for_primary_market, :boolean
end
object :listing_filter do
field :max_price, :integer
field :min_price, :integer
field :max_rooms, :integer
field :min_rooms, :integer
field :max_suites, :integer
field :min_suites, :integer
field :max_bathrooms, :integer
field :min_bathrooms, :integer
field :min_area, :integer
field :max_area, :integer
field :neighborhoods, list_of(:string)
field :types, list_of(:string)
field :max_lat, :float
field :min_lat, :float
field :max_lng, :float
field :min_lng, :float
field :neighborhoods_slugs, list_of(:string)
field :max_garage_spots, :integer
field :min_garage_spots, :integer
field :garage_types, list_of(:garage_type)
field :cities, list_of(:string)
field :cities_slug, list_of(:string)
field :statuses, list_of(:string)
field :tags_slug, list_of(:string)
field :tags_uuid, list_of(:uuid)
field :min_floor_count, :integer
field :max_floor_count, :integer
field :min_unit_per_floor, :integer
field :max_unit_per_floor, :integer
field :has_elevator, :boolean
field :orientations, list_of(non_null(:orientation_type))
field :sun_periods, list_of(non_null(:sun_period_type))
field :min_age, :integer
field :max_age, :integer
field :min_price_per_area, :float
field :max_price_per_area, :float
field :is_release, :boolean
field :exclude_similar_for_primary_market, :boolean
end
object :price_history do
field :price, :integer
field :inserted_at, :naive_datetime
end
object :listing_queries do
@desc "Listings index"
field :listings, :listing_index do
arg :pagination, :listing_pagination
arg :filters, :listing_filter_input
arg :order_by, list_of(:order_by)
resolve &Resolvers.Listings.index/2
end
@desc "Show listing"
field :listing, :listing do
arg :id, non_null(:id)
resolve &Resolvers.Listings.show/2
end
@desc "List user listings"
field :user_listings, list_of(:listing), do: resolve(&Resolvers.Listings.per_user/2)
@desc "Get favorited listings"
field :favorited_listings, list_of(:listing), resolve: &Resolvers.Accounts.favorited/2
@desc "Featured listings"
field :featured_listings, list_of(:listing), resolve: &Resolvers.Listings.featured/2
@desc "Get listings with relaxed filters"
field :relaxed_listings, :listing_index do
arg :pagination, :listing_pagination
arg :filters, :listing_filter_input
arg :order_by, list_of(:order_by)
resolve &Resolvers.Listings.relaxed/2
end
end
object :listing_mutations do
@desc "Insert listing"
field :insert_listing, type: :listing do
arg :input, non_null(:listing_input)
resolve &Resolvers.Listings.insert/2
end
@desc "Update listing"
field :update_listing, type: :listing do
arg :id, non_null(:id)
arg :input, non_null(:listing_input)
resolve &Resolvers.Listings.update/2
end
@desc "Activate listing"
field :activate_listing, type: :listing do
arg :id, non_null(:id)
resolve &Resolvers.Listings.activate/2
end
@desc "Deactivate listing"
field :deactivate_listing, type: :listing do
arg :id, non_null(:id)
arg :input, :deactivation_options_input
resolve &Resolvers.Listings.deactivate/2
end
@desc "Favorite listing"
field :favorite_listing, type: :listing_user do
arg :id, non_null(:id)
resolve &Resolvers.Favorites.add/2
end
@desc "Unfavorite listing"
field :unfavorite_listing, type: :listing_user do
arg :id, non_null(:id)
resolve &Resolvers.Favorites.remove/2
end
@desc "Tour visualization"
field :tour_visualized, type: :listing do
arg :id, non_null(:id)
resolve &Resolvers.Listings.show/2
end
end
object :listing_subscriptions do
@desc "Subscribe to listing activation"
field :listing_activated, :listing do
arg :id, non_null(:id)
config &Resolvers.Listings.listing_activated_config/2
trigger :activate_listing, topic: &Resolvers.Listings.listing_activate_trigger/1
end
@desc "Subscribe to listing deactivation"
field :listing_deactivated, :listing do
arg :id, non_null(:id)
config &Resolvers.Listings.listing_deactivated_config/2
trigger :deactivate_listing, topic: &Resolvers.Listings.listing_deactivate_trigger/1
end
@desc "Subscribe to listing show"
field :listing_inserted, :listing do
config &Resolvers.Listings.listing_inserted_config/2
trigger :insert_listing, topic: &Resolvers.Listings.insert_listing_trigger/1
end
@desc "Subscribe to listing update"
field :listing_updated, :listing do
arg :id, non_null(:id)
config &Resolvers.Listings.listing_updated_config/2
trigger :update_listing, topic: &Resolvers.Listings.update_listing_trigger/1
end
end
end
|
apps/re_web/lib/graphql/types/listing.ex
| 0.724091 | 0.537648 |
listing.ex
|
starcoder
|
defmodule Argon2 do
@moduledoc """
Elixir wrapper for the Argon2 password hashing function.
This library can be used on its own, or it can be used together with
[Comeonin](https://hexdocs.pm/comeonin/api-reference.html), which
provides a higher-level api.
Before using Argon2, you will need to configure it. Read the documentation
for Argon2.Stats for more information about configuration. After that,
most users will just need to use the `hash_pwd_salt/2` and `verify_pass/3`
functions from this module.
For a lower-level API, see Argon2.Base.
## Argon2
Argon2 is the winner of the [Password Hashing Competition (PHC)](https://password-hashing.net).
Argon2 is a memory-hard password hashing function which can be used to hash
passwords for credential storage, key derivation, or other applications.
Argon2 has the following three variants (Argon2i is the default):
* Argon2d - suitable for applications with no threats from side-channel
timing attacks (eg. cryptocurrencies)
* Argon2i - suitable for password hashing and password-based key derivation
* Argon2id - a hybrid of Argon2d and Argon2i
Argon2i, Argon2d, and Argon2id are parametrized by:
* A **time** cost, which defines the amount of computation realized and
therefore the execution time, given in number of iterations
* A **memory** cost, which defines the memory usage, given in kibibytes
* A **parallelism** degree, which defines the number of parallel threads
More information can be found in the documentation for the Argon2.Stats
module and at the [Argon2 reference C implementation
repository](https://github.com/P-H-C/phc-winner-argon2).
## Comparison with Bcrypt / Pbkdf2
Currently, the most popular password hashing functions are Bcrypt,
which was presented in 1999, and Pbkdf2 (pbkdf2_sha256 or pbkdf2_sha512),
which dates back to 2000. Both are strong password hashing functions
with no known vulnerabilities, and their algorithms have been used and
widely reviewed for over 10 years. To help you decide whether you should
use Argon2 instead, here is a brief comparison of Bcrypt / Pbkdf2 with
Argon2.
Argon2 is a lot newer, and this can be considered to be both an
advantage and a disadvantage. On the one hand, Argon2 benefits
from more recent research, and it is designed to combat the kinds
of attacks which have become more common over the past decade,
such as the use of GPUs or dedicated hardware. On the other hand,
Argon2 has not received the same amount of scrutiny that Bcrypt / Pbkdf2
has.
One of the main differences is that Argon2 is a memory-hard function,
and this means that it is designed to use a lot more memory than
Bcrypt / Pbkdf2. With Bcrypt / Pbkdf2, attackers can use GPUs to hash
several hundred / thousand passwords in parallel. This can result in
significant gains in the time it takes an attacker to crack passwords.
Argon2's memory cost means that it is a lot more difficult for attackers
to benefit from using GPUs or other dedicated hardware.
"""
alias Argon2.Base
@doc """
Generate a random salt.
The default length for the salt is 16 bytes. We do not recommend using
a salt shorter than the default.
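## Example
Generating a default-length salt:
salt = Argon2.gen_salt()
byte_size(salt)
#=> 16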
"""
def gen_salt(salt_len \\ 16), do: :crypto.strong_rand_bytes(salt_len)
@doc """
Generate a random salt and hash a password using Argon2.
## Options
For more information about the options for the underlying hash function,
see the documentation for Argon2.Base.hash_password/3.
This function has the following additional option:
* `:salt_len` - the length of the random salt
* the default is 16 (the minimum is 8) bytes
* we do not recommend using a salt less than 16 bytes long
## Examples
The following example changes the default `t_cost` and `m_cost`:
Argon2.hash_pwd_salt("password", [t_cost: 8, m_cost: 20])
In the example below, the Argon2 type is changed to Argon2id:
Argon2.hash_pwd_salt("password", [argon2_type: 2])
To use Argon2d, use `argon2_type: 0`.
"""
def hash_pwd_salt(password, opts \\ []) do
Base.hash_password(password, gen_salt(Keyword.get(opts, :salt_len, 16)), opts)
end
@doc """
Check the password.
The check is performed in constant time to avoid timing attacks.
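## Example
Verifying a password against a hash produced by `hash_pwd_salt/2`:
hash = Argon2.hash_pwd_salt("password")
Argon2.verify_pass("password", hash)
#=> true
Argon2.verify_pass("wrong password", hash)
#=> false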
"""
def verify_pass(password, stored_hash) do
hash = :binary.bin_to_list(stored_hash)
case Base.verify_nif(hash, password, argon2_type(stored_hash)) do
0 -> true
_ -> false
end
end
@doc """
Verify an encoded Argon2 hash.
This function is deprecated, and it will be removed in version 2.0.
Please use `verify_pass` instead.
"""
def verify_hash(stored_hash, password, opts \\ [])
def verify_hash(stored_hash, password, _) do
IO.puts(:stderr, "Argon2.verify_hash is deprecated - please use Argon2.verify_pass instead")
hash = :binary.bin_to_list(stored_hash)
case Base.verify_nif(hash, password, argon2_type(stored_hash)) do
0 -> true
_ -> false
end
end
@doc """
A dummy verify function to help prevent user enumeration.
This function hashes the password and then returns false, and it is
intended to make it more difficult for any potential attacker to find
valid usernames by using timing attacks. This function is only useful
if it is used as part of a policy of hiding usernames. For more information,
see the section below on username obfuscation.
It is important that this function is called with the same options
that are used to hash the password.
## Example
The following example looks for the user in the database and checks the
password with the stored password hash if the user is found. It then
returns the user struct, if the password is correct, or false. If no user
is found, the `no_user_verify` function is called. This will take the same
time to run as the `verify_pass` function. This means that the end user
will not be able to find valid usernames just by timing the responses.
def verify_password(username, password) do
case Repo.get_by(User, username: username) do
nil -> Argon2.no_user_verify()
user -> Argon2.verify_pass(password, user.password_hash) && user
end
end
## Username obfuscation
In addition to keeping passwords secret, hiding the precise username
can help make online attacks more difficult. An attacker would then
have to guess a username / password combination, rather than just
a password, to gain access.
This does not mean that the username should be kept completely secret.
Adding a short numerical suffix to a user's name, for example, would be
sufficient to increase the attacker's work considerably.
If you are implementing a policy of hiding usernames, it is important
to make sure that the username is not revealed by any other part of
your application.
"""
def no_user_verify(opts \\ []) do
hash_pwd_salt("", opts)
false
end
defp argon2_type("$argon2id" <> _), do: 2
defp argon2_type("$argon2i" <> _), do: 1
defp argon2_type("$argon2d" <> _), do: 0
defp argon2_type(_) do
raise ArgumentError,
"Invalid Argon2 hash. " <> "Please check the 'stored_hash' input to verify_pass."
end
end
|
deps/argon2_elixir/lib/argon2.ex
| 0.848031 | 0.63392 |
argon2.ex
|
starcoder
|
defmodule Conduit.ContentType do
@moduledoc """
Formats and parses a message body based on the content type given.
Custom content types can be specified in your configuration.
config :conduit, Conduit.ContentType, [{"application/x-custom", MyApp.CustomContentType}]
Note that any new content types specified in this way will require a recompile of Conduit.
$ mix deps.clean conduit --build
$ mix deps.get
Any custom content types should implement the Conduit.ContentType
behaviour. See `Conduit.ContentType.JSON` for an example.
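A minimal sketch of a custom content type (the module name and the
pass-through encoding are hypothetical):

    defmodule MyApp.CustomContentType do
      use Conduit.ContentType

      def format(message, _opts), do: put_body(message, to_string(message.body))
      def parse(message, _opts), do: put_body(message, message.body)
    end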
"""
@callback format(Conduit.Message.t(), Keyword.t()) :: Conduit.Message.t()
@callback parse(Conduit.Message.t(), Keyword.t()) :: Conduit.Message.t()
@default_content_types [
{"text/plain", Conduit.ContentType.Text},
{"application/json", Conduit.ContentType.JSON},
{"application/x-erlang-binary", Conduit.ContentType.ErlangBinary}
]
@doc false
defmacro __using__(_opts) do
quote do
@behaviour Conduit.ContentType
import Conduit.Message
end
end
@doc """
Formats the message body with the specified content type.
## Examples
iex> import Conduit.Message
iex> message =
iex> %Conduit.Message{}
iex> |> put_body(%{})
iex> |> Conduit.ContentType.format("application/json", [])
iex> message.body
"{}"
"""
@spec format(Conduit.Message.t(), String.t(), Keyword.t()) :: Conduit.Message.t()
def format(message, type, opts) do
content_type(type).format(message, opts)
end
@doc """
Parses the message body with the specified content type.
## Examples
iex> import Conduit.Message
iex> message =
iex> %Conduit.Message{}
iex> |> put_body("{}")
iex> |> Conduit.ContentType.parse("application/json", [])
iex> message.body
%{}
"""
@spec parse(Conduit.Message.t(), String.t(), Keyword.t()) :: Conduit.Message.t()
def parse(message, type, opts) do
content_type(type).parse(message, opts)
end
@spec content_type(String.t()) :: module
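  # Read the configured content types at compile time and generate one
  # `content_type/1` clause per {type, module} pair; unknown types fall
  # through to the raising clause below.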
config_content_types = Application.get_env(:conduit, Conduit.ContentType, [])
for {type, content_type} <- config_content_types ++ @default_content_types do
defp content_type(unquote(type)), do: unquote(content_type)
end
defp content_type(content_type) do
raise Conduit.UnknownContentTypeError, "Unknown content type #{inspect(content_type)}"
end
end
|
lib/conduit/content_type.ex
| 0.826642 | 0.440349 |
content_type.ex
|
starcoder
|
defmodule Inquisitor.JsonApi.Include do
@moduledoc """
Inquisitor query handlers for JSON API includes
[JSON API Spec](http://jsonapi.org/format/#fetching-includes)
#### Usage
`use` the module *after* the `Inquisitor` module:
defmodule MyApp do
use Inquisitor
use Inquisitor.JsonApi.Include
...
end
This module allows you to decide how you want to handle the include params.
For example you may query your API with the following URL:
https://example.com/posts?include=tags,author
You can use `build_include_query/3` to define matchers:
def build_include_query(query, include, _context) do
Ecto.Query.preload(query, ^String.to_existing_atom(include))
end
#### Relationship paths
The value for an include can be dot-separated to indicate nesting:
author.profile
If you want to parse and `preload` this relationship properly:
def build_include_query(query, include, _context) do
preload = Inquisitor.JsonApi.Include.preload_parser(include)
Ecto.Query.preload(query, preload)
end
For the given include of `author.profile` the result of `Inquisitor.JsonApi.Include.preload_parser/1`
would be `[author: :profile]`. The parser handles arbitrary nesting depth:
preload_parser("foo.bar.baz.qux")
> [foo: [bar: [baz: :qux]]]
#### Security
This module is secure by default. Meaning that you must opt-in to handle the include params.
Otherwise they are ignored by the query builder.
If you would like to limit the values to act upon, use a `guard`:
@include_whitelist ~w(tags author)
def build_include_query(query, include, _context) when include in @include_whitelist do
Ecto.Query.preload(query, ^String.to_existing_atom(include))
end
"""
require Inquisitor
defmacro __using__(_opts) do
quote do
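      # Split the comma-separated `include` param and fold each segment
      # through the overridable `build_include_query/3` callback.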
def build_query(query, "include", includes, context) do
includes
|> String.split(",")
|> Enum.reduce(query, fn(include, query) ->
build_include_query(query, include, context)
end)
end
@before_compile Inquisitor.JsonApi.Include
end
end
defmacro __before_compile__(_env) do
quote generated: true do
      def build_include_query(query, _include, _context), do: query
defoverridable [build_include_query: 3]
end
end
@doc """
Parse path segments into nested keyword list
Example:
"foo.bar.baz.qux"
|> preload_parser()
> [foo: [bar: [baz: :qux]]]
"""
def preload_parser(path) do
path
|> String.split(".")
|> build_segments()
end
defp build_segments([segment | []]),
do: String.to_existing_atom(segment)
defp build_segments([segment | segments]) do
[{String.to_existing_atom(segment), build_segments(segments)}]
end
end
|
lib/inquisitor/jsonapi/include.ex
| 0.887674 | 0.46035 |
include.ex
|
starcoder
|
defmodule Timex.Format.Duration.Formatter do
@moduledoc """
This module defines the behaviour for custom Time formatters
"""
use Timex
alias Timex.Translator
alias Timex.Duration
alias Timex.Format.Duration.Formatters.Default
alias Timex.Format.Duration.Formatters.Humanized
defmacro __using__(_) do
quote do
alias Timex.Duration
@behaviour Timex.Format.Duration.Formatter
end
end
@callback format(Duration.t()) :: String.t() | {:error, term}
@callback lformat(Duration.t(), locale :: String.t()) :: String.t() | {:error, term}
@doc """
Formats a Duration as a string, using the provided
formatter. If a formatter is not provided, the formatter used is
`Timex.Format.Duration.Formatters.Default`. As a handy shortcut, you can reference
the other built-in formatter (Humanized) via the :humanized atom as shown below.
# Examples
iex> d = Timex.Duration.from_erl({1435, 180354, 590264})
...> #{__MODULE__}.format(d)
"P45Y6M5DT21H12M34.590264S"
"""
@spec format(Duration.t()) :: String.t() | {:error, term}
def format(duration), do: lformat(duration, Translator.current_locale(), Default)
@doc """
Same as format/1, but takes a formatter name as an argument
## Examples
iex> d = Timex.Duration.from_erl({1435, 180354, 590264})
...> #{__MODULE__}.format(d, :humanized)
"45 years, 6 months, 5 days, 21 hours, 12 minutes, 34 seconds, 590.264 milliseconds"
"""
@spec format(Duration.t(), atom) :: String.t() | {:error, term}
def format(duration, formatter), do: lformat(duration, Translator.current_locale(), formatter)
@doc """
Same as format/1, but takes a locale name as an argument, and translates the format string,
if the locale has translations.
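## Examples

A sketch, assuming the default (ISO-8601) formatter, whose output does not
vary by locale:

    iex> d = Timex.Duration.from_erl({1435, 180354, 590264})
    ...> #{__MODULE__}.lformat(d, "en")
    "P45Y6M5DT21H12M34.590264S"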
"""
@spec lformat(Duration.t(), String.t()) :: String.t() | {:error, term}
def lformat(duration, locale), do: lformat(duration, locale, Default)
@doc """
Same as lformat/2, but takes a formatter as an argument
"""
@spec lformat(Duration.t(), String.t(), atom) :: String.t() | {:error, term}
def lformat(%Duration{} = duration, locale, formatter)
when is_binary(locale) and is_atom(formatter) do
case formatter do
:humanized -> Humanized.lformat(duration, locale)
_ -> formatter.lformat(duration, locale)
end
end
def lformat(_, _, _), do: {:error, :invalid_duration}
end
|
lib/format/duration/formatter.ex
| 0.891419 | 0.428712 |
formatter.ex
|
starcoder
|
defmodule Analytics.RegionOfInterest do
@moduledoc """
The RegionOfInterest context.
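A typical lookup; the camera exid and date below are hypothetical, and
`from_date` is stored as a string:

    Analytics.RegionOfInterest.get_last_record("2021-01-01", "camera-exid")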
"""
use Evercam.Schema
import Ecto.Query, warn: false
alias Analytics.RegionOfInterest
schema "regionof_interest" do
field :cameraex, :string
field :roi_wideness, :integer
field :roi_type, :string
field :x, :float
field :y, :float
field :x2, :float
field :y2, :float
field :from_date, :string
timestamps()
end
def get_last_record(start_date, exid) do
query = from u in RegionOfInterest,
where: (u.from_date <= ^start_date) and (u.cameraex == ^exid),
select: u.from_date,
order_by: [desc: :from_date]
query
|> limit(1)
|> AnalyticsRepo.one
end
def get_last_rectangular_record(start_date, exid) do
query = from u in RegionOfInterest,
where: (u.from_date <= ^start_date) and (u.cameraex == ^exid) and (u.roi_type == "rectangular"),
order_by: [desc: :from_date]
query
|> limit(1)
|> AnalyticsRepo.one
end
def get_coordinates_record(start_date, exid) do
query = from u in RegionOfInterest,
where: (u.from_date == ^start_date) and (u.cameraex == ^exid),
order_by: [desc: :from_date]
query
|> AnalyticsRepo.all
end
def get_region_of_interest!(id), do: AnalyticsRepo.get!(RegionOfInterest, id)
def get_all_roi(exid) do
RegionOfInterest
|> where(cameraex: ^exid)
|> order_by(asc: :from_date)
|> AnalyticsRepo.all
end
@doc false
  def changeset(%RegionOfInterest{} = region_of_interest, attrs) do
    region_of_interest
|> cast(attrs, [:cameraex, :roi_wideness, :roi_type, :x, :y, :x2, :y2, :from_date])
|> validate_required(:roi_type, [message: "Drawing type cannot be empty."])
|> validate_required(:x, [message: "X1 cannot be empty."])
    |> validate_required(:y, [message: "Y1 cannot be empty."])
    |> validate_required(:x2, [message: "X2 cannot be empty."])
    |> validate_required(:y2, [message: "Y2 cannot be empty."])
    |> validate_required(:from_date, [message: "Start date cannot be empty."])
end
end
|
lib/evercam_models/analytics/region_of_interest.ex
| 0.644337 | 0.418519 |
region_of_interest.ex
|
starcoder
|
defmodule Re.Exporters.FacebookAds.RealEstate do
@moduledoc """
Listing XML exporter for Facebook Dynamic Ads for Real Estate
https://developers.facebook.com/docs/marketing-api/dynamic-ads-for-real-estate
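A minimal usage sketch (the listings must be structs exposing the fields
used by the attribute converters below, e.g. `:id`, `:type`, `:address`):

    xml = Re.Exporters.FacebookAds.RealEstate.export_listings_xml(listings)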
"""
@exported_attributes ~w(id url title availability listing_type description price property_type
rooms bathrooms units area_unit area neighborhood address latitude longitude image)a
@default_options %{attributes: @exported_attributes}
@frontend_url Application.get_env(:re_integrations, :frontend_url)
@image_url "https://res.cloudinary.com/emcasa/image/upload/f_auto/v1513818385"
@max_images 20
@availabilities %{
"pre-launch" => "off_market",
"planning" => "off_market",
"building" => "available_soon",
"delivered" => "for_sale"
}
def export_listings_xml(listings, options \\ %{}) do
options = merge_default_options(options)
listings
|> Enum.filter(&has_image?/1)
|> Enum.map(&build_node(&1, options))
|> build_root()
|> XmlBuilder.document()
|> XmlBuilder.generate(format: :none)
end
defp has_image?(%{images: []}), do: false
defp has_image?(_), do: true
def merge_default_options(options) do
Map.merge(@default_options, options)
end
def build_node(listing, options) do
{"listing", %{}, convert_attributes(listing, options)}
end
def build_images_node(images) do
images
|> Enum.map(&build_image_node(&1))
|> Enum.take(@max_images)
end
defp build_root(nodes) do
{"listings", %{}, nodes}
end
def convert_attributes(listing, %{attributes: attributes}) do
Enum.map(attributes, &convert_attribute_with_cdata(&1, listing))
end
defp convert_attribute_with_cdata(:address = attr, listing) do
convert_attribute(attr, listing)
end
defp convert_attribute_with_cdata(:image = attr, listing) do
convert_attribute(attr, listing)
end
defp convert_attribute_with_cdata(attr, listing) do
{tag, attrs, value} = convert_attribute(attr, listing)
{tag, attrs, escape_cdata(value)}
end
defp convert_attribute(:id, %{id: id}) do
{"home_listing_id", %{}, id}
end
defp convert_attribute(:url, %{id: id}) do
{"url", %{}, build_url(@frontend_url, "/imoveis/", to_string(id))}
end
defp convert_attribute(:title, %{type: type, address: %{city: city}}) do
{"name", %{}, "#{type} a venda em #{city}"}
end
defp convert_attribute(:availability, %{development: nil}) do
{"availability", %{}, "for_sale"}
end
defp convert_attribute(:availability, %{development: %{phase: phase}}) do
{"availability", %{}, Map.get(@availabilities, phase, "for_sale")}
end
defp convert_attribute(:listing_type, %{development: nil}) do
{"listing_type", %{}, "for_sale_by_owner"}
end
defp convert_attribute(:listing_type, _) do
{"listing_type", %{}, "new_listing"}
end
defp convert_attribute(:description, %{description: description}) do
{"description", %{}, description}
end
defp convert_attribute(:price, %{price: price}) do
{"price", %{}, "#{price} BRL"}
end
defp convert_attribute(:property_type, %{type: type}) do
{"property_type", %{}, expand_type(type)}
end
defp convert_attribute(:rooms, %{rooms: rooms}) do
{"num_beds", %{}, rooms || 0}
end
defp convert_attribute(:bathrooms, %{bathrooms: bathrooms}) do
{"num_baths", %{}, bathrooms || 0}
end
defp convert_attribute(:units, _) do
{"num_units", %{}, 1}
end
defp convert_attribute(:address, %{address: address}) do
{
"address",
%{format: "simple"},
[
{"component", %{name: "addr1"}, escape_cdata("#{address.street}")},
{"component", %{name: "city"}, escape_cdata("#{address.city}")},
{"component", %{name: "region"}, escape_cdata("#{address.neighborhood}")},
{"component", %{name: "country"}, escape_cdata("Brazil")},
{"component", %{name: "postal_code"}, escape_cdata("#{address.postal_code}")}
]
}
end
defp convert_attribute(:neighborhood, %{address: address}) do
{"neighborhood", %{}, address.neighborhood}
end
defp convert_attribute(:latitude, %{address: %{lat: lat}}) do
{"latitude", %{}, lat}
end
defp convert_attribute(:longitude, %{address: %{lng: lng}}) do
{"longitude", %{}, lng}
end
defp convert_attribute(:image, %{images: []}) do
{"image", %{}, nil}
end
defp convert_attribute(:image, %{images: images}) do
build_images_node(images)
end
defp convert_attribute(:area, %{area: area}) do
{"area_size", %{}, area}
end
defp convert_attribute(:area_unit, _) do
{"area_unit", %{}, "sq_m"}
end
defp expand_type("Apartamento"), do: "apartment"
defp expand_type("Cobertura"), do: "apartment"
defp expand_type("Casa"), do: "house"
defp expand_type(type), do: type
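  # Wrap text values in CDATA so reserved XML characters survive generation;
  # nil is passed through untouched.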
defp escape_cdata(nil) do
nil
end
defp escape_cdata(value) when is_binary(value) do
{:cdata, value}
end
defp escape_cdata(value) do
escape_cdata(to_string(value))
end
defp build_url(host, path, param) do
host
|> URI.merge(path)
|> URI.merge(param)
|> URI.to_string()
end
defp build_image_node(image) do
{
"image",
%{},
[
{"url", %{}, escape_cdata("#{@image_url}/#{image.filename}")}
]
}
end
end
|
apps/re/lib/exporters/facebook_ads/real_estate.ex
| 0.640748 | 0.402245 |
real_estate.ex
|
starcoder
|