code
stringlengths 114
1.05M
| path
stringlengths 3
312
| quality_prob
float64 0.5
0.99
| learning_prob
float64 0.2
1
| filename
stringlengths 3
168
| kind
stringclasses 1
value |
---|---|---|---|---|---|
defmodule AWS.KinesisVideo do
  @moduledoc """
  Client for the Amazon Kinesis Video Streams control-plane API.

  Every public function builds a signed `POST` request against the
  corresponding REST path and returns `{:ok, decoded_body, raw_response}`
  on success or `{:error, reason}` otherwise.
  """

  @doc """
  Creates a signaling channel.
  `CreateSignalingChannel` is an asynchronous operation.
  """
  def create_signaling_channel(client, input, options \\ []) do
    path_ = "/createSignalingChannel"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Creates a new Kinesis video stream.
  When you create a new stream, Kinesis Video Streams assigns it a version
  number. When you change the stream's metadata, Kinesis Video Streams
  updates the version.
  `CreateStream` is an asynchronous operation.
  For information about how the service works, see [How it
  Works](https://docs.aws.amazon.com/kinesisvideostreams/latest/dg/how-it-works.html).
  You must have permissions for the `KinesisVideo:CreateStream` action.
  """
  def create_stream(client, input, options \\ []) do
    path_ = "/createStream"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Deletes a specified signaling channel. `DeleteSignalingChannel` is an
  asynchronous operation. If you don't specify the channel's current version,
  the most recent version is deleted.
  """
  def delete_signaling_channel(client, input, options \\ []) do
    path_ = "/deleteSignalingChannel"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Deletes a Kinesis video stream and the data contained in the stream.
  This method marks the stream for deletion, and makes the data in the stream
  inaccessible immediately.
  To ensure that you have the latest version of the stream before deleting
  it, you can specify the stream version. Kinesis Video Streams assigns a
  version to each stream. When you update a stream, Kinesis Video Streams
  assigns a new version number. To get the latest stream version, use the
  `DescribeStream` API.
  This operation requires permission for the `KinesisVideo:DeleteStream`
  action.
  """
  def delete_stream(client, input, options \\ []) do
    path_ = "/deleteStream"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Returns the most current information about the signaling channel. You must
  specify either the name or the Amazon Resource Name (ARN) of the channel
  that you want to describe.
  """
  def describe_signaling_channel(client, input, options \\ []) do
    path_ = "/describeSignalingChannel"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Returns the most current information about the specified stream. You must
  specify either the `StreamName` or the `StreamARN`.
  """
  def describe_stream(client, input, options \\ []) do
    path_ = "/describeStream"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Gets an endpoint for a specified stream for either reading or writing. Use
  this endpoint in your application to read from the specified stream (using
  the `GetMedia` or `GetMediaForFragmentList` operations) or write to it
  (using the `PutMedia` operation).
  <note> The returned endpoint does not have the API name appended. The
  client needs to add the API name to the returned endpoint.
  </note> In the request, specify the stream either by `StreamName` or
  `StreamARN`.
  """
  def get_data_endpoint(client, input, options \\ []) do
    path_ = "/getDataEndpoint"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Provides an endpoint for the specified signaling channel to send and
  receive messages. This API uses the
  `SingleMasterChannelEndpointConfiguration` input parameter, which consists
  of the `Protocols` and `Role` properties.
  `Protocols` is used to determine the communication mechanism. For example,
  if you specify `WSS` as the protocol, this API produces a secure websocket
  endpoint. If you specify `HTTPS` as the protocol, this API generates an
  HTTPS endpoint.
  `Role` determines the messaging permissions. A `MASTER` role results in
  this API generating an endpoint that a client can use to communicate with
  any of the viewers on the channel. A `VIEWER` role results in this API
  generating an endpoint that a client can use to communicate only with a
  `MASTER`.
  """
  def get_signaling_channel_endpoint(client, input, options \\ []) do
    path_ = "/getSignalingChannelEndpoint"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Returns an array of `ChannelInfo` objects. Each object describes a
  signaling channel. To retrieve only those channels that satisfy a specific
  condition, you can specify a `ChannelNameCondition`.
  """
  def list_signaling_channels(client, input, options \\ []) do
    path_ = "/listSignalingChannels"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Returns an array of `StreamInfo` objects. Each object describes a stream.
  To retrieve only streams that satisfy a specific condition, you can specify
  a `StreamNameCondition`.
  """
  def list_streams(client, input, options \\ []) do
    path_ = "/listStreams"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Returns a list of tags associated with the specified signaling channel.
  """
  def list_tags_for_resource(client, input, options \\ []) do
    path_ = "/ListTagsForResource"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Returns a list of tags associated with the specified stream.
  In the request, you must specify either the `StreamName` or the
  `StreamARN`.
  """
  def list_tags_for_stream(client, input, options \\ []) do
    path_ = "/listTagsForStream"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Adds one or more tags to a signaling channel. A *tag* is a key-value pair
  (the value is optional) that you can define and assign to AWS resources. If
  you specify a tag that already exists, the tag value is replaced with the
  value that you specify in the request. For more information, see [Using
  Cost Allocation
  Tags](https://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/cost-alloc-tags.html)
  in the *AWS Billing and Cost Management User Guide*.
  """
  def tag_resource(client, input, options \\ []) do
    path_ = "/TagResource"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Adds one or more tags to a stream. A *tag* is a key-value pair (the value
  is optional) that you can define and assign to AWS resources. If you
  specify a tag that already exists, the tag value is replaced with the value
  that you specify in the request. For more information, see [Using Cost
  Allocation
  Tags](https://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/cost-alloc-tags.html)
  in the *AWS Billing and Cost Management User Guide*.
  You must provide either the `StreamName` or the `StreamARN`.
  This operation requires permission for the `KinesisVideo:TagStream` action.
  Kinesis video streams support up to 50 tags.
  """
  def tag_stream(client, input, options \\ []) do
    path_ = "/tagStream"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Removes one or more tags from a signaling channel. In the request, specify
  only a tag key or keys; don't specify the value. If you specify a tag key
  that does not exist, it's ignored.
  """
  def untag_resource(client, input, options \\ []) do
    path_ = "/UntagResource"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Removes one or more tags from a stream. In the request, specify only a tag
  key or keys; don't specify the value. If you specify a tag key that does
  not exist, it's ignored.
  In the request, you must provide the `StreamName` or `StreamARN`.
  """
  def untag_stream(client, input, options \\ []) do
    path_ = "/untagStream"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Increases or decreases the stream's data retention period by the value that
  you specify. To indicate whether you want to increase or decrease the data
  retention period, specify the `Operation` parameter in the request body. In
  the request, you must specify either the `StreamName` or the `StreamARN`.
  <note> The retention period that you specify replaces the current value.
  </note> This operation requires permission for the
  `KinesisVideo:UpdateDataRetention` action.
  Changing the data retention period affects the data in the stream as
  follows:
  <ul> <li> If the data retention period is increased, existing data is
  retained for the new retention period. For example, if the data retention
  period is increased from one hour to seven hours, all existing data is
  retained for seven hours.
  </li> <li> If the data retention period is decreased, existing data is
  retained for the new retention period. For example, if the data retention
  period is decreased from seven hours to one hour, all existing data is
  retained for one hour, and any data older than one hour is deleted
  immediately.
  </li> </ul>
  """
  def update_data_retention(client, input, options \\ []) do
    path_ = "/updateDataRetention"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Updates the existing signaling channel. This is an asynchronous operation
  and takes time to complete.
  If the `MessageTtlSeconds` value is updated (either increased or reduced),
  it only applies to new messages sent via this channel after it's been
  updated. Existing messages are still expired as per the previous
  `MessageTtlSeconds` value.
  """
  def update_signaling_channel(client, input, options \\ []) do
    path_ = "/updateSignalingChannel"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Updates stream metadata, such as the device name and media type.
  You must provide the stream name or the Amazon Resource Name (ARN) of the
  stream.
  To make sure that you have the latest version of the stream before updating
  it, you can specify the stream version. Kinesis Video Streams assigns a
  version to each stream. When you update a stream, Kinesis Video Streams
  assigns a new version number. To get the latest stream version, use the
  `DescribeStream` API.
  `UpdateStream` is an asynchronous operation, and takes time to complete.
  """
  def update_stream(client, input, options \\ []) do
    path_ = "/updateStream"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  # Builds, signs (SigV4) and performs a single HTTP request.
  #
  # Spec fixed from the generated original: `method` is an atom (always
  # `:post` here), not a binary, and `success_status_code` may be `nil`,
  # in which case any of 200/202/204 is accepted (see `perform_request/7`).
  @spec request(AWS.Client.t(), atom(), binary(), list(), list(), map(), list(), pos_integer() | nil) ::
          {:ok, map() | nil, map()}
          | {:error, term()}
  defp request(client, method, path, query, headers, input, options, success_status_code) do
    client = %{client | service: "kinesisvideo"}
    host = build_host("kinesisvideo", client)

    url =
      host
      |> build_url(path, client)
      |> add_query(query, client)

    additional_headers = [{"Host", host}, {"Content-Type", "application/x-amz-json-1.1"}]
    headers = AWS.Request.add_headers(additional_headers, headers)
    payload = encode!(client, input)
    headers = AWS.Request.sign_v4(client, method, url, headers, payload)
    perform_request(client, method, url, payload, headers, options, success_status_code)
  end

  # Executes the request and normalizes the result into
  # {:ok, body | nil, response} / {:error, reason}.
  defp perform_request(client, method, url, payload, headers, options, success_status_code) do
    case AWS.Client.request(client, method, url, payload, headers, options) do
      # Two stacked `when` guards act as an OR: either no expected status was
      # given and we accept any common success code, or the status matches the
      # caller-supplied one exactly.
      {:ok, %{status_code: status_code, body: body} = response}
      when is_nil(success_status_code) and status_code in [200, 202, 204]
      when status_code == success_status_code ->
        # An empty body decodes to `nil` (the `if` has no `else`).
        body = if(body != "", do: decode!(client, body))
        {:ok, body, response}
      {:ok, response} ->
        {:error, {:unexpected_response, response}}
      error = {:error, _reason} -> error
    end
  end

  # "local" region with an explicit endpoint: use the endpoint verbatim
  # (development/testing against a local stack).
  defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
    endpoint
  end
  defp build_host(_endpoint_prefix, %{region: "local"}) do
    "localhost"
  end
  defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
    "#{endpoint_prefix}.#{region}.#{endpoint}"
  end

  defp build_url(host, path, %{:proto => proto, :port => port}) do
    "#{proto}://#{host}:#{port}#{path}"
  end

  # No query parameters: leave the URL untouched.
  defp add_query(url, [], _client) do
    url
  end
  defp add_query(url, query, client) do
    querystring = encode!(client, query, :query)
    "#{url}?#{querystring}"
  end

  defp encode!(client, payload, format \\ :json) do
    AWS.Client.encode!(client, payload, format)
  end

  defp decode!(client, payload) do
    AWS.Client.decode!(client, payload, :json)
  end
end
|
lib/aws/generated/kinesis_video.ex
| 0.901864 | 0.538862 |
kinesis_video.ex
|
starcoder
|
defmodule Sanbase.BlockchainAddress.MetricAdapter do
  @moduledoc """
  `Sanbase.Metric.Behaviour` implementation exposing historical balance
  metrics computed for a `{blockchain_address, slug}` selector pair.
  """

  @behaviour Sanbase.Metric.Behaviour

  import Sanbase.Utils.Transform, only: [maybe_apply_function: 2, rename_map_keys: 2]

  alias Sanbase.Balance
  alias Sanbase.Model.Project

  @aggregations [:sum, :ohlc]
  @default_aggregation :sum

  @timeseries_metrics ["historical_balance", "historical_balance_changes"]
  @histogram_metrics []
  @table_metrics []
  @metrics @histogram_metrics ++ @timeseries_metrics ++ @table_metrics

  # Every metric served by this adapter is freely accessible.
  @access_map Map.new(@metrics, fn metric -> {metric, :free} end)
  @min_plan_map Map.new(@metrics, fn metric -> {metric, :free} end)

  @free_metrics for {metric, :free} <- @access_map, do: metric
  @restricted_metrics for {metric, :restricted} <- @access_map, do: metric

  # Both the address and the slug must be provided when querying.
  @required_selectors Map.new(@metrics, &{&1, [[:blockchain_address], [:slug]]})
  @default_complexity_weight 0.3

  @human_readable_name_map %{
    "historical_balance" => "Historical Balance",
    "historical_balance_changes" => "Historical Balance Changes"
  }

  @impl Sanbase.Metric.Behaviour
  def has_incomplete_data?(_metric), do: false

  @impl Sanbase.Metric.Behaviour
  def complexity_weight(_metric), do: @default_complexity_weight

  @impl Sanbase.Metric.Behaviour
  def required_selectors(), do: @required_selectors

  @impl Sanbase.Metric.Behaviour
  def available_aggregations(), do: @aggregations

  @impl Sanbase.Metric.Behaviour
  def available_timeseries_metrics(), do: @timeseries_metrics

  @impl Sanbase.Metric.Behaviour
  def available_histogram_metrics(), do: @histogram_metrics

  @impl Sanbase.Metric.Behaviour
  def available_table_metrics(), do: @table_metrics

  @impl Sanbase.Metric.Behaviour
  def available_metrics(), do: @metrics

  @impl Sanbase.Metric.Behaviour
  def available_metrics(%{slug: slug}) do
    # The metrics are available only when the project's infrastructure maps
    # to a supported blockchain; otherwise no metrics are offered.
    with {:ok, _, _, infrastructure} <- Project.contract_info_infrastructure_by_slug(slug),
         blockchain when is_binary(blockchain) <-
           Balance.blockchain_from_infrastructure(infrastructure) do
      {:ok, @metrics}
    else
      _ -> {:ok, []}
    end
  end

  @impl Sanbase.Metric.Behaviour
  def available_slugs() do
    slugs =
      Balance.supported_infrastructures()
      |> Project.List.slugs_by_infrastructure()

    {:ok, slugs}
  end

  @impl Sanbase.Metric.Behaviour
  def available_slugs(metric) when metric in @metrics do
    available_slugs()
  end

  @impl Sanbase.Metric.Behaviour
  def first_datetime(metric, %{slug: slug, blockchain_address: %{address: address}})
      when metric in @metrics do
    Balance.first_datetime(address, slug)
  end

  @impl Sanbase.Metric.Behaviour
  def last_datetime_computed_at(metric, %{slug: _slug, blockchain_address: %{address: _address}})
      when metric in @metrics do
    # There is no nice value we can put here
    {:ok, DateTime.utc_now()}
  end

  @impl Sanbase.Metric.Behaviour
  def free_metrics(), do: @free_metrics

  @impl Sanbase.Metric.Behaviour
  def restricted_metrics(), do: @restricted_metrics

  @impl Sanbase.Metric.Behaviour
  def access_map(), do: @access_map

  @impl Sanbase.Metric.Behaviour
  def min_plan_map(), do: @min_plan_map

  @impl Sanbase.Metric.Behaviour
  def human_readable_name(metric), do: {:ok, Map.get(@human_readable_name_map, metric)}

  @impl Sanbase.Metric.Behaviour
  def metadata(metric) do
    {:ok,
     %{
       metric: metric,
       min_interval: "5m",
       default_aggregation: @default_aggregation,
       available_aggregations: @aggregations,
       available_selectors: [:blockchain_address, :slug],
       required_selectors: Map.get(@required_selectors, metric, []),
       data_type: :timeseries,
       complexity_weight: @default_complexity_weight
     }}
  end

  @impl Sanbase.Metric.Behaviour
  def timeseries_data(
        "historical_balance",
        %{slug: slug, blockchain_address: %{address: address}},
        from,
        to,
        interval,
        opts
      ) do
    # The :ohlc aggregation produces open/high/low/close points; any other
    # aggregation falls back to the plain balance series.
    if Keyword.get(opts, :aggregation) == :ohlc do
      address
      |> Sanbase.Balance.historical_balance_ohlc(slug, from, to, interval)
      |> maybe_apply_function(&to_ohlc_point/1)
    else
      address
      |> Sanbase.Balance.historical_balance(slug, from, to, interval)
      |> rename_map_keys(old_key: :balance, new_key: :value)
    end
  end

  @impl Sanbase.Metric.Behaviour
  def timeseries_data(
        "historical_balance_changes",
        %{slug: slug, blockchain_address: %{address: address}},
        from,
        to,
        interval,
        _opts
      ) do
    address
    |> Sanbase.Balance.historical_balance_changes(slug, from, to, interval)
    |> rename_map_keys(old_key: :balance, new_key: :value)
  end

  @impl Sanbase.Metric.Behaviour
  def aggregated_timeseries_data(metric, _selector, _from, _to, _opts) do
    not_implemented_error("aggregated timeseries data", metric)
  end

  @impl Sanbase.Metric.Behaviour
  def slugs_by_filter(metric, _from, _to, _operator, _threshold, _opts) do
    not_implemented_error("slugs_by_filter", metric)
  end

  @impl Sanbase.Metric.Behaviour
  def slugs_order(metric, _from, _to, _direction, _opts) do
    not_implemented_error("slugs order", metric)
  end

  # Private functions

  # Reshapes a raw OHLC balance point into the externally exposed map.
  defp to_ohlc_point(point) do
    %{
      datetime: point.datetime,
      open: point.open_balance,
      close: point.close_balance,
      high: point.high_balance,
      low: point.low_balance
    }
  end

  defp not_implemented_error(function, metric) do
    {:error, "The #{function} function is not implemented for #{metric}"}
  end
end
|
lib/sanbase/blockchain_address/metric_adapter.ex
| 0.813238 | 0.467149 |
metric_adapter.ex
|
starcoder
|
defmodule SanbaseWeb.Graphql.Helpers.CalibrateInterval do
  @moduledoc """
  Helpers for choosing a sensible `from` datetime and interval for
  timeseries GraphQL queries.

  When the caller passes an empty interval (`""`), the interval is derived so
  that the `[from, to]` range produces at most `max_data_points` points, each
  at least `min_seconds` long, and `from` is clamped to the first datetime
  for which data exists.
  """

  alias Sanbase.DateTimeUtils

  def calibrate(module, id, from, to, interval, min_seconds \\ 300, max_data_points \\ 500)

  # Empty interval: derive it from the available data range.
  # NOTE: if `module.first_datetime/1` returns an error tuple, the `with`
  # passes it through unchanged (unlike the metric/slug clause below, which
  # crashes on error).
  def calibrate(module, id, from, to, "", min_seconds, max_data_points) do
    with {:ok, first_datetime} <- module.first_datetime(id) do
      calibrated_range(first_datetime || from, from, to, min_seconds, max_data_points)
    end
  end

  # Non-empty interval: use the caller-provided values as-is.
  def calibrate(_module, _id, from, to, interval, _min_interval, _max_data_points) do
    {:ok, from, to, interval}
  end

  # Metric/slug variant with an empty interval. Asserts on the
  # `first_datetime` result — a failure here is treated as a bug.
  def calibrate(module, metric, slug, from, to, "", min_seconds, max_data_points) do
    {:ok, first_datetime} = module.first_datetime(metric, slug)
    calibrated_range(first_datetime || from, from, to, min_seconds, max_data_points)
  end

  # Metric/slug variant with a non-empty interval: passthrough.
  def calibrate(
        _module,
        _metric,
        _id,
        from,
        to,
        interval,
        _min_interval,
        _max_data_points
      ) do
    {:ok, from, to, interval}
  end

  @doc """
  Calibrates the range like `calibrate/7` and additionally computes how many
  intervals the moving average should span (at least 2).
  """
  def calibrate_moving_average(
        module,
        id,
        from,
        to,
        interval,
        min_interval,
        moving_average_base,
        max_data_points \\ 500
      ) do
    {:ok, from, to, interval} =
      calibrate(module, id, from, to, interval, min_interval, max_data_points)

    ma_interval =
      max(
        div(
          DateTimeUtils.str_to_sec(moving_average_base),
          DateTimeUtils.str_to_sec(interval)
        ),
        2
      )

    {:ok, from, to, interval, ma_interval}
  end

  @doc """
  Restricts `[from, to]` so that metrics with incomplete intraday data do not
  include today's partial values, unless the caller explicitly opted in with
  `include_incomplete_data: true` (first argument).
  """
  def calibrate_incomplete_data_params(true, _module, _metric, from, to) do
    {:ok, from, to}
  end

  def calibrate_incomplete_data_params(false, module, metric, from, to) do
    case module.has_incomplete_data?(metric) do
      true -> rewrite_params_incomplete_data(from, to)
      false -> {:ok, from, to}
    end
  end

  # Clamps `from`/`to` to exclude today's incomplete data; errors out when
  # the whole requested range falls inside today.
  defp rewrite_params_incomplete_data(from, to) do
    end_of_previous_day = Timex.now() |> Timex.beginning_of_day() |> Timex.shift(microseconds: -1)

    case DateTime.compare(from, end_of_previous_day) != :lt do
      true ->
        {:error,
         """
         The time range provided [#{from} - #{to}] is contained in today. The metric
         requested could have incomplete data as it's calculated since the beginning
         of the day and not for the last 24 hours. If you still want to see this
         data you can pass the flag `includeIncompleteData: true` in the
         `timeseriesData` arguments
         """}

      false ->
        to =
          if DateTime.compare(to, end_of_previous_day) == :gt, do: end_of_previous_day, else: to

        {:ok, from, to}
    end
  end

  # Shared by both empty-interval clauses (previously duplicated verbatim):
  # clamps `from` to the first available datapoint and sizes the interval so
  # the range yields at most `max_data_points` points of >= `min_seconds`.
  defp calibrated_range(first_datetime, from, to, min_seconds, max_data_points) do
    from_unix =
      max(
        DateTime.to_unix(from, :second),
        DateTime.to_unix(first_datetime, :second)
      )

    interval_seconds =
      max(
        div(DateTime.to_unix(to, :second) - from_unix, max_data_points),
        min_seconds
      )

    {:ok, DateTime.from_unix!(from_unix), to, "#{interval_seconds}s"}
  end
end
|
lib/sanbase_web/graphql/helpers/calibrate_interval.ex
| 0.794505 | 0.417598 |
calibrate_interval.ex
|
starcoder
|
defmodule HL7 do
@moduledoc """
Main module of the **ex_hl7** library.
"""
alias HL7.{Codec, Message, Reader, Segment, Type, Writer}
@type segment_id :: Type.segment_id()
@type sequence :: Type.sequence()
@type composite_id :: Type.composite_id()
@type field :: Type.field()
@type item_type :: Type.item_type()
@type value_type :: Type.value_type()
@type value :: Type.value()
@type repetition :: Type.repetition()
@type read_option :: Reader.option()
@type write_option :: Writer.option()
@type read_ret ::
{:ok, Message.t()}
| {:incomplete, {(binary -> read_ret), rest :: binary}}
| {:error, reason :: any}
  @doc """
  Reads a binary containing an HL7 message converting it to a list of segments.
  ## Arguments
  * `buffer`: a binary containing the HL7 message to be parsed (partial
  messages are allowed).
  * `options`: keyword list with the read options; these are:
  * `input_format`: the format the message in the `buffer` is in; it can be
  either `:wire` for the normal HL7 wire format with carriage-returns as
  segment terminators or `:text` for a format that replaces segment
  terminators with line feeds to easily output messages to a console or
  text file.
  * `segment_creator`: function that receives a segment ID and returns a
  tuple containing the module and the struct corresponding to the given
  segment ID. By default, `&HL7.Segment.new/1` is used.
  * `trim`: boolean that when set to `true` causes the fields to be
  shortened to their optimal layout, removing trailing empty items (see
  `HL7.Codec` for an explanation of this).
  ## Return values
  Returns the parsed message (i.e. list of segments) or raises an
  `HL7.ReadError` exception in case of error.
  ## Examples
  Given an HL7 message like the following bound to the `buffer` variable:
  "MSH|^~\\&|CLIENTHL7|CLI01020304|SERVHL7|PREPAGA^112233^IIN|20120201101155||ZQA^Z02^ZQA_Z02|00XX20120201101155|P|2.4|||ER|SU|ARG\\r" <>
  "PRD|PS~4600^^HL70454||^^^B||||30123456789^CU\\r" <>
  "PID|0||1234567890ABC^^^&112233&IIN^HC||unknown\\r" <>
  "PR1|1||903401^^99DH\\r" <>
  "AUT||112233||||||1|0\\r" <>
  "PR1|2||904620^^99DH\\r" <>
  "AUT||112233||||||1|0\\r"
  You could read the message in the following way:
  iex> message = HL7.read!(buffer, input_format: :wire, trim: true)
  """
  @spec read!(buffer :: binary, [read_option()]) :: Message.t() | no_return
  def read!(buffer, options \\ []), do: Message.read!(Reader.new(options), buffer)
  @doc """
  Reads a binary containing an HL7 message converting it to a list of segments.
  ## Arguments
  * `buffer`: a binary containing the HL7 message to be parsed (partial
  messages are allowed).
  * `options`: keyword list with the read options; these are:
  * `input_format`: the format the message in the `buffer` is in; it can be
  either `:wire` for the normal HL7 wire format with carriage-returns as
  segment terminators or `:text` for a format that replaces segment
  terminators with line feeds to easily output messages to a console or
  text file.
  * `segment_creator`: function that receives a segment ID and returns a
  tuple containing the module and the struct corresponding to the given
  segment ID. By default, `&HL7.Segment.new/1` is used.
  * `trim`: boolean that when set to `true` causes the fields to be
  shortened to their optimal layout, removing trailing empty items (see
  `HL7.Codec` for an explanation of this).
  ## Return values
  * `{:ok, Message.t()}` if the buffer could be parsed successfully, then
  a message will be returned. This is actually a list of `HL7.Segment.t()`
  structs (check the [segment.ex](lib/ex_hl7/segment.ex) file to see the
  list of included segment definitions).
  * `{:incomplete, {(binary -> read_ret), rest :: binary}}` if the message
  in the string is not a complete HL7 message, then a function will be
  returned together with the part of the message that could not be parsed.
  You should acquire the remaining part of the message and concatenate it
  to the `rest` of the previous buffer. Finally, you have to call the
  function that was returned passing it the concatenated string.
  * `{:error, reason :: any}` if the contents of the buffer were malformed
  and could not be parsed correctly.
  ## Examples
  Given an HL7 message like the following bound to the `buffer` variable:
  "MSH|^~\\&|CLIENTHL7|CLI01020304|SERVHL7|PREPAGA^112233^IIN|20120201101155||ZQA^Z02^ZQA_Z02|00XX20120201101155|P|2.4|||ER|SU|ARG\\r" <>
  "PRD|PS~4600^^HL70454||^^^B||||30123456789^CU\\r" <>
  "PID|0||1234567890ABC^^^&112233&IIN^HC||unknown\\r" <>
  "PR1|1||903401^^99DH\\r" <>
  "AUT||112233||||||1|0\\r" <>
  "PR1|2||904620^^99DH\\r" <>
  "AUT||112233||||||1|0\\r"
  You could read the message in the following way:
  iex> {:ok, message} = HL7.read(buffer, input_format: :wire, trim: true)
  """
  @spec read(buffer :: binary, [read_option()]) :: read_ret()
  def read(buffer, options \\ []), do: Message.read(Reader.new(options), buffer)
  @doc """
  Writes a list of HL7 segments into an iolist.
  ## Arguments
  * `message`: a list of HL7 segments to be written into the string.
  * `options`: keyword list with the write options; these are:
  * `output_format`: the format the message will be written in; it can be
  either `:wire` for the normal HL7 wire format with carriage-returns as
  segment terminators or `:text` for a format that replaces segment
  terminators with line feeds to easily output messages to a console or
  text file. Defaults to `:wire`.
  * `separators`: a tuple containing the item separators to be used when
  generating the message as returned by `HL7.Codec.set_separators/1`.
  Defaults to `HL7.Codec.separators`.
  * `trim`: boolean that when set to `true` causes the fields to be
  shortened to their optimal layout, removing trailing empty items (see
  `HL7.Codec` for an explanation of this). Defaults to `true`.
  ## Return value
  iolist containing the message in the selected output format.
  ## Examples
  Given the `message` parsed in the `HL7.read/2` example you could do:
  iex> buffer = HL7.write(message, output_format: :text, trim: true)
  iex> IO.puts(buffer)
  MSH|^~\\&|CLIENTHL7|CLI01020304|SERVHL7|PREPAGA^112233^IIN|20120201101155||ZQA^Z02^ZQA_Z02|00XX20120201101155|P|2.4|||ER|SU|ARG
  PRD|PS~4600^^HL70454||^^^B||||30123456789^CU
  PID|0||1234567890ABC^^^&112233&IIN^HC||unknown
  PR1|1||903401^^99DH
  AUT||112233||||||1|0
  PR1|2||904620^^99DH
  AUT||112233||||||1|0
  """
  @spec write(Message.t(), [write_option()]) :: iodata
  def write(message, options \\ []), do: Message.write(Writer.new(options), message)
  @doc """
  Retrieve the segment ID from a segment.
  ## Return value
  If the argument is an `HL7.Segment.t()` the function returns a binary with
  the segment ID; otherwise it returns `nil`.
  ## Examples
  iex> aut = HL7.segment(message, "AUT")
  iex> "AUT" = HL7.segment_id(aut)
  """
  @spec segment_id(Segment.t()) :: segment_id()
  defdelegate segment_id(segment), to: Segment, as: :id
  @doc """
  Return the first repetition of a segment within a message.
  ## Return value
  If a segment with the passed `segment_id` can be found in the `message`
  then the function returns the segment; otherwise it returns `nil`.
  ## Examples
  iex> pr1 = HL7.segment(message, "PR1")
  iex> 1 = pr1.set_id
  """
  @spec segment(Message.t(), segment_id()) :: Segment.t() | nil
  defdelegate segment(message, segment_id), to: Message
  @doc """
  Return the nth repetition (0-based) of a segment within a message.
  ## Return value
  If the corresponding `repetition` of a segment with the passed `segment_id`
  is present in the `message` then the function returns the segment; otherwise
  it returns `nil`.
  ## Examples
  iex> pr1 = HL7.segment(message, "PR1", 0)
  iex> 1 = pr1.set_id
  iex> pr1 = HL7.segment(message, "PR1", 1)
  iex> 2 = pr1.set_id
  """
  @spec segment(Message.t(), segment_id(), repetition()) :: Segment.t() | nil
  defdelegate segment(message, segment_id, repetition), to: Message
  @doc """
  Return the first grouping of segments with the specified segment IDs.
  In HL7 messages sometimes some segments are immediately followed by other
  segments within the message. This function was created to help find those
  "grouped segments".
  For example, the `PR1` segment is sometimes followed by some other segments
  (e.g. `OBX`, `AUT`, etc.) to include observations and other related
  information for a practice. Note that there might be multiple segment
  groupings in a message.
  ## Return value
  A list of segments corresponding to the segment IDs that were passed. The
  list might not include all of the requested segments if they were not
  present in the message. The function will stop as soon as it finds a segment
  that does not belong to the passed sequence.
  ## Examples
  iex> [pr1, aut] = HL7.paired_segments(message, ["PR1", "AUT"])
  """
  @spec paired_segments(Message.t(), [segment_id()]) :: [Segment.t()]
  defdelegate paired_segments(message, segment_ids), to: Message
  @doc """
  Return the nth (0-based) grouping of segments with the specified segment IDs.
  In HL7 messages sometimes some segments are immediately followed by other
  segments within the message. This function was created to help find those
  "grouped segments".
  For example, the `PR1` segment is sometimes followed by some other segments
  (e.g. `OBX`, `AUT`, etc.) to include observations and other related
  information for a practice. Note that there might be multiple segment
  groupings in a message.
  ## Return value
  A list of segments corresponding to the segment IDs that were passed. The
  list might not include all of the requested segments if they were not
  present in the message. The function will stop as soon as it finds a segment
  that does not belong to the passed sequence.
  ## Examples
  iex> [pr1, aut] = HL7.paired_segments(message, ["PR1", "AUT"], 0)
  iex> [pr1, aut] = HL7.paired_segments(message, ["PR1", "AUT"], 1)
  iex> [] = HL7.paired_segments(message, ["PR1", "AUT"], 2)
  iex> [aut] = HL7.paired_segments(message, ["PR1", "OBX"], 1)
  """
  # NOTE(review): the last example's `[aut]` binding looks inconsistent with
  # the described semantics (matching ["PR1", "OBX"] against a PR1/AUT pair
  # should stop at the PR1) — verify against HL7.Message.paired_segments/3.
  @spec paired_segments(Message.t(), [segment_id()], repetition()) :: [Segment.t()]
  defdelegate paired_segments(message, segment_ids, repetition), to: Message
  @doc """
  It skips over the first `repetition` groups of paired segment and invokes
  `fun` for each subsequent group of paired segments in the `message`. It
  passes the following arguments to `fun` on each call:
  - list of segments found that correspond to the group.
  - index of the group of segments in the `message` (0-based).
  - accumulator `acc` with the incremental results returned by `fun`.
  In HL7 messages sometimes some segments are immediately followed by other
  segments within the message. This function was created to easily process
  those "paired segments".
  For example, the `PR1` segment is sometimes followed by some other segments
  (e.g. `OBX`, `AUT`, etc.) to include observations and other related
  information for a procedure. Note that there might be multiple segment
  groupings in a message.
  ## Arguments
  * `message`: list of segments containing a decoded HL7 message.
  * `segment_ids`: list of segment IDs that define the group of segments to
  retrieve.
  * `repetition`: index of the group of segments to retrieve (0-based); it also
  corresponds to the number of groups to skip.
  * `acc`: term containing the initial value of the accumulator to be passed to
  the `fun` callback.
  * `fun`: callback function receiving a group of segments, the index of the
  group in the message and the accumulator.
  ## Return value
  The accumulator returned by `fun` in its last invocation.
  ## Examples
  iex> HL7.reduce_paired_segments(message, ["PR1", "AUT"], 0, [], fn segments, index, acc ->
  segment_ids = for segment <- segments, do: HL7.segment_id(segment)
  [{index, segment_ids} | acc]
  end)
  [{1, ["PR1", "AUT"]}, {0, ["PR1", "AUT"]}]
  """
  @spec reduce_paired_segments(
  Message.t(),
  [segment_id()],
  repetition(),
  acc :: term,
  ([Segment.t()], repetition(), acc :: term -> acc :: term)
  ) :: acc :: term
  defdelegate reduce_paired_segments(message, segment_ids, repetition, acc, fun), to: Message
@doc """
Return the number of segments with a specified segment ID in an HL7 message.
## Examples
iex> 2 = HL7.segment_count(message, "PR1")
iex> 0 = HL7.segment_count(message, "OBX")
"""
@spec segment_count(Message.t(), segment_id()) :: non_neg_integer
defdelegate segment_count(message, segment_id), to: Message
@doc """
Deletes the first repetition of a segment in a message
## Examples
iex> HL7.delete(message, "NTE")
"""
@spec delete(Message.t(), segment_id()) :: Message.t()
defdelegate delete(message, segment_id), to: Message
@doc """
Deletes the given repetition (0-based) of a segment in a message
## Examples
iex> HL7.delete(message, "NTE", 0)
"""
@spec delete(Message.t(), segment_id(), repetition()) :: Message.t()
defdelegate delete(message, segment_id, repetition), to: Message
@doc """
Inserts a segment or group of segments before the first repetition of an
existing segment in a message.
## Arguments
* `message`: the `HL7.message` where the segment/s will be inserted.
* `segment_id`: the segment ID of a segment that should be present in the
`message`.
* `segment`: the segment or list of segments that will be inserted
## Return values
If a segment with the `segment_id` was present, the function will return a
new message with the inserted segments. If not, it will return the original
message
## Examples
iex> alias HL7.Segment.MSA
iex> ack = %MSA{ack_code: "AA", message_control_id: "1234"}
iex> HL7.insert_before(message, "ERR", msa)
"""
@spec insert_before(Message.t(), segment_id(), Segment.t() | [Segment.t()]) :: Message.t()
defdelegate insert_before(message, segment_id, segment), to: Message
@doc """
Inserts a segment or group of segments before the given repetition of an
existing segment in a message.
## Arguments
* `message`: the `HL7.message` where the segment/s will be inserted.
* `segment_id`: the segment ID of a segment that should be present in the
`message`.
* `repetition`: the repetition (0-based) of the `segment_id` in the `message`.
* `segment`: the segment or list of segments that will be inserted
## Return values
If a segment with the `segment_id` was present with the given `repetition`,
the function will return a new message with the inserted segments. If not,
it will return the original message
## Examples
iex> alias HL7.Segment.MSA
iex> ack = %MSA{ack_code: "AA", message_control_id: "1234"}
iex> HL7.insert_before(message, "ERR", 0, msa)
"""
@spec insert_before(Message.t(), segment_id(), repetition(), Segment.t() | [Segment.t()]) ::
Message.t()
defdelegate insert_before(message, segment_id, repetition, segment), to: Message
@doc """
Inserts a segment or group of segments after the first repetition of an
existing segment in a message.
## Arguments
* `message`: the `HL7.message` where the segment/s will be inserted.
* `segment_id`: the segment ID of a segment that should be present in the
`message`.
* `segment`: the segment or list of segments that will be inserted
## Return values
If a segment with the `segment_id` was present, the function will return a
new message with the inserted segments. If not, it will return the original
message
## Examples
iex> alias HL7.Segment.MSA
iex> ack = %MSA{ack_code: "AA", message_control_id: "1234"}
iex> HL7.insert_after(message, "MSH", msa)
"""
@spec insert_after(Message.t(), segment_id(), Segment.t() | [Segment.t()]) :: Message.t()
defdelegate insert_after(message, segment_id, segment), to: Message
@doc """
Inserts a segment or group of segments after the given repetition of an
existing segment in a message.
## Arguments
* `message`: the `HL7.message` where the segment/s will be inserted.
* `segment_id`: the segment ID of a segment that should be present in the
`message`.
* `repetition`: the repetition (0-based) of the `segment_id` in the `message`.
* `segment`: the segment or list of segments that will be inserted
## Return values
If a segment with the `segment_id` was present with the given `repetition`,
the function will return a new message with the inserted segments. If not,
it will return the original message
## Examples
iex> alias HL7.Segment.MSA
iex> ack = %MSA{ack_code: "AA", message_control_id: "1234"}
iex> HL7.insert_after(message, "MSH", 0, msa)
"""
@spec insert_after(Message.t(), segment_id(), repetition(), Segment.t() | [Segment.t()]) :: Message.t()
defdelegate insert_after(message, segment_id, repetition, segment), to: Message
@doc """
Appends a segment or segments onto the end of a message
## Arguments
* `message`: the `HL7.message` where the segment/s will be appended.
* `segment`: the segment or list of segments that will be appended
## Return values
Return a new message with the appended segments.
## Examples
iex> alias HL7.Segment.MSA
iex> ack = %MSA{ack_code: "AA", message_control_id: "1234"}
iex> HL7.Message.append(message, msa)
"""
@spec append(Message.t(), Segment.t() | [Segment.t()]) :: Message.t()
defdelegate append(message, segment), to: Message
@doc """
Replaces the first repetition of an existing segment in a message.
## Arguments
* `message`: the `HL7.message` where the segment/s will be inserted.
* `segment_id`: the segment ID of a segment that should be present in the
`message`.
* `segment`: the segment or list of segments that will replace the existing
one.
## Return values
If a segment with the `segment_id` was present, the function will return a
new message with the replaced segments. If not, it will return the original
message
## Examples
iex> alias HL7.Segment.MSA
iex> ack = %MSA{ack_code: "AA", message_control_id: "1234"}
iex> HL7.replace(message, "MSA", msa)
"""
@spec replace(Message.t(), segment_id(), Segment.t()) :: Message.t()
defdelegate replace(message, segment_id, segment), to: Message
@doc """
Replaces the given repetition of an existing segment in a message.
## Arguments
* `message`: the `HL7.message` where the segment/s will be inserted.
* `segment_id`: the segment ID of a segment that should be present in the
`message`.
* `repetition`: the repetition (0-based) of the `segment_id` in the `message`.
* `segment`: the segment or list of segments that will replace the existing
one.
## Return values
If a segment with the `segment_id` was present with the given `repetition`,
the function will return a new message with the replaced segments. If not,
it will return the original message.
## Examples
iex> alias HL7.Segment.MSA
iex> ack = %MSA{ack_code: "AA", message_control_id: "1234"}
iex> HL7.replace(message, "MSA", 0, msa)
"""
@spec replace(Message.t(), segment_id(), repetition(), Segment.t()) :: Message.t()
defdelegate replace(message, segment_id, repetition, segment), to: Message
@doc """
Escape a string that may contain separators using the HL7 escaping rules.
## Arguments
* `value`: a string to escape; it may or may not contain separator
characters.
* `options`: keyword list with the escape options; these are:
* `separators`: a tuple containing the item separators to be used when
generating the message as returned by `HL7.Codec.set_separators/1`.
Defaults to `HL7.Codec.separators`.
* `escape_char`: character to be used as escape delimiter. Defaults to `?\\\\ `
(backlash).
## Examples
iex> "ABCDEF" = HL7.escape("ABCDEF")
iex> "ABC\\\\F\\\\DEF\\\\F\\\\GHI" = HL7.escape("ABC|DEF|GHI", separators: HL7.Codec.separators())
"""
@spec escape(binary, options :: Keyword.t()) :: binary
def escape(value, options \\ []) do
separators = Keyword.get(options, :separators, Codec.separators())
escape_char = Keyword.get(options, :escape_char, ?\\)
Codec.escape(value, separators, escape_char)
end
@doc """
Convert an escaped string into its original value.
## Arguments
* `value`: a string to unescape; it may or may not contain escaped characters.
* `options`: keyword list with the escape options; these are:
* `separators`: a tuple containing the item separators to be used when
generating the message as returned by `HL7.Codec.set_separators/1`.
Defaults to `HL7.Codec.separators`.
* `escape_char`: character to be used as escape delimiter. Defaults to `?\\\\ `
(backlash).
## Examples
iex> "ABCDEF" = HL7.unescape("ABCDEF")
iex> "ABC|DEF|GHI" = HL7.unescape("ABC\\\\F\\\\DEF\\\\F\\\\GHI", escape_char: ?\\)
"""
@spec unescape(binary, options :: Keyword.t()) :: binary
def unescape(value, options \\ []) do
separators = Keyword.get(options, :separators, Codec.separators())
escape_char = Keyword.get(options, :escape_char, ?\\)
Codec.unescape(value, separators, escape_char)
end
@vertical_tab 0x0b
@file_separator 0x1c
@carriage_return 0x0d
@doc """
Add MLLP framing to an already encoded HL7 message.
An MLLP-framed message carries a one byte vertical tab (0x0b) control code
as header and a two byte trailer consisting of a file separator (0x1c) and
a carriage return (0x0d) control code.
## Arguments
* `buffer`: binary or iolist containing an encoded HL7 message as returned
by `HL7.write/2`.
"""
@spec to_mllp(buffer :: iodata) :: iolist
def to_mllp(buffer) when is_binary(buffer) or is_list(buffer) do
[@vertical_tab, buffer, @file_separator, @carriage_return]
end
@doc """
Remove MLLP framing from an already encoded HL7 message.
An MLLP-framed message carries a one byte vertical tab (0x0b) control code
as header and a two byte trailer consisting of a file separator (0x1c) and
a carriage return (0x0d) control code.
## Arguments
* `buffer`: binary or iolist containing an MLLP-framed HL7 message as
returned by `HL7.to_mllp/1`.
## Return value
Returns the encoded message with the MLLP framing removed.
"""
@spec from_mllp(buffer :: iodata) ::
{:ok, msg_buffer :: iodata} | :incomplete | {:error, reason :: term}
def from_mllp([@vertical_tab, msg_buffer, @file_separator, @carriage_return]) do
{:ok, msg_buffer}
end
def from_mllp([@vertical_tab | tail]) do
case Enum.reverse(tail) do
[@carriage_return, @file_separator | msg_iolist] ->
{:ok, Enum.reverse(msg_iolist)}
_ ->
:incomplete
end
end
def from_mllp(buffer) when is_binary(buffer) do
msg_len = byte_size(buffer) - 3
case buffer do
<<@vertical_tab, msg_buffer::binary-size(msg_len), @file_separator, @carriage_return>>
when msg_len > 0 ->
{:ok, msg_buffer}
<<@vertical_tab, _tail::binary>> ->
:incomplete
_ ->
{:error, :bad_mllp_framing}
end
end
@doc """
Remove MLLP framing from an already encoded HL7 message.
An MLLP-framed message carries a one byte vertical tab (0x0b) control code
as header and a two byte trailer consisting of a file separator (0x1c) and
a carriage return (0x0d) control code.
## Arguments
* `buffer`: binary or iolist containing an MLLP-framed HL7 message as
returned by `HL7.to_mllp/1`.
## Return value
Returns the encoded message with the MLLP framing removed or raises an
`HL7.ReadError` exception in case of error.
"""
@spec from_mllp!(buffer :: iodata) :: msg_buffer :: iodata | no_return
def from_mllp!(buffer) do
case from_mllp(buffer) do
{:ok, msg_buffer} -> msg_buffer
:incomplete -> raise HL7.ReadError, :incomplete
{:error, reason} -> raise HL7.ReadError, reason
end
end
end
|
lib/ex_hl7.ex
| 0.871434 | 0.487368 |
ex_hl7.ex
|
starcoder
|
defmodule Quetzal.Graph do
  # FIX: this overview was declared with `@doc`, which does not document the
  # module — it silently attaches to the next function definition (here, the
  # dynamically generated `graph/2`). `@moduledoc` is the correct attribute.
  @moduledoc """
  The base module to make graph components based on plotly.js
  In order to make components provide two keywords, the first one contains the
  div properties and the second the keywords to use as data in PlotlyJS, with this
  you are able to build any graph supported into PlotlyJS.
  # Example
  [{Quetzal.Graph, [id: "mygraph"], [type: "pie", values: [1,2], labels: ["A", "B"]]}]
  The above code can be set as a single component in the live view to render as graph.
  """
  require EEx

  # Generate a `graph/2` entry point for every supported component kind; kept
  # as a list so new plotly component kinds can be added in one place.
  graph = fn def_graph ->
    def unquote(def_graph)(component_opts, options) do
      plotly_graph(unquote(def_graph), component_opts, options)
    end
  end
  Enum.map(~w(graph)a, graph)

  @doc """
  Returns the component that holds the graph and their data holding into a single EEx template
  """
  def plotly_graph(:graph, component_opts, options) do
    # options should be a valid JSON to encode with Jason, so, for example, all plotlyjs
    # data examples will be passed as json here and rendered by EEx template.
    opts = options |> Enum.into(%{})
    opts = [opts] |> Jason.encode!
    build_graph(component_opts, opts)
  end
  # Unknown component kinds are rejected rather than rendered.
  def plotly_graph(_, _, _) do
    :error
  end

  # Renders the container div plus the bootstrap <script> for the given id.
  defp build_graph(component_opts, options) do
    id = Keyword.get(component_opts, :id, "#")
    style = Keyword.get(component_opts, :style, "")
    class = Keyword.get(component_opts, :class, "")
    """
    #{ html_tag(id, style, class, options) }
    #{ js_tag(id) }
    """
  end

  # EEx template for div to hold graph; the encoded plotly options ride along
  # in the `options` attribute so the JS hook can read them back.
  EEx.function_from_string(
    :def,
    :html_tag,
    ~s[<div phx-hook="Graph" id="<%= id %>" style="<%= style %>" class="<%= class %>" options='<%= options %>'></div>],
    ~w(id style class options)a
  )
  # EEx template for script to create graph
  EEx.function_from_string(
    :def,
    :js_tag,
    ~s[<script>
      fn_<%= id %> = new Function("ID = document.getElementById('<%= id %>');" +
                                  "OPTS = ID.getAttribute('options');" +
                                  "Plotly.react(ID, JSON.parse(OPTS));");
      fn_<%= id %>()
    </script>],
    ~w(id)a
  )
end
|
lib/graph/quetzal_graph.ex
| 0.746139 | 0.60964 |
quetzal_graph.ex
|
starcoder
|
defmodule BSV.Tx do
  @moduledoc """
  A Tx is a data structure representing a Bitcoin transaction.
  A Tx consists of a version number, a list of inputs, list of outputs, and a
  locktime value.
  A Bitcoin transaction is used to transfer custody of Bitcoins. It can also be
  used for smart contracts, recording and timestamping data, and many other
  functionalities.
  The Tx module is used for parsing and serialising transaction data. Use the
  `BSV.TxBuilder` module for building transactions.
  """
  alias BSV.{Hash, OutPoint, Serializable, TxIn, TxOut, VarInt}
  import BSV.Util, only: [decode: 2, encode: 2, reverse_bin: 1]

  defstruct version: 1, inputs: [], outputs: [], lock_time: 0

  @typedoc "Tx struct"
  @type t() :: %__MODULE__{
          version: non_neg_integer(),
          inputs: list(TxIn.t()),
          outputs: list(TxOut.t()),
          lock_time: non_neg_integer()
        }

  @typedoc """
  Tx hash
  Result of hashing the transaction data through the SHA-256 algorithm twice.
  """
  @type hash() :: <<_::256>>

  @typedoc """
  TXID
  Result of reversing and hex-encoding the `t:BSV.Tx.hash/0`.
  """
  @type txid() :: String.t()

  @doc """
  Adds the given `t:BSV.TxIn.t/0` to the transaction.
  """
  @spec add_input(t(), TxIn.t()) :: t()
  def add_input(%__MODULE__{} = tx, %TxIn{} = txin),
    do: update_in(tx.inputs, & &1 ++ [txin])

  @doc """
  Adds the given `t:BSV.TxOut.t/0` to the transaction.
  """
  @spec add_output(t(), TxOut.t()) :: t()
  def add_output(%__MODULE__{} = tx, %TxOut{} = txout),
    do: update_in(tx.outputs, & &1 ++ [txout])

  @doc """
  Returns true if the given `t:BSV.Tx.t/0` is a coinbase transaction (the first
  transaction in a block, containing the miner block reward).
  """
  @spec is_coinbase?(t()) :: boolean()
  # A coinbase has exactly one input whose outpoint is the null outpoint.
  def is_coinbase?(%__MODULE__{inputs: [txin]}),
    do: OutPoint.is_null?(txin.outpoint)

  def is_coinbase?(%__MODULE__{}), do: false

  @doc """
  Parses the given binary into a `t:BSV.Tx.t/0`.
  Returns the result in an `:ok` / `:error` tuple pair.
  ## Options
  The accepted options are:
  * `:encoding` - Optionally decode the binary with either the `:base64` or `:hex` encoding scheme.
  """
  @spec from_binary(binary(), keyword()) :: {:ok, t()} | {:error, term()}
  def from_binary(data, opts \\ []) when is_binary(data) do
    encoding = Keyword.get(opts, :encoding)
    with {:ok, data} <- decode(data, encoding),
         {:ok, tx, _rest} <- Serializable.parse(%__MODULE__{}, data)
    do
      {:ok, tx}
    end
  end

  @doc """
  Parses the given binary into a `t:BSV.Tx.t/0`.
  As `from_binary/2` but returns the result or raises an exception.
  """
  @spec from_binary!(binary(), keyword()) :: t()
  def from_binary!(data, opts \\ []) when is_binary(data) do
    case from_binary(data, opts) do
      {:ok, tx} ->
        tx
      {:error, error} ->
        raise BSV.DecodeError, error
    end
  end

  @doc """
  Returns the `t:BSV.Tx.hash/0` of the given transaction.
  """
  @spec get_hash(t()) :: hash()
  def get_hash(%__MODULE__{} = tx) do
    tx
    |> to_binary()
    |> Hash.sha256_sha256()
  end

  @doc """
  Returns the number of bytes of the given `t:BSV.Tx.t/0`.
  """
  @spec get_size(t()) :: non_neg_integer()
  def get_size(%__MODULE__{} = tx),
    do: tx |> to_binary() |> byte_size()

  @doc """
  Returns the `t:BSV.Tx.txid/0` of the given transaction.
  """
  @spec get_txid(t()) :: txid()
  # TXIDs are displayed big-endian, hence the byte reversal before hex-encoding.
  def get_txid(%__MODULE__{} = tx) do
    tx
    |> get_hash()
    |> reverse_bin()
    |> encode(:hex)
  end

  @doc """
  Serialises the given `t:BSV.Tx.t/0` into a binary.
  ## Options
  The accepted options are:
  * `:encoding` - Optionally encode the binary with either the `:base64` or `:hex` encoding scheme.
  """
  # FIX: the spec previously declared arity 1 (`to_binary(t()) :: binary()`),
  # which does not match this 2-arity function with a default argument.
  # FIX: the doc previously said this serialises a `t:BSV.TxIn.t/0`.
  @spec to_binary(t(), keyword()) :: binary()
  def to_binary(%__MODULE__{} = tx, opts \\ []) do
    encoding = Keyword.get(opts, :encoding)
    tx
    |> Serializable.serialize()
    |> encode(encoding)
  end

  defimpl Serializable do
    @impl true
    def parse(tx, data) do
      # NOTE(review): the final clause uses `=` (not `<-`), so a truncated
      # buffer raises MatchError instead of returning an error tuple;
      # preserved as-is since callers may rely on it.
      with <<version::little-32, data::binary>> <- data,
           {:ok, inputs, data} <- VarInt.parse_items(data, TxIn),
           {:ok, outputs, data} <- VarInt.parse_items(data, TxOut),
           <<lock_time::little-32, rest::binary>> = data
      do
        {:ok, struct(tx, [
          version: version,
          inputs: inputs,
          outputs: outputs,
          lock_time: lock_time
        ]), rest}
      end
    end

    @impl true
    def serialize(%{version: version, inputs: inputs, outputs: outputs, lock_time: lock_time}) do
      # Each list is prefixed with its VarInt-encoded length, per the Bitcoin
      # wire format.
      inputs_data = Enum.reduce(inputs, VarInt.encode(length(inputs)), fn input, data ->
        data <> Serializable.serialize(input)
      end)
      outputs_data = Enum.reduce(outputs, VarInt.encode(length(outputs)), fn output, data ->
        data <> Serializable.serialize(output)
      end)
      <<
        version::little-32,
        inputs_data::binary,
        outputs_data::binary,
        lock_time::little-32
      >>
    end
  end
end
|
lib/bsv/tx.ex
| 0.914844 | 0.719186 |
tx.ex
|
starcoder
|
defmodule MongooseICE.UDP do
  @moduledoc """
  UDP STUN server
  The easiest way to start a server is to spawn it under MongooseICE's
  supervision tree providing the following configuration:
  config :mongooseice, :servers,
  udp: [ip: {192,168, 1, 21}, port: 32323]
  ...or hook it up to your supervision tree:
  children = [
  MongooseICE.UDP.child_spec([port: 3478]),
  MongooseICE.UDP.child_spec([port: 1234]),
  ...
  ]
  You can also start a server under MongooseICE's supervision tree
  using `start/1`.
  ## Options
  All methods of starting a server accept the same configuration options
  passed as a keyword list:
  * `:port` - the port which server should be bound to
  * `:ip` - the address of an interface which server should listen on
  * `:relay_ip` - the address of an interface which relay should listen on
  * `:realm` - public name of the server used as context of authorization.
  Does not have to be same as the server's hostname, yet in very basic configuration it may be.
  You may start multiple UDP servers at a time.
  """

  @type socket :: :gen_udp.socket
  @type server_opts :: [option]
  @type option :: {:ip, MongooseICE.ip} | {:port, MongooseICE.portn} |
                  {:relay_ip, MongooseICE.ip} | {:realm, String.t}

  @default_opts [ip: {127, 0, 0, 1}, port: 3478, relay_ip: {127, 0, 0, 1},
                 realm: "localhost"]
  @allowed_opts [:ip, :port, :relay_ip, :realm]

  @doc """
  Starts UDP STUN server under MongooseICE's supervisor
  Accepts the same options as `start_link/1`.
  """
  @spec start(server_opts) :: Supervisor.on_start_child
  def start(opts \\ @default_opts) do
    child = child_spec(opts)
    Supervisor.start_child(MongooseICE.Supervisor, child)
  end

  @doc """
  Stops UDP server started with start/1
  It accepts the *port number* server is running on as argument
  """
  @spec stop(MongooseICE.portn) :: :ok | :error
  def stop(port) do
    name = base_name(port)
    # The child must be terminated before it can be deleted from the
    # supervisor; both steps have to succeed for the stop to be reported :ok.
    with :ok <- Supervisor.terminate_child(MongooseICE.Supervisor, name),
         :ok <- Supervisor.delete_child(MongooseICE.Supervisor, name) do
      :ok
    else
      _ -> :error
    end
  end

  @doc """
  Starts UDP STUN server with given options
  Default options are:
  #{inspect @default_opts}
  Links the server to the calling process.
  """
  @spec start_link(server_opts) :: Supervisor.on_start
  def start_link(opts \\ @default_opts) do
    opts = normalize_opts(opts)
    MongooseICE.UDP.Supervisor.start_link(opts)
  end

  @doc """
  Returns child specification of UDP server which can be hooked
  up into supervision tree
  """
  @spec child_spec(server_opts) :: Supervisor.Spec.spec
  # NOTE(review): Supervisor.Spec is deprecated in modern Elixir; kept for
  # compatibility with the rest of this codebase.
  def child_spec(opts) do
    opts = normalize_opts(opts)
    name = base_name(opts[:port])
    Supervisor.Spec.supervisor(MongooseICE.UDP.Supervisor, [opts], id: name)
  end

  # Fill in defaults and drop any unknown option keys.
  defp normalize_opts(opts) do
    @default_opts
    |> Keyword.merge(opts)
    |> Keyword.take(@allowed_opts)
  end

  @doc false
  def base_name(port) do
    "#{__MODULE__}.#{port}" |> String.to_atom()
  end

  @doc false
  def sup_name(base_name) do
    build_name(base_name, "Supervisor")
  end

  @doc false
  def receiver_name(base_name) do
    build_name(base_name, "Receiver")
  end

  @doc false
  def dispatcher_name(base_name) do
    build_name(base_name, "Dispatcher")
  end

  @doc false
  def worker_sup_name(base_name) do
    build_name(base_name, "WorkerSupervisor")
  end

  # FIX: removed the `@doc false` that preceded this defp — documentation
  # attributes on private functions are discarded with a compiler warning.
  defp build_name(base, suffix) do
    "#{base}.#{suffix}" |> String.to_atom()
  end
end
|
lib/mongooseice/udp.ex
| 0.792785 | 0.410284 |
udp.ex
|
starcoder
|
defmodule AWS.NetworkFirewall do
@moduledoc """
This is the API Reference for AWS Network Firewall.
This guide is for developers who need detailed information about the Network
Firewall API actions, data types, and errors.
* The REST API requires you to handle connection details, such as
calculating signatures, handling request retries, and error handling. For
general information about using the AWS REST APIs, see [AWS APIs](https://docs.aws.amazon.com/general/latest/gr/aws-apis.html).
To access Network Firewall using the REST API endpoint:
`https://network-firewall.<region>.amazonaws.com `
* Alternatively, you can use one of the AWS SDKs to access an API
that's tailored to the programming language or platform that you're using. For
more information, see [AWS SDKs](http://aws.amazon.com/tools/#SDKs). * For descriptions of Network Firewall features, including
step-by-step instructions on how to use them through the Network Firewall
console, see the [Network Firewall Developer
Guide](https://docs.aws.amazon.com/network-firewall/latest/developerguide/).
Network Firewall is a stateful, managed, network firewall and intrusion
detection and prevention service for Amazon Virtual Private Cloud (Amazon VPC).
With Network Firewall, you can filter traffic at the perimeter of your VPC. This
includes filtering traffic going to and coming from an internet gateway, NAT
gateway, or over VPN or AWS Direct Connect. Network Firewall uses rules that are
compatible with Suricata, a free, open source intrusion detection system (IDS)
engine. For information about Suricata, see the [Suricata website](https://suricata-ids.org/).
You can use Network Firewall to monitor and protect your VPC traffic in a number
of ways. The following are just a few examples:
* Allow domains or IP addresses for known AWS service endpoints,
such as Amazon S3, and block all other forms of traffic.
* Use custom lists of known bad domains to limit the types of domain
names that your applications can access.
* Perform deep packet inspection on traffic entering or leaving your
VPC.
* Rate limit traffic going from AWS to on-premises IP destinations.
* Use stateful protocol detection to filter protocols like HTTPS,
regardless of the port used.
To enable Network Firewall for your VPCs, you perform steps in both Amazon VPC
and in Network Firewall. For information about using Amazon VPC, see [Amazon VPC User Guide](https://docs.aws.amazon.com/vpc/latest/userguide/).
To start using Network Firewall, do the following:
1. (Optional) If you don't already have a VPC that you want to
protect, create it in Amazon VPC.
2. In Amazon VPC, in each Availability Zone where you want to have a
firewall endpoint, create a subnet for the sole use of Network Firewall.
3. In Network Firewall, create stateless and stateful rule groups,
to define the components of the network traffic filtering behavior that you want
your firewall to have.
4. In Network Firewall, create a firewall policy that uses your rule
groups and specifies additional default traffic filtering behavior.
5. In Network Firewall, create a firewall and specify your new
firewall policy and VPC subnets. Network Firewall creates a firewall endpoint in
each subnet that you specify, with the behavior that's defined in the firewall
policy.
6. In Amazon VPC, use ingress routing enhancements to route traffic
through the new firewall endpoints.
"""
alias AWS.Client
alias AWS.Request
# Static service descriptor consumed by `AWS.Request` when signing and
# dispatching calls; values mirror the service's published API definition.
def metadata do
  %AWS.ServiceMetadata{
    abbreviation: "Network Firewall",
    api_version: "2020-11-12",
    content_type: "application/x-amz-json-1.0",
    credential_scope: nil,
    endpoint_prefix: "network-firewall",
    global?: false,
    protocol: "json",
    service_id: "Network Firewall",
    signature_version: "v4",
    signing_name: "network-firewall",
    target_prefix: "NetworkFirewall_20201112"
  }
end
@doc """
Associates a `FirewallPolicy` to a `Firewall`.
A firewall policy defines how to monitor and manage your VPC network traffic,
using a collection of inspection rule groups and other settings. Each firewall
requires one firewall policy association, and you can use the same firewall
policy for multiple firewalls.
"""
def associate_firewall_policy(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "AssociateFirewallPolicy", input, options)
end
@doc """
Associates the specified subnets in the Amazon VPC to the firewall.
You can specify one subnet for each of the Availability Zones that the VPC
spans.
This request creates an AWS Network Firewall firewall endpoint in each of the
subnets. To enable the firewall's protections, you must also modify the VPC's
route tables for each subnet's Availability Zone, to redirect the traffic that's
coming into and going out of the zone through the firewall endpoint.
"""
def associate_subnets(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "AssociateSubnets", input, options)
end
@doc """
Creates an AWS Network Firewall `Firewall` and accompanying `FirewallStatus` for
a VPC.
The firewall defines the configuration settings for an AWS Network Firewall
firewall. The settings that you can define at creation include the firewall
policy, the subnets in your VPC to use for the firewall endpoints, and any tags
that are attached to the firewall AWS resource.
After you create a firewall, you can provide additional settings, like the
logging configuration.
To update the settings for a firewall, you use the operations that apply to the
settings themselves, for example `UpdateLoggingConfiguration`,
`AssociateSubnets`, and `UpdateFirewallDeleteProtection`.
To manage a firewall's tags, use the standard AWS resource tagging operations,
`ListTagsForResource`, `TagResource`, and `UntagResource`.
To retrieve information about firewalls, use `ListFirewalls` and
`DescribeFirewall`.
"""
def create_firewall(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateFirewall", input, options)
end
@doc """
Creates the firewall policy for the firewall according to the specifications.
An AWS Network Firewall firewall policy defines the behavior of a firewall, in a
collection of stateless and stateful rule groups and other settings. You can use
one firewall policy for multiple firewalls.
"""
def create_firewall_policy(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateFirewallPolicy", input, options)
end
@doc """
Creates the specified stateless or stateful rule group, which includes the rules
for network traffic inspection, a capacity setting, and tags.
You provide your rule group specification in your request using either
`RuleGroup` or `Rules`.
"""
def create_rule_group(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateRuleGroup", input, options)
end
@doc """
Deletes the specified `Firewall` and its `FirewallStatus`.
This operation requires the firewall's `DeleteProtection` flag to be `FALSE`.
You can't revert this operation.
You can check whether a firewall is in use by reviewing the route tables for the
Availability Zones where you have firewall subnet mappings. Retrieve the subnet
mappings by calling `DescribeFirewall`. You define and update the route tables
through Amazon VPC. As needed, update the route tables for the zones to remove
the firewall endpoints. When the route tables no longer use the firewall
endpoints, you can remove the firewall safely.
To delete a firewall, remove the delete protection if you need to using
`UpdateFirewallDeleteProtection`, then delete the firewall by calling
`DeleteFirewall`.
"""
def delete_firewall(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteFirewall", input, options)
end
@doc """
Deletes the specified `FirewallPolicy`.
"""
def delete_firewall_policy(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteFirewallPolicy", input, options)
end
@doc """
Deletes a resource policy that you created in a `PutResourcePolicy` request.
"""
def delete_resource_policy(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteResourcePolicy", input, options)
end
@doc """
Deletes the specified `RuleGroup`.
"""
def delete_rule_group(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteRuleGroup", input, options)
end
@doc """
Returns the data objects for the specified firewall.
"""
def describe_firewall(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeFirewall", input, options)
end
@doc """
Returns the data objects for the specified firewall policy.
"""
def describe_firewall_policy(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeFirewallPolicy", input, options)
end
@doc """
Returns the logging configuration for the specified firewall.
"""
def describe_logging_configuration(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeLoggingConfiguration", input, options)
end
@doc """
Retrieves a resource policy that you created in a `PutResourcePolicy` request.
"""
def describe_resource_policy(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeResourcePolicy", input, options)
end
@doc """
Returns the data objects for the specified rule group.
"""
def describe_rule_group(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeRuleGroup", input, options)
end
@doc """
Removes the specified subnet associations from the firewall.
This removes the firewall endpoints from the subnets and removes any network
filtering protections that the endpoints were providing.
"""
def disassociate_subnets(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DisassociateSubnets", input, options)
end
@doc """
Retrieves the metadata for the firewall policies that you have defined.
Depending on your setting for max results and the number of firewall policies, a
single call might not return the full list.
"""
def list_firewall_policies(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListFirewallPolicies", input, options)
end
@doc """
Retrieves the metadata for the firewalls that you have defined.
If you provide VPC identifiers in your request, this returns only the firewalls
for those VPCs.
Depending on your setting for max results and the number of firewalls, a single
call might not return the full list.
"""
def list_firewalls(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListFirewalls", input, options)
end
@doc """
Retrieves the metadata for the rule groups that you have defined.
Depending on your setting for max results and the number of rule groups, a
single call might not return the full list.
"""
def list_rule_groups(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListRuleGroups", input, options)
end
@doc """
Retrieves the tags associated with the specified resource.
Tags are key:value pairs that you can use to categorize and manage your
resources, for purposes like billing. For example, you might set the tag key to
"customer" and the value to the customer name or ID. You can specify one or more
tags to add to each AWS resource, up to 50 tags for a resource.
You can tag the AWS resources that you manage through AWS Network Firewall:
firewalls, firewall policies, and rule groups.
"""
def list_tags_for_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListTagsForResource", input, options)
end
@doc """
Creates or updates an AWS Identity and Access Management policy for your rule
group or firewall policy.
Use this to share rule groups and firewall policies between accounts. This
operation works in conjunction with the AWS Resource Access Manager (RAM)
service to manage resource sharing for Network Firewall.
Use this operation to create or update a resource policy for your rule group or
firewall policy. In the policy, you specify the accounts that you want to share
the resource with and the operations that you want the accounts to be able to
perform.
When you add an account in the resource policy, you then run the following
Resource Access Manager (RAM) operations to access and accept the shared rule
group or firewall policy.
*
[GetResourceShareInvitations](https://docs.aws.amazon.com/ram/latest/APIReference/API_GetResourceShareInvitations.html) - Returns the Amazon Resource Names (ARNs) of the resource share invitations.
*
[AcceptResourceShareInvitation](https://docs.aws.amazon.com/ram/latest/APIReference/API_AcceptResourceShareInvitation.html)
- Accepts the share invitation for a specified resource share.
For additional information about resource sharing using RAM, see [AWS Resource Access Manager User
Guide](https://docs.aws.amazon.com/ram/latest/userguide/what-is.html).
"""
def put_resource_policy(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "PutResourcePolicy", input, options)
end
@doc """
Adds the specified tags to the specified resource.
Tags are key:value pairs that you can use to categorize and manage your
resources, for purposes like billing. For example, you might set the tag key to
"customer" and the value to the customer name or ID. You can specify one or more
tags to add to each AWS resource, up to 50 tags for a resource.
You can tag the AWS resources that you manage through AWS Network Firewall:
firewalls, firewall policies, and rule groups.
"""
def tag_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "TagResource", input, options)
end
@doc """
Removes the tags with the specified keys from the specified resource.
Tags are key:value pairs that you can use to categorize and manage your
resources, for purposes like billing. For example, you might set the tag key to
"customer" and the value to the customer name or ID. You can specify one or more
tags to add to each AWS resource, up to 50 tags for a resource.
You can manage tags for the AWS resources that you manage through AWS Network
Firewall: firewalls, firewall policies, and rule groups.
"""
def untag_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UntagResource", input, options)
end
@doc """
Modifies the flag, `DeleteProtection`, which indicates whether it is possible to
delete the firewall.
If the flag is set to `TRUE`, the firewall is protected against deletion. This
setting helps protect against accidentally deleting a firewall that's in use.
"""
def update_firewall_delete_protection(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateFirewallDeleteProtection", input, options)
end
@doc """
Modifies the description for the specified firewall.
Use the description to help you identify the firewall when you're working with
it.
"""
def update_firewall_description(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateFirewallDescription", input, options)
end
@doc """
Updates the properties of the specified firewall policy.
"""
def update_firewall_policy(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateFirewallPolicy", input, options)
end
@doc """
Calls the AWS Network Firewall `UpdateFirewallPolicyChangeProtection` API
operation via a POST request.
"""
def update_firewall_policy_change_protection(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"UpdateFirewallPolicyChangeProtection",
input,
options
)
end
@doc """
Sets the logging configuration for the specified firewall.
To change the logging configuration, retrieve the `LoggingConfiguration` by
calling `DescribeLoggingConfiguration`, then change it and provide the modified
object to this update call. You must change the logging configuration one
`LogDestinationConfig` at a time inside the retrieved `LoggingConfiguration`
object.
You can perform only one of the following actions in any call to
`UpdateLoggingConfiguration`:
* Create a new log destination object by adding a single
`LogDestinationConfig` array element to `LogDestinationConfigs`.
* Delete a log destination object by removing a single
`LogDestinationConfig` array element from `LogDestinationConfigs`.
* Change the `LogDestination` setting in a single
`LogDestinationConfig` array element.
You can't change the `LogDestinationType` or `LogType` in a
`LogDestinationConfig`. To change these settings, delete the existing
`LogDestinationConfig` object and create a new one, using two separate calls to
this update operation.
"""
def update_logging_configuration(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateLoggingConfiguration", input, options)
end
@doc """
Updates the rule settings for the specified rule group.
You use a rule group by reference in one or more firewall policies. When you
modify a rule group, you modify all firewall policies that use the rule group.
To update a rule group, first call `DescribeRuleGroup` to retrieve the current
`RuleGroup` object, update the object as needed, and then provide the updated
object to this call.
"""
def update_rule_group(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateRuleGroup", input, options)
end
@doc """
Calls the AWS Network Firewall `UpdateSubnetChangeProtection` API operation
via a POST request.
"""
def update_subnet_change_protection(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateSubnetChangeProtection", input, options)
end
end
|
lib/aws/generated/network_firewall.ex
| 0.904242 | 0.752718 |
network_firewall.ex
|
starcoder
|
defmodule Airtable do
  @moduledoc """
  Documentation for Airtable.
  """

  @doc """
  Retrieves a certain row from a table.
  """
  def get(api_key, table_key, table_name, item_id),
    do: perform(:get, api_key, table_key, table_name, item_id, [])

  @doc """
  Deletes a certain row from a table. Returns {:ok, "DELETED_ID"} on success.
  """
  def delete(api_key, table_key, table_name, item_id),
    do: perform(:delete, api_key, table_key, table_name, item_id, [])

  @doc """
  Creates a new row by performing a POST request to Airtable. Parameters are
  sent via the _fields_ option. Upload fields just need to be given one or more
  downloadable URLs.

      Airtable.create(
        "AIRTABLE_API_KEY", "TABLE_KEY", "persons",
        fields: %{
          "Name" => "<NAME>",
          "Notes" => "formerly known as gutschilla",
          "Attachments" => [%{"url" => "https://dummyimage.com/600x400/000/fff"}]
        }
      )
  """
  def create(api_key, table_key, table_name, options),
    do: perform(:create, api_key, table_key, table_name, nil, options)

  @doc ~S"""
  Replaces an existing row with a new one. If you just want to update certain
  fields, use update/5 instead. Returns the replaced item.

      # create
      {:ok, %Airtable.Result.Item{id: id , fields: %{"name": "Frank", age: 55}} = Airtable.create("API_KEY", "TABLE_KEY", "persons", "rec_SOME_ID", fields: %{"name": "Frank", age: 55})
      # overwrite
      {:ok, %Airtable.Result.Item{id: ^id, fields: %{"name": "Martin", age: 39}} = Airtable.replace("API_KEY", "TABLE_KEY", "persons", id, fields: %{"name": "Martin", age: 39})
  """
  def replace(api_key, table_key, table_name, id, options),
    do: perform(:replace, api_key, table_key, table_name, id, options)

  @doc ~S"""
  Update given fields for a row. Fields not set in this call will be kept as-is.
  If you want to replace the whole entry/row, use replace/5 instead. Returns the
  updated item.

      # create
      {:ok, %Airtable.Result.Item{id: id , fields: %{"name": "Frank", age: 55}} = Airtable.create("API_KEY", "TABLE_KEY", "persons", "rec_SOME_ID", fields: %{"name": "Frank", age: 55})
      # overwrite, age is still 55
      {:ok, %Airtable.Result.Item{id: ^id, fields: %{"name": "Martin", age: 55}} = Airtable.replace("API_KEY", "TABLE_KEY", "persons", id, fields: %{"name": "Martin"})
  """
  def update(api_key, table_key, table_name, id, options),
    do: perform(:update, api_key, table_key, table_name, id, options)

  @doc """
  Performs the call cycle for :get, :delete, :update, :replace calls.

  - create request struct
  - make actual HTTP request
  - handle JSON response
  """
  def perform(action, api_key, table_key, table_name, item_id, options \\ []) do
    request = make_request(action, api_key, table_key, table_name, item_id, options)

    with {:ok, response = %Mojito.Response{}} <- Mojito.request(request) do
      handle_response(action, response)
    end
  end

  @doc """
  Retrieves all entries.

  ## options

  ### fields:

  list of strings for fields to retrieve only. Remember, that id will always be there.

  ```
  Airtable.list("API_KEY", "app_BASE", "Filme", fields: ["Titel", "Jahr"])
  {:ok,
    %Airtable.Result.List{
      offset: nil,
      records: [
        %Airtable.Result.Item{
          fields: %{"Jahr" => "2004", "Titel" => "Kill Bill Volume 2"},
          id: "rec15b3sYhdEStY1e"
        },
        %Airtable.Result.Item{
          fields: %{"Titel" => "Ein blonder Traum"},
          id: "rec3KUcL7R3AHD3rY"
        },
        ...
      ]
    }
  }
  ```

  - filterByFormula
  - maxRecords
  - sort
  - view
  - cellFormat
  - timeZone
  - userLocale

  ## Examples

      iex> Airtable.list("AIRTABLE_API_KEY", "TABLE_KEY", "films", max_records: 1000)
      %Airtable.Result.List%{records: [%Airtable.Result.Item{id: "someid", fields: %{"foo": "bar"}}], offset: "…"}
  """
  def list(api_key, table_key, table_name, options \\ []) do
    request = make_request(:list, api_key, table_key, table_name, options)

    with {:ok, response = %Mojito.Response{}} <- Mojito.request(request) do
      handle_response(:list, response)
    end
  end

  def handle_response(:delete, response) do
    with {:status, %Mojito.Response{body: body, status_code: 200}} <- {:status, response},
         {:json, {:ok, %{"id" => id, "deleted" => true}}} <- {:json, Jason.decode(body)} do
      {:ok, id}
    else
      {:status, %Mojito.Response{status_code: 404}} -> {:error, :not_found}
      {reason, details} -> {:error, {reason, details}}
    end
  end

  def handle_response(type, response) when type in [:get, :list, :create, :update, :replace] do
    with {:status, %Mojito.Response{body: body, status_code: 200}} <- {:status, response},
         {:json, {:ok, map = %{}}} <- {:json, Jason.decode(body)},
         {:struct, {:ok, item}} <- {:struct, make_struct(type, map)} do
      {:ok, item}
    else
      {:status, %Mojito.Response{status_code: 404}} -> {:error, :not_found}
      {reason, details} -> {:error, {reason, details}}
    end
  end

  defp make_struct(type, map) when type in [:get, :create, :update, :replace] do
    with item = %Airtable.Result.Item{} <- Airtable.Result.Item.from_item_map(map), do: {:ok, item}
  end

  defp make_struct(:list, map) do
    with list = %Airtable.Result.List{} <- Airtable.Result.List.from_record_maps(map), do: {:ok, list}
  end

  def make_request(type, api_key, table_key, table_name, item_id, options)
      when type in [:get, :delete, :update, :replace, :create] do
    %Mojito.Request{
      headers: make_headers(api_key),
      method: method_for(type),
      url: make_url(table_key, table_name, item_id),
      body: make_body(options[:fields])
    }
  end

  def make_request(:list, api_key, table_key, table_name, options) do
    url =
      make_url(table_key, table_name)
      |> URI.parse()
      |> Map.put(:query, opts_to_query(options))
      |> URI.to_string()

    %Mojito.Request{
      headers: make_headers(api_key),
      method: :get,
      url: url
    }
  end

  # No fields given (GET/DELETE): empty request body.
  defp make_body(nil), do: ""
  defp make_body(map = %{}), do: Jason.encode!(%{"fields" => map})

  defp method_for(:get), do: :get
  defp method_for(:create), do: :post
  defp method_for(:delete), do: :delete
  defp method_for(:replace), do: :put
  defp method_for(:update), do: :patch

  defp opts_to_query([]), do: ""

  defp opts_to_query(options) do
    Enum.map_join(options, "&", &encode_query_param/1)
  end

  # List values become repeated `key[]=value` query params (Airtable style).
  defp encode_query_param({k, v}) when is_list(v) do
    v
    |> Enum.map(fn value -> {"#{Airtable.Camelizer.from_atom(k)}[]", value} end)
    |> URI.encode_query()
  end

  defp encode_query_param({k, v}) do
    [{Airtable.Camelizer.from_atom(k), v}]
    |> URI.encode_query()
  end

  defp make_headers(api_key) when is_binary(api_key) do
    [
      {"Authorization", "Bearer #{api_key}"},
      {"Content-Type", "application/json"}
    ]
  end

  # nil path segments (no item_id) are dropped before joining.
  defp make_url(table_key, table_name, item_id \\ nil) do
    [base_url(), table_key, table_name, item_id]
    |> Enum.reject(&is_nil/1)
    |> Enum.join("/")
  end

  defp base_url(), do: "https://api.airtable.com/v0"
end
|
lib/airtable.ex
| 0.739046 | 0.464355 |
airtable.ex
|
starcoder
|
defmodule Faker.Phone.EnUs do
  @moduledoc """
  This follows the rules outlined in the North American Numbering Plan
  at https://en.wikipedia.org/wiki/North_American_Numbering_Plan.

  The NANP number format may be summarized in the notation NPA-NXX-xxxx:

  The allowed ranges for NPA (area code) are: [2–9] for the first digit, and
  [0-9] for the second and third digits. The NANP is not assigning area codes
  with 9 as the second digit.

  The allowed ranges for NXX (central office/exchange) are: [2–9] for the first
  digit, and [0–9] for both the second and third digits (however, in geographic
  area codes the third digit of the exchange cannot be 1 if the second digit is
  also 1).

  The allowed ranges for xxxx (subscriber number) are [0–9] for each of the four
  digits.
  """

  @doc """
  Returns a random US phone number

  Possible returned formats:

      (123) 456-7890
      123/456-7890
      123-456-7890
      123.456.7890
      1234567890

  ## Examples

      iex> Faker.Phone.EnUs.phone()
      "5528621083"
      iex> Faker.Phone.EnUs.phone()
      "(730) 552-5702"
      iex> Faker.Phone.EnUs.phone()
      "652-505-3376"
      iex> Faker.Phone.EnUs.phone()
      "(377) 347-8109"
  """
  @spec phone() :: String.t()
  def phone do
    format_style = digit(0, 3)
    phone(format_style)
  end

  # Style 0: "(NPA) NXX-xxxx"; style 1: "NPA/NXX-xxxx"; any other style uses
  # one separator ("-", "." or none) between all three groups.
  defp phone(0), do: "(" <> area_code() <> ") " <> exchange_code() <> "-" <> subscriber_number()
  defp phone(1), do: area_code() <> "/" <> exchange_code() <> "-" <> subscriber_number()

  defp phone(_style) do
    separator = std_separator()
    Enum.join([area_code(), exchange_code(), subscriber_number()], separator)
  end

  @doc """
  Returns a random area code

  ## Examples

      iex> Faker.Phone.EnUs.area_code()
      "825"
      iex> Faker.Phone.EnUs.area_code()
      "246"
      iex> Faker.Phone.EnUs.area_code()
      "681"
      iex> Faker.Phone.EnUs.area_code()
      "683"
  """
  @spec area_code() :: String.t()
  def area_code do
    # NPA: first digit 2-9; second digit 0-8 (9 is never assigned); third 0-9.
    "#{digit(2, 9)}#{digit(0, 8)}#{digit(0, 9)}"
  end

  @doc """
  Returns a random exchange code

  ## Examples

      iex> Faker.Phone.EnUs.exchange_code()
      "503"
      iex> Faker.Phone.EnUs.exchange_code()
      "845"
      iex> Faker.Phone.EnUs.exchange_code()
      "549"
      iex> Faker.Phone.EnUs.exchange_code()
      "509"
  """
  @spec exchange_code() :: String.t()
  def exchange_code do
    second = digit(0, 9)

    # Third digit must not be 1 when the second digit is 1 (no N11 codes).
    third =
      if second == 1 do
        digit(2, 9)
      else
        digit(1, 9)
      end

    "#{digit(2, 9)}#{second}#{third}"
  end

  @doc """
  Returns a random subscriber number `n` digits long

  ## Examples

      iex> Faker.Phone.EnUs.subscriber_number()
      "0154"
      iex> Faker.Phone.EnUs.subscriber_number()
      "2646"
      iex> Faker.Phone.EnUs.subscriber_number(2)
      "10"
      iex> Faker.Phone.EnUs.subscriber_number(5)
      "83297"
  """
  @spec subscriber_number(pos_integer) :: String.t()
  def subscriber_number(n) when is_integer(n) do
    "#"
    |> String.duplicate(n)
    |> Faker.format()
  end

  @spec subscriber_number() :: String.t()
  def subscriber_number, do: subscriber_number(4)

  @doc """
  Returns a random extension `n` digits long

  ## Examples

      iex> Faker.Phone.EnUs.extension()
      "0154"
      iex> Faker.Phone.EnUs.extension()
      "2646"
      iex> Faker.Phone.EnUs.extension(3)
      "108"
      iex> Faker.Phone.EnUs.extension(5)
      "32970"
  """
  defdelegate extension(n), to: __MODULE__, as: :subscriber_number
  defdelegate extension, to: __MODULE__, as: :subscriber_number

  defp std_separator, do: elem({"-", ".", ""}, Faker.random_between(0, 2))

  defp digit(lo, hi), do: Faker.random_between(lo, hi)
end
|
lib/faker/phone/en_us.ex
| 0.811003 | 0.608914 |
en_us.ex
|
starcoder
|
defmodule Bintree do
  @moduledoc """
  Basic module
  """

  defstruct value: nil, left: nil, right: nil

  @typedoc """
  Binary tree where branches can be nil
  """
  @type t :: %Bintree{value: any, left: t | nil, right: t | nil}
  @type filter_fun :: (any -> boolean)
  @type process_fun :: (any -> any)

  @doc """
  Creates base binary tree
  """
  @doc since: "1.0.0"
  @spec new(any, t | nil, t | nil) :: t
  def new(value, left \\ nil, right \\ nil) do
    # Non-tree branch arguments are wrapped into leaf nodes; nil stays nil.
    left =
      if branch?(left) do
        left
      else
        new(left)
      end

    right =
      if branch?(right) do
        right
      else
        new(right)
      end

    %Bintree{value: value, left: left, right: right}
  end

  # A valid branch is either nil (no child) or an existing tree node.
  defp branch?(nil), do: true
  defp branch?(%Bintree{}), do: true
  defp branch?(_other), do: false

  @doc """
  Automatically generates binary tree values

  Generates a branch while the filter_fun returns a truthy value.

  ## Examples

      iex> Bintree.new(1, &(&1*3), &(&1+3), &(&1 <= 10))

  Returns a binary tree, where turning left is multiplying by three,
  turning right is adding three. If the number is greater than 10,
  then the generation of this branch is stopped.

      iex> Bintree.new(1, &(&1*3), &(&1+3), 4)

  The rules are the same as the previous one, but the generation of the tree will end
  when the depth of 4 values is reached.
  """
  @doc since: "1.0.0"
  @spec new(any, process_fun, process_fun, filter_fun | non_neg_integer()) :: t | nil
  def new(value, left_fun, right_fun, filter_fun) when is_function(filter_fun) do
    # Generation continues only while the filter accepts the value; a
    # rejected value prunes the whole branch (returns nil).
    if filter_fun.(value) do
      left = new(left_fun.(value), left_fun, right_fun, filter_fun)
      right = new(right_fun.(value), left_fun, right_fun, filter_fun)
      new(value, left, right)
    else
      nil
    end
  end

  def new(_value, _left_fun, _right_fun, 0), do: nil

  def new(value, left_fun, right_fun, depth) when is_integer(depth) do
    left = new(left_fun.(value), left_fun, right_fun, depth - 1)
    right = new(right_fun.(value), left_fun, right_fun, depth - 1)
    new(value, left, right)
  end

  @doc """
  Inserts a `value` at a given `path`

  ## Example

      iex> Bintree.new(1, 3, 5)
      iex> |> Bintree.insert([:left, :left], 5)
      iex> |> Bintree.insert([:left, :right], 28)

      # Result:
      #      1
      #      |
      #   /---\\
      #   |   |
      #   3   5
      #   |
      #  /--\\
      #  |  |
      #  5  28
  """
  @doc since: "1.1.1"
  # The path may be empty (replaces the value at the current node), so the
  # spec allows an empty list — the clauses below explicitly handle [].
  @spec insert(t, [:left | :right], any) :: t
  def insert(tree, path, value)
  def insert(nil, [], value), do: new(value)
  def insert(tree, [], value), do: %{tree | value: value}

  def insert(%Bintree{value: v, left: left, right: right}, [head | tail], value) do
    case head do
      :left ->
        new(v, insert(left, tail, value), right)

      :right ->
        new(v, left, insert(right, tail, value))
    end
  end

  @doc """
  Filters the `bintree`, i.e. returns only those branch for which `filter_fun` returns a truthy value.
  """
  @doc since: "1.0.0"
  @spec filter(t | nil, filter_fun) :: t | nil
  def filter(tree, filter_fun)

  def filter(%Bintree{value: v, left: l, right: r}, filter_fun) do
    if filter_fun.(v) do
      left = filter(l, filter_fun)
      right = filter(r, filter_fun)
      new(v, left, right)
    else
      nil
    end
  end

  def filter(nil, _fun), do: nil
end
# String.Chars implementation so `to_string/1` and string interpolation render
# a tree via Bintree.Display (defined elsewhere in the project).
defimpl String.Chars, for: Bintree do
@spec to_string(Bintree.t()) :: String.t()
def to_string(tree) do
Bintree.Display.format(tree)
end
end
|
lib/bintree.ex
| 0.917755 | 0.614669 |
bintree.ex
|
starcoder
|
require Logger
require Integer
defmodule ExoSQL.Builtins do
@moduledoc """
Builtin functions.
There are two categories, normal functions and aggregate functions. Aggregate
functions receive as first parameter a ExoSQL.Result with a full table,
and the rest of parameters are the function calling parameters, unsolved.
These expressions must be first simplified with
`ExoSQL.Expr.simplify` and then executed on the rows with
`ExoSQL.Expr.run_expr`.
"""
import ExoSQL.Utils, only: [to_number: 1, to_float: 1]
@functions %{
"bool" => {ExoSQL.Builtins, :bool},
"lower" => {ExoSQL.Builtins, :lower},
"upper" => {ExoSQL.Builtins, :upper},
"split" => {ExoSQL.Builtins, :split},
"join" => {ExoSQL.Builtins, :join},
"trim" => {ExoSQL.Builtins, :trim},
"to_string" => {ExoSQL.Builtins, :to_string_},
"to_datetime" => {ExoSQL.Builtins, :to_datetime},
"to_timestamp" => {ExoSQL.Builtins, :to_timestamp},
"to_number" => {ExoSQL.Utils, :to_number!},
"substr" => {ExoSQL.Builtins, :substr},
"now" => {ExoSQL.Builtins, :now},
"strftime" => {ExoSQL.Builtins, :strftime},
"format" => {ExoSQL.Builtins, :format},
"debug" => {ExoSQL.Builtins, :debug},
"width_bucket" => {ExoSQL.Builtins, :width_bucket},
"generate_series" => {ExoSQL.Builtins, :generate_series},
"urlparse" => {ExoSQL.Builtins, :urlparse},
"jp" => {ExoSQL.Builtins, :jp},
"json" => {ExoSQL.Builtins, :json},
"unnest" => {ExoSQL.Builtins, :unnest},
"regex" => {ExoSQL.Builtins, :regex},
"regex_all" => {ExoSQL.Builtins, :regex_all},
"random" => {ExoSQL.Builtins, :random},
"randint" => {ExoSQL.Builtins, :randint},
"range" => {ExoSQL.Builtins, :range},
"greatest" => {ExoSQL.Builtins, :greatest},
"lowest" => {ExoSQL.Builtins, :lowest},
"coalesce" => {ExoSQL.Builtins, :coalesce},
"nullif" => {ExoSQL.Builtins, :nullif},
"datediff" => {ExoSQL.DateTime, :datediff},
## Math
"round" => {ExoSQL.Builtins, :round},
"trunc" => {ExoSQL.Builtins, :trunc},
"floor" => {ExoSQL.Builtins, :floor_},
"ceil" => {ExoSQL.Builtins, :ceil_},
"power" => {ExoSQL.Builtins, :power},
"sqrt" => {ExoSQL.Builtins, :sqrt},
"log" => {ExoSQL.Builtins, :log},
"ln" => {ExoSQL.Builtins, :ln},
"abs" => {ExoSQL.Builtins, :abs},
"mod" => {ExoSQL.Builtins, :mod},
"sign" => {ExoSQL.Builtins, :sign},
## Aggregates
"count" => {ExoSQL.Builtins, :count},
"sum" => {ExoSQL.Builtins, :sum},
"avg" => {ExoSQL.Builtins, :avg},
"max" => {ExoSQL.Builtins, :max_},
"min" => {ExoSQL.Builtins, :min_}
}
# Invokes `{mod, fun}` with `params`, tagging failures with the SQL-level
# function `name`. Any raised exception is converted into a throw so the SQL
# layer can report which function/params failed.
# NOTE(review): the blanket rescue may hide genuine bugs in builtin
# implementations; it appears deliberate here to surface SQL-level errors.
def call_function({mod, fun, name}, params) do
try do
apply(mod, fun, params)
rescue
_excp ->
# Logger.debug("Exception #{inspect _excp}: #{inspect {{mod, fun}, params}}")
throw({:function, {name, params}})
end
end
# Looks up `name` in the builtin function table and invokes it with `params`.
# Raises ArgumentError for unknown functions; execution failures are thrown
# as `{:function, {name, params}}` for the SQL layer to report.
def call_function(name, params) do
  case @functions[name] do
    nil ->
      # Bug fix: the previous `raise {:unknown_function, name}` was invalid —
      # raise/1 only accepts a module, string or exception, so it blew up
      # with a generic ArgumentError about raise/1 itself. Raise a
      # descriptive ArgumentError instead (same exception type callers would
      # have observed before).
      raise ArgumentError, "unknown function: #{inspect(name)}"

    {mod, fun} ->
      try do
        apply(mod, fun, params)
      rescue
        _excp ->
          # Logger.debug("Exception #{inspect(_excp)}: #{inspect({{mod, fun}, params})}")
          throw({:function, {name, params}})
      end
  end
end
def cant_simplify(f) do
is_aggregate(f) or f in ["random", "randint", "debug"]
end
# True for builtins that expand into rows (table-producing functions).
def is_projectable(function_name) do
  Enum.member?(~w(unnest generate_series), function_name)
end
def round(nil), do: nil
def round(n) do
{:ok, n} = to_float(n)
Kernel.round(n)
end
def round(n, 0) do
{:ok, n} = to_float(n)
Kernel.round(n)
end
def round(n, "0") do
{:ok, n} = to_float(n)
Kernel.round(n)
end
def round(n, r) do
{:ok, n} = to_float(n)
{:ok, r} = to_number(r)
Float.round(n, r)
end
def power(nil, _), do: nil
def power(_, nil), do: nil
def power(_, 0), do: 1
# To allow big power. From https://stackoverflow.com/questions/32024156/how-do-i-raise-a-number-to-a-power-in-elixir#32024157
def power(x, n) when Integer.is_odd(n) do
{:ok, x} = to_number(x)
{:ok, n} = to_number(n)
x * :math.pow(x, n - 1)
end
def power(x, n) do
{:ok, x} = to_number(x)
{:ok, n} = to_number(n)
result = :math.pow(x, n / 2)
result * result
end
def sqrt(nil), do: nil
def sqrt(n) do
{:ok, n} = to_number(n)
:math.sqrt(n)
end
def log(nil), do: nil
def log(n) do
{:ok, n} = to_number(n)
:math.log10(n)
end
def ln(nil), do: nil
def ln(n) do
{:ok, n} = to_number(n)
:math.log(n)
end
def abs(nil), do: nil
def abs(n) do
{:ok, n} = to_number(n)
:erlang.abs(n)
end
def mod(nil, _), do: nil
def mod(_, nil), do: nil
def mod(n, m) do
{:ok, n} = to_number(n)
{:ok, m} = to_number(m)
:math.fmod(n, m)
end
def sign(nil), do: nil
def sign(n) do
{:ok, n} = to_number(n)
cond do
n < 0 -> -1
n == 0 -> 0
true -> 1
end
end
def random(), do: :rand.uniform()
# Random integer below `max_`.
# NOTE(review): :rand.uniform(n) returns 1..n, so this yields 1..max_-1 —
# `max_` is never produced and neither is 0. Confirm whether the intended
# range is 0..max_-1 (off-by-one suspicion); not changed here.
def randint(max_) do
:rand.uniform(max_ - 1)
end
# Random integer between `min_` and `max_`.
# NOTE(review): :rand.uniform(n) returns 1..n, so this yields min_..max_-2 —
# `max_` (and max_-1) are never produced. Confirm whether the intended range
# is min_..max_-1 (off-by-one suspicion); not changed here.
def randint(min_, max_) do
:rand.uniform(max_ - min_ - 1) + min_ - 1
end
# Truthiness coercion for SQL expressions: nil, 0, the empty string and
# false are falsy; every other value is truthy.
def bool(falsy) when falsy in [nil, 0, "", false], do: false
def bool(_truthy), do: true
# Dual-purpose: the lower bound of a range tuple, or the lowercase of a string.
def lower(nil), do: nil
def lower({:range, {min_value, _max_value}}), do: min_value
def lower(text), do: String.downcase(text)
# Dual-purpose: the upper bound of a range tuple, or the uppercase of a string.
def upper(nil), do: nil
def upper({:range, {_min_value, max_value}}), do: max_value
def upper(text), do: String.upcase(text)
def to_string_(%DateTime{} = d), do: DateTime.to_iso8601(d)
def to_string_(%{} = d) do
{:ok, e} = Poison.encode(d)
e
end
def to_string_(s), do: to_string(s)
def now(), do: Timex.local()
def now(tz), do: Timex.now(tz)
def to_datetime(nil), do: nil
def to_datetime(other), do: ExoSQL.DateTime.to_datetime(other)
def to_datetime(other, mod), do: ExoSQL.DateTime.to_datetime(other, mod)
def to_timestamp(%DateTime{} = d), do: DateTime.to_unix(d)
def substr(nil, _skip, _len) do
""
end
def substr(str, skip, len) do
# force string
str = to_string_(str)
{:ok, skip} = to_number(skip)
{:ok, len} = to_number(len)
if len < 0 do
String.slice(str, skip, max(0, String.length(str) + len - skip))
else
String.slice(str, skip, len)
end
end
def substr(str, skip) do
# A upper limit on what to return, should be enought
substr(str, skip, 10_000)
end
# nil propagates; otherwise strips leading/trailing whitespace.
def trim(nil), do: nil
def trim(text), do: String.trim(text)
# join/1 defaults the separator to a comma; a nil enumerable propagates nil.
def join(enumerable), do: join(enumerable, ",")

def join(nil, _separator), do: nil

def join(enumerable, separator) do
  Enum.join(enumerable, separator)
end
# nil input yields an empty list rather than crashing.
def split(nil, _separator), do: []

def split(text, separator) do
  String.split(text, separator)
end

# split/1 uses the default separators: comma+space, bare comma, or space.
def split(text), do: split(text, [", ", ",", " "])
@doc ~S"""
Convert datetime to string.
If no format is given, it is as to_string, which returns the ISO 8601.
Format allows all substitutions from
[Timex.format](https://hexdocs.pm/timex/Timex.Format.DateTime.Formatters.Strftime.html),
for example:
%d day of month: 00
%H hour: 00-24
%m month: 01-12
%M minute: 00-59
%s seconds since 1970-01-01
%S seconds: 00-59
%Y year: 0000-9999
%i ISO 8601 format
%V Week number
%% %
"""
def strftime(%DateTime{} = d), do: to_string_(d)
def strftime(%DateTime{} = d, format), do: ExoSQL.DateTime.strftime(d, format)
def strftime(other, format), do: strftime(to_datetime(other), format)
@doc ~S"""
sprintf style formatting.
Known interpolations:
%d - Integer
%f - Float, 2 digits
%.Nf - Float N digits
%k - integer with k, M sufix
%.k - float with k, M sufix, uses float part
"""
def format(str), do: ExoSQL.Format.format(str, [])
def format(str, args) when is_list(args) do
ExoSQL.Format.format(str, args)
end
@doc ~S"""
Very simple sprintf formatter. Knows this formats:
* %%
* %s
* %d
* %f (only two decimals)
* %.{ndec}f
"""
def format(str, arg1), do: format(str, [arg1])
def format(str, arg1, arg2), do: format(str, [arg1, arg2])
def format(str, arg1, arg2, arg3), do: format(str, [arg1, arg2, arg3])
def format(str, arg1, arg2, arg3, arg4), do: format(str, [arg1, arg2, arg3, arg4])
def format(str, arg1, arg2, arg3, arg4, arg5), do: format(str, [arg1, arg2, arg3, arg4, arg5])
def format(str, arg1, arg2, arg3, arg4, arg5, arg6),
do: format(str, [arg1, arg2, arg3, arg4, arg5, arg6])
def format(str, arg1, arg2, arg3, arg4, arg5, arg6, arg7),
do: format(str, [arg1, arg2, arg3, arg4, arg5, arg6, arg7])
def format(str, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8),
do: format(str, [arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8])
def format(str, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9),
do: format(str, [arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9])
@doc ~S"""
Print some value to the log
"""
def debug(str) do
Logger.debug("SQL DEBUG: #{inspect(str)}")
str
end
@doc ~S"""
Returns to which bucket it belongs.
Only numbers, but datetimes can be transformed to unix datetime.
"""
def width_bucket(n, start_, end_, nbuckets) do
import ExoSQL.Utils, only: [to_float!: 1, to_number!: 1]
n = to_float!(n)
start_ = to_float!(start_)
end_ = to_float!(end_)
nbuckets = to_number!(nbuckets)
bucket = (n - start_) * nbuckets / (end_ - start_)
bucket = bucket |> Kernel.round()
cond do
bucket < 0 -> 0
bucket >= nbuckets -> nbuckets - 1
true -> bucket
end
end
@doc ~S"""
Performs a regex match
May return a list of groups, or a dict with named groups, depending on
the regex.
As an optional third parameter it performs a jp query.
Returns NULL if no match (which is falsy, so can be used for expressions)
"""
def regex(str, regexs) do
# slow. Should have been precompiled (simplify)
regex_real(str, regexs)
end
def regex(str, regexs, query) do
jp(regex_real(str, regexs), query)
end
def regex_real(str, {regex, captures}) do
if captures do
Regex.named_captures(regex, str)
else
Regex.run(regex, str)
end
end
def regex_real(str, regexs) when is_binary(regexs) do
captures = String.contains?(regexs, "(?<")
regex = Regex.compile!(regexs)
regex_real(str, {regex, captures})
end
@doc ~S"""
Performs a regex scan
Returns all the matches, if groups are used, then its a list of groups matching.
As an optional third parameter it performs a jp query.
Returns NULL if no match (which is falsy, so can be used for expressions)
"""
def regex_all(str, regexs) do
# slow. Should have been precompiled (simplify)
regex_all_real(str, regexs)
end
def regex_all(str, regexs, query) do
regex_all_real(str, regexs)
|> Enum.map(&jp(&1, query))
end
def regex_all_real(str, regexs) when is_binary(regexs) do
regex = Regex.compile!(regexs)
regex_all_real(str, regex)
end
def regex_all_real(str, regex) do
Regex.scan(regex, str)
end
@doc ~S"""
Generates a table with the series of numbers as given. Use for histograms
without holes.
"""
def generate_series(end_), do: generate_series(1, end_, 1)
def generate_series(start_, end_), do: generate_series(start_, end_, 1)
def generate_series(%DateTime{} = start_, %DateTime{} = end_, days) when is_number(days) do
generate_series(start_, end_, "#{days}D")
end
def generate_series(%DateTime{} = start_, %DateTime{} = end_, mod) when is_binary(mod) do
duration =
case ExoSQL.DateTime.Duration.parse(mod) do
{:error, other} ->
throw({:error, other})
%ExoSQL.DateTime.Duration{seconds: 0, days: 0, months: 0, years: 0} ->
throw({:error, :invalid_duration})
{:ok, other} ->
other
end
cmp =
if ExoSQL.DateTime.Duration.is_negative(duration) do
:lt
else
:gt
end
rows =
ExoSQL.Utils.generate(start_, fn value ->
cmpr = DateTime.compare(value, end_)
if cmpr == cmp do
:halt
else
next = ExoSQL.DateTime.Duration.datetime_add(value, duration)
{[value], next}
end
end)
%ExoSQL.Result{
columns: [{:tmp, :tmp, "generate_series"}],
rows: rows
}
end
def generate_series(start_, end_, step)
when is_number(start_) and is_number(end_) and is_number(step) do
if step == 0 do
raise ArgumentError, "Step invalid. Will never reach end."
end
if step < 0 and start_ < end_ do
raise ArgumentError, "Start, end and step invalid. Will never reach end."
end
if step > 0 and start_ > end_ do
raise ArgumentError, "Start, end and step invalid. Will never reach end."
end
%ExoSQL.Result{
columns: [{:tmp, :tmp, "generate_series"}],
rows: generate_series_range(start_, end_, step)
}
end
def generate_series(start_, end_, step) do
import ExoSQL.Utils, only: [to_number!: 1, to_number: 1]
# there are two options: numbers or dates. Check if I can convert the start_ to a number
# and if so, do the generate_series for numbers
case to_number(start_) do
{:ok, start_} ->
generate_series(start_, to_number!(end_), to_number!(step))
# maybe a date
{:error, _} ->
generate_series(to_datetime(start_), to_datetime(end_), step)
end
end
defp generate_series_range(current, stop, step) do
cond do
step > 0 and current > stop ->
[]
step < 0 and current < stop ->
[]
true ->
[[current] | generate_series_range(current + step, stop, step)]
end
end
@doc ~S"""
Parses an URL and returns either the full breakdown or a single part.

With one argument (or `what == nil`) it returns a map with the keys:

* host
* port
* scheme
* path
* query
* user

(plus `domain`, derived from the host). When `what` is given, a JSON
Pointer lookup (see `jp/2`) is performed on that map instead.

The URL should carry an explicit scheme (`scheme://server`) or the result
may not be well formed. For emails, use something like `"email://a@b.com"`.
"""
def urlparse(url), do: urlparse(url, nil)

def urlparse(nil, what), do: urlparse("", what)

def urlparse(url, what) do
  parsed = URI.parse(url)

  decoded_query =
    case parsed.query do
      nil -> nil
      raw -> URI.decode_query(raw)
    end

  result = %{
    "host" => parsed.host,
    "port" => parsed.port,
    "scheme" => parsed.scheme,
    "path" => parsed.path,
    "query" => decoded_query,
    "user" => parsed.userinfo,
    "domain" => get_domain(parsed.host)
  }

  # `what` is truthy => drill into the parsed map; otherwise return it whole.
  if what, do: jp(result, what), else: result
end
@doc ~S"""
Extracts the base domain from a hostname.

Returns "google" for "www.google.com" and also for "www.google.co.uk":
the TLD is always dropped, then well-known non-domain labels (www, co,
com, org, net) are skipped from the right until a candidate remains.
If every label gets skipped, the original hostname is returned.
"""
def get_domain(nil), do: nil

def get_domain(hostname) do
  # Labels right-to-left; the TLD (last label) is always discarded.
  [_tld | reversed] = hostname |> String.split(".") |> Enum.reverse()
  get_domainr(reversed, hostname)
end

# Labels that are never a domain by themselves: skip and keep looking.
defp get_domainr([label | rest], fallback) do
  if label in ~w(com org net www co) do
    get_domainr(rest, fallback)
  else
    label
  end
end

defp get_domainr([], fallback), do: fallback
@doc ~S"""
Performs a JSON Pointer search on JSON data.

It just uses / to separate keys. Empty segments (e.g. from a leading "/")
are skipped; numeric segments index into arrays, all other segments look
up map keys. Returns nil when the path does not resolve.
"""
def jp(nil, _), do: nil
def jp(json, idx) when is_list(json) and is_number(idx), do: Enum.at(json, idx)
def jp(json, str) when is_binary(str), do: jp(json, String.split(str, "/"))
# BUG FIX: this empty-segment skip clause must come BEFORE the array
# clause below. Previously a pointer like "/0" applied to an array matched
# the array clause first and crashed in to_number!("") instead of skipping
# the empty segment produced by the leading "/".
def jp(json, ["" | rest]), do: jp(json, rest)

def jp(json, [head | rest]) when is_list(json) do
  n = ExoSQL.Utils.to_number!(head)
  jp(Enum.at(json, n), rest)
end

def jp(json, [head | rest]), do: jp(Map.get(json, head, nil), rest)
def jp(json, []), do: json
@doc ~S"""
Coerces a value into JSON data: strings are decoded, maps and lists pass
through untouched, nil stays nil.
"""
def json(nil), do: nil
def json(str) when is_binary(str), do: Poison.decode!(str)
def json(map) when is_map(map), do: map
def json(list) when is_list(list), do: list
@doc ~S"""
Extracts some keys from each value of an array and returns the array of
those values as a result table.

With a single argument each element becomes a one-column row. With a list
of keys, each element (a map) is projected onto those keys.
"""
def unnest(array) do
  array = json(array) || []

  %ExoSQL.Result{
    columns: [{:tmp, :tmp, "unnest"}],
    rows: Enum.map(array, &[&1])
  }
end

def unnest(array, cols) when is_list(cols) do
  array = json(array) || []

  rows =
    Enum.map(array, fn row ->
      Enum.map(cols, &Map.get(row, &1))
    end)

  columns = Enum.map(cols, &{:tmp, :tmp, &1})

  %ExoSQL.Result{
    columns: columns,
    rows: rows
  }
end

# Arity helpers collect the column names into a list.
#
# BUG FIX: the original repeated the `col5` variable in the 7..10-arity
# heads (e.g. `def unnest(array, col1, col2, col3, col4, col5, col5)`).
# In Elixir a repeated binding in a head is an equality pattern, so those
# arities only matched when two arguments happened to be equal — and one
# column name was silently dropped. Every argument now binds a distinct
# name.
def unnest(array, col1), do: unnest(array, [col1])
def unnest(array, col1, col2), do: unnest(array, [col1, col2])
def unnest(array, col1, col2, col3), do: unnest(array, [col1, col2, col3])
def unnest(array, col1, col2, col3, col4), do: unnest(array, [col1, col2, col3, col4])

def unnest(array, col1, col2, col3, col4, col5),
  do: unnest(array, [col1, col2, col3, col4, col5])

def unnest(array, col1, col2, col3, col4, col5, col6),
  do: unnest(array, [col1, col2, col3, col4, col5, col6])

def unnest(array, col1, col2, col3, col4, col5, col6, col7),
  do: unnest(array, [col1, col2, col3, col4, col5, col6, col7])

def unnest(array, col1, col2, col3, col4, col5, col6, col7, col8),
  do: unnest(array, [col1, col2, col3, col4, col5, col6, col7, col8])

def unnest(array, col1, col2, col3, col4, col5, col6, col7, col8, col9),
  do: unnest(array, [col1, col2, col3, col4, col5, col6, col7, col8, col9])
@doc ~S"""
Creates a range value, later usable with:

* `IN` -- subset / element containment
* `*`  -- intersection: nil when disjoint, the overlapping range otherwise
"""
def range(a, b), do: {:range, {a, b}}
@doc ~S"""
Returns the greatest of its arguments, ignoring nil values.
"""
def greatest(a, nil), do: a
def greatest(nil, b), do: b

def greatest(a, b) do
  case a > b do
    true -> a
    false -> b
  end
end

def greatest(a, b, c), do: Enum.reduce([a, b, c], nil, &greatest/2)
def greatest(a, b, c, d), do: Enum.reduce([a, b, c, d], nil, &greatest/2)
def greatest(a, b, c, d, e), do: Enum.reduce([a, b, c, d, e], nil, &greatest/2)
@doc ~S"""
Returns the least of its arguments, ignoring nil values.
Like `min`, but for plain values instead of aggregations.
"""
def least(a, nil), do: a
def least(nil, b), do: b

def least(a, b) do
  case a < b do
    true -> a
    false -> b
  end
end

def least(a, b, c), do: Enum.reduce([a, b, c], nil, &least/2)
def least(a, b, c, d), do: Enum.reduce([a, b, c, d], nil, &least/2)
def least(a, b, c, d, e), do: Enum.reduce([a, b, c, d, e], nil, &least/2)
@doc ~S"""
Returns the first argument that is not NULL.
"""
def coalesce(a, b), do: Enum.find([a, b], &(not is_nil(&1)))
def coalesce(a, b, c), do: Enum.find([a, b, c], &(not is_nil(&1)))
def coalesce(a, b, c, d), do: Enum.find([a, b, c, d], &(not is_nil(&1)))
def coalesce(a, b, c, d, e), do: Enum.find([a, b, c, d, e], &(not is_nil(&1)))
@doc ~S"""
Returns NULL when both arguments are equal; otherwise the first argument.
"""
def nullif(same, same), do: nil
def nullif(a, _b), do: a
# SQL FLOOR: floats round down to an integer, integers pass through,
# anything else is coerced to a number first.
def floor_(n) when is_float(n), do: n |> Float.floor() |> trunc()
def floor_(n) when is_number(n), do: n
def floor_(n), do: n |> ExoSQL.Utils.to_number!() |> floor_()

# SQL CEIL: mirror image of floor_/1.
def ceil_(n) when is_float(n), do: n |> Float.ceil() |> trunc()
def ceil_(n) when is_number(n), do: n
def ceil_(n), do: n |> ExoSQL.Utils.to_number!() |> ceil_()
### Aggregate functions

# True for the function names that aggregate over a whole result set.
def is_aggregate(name), do: name in ~w(count avg sum max min)
# COUNT(*): every row counts; no expression to evaluate.
def count(data, {:lit, '*'}) do
  Enum.count(data.rows)
end

# COUNT(DISTINCT expr): size of the set of distinct non-NULL values.
def count(data, {:distinct, expr}) do
  expr = ExoSQL.Expr.simplify(expr, %{columns: data.columns})

  distinct =
    Enum.reduce(data.rows, MapSet.new(), fn row, acc ->
      case ExoSQL.Expr.run_expr(expr, %{row: row}) do
        nil -> acc
        value -> MapSet.put(acc, value)
      end
    end)

  MapSet.size(distinct)
end

# COUNT(expr): number of rows where the expression is not NULL.
def count(data, expr) do
  expr = ExoSQL.Expr.simplify(expr, %{columns: data.columns})
  Enum.count(data.rows, fn row -> ExoSQL.Expr.run_expr(expr, %{row: row}) != nil end)
end
# AVG(expr). Returns nil (SQL NULL) on an empty result set.
#
# BUG FIX: the original guarded on `data.columns == []`. A result can have
# columns but zero rows, in which case count(*) is 0 and the division
# raised ArithmeticError. Guard on the rows instead (which also covers the
# no-columns case, where rows are necessarily empty too).
def avg(data, expr) do
  if data.rows == [] do
    nil
  else
    sum(data, expr) / count(data, {:lit, '*'})
  end
end
# SUM(expr): values that fail numeric conversion (e.g. NULL) contribute 0.
def sum(data, expr) do
  simplified = ExoSQL.Expr.simplify(expr, %{columns: data.columns})

  Enum.reduce(data.rows, 0, fn row, acc ->
    value = ExoSQL.Expr.run_expr(simplified, %{row: row})

    case ExoSQL.Utils.to_number(value) do
      {:ok, number} -> acc + number
      {:error, nil} -> acc
    end
  end)
end
# MAX(expr): a nil accumulator loses against any value via is_greater/2.
def max_(data, expr) do
  simplified = ExoSQL.Expr.simplify(expr, %{columns: data.columns})

  Enum.reduce(data.rows, nil, fn row, acc ->
    value = ExoSQL.Expr.run_expr(simplified, %{row: row})
    {acc, value} = ExoSQL.Expr.match_types(acc, value)

    if ExoSQL.Expr.is_greater(acc, value), do: acc, else: value
  end)
end
# MIN(expr): keep the accumulator only while it is non-nil and not greater
# than the incoming value.
def min_(data, expr) do
  simplified = ExoSQL.Expr.simplify(expr, %{columns: data.columns})

  Enum.reduce(data.rows, nil, fn row, acc ->
    value = ExoSQL.Expr.run_expr(simplified, %{row: row})
    {acc, value} = ExoSQL.Expr.match_types(acc, value)

    if acc != nil and ExoSQL.Expr.is_greater(value, acc) do
      acc
    else
      value
    end
  end)
end
## Simplifications.

# Precompile the format string at planning time so each row just applies it.
def simplify("format", [{:lit, format} | rest]) when is_binary(format) do
  simplify("format", [{:lit, ExoSQL.Format.compile_format(format)} | rest])
end

# Precompile the regex; remember whether it declares named captures.
def simplify("regex", [str, {:lit, regexs}]) when is_binary(regexs) do
  simplify("regex", [
    str,
    {:lit, Regex.compile!(regexs)},
    {:lit, String.contains?(regexs, "(?<")}
  ])
end

# regex/3 is rewritten as jp(regex/3, query) so the jp gets simplified too.
def simplify("regex", [str, {:lit, regexs}, {:lit, query}]) when is_binary(regexs) do
  regex = {:lit, Regex.compile!(regexs)}
  captures = {:lit, String.contains?(regexs, "(?<")}

  simplify("jp", [
    simplify("regex", [str, regex, captures]),
    {:lit, query}
  ])
end

# Pre-split the JSON pointer path once.
def simplify("jp", [json, {:lit, path}]) when is_binary(path) do
  simplify("jp", [json, {:lit, String.split(path, "/")}])
end
# random/randint must never be constant-folded: resolve them every call.
def simplify("random", []), do: {:fn, {{ExoSQL.Builtins, :random, "random"}, []}}
def simplify("randint", params), do: {:fn, {{ExoSQL.Builtins, :randint, "randint"}, params}}

# Default: resolve the name to a {module, function, name} tuple. When every
# argument is already a literal (and the function is not an aggregate),
# evaluate once now and fold the result into a literal.
def simplify(name, params) when is_binary(name) do
  if not is_aggregate(name) and Enum.all?(params, &is_lit/1) do
    literals = Enum.map(params, fn {:lit, value} -> value end)
    {:lit, ExoSQL.Builtins.call_function(name, literals)}
  else
    case @functions[name] do
      nil -> throw({:unknown_function, name})
      {mod, fun} -> {:fn, {{mod, fun, name}, params}}
    end
  end
end

def simplify(modfun, params), do: {:fn, {modfun, params}}
# A literal that can be constant-folded. COUNT's '*' marker is excluded:
# it is a placeholder, not a value.
def is_lit({:lit, '*'}), do: false
def is_lit({:lit, _value}), do: true
def is_lit(_other), do: false
end
|
lib/builtins.ex
| 0.670824 | 0.584568 |
builtins.ex
|
starcoder
|
defmodule Adventofcode.Day09SmokeBasin do
  use Adventofcode

  alias __MODULE__.{Parser, Part1, Part2}

  # Part 1: sum of (height + 1) over all low points of the grid.
  def part_1(input), do: input |> Parser.parse() |> Part1.solve()

  # Part 2: product of the sizes of the three largest basins.
  def part_2(input), do: input |> Parser.parse() |> Part2.solve()

  defmodule Part1 do
    # Offsets of the four orthogonal neighbours.
    @deltas [{0, 1}, {1, 0}, {0, -1}, {-1, 0}]

    def solve(state) do
      state
      |> Enum.filter(&low_point?(&1, state))
      |> Enum.map(fn {_pos, height} -> height + 1 end)
      |> Enum.sum()
    end

    # A cell is a low point when strictly lower than every existing neighbour.
    def low_point?({{x, y}, height}, state) do
      {x, y}
      |> neighbours()
      |> Enum.map(&Map.get(state, &1))
      |> Enum.reject(&is_nil/1)
      |> Enum.all?(&(height < &1))
    end

    def neighbours({x, y}) do
      for {dx, dy} <- @deltas, do: {x + dx, y + dy}
    end
  end

  defmodule Part2 do
    def solve(state) do
      state
      |> Enum.filter(&Part1.low_point?(&1, state))
      |> Enum.map(&scan_basin([&1], state))
      |> Enum.sort(:desc)
      |> Enum.take(3)
      |> Enum.reduce(1, fn size, acc -> size * acc end)
    end

    # Grow the basin until it stops changing, then return its size.
    defp scan_basin(basin, state) do
      grown =
        basin
        |> Enum.flat_map(&do_scan_basin(&1, state))
        |> Enum.uniq()

      if grown == basin do
        length(basin)
      else
        scan_basin(grown, state)
      end
    end

    # One growth step for a single cell: the strictly-higher non-9
    # neighbours plus the cell itself.
    defp do_scan_basin({{x, y}, height}, state) do
      {x, y}
      |> Part1.neighbours()
      |> Enum.map(&{&1, Map.get(state, &1)})
      |> Enum.filter(&elem(&1, 1))
      |> Enum.filter(fn {_pos, h} -> h != 9 and h > height end)
      |> Enum.concat([{{x, y}, height}])
    end
  end

  defmodule Parser do
    # Parses the digit grid into a %{{x, y} => height} map.
    def parse(input) do
      input
      |> String.trim()
      |> String.split("\n")
      |> Enum.with_index()
      |> Enum.flat_map(&parse_line/1)
      |> Map.new()
    end

    defp parse_line({line, y}) do
      line
      |> String.graphemes()
      |> Enum.with_index()
      |> Enum.map(fn {digit, x} -> {{x, y}, String.to_integer(digit)} end)
    end
  end
end
|
lib/day_09_smoke_basin.ex
| 0.599837 | 0.530297 |
day_09_smoke_basin.ex
|
starcoder
|
defmodule Omise.Token do
  @moduledoc ~S"""
  Provides Token API interfaces.

  <https://www.omise.co/tokens-api>

  ***NOTE***:
  Full credit card data should never go through your server.
  Do not send card data to Omise from your servers directly; send it from
  the client browser via Javascript (Omise-JS). The functions in this
  module should only be used with fake data in test mode (quickly creating
  fixtures, exercising the API from a terminal, etc.) or if you are
  PCI-DSS compliant. Sending card data from a server requires a valid
  PCI-DSS certification; learn more in
  [Security Best Practices](https://www.omise.co/security-best-practices).
  """

  use Omise.HTTPClient, endpoint: "tokens", key_type: :public_key

  defstruct object: "token",
            id: nil,
            livemode: nil,
            location: nil,
            used: nil,
            card: %Omise.Card{},
            created: nil

  @type t :: %__MODULE__{
          object: String.t(),
          id: String.t(),
          livemode: boolean,
          location: String.t(),
          used: boolean,
          card: Omise.Card.t(),
          created: String.t()
        }

  @doc ~S"""
  Create a token.

  Returns `{:ok, token}` if the request is successful, `{:error, error}`
  otherwise.

  ## Request Parameters:
    * `name` - The cardholder name as printed on the card.
    * `number` - The card number; spaces and dashes are accepted but
      stripped from the response.
    * `expiration_month` - The expiration month in the format M or MM.
    * `expiration_year` - The expiration year in the format YYYY.
    * `security_code` - The security code (CVV, CVC, etc) on the back of
      the card.
    * `city` - The city where the card was issued.
    * `postal_code` - The postal code from the city where the card was
      issued.

  ## Examples

      params = [
        card: [
          name: "<NAME>",
          city: "Bangkok",
          postal_code: 10320,
          number: 4242424242424242,
          security_code: 123,
          expiration_month: 10,
          expiration_year: 2019
        ]
      ]

      Omise.Token.create(params)

  """
  @spec create(Keyword.t(), Keyword.t()) :: {:ok, t} | {:error, Omise.Error.t()}
  def create(params, opts \\ []) do
    # Ask the HTTP client to decode the response into this struct.
    post(@endpoint, params, Keyword.merge(opts, as: %__MODULE__{}))
  end

  @doc ~S"""
  Retrieve a token.

  Returns `{:ok, token}` if the request is successful, `{:error, error}`
  otherwise.

  ## Examples

      Omise.Token.retrieve("<PASSWORD>")

  """
  @spec retrieve(String.t(), Keyword.t()) :: {:ok, t} | {:error, Omise.Error.t()}
  def retrieve(id, opts \\ []) do
    get("#{@endpoint}/#{id}", [], Keyword.merge(opts, as: %__MODULE__{}))
  end
end
|
lib/omise/token.ex
| 0.847021 | 0.624923 |
token.ex
|
starcoder
|
defmodule Atlas.Adapters.Postgres do
  @behaviour Atlas.Database.Adapter

  import Atlas.Query.Builder, only: [list_to_binding_placeholders: 1]
  import Atlas.Database.FieldNormalizer
  alias :pgsql, as: PG

  # Opens a pgsql connection from the host/credentials in `config`.
  # pgsql expects charlists, hence the conversions.
  def connect(config) do
    connection =
      PG.connect(
        String.to_char_list(config.host),
        String.to_char_list(config.username),
        String.to_char_list(config.password),
        database: String.to_char_list(config.database)
      )

    case connection do
      {:ok, pid} -> {:ok, pid}
      {:error, reason} -> {:error, reason}
    end
  end

  # Runs a plain (unprepared) SQL string and normalizes the results.
  def execute_query(pid, string) do
    pid |> PG.squery(string) |> normalize_results()
  end

  @doc """
  Executes prepared query with adapter after converting Atlas bindings to
  native formats.

  Returns "normalized" results with Elixir specific types coerced from DB
  binaries.
  """
  def execute_prepared_query(pid, query_string, args) do
    denormalized = denormalize_values(args)
    native_bindings = convert_bindings_to_native_format(query_string, denormalized)

    pid
    |> PG.equery(native_bindings, List.flatten(denormalized))
    |> normalize_results()
  end

  # Collapses the various pgsql result shapes into
  # {:ok, {count, columns, rows}} or {:error, error}.
  defp normalize_results({:ok, cols, rows}),
    do: {:ok, {nil, normalize_cols(cols), normalize_rows(rows)}}

  defp normalize_results({:ok, count}), do: {:ok, {count, [], []}}

  defp normalize_results({:ok, count, cols, rows}),
    do: {:ok, {count, normalize_cols(cols), normalize_rows(rows)}}

  defp normalize_results({:error, error}), do: {:error, error}

  @doc """
  Convert Atlas query binding syntax to native adapter format.

  Examples

  ```
  iex> convert_bindings_to_native_format("SELECT * FROM users WHERE id = ? AND archived = ?", [1, false])
  SELECT * FROM users WHERE id = $1 AND archived = $2"

  iex> convert_bindings_to_native_format("SELECT * FROM users WHERE id IN(?)", [[1,2,3]])
  SELECT * FROM users WHERE id IN($1, $2, $3)
  ```
  """
  def convert_bindings_to_native_format(query_string, args) do
    segments = query_string |> expand_bindings(args) |> String.split("?")
    last = Enum.count(segments) - 1

    segments
    |> Enum.with_index()
    |> Enum.map_join("", fn
      # The final segment has no binding after it.
      {segment, ^last} -> segment
      {segment, index} -> segment <> "$#{index + 1}"
    end)
  end

  @doc """
  Expand binding placeholder "?" into "?, ?, ?..." when binding matches list.

  Examples

  ```
  iex> expand_bindings("SELECT * FROM users WHERE id IN(?)", [[1,2,3]])
  "SELECT * FROM users WHERE id IN($1, $2, $3)"
  ```
  """
  def expand_bindings(query_string, args) do
    segments = query_string |> String.split("?") |> Enum.with_index()
    last = Enum.count(segments) - 1

    Enum.map_join(segments, "", fn
      {segment, ^last} ->
        segment

      {segment, index} ->
        case Enum.at(args, index) do
          values when is_list(values) -> segment <> list_to_binding_placeholders(values)
          _scalar -> segment <> "?"
        end
    end)
  end

  # Identifier quoting for PostgreSQL.
  def quote_column(column), do: ~s("#{column}")
  def quote_tablename(tablename), do: ~s("#{tablename}")

  def quote_namespaced_column(table, column) do
    if table do
      "#{quote_tablename(table)}.#{quote_column(column)}"
    else
      quote_column(column)
    end
  end

  # Ex: [{:column,"id",:int4,4,-1,0}, {:column,"age",:int4,4,-1,0}]
  # => [:id, :age]
  defp normalize_cols(columns) do
    Enum.map(columns, fn column -> column |> elem(1) |> String.to_atom() end)
  end

  defp normalize_rows(rows_of_tuples) do
    Enum.map(rows_of_tuples, fn row -> row |> Tuple.to_list() |> normalize_values() end)
  end
end
|
lib/atlas/adapters/postgres.ex
| 0.570571 | 0.573021 |
postgres.ex
|
starcoder
|
defmodule NLP do
  import Network
  alias Deeppipe, as: DP

  @moduledoc """
  Natural-language helpers: preprocessing of sentences into padded one-hot
  matrices, plus a small LSTM classifier trained via Deeppipe.
  """

  # Network: LSTM layer (29 units, sequence length 14), a 29x2 dense
  # layer, finished with softmax — presumably a 2-class classifier.
  defnetwork init1(_x) do
    _x
    |> lstm(29, 14, 0.1, 0.001)
    |> w(29, 2)
    |> softmax
  end

  # Trains with SGD; m and n are forwarded to Deeppipe.train/9.
  # NOTE(review): the "test" data passed to DP.train is train_image/
  # train_label, not the test_* files below — confirm this is intended.
  def sgd(m, n) do
    image = train_image()
    onehot = train_label_onehot()
    network = init1(0)
    test_image = train_image()
    test_label = train_label()
    DP.train(network, image, onehot, test_image, test_label, :cross, :sgd, m, n)
  end

  # Reads the training sentences and one-hot encodes them.
  def train_image() do
    {:ok, dt} = File.read("rnn/train.exs")
    dt |> String.replace("\n", "") |> preprocess()
  end

  # Reads the integer class labels for the training sentences
  # (space-separated; the trailing empty token is dropped by butlast/1).
  def train_label() do
    {:ok, dt} = File.read("rnn/train-label.exs")

    dt
    |> String.replace("\n", " ")
    |> String.split(" ")
    |> butlast()
    |> Enum.map(fn x -> String.to_integer(x) end)
  end

  # Labels as one-hot vectors, dimensioned by the maximum label value.
  def train_label_onehot() do
    ls = train_label()
    dim = Enum.max(ls)
    ls |> Enum.map(fn x -> DP.to_onehot(x, dim) end)
  end

  # Same as train_image/0 but for the held-out test sentences.
  def test_image() do
    {:ok, dt} = File.read("rnn/test.exs")
    dt |> String.replace("\n", "") |> preprocess()
  end

  # Same as train_label/0 but for the held-out test labels.
  def test_label() do
    {:ok, dt} = File.read("rnn/test-label.exs")

    dt
    |> String.replace("\n", " ")
    |> String.split(" ")
    |> butlast()
    |> Enum.map(fn x -> String.to_integer(x) end)
  end

  @doc """
  transform sentences to matrix. Each element is onehot_vector.

      iex(1)> NLP.preprocess("I love you.you love me?")
      [
        [
          [0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0],
          [0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0],
          [0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0],
          [0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0]
        ],
        [
          [0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0],
          [0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0],
          [0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0],
          [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0]
        ]
      ]
  """
  def preprocess(text) do
    {text1, dic, _} = preprocess1(text)
    # Pad every sentence with ID 0 up to the longest sentence length, then
    # one-hot each word ID with as many slots as there are dictionary words.
    maxlen = text1 |> Enum.map(fn x -> length(x) end) |> Enum.max()
    count = length(dic)

    text1
    |> Enum.map(fn x -> addzero(x, maxlen - length(x)) end)
    |> Enum.map(fn x -> Enum.map(x, fn y -> DP.to_onehot(y, count) end) end)
  end

  # Builds a list of n zeros (padding IDs).
  defp addzero1(0) do
    []
  end

  defp addzero1(n) do
    [0 | addzero1(n - 1)]
  end

  # Appends n zero-padding IDs to the end of the sentence.
  defp addzero(ls, n) do
    ls ++ addzero1(n)
  end

  @doc """
  generate corpus,dic of word->ID and dic of ID->word from sentences as text.

      iex(2)> NLP.preprocess1("I love you.you love me?")
      {[[1, 2, 3, 4], [3, 2, 5, 6]], [I: 1, love: 2, you: 3, ".": 4, me: 5, "?": 6],
       [{1, :I}, {2, :love}, {3, :you}, {4, :.}, {5, :me}, {6, :"?"}]}
  """
  def preprocess1(text) do
    # Build the word->ID dictionary over the whole text. Punctuation is
    # space-padded so it tokenizes as its own word.
    # NOTE(review): String.to_atom/1 on input text creates atoms
    # dynamically — unsafe for untrusted/unbounded input.
    dic =
      text
      |> String.replace(".", " . ")
      |> String.replace("?", " ? ")
      |> String.split(" ")
      |> butlast()
      |> Enum.map(fn x -> String.to_atom(x) end)
      |> word_to_id()

    # Split into sentences on the EOS marker injected after . and ?.
    text1 =
      text
      |> String.replace(".", ".EOS")
      |> String.replace("?", "?EOS")
      |> String.split("EOS")
      |> butlast()
      |> Enum.map(fn x -> preprocess2(x, dic) end)

    {text1, dic, id_to_word(dic)}
  end

  # Tokenizes one sentence and maps each word to its dictionary ID.
  def preprocess2(text, dic) do
    text1 =
      text
      |> String.replace(".", " .")
      |> String.replace("?", " ?")
      |> String.split(" ")
      |> Enum.map(fn x -> String.to_atom(x) end)

    corpus(text1, dic)
  end

  # Drops the last element of a list.
  def butlast(ls) do
    ls
    |> Enum.reverse()
    |> Enum.drop(1)
    |> Enum.reverse()
  end

  # Replaces each word atom with its ID from the dictionary.
  def corpus([], _) do
    []
  end

  def corpus([l | ls], dic) do
    [dic[l] | corpus(ls, dic)]
  end

  # Assigns sequential IDs (starting at 1) in first-seen order.
  def word_to_id(ls) do
    word_to_id1(ls, 1, [])
  end

  # Keyword.put/3 prepends, so the accumulated dict is reversed at the end
  # to restore first-seen order.
  def word_to_id1([], _, dic) do
    Enum.reverse(dic)
  end

  def word_to_id1([l | ls], n, dic) do
    if dic[l] != nil do
      word_to_id1(ls, n, dic)
    else
      word_to_id1(ls, n + 1, Keyword.put(dic, l, n))
    end
  end

  # Inverts a word->ID keyword list into {id, word} tuples.
  def id_to_word([]) do
    []
  end

  def id_to_word([{word, id} | ls]) do
    [{id, word} | id_to_word(ls)]
  end
end
|
lib/nlp.ex
| 0.574992 | 0.413832 |
nlp.ex
|
starcoder
|
defmodule Histogrex do
@moduledoc """
High Dynamic Range (HDR) Histogram allows the recording of values across
a configurable range at a configurable precision.
Storage requirement is fixed (and depends on how the histogram is configured).
All functions are fully executed within the calling process (not serialized
through a single process) as the data is stored within write-optimized ETS
table. Each recording consists of as single call to `:ets.update_counter`.
Read operations consiste of a single `:ets.lookup` and are subsequently
processed on that copy of the data (again, within the calling process).
The fist step involves creating a registry:
defmodule MyApp.Stats do
use Histogrex
histogrex :load_user, min: 1, max: 10_000_000, precision: 3
histogrex :db_save_settings, min: 1, max: 10_000, precision: 2
...
end
And then adding this module as a `worker` to your application's supervisor
tree:
worker(MyApp.Stats, [])
You can then record values and make queries:
alias MyApp.Stats
Stats.record!(:load_user, 233)
Stats.record!(:db_save_settings, 84)
Stats.mean(:load_user)
Stats.max(:db_save_settings)
Stats.total_count(:db_save_settings)
Stats.value_at_quantile(:load_user, 99.9)
"""
use Bitwise
@total_count_index 2
defstruct [
:name,
:registrar,
:bucket_count,
:counts_length,
:unit_magnitude,
:sub_bucket_mask,
:sub_bucket_count,
:sub_bucket_half_count,
:sub_bucket_half_count_magnitude,
:template
]
@type t :: %Histogrex{
name: atom | binary,
registrar: module,
bucket_count: pos_integer,
counts_length: pos_integer,
unit_magnitude: non_neg_integer,
sub_bucket_mask: non_neg_integer,
sub_bucket_count: non_neg_integer,
sub_bucket_half_count: non_neg_integer,
sub_bucket_half_count_magnitude: non_neg_integer,
template: nil | tuple
}
defmodule Iterator do
  @moduledoc false

  defstruct [
    :h,
    :total_count,
    :counts,
    bucket_index: 0,
    sub_bucket_index: -1,
    count_at_index: 0,
    count_to_index: 0,
    value_from_index: 0,
    highest_equivalent_value: 0
  ]

  @type t :: %Iterator{
          h: struct,
          total_count: non_neg_integer,
          bucket_index: non_neg_integer,
          sub_bucket_index: integer,
          count_at_index: non_neg_integer,
          count_to_index: non_neg_integer,
          value_from_index: non_neg_integer,
          highest_equivalent_value: non_neg_integer
        }

  @doc """
  Resets the iterator so that it can be reused. There shouldn't be a need
  to execute this directly.
  """
  @spec reset(t) :: t
  def reset(iterator) do
    %{
      iterator
      | bucket_index: 0,
        sub_bucket_index: -1,
        count_at_index: 0,
        count_to_index: 0,
        value_from_index: 0,
        highest_equivalent_value: 0
    }
  end

  @doc false
  @spec empty() :: t
  def empty() do
    # A zero-sized histogram configuration backing the "no data" iterator.
    empty_histogram = %Histogrex{
      bucket_count: 0,
      counts_length: 1,
      sub_bucket_mask: 0,
      unit_magnitude: 0,
      sub_bucket_half_count: 0,
      sub_bucket_half_count_magnitude: 0,
      sub_bucket_count: 0
    }

    %Iterator{total_count: 0, counts: 0, sub_bucket_index: 0, h: empty_histogram}
  end
end
@doc false
# Injects a registry API into the using module: a GenServer owning the
# storage table, plus thin delegating wrappers that resolve a metric name
# to its %Histogrex{} config (via the generated get_histogrex/1 clauses)
# and forward to the Histogrex module.
defmacro __using__(opts) do
  # Storage adapter: defaults to a named, public, write-concurrent ets set.
  # May also be :skip or a {module, func, opts} tuple (handled by
  # register_tables/0, generated in __before_compile__).
  adapter =
    Keyword.get(opts, :adapter, {:ets, [:set, :public, :named_table, write_concurrency: true]})

  quote location: :keep do
    use GenServer
    import Histogrex, only: [histogrex: 2, template: 2]
    alias Histogrex.Iterator, as: It

    @before_compile Histogrex
    @adapter unquote(adapter)

    # Accumulate the names/configs declared via the histogrex/2 macro.
    Module.register_attribute(__MODULE__, :histogrex_names, accumulate: true)
    Module.register_attribute(__MODULE__, :histogrex_registry, accumulate: true)

    @doc false
    def start_link() do
      {:ok, pid} = GenServer.start_link(__MODULE__, :ok)
      # Table creation happens inside the GenServer process so the ets
      # table is owned by (and lives as long as) that process.
      GenServer.call(pid, :register_tables)
      {:ok, pid}
    end

    def init(args) do
      {:ok, args}
    end

    @doc false
    # we can't inline register_tables() in here as we need the @histogrex_registry
    # module to get loaded with values. register_tables is created via the
    # @before_compile hook
    def handle_call(:register_tables, _from, state) do
      register_tables()
      {:reply, :ok, state}
    end

    ## Recording wrappers (metric declared via histogrex/2).

    @doc false
    @spec record!(atom, non_neg_integer) :: :ok | no_return
    def record!(metric, value) when is_number(value), do: record_n!(metric, value, 1)

    @doc false
    @spec record_n!(atom, non_neg_integer, non_neg_integer) :: :ok | no_return
    def record_n!(metric, value, n) when is_number(value) do
      Histogrex.record!(get_histogrex(metric), value, n)
    end

    @doc false
    @spec record(atom, non_neg_integer) :: :ok | {:error, binary}
    def record(metric, value) when is_number(value), do: record_n(metric, value, 1)

    @doc false
    @spec record_n(atom, non_neg_integer, non_neg_integer) :: :ok | {:error, binary}
    def record_n(metric, value, n) when is_number(value) do
      Histogrex.record(get_histogrex(metric), value, n)
    end

    ## Recording wrappers (dynamic metric under a template/2 declaration).

    @doc false
    @spec record!(atom, atom | binary, non_neg_integer) :: :ok | no_return
    def record!(template, metric, value), do: record_n!(template, metric, value, 1)

    @doc false
    @spec record_n!(atom, atom | binary, non_neg_integer, non_neg_integer) :: :ok | no_return
    def record_n!(template, metric, value, n) do
      Histogrex.record!(get_histogrex(template), metric, value, n)
    end

    @doc false
    @spec record(atom, atom | binary, non_neg_integer) :: :ok | {:error, binary}
    def record(template, metric, value), do: record_n(template, metric, value, 1)

    @doc false
    @spec record_n(atom, atom | binary, non_neg_integer, non_neg_integer) ::
            :ok | {:error, binary}
    def record_n(template, metric, value, n) do
      Histogrex.record(get_histogrex(template), metric, value, n)
    end

    ## Query wrappers; each also accepts a pre-built iterator (%It{}).

    @doc false
    @spec value_at_quantile(Iterator.t() | atom, number) :: non_neg_integer
    def value_at_quantile(%It{} = it, q) do
      Histogrex.value_at_quantile(it, q)
    end

    @doc false
    def value_at_quantile(metric, q) do
      Histogrex.value_at_quantile(get_histogrex(metric), q)
    end

    @doc false
    @spec value_at_quantile(atom, atom | binary, number) :: non_neg_integer
    def value_at_quantile(template, metric, q) do
      Histogrex.value_at_quantile(get_histogrex(template), metric, q)
    end

    @doc false
    @spec total_count(Iterator.t() | atom) :: non_neg_integer
    def total_count(%It{} = it), do: Histogrex.total_count(it)

    @doc false
    def total_count(metric), do: Histogrex.total_count(get_histogrex(metric))

    @doc false
    @spec total_count(atom, atom | binary) :: non_neg_integer
    def total_count(template, metric),
      do: Histogrex.total_count(get_histogrex(template), metric)

    @doc false
    @spec mean(atom) :: non_neg_integer
    def mean(%It{} = it), do: Histogrex.mean(it)

    @doc false
    def mean(metric), do: Histogrex.mean(get_histogrex(metric))

    @doc false
    @spec mean(atom, atom | binary) :: non_neg_integer
    def mean(template, metric), do: Histogrex.mean(get_histogrex(template), metric)

    @doc false
    @spec max(Iterator.t() | atom) :: non_neg_integer
    def max(%It{} = it), do: Histogrex.max(it)

    @doc false
    def max(metric), do: Histogrex.max(get_histogrex(metric))

    @doc false
    @spec max(atom, atom | binary) :: non_neg_integer
    def max(template, metric), do: Histogrex.max(get_histogrex(template), metric)

    @doc false
    @spec min(Iterator.t() | atom) :: non_neg_integer
    def min(%It{} = it), do: Histogrex.min(it)

    @doc false
    def min(metric), do: Histogrex.min(get_histogrex(metric))

    @doc false
    @spec min(atom, atom | binary) :: non_neg_integer
    def min(template, metric), do: Histogrex.min(get_histogrex(template), metric)

    @doc false
    @spec reset(Iterator.t() | atom) :: :ok
    def reset(%It{} = it), do: Histogrex.reset(it)

    @doc false
    def reset(metric), do: Histogrex.reset(get_histogrex(metric))

    @doc false
    @spec reset(atom, atom | binary) :: :ok
    def reset(template, metric), do: Histogrex.reset(get_histogrex(template), metric)

    @doc false
    @spec iterator(atom) :: Iterator.t()
    def iterator(metric) do
      Histogrex.iterator(get_histogrex(metric))
    end

    @doc false
    @spec iterator(atom, atom | binary) :: Iterator.t()
    def iterator(template, metric) do
      Histogrex.iterator(get_histogrex(template), metric)
    end

    @doc false
    @spec delete(atom) :: :ok
    def delete(metric), do: Histogrex.delete(get_histogrex(metric))

    @doc false
    @spec delete(atom, atom | binary) :: :ok
    def delete(template, metric), do: Histogrex.delete(get_histogrex(template), metric)

    @doc false
    @spec reduce(any, (Iterator.t(), any -> any)) :: any
    def reduce(acc, fun), do: Histogrex.reduce(__MODULE__, acc, fun)
  end
end
@doc false
# Runs after the using module has accumulated all histogrex/template
# declarations, so @histogrex_registry is complete by the time
# register_tables/0 is generated.
defmacro __before_compile__(_env) do
  quote do
    # Names of every histogram declared in the registry module.
    @spec get_names() :: [atom]
    def get_names(), do: @histogrex_names

    # The storage adapter tag (first element of the @adapter tuple).
    @spec get_adapter() :: atom
    def get_adapter(), do: elem(@adapter, 0)

    # Creates the backing table (unless :skip) and zeroes a row for every
    # registered histogram.
    defp register_tables() do
      case @adapter do
        :skip ->
          :ok

        {:ets, opts} ->
          :ets.new(__MODULE__, opts)

        {module, func, opts} ->
          apply(module, func, opts)
      end

      for h <- @histogrex_registry do
        Histogrex.reset(h)
      end
    end

    # Catch-all fallback compiled AFTER all the generated get_histogrex/1
    # clauses: unknown metric names resolve to {:error, :undefined}.
    def get_histogrex(_), do: {:error, :undefined}
  end
end
@doc """
registers the histogram

A `min`, `max` and `precision` must be supplied. `min` and `max` represent
the minimal and maximal expected values. For example, if you're looking to
measure how long a database call took, you could specify: `min: 1, max: 10_000`
and provide time in millisecond, thus allowing you to capture values from 1ms
to 10sec.

`min` must be greater than 0.
`max` must be greater than `min`.
`precision` must be between 1 and 5 (inclusive).

## Examples

    histogrex :user_list, min: 1, max: 10000000, precision: 3
"""
defmacro histogrex(name, opts) do
  quote location: :keep do
    # `@unquote(name)(...)` defines a module attribute NAMED AFTER the
    # histogram and stores the compiled %Histogrex{} config in it, so it
    # can be read back at compile time via `@unquote(name)()`.
    @unquote(name)(
      Histogrex.new(
        unquote(name),
        __MODULE__,
        unquote(opts)[:min],
        unquote(opts)[:max],
        unquote(opts)[:precision] || 3,
        false
      )
    )

    # Track the name and config for get_names/0 and register_tables/0.
    @histogrex_names unquote(name)
    @histogrex_registry @unquote(name)()

    # Direct lookup clause for this specific metric name (a catch-all
    # clause is appended by __before_compile__).
    def get_histogrex(unquote(name)), do: @unquote(name)()
  end
end
# Registers a histogram TEMPLATE: same options as histogrex/2, but built
# with template: true so Histogrex.new/6 pre-computes a zeroed row that
# dynamic metrics can be created from at record time. Note that unlike
# histogrex/2 this does NOT add to @histogrex_names/@histogrex_registry —
# presumably because templated metrics are materialized lazily; confirm.
defmacro template(name, opts) do
  quote location: :keep do
    # Same attribute-named-after-the-histogram trick as histogrex/2.
    @unquote(name)(
      Histogrex.new(
        unquote(name),
        __MODULE__,
        unquote(opts)[:min],
        unquote(opts)[:max],
        unquote(opts)[:precision] || 3,
        true
      )
    )

    def get_histogrex(unquote(name)), do: @unquote(name)()
  end
end
@doc """
Creates a new histogrex configuration struct. Note that this only computes
the bucketing parameters; it does not create the underlying ets
table/entries. There should be no need to call this directly — use the
`histogrex` macro instead.
"""
@spec new(binary | atom, module, pos_integer, pos_integer, 1..5, boolean) :: t
def new(name, registrar, min, max, precision \\ 3, template \\ false)
    when min > 0 and max > min and precision in 1..5 do
  # Number of distinct values that must be resolvable at full precision.
  single_unit_resolution = 2 * :math.pow(10, precision)
  count_magnitude = round(Float.ceil(:math.log2(single_unit_resolution)))

  half_count_magnitude =
    if count_magnitude < 1 do
      1
    else
      count_magnitude - 1
    end

  # Cannot call Kernel.max/2 here: the `max` parameter shadows it.
  unit_magnitude =
    case round(Float.floor(:math.log2(min))) do
      negative when negative < 0 -> 0
      magnitude -> magnitude
    end

  sub_bucket_count = round(:math.pow(2, half_count_magnitude + 1))
  sub_bucket_half_count = round(sub_bucket_count / 2)
  sub_bucket_mask = bsl(sub_bucket_count - 1, unit_magnitude)
  bucket_count = calculate_bucket_count(bsl(sub_bucket_count, unit_magnitude), max, 1)
  counts_length = round((bucket_count + 1) * (sub_bucket_count / 2))

  # Templates pre-build a zeroed row that dynamic metrics copy on first use.
  template_row =
    if template do
      create_row(name, name, counts_length)
    else
      nil
    end

  %__MODULE__{
    name: name,
    template: template_row,
    registrar: registrar,
    bucket_count: bucket_count,
    counts_length: counts_length,
    unit_magnitude: unit_magnitude,
    sub_bucket_mask: sub_bucket_mask,
    sub_bucket_count: sub_bucket_count,
    sub_bucket_half_count: sub_bucket_half_count,
    sub_bucket_half_count_magnitude: half_count_magnitude
  }
end
@doc """
Same as `record/3` but raises on error.
"""
@spec record!(t, pos_integer, pos_integer) :: :ok | no_return
def record!(h, value, n) do
  # Pass :ok through; raise with the error message otherwise.
  with {:error, message} <- record(h, value, n) do
    raise message
  end
end
@doc """
Records the `value` `n` times (`n` defaults to 1 in the registry wrapper).
Returns `:ok` on success or `{:error, message}` on failure — a value larger
than the `max` the histogram was created with is rejected. Usually called
through the `record/3` of your custom registry rather than directly.
"""
@spec record(t | {:error, any}, pos_integer, pos_integer) :: :ok | {:error, any}
def record({:error, _} = err, _value, _n), do: err

def record(h, value, n) do
  index = get_value_index(h, value)

  if index < 0 or h.counts_length <= index do
    {:error, "value it outside of range"}
  else
    # Single atomic ets update: bump the total count (slot 3) and the
    # bucket counter for this value in one call.
    h.registrar.get_adapter().update_counter(h.registrar, h.name, [{3, n}, {index + 4, n}])
    :ok
  end
end
@doc """
Same as `record/4` (templated recording) but raises on error.
"""
@spec record!(t, atom | binary, pos_integer, pos_integer) :: :ok | no_return
def record!(template, metric, value, n) do
  # Pass :ok through; raise with the error message otherwise.
  with {:error, message} <- record(template, metric, value, n) do
    raise message
  end
end
@doc """
Records the `value` `n` times against a dynamic `metric`, creating the
metric's row from the histogram template if it doesn't already exist.
Returns `:ok` on success or `{:error, message}` on failure — a value larger
than the configured `max` is rejected. Usually called through the
registry's `record/3` wrapper.
"""
@spec record(t | {:error, any}, atom | binary, pos_integer, pos_integer) :: :ok | {:error, any}
def record({:error, _} = err, _metric, _value, _n), do: err

def record(h, metric, value, n) do
  index = get_value_index(h, value)

  if index < 0 or h.counts_length <= index do
    {:error, "value it outside of range"}
  else
    # The template row is passed so the adapter can create the metric's
    # row on first use before applying the counter updates.
    h.registrar.get_adapter().update_counter(
      h.registrar,
      metric,
      [{3, n}, {index + 4, n}],
      h.template
    )

    :ok
  end
end
@doc """
Gets the value at the requested quantile. The quantile must be greater than 0
and less than or equal to 100. It can be a float.

Also accepts an iterator: when doing multiple calculations, it is slightly
more efficient to first create and then re-use an iterator (plus the values
will consistently be calculated based on the same data). Iterators are
automatically reset before each call.
"""
# Merged the two per-clause @doc attributes into one: multiple @doc on
# clauses of the same function/arity triggers a compiler warning and only
# one of them survives.
@spec value_at_quantile(t | Iterator.t(), float) :: float
def value_at_quantile(%Histogrex{} = h, q) when q > 0 and q <= 100 do
  do_value_at_quantile(iterator(h), q)
end

def value_at_quantile(%Iterator{} = it, q) when q > 0 and q <= 100 do
  do_value_at_quantile(Iterator.reset(it), q)
end
@doc """
Gets the value at the requested quantile for the templated histogram
"""
@spec value_at_quantile(t, atom, float) :: float
def value_at_quantile(%Histogrex{} = h, metric, q) when q > 0 and q <= 100 do
  # iterator/2 returns an empty iterator for unknown metrics, so this is
  # safe to call before the dynamic metric has recorded anything.
  do_value_at_quantile(iterator(h, metric), q)
end
# Walks the iterator until the cumulative count reaches the target implied
# by quantile `q`, then returns the highest value equivalent to that slot.
defp do_value_at_quantile(it, q) do
  target_count = round(Float.floor(q / 100 * it.total_count + 0.5))

  Enum.reduce_while(it, 0, fn entry, seen ->
    seen = seen + entry.count_at_index

    if seen >= target_count do
      {:halt, highest_equivalent_value(entry.h, entry.value_from_index)}
    else
      {:cont, seen}
    end
  end)
end
@doc """
Returns the mean value.

Accepts a histogram or an iterator; an iterator is reset before use.
Returns 0 when given an `{:error, _}` tuple.
"""
# Merged the per-clause @doc attributes: multiple @doc on clauses of the
# same function/arity triggers a compiler warning.
@spec mean(t | Iterator.t()) :: float
def mean(%Histogrex{} = h), do: do_mean(iterator(h))
def mean(%Iterator{} = it), do: do_mean(Iterator.reset(it))
def mean({:error, _}), do: 0

@doc """
Returns the mean value from a templated histogram.

Returns 0 when given an `{:error, _}` tuple.
"""
def mean(%Histogrex{} = h, metric), do: do_mean(iterator(h, metric))
def mean({:error, _}, _metric), do: 0
# Computes the mean by summing count * median-equivalent-value per slot and
# dividing by the total number of samples. Returns 0 for an empty histogram.
defp do_mean(it) do
  if it.total_count == 0 do
    0
  else
    sum =
      Enum.reduce(it, 0, fn entry, acc ->
        case entry.count_at_index do
          0 -> acc
          n -> acc + n * median_equivalent_value(entry.h, entry.value_from_index)
        end
      end)

    sum / it.total_count
  end
end
@doc """
Resets the histogram to 0 values. Note that the histogram is a fixed-size, so
calling this won't free any memory. It is useful for testing.

When given an iterator, the iterator's underlying histogram row is reset; it
is safe to reset an iterator during a `reduce`.
"""
# Merged the two per-clause @doc attributes for reset/1: multiple @doc on
# clauses of the same function/arity triggers a compiler warning.
@spec reset(t | Iterator.t()) :: :ok
def reset(%Histogrex{} = h) do
  h.registrar.get_adapter().insert(h.registrar, create_row(h.name, nil, h.counts_length))
  :ok
end

def reset(%Iterator{} = it) do
  h = it.h
  # cannot use it.h.name as this could be a dynamic metric and we don't want
  # the template name
  name = elem(it.counts, 0)
  h.registrar.get_adapter().insert(h.registrar, create_row(name, nil, h.counts_length))
  :ok
end

@doc """
Resets the histogram to 0 values for the templated histogram.
Note that the histogram is a fixed-size, so calling this won't free any memory.
"""
@spec reset(t, atom | binary) :: :ok
def reset(%Histogrex{} = h, metric) do
  h.registrar.get_adapter().insert(h.registrar, create_row(metric, h.name, h.counts_length))
  :ok
end
@doc """
Deletes the histogram. The histogram can no longer be used.
"""
@spec delete(t) :: :ok
def delete(%Histogrex{} = h), do: delete(h, h.name)

@doc """
Deletes the histogram. Since this histogram was dynamically created through
a template, you can safely continue using it.
"""
@spec delete(t, atom | binary) :: :ok
def delete(%Histogrex{} = h, metric) do
  # Removes the metric's counts row from the adapter's storage.
  h.registrar.get_adapter().delete(h.registrar, metric)
  :ok
end
# Builds a zeroed storage row: {name, template, total_count, bucket_0, ...}.
# +1 for the total_count that we'll store at the start.
defp create_row(name, template, count) do
  List.to_tuple([name, template | List.duplicate(0, count + 1)])
end
@doc """
Get the total number of recorded values. This is O(1).

Accepts a histogram or an iterator. Returns 0 when given an `{:error, _}`
tuple.
"""
# Merged the per-clause @doc attributes (multiple @doc on clauses of the
# same function/arity triggers a compiler warning).
@spec total_count(t | Iterator.t()) :: non_neg_integer
def total_count(%Histogrex{} = h) do
  elem(get_counts(h), @total_count_index)
end

def total_count(%Iterator{} = it) do
  it.total_count
end

def total_count({:error, _}), do: 0

@doc """
Get the total number of recorded values for a templated histogram. This is O(1).

Returns 0 if the metric does not exist, or when given an `{:error, _}` tuple.
"""
# The original /2 doc said "from an iterator", a copy-paste error: this
# clause takes a templated histogram plus a metric name.
@spec total_count(t, atom | binary) :: non_neg_integer
def total_count(%Histogrex{} = h, metric) do
  case get_counts(h, metric) do
    nil -> 0
    counts -> elem(counts, @total_count_index)
  end
end

def total_count({:error, _}, _metric), do: 0
@doc """
Gets the approximate maximum value recorded.

Accepts a histogram or an iterator; an iterator is reset before use.
Returns 0 when given an `{:error, _}` tuple.
"""
# Merged the per-clause @doc attributes (multiple @doc on clauses of the
# same function/arity triggers a compiler warning).
@spec max(t | Iterator.t()) :: non_neg_integer
def max(%Histogrex{} = h), do: do_max(iterator(h))
def max(%Iterator{} = it), do: do_max(Iterator.reset(it))
def max({:error, _}), do: 0

@doc """
Returns the approximate maximum value from a templated histogram.

Returns 0 when given an `{:error, _}` tuple.
"""
def max(%Histogrex{} = h, metric), do: do_max(iterator(h, metric))
def max({:error, _}, _metric), do: 0
# The last non-empty slot seen while iterating carries the maximum.
defp do_max(it) do
  candidate =
    Enum.reduce(it, 0, fn entry, acc ->
      if entry.count_at_index == 0, do: acc, else: entry.highest_equivalent_value
    end)

  highest_equivalent_value(it.h, candidate)
end
@doc """
Gets the approximate minimum value recorded.

Accepts a histogram or an iterator; an iterator is reset before use.
Returns 0 when given an `{:error, _}` tuple.
"""
# Merged the per-clause @doc attributes (multiple @doc on clauses of the
# same function/arity triggers a compiler warning).
@spec min(t | Iterator.t()) :: non_neg_integer
def min(%Histogrex{} = h), do: do_min(iterator(h))
def min(%Iterator{} = it), do: do_min(Iterator.reset(it))
def min({:error, _}), do: 0

@doc """
Returns the approximate minimum value from a templated histogram.

Returns 0 when given an `{:error, _}` tuple.
"""
def min(%Histogrex{} = h, metric), do: do_min(iterator(h, metric))
def min({:error, _}, _metric), do: 0
# Halts on the first non-empty slot; that slot's value is the minimum.
defp do_min(it) do
  first =
    Enum.reduce_while(it, 0, fn entry, acc ->
      if entry.count_at_index != 0 && acc == 0 do
        {:halt, entry.highest_equivalent_value}
      else
        {:cont, acc}
      end
    end)

  lowest_equivalent_value(it.h, first)
end
@doc false
@spec iterator(t | {:error, any}) :: Iterator.t()
def iterator({:error, _}), do: Iterator.empty()

def iterator(h) do
  counts = get_counts(h)
  # The total count lives inside the counts row itself.
  %Iterator{h: h, counts: counts, total_count: elem(counts, @total_count_index)}
end

@doc false
@spec iterator(t | {:error, any}, atom | binary) :: Iterator.t()
def iterator({:error, _}, _metric), do: Iterator.empty()

def iterator(h, metric) do
  # An unknown dynamic metric yields an empty iterator rather than an error.
  case get_counts(h, metric) do
    nil -> Iterator.empty()
    counts -> %Iterator{h: h, counts: counts, total_count: elem(counts, @total_count_index)}
  end
end
@doc """
Reduce all of a registry's histograms
"""
@spec reduce(module, any, (Iterator.t(), any -> any)) :: any
def reduce(module, acc, fun) do
  f = fn counts, acc ->
    # Row layout: element 0 is the metric name, element 1 the template name
    # (nil for statically-defined metrics).
    name = elem(counts, 0)

    h =
      case elem(counts, 1) do
        nil -> module.get_histogrex(name)
        template -> module.get_histogrex(template)
      end

    it = %Iterator{h: h, counts: counts, total_count: elem(counts, @total_count_index)}
    fun.({name, it}, acc)
  end

  # The registry module name is also used as the ETS table name here.
  :ets.foldl(f, acc, module)
end
# Fetches the raw counts row for the metric, or nil when it isn't stored.
defp get_counts(h), do: get_counts(h, h.name)

defp get_counts(h, metric) do
  adapter = h.registrar.get_adapter()

  case adapter.lookup(h.registrar, metric) do
    [counts] -> counts
    [] -> nil
  end
end
# Doubles the trackable range until it covers `max`, counting one bucket per
# doubling.
defp calculate_bucket_count(smallest_untrackable_value, max, bucket_count) do
  if smallest_untrackable_value < max do
    calculate_bucket_count(bsl(smallest_untrackable_value, 1), max, bucket_count + 1)
  else
    bucket_count
  end
end
# Translates a raw recorded value into its slot index within the counts row.
defp get_value_index(h, value) do
  {bucket, sub} = get_bucket_indexes(h, value)
  get_count_index(h, bucket, sub)
end
@doc false
# Index of the bucket's first slot plus the sub-bucket's offset within it.
def get_count_index(h, bucket_index, sub_bucket_index) do
  base = bsl(bucket_index + 1, h.sub_bucket_half_count_magnitude)
  base + (sub_bucket_index - h.sub_bucket_half_count)
end
@doc false
# Reconstructs the lowest raw value represented by the given indexes.
def value_from_index(h, bucket_index, sub_bucket_index) do
  shift = bucket_index + h.unit_magnitude
  bsl(sub_bucket_index, shift)
end
@doc false
# Largest raw value that maps to the same slot as `value`.
def highest_equivalent_value(h, value) do
  next_non_equivalent_value(h, value) - 1
end

# Smallest raw value that maps to the same slot as `value`.
def lowest_equivalent_value(h, value) do
  {bucket_index, sub_bucket_index} = get_bucket_indexes(h, value)
  lowest_equivalent_value(h, bucket_index, sub_bucket_index)
end

def lowest_equivalent_value(h, bucket_index, sub_bucket_index) do
  value_from_index(h, bucket_index, sub_bucket_index)
end
# First raw value that falls into the slot after the one containing `value`.
defp next_non_equivalent_value(h, value) do
  {bucket_index, sub_bucket_index} = get_bucket_indexes(h, value)

  lowest_equivalent_value(h, bucket_index, sub_bucket_index) +
    size_of_equivalent_value_range(h, bucket_index, sub_bucket_index)
end

# Midpoint of the range of raw values sharing `value`'s slot (lowest + half
# the slot width).
defp median_equivalent_value(h, value) do
  {bucket_index, sub_bucket_index} = get_bucket_indexes(h, value)

  lowest_equivalent_value(h, bucket_index, sub_bucket_index) +
    bsr(size_of_equivalent_value_range(h, bucket_index, sub_bucket_index), 1)
end
# Width, in raw values, of the slot addressed by the given indexes.
defp size_of_equivalent_value_range(h, bucket_index, sub_bucket_index) do
  adjusted_bucket_index =
    if sub_bucket_index >= h.sub_bucket_count,
      do: bucket_index + 1,
      else: bucket_index

  bsl(1, h.unit_magnitude + adjusted_bucket_index)
end
# Maps a raw value to its {bucket_index, sub_bucket_index} coordinates.
# (Dropped the `@doc false` that was here: @doc attributes on private
# functions are always discarded and trigger a compiler warning.)
defp get_bucket_indexes(h, value) do
  # OR-ing with the sub-bucket mask enforces a minimum bit length so that
  # all small values land in bucket 0.
  ceiling = bit_length(bor(value, h.sub_bucket_mask), 0)
  bucket_index = ceiling - h.unit_magnitude - (h.sub_bucket_half_count_magnitude + 1)
  sub_bucket_index = bsr(value, bucket_index + h.unit_magnitude)
  {bucket_index, sub_bucket_index}
end
# Number of bits needed to represent `value` (i.e. the position of its
# highest set bit), accumulated into `n`. Implemented as a shift-and-add
# binary search: each step shifts only when `value` is large enough to
# guarantee it needs at least that many more bits.
defp bit_length(value, n) when value >= 32768 do
  # 16-bit chunks handled by recursion so arbitrarily large values work.
  bit_length(bsr(value, 16), n + 16)
end

defp bit_length(value, n) do
  {value, n} =
    case value >= 128 do
      true -> {bsr(value, 8), n + 8}
      false -> {value, n}
    end

  {value, n} =
    case value >= 8 do
      true -> {bsr(value, 4), n + 4}
      false -> {value, n}
    end

  {value, n} =
    case value >= 2 do
      true -> {bsr(value, 2), n + 2}
      false -> {value, n}
    end

  # One final bit if anything remains (value is now 0 or 1).
  case value == 1 do
    true -> n + 1
    false -> n
  end
end
end
defimpl Enumerable, for: Histogrex.Iterator do
  use Bitwise

  # Total count of recorded samples; O(1) since it is cached on the iterator.
  # The Enumerable contract requires `{:ok, count}` (or `{:error, module}`);
  # the previous bare integer return would crash `Enum.count/1`.
  def count(it), do: {:ok, it.total_count}

  # Membership testing and slicing fall back to the default reduce-based
  # implementations by returning `{:error, __MODULE__}`.
  def member?(_it, _value), do: {:error, __MODULE__}
  def slice(_it), do: {:error, __MODULE__}

  def reduce(_it, {:halt, acc}, _f), do: {:halted, acc}
  def reduce(it, {:suspend, acc}, f), do: {:suspended, acc, &reduce(it, &1, f)}

  def reduce(it, {:cont, acc}, f) do
    # Stop as soon as every recorded sample has been accounted for.
    case it.count_to_index >= it.total_count do
      true -> {:done, acc}
      false -> do_reduce(it, acc, f)
    end
  end

  # Advances the iterator one slot, recomputes its derived fields, and feeds
  # the updated iterator to the reducer. (No @doc here: @doc attributes on
  # private functions are discarded with a compiler warning.)
  defp do_reduce(it, acc, f) do
    h = it.h
    sub_bucket_index = it.sub_bucket_index + 1

    {it, bucket_index, sub_bucket_index} =
      case sub_bucket_index >= h.sub_bucket_count do
        true ->
          # Wrapped past the end of the current bucket: move to the next
          # bucket, whose usable slots start at the half count.
          bucket_index = it.bucket_index + 1
          sub_bucket_index = h.sub_bucket_half_count
          it = %{it | bucket_index: bucket_index, sub_bucket_index: sub_bucket_index}
          {it, bucket_index, sub_bucket_index}

        false ->
          it = %{it | sub_bucket_index: sub_bucket_index}
          {it, it.bucket_index, sub_bucket_index}
      end

    case bucket_index >= h.bucket_count do
      true ->
        {:done, acc}

      false ->
        count_at_index = count_at_index(it, bucket_index, sub_bucket_index)
        value_from_index = Histogrex.value_from_index(h, bucket_index, sub_bucket_index)

        it = %{
          it
          | count_at_index: count_at_index,
            value_from_index: value_from_index,
            count_to_index: it.count_to_index + count_at_index,
            highest_equivalent_value: Histogrex.highest_equivalent_value(h, value_from_index)
        }

        reduce(it, f.(it, acc), f)
    end
  end

  defp count_at_index(it, bucket_index, sub_bucket_index) do
    index = Histogrex.get_count_index(it.h, bucket_index, sub_bucket_index)
    # 0 is the name
    # 1 is the template (or nil for static metrics)
    # 2 is the total_count
    # the real count buckets start at 3
    elem(it.counts, index + 3)
  end
end
|
lib/histogrex.ex
| 0.922883 | 0.683353 |
histogrex.ex
|
starcoder
|
defmodule Jerboa.Format.Body.Attribute.ErrorCode do
  @moduledoc """
  ERROR-CODE attribute as defined in [STUN RFC](https://tools.ietf.org/html/rfc5389#section-15.6)
  """

  alias Jerboa.Format.Body.Attribute.{Decoder,Encoder}
  alias Jerboa.Format.ErrorCode.{FormatError, LengthError}
  alias Jerboa.Format.Meta

  defstruct [:code, :name, reason: ""]

  @typedoc """
  Represents error code of error response

  Struct fields
  * `:code` - integer representation of an error
  * `:name` - atom representation of an error
  * `:reason`
  """
  @type t :: %__MODULE__{
    code: code,
    name: name,
    reason: String.t
  }

  @type code :: 300 | 400 | 401 | 420 | 438 | 500
              | 403 | 437 | 441 | 442 | 486 | 508
  @type name :: :try_alternate
              | :bad_request
              | :unauthorized
              | :unknown_attribute
              | :stale_nonce
              | :server_error
              | :forbidden
              | :allocation_mismatch
              | :wrong_credentials
              | :unsupported_protocol
              | :allocation_quota_reached
              | :insufficient_capacity

  # Codes and names are kept in the same order: the `for` comprehension at
  # the bottom zips them into lookup clauses pairwise.
  @valid_codes [300, 400, 401, 420, 438, 500,
                403, 437, 441, 442, 486, 508]
  @valid_names [:try_alternate,
                :bad_request,
                :unauthorized,
                :unknown_attribute,
                :stale_nonce,
                :server_error,
                :forbidden,
                :allocation_mismatch,
                :wrong_credentials,
                :unsupported_protocol,
                :allocation_quota_reached,
                :insufficient_capacity]

  # Maximum accepted length (in characters) of the reason phrase.
  @max_reason_length 128

  @spec new(name | code) :: t
  def new(code_or_name)
  def new(code) when code in @valid_codes do
    %__MODULE__{code: code, name: code_to_name(code)}
  end
  def new(name) when name in @valid_names do
    %__MODULE__{name: name, code: name_to_code(name)}
  end

  defimpl Encoder do
    alias Jerboa.Format.Body.Attribute.ErrorCode

    # STUN attribute type for ERROR-CODE.
    @type_code 0x0009

    @spec type_code(ErrorCode.t) :: integer
    def type_code(_), do: @type_code

    @spec encode(ErrorCode.t, Meta.t) :: {Meta.t, binary}
    def encode(attr, meta), do: {meta, ErrorCode.encode(attr)}
  end

  defimpl Decoder do
    alias Jerboa.Format.Body.Attribute.ErrorCode

    @spec decode(ErrorCode.t, value :: binary, Meta.t)
      :: {:ok, Meta.t, ErrorCode.t} | {:error, struct}
    def decode(_, value, meta), do: ErrorCode.decode(value, meta)
  end

  @doc false
  def encode(%__MODULE__{code: code, name: name, reason: reason}) do
    # Prefer the explicit code; fall back to deriving it from the name.
    error_code = code || name_to_code(name)
    if code_valid?(error_code) do
      encode(error_code, reason)
    else
      raise ArgumentError, "invalid or missing error code or name " <>
        "while encoding ERROR-CODE attribute"
    end
  end

  # Wire format: 21 zero bits, 3-bit class (hundreds digit), 8-bit number,
  # followed by the UTF-8 reason phrase.
  defp encode(error_code, reason) do
    if reason_valid?(reason) do
      error_class = div error_code, 100
      error_number = rem error_code, 100
      <<0::21, error_class::3, error_number::8>> <> reason
    else
      raise ArgumentError, "ERROR-CODE reason must be UTF-8 encoded binary"
    end
  end

  @doc false
  def decode(<<0::21, error_class::3, error_number::8, reason::binary>>, meta) do
    code = code(error_class, error_number)
    if reason_valid?(reason) && code_valid?(code) do
      {:ok, meta,
        %__MODULE__{
          code: code,
          name: code_to_name(code),
          reason: reason
        }}
    else
      {:error, FormatError.exception(code: code,
        reason: reason)}
    end
  end
  def decode(bin, _) do
    {:error, LengthError.exception(length: byte_size(bin))}
  end

  # Compile-time generated clauses mapping each known code <-> name.
  for {code, name} <- List.zip([@valid_codes, @valid_names]) do
    defp code_to_name(unquote(code)), do: unquote(name)
    defp name_to_code(unquote(name)), do: unquote(code)
    defp code_valid?(unquote(code)), do: true
  end
  defp code_to_name(_), do: :error
  defp name_to_code(_), do: :error
  defp code_valid?(_), do: false

  defp reason_valid?(reason) do
    String.valid?(reason) && String.length(reason) <= @max_reason_length
  end

  defp code(class, number), do: class * 100 + number

  @doc false
  def max_reason_length, do: @max_reason_length

  @doc false
  def valid_codes, do: @valid_codes

  @doc false
  def valid_names, do: @valid_names
end
|
lib/jerboa/format/body/attribute/error_code.ex
| 0.788787 | 0.460046 |
error_code.ex
|
starcoder
|
defmodule Recurly.Webhooks do
  @moduledoc """
  Module responsible for parsing webhooks: https://dev.recurly.com/page/webhooks
  Each webhook has its own type. These types are returned from `Recurly.Webhooks.parse/1` respectively.
  * [BillingInfoUpdatedNotification](https://dev.recurly.com/page/webhooks#section-updated-billing-information)
  * [CanceledAccountNotification](https://dev.recurly.com/page/webhooks#section-closed-account)
  * [CanceledSubscriptionNotification](https://dev.recurly.com/page/webhooks#section-canceled-subscription)
  * [ClosedInvoiceNotification](https://dev.recurly.com/page/webhooks#section-closed-invoice)
  * [ExpiredSubscriptionNotification](https://dev.recurly.com/page/webhooks#section-expired-subscription)
  * [FailedPaymentNotification](https://dev.recurly.com/page/webhooks#section-failed-payment-only-for-ach-payments-)
  * [NewAccountNotification](https://dev.recurly.com/page/webhooks#section-new-account)
  * [NewInvoiceNotification](https://dev.recurly.com/page/webhooks#invoice-notifications)
  * [NewSubscriptionNotification](https://dev.recurly.com/page/webhooks#section-new-subscription)
  * [PastDueInvoiceNotification](https://dev.recurly.com/page/webhooks#section-past-due-invoichttps://dev.recurly.com/page/webhooks#invoice-notifications)
  * [ProcessingInvoiceNotification](https://dev.recurly.com/page/webhooks#section-processing-invoice-automatic-only-for-ach-payments-)
  * [ProcessingPaymentNotification](https://dev.recurly.com/page/webhooks#section-processing-payment-only-for-ach-payments-)
  * [ReactivatedAccountNotification](https://dev.recurly.com/page/webhooks#section-reactivated-account)
  * [RenewedSubscriptionNotification](https://dev.recurly.com/page/webhooks#section-renewed-subscription)
  * [ScheduledPaymentNotification](https://dev.recurly.com/page/webhooks#section-scheduled-payment-only-for-ach-payments-)
  * [SuccessfulPaymentNotification](https://dev.recurly.com/page/webhooks#section-successful-payment-only-for-ach-payments-)
  * [SuccessfulRefundNotification](https://dev.recurly.com/page/webhooks#section-successful-refund)
  * [UpdatedSubscriptionNotification](https://dev.recurly.com/page/webhooks#section-updated-subscription)
  * [VoidPaymentNotification](https://dev.recurly.com/page/webhooks#section-void-payment)
  """
  import SweetXml
  alias Recurly.XML

  @doc """
  Parses an xml document containing a webhook

  ## Parameters

  - `xml_doc` String webhook xml

  ## Examples

  ```
  xml_doc = \"""
  <?xml version="1.0" encoding="UTF-8"?>
  <canceled_account_notification>
    <account>
      <account_code>1</account_code>
      <username nil="true"></username>
      <email><EMAIL></email>
      <first_name>Verena</first_name>
      <last_name>Example</last_name>
      <company_name nil="true"></company_name>
    </account>
  </canceled_account_notification>
  \"""

  alias Recurly.Webhooks

  notification = Webhooks.parse(xml_doc)

  # It may be helpful to pattern match against the possible types
  case Webhooks.parse(xml_doc) do
    %Webhooks.CanceledAccountNotification{} = n -> IO.inspect(n.account)
    %Webhooks.CanceledSubscriptionNotification{} = n -> IO.inspect(n.subscription)
    # ... etc
    _ -> IO.puts("not handled")
  end
  ```
  """
  def parse(xml_doc) do
    # Determine the notification type from the root element name, map it to
    # its empty resource struct, then parse the full document into it.
    xml_doc
    |> notification_name
    |> resource
    |> XML.Parser.parse(xml_doc, false)
  end

  @doc """
  Gives you the name of the notification from the xml

  ## Parameters

  - `xml_doc` String webhook xml

  ## Examples

  ```
  xml_doc = \"""
  <?xml version="1.0" encoding="UTF-8"?>
  <canceled_account_notification>
    <account>
      <account_code>1</account_code>
      <username nil="true"></username>
      <email><EMAIL></email>
      <first_name>Verena</first_name>
      <last_name>Example</last_name>
      <company_name nil="true"></company_name>
    </account>
  </canceled_account_notification>
  \"""

  'canceled_account_notification' = Recurly.Webhooks.notification_name(xml_doc)
  ```
  """
  def notification_name(xml_doc) do
    # name(.) returns the root element's tag name.
    xpath(xml_doc, ~x"name(.)")
  end

  # One lightweight schema module per webhook payload type. Each simply
  # declares which nested resources the payload carries.

  defmodule BillingInfoUpdatedNotification do
    @moduledoc false
    use Recurly.Resource
    schema :billing_info_updated_notification do
      field :account, Recurly.Account
    end
  end

  defmodule CanceledAccountNotification do
    @moduledoc false
    use Recurly.Resource
    schema :canceled_account_notification do
      field :account, Recurly.Account
    end
  end

  defmodule CanceledSubscriptionNotification do
    @moduledoc false
    use Recurly.Resource
    schema :canceled_subscription_notification do
      field :account, Recurly.Account
      field :subscription, Recurly.Subscription
    end
  end

  defmodule ClosedInvoiceNotification do
    @moduledoc false
    use Recurly.Resource
    schema :closed_invoice_notification do
      field :account, Recurly.Account
      field :invoice, Recurly.Invoice
    end
  end

  defmodule ExpiredSubscriptionNotification do
    @moduledoc false
    use Recurly.Resource
    schema :expired_subscription_notification do
      field :account, Recurly.Account
      field :subscription, Recurly.Subscription
    end
  end

  defmodule FailedPaymentNotification do
    @moduledoc false
    use Recurly.Resource
    schema :failed_payment_notification do
      field :account, Recurly.Account
      field :transaction, Recurly.Transaction
    end
  end

  defmodule NewAccountNotification do
    @moduledoc false
    use Recurly.Resource
    schema :new_account_notification do
      field :account, Recurly.Account
    end
  end

  defmodule NewInvoiceNotification do
    @moduledoc false
    use Recurly.Resource
    schema :new_invoice_notification do
      field :account, Recurly.Account
      field :invoice, Recurly.Invoice
    end
  end

  defmodule NewSubscriptionNotification do
    @moduledoc false
    use Recurly.Resource
    schema :new_subscription_notification do
      field :account, Recurly.Account
      field :subscription, Recurly.Subscription
    end
  end

  defmodule PastDueInvoiceNotification do
    @moduledoc false
    use Recurly.Resource
    schema :past_due_invoice_notification do
      field :account, Recurly.Account
      field :invoice, Recurly.Invoice
    end
  end

  defmodule ProcessingInvoiceNotification do
    @moduledoc false
    use Recurly.Resource
    schema :processing_invoice_notification do
      field :account, Recurly.Account
      field :invoice, Recurly.Invoice
    end
  end

  defmodule ProcessingPaymentNotification do
    @moduledoc false
    use Recurly.Resource
    schema :processing_payment_notification do
      field :account, Recurly.Account
      field :transaction, Recurly.Transaction
    end
  end

  defmodule ReactivatedAccountNotification do
    @moduledoc false
    use Recurly.Resource
    schema :reactivated_account_notification do
      field :account, Recurly.Account
      field :subscription, Recurly.Subscription
    end
  end

  defmodule RenewedSubscriptionNotification do
    @moduledoc false
    use Recurly.Resource
    schema :renewed_subscription_notification do
      field :account, Recurly.Account
      field :subscription, Recurly.Subscription
    end
  end

  defmodule ScheduledPaymentNotification do
    @moduledoc false
    use Recurly.Resource
    schema :scheduled_payment_notification do
      field :account, Recurly.Account
      field :transaction, Recurly.Transaction
    end
  end

  defmodule SuccessfulPaymentNotification do
    @moduledoc false
    use Recurly.Resource
    schema :successful_payment_notification do
      field :account, Recurly.Account
      field :transaction, Recurly.Transaction
    end
  end

  defmodule SuccessfulRefundNotification do
    @moduledoc false
    use Recurly.Resource
    schema :successful_refund_notification do
      field :account, Recurly.Account
      field :transaction, Recurly.Transaction
    end
  end

  defmodule UpdatedSubscriptionNotification do
    @moduledoc false
    use Recurly.Resource
    schema :updated_subscription_notification do
      field :account, Recurly.Account
      field :subscription, Recurly.Subscription
    end
  end

  defmodule VoidPaymentNotification do
    @moduledoc false
    use Recurly.Resource
    schema :void_payment_notification do
      field :account, Recurly.Account
      field :transaction, Recurly.Transaction
    end
  end

  # Maps the root element name (a charlist, as returned by notification_name/1)
  # to an empty struct of the matching schema module.
  defp resource('billing_info_updated_notification'), do: %BillingInfoUpdatedNotification{}
  defp resource('canceled_account_notification'), do: %CanceledAccountNotification{}
  defp resource('canceled_subscription_notification'), do: %CanceledSubscriptionNotification{}
  defp resource('closed_invoice_notification'), do: %ClosedInvoiceNotification{}
  defp resource('expired_subscription_notification'), do: %ExpiredSubscriptionNotification{}
  defp resource('failed_payment_notification'), do: %FailedPaymentNotification{}
  defp resource('new_account_notification'), do: %NewAccountNotification{}
  defp resource('new_invoice_notification'), do: %NewInvoiceNotification{}
  defp resource('new_subscription_notification'), do: %NewSubscriptionNotification{}
  defp resource('past_due_invoice_notification'), do: %PastDueInvoiceNotification{}
  defp resource('processing_invoice_notification'), do: %ProcessingInvoiceNotification{}
  defp resource('processing_payment_notification'), do: %ProcessingPaymentNotification{}
  defp resource('reactivated_account_notification'), do: %ReactivatedAccountNotification{}
  defp resource('renewed_subscription_notification'), do: %RenewedSubscriptionNotification{}
  defp resource('scheduled_payment_notification'), do: %ScheduledPaymentNotification{}
  defp resource('successful_payment_notification'), do: %SuccessfulPaymentNotification{}
  defp resource('successful_refund_notification'), do: %SuccessfulRefundNotification{}
  defp resource('updated_subscription_notification'), do: %UpdatedSubscriptionNotification{}
  defp resource('void_payment_notification'), do: %VoidPaymentNotification{}
  defp resource(attr_name) do
    raise ArgumentError, message: "xml attribute #{attr_name} is not supported"
  end
end
|
lib/recurly/webhooks.ex
| 0.720663 | 0.614625 |
webhooks.ex
|
starcoder
|
defmodule EctoTestDSL.Parse.Pnode.Group do
  use EctoTestDSL.Drink.Me
  use T.Drink.AndParse
  use T.Drink.Assertively

  # Collapses a keyword list of {name, node} pairs into a map. Repeated names
  # are merged via the Pnode.Mergeable protocol; a repeat whose value doesn't
  # implement the protocol, or whose struct differs, is rejected.
  def squeeze_into_map(kws) do
    reducer = fn {name, value}, acc ->
      case {Map.get(acc, name), Pnode.Mergeable.impl_for(value)} do
        {nil, _} ->
          Map.put(acc, name, value)
        {previously, nil} ->
          elaborate_flunk("`#{inspect name}` may not be repeated",
            left: previously,
            right: value)
        {previously, _} ->
          elaborate_assert(previously.__struct__ == value.__struct__,
            "You've repeated `#{inspect name}`, but with incompatible values",
            left: previously, right: value)
          Map.put(acc, name, Pnode.Mergeable.merge(previously, value))
      end
    end
    Enum.reduce(kws, %{}, reducer)
  end

  # Runs Pnode.Substitutable.substitute/2 on every node that supports it,
  # resolving references to previously-defined examples.
  def parse_time_substitutions(example, previous_examples) do
    update_for_protocol(example, Pnode.Substitutable,
      &(Pnode.Substitutable.substitute(&1, previous_examples)))
  end

  # Gathers the eens from every EENable node and stores them under :eens.
  def collect_eens(example) do
    eens = accumulated_eens(example)
    Map.put(example, :eens, eens)
  end

  # Produces the exported form: drop Deletable nodes, then export the rest.
  def export(example) do
    example
    |> delete_keys_with_protocol(Pnode.Deletable)
    |> update_for_protocol(Pnode.Exportable, &Pnode.Exportable.export/1)
  end

  defp delete_keys_with_protocol(example, protocol),
    do: Map.drop(example, keys_for_protocol(example, protocol))

  # ----------------------------------------------------------------------------

  # Keys whose values implement the given protocol.
  defp keys_for_protocol(example, protocol) do
    example
    |> KeyVal.filter_by_value(&protocol.impl_for/1)
    |> Enum.map(fn {key, _value} -> key end)
  end

  # Applies `f` to the value of each key whose value implements `protocol`.
  defp update_for_protocol(example, protocol, f) do
    reducer = fn key, acc ->
      Map.update!(acc, key, f)
    end
    keys_for_protocol(example, protocol)
    |> Enum.reduce(example, reducer)
  end

  defp accumulated_eens(example) do
    getter = fn key ->
      Map.get(example, key) |> Pnode.EENable.eens
    end
    keys_for_protocol(example, Pnode.EENable)
    |> Enum.flat_map(getter)
  end
end
|
lib/10_parse/nodes/pnode_group.ex
| 0.549641 | 0.49884 |
pnode_group.ex
|
starcoder
|
defmodule Ash.Changeset.ManagedRelationshipHelpers do
  @moduledoc """
  Tools for introspecting managed relationships.

  Extensions can use this to look at an argument that will be passed
  to a `manage_relationship` change and determine what their behavior
  should be. For example, AshAdmin uses these to find out what kind of
  nested form it should offer for each argument that manages a relationship.
  """

  # Normalizes the manage_relationship options: every shorthand (bare atom or
  # partial tuple) is expanded into its fully-qualified tuple form, filling in
  # the resource's primary actions where actions were not given explicitly.
  def sanitize_opts(relationship, opts) do
    [
      on_no_match: :ignore,
      on_missing: :ignore,
      on_match: :ignore,
      on_lookup: :ignore
    ]
    |> Keyword.merge(opts)
    |> Keyword.update!(:on_no_match, fn
      :create when relationship.type == :many_to_many ->
        action = Ash.Resource.Info.primary_action!(relationship.destination, :create)
        # NOTE(review): on_no_match/on_missing use `relationship.through_destination`
        # while on_match/on_lookup use `relationship.through` — confirm which
        # field is correct; the inconsistency looks suspicious.
        join_action = Ash.Resource.Info.primary_action!(relationship.through_destination, :create)
        {:create, action.name, join_action.name, []}
      {:create, action_name} when relationship.type == :many_to_many ->
        join_action = Ash.Resource.Info.primary_action!(relationship.through_destination, :create)
        {:create, action_name, join_action.name, []}
      :create ->
        action = Ash.Resource.Info.primary_action!(relationship.destination, :create)
        {:create, action.name}
      other ->
        other
    end)
    |> Keyword.update!(:on_missing, fn
      :destroy when relationship.type == :many_to_many ->
        action = Ash.Resource.Info.primary_action!(relationship.destination, :destroy)
        join_action =
          Ash.Resource.Info.primary_action!(relationship.through_destination, :destroy)
        {:destroy, action.name, join_action.name, []}
      {:destroy, action_name} when relationship.type == :many_to_many ->
        join_action =
          Ash.Resource.Info.primary_action!(relationship.through_destination, :destroy)
        {:destroy, action_name, join_action.name, []}
      :destroy ->
        action = Ash.Resource.Info.primary_action!(relationship.destination, :destroy)
        {:destroy, action.name}
      :unrelate ->
        {:unrelate, nil}
      other ->
        other
    end)
    |> Keyword.update!(:on_match, fn
      :update when relationship.type == :many_to_many ->
        update = Ash.Resource.Info.primary_action!(relationship.destination, :update)
        join_update = Ash.Resource.Info.primary_action!(relationship.through, :update)
        {:update, update.name, join_update.name, []}
      {:update, update} when relationship.type == :many_to_many ->
        join_update = Ash.Resource.Info.primary_action!(relationship.through, :update)
        {:update, update, join_update.name, []}
      {:update, update, join_update} when relationship.type == :many_to_many ->
        {:update, update, join_update, []}
      :update ->
        action = Ash.Resource.Info.primary_action!(relationship.destination, :update)
        {:update, action.name}
      :unrelate ->
        {:unrelate, nil}
      other ->
        other
    end)
    |> Keyword.update!(:on_lookup, fn
      operation
      when relationship.type == :many_to_many and
           operation in [:relate, :relate_and_update] ->
        read = Ash.Resource.Info.primary_action(relationship.destination, :read)
        create = Ash.Resource.Info.primary_action(relationship.through, :create)
        {operation, create.name, read.name, []}
      operation
      when relationship.type in [:has_many, :has_one] and
           operation in [:relate, :relate_and_update] ->
        read = Ash.Resource.Info.primary_action(relationship.destination, :read)
        update = Ash.Resource.Info.primary_action(relationship.destination, :update)
        {operation, update.name, read.name}
      operation when operation in [:relate, :relate_and_update] ->
        # belongs_to case: the *source* record is updated to point at the
        # related record.
        read = Ash.Resource.Info.primary_action(relationship.destination, :read)
        update = Ash.Resource.Info.primary_action(relationship.source, :update)
        {operation, update.name, read.name}
      :ignore ->
        :ignore
    end)
  end

  # True when the options may look up existing related records.
  def could_lookup?(opts) do
    opts[:on_lookup] != :ignore
  end

  # True when the current related records must be loaded before managing:
  # loading can only be skipped when nothing is destroyed on missing and the
  # options either only create or only ignore.
  def must_load?(opts) do
    only_creates? = unwrap(opts[:on_match]) == :create && unwrap(opts[:on_no_match]) == :create
    only_ignores? = opts[:on_no_match] == :ignore && opts[:on_match] == :ignore
    can_skip_load? = opts[:on_missing] == :ignore && (only_creates? || only_ignores?)

    not can_skip_load?
  end

  # Extracts the operation tag from either a bare atom or an expanded tuple.
  defp unwrap(value) when is_atom(value), do: true
  defp unwrap(tuple) when is_tuple(tuple), do: elem(tuple, 0)
  defp unwrap(value), do: value
end
|
lib/ash/changeset/managed_relationship_helpers.ex
| 0.786254 | 0.424352 |
managed_relationship_helpers.ex
|
starcoder
|
defmodule WeChat do
  @moduledoc """
  WeChat SDK for Elixir

  ## Defining a `Client` module

  ### Official account (default):

  ```elixir
  defmodule YourApp.WeChatAppCodeName do
    @moduledoc "CodeName"
    use WeChat,
      appid: "wx-appid",
      appsecret: "appsecret"
  end
  ```

  ### Mini program:

  ```elixir
  defmodule YourApp.WeChatAppCodeName do
    @moduledoc "CodeName"
    use WeChat,
      app_type: :mini_program,
      appid: "wx-appid",
      appsecret: "appsecret"
  end
  ```

  ### Third-party platform application:

  ```elixir
  defmodule YourApp.WeChatAppCodeName do
    @moduledoc "CodeName"
    use WeChat,
      by_component?: true,
      app_type: :official_account | :mini_program, # defaults to :official_account
      appid: "wx-appid",
      component_appid: "wx-third-appid", # third-party platform appid
  end
  ```

  ## Options

  See `t:options/0`.

  ## Calling the API

  Two calling styles are supported:

  - through the `client` module:
    `YourApp.WeChatAppCodeName.Material.batch_get_material(:image, 2)`
  - calling the underlying module directly:
    `WeChat.Material.batch_get_material(YourApp.WeChatAppCodeName, :image, 2)`
  """
  import WeChat.Utils, only: [doc_link_prefix: 0]
  alias WeChat.Storage.Cache

  @typedoc """
  OpenID - identifies an ordinary user, unique per official account

  The encrypted WeChat id: a user's `OpenID` is unique for a given official
  account, and the same user has a different `OpenID` for each account.

  [Docs Link](#{doc_link_prefix()}/doc/offiaccount/User_Management/Get_users_basic_information_UnionID.html){:target="_blank"}
  """
  @type openid :: String.t()
  @type openid_list :: [openid]

  @typedoc """
  UnionID - a user id that is unique across applications

  For the same user, the `UnionID` is identical across all applications that
  belong to the same WeChat open platform account.

  [Docs Link](#{doc_link_prefix()}/doc/offiaccount/User_Management/Get_users_basic_information_UnionID.html){:target="_blank"}
  """
  @type unionid :: String.t()

  @typedoc """
  Server role

  `:client`: default, refreshes the `token` itself
  `:hub`: hub (central control) server, refreshes the `token`
  `:hub_client`: logic server, fetches the `token` from the hub
  """
  @type server_role :: :client | :hub | :hub_client

  @typedoc "Whether the client is developed through a third-party platform"
  @type by_component? :: boolean

  @typedoc """
  Application type of the `client`

  - `:official_account`: official account
  - `:mini_program`: mini program
  """
  @type app_type :: :official_account | :mini_program

  @typedoc "Official account / mini program application id"
  @type appid :: String.t()
  @typedoc "Official account / mini program application code name"
  @type code_name :: String.t()
  @typedoc "Application secret"
  @type appsecret :: String.t()
  @typedoc "Third-party platform application id"
  @type component_appid :: String.t()
  @typedoc "Third-party platform application secret"
  @type component_appsecret :: String.t()

  @typedoc """
  The `token` value from the server configuration, used to verify signatures
  on incoming messages
  """
  @type token :: String.t()

  @typedoc "Error code"
  @type err_code :: non_neg_integer
  @typedoc "Error message"
  @type err_msg :: String.t()

  @typep env :: String.t()
  @typep url :: String.t()

  @typedoc """
  Options

  ## Option details

  - `appid`: `t:appid/0` - required
  - `app_type`: `t:app_type/0`
  - `by_component?`: `t:by_component?/0`
  - `server_role`: `t:server_role/0`
  - `storage`: `t:WeChat.Storage.Adapter.t()`
  - `appsecret`: `t:appsecret/0` - only effective when `by_component?` is `false`
  - `component_appid`: `t:component_appid/0` - only effective when `by_component?` is `true`
  - `component_appsecret`: `t:component_appsecret/0` - only effective when `by_component?` is `true`
  - `encoding_aes_key`: `t:WeChat.ServerMessage.Encryptor.encoding_aes_key/0` - at compile time `encoding_aes_key` is automatically converted into `aes_key`
  - `token`: `t:token/0`
  - `requester`: the request client - `t:module/0`

  ## Defaults:

  - `server_role`: `:client`
  - `by_component?`: `false`
  - `app_type`: `:official_account`
  - `storage`: `WeChat.Storage.File`
  - `requester`: `WeChat.Requester`
  - all remaining options are optional
  """
  @type options :: [
          server_role: server_role,
          by_component?: by_component?,
          app_type: app_type,
          storage: WeChat.Storage.Adapter.t(),
          appid: appid,
          appsecret: appsecret,
          component_appid: component_appid,
          component_appsecret: component_appsecret,
          encoding_aes_key: WeChat.ServerMessage.Encryptor.encoding_aes_key(),
          token: token,
          requester: module
        ]
  @type client :: module()
  @type requester :: module()
  @type response :: Tesla.Env.result()

  @doc false
  defmacro __using__(options \\ []) do
    quote do
      use WeChat.Builder, unquote(options)
    end
  end

  @doc """
  Get the `client` registered for the given `appid`
  """
  @spec get_client_by_appid(appid) :: nil | client
  defdelegate get_client_by_appid(appid), to: WeChat.Storage.Cache, as: :search_client

  @doc "Dynamically build a client module at runtime"
  @spec build_client(client, options) :: {:ok, client}
  def build_client(client, options) do
    with {:module, module, _binary, _term} <-
           Module.create(
             client,
             quote do
               @moduledoc false
               use WeChat.Builder, unquote(options)
             end,
             Macro.Env.location(__ENV__)
           ) do
      {:ok, module}
    end
  end

  # hub_url

  @spec set_hub_url(client, url) :: true
  def set_hub_url(client, url) when is_binary(url) do
    Cache.put_cache(client.appid(), :hub_url, url)
  end

  @spec get_hub_url(client) :: nil | url
  def get_hub_url(client) do
    Cache.get_cache(client.appid(), :hub_url)
  end

  # oauth2_env_url

  @spec set_oauth2_env_url(client, env, url) :: true
  def set_oauth2_env_url(client, env, url) when is_binary(env) and is_binary(url) do
    Cache.put_cache(client.appid(), {:oauth2_env, env}, url)
  end

  @spec get_oauth2_env_url(client, env) :: nil | url
  def get_oauth2_env_url(client, env) do
    Cache.get_cache(client.appid(), {:oauth2_env, env})
  end
end
|
lib/wechat.ex
| 0.641759 | 0.4133 |
wechat.ex
|
starcoder
|
defmodule ElixirLS.Utils.OutputDevice do
  @moduledoc """
  Intercepts IO request messages and forwards them to the Output server to be sent as events to
  the IDE.

  In order to send console output to Visual Studio Code, the debug adapter needs to send events
  using the usual wire protocol. In order to intercept the debugged code's output, we replace the
  registered processes `:user` and `:standard_error` and the process's group leader with instances
  of this server. When it receives a message containing output, it sends an event via the `Output`
  server with the correct category ("stdout" or "stderr").
  """
  use GenServer

  ## Client API

  # `device` is the underlying IO device that non-output requests are
  # forwarded to; `output_fn` receives each chunk of output as a binary.
  def start_link(device, output_fn, opts \\ []) do
    GenServer.start_link(__MODULE__, {device, output_fn}, opts)
  end

  ## Server callbacks

  @impl GenServer
  def init({device, output_fn}) do
    {:ok, {device, output_fn}}
  end

  # put_chars request with an explicit encoding: emit the characters.
  @impl GenServer
  def handle_info({:io_request, from, reply_as, {:put_chars, _encoding, characters}}, s) do
    output(from, reply_as, characters, s)
    {:noreply, s}
  end

  # put_chars request without an encoding (latin-1 form of the protocol).
  @impl GenServer
  def handle_info({:io_request, from, reply_as, {:put_chars, characters}}, s) do
    output(from, reply_as, characters, s)
    {:noreply, s}
  end

  # put_chars where the data is produced by applying module/func/args.
  @impl GenServer
  def handle_info({:io_request, from, reply_as, {:put_chars, _encoding, module, func, args}}, s) do
    output(from, reply_as, apply(module, func, args), s)
    {:noreply, s}
  end

  @impl GenServer
  def handle_info({:io_request, from, reply_as, {:put_chars, module, func, args}}, s) do
    output(from, reply_as, apply(module, func, args), s)
    {:noreply, s}
  end

  # A batched :requests list is handled by re-dispatching each sub-request
  # through handle_info with the :io_request wrapper restored.
  # NOTE(review): each output-producing sub-request sends its own :io_reply,
  # so one :requests batch can generate several replies — the Erlang I/O
  # protocol expects a single reply for the whole batch. Confirm whether any
  # real client is affected before changing this.
  @impl GenServer
  def handle_info({:io_request, from, reply_as, {:requests, reqs}}, s) do
    for req <- reqs do
      handle_info({:io_request, from, reply_as, req}, s)
    end

    {:noreply, s}
  end

  # Any other message (get_geometry, set_opts, etc.) goes directly to original device
  @impl GenServer
  def handle_info(msg, {device, _} = s) do
    send(device, msg)
    {:noreply, s}
  end

  ## Helpers

  # Emits the output via output_fn and acknowledges the request with :ok.
  defp output(from, reply_as, characters, {_, output_fn}) do
    output_fn.(IO.iodata_to_binary(characters))
    send(from, {:io_reply, reply_as, :ok})
  end
end
|
apps/elixir_ls_utils/lib/output_device.ex
| 0.741112 | 0.437223 |
output_device.ex
|
starcoder
|
defmodule Unleash do
  @moduledoc """
  If you have no plans on extending the client, then `Unleash` will be the main
  usage point of the library. Upon starting your app, the client is registered
  with the unleash server, and two `GenServer`s are started, one to fetch and
  poll for feature flags from the server, and one to send metrics.

  Configuring `:disable_client` to `true` disables both servers as well as
  registration, while configuring `:disable_metrics` to `true` disables only
  the metrics `GenServer`.
  """
  use Application

  require Logger

  alias Unleash.Config
  alias Unleash.Feature
  alias Unleash.Metrics
  alias Unleash.Repo
  alias Unleash.Variant

  @typedoc """
  The context needed for a few activation strategies. Check their documentation
  for the required key.

  * `:user_id` is the ID of the user interacting _with your system_, can be any
    `String.t()`
  * `session_id` is the ID of the current session _in your system_, can be any
    `String.t()`
  * `remote_address` is the address of the user interacting _with your system_,
    can be any `String.t()`
  """
  @type context :: %{
          user_id: String.t(),
          session_id: String.t(),
          remote_address: String.t()
        }

  @doc """
  Aliased to `enabled?/2`
  """
  @spec is_enabled?(atom() | String.t(), boolean) :: boolean
  def is_enabled?(feature, default) when is_boolean(default),
    do: enabled?(feature, default)

  @doc """
  Aliased to `enabled?/3`
  """
  @spec is_enabled?(atom() | String.t(), map(), boolean) :: boolean
  def is_enabled?(feature, context \\ %{}, default \\ false),
    do: enabled?(feature, context, default)

  @doc """
  Checks if the given feature is enabled. Checks as though an empty context was
  passed in.

  ## Examples

      iex> Unleash.enabled?(:my_feature, false)
      false

      iex> Unleash.enabled?(:my_feature, true)
      true
  """
  @spec enabled?(atom() | String.t(), boolean) :: boolean
  def enabled?(feature, default) when is_boolean(default),
    do: enabled?(feature, %{}, default)

  @doc """
  Checks if the given feature is enabled.

  If `:disable_client` is `true`, simply returns the given `default`.

  If `:disable_metrics` is `true`, nothing is logged about the given toggle.

  ## Examples

      iex> Unleash.enabled?(:my_feature)
      false

      iex> Unleash.enabled?(:my_feature, context)
      false

      iex> Unleash.enabled?(:my_feature, context, true)
      false
  """
  @spec enabled?(atom() | String.t(), map(), boolean) :: boolean
  def enabled?(feature, context \\ %{}, default \\ false) do
    if Config.disable_client() do
      Logger.warn(fn ->
        "Client is disabled, it will only return default: #{default}"
      end)

      default
    else
      # Resolve the feature, evaluate its strategies, and record the result
      # as a {feature, enabled?} metric. Unknown features fall back to
      # `default`.
      feature
      |> Repo.get_feature()
      |> case do
        nil ->
          {feature, default}

        feature ->
          {feature, Feature.enabled?(feature, Map.put(context, :feature_toggle, feature.name))}
      end
      |> Metrics.add_metric()
    end
  end

  @doc """
  Returns a variant for the given name.

  If `:disable_client` is `true`, returns the fallback.

  A [variant](https://unleash.github.io/docs/beta_features#feature-toggle-variants)
  allows for more complicated toggling than a simple `true`/`false`, instead
  returning one of the configured variants depending on whether or not there
  are any overrides for a given context value as well as factoring in the
  weights for the various weight options.

  ## Examples

      iex> Unleash.get_variant(:test)
      %{enabled: true, name: "test", payload: %{...}}

      iex> Unleash.get_variant(:test)
      %{enabled: false, name: "disabled"}
  """
  @spec get_variant(atom() | String.t(), map(), Variant.result()) :: Variant.result()
  def get_variant(name, context \\ %{}, fallback \\ %{name: "disabled", enabled: false}) do
    if Config.disable_client() do
      Logger.warn(fn ->
        "Client is disabled, it will only return the fallback: #{Jason.encode!(fallback)}"
      end)

      fallback
    else
      name
      |> Repo.get_feature()
      |> case do
        nil -> fallback
        feature -> Variant.select_variant(feature, context)
      end
    end
  end

  @doc false
  def start(_type, _args) do
    # Each child is paired with a "disabled?" flag; disabled children are
    # filtered out before the supervisor starts. The client is only
    # registered with the server when at least one child will run.
    children =
      [
        {Repo, Config.disable_client()},
        {{Metrics, name: Metrics}, Config.disable_client() or Config.disable_metrics()}
      ]
      |> Enum.filter(fn {_m, not_enabled} -> not not_enabled end)
      |> Enum.map(fn {module, _e} -> module end)

    unless children == [] do
      Config.client().register_client()
    end

    Supervisor.start_link(children, strategy: :one_for_one)
  end
end
|
lib/unleash.ex
| 0.915828 | 0.410638 |
unleash.ex
|
starcoder
|
defmodule CFSync.RichText do
  @moduledoc """
  RichText recursive struct

  RichText in Contentful is implemented as a tree of nodes.
  All nodes share a common structure and some of them have specific properties.
  Here I chosed to represent all nodes with a single struct for simplicity.
  """
  alias CFSync.Link

  defstruct type: :document,
            content: [],
            value: nil,
            marks: [],
            target: nil,
            uri: nil,
            colspan: 0,
            rowspan: 0

  @type marks ::
          :bold
          | :italic
          | :underline
          | :code

  @type node_types ::
          :document
          | :paragraph
          | :heading_1
          | :heading_2
          | :heading_3
          | :heading_4
          | :heading_5
          | :heading_6
          | :ol_list
          | :ul_list
          | :list_item
          | :hr
          | :quote
          | :embedded_entry
          | :embedded_asset
          | :table
          | :table_row
          | :table_cell
          | :table_header_cell
          | :hyperlink
          | :entry_hyperlink
          | :asset_hyperlink
          | :embedded_entry_inline
          | :text

  # Fix: `value` defaults to nil and is only set for nodes carrying a
  # "value" key, so its type is `nil | binary()` (consistent with `target`
  # and `uri`).
  @type t :: %__MODULE__{
          type: node_types(),
          content: list(t()),
          value: nil | binary(),
          marks: list(marks()),
          target: nil | Link.t(),
          uri: nil | binary(),
          colspan: integer(),
          rowspan: integer()
        }

  @doc """
  Builds a RichText tree from Contentful JSON data, or an empty document
  for `:empty`.

  Unknown node or mark types raise (no fallback clause) so malformed data
  fails loudly.
  """
  @spec new(:empty | map) :: t()
  def new(data) when is_map(data) do
    create(data)
    |> maybe_add_content(data)
    |> maybe_add_value(data)
    |> maybe_add_marks(data)
    |> maybe_add_target(data)
    |> maybe_add_uri(data)
    |> maybe_add_colspan(data)
    |> maybe_add_rowspan(data)
  end

  def new(:empty) do
    create(%{
      "nodeType" => "document",
      "content" => []
    })
  end

  defp create(data), do: %__MODULE__{type: type(data)}

  # Recursively builds child nodes.
  defp maybe_add_content(node, %{"content" => content}) when is_list(content),
    do: %__MODULE__{node | content: Enum.map(content, &new/1)}

  defp maybe_add_content(node, _data), do: node

  defp maybe_add_target(node, %{"data" => %{"target" => link_data}}),
    do: %__MODULE__{node | target: Link.new(link_data)}

  defp maybe_add_target(node, _data), do: node

  defp maybe_add_uri(node, %{"data" => %{"uri" => uri}}) when is_binary(uri),
    do: %__MODULE__{node | uri: uri}

  defp maybe_add_uri(node, _data), do: node

  defp maybe_add_colspan(node, %{"data" => %{"colspan" => colspan}}) when is_integer(colspan),
    do: %__MODULE__{node | colspan: colspan}

  defp maybe_add_colspan(node, _data), do: node

  defp maybe_add_rowspan(node, %{"data" => %{"rowspan" => rowspan}}) when is_integer(rowspan),
    do: %__MODULE__{node | rowspan: rowspan}

  defp maybe_add_rowspan(node, _data), do: node

  # Values are HTML-escaped at build time so they are safe to render as-is.
  defp maybe_add_value(node, %{"value" => v}) when is_binary(v) do
    value =
      v
      |> Phoenix.HTML.html_escape()
      |> Phoenix.HTML.safe_to_string()

    %__MODULE__{node | value: value}
  end

  defp maybe_add_value(node, _data), do: node

  defp maybe_add_marks(node, %{"marks" => marks}) when is_list(marks),
    do: %__MODULE__{node | marks: Enum.map(marks, &mark/1)}

  defp maybe_add_marks(node, _data), do: node

  defp mark(%{"type" => "bold"}), do: :bold
  defp mark(%{"type" => "italic"}), do: :italic
  defp mark(%{"type" => "underline"}), do: :underline
  defp mark(%{"type" => "code"}), do: :code

  defp type(%{"nodeType" => "document"}), do: :document
  defp type(%{"nodeType" => "paragraph"}), do: :paragraph
  defp type(%{"nodeType" => "heading-1"}), do: :heading_1
  defp type(%{"nodeType" => "heading-2"}), do: :heading_2
  defp type(%{"nodeType" => "heading-3"}), do: :heading_3
  defp type(%{"nodeType" => "heading-4"}), do: :heading_4
  defp type(%{"nodeType" => "heading-5"}), do: :heading_5
  defp type(%{"nodeType" => "heading-6"}), do: :heading_6
  defp type(%{"nodeType" => "ordered-list"}), do: :ol_list
  defp type(%{"nodeType" => "unordered-list"}), do: :ul_list
  defp type(%{"nodeType" => "list-item"}), do: :list_item
  defp type(%{"nodeType" => "hr"}), do: :hr
  defp type(%{"nodeType" => "blockquote"}), do: :quote
  defp type(%{"nodeType" => "embedded-entry-block"}), do: :embedded_entry
  defp type(%{"nodeType" => "embedded-asset-block"}), do: :embedded_asset
  defp type(%{"nodeType" => "table"}), do: :table
  defp type(%{"nodeType" => "table-row"}), do: :table_row
  defp type(%{"nodeType" => "table-cell"}), do: :table_cell
  defp type(%{"nodeType" => "table-header-cell"}), do: :table_header_cell
  defp type(%{"nodeType" => "hyperlink"}), do: :hyperlink
  defp type(%{"nodeType" => "entry-hyperlink"}), do: :entry_hyperlink
  defp type(%{"nodeType" => "asset-hyperlink"}), do: :asset_hyperlink
  defp type(%{"nodeType" => "embedded-entry-inline"}), do: :embedded_entry_inline
  defp type(%{"nodeType" => "text"}), do: :text
end
|
lib/cf_sync/rich_text.ex
| 0.673299 | 0.486636 |
rich_text.ex
|
starcoder
|
defmodule BinFormat.FieldType.Boolean do
  defstruct name: nil, default: nil, size: nil, options: nil

  @moduledoc """
  Boolean field type for defformat.
  """

  @doc """
  Add a Boolean field to the format structure in defformat.

  A boolean field is stored in the binary as an integer of `size` bits.
  When decoding, `0` becomes `false` and any other integer becomes `true`;
  when encoding, `false` is written as `0` and `true` as `1`.

  `name` is the struct key, `default` its initial value, `size` the bit
  width (or `:undefined` for no size qualifier), and `options` extra
  binary-pattern options applied alongside `integer`.
  """
  defmacro boolean(name, default, size, options \\ []) do
    # Build the field struct at compile time...
    field = quote do
      %BinFormat.FieldType.Boolean{name: unquote(name),
        default: unquote(default), size: unquote(size),
        options: unquote(options)}
    end

    # ...and register it with the format being defined.
    quote do
      BinFormat.FieldType.Util.add_field(unquote(field), __ENV__)
    end
  end
end
defimpl BinFormat.Field, for: BinFormat.FieldType.Boolean do
  alias BinFormat.FieldType.Boolean

  # Standard struct field definition: `name` with `default`.
  def struct_definition(%Boolean{name: name, default: default}, _module) do
    BinFormat.FieldType.Util.standard_struct_def(name, default)
  end

  # Pattern used when building the struct from a decoded binary:
  # the raw integer is mapped to false (0) / true (any other integer).
  def struct_build_pattern(%Boolean{name: name}, module, prefix) do
    full_name = String.to_atom(prefix <> Atom.to_string(name))
    var_name = Macro.var(full_name, module)

    pattern = quote do
      {unquote(name),
        case unquote(var_name) do
          0 -> false
          x when is_integer(x) -> true
        end
      }
    end

    {:ok, pattern}
  end

  def struct_match_pattern(%Boolean{name: name}, module, prefix) do
    BinFormat.FieldType.Util.standard_struct_pattern(name, module, prefix)
  end

  # Pattern used when building the binary from the struct: the boolean is
  # converted to 0/1 and emitted as an integer segment with the configured
  # options and optional size qualifier.
  def bin_build_pattern(%Boolean{name: name, size: size, options: options}, module, prefix) do
    # :integer is always the first segment option; the rest come from the
    # field definition.
    option_vars = Enum.map([:integer | options], fn(opt) -> Macro.var(opt, __MODULE__) end)

    pattern_options = option_vars ++ case size do
      :undefined -> []
      _ -> [quote do size(unquote(size)) end]
    end

    full_name = String.to_atom(prefix <> Atom.to_string(name))
    var_name = Macro.var(full_name, module)

    case_block = quote do
      case unquote(var_name) do
        false -> 0
        true -> 1
      end
    end

    # Joins the segment options with `-`, e.g. `integer-little-size(8)`.
    pattern = quote do
      unquote(case_block) :: unquote(Enum.reduce(pattern_options, fn(rhs, lhs) ->
        quote do
          unquote(lhs) - unquote(rhs)
        end
      end))
    end

    {:ok, pattern}
  end

  # Matching from the binary uses the standard integer pattern; the
  # boolean conversion happens in struct_build_pattern/3.
  def bin_match_pattern(%Boolean{name: name, size: size, options: options}, module, prefix) do
    BinFormat.FieldType.Util.standard_bin_pattern(name, :integer, size, options, module, prefix)
  end
end
|
lib/bin_format/field_type/boolean.ex
| 0.641647 | 0.627167 |
boolean.ex
|
starcoder
|
defmodule Magnet do
  @moduledoc """
  `Magnet` struct which represents Magnet URI.

  See: https://en.wikipedia.org/wiki/Magnet_URI_scheme
  """

  # Field / magnet-parameter mapping (see the Collectable impl below):
  #   name         - dn (Display Name)
  #   length       - xl (eXact Length), in bytes
  #   info_hash    - xt (eXact Topic) URNs
  #   fallback     - as (Acceptable Source) web link
  #   source       - xs (eXact Source) links
  #   keywords     - kt (Keyword Topic)
  #   manifest     - mt (Manifest Topic)
  #   announce     - tr (address TRacker) URLs
  #   experimental - x.* experimental parameters
  defstruct name: nil,
            length: nil,
            info_hash: [],
            fallback: nil,
            source: [],
            keywords: [],
            manifest: nil,
            announce: [],
            experimental: %{}

  @type t :: %__MODULE__{
          name: String.t(),
          length: number,
          info_hash: [String.t()],
          fallback: String.t(),
          source: [String.t()],
          keywords: [String.t()],
          manifest: String.t(),
          announce: [String.t()],
          experimental: map
        }

  # Parsing and serialization live in dedicated modules.
  defdelegate decode(data), to: Magnet.Decoder
  defdelegate encode(data), to: Magnet.Encoder
end
defimpl Collectable, for: Magnet do
  # Collects `{key, value}` magnet-URI parameters into a %Magnet{}.
  # Prioritized parameters (keys with a ".N" suffix, e.g. "tr.1") are
  # accumulated as {priority, value} tuples and sorted/unwrapped on :done.
  @spec into(Magnet.t()) ::
          {Magnet.t(),
           (Magnet.t(), {:cont, {String.t(), String.t()}} | :done | :halt -> Magnet.t() | :ok)}
  def into(original) do
    {original,
     fn
       # ignore entries with empty values
       acc, {:cont, {_, ""}} ->
         acc

       # as (Acceptable Source) - Web link to the file online
       acc, {:cont, {"as", value}} ->
         uri = URI.decode(value)
         %Magnet{acc | fallback: uri}

       # dn (Display Name) - Suggested filename
       acc, {:cont, {"dn", value}} ->
         %Magnet{acc | name: value}

       # kt (Keyword Topic) - Key words for the given torrent
       acc, {:cont, {<<"kt", priority::binary>>, value}} ->
         entry = parse_suffix_number(priority, String.split(value, "+"))
         %Magnet{acc | keywords: [entry | acc.keywords]}

       # mt (Manifest Topic) - link to a file that contains a list of magneto (MAGMA - MAGnet MAnifest)
       acc, {:cont, {"mt", value}} ->
         %Magnet{acc | manifest: value}

       # tr (address TRacker) - Tracker/Announce URLs for BitTorrent downloads
       acc, {:cont, {<<"tr", priority::binary>>, value}} ->
         announce = URI.decode(value)
         entry = parse_suffix_number(priority, announce)
         %Magnet{acc | announce: [entry | acc.announce]}

       # xl (eXact Length) - File size in bytes
       acc, {:cont, {"xl", value}} ->
         length = String.to_integer(value)
         %Magnet{acc | length: length}

       # xs (eXact Source) - peer-to-peer links
       acc, {:cont, {<<"xs", priority::binary>>, value}} ->
         uri = URI.decode(value)
         entry = parse_suffix_number(priority, uri)
         %Magnet{acc | source: [entry | acc.source]}

       # xt (eXact Topic) - URN containing file hash
       acc, {:cont, {<<"xt", priority::binary>>, value}} ->
         entry = parse_suffix_number(priority, value)
         %Magnet{acc | info_hash: [entry | acc.info_hash]}

       # x.<key> - experimental parameters, stored verbatim by key
       acc, {:cont, {<<"x.", key::binary>>, value}} ->
         experimental = URI.decode(value)
         %Magnet{acc | experimental: Map.put(acc.experimental, key, experimental)}

       # Collection finished: order all prioritized lists and deduplicate.
       acc, :done ->
         keywords =
           acc.keywords
           |> sort_by_priority
           |> List.flatten()
           |> Enum.dedup()

         %Magnet{
           acc
           | info_hash: prepare_list(acc.info_hash),
             announce: prepare_list(acc.announce),
             source: prepare_list(acc.source),
             keywords: keywords
         }

       _, :halt ->
         :ok
     end}
  end

  # Sorts {priority, value} entries and drops consecutive duplicates.
  @spec prepare_list([{number, String.t()}]) :: [String.t()]
  defp prepare_list(list) do
    list
    |> sort_by_priority
    |> Enum.dedup()
  end

  @spec sort_by_priority([{number, String.t()}]) :: [String.t()]
  defp sort_by_priority(priority_list) do
    priority_list
    |> Enum.sort_by(&elem(&1, 0))
    |> Enum.map(&elem(&1, 1))
  end

  # A bare key ("tr") gets priority 0; a ".N" suffix yields priority N.
  # NOTE(review): a non-numeric suffix makes Integer.parse/1 return :error,
  # which the `with` passes through instead of a {number, value} tuple —
  # confirm such keys cannot occur upstream.
  @spec parse_suffix_number(String.t(), any) :: {number, any}
  defp parse_suffix_number("", value),
    do: {0, value}

  defp parse_suffix_number(<<".", number::binary>>, value) do
    with {num, _} <- Integer.parse(number), do: {num, value}
  end
end
|
lib/magnet.ex
| 0.836454 | 0.495789 |
magnet.ex
|
starcoder
|
defmodule Engine.DB.Fee do
  @moduledoc """
  This module represents computed fees and how they are stored in the database.

  The schema contains the following fields:
  - hash: The sha256 hash of the `term`
  - type:
    - previous_fees: Fees that are still valid for a short period of time after being updated.
      This is to improve the UX by still accepting transactions that was built with a fee that changed just before the submission.
    - merged_fees: A merged map of current and previous fees that is used to validate the output amount of a transaction.
    - current_fees: The currently valid fees.
  - term: The Map of fees per token
  """
  use Ecto.Schema
  use Spandex.Decorators

  import Ecto.Changeset
  import Ecto.Query

  alias Ecto.Atom
  alias Ecto.Term
  alias Engine.Repo

  @required_fields [:type]
  @optional_fields [:term, :inserted_at]
  # Closed set of fee-record types accepted by the changeset.
  @allowed_types [:previous_fees, :merged_fees, :current_fees]

  # The automatic timestamps are remapped so `inserted_at` stays a
  # caller-controlled column, distinct from the node's own timestamps.
  @timestamps_opts [inserted_at: :node_inserted_at, updated_at: :node_updated_at]

  @primary_key false
  schema "fees" do
    # Composite primary key: {hash-of-term, type}.
    field(:hash, :string, primary_key: true)
    field(:type, Atom, primary_key: true)
    field(:term, Term)
    field(:inserted_at, :utc_datetime)

    timestamps()
  end

  # Builds a changeset, validating the type and deriving the `hash`
  # primary-key component from the `term`.
  def changeset(struct, params) do
    struct
    |> cast(params, @required_fields ++ @optional_fields)
    |> validate_required(@required_fields)
    |> validate_inclusion(:type, @allowed_types)
    |> put_hash()
  end

  # Inserts a fee record. `on_conflict: :nothing` makes the insert a no-op
  # when a row with the same {hash, type} already exists.
  @decorate trace(service: :ecto, type: :backend)
  def insert(params) do
    %__MODULE__{}
    |> changeset(params)
    |> Repo.insert(on_conflict: :nothing)
  end

  # Deletes all rows of type :previous_fees.
  @decorate trace(service: :ecto, type: :backend)
  def remove_previous_fees() do
    query = where(__MODULE__, type: ^:previous_fees)
    Repo.delete_all(query)
  end

  @decorate trace(service: :ecto, type: :backend)
  def fetch_current_fees(), do: fetch(:current_fees)

  @decorate trace(service: :ecto, type: :backend)
  def fetch_merged_fees(), do: fetch(:merged_fees)

  @decorate trace(service: :ecto, type: :backend)
  def fetch_previous_fees(), do: fetch(:previous_fees)

  # Returns {:ok, row} for the most recently inserted record of the given
  # type, or {:error, :not_found} when none exists.
  defp fetch(type) do
    __MODULE__
    |> where([r], r.type == ^type)
    |> order_by([r], desc: r.inserted_at)
    |> limit(1)
    |> Repo.one()
    |> case do
      nil -> {:error, :not_found}
      fees -> {:ok, fees}
    end
  end

  # Only valid changesets get a hash; the hash covers the :term change
  # (which may be absent).
  defp put_hash(changeset) do
    case changeset do
      %Ecto.Changeset{valid?: true, changes: changes} ->
        put_change(changeset, :hash, calculate_hash(changes[:term]))

      _ ->
        changeset
    end
  end

  # A nil term hashes as the empty string so the primary key is always set.
  defp calculate_hash(nil), do: hash("")
  # `inspect/1` provides the text representation of the term that is hashed.
  defp calculate_hash(term), do: term |> inspect() |> hash()

  defp hash(term) do
    :sha256
    |> :crypto.hash(term)
    |> Base.encode16(case: :lower)
  end
end
|
apps/engine/lib/engine/db/fee.ex
| 0.853852 | 0.607343 |
fee.ex
|
starcoder
|
defmodule Day03.Point do
  # A point on the wire grid, addressed by Cartesian coordinates.
  defstruct x: 0, y: 0

  # The grid origin, {0, 0}.
  def origin, do: %__MODULE__{}

  # Manhattan (taxicab) distance between two points.
  def distance(a, b) do
    abs(a.x - b.x) + abs(a.y - b.y)
  end
end
defmodule Day03.Path do
  # Parses a comma-separated wire description ("R8,U5,...") into a list of
  # {direction, length} tuples.
  def parse(raw) do
    raw
    |> String.split(",")
    |> Enum.map(&tupleize/1)
  end

  # Converts one segment such as "R8" into {:right, 8}.
  def tupleize("R" <> len), do: to_segment(:right, len)
  def tupleize("L" <> len), do: to_segment(:left, len)
  def tupleize("U" <> len), do: to_segment(:up, len)
  def tupleize("D" <> len), do: to_segment(:down, len)

  # Integer.parse/1 tolerates trailing characters (e.g. a newline).
  defp to_segment(dir, len) do
    {amount, _rest} = Integer.parse(len)
    {dir, amount}
  end
end
defmodule Day03.Wires do
  # Walks a parsed path and returns every grid point the wire passes
  # through, in reverse visit order (the origin itself is not included).
  def points_for(path), do: points_for(path, Day03.Point.origin, [])

  defp points_for([], _pos, points), do: points
  # Segment fully consumed: continue with the next {direction, length}.
  defp points_for([{_, 0} | rest], pos, points), do: points_for(rest, pos, points)
  defp points_for([{dir, len} | rest], pos, points) do
    # Step one grid cell in the segment's direction. Note :down increases y
    # and :up decreases it (screen-style coordinates); distances are
    # unaffected either way.
    pos = case dir do
      :down -> %Day03.Point{x: pos.x, y: pos.y + 1}
      :left -> %Day03.Point{x: pos.x - 1, y: pos.y}
      :right -> %Day03.Point{x: pos.x + 1, y: pos.y}
      :up -> %Day03.Point{x: pos.x, y: pos.y - 1}
    end
    points_for([{dir, len - 1} | rest], pos, [pos | points])
  end

  # Part 1: among all points where the wires cross, find the smallest
  # Manhattan distance from the origin.
  def run(:part1) do
    points()
    |> Enum.map(&MapSet.new/1)
    |> Enum.reduce(fn(a, b) -> MapSet.intersection(a, b) end)
    |> Enum.map(&(Day03.Point.distance(&1, Day03.Point.origin)))
    |> Enum.min
    |> IO.inspect
  end

  # Part 2: among all crossings, find the smallest combined number of steps
  # both wires take to reach it. Paths are reversed back into visit order so
  # step counts start at the origin.
  def run(:part2) do
    paths = points() |> Enum.map(&Enum.reverse/1)
    steps = paths |> Enum.map(&count_steps/1)
    paths
    |> Enum.map(&MapSet.new/1)
    |> Enum.reduce(fn(a, b) -> MapSet.intersection(a, b) end)
    |> Enum.map(&(steps_for(&1, steps)))
    |> Enum.min
    |> IO.inspect
  end

  # Reads the puzzle input (one wire per line) and converts each wire into
  # its list of visited points.
  defp points do
    InputFile.contents_of(3, :stream)
    |> Enum.map(&Day03.Path.parse/1)
    |> Enum.map(&Day03.Wires.points_for/1)
  end

  # Maps each point to the step number at which the wire FIRST reaches it
  # (Map.put_new keeps the earliest count on revisits). Steps are 1-based.
  def count_steps(path), do: count_steps(path, 1, %{})
  defp count_steps([], _count, counts), do: counts
  defp count_steps([point | rest], count, counts) do
    count_steps(rest, count + 1, Map.put_new(counts, point, count))
  end

  # Sums the step counts of all wires at the given crossing point.
  def steps_for(point, step_counts) do
    step_counts
    |> Enum.map(&(Map.get(&1, point)))
    |> Enum.reduce(&Kernel.+/2)
  end
end
|
year_2019/lib/day_03/wires.ex
| 0.646237 | 0.746093 |
wires.ex
|
starcoder
|
defmodule MangoPay.BankingAlias do
  @moduledoc """
  Functions for MangoPay [BankingAlias](https://docs.mangopay.com/endpoints/v2.01/banking-aliases#e849_the-banking-alias-object).
  """
  use MangoPay.Query.Base
  set_path "bankingaliases"

  @doc """
  Get a banking alias.

  ## Examples
      {:ok, banking_alias} = MangoPay.BankingAlias.get(id)

  """
  def get id do
    _get id
  end

  @doc """
  Get a banking alias.

  ## Examples
      banking_alias = MangoPay.BankingAlias.get!(id)

  """
  def get! id do
    _get! id
  end

  @doc """
  Create an IBAN banking alias for the given wallet.

  ## Examples
      params = %{
        "Tag": "custom meta",
        "CreditedUserId": "8494514",
        "OwnerName": "<NAME>",
        "Country": "FR"
      }
      {:ok, banking_alias} = MangoPay.BankingAlias.create(wallet_id, params)

  """
  def create wallet_id, params do
    _create params, [MangoPay.Wallet.path(wallet_id), MangoPay.BankingAlias.path("iban")]
  end

  @doc """
  Create an IBAN banking alias for the given wallet.

  ## Examples
      params = %{
        "Tag": "custom meta",
        "CreditedUserId": "8494514",
        "OwnerName": "<NAME>",
        "Country": "FR"
      }
      banking_alias = MangoPay.BankingAlias.create!(wallet_id, params)

  """
  def create! wallet_id, params do
    _create! params, [MangoPay.Wallet.path(wallet_id), MangoPay.BankingAlias.path("iban")]
  end

  @doc """
  Update a banking alias.

  ## Examples
      params = %{
        "Active": false
      }
      {:ok, banking_alias} = MangoPay.BankingAlias.update(id, params)

  """
  def update id, params do
    _update params, id
  end

  @doc """
  Update a banking alias.

  ## Examples
      params = %{
        "Active": false
      }
      banking_alias = MangoPay.BankingAlias.update!(id, params)

  """
  def update! id, params do
    _update! params, id
  end

  @doc """
  List all banking aliases by wallet.

  ## Examples
      wallet_id = Id of a wallet
      query = %{
        "Page": 1,
        "Per_Page": 25,
        "Sort": "CreationDate:DESC",
        "BeforeDate": 1463440221,
        "AfterDate": 1431817821
      }
      {:ok, banking_alias} = MangoPay.BankingAlias.all_by_wallet(wallet_id, query)

  """
  def all_by_wallet id, query \\ %{} do
    _all [MangoPay.Wallet.path(id), MangoPay.BankingAlias.path()], query
  end

  @doc """
  List all banking aliases by wallet.

  ## Examples
      wallet_id = Id of a wallet
      query = %{
        "Page": 1,
        "Per_Page": 25,
        "Sort": "CreationDate:DESC",
        "BeforeDate": 1463440221,
        "AfterDate": 1431817821
      }
      banking_alias = MangoPay.BankingAlias.all_by_wallet!(wallet_id, query)

  """
  def all_by_wallet! id, query \\ %{} do
    _all! [MangoPay.Wallet.path(id), MangoPay.BankingAlias.path()], query
  end
end
|
lib/mango_pay/banking_alias.ex
| 0.683208 | 0.417509 |
banking_alias.ex
|
starcoder
|
defmodule Stein.Accounts do
@moduledoc """
Helper functions around user accounts
To fully utilize the `Stein.Accounts` functions, your user schema struct
should contain the following fields:
```elixir
defmodule MyApp.Users.User do
# ...
schema "users" do
field(:email, :string)
field(:password, :string, virtual: true)
field(:password_hash, :string)
field(:email_verification_token, Ecto.UUID)
field(:email_verified_at, :utc_datetime)
field(:password_reset_token, Ecto.UUID)
field(:password_reset_expires_at, :utc_datetime)
end
# ...
end
```
A sample Ecto migration:
```elixir
def change() do
create table(:users) do
add(:email, :string)
add(:password_hash, :string)
add(:email_verification_token, :uuid)
add(:email_verified_at, :utc_datetime)
add(:password_reset_token, :uuid)
add(:password_reset_expires_at, :utc_datetime)
timestamps()
end
create index(:users, ["lower(email)"], unique: true)
end
```
"""
require Logger
require Ecto.Query
alias Ecto.Query
alias Stein.Time
@type email() :: String.t()
@type password() :: String.t()
@type password_hash() :: String.t()
@type password_params() :: %{
password: password(),
password_confirmation: password()
}
@type reset_token() :: String.t()
@type user() :: %{
email: email(),
password: password(),
password_hash: password_<PASSWORD>(),
email_verification_token: Stein.uuid(),
email_verified_at: DateTime.t()
}
@type user_fun() :: (user() -> :ok)
@type user_schema() :: atom()
@doc """
Find a user by their email address

Trims and downcases the email to find an existing user. Checks against
the `lower` unique index on their email that should be set up when using
Stein.
"""
def find_by_email(repo, schema, email) do
  normalized = email |> String.trim() |> String.downcase()

  result =
    schema
    |> Query.where([s], fragment("lower(?) = ?", s.email, ^normalized))
    |> Query.limit(1)
    |> repo.one()

  case result do
    nil -> {:error, :not_found}
    user -> {:ok, user}
  end
end
@doc """
Hash the changed password in a changeset

- Skips if the changeset is invalid
- Skips if a password is not changed
- Hashes the password with BCrypt otherwise

Requires the user schema to contain:

- `password`, type `:string`
- `password_hash`, type `:string`
"""
@spec hash_password(Ecto.Changeset.t()) :: Ecto.Changeset.t()
def hash_password(changeset) do
  # Only hash when the changeset is valid AND a new password was provided;
  # `&&` yields the password itself (or nil/false) in one step.
  password = changeset.valid? && Ecto.Changeset.get_change(changeset, :password)

  if password do
    Ecto.Changeset.put_change(changeset, :password_hash, Bcrypt.hash_pwd_salt(password))
  else
    changeset
  end
end
@doc """
Validate a email and password match a user

Requires the user schema to contain:

- `email`, type `:string`
- `password_hash`, type `:string`
"""
@spec validate_login(Stein.repo(), user_schema(), email(), password()) ::
        {:error, :invalid} | {:ok, user()}
def validate_login(repo, schema, email, password) do
  with {:ok, user} <- find_by_email(repo, schema, email) do
    check_password(user, password)
  else
    {:error, :not_found} ->
      # Run a dummy hash check so response timing does not reveal whether
      # the email exists.
      Bcrypt.no_user_verify()
      {:error, :invalid}
  end
end
# Verifies the plaintext password against the stored BCrypt hash.
defp check_password(user, password) do
  if Bcrypt.verify_pass(password, user.password_hash) do
    {:ok, user}
  else
    {:error, :invalid}
  end
end
@doc """
Prepare a user for email validation

This should run as part of the create changeset when registering a new user
"""
@spec start_email_verification_changeset(Ecto.Changeset.t()) :: Ecto.Changeset.t()
def start_email_verification_changeset(changeset) do
  # Issue a fresh verification token and clear any previous verification.
  token = UUID.uuid4()

  changeset
  |> Ecto.Changeset.put_change(:email_verification_token, token)
  |> Ecto.Changeset.put_change(:email_verified_at, nil)
end
@doc """
Verify a user's email address from a token sent to their email address

This token should be a UUID, if it is not `{:error, :invalid}` will be returned.

Requires the user schema to contain:

- `email_verification_token`, type `:uuid`
- `email_verified_at`, type `:utc_datetime`
"""
@spec verify_email(Stein.repo(), user_schema(), Stein.uuid()) ::
        {:ok, user()} | {:error, :invalid} | {:error, Ecto.Changeset.t()}
def verify_email(repo, struct, token) do
  # Cast first so malformed tokens never reach the database.
  case Ecto.UUID.cast(token) do
    {:ok, token} ->
      case repo.get_by(struct, email_verification_token: token) do
        nil ->
          {:error, :invalid}

        user ->
          # Stamp the verification time and burn the one-time token.
          user
          |> Ecto.Changeset.change()
          |> Ecto.Changeset.put_change(:email_verified_at, Time.now())
          |> Ecto.Changeset.put_change(:email_verification_token, nil)
          |> repo.update()
      end

    :error ->
      {:error, :invalid}
  end
end
@doc """
Check if the user's email has been verified

Returns `true` exactly when `email_verified_at` is present and non-nil.
"""
@spec email_verified?(user()) :: boolean()
def email_verified?(user)
def email_verified?(%{email_verified_at: nil}), do: false
def email_verified?(%{email_verified_at: _timestamp}), do: true
def email_verified?(_), do: false
@doc """
Start the password reset process
On successful start of reset, the success function will be called. This can be
used to send the password reset email.
Requires the user schema to contain:
- `password_reset_token`, type `:uuid`
- `password_reset_expires_at`, type `utc_datetime`
"""
@spec start_password_reset(Stein.repo(), user_schema(), email(), user_fun()) :: :ok
def start_password_reset(repo, schema, email, success_fun \\ fn _user -> :ok end) do
case find_by_email(repo, schema, email) do
{:ok, user} ->
expires_at = DateTime.add(Time.now(), 3600, :second)
user
|> Ecto.Changeset.change()
|> Ecto.Changeset.put_change(:password_reset_token, UUID.uuid4())
|> Ecto.Changeset.put_change(:password_reset_expires_at, expires_at)
|> repo.update()
|> maybe_run_success(success_fun)
:ok
{:error, :not_found} ->
:ok
end
end
# Runs `success_fun` with the user only when the update succeeded;
# any failure is swallowed and :ok returned.
defp maybe_run_success({:ok, user}, success_fun), do: success_fun.(user)
defp maybe_run_success(_failure, _success_fun), do: :ok
@doc """
Finish resetting a password
Takes the token, checks for expiration, and then resets the password
"""
@spec reset_password(Stein.repo(), user_schema(), reset_token(), password_params()) ::
{:ok, user()} | {:error, Ecto.Changeset.t()}
def reset_password(repo, struct, token, params) do
with {:ok, uuid} <- Ecto.UUID.cast(token),
{:ok, user} <- find_user_by_reset_token(repo, struct, uuid),
{:ok, user} <- check_password_reset_expired(user) do
user
|> password_changeset(params)
|> repo.update()
end
end
defp find_user_by_reset_token(repo, struct, uuid) do
case repo.get_by(struct, password_reset_token: uuid) do
nil ->
:error
user ->
{:ok, user}
end
end
defp check_password_reset_expired(user) do
case Time.after?(Time.now(), user.password_reset_expires_at) do
true ->
:error
false ->
{:ok, user}
end
end
# Builds the changeset that applies a new password during reset:
# casts and confirms the password, clears the one-time reset fields,
# and hashes the new password (hash_password/1 defined elsewhere in this
# module — presumably sets :password_hash from :password; confirm).
defp password_changeset(user, params) do
  user
  |> Ecto.Changeset.cast(params, [:password, :password_confirmation])
  |> Ecto.Changeset.validate_required([:password])
  |> Ecto.Changeset.validate_confirmation(:password)
  # Invalidate the token so it cannot be replayed
  |> Ecto.Changeset.put_change(:password_reset_token, nil)
  |> Ecto.Changeset.put_change(:password_reset_expires_at, nil)
  |> hash_password()
  # Guards against hash_password/1 silently producing no hash
  |> Ecto.Changeset.validate_required([:password_hash])
end
@doc """
Trim a field in a changeset if present
Calls `String.trim/1` on the field and replaces the value.
"""
def trim_field(changeset, field) do
case Ecto.Changeset.get_change(changeset, field) do
nil ->
changeset
value ->
Ecto.Changeset.put_change(changeset, field, String.trim(value))
end
end
end
|
lib/stein/accounts.ex
| 0.822189 | 0.704948 |
accounts.ex
|
starcoder
|
defmodule Erl2ex.Results do
  @moduledoc """
  Erl2ex.Results defines the structure of result data returned from most
  functions in the Erl2ex module.
  """

  alias Erl2ex.Results

  defmodule File do
    @moduledoc """
    Erl2ex.Results.File defines the result data structure for a particular file.
    """

    defstruct input_path: nil,
              output_path: nil,
              error: nil

    @typedoc """
    The conversion results of a single file.

    * `input_path` is the path to the input Erlang file, or nil if the input
      is a string
    * `output_path` is the path to the output Elixir file, or nil if the
      output is a string.
    * `error` is the CompileError if a fatal error happened, or nil if the
      conversion was successful.
    """
    @type t :: %__MODULE__{
            input_path: Path.t() | nil,
            output_path: Path.t() | nil,
            error: %CompileError{} | nil
          }
  end

  defstruct files: []

  @typedoc """
  Overall results for an entire conversion job of one or more files.
  """
  @type t :: %__MODULE__{
          files: [Results.File.t()]
        }

  @doc """
  Returns true if the entire conversion was successful, meaning no file
  resulted in an error.
  """
  @spec success?(Results.t() | Results.File.t()) :: boolean
  def success?(%Results{files: files}) do
    # Successful iff no file carries an error
    Enum.all?(files, fn file -> get_error(file) == nil end)
  end

  def success?(%Results.File{error: error}) do
    is_nil(error)
  end

  @doc """
  Returns the error that caused a conversion to fail, or nil if the conversion
  was successful. If more than one fatal error was detected, one error is
  returned but it is undefined which one is chosen.
  """
  @spec get_error(Results.t() | Results.File.t()) :: %CompileError{} | nil
  def get_error(%Results{files: files}) do
    Enum.find_value(files, &get_error/1)
  end

  def get_error(%Results.File{error: error}) do
    error
  end

  @doc """
  If the conversion failed, throw the error that caused the failure. Otherwise
  return the results.
  """
  @spec throw_error(a) :: a when a: Results.t()
  def throw_error(results) do
    case get_error(results) do
      nil -> results
      error -> throw(error)
    end
  end
end
|
lib/erl2ex/results.ex
| 0.679817 | 0.63696 |
results.ex
|
starcoder
|
defmodule Chex do
  @moduledoc """
  A library for playing chess in Elixir.
  """

  alias Chex.{Board, Color, Game, Piece}

  @typedoc """
  The main type representing a single game. Values should be treated as
  read-only and should only be modified using public API functions.

  * `:active_color` - The color of the player who's turn it is to move.
  * `:board` - The pieces on the board keyed by the square they occupy.
  * `:castling` - Castling rights.
  * `:en_passant` - The _en passant_ square, if any.
  * `:moves` - A list of moves with the most recent move first.
  * `:halfmove_clock` - Fifty-move rule. Set to 0 on pawn move or capture.
  * `:fullmove_clock` - Starts at 1 and is incremented after every black move.
  * `:captures` - A list of captured pieces. Most recent captures first.
  * `:check` - The color of the player in check.
  * `:result` - Set on game completion.
  * `:pgn` - A map with PGN tag pairs and `:moves` as values. `nil` if not
    created from PGN data.
  """
  @type game :: %Game{
          active_color: color(),
          board: %{square() => Board.value()},
          castling: [castling_right()] | [],
          en_passant: square() | nil,
          moves: [move()] | [],
          halfmove_clock: non_neg_integer(),
          fullmove_clock: pos_integer(),
          captures: [piece()] | [],
          check: color() | nil,
          result: result(),
          pgn: map() | nil
        }

  @typedoc """
  A two element tuple with a file a-h as an atom and a rank integer 1-8.
  """
  @type square :: {file :: atom, rank :: pos_integer}

  @typedoc """
  A starting `t:square/0` and a destination `t:square/0` as a two element tuple
  or a three element tuple with a piece `t:name/0` to promote to.
  """
  @type move ::
          {from :: square(), to :: square()}
          | {from :: square(), to :: square(), piece :: name()}

  @typedoc """
  One of `:white`, `:black`, `:draw`, or `nil`.
  """
  @type result :: color() | :draw | nil

  @typedoc """
  The color of a piece, square, or player as an atom.
  """
  @type color :: :white | :black

  @typedoc """
  The name of a piece as an atom.
  """
  @type name :: :king | :queen | :bishop | :knight | :rook | :pawn

  @typedoc """
  A `t:name/0`, `t:color/0` pair.
  """
  @type piece :: {name(), color()}

  @typedoc """
  A FEN style, single character, atom. Uppercase letters denote white's rights
  while lowercase letters denote black's.
  """
  @type castling_right :: :K | :Q | :k | :q

  @doc """
  Returns the other color.

  ## Examples

      iex> Chex.flip_color(:black)
      :white

      iex> Chex.flip_color(:white)
      :black
  """
  @spec flip_color(color()) :: color()
  defdelegate flip_color(color), to: Color, as: :flip

  @doc """
  Get the color of a piece at a square.

  ## Examples

      iex> {:ok, game} = Chex.new_game()
      iex> Chex.get_piece_color(game, {:d, 1})
      :white

      iex> {:ok, game} = Chex.new_game()
      iex> Chex.get_piece_color(game, {:d, 8})
      :black

      iex> {:ok, game} = Chex.new_game()
      iex> Chex.get_piece_color(game, {:d, 5})
      nil
  """
  @spec get_piece_color(game(), square()) :: color() | nil
  defdelegate get_piece_color(game, square), to: Board

  @doc """
  Get the name of a piece at a square.

  ## Examples

      iex> {:ok, game} = Chex.new_game()
      iex> Chex.get_piece_name(game, {:e, 1})
      :king

      iex> {:ok, game} = Chex.new_game()
      iex> Chex.get_piece_name(game, {:e, 4})
      nil
  """
  @spec get_piece_name(game(), square()) :: name() | nil
  defdelegate get_piece_name(game, square), to: Board

  @doc """
  Makes a move on the provided `game`.

  If a piece name is not provided it promotes to queen.

  ## Examples

      iex> {:ok, game} = Chex.new_game()
      iex> Chex.make_move(game, "e7e5")
      {:error, :out_of_turn}

      iex> {:ok, game} = Chex.new_game()
      iex> {:ok, game} = Chex.make_move(game, "e2e4")
      iex> Chex.make_move(game, "e2e3")
      {:error, :no_piece_at_square}
  """
  @spec make_move(game(), move()) :: {:ok, game()} | {:error, atom()}
  defdelegate make_move(game, move), to: Game, as: :move

  @doc """
  Creates a new game with the standard starting position.

  ## Examples

      iex> {:ok, game} = Chex.new_game()
      iex> game.active_color
      :white
  """
  @spec new_game :: {:ok, game()}
  defdelegate new_game, to: Game, as: :new

  @doc """
  Same as new_game/0 only returns a game or raises.

  ## Examples

      iex> Chex.new_game!().active_color
      :white
  """
  @spec new_game! :: game()
  def new_game! do
    # Game.new/0 only returns {:ok, game}; any other shape raises CaseClauseError
    case Game.new() do
      {:ok, game} -> game
    end
  end

  @doc """
  Check if a square is occupied by a piece.

  ## Examples

      iex> {:ok, game} = Chex.new_game()
      iex> Chex.occupied?(game, {:b, 1})
      true

      iex> {:ok, game} = Chex.new_game()
      iex> Chex.occupied?(game, {:a, 5})
      false
  """
  @spec occupied?(game(), square()) :: boolean
  defdelegate occupied?(game, square), to: Board

  @doc """
  Check if a square is occupied by a piece with the specified color.

  ## Examples

      iex> {:ok, game} = Chex.new_game()
      iex> Chex.occupied_by_color?(game, :white, {:c, 1})
      true

      iex> {:ok, game} = Chex.new_game()
      iex> Chex.occupied_by_color?(game, :black, {:c, 1})
      false

      iex> {:ok, game} = Chex.new_game()
      iex> Chex.occupied_by_color?(game, :white, {:c, 5})
      false
  """
  @spec occupied_by_color?(game(), color(), square()) :: boolean
  defdelegate occupied_by_color?(game, color, square), to: Board

  @doc """
  Parse a FEN style piece string representation into a `t:piece/0`.

  ## Examples

      iex> Chex.piece_from_string("N")
      {:knight, :white}

      iex> Chex.piece_from_string("q")
      {:queen, :black}
  """
  @spec piece_from_string(String.t()) :: piece()
  defdelegate piece_from_string(string), to: Piece, as: :from_string

  @doc """
  Returns a FEN style character for a given `t:piece/0`.

  ## Examples

      iex> Chex.piece_to_string({:knight, :white})
      "N"

      iex> Chex.piece_to_string({:queen, :black})
      "q"
  """
  # Fixed: this spec previously (mis)named piece_from_string and so never
  # attached to piece_to_string/1.
  @spec piece_to_string(piece()) :: String.t()
  defdelegate piece_to_string(string), to: Piece, as: :to_string

  @doc """
  Get the possible moves for a piece at a given square.

  This function assumes the piece at the given square has the right to move.
  That is, it's the same color as the `:active_color` of the game state. This
  may not be true and the piece may not be able to move until the other color
  has made a move. This could cause the list of moves returned to be invalid.

  ## Examples

      iex> {:ok, game} = Chex.new_game()
      iex> Chex.possible_moves(game, {:e, 2})
      [e: 4, e: 3]

      iex> {:ok, game} = Chex.new_game()
      iex> Chex.possible_moves(game, {:b, 8})
      [c: 6, a: 6]
  """
  @spec possible_moves(game(), square()) :: [square()] | []
  defdelegate possible_moves(game, square), to: Piece
end
|
lib/chex.ex
| 0.946101 | 0.689737 |
chex.ex
|
starcoder
|
defmodule Mix.Tasks.Absinthe.Gen.Query do
  @shortdoc "Generates an absinthe query schema and inserts the record in the base schema.ex"

  @moduledoc """
  Generates an Absinthe Query

  ### Options

  #{NimbleOptions.docs(AbsintheGenerator.Query.definitions())}

  ### Specifying Queries

  The following format can be used to specify queries

  ```bash
  query_name:return_type:arg_name:arg_type:arg_name:arg_type:ResolverModule.resolver_function
  ```

  you can also specify middleware before or after the resolver

  ### Example

  ```bash
  mix absinthe.gen.query
    summoners:list_of(return_type):arg_a:string:arg_b:non_null(:integer):Resolvers.Summoner.list_all
    summoner:return_type:id:id:Resolvers.Summoner.find
    summoner:return_type:middleware:IDToIntegerMiddlewareid:id:middleware:AuthMiddleware:Resolvers.Summoner.find:middleware:ChangesetErrorFormatter
    --app-name MyApp
    --query-name students
    --moduledoc "this is the test"
  ```
  """

  use Mix.Task

  alias Mix.AbsintheGeneratorUtils
  alias Mix.AbsintheGeneratorUtils.SchemaFieldParser

  # Shape of each positional query definition; see the moduledoc examples.
  @query_regex ~r/^([a-z]+|list_of\([^\(\)]+\))+[A-Za-z\.]+(:middleware:[A-Za-z]+)?+$/

  @doc false
  def run(args) do
    AbsintheGeneratorUtils.ensure_not_in_umbrella!("absinthe.gen.query")

    {args, extra_args} = AbsintheGeneratorUtils.parse_path_opts(args, [
      path: :string,
      app_name: :string,
      moduledoc: :string,
      query_name: :string
    ])

    parsed_query_functions = extra_args
      |> validate_query_string
      |> SchemaFieldParser.parse_fields

    # NOTE: a stray `|> IO.inspect` debug call was removed from this pipeline.
    args
    |> Map.new
    |> Map.put(:queries, parsed_query_functions)
    |> serialize_to_query_struct
    |> AbsintheGenerator.Query.run
    |> AbsintheGeneratorUtils.write_template(path_from_args(args))
  end

  # Output path: explicit --path wins; otherwise derived from app/query name.
  defp path_from_args(args) do
    Keyword.get(
      args,
      :path,
      "./lib/#{Macro.underscore(args[:app_name])}_web/queries/#{Macro.underscore(args[:query_name])}.ex"
    )
  end

  # Raises with usage examples unless every positional arg matches @query_regex.
  defp validate_query_string(query_parts) do
    if query_parts === [] or Enum.all?(query_parts, &Regex.match?(@query_regex, &1)) do
      query_parts
    else
      Mix.raise("""
      \n
      Query format isn't setup properly and must match the following regex
      #{inspect @query_regex}

      Example:
      summoners:list_of(return_type):arg_a:string:arg_b:non_null(:integer):Resolvers.Summoner.list_all
      summoner:return_type:id:id:Resolvers.Summoner.find
      summoner:return_type:middleware:IDToIntegerMiddlewareid:id:middleware:AuthMiddleware:Resolvers.Summoner.find:middleware:ChangesetErrorFormatter
      """)
    end
  end

  # Packs the parsed CLI options into the generator's input struct.
  defp serialize_to_query_struct(params) do
    %AbsintheGenerator.Query{
      app_name: params[:app_name],
      moduledoc: params[:moduledoc],
      query_name: params[:query_name],
      queries: params[:queries]
    }
  end
end
|
lib/mix/tasks/query.ex
| 0.860369 | 0.713007 |
query.ex
|
starcoder
|
defmodule ExCell.View do
  @moduledoc """
  Cell helpers used to render the cells in both Views and Cells
  """

  # Rendering backend resolved at compile time; defaults to Phoenix.View.
  @view_adapter ExCell.config(:view_adapter, Phoenix.View)

  @doc """
  Returns the relative path of a module to the namespace. This method is used
  to determine the template path of the cell.

  ## Examples

      iex(0)> ExCell.View.relative_path(AppWeb.AvatarCell, AppWeb)
      "avatar"

      iex(1)> ExCell.View.relative_path(AppWeb.Namespace.AvatarCell, AppWeb)
      "namespace/avatar"
  """
  def relative_path(module, namespace) do
    module
    |> ExCell.module_relative_to(namespace)
    |> Enum.map(&Macro.underscore/1)
    |> Enum.join("/")
    |> String.replace_suffix("_cell", "")
  end

  @doc """
  Renders a cell in the view.

  ### Examples

      iex(0)> safe_to_string(AppWeb.AvatarView.cell(AvatarCell))
      "<div class=\"AvatarCell\" ...>"
  """
  def cell(cell) do
    render_cell(cell, [])
  end

  @doc """
  Renders a cell in the view with children.

  ### Examples

      iex(0)> safe_to_string(AppWeb.AvatarView.cell(AvatarCell, do: "Foo"))
      "<div class=\"AvatarCell\" ...>Foo</div>"
  """
  # NOTE: cell/2 clause order is significant — the `do:` block form must be
  # tried first, then keyword assigns, then the bare-children fallback.
  def cell(cell, do: children) do
    render_cell(cell, children: children)
  end

  @doc """
  Renders a cell in the view with assigns.

  ### Examples

      iex(0)> safe_to_string(AppWeb.AvatarView.cell(AvatarCell, user: %User{name: "Bar"}))
      "<div class=\"AvatarCell\" ...>Bar</div>"
  """
  def cell(cell, assigns) when is_list(assigns) do
    render_cell(cell, assigns)
  end

  @doc """
  Renders a cell in the view with children without a block.

  ### Examples

      iex(0)> safe_to_string(AppWeb.AvatarView.cell(AvatarCell, "Hello"))
      "<div class=\"AvatarCell\" ...>Hello</div>"
  """
  def cell(cell, children) do
    render_cell(cell, children: children)
  end

  # cell/3: children supplied either as a `do` block or positionally, combined
  # with keyword assigns. `children:` is prepended so it wins on lookup when
  # assigns also contains a :children key (Keyword.get takes the first match).
  def cell(cell, assigns, do: children) when is_list(assigns) do
    render_cell(cell, [children: children] ++ assigns)
  end

  def cell(cell, children, assigns) when is_list(assigns) do
    render_cell(cell, [children: children] ++ assigns)
  end

  @doc """
  Renders the cell directly as a string, used for testing purposes.

  ### Examples

      iex(0)> AppWeb.AvatarView.cell_to_string(AvatarCell)
      "<div class=\"AvatarCell\" ...>"
  """
  def cell_to_string(cell) do
    render_cell_to_string(cell, [])
  end

  # String-rendering variants mirroring cell/2 and cell/3 above.
  def cell_to_string(cell, do: children) do
    render_cell_to_string(cell, children: children)
  end

  def cell_to_string(cell, assigns) do
    render_cell_to_string(cell, assigns)
  end

  def cell_to_string(cell, assigns, do: children) do
    render_cell_to_string(cell, [children: children] ++ assigns)
  end

  # All cells render the same "template.html" through the configured adapter.
  defp render_cell(cell, assigns) do
    @view_adapter.render(cell, "template.html", assigns)
  end

  defp render_cell_to_string(cell, assigns) do
    @view_adapter.render_to_string(cell, "template.html", assigns)
  end
end
|
lib/ex_cell/view.ex
| 0.770335 | 0.581184 |
view.ex
|
starcoder
|
defmodule Base85.UnrecognizedCharacterSet do
  @moduledoc """
  Raised at runtime when an unknown character set is specified.
  """

  defexception [:charset, :operation]

  @doc false
  def message(%__MODULE__{charset: charset, operation: operation}) do
    "unrecognized character set #{charset} requested while #{operation}"
  end
end
defmodule Base85.UnrecognizedPadding do
  @moduledoc """
  Raised at runtime when an unknown padding method is specified.
  """

  defexception [:padding, :operation]

  @doc false
  def message(%__MODULE__{padding: padding, operation: operation}) do
    "unrecognized padding method #{padding} requested while #{operation}"
  end
end
defmodule Base85.InvalidCharacterForCharacterSet do
  @moduledoc """
  Raised at runtime when decoding finds an invalid coding for the specified
  character set.
  """

  defexception [:character, :charset]

  @doc false
  def message(%__MODULE__{character: character, charset: charset}) do
    "invalid character value #{character} for character set #{charset}"
  end
end
defmodule Base85.InvalidEncodedLength do
  @moduledoc """
  Raised at runtime when presented with encoded data with an invalid length.
  """

  defexception [:hint]

  # @doc false added for consistency: every sibling exception in this file
  # hides its message/1 callback from the docs.
  @doc false
  def message(%__MODULE__{hint: hint}) do
    "encoded data had invalid encoded length" <>
      if is_nil(hint) do
        ""
      else
        ", expected #{hint}"
      end
  end
end
defmodule Base85.InvalidUnencodedLength do
  @moduledoc """
  Raised at runtime when presented with unencoded data with an invalid
  length.
  """

  defexception [:padding, :hint]

  @doc false
  def message(%__MODULE__{padding: padding, hint: hint}) do
    base = "raw data had invalid length for padding method #{padding}"

    case hint do
      nil -> base
      hint -> base <> ", expected #{hint}"
    end
  end
end
defmodule Base85.InvalidPaddingData do
  @moduledoc """
  Raised at runtime when presented with data with corrupted padding data.
  """

  defexception [:padding, :hint]

  @doc false
  def message(%__MODULE__{padding: padding, hint: hint}) do
    base = "encoded data had invalid padding bytes for padding method #{padding}"

    case hint do
      nil -> base
      hint -> base <> ", expected #{hint}"
    end
  end
end
defmodule Base85.InternalError do
  @moduledoc """
  Raised at runtime when an internal error is encountered.
  """

  # Relies on the default Exception.message/1, which returns :message as-is.
  defexception [:message]
end
|
lib/base85/exceptions.ex
| 0.835249 | 0.452475 |
exceptions.ex
|
starcoder
|
defmodule North.Scope.Wildcard do
  @moduledoc """
  Scope behaviour that supports wildcard matching.
  """

  # The wildcard token matched within a delimited scope part.
  @wc "*"

  @behaviour North.Scope

  @doc """
  Matches using the `*` wildcard character.

  Wildcards can be used on either side of the match, with the left hand side
  taking precedence. The examples provide a clearer idea of what matches and
  what does not. Check the test suite for a more complete set of examples.

  ## Example

      iex> North.Scope.Wildcard.matches?(~w(foo.bar), "foo.bar")
      true

      iex> North.Scope.Wildcard.matches?(~w(foo.*), "foo.bar.baz")
      true

      iex> North.Scope.Wildcard.matches?(~w(foo.*), "foo")
      false

      iex> North.Scope.Wildcard.matches?(~w(foo.*.bar.*), "foo.*.bar.*.*.*")
      true

      iex> North.Scope.Wildcard.matches?(~w(foo.*.bar.*), "foo.baz.bar")
      false

  ## Options

    * `:delimiter` - The character used to delimit scope granularity.
      For example the scope: `user:profile` is delimited by the `:`
      (colon) character. Supported delimiters are: `.` `:` `,` `;`.
      The default is `.` (period).
  """
  @impl true
  def matches?(scopes, scope, opts \\ []) when is_list(scopes) and is_binary(scope) do
    splitter =
      opts
      |> Keyword.get(:delimiter, ".")
      |> scope_splitter()

    # Split the requested scope once; each candidate is split on demand.
    parts = splitter.(scope)
    Enum.any?(scopes, &do_match?(splitter.(&1), parts))
  end

  # Clause order is significant. First arg: candidate scope parts;
  # second arg: requested scope parts.
  #
  # An empty candidate never matches.
  defp do_match?([], _), do: false
  # A candidate with more parts than the request cannot match.
  defp do_match?(a, b) when length(a) > length(b), do: false
  # A wildcard does not match an empty part (e.g. from a trailing delimiter).
  defp do_match?([@wc | _], ["" | _]), do: false
  # A trailing wildcard in the candidate matches all remaining request parts.
  defp do_match?([@wc | []], _), do: true
  # A non-trailing wildcard matches exactly one (non-empty) part.
  defp do_match?([@wc | t1], [_ | t2]), do: do_match?(t1, t2)
  # Both exhausted on the same literal part: match.
  defp do_match?([h | []], [h | []]), do: true
  # Same literal part: keep walking.
  defp do_match?([h | t1], [h | t2]), do: do_match?(t1, t2)
  # Anything else (differing literals, request exhausted early): no match.
  defp do_match?(_, _), do: false

  # Returns a splitter closure for a supported delimiter; raises otherwise.
  defp scope_splitter(pattern) when pattern in ~w(. : , ;) do
    &String.split(&1, pattern)
  end

  defp scope_splitter(pattern) do
    raise ArgumentError,
      message: """
      cannot use #{inspect(pattern)} for scope delimitation.
      Use one of the supported delimiters (. : , ;) instead\
      """
  end
end
|
lib/north/scope/wildcard.ex
| 0.85928 | 0.402099 |
wildcard.ex
|
starcoder
|
defmodule ForcingPhase do
  @moduledoc """
  Functions for converting between time and phase modulo a forcing frequency.
  """

  @doc """
  Returns the fractional part of a floating point number.

  Truncation is toward zero, so negative input yields a non-positive result
  (e.g. `frac_part(-1.25) == -0.25`).
  """
  @spec frac_part(number()) :: number()
  def frac_part(x) do
    x - trunc(x)
  end

  @doc """
  Returns the remainder of `x` divided by `y` - like `Kernel.rem/2` but for
  floats. For positive `y` the result is always in `[0, y)`; `y == 0` returns
  `x` unchanged and a negative `y` is treated as `abs(y)`.
  """
  @spec modulo(number(), number()) :: number()
  def modulo(x, y) when y == 0, do: x
  def modulo(x, y) when y < 0, do: modulo(x, abs(y))
  def modulo(x, y) when x < 0, do: modulo(x + y, y)
  def modulo(x, y), do: x - trunc(x / y) * y

  @doc """
  For a given forcing frequency `omega` returns the forcing period
  `2 * pi / omega`, or an error for a non-positive frequency.
  """
  # Spec fixed: the error tuple carries a String, not a number.
  @spec forcing_period(number()) :: {:ok, number()} | {:error, String.t()}
  def forcing_period(omega) do
    if omega > 0 do
      {:ok, 2.0 * :math.pi() / omega}
    else
      {:error, "Forcing frequency must be positive"}
    end
  end

  @doc """
  For a given time `t` and forcing frequency `omega` returns the phase
  relative to the forcing period and optionally scaled by the forcing
  period so that it ranges from 0 to 1. Returns `nil` for a non-positive
  frequency.
  """
  @spec phi(number(), number(), boolean()) :: number() | nil
  def phi(t, omega, scaled \\ true) do
    # Rewritten from a nested anonymous-function pipeline to a plain case.
    case forcing_period(omega) do
      {:ok, period} -> modulo(t, period) / if(scaled, do: period, else: 1)
      {:error, _reason} -> nil
    end
  end

  @doc """
  Returns the lowest time greater than or equal to time `t` for which the
  phase relative to `period` is `phi`.
  """
  @spec forward_to_phase(number(), number(), number()) :: number()
  def forward_to_phase(t, phi, period) do
    phase_difference = period * phi - modulo(t, period)

    result =
      cond do
        phase_difference >= 0 -> t + phase_difference
        true -> t + period + phase_difference
      end

    # Check for rounding errors - new phase should equal old. We particularly
    # don't want it to be slightly less, as this could trap us in the sticking
    # region.
    new_phi = modulo(result / period, 1)
    delta_phi = phi - new_phi

    cond do
      delta_phi > 0 -> result + delta_phi * period
      true -> result
    end
  end

  @doc """
  For a forcing frequency `omega` returns the coefficient of the forcing term
  in the equation for the displacement between impacts.
  """
  @spec gamma(number()) :: number()
  # Fixed: the previous guard `omega in [1, -1]` used strict (===) matching,
  # so gamma(1.0) fell through to the general clause and raised
  # ArithmeticError on division by zero. `==` also catches 1.0 and -1.0.
  def gamma(omega) when omega == 1 or omega == -1, do: 1

  def gamma(omega) do
    1.0 / (1.0 - :math.pow(omega, 2))
  end
end
|
apps/imposc/lib/dynamics/forcing_phase.ex
| 0.936 | 0.874077 |
forcing_phase.ex
|
starcoder
|
defmodule MssqlexV3.Error do
  @moduledoc """
  Defines an error returned from the ODBC adapter.

  * `message` is the full message returned by ODBC
  * `odbc_code` is an atom representing the returned
    [SQLSTATE](https://docs.microsoft.com/en-us/sql/odbc/reference/appendixes/appendix-a-odbc-error-codes)
    or the string representation of the code if it cannot be translated.
  """

  defexception [:message, :mssql, :connection_id, :query]

  @type t :: %MssqlexV3.Error{}

  @doc false
  @spec exception({list(), integer(), list()}) :: t()
  # 574 and 226 are SQL Server native error codes, both mapped to
  # :not_allowed_in_transaction — presumably statements that cannot run
  # inside a transaction; confirm against SQL Server error documentation.
  def exception({_odbc_code, 574, reason}) do
    exception([mssql: %{code: :not_allowed_in_transaction, message: reason}])
  end

  def exception({_odbc_code, 226, reason}) do
    exception([mssql: %{code: :not_allowed_in_transaction, message: reason}])
  end

  # Any other ODBC error tuple: keep the SQLSTATE as the code.
  def exception({odbc_code, _native_code, reason}) do
    exception([mssql: %{code: odbc_code, message: reason}])
  end

  # Bare atom form: the code doubles as the message.
  def exception(code) when is_atom(code) do
    exception([mssql: %{code: code, message: code}])
  end

  # Keyword form (all clauses above funnel here): normalizes the :mssql map
  # consumed by message/1.
  def exception(opts) do
    mssql =
      if fields = Keyword.get(opts, :mssql) do
        code = Map.get(fields, :code, :feature_not_supported)
        message = Map.get(fields, :message, "No message provided!")
        driver = fields |> Map.get(:driver) |> build_driver()

        fields
        |> Map.put(:mssql_code, to_string(code))
        |> Map.put(:driver, driver)
        |> Map.put(:code, MssqlexV3.ErrorCode.code_to_name(code))
        |> Map.put(:message, build_message(message, driver))
      end

    message = Keyword.get(opts, :message)
    connection_id = Keyword.get(opts, :connection_id)
    %MssqlexV3.Error{mssql: mssql, message: message, connection_id: connection_id}
  end

  # Renders "SQLSTATE (code_name) message" when ODBC details are present,
  # otherwise falls back to the plain :message field.
  def message(e) do
    if map = e.mssql do
      IO.iodata_to_binary([
        # map.severity,
        # ?\s,
        Map.get(map, :mssql_code, "feature_not_supported"),
        ?\s,
        [?(, Atom.to_string(map.code), ?)],
        ?\s,
        Map.get(map, :message, "No message provided!"),
        # build_query(e.query),
        # build_metadata(map),
        # build_detail(map)
      ])
    else
      e.message
    end
  end

  defp build_driver(nil), do: nil

  # Strips ODBC-style curly braces from the driver name: "{Foo}" -> "Foo".
  defp build_driver(driver) do
    String.replace(driver, ~r/\{|\}/, "")
  end

  # Removes vendor/driver prefixes from the raw ODBC message and normalizes
  # sentence spacing before trimming.
  defp build_message(msg, driver) do
    msg
    |> to_string()
    |> String.replace("[Microsoft]", "")
    |> String.replace("[SQL Server]", "")
    |> String.replace("[ODBC Driver 17 for SQL Server]", "")
    |> String.replace("[#{driver}]", "")
    |> String.replace(~r/(\.\s+|\.)/, ". ")
    |> String.trim()
  end

  # defp get_constraint_violations(reason) do
  #   constraint_checks = [
  #     unique: ~r/Violation of UNIQUE KEY constraint '(\S+?)'./,
  #     unique: ~r/Cannot insert duplicate key row .* with unique index '(\S+?)'/,
  #     foreign_key:
  #       ~r/conflicted with the (?:FOREIGN KEY|REFERENCE) constraint "(\S+?)"./,
  #     check: ~r/conflicted with the CHECK constraint "(\S+?)"./
  #   ]
  #   extract = fn {key, test}, acc ->
  #     concatenate_match = fn [match], acc -> [{key, match} | acc] end
  #     case Regex.scan(test, reason, capture: :all_but_first) do
  #       [] -> acc
  #       matches -> Enum.reduce(matches, acc, concatenate_match)
  #     end
  #   end
  #   Enum.reduce(constraint_checks, [], extract)
  # end
end
defmodule MssqlexV3.QueryError do
  @moduledoc """
  Raised for errors in the query itself (as opposed to adapter/ODBC errors,
  which raise `MssqlexV3.Error`).
  """

  defexception [:message]
end
|
lib/mssqlex_v3/error.ex
| 0.814422 | 0.400661 |
error.ex
|
starcoder
|
defmodule Aino.Session do
  @moduledoc """
  Session storage
  """

  alias Aino.Session.Storage

  @doc """
  Put a session configuration into the token

  Used for `decode/1` and `encode/1`. The configuration should be an implementation
  of `Aino.Session.Storage`.

  The following keys will be added to the token `[:session_config]`

      iex> config = %Session.Cookie{key: "key", salt: "salt"}
      iex> token = %{}
      iex> token = Session.config(token, config)
      iex> token.session_config == config
      true
  """
  def config(token, config), do: Map.put(token, :session_config, config)

  @doc """
  Decode session data from the token

  Can only be used with `Aino.Session.config/2` having run before.

  The following keys will be added to the token `[:session]`
  """
  def decode(token), do: Storage.decode(token.session_config, token)

  @doc """
  Encode session data from the token

  Can only be used with `Aino.Middleware.cookies/1` and `Aino.Session.config/2` having run before.
  """
  # Only write the session back out when something actually changed it.
  def encode(%{session_updated: true} = token), do: Storage.encode(token.session_config, token)
  def encode(token), do: token
end
defmodule Aino.Session.Token do
  @moduledoc """
  Token functions related only to session

  Session data _must_ be decoded before using these functions
  """

  @doc """
  Puts a new key/value session data

  Values _must_ be serializable via JSON.

  Session data _must_ be decoded before using putting a new key
  """
  def put(%{session: session} = token, key, value) do
    # Marking :session_updated lets Aino.Session.encode/1 know to persist
    Map.merge(token, %{
      session: Map.put(session, key, value),
      session_updated: true
    })
  end

  def put(_token, _key, _value) do
    raise """
    Make sure to decode session data before trying to put values in it
    See `Aino.Session.decode/1`
    """
  end

  @doc """
  Clear a session, resetting to an empty map
  """
  def clear(token) do
    Map.merge(token, %{session: %{}, session_updated: true})
  end
end
defprotocol Aino.Session.Storage do
  @moduledoc """
  Encode and decode session data in a pluggable backend
  """

  @doc """
  Parse session data on the token

  The following keys should be added to the token `[:session]`
  """
  def decode(config, token)

  @doc """
  Set session data from the token
  """
  def encode(config, token)
end
defmodule Aino.Session.Cookie do
  @moduledoc """
  Session implementation using cookies as the storage
  """

  alias Aino.Token

  # :key is the HMAC key; :salt is appended to the payload before signing.
  defstruct [:key, :salt]

  @doc false
  # Base64-encoded HMAC-SHA256 over `data <> salt`, keyed by `config.key`.
  def signature(config, data) do
    Base.encode64(:crypto.mac(:hmac, :sha256, config.key, data <> config.salt))
  end

  @doc """
  Parse session data from cookies

  Verifies the signature and if valid, parses session JSON data.

  Can only be used with `Aino.Middleware.cookies/1` and `Aino.Session.config/2` having run before.

  Adds the following keys to the token `[:session]`
  """
  def decode(config, token) do
    case token.cookies["_aino_session"] do
      data when is_binary(data) ->
        expected_signature = token.cookies["_aino_session_signature"]
        signature = signature(config, data)

        # NOTE(review): `==` is not a constant-time comparison; a
        # constant-time compare would be safer for HMAC checks — confirm
        # whether timing attacks are in the threat model.
        case expected_signature == signature do
          true ->
            parse_session(token, data)

          false ->
            # Tampered or missing signature: start with an empty session
            Map.put(token, :session, %{})
        end

      _ ->
        # No session cookie present
        Map.put(token, :session, %{})
    end
  end

  # Decodes the session JSON; any parse failure yields an empty session.
  defp parse_session(token, data) do
    case Jason.decode(data) do
      {:ok, session} ->
        Map.put(token, :session, session)

      {:error, _} ->
        Map.put(token, :session, %{})
    end
  end

  @doc """
  Response will be returned with two new `Set-Cookie` headers, a signature
  of the session data and the session data itself as JSON.
  """
  def encode(config, token) do
    case is_map(token.session) do
      true ->
        # The "t" timestamp makes each encoded payload (and signature) unique
        session = Map.put(token.session, "t", DateTime.utc_now())

        case Jason.encode(session) do
          {:ok, data} ->
            signature = signature(config, data)

            token
            |> Token.response_header("Set-Cookie", "_aino_session=#{data}")
            |> Token.response_header("Set-Cookie", "_aino_session_signature=#{signature}")

          # NOTE(review): Jason.encode/1 returns {:error, reason} on failure,
          # not :error — this clause may never match; verify.
          :error ->
            token
        end

      false ->
        token
    end
  end

  defimpl Aino.Session.Storage do
    alias Aino.Session.Cookie

    def decode(config, token) do
      Cookie.decode(config, token)
    end

    def encode(config, token) do
      Cookie.encode(config, token)
    end
  end
end
|
lib/aino/session.ex
| 0.834508 | 0.54692 |
session.ex
|
starcoder
|
defmodule ExUnit.Formatter do
@moduledoc """
This module holds helper functions related to formatting and contains
documentation about the formatting protocol.
Formatters are registered at the `ExUnit.EventManager` event manager and
will be send events by the runner.
The following events are possible:
* `{:suite_started, opts}` -
the suite has started with the specified options to the runner.
* `{:suite_finished, run_us, load_us}` -
the suite has finished. `run_us` and `load_us` are the run and load
times in microseconds respectively.
* `{:case_started, test_case}` -
a test case has started. See `ExUnit.TestCase` for details.
* `{:case_finished, test_case}` -
a test case has finished. See `ExUnit.TestCase` for details.
* `{:test_started, test_case}` -
a test case has started. See `ExUnit.Test` for details.
* `{:test_finished, test_case}` -
a test case has finished. See `ExUnit.Test` for details.
"""
@type id :: term
@type test_case :: ExUnit.TestCase.t
@type test :: ExUnit.Test.t
@type run_us :: pos_integer
@type load_us :: pos_integer | nil
import Exception, only: [format_stacktrace_entry: 1]
@label_padding " "
@counter_padding " "
@inspect_padding @counter_padding <> @label_padding
@doc """
Formats time taken running the test suite.
It receives the time spent running the tests and
optionally the time spent loading the test suite.
## Examples
iex> format_time(10000, nil)
"Finished in 0.01 seconds"
iex> format_time(10000, 20000)
"Finished in 0.03 seconds (0.02s on load, 0.01s on tests)"
iex> format_time(10000, 200000)
"Finished in 0.2 seconds (0.2s on load, 0.01s on tests)"
"""
@spec format_time(run_us, load_us) :: String.t
def format_time(run_us, nil) do
"Finished in #{run_us |> normalize_us |> format_us} seconds"
end
def format_time(run_us, load_us) do
run_us = run_us |> normalize_us
load_us = load_us |> normalize_us
total_us = run_us + load_us
"Finished in #{format_us total_us} seconds (#{format_us load_us}s on load, #{format_us run_us}s on tests)"
end
defp normalize_us(us) do
div(us, 10000)
end
defp format_us(us) do
if us < 10 do
"0.0#{us}"
else
us = div us, 10
"#{div(us, 10)}.#{rem(us, 10)}"
end
end
@doc """
Formats the filters used to constrain the cases to be run.

## Examples

    iex> format_filters([run: true, slow: false], :include)
    "Including tags: [run: true, slow: false]"
"""
@spec format_filters(Keyword.t, atom) :: String.t
def format_filters(filters, type) do
  # Raises CaseClauseError for anything other than :include/:exclude,
  # matching the only two filter kinds ExUnit supports.
  verb =
    case type do
      :include -> "Including"
      :exclude -> "Excluding"
    end

  "#{verb} tags: #{inspect(filters)}"
end
@doc """
Receives a test and formats its failure.

`failures` is a list of `{kind, reason, stacktrace}` tuples. The result
contains the counter-prefixed test name and location, each formatted
failure, and any tags configured for reporting.
"""
def format_test_failure(test, failures, counter, width, formatter) do
%ExUnit.Test{name: name, case: case, tags: tags} = test
# Header: "  N) test name (CaseModule)" followed by "file:line".
test_info(with_counter(counter, "#{name} (#{inspect case})"), formatter)
<> test_location(with_location(tags), formatter)
<> Enum.map_join(Enum.with_index(failures), "", fn {{kind, reason, stack}, i} ->
failure_header(failures, i)
<> format_kind_reason(kind, reason, width, formatter)
<> format_stacktrace(stack, case, name, formatter)
end)
<> report(tags, failures, width, formatter)
end
# Renders the extra "tags:" section for a failed test. Only the tags
# listed under the test's `:report` tag are shown; with none configured
# the result is the empty string.
defp report(tags, failures, width, formatter) do
case Map.take(tags, List.wrap(tags[:report])) do
report when map_size(report) == 0 ->
""
report ->
report_spacing(failures)
<> extra_info("tags:", formatter)
<> Enum.map_join(report, "", fn {k, v} ->
prefix = "       #{k}: "
# Continuation lines of the value are indented to line up with it.
prefix <> inspect_multiline(v, byte_size(prefix), width) <> "\n"
end)
end
end
# A single failure needs no extra spacing before the tags section;
# multiple failures are separated from it by a blank line.
defp report_spacing([_]), do: ""
defp report_spacing(_), do: "\n"
@doc """
Receives a test case and formats its failure.

Used when the whole case fails (for example in `setup_all`);
`failures` is a list of `{kind, reason, stacktrace}` tuples.
"""
def format_test_case_failure(test_case, failures, counter, width, formatter) do
%ExUnit.TestCase{name: name} = test_case
test_case_info(with_counter(counter, "#{inspect name}: "), formatter)
<> Enum.map_join(Enum.with_index(failures), "", fn {{kind, reason, stack}, i} ->
failure_header(failures, i)
<> format_kind_reason(kind, reason, width, formatter)
# No individual test is associated with a case-level failure.
<> format_stacktrace(stack, name, nil, formatter)
end)
end
# Formats an ExUnit.AssertionError as labelled fields (note/code/lhs/rhs),
# appending a colour diff of lhs vs rhs when the formatter has colours
# enabled. Absent fields carry the `no_value` sentinel and are filtered out.
defp format_kind_reason(:error, %ExUnit.AssertionError{} = struct, width, formatter) do
padding_size = byte_size(@inspect_padding)
fields = [
note: if_value(struct.message, &format_banner(&1, formatter)),
code: if_value(struct.expr, &code_multiline(&1, padding_size)),
lhs: if_value(struct.left, &inspect_multiline(&1, padding_size, width)),
rhs: if_value(struct.right, &inspect_multiline(&1, padding_size, width))
]
# The diff is rendered with colour markers, so it is only computed
# for colour-capable formatters.
if formatter.(:colors_enabled?, nil) do
fields ++ [diff: format_diff(struct, formatter)]
else
fields
end
|> filter_interesting_fields()
|> format_each_field(formatter)
|> make_into_lines(@counter_padding)
end
# Any other error kind is delegated to the standard exception banner.
defp format_kind_reason(kind, reason, _width, formatter) do
error_info Exception.format_banner(kind, reason), formatter
end
# Drops fields whose value is the `no_value` sentinel so that only the
# populated parts of the assertion error are printed.
defp filter_interesting_fields(fields) do
Enum.filter(fields, fn {_, value} ->
value != ExUnit.AssertionError.no_value
end)
end
# Prepends the padded (and possibly colourised) label to each field's
# already-formatted value.
defp format_each_field(fields, formatter) do
Enum.map(fields, fn {label, value} ->
format_label(label, formatter) <> value
end)
end
# Applies `fun` to `value` unless it is the `no_value` sentinel, which is
# passed through untouched (and later filtered out).
defp if_value(value, fun) do
if value == ExUnit.AssertionError.no_value do
value
else
fun.(value)
end
end
# Renders a field label (e.g. "code:", "lhs:") padded so the values that
# follow line up in a column. The :note label has no visible prefix.
defp format_label(:note, _formatter), do: ""
defp format_label(label, formatter) do
  # String.ljust/2 was deprecated and later removed from Elixir;
  # String.pad_trailing/2 is its direct replacement with the same
  # pad-with-spaces behavior.
  formatter.(:error_info, String.pad_trailing("#{label}:", byte_size(@label_padding)))
end

# Formats the assertion message, indenting continuation lines so a
# multi-line banner lines up under the failure counter.
defp format_banner(value, formatter) do
  value = String.replace(value, "\n", "\n" <> @counter_padding)
  formatter.(:error_info, value)
end
# Indents the continuation lines of a (possibly multi-line) code snippet
# so they align under the "code:" label.
defp code_multiline(expr, padding_size) when is_binary(expr) do
  String.replace(expr, "\n", "\n" <> String.duplicate(" ", padding_size))
end
# Non-binary expressions are quoted ASTs; stringify them first.
defp code_multiline(expr, padding_size) do
  expr |> Macro.to_string() |> code_multiline(padding_size)
end

# Pretty-prints a term, shrinking the inspect width by the label padding
# and indenting continuation lines to match.
defp inspect_multiline(expr, padding_size, width) do
  width = if width == :infinity, do: width, else: width - padding_size

  expr
  |> inspect(pretty: true, width: width)
  |> String.replace("\n", "\n" <> String.duplicate(" ", padding_size))
end

# Joins the formatted reasons into padded lines, ending with a newline.
defp make_into_lines(reasons, padding) do
  padding <> Enum.join(reasons, "\n" <> padding) <> "\n"
end
# Builds the diff field for an assertion error. A diff is only attempted
# when both sides are present; an unsupported type pair yields the
# `no_value` sentinel so the field is filtered out downstream.
defp format_diff(struct, formatter) do
if_value(struct.left, fn left ->
if_value(struct.right, fn right ->
format_diff(left, right, formatter) || ExUnit.AssertionError.no_value
end)
end)
end
@doc """
Formats the difference between `left` and `right`.

Returns `nil` if they are not the same data type,
or if the given data type is not supported.
"""
def format_diff(left, right, formatter_fun)
# Printable strings: character-level myers diff; non-printable binaries
# fall through to nil (the `if` without else).
def format_diff(<<left::bytes>>, <<right::bytes>>, formatter) do
if String.printable?(left) and String.printable?(right) do
String.myers_difference(left, right)
|> Enum.map_join(&format_diff_fragment(&1, formatter))
end
end
# Structs of the same type diff as maps, keeping the struct name prefix.
def format_diff(%name{} = left, %name{} = right, formatter) do
left = Map.from_struct(left)
right = Map.from_struct(right)
format_map_diff(left, right, inspect(name), formatter)
end
# Structs of different types are not comparable.
def format_diff(%_{}, %_{}, _formatter), do: nil
def format_diff(%{} = left, %{} = right, formatter) do
format_map_diff(left, right, "", formatter)
end
# Numbers of the same kind: diff their inspected forms and append the
# signed "(off by N)" delta.
def format_diff(left, right, formatter)
when is_integer(left) and is_integer(right)
when is_float(left) and is_float(right) do
{kind, skew} =
case to_string(right - left) do
"-" <> _ = result ->
{:diff_delete, result}
result ->
{:diff_insert, "+" <> result}
end
value_diff = formatter.(kind, "(off by " <> skew <> ")")
format_diff(inspect(left), inspect(right), formatter) <> " " <> value_diff
end
# Tuples and lists are diffed via their inspected string representation.
def format_diff(left, right, formatter)
when is_tuple(left) and is_tuple(right)
when is_list(left) and is_list(right) do
format_diff(inspect(left), inspect(right), formatter)
end
def format_diff(_left, _right, _formatter), do: nil
# Renders a map (or struct) diff as "%Name{...}": pairs only in `left`
# are marked deleted, pairs only in `right` inserted, and altered values
# get a nested diff.
defp format_map_diff(left, right, name, formatter) do
{surplus, altered, missing} = map_difference(left, right)
# Keyword syntax ("key: val") is used only when every shown pair qualifies.
keyword? =
Inspect.List.keyword?(surplus) and
Inspect.List.keyword?(altered) and
Inspect.List.keyword?(missing)
# "..." stands in for the unchanged keys that are not displayed.
result =
if map_size(right) > length(altered) + length(missing),
do: ["..."],
else: []
result = Enum.reduce(missing, result, fn({key, val}, acc) ->
map_pair = format_map_pair(inspect(key), inspect(val), keyword?)
[formatter.(:diff_insert, map_pair) | acc]
end)
result = Enum.reduce(surplus, result, fn({key, val}, acc) ->
map_pair = format_map_pair(inspect(key), inspect(val), keyword?)
[formatter.(:diff_delete, map_pair) | acc]
end)
result = Enum.reduce(altered, result, fn({key, {val1, val2}}, acc) ->
value_diff = format_inner_diff(val1, val2, formatter)
[format_map_pair(inspect(key), value_diff, keyword?) | acc]
end)
"%" <> name <> "{" <> Enum.join(result, ", ") <> "}"
end
# Splits the entries of two maps into three keyword lists:
#   * `surplus` - keys present only in `map1`
#   * `altered` - keys in both but with different values, as {key, {old, new}}
#   * `missing` - keys present only in `map2`
defp map_difference(map1, map2) do
  {surplus, altered} =
    Enum.reduce(map1, {[], []}, fn {key, val1}, {surplus, altered} = acc ->
      case Map.fetch(map2, key) do
        {:ok, ^val1} -> acc
        {:ok, val2} -> {surplus, [{key, {val1, val2}} | altered]}
        :error -> {[{key, val1} | surplus], altered}
      end
    end)

  missing =
    map2
    |> Enum.reject(fn {key, _} -> Map.has_key?(map1, key) end)
    |> Enum.reverse()

  {surplus, altered, missing}
end
# Renders one key/value pair: "key => value" in the general case and
# "key: value" (dropping the inspected atom's leading colon) when every
# key in the diff is keyword-compatible.
defp format_map_pair(key, value, false) do
  Enum.join([key, " => ", value])
end
defp format_map_pair(":" <> key, value, true) do
  format_map_pair(key, value, true)
end
defp format_map_pair(key, value, true) do
  Enum.join([key, ": ", value])
end
# Diffs two values nested inside a map. Strings are inspected first so
# their quoting is visible in the output; other values fall back to a
# whole-value delete/insert when no structural diff is supported.
defp format_inner_diff(<<left::bytes>>, <<right::bytes>>, formatter) do
format_diff(inspect(left), inspect(right), formatter)
end
defp format_inner_diff(left, right, formatter) do
if result = format_diff(left, right, formatter) do
result
else
formatter.(:diff_delete, inspect(left)) <>
formatter.(:diff_insert, inspect(right))
end
end
# Colourises one fragment of a myers difference: equal chunks pass
# through; deletions and insertions go through the formatter.
defp format_diff_fragment({:eq, content}, _), do: content
defp format_diff_fragment({:del, content}, formatter) do
formatter.(:diff_delete, content)
end
defp format_diff_fragment({:ins, content}, formatter) do
formatter.(:diff_insert, content)
end
# Formats the stacktrace section of a failure. An empty stacktrace
# produces no output at all (not even the header).
defp format_stacktrace([], _case, _test, _color) do
""
end
defp format_stacktrace(stacktrace, test_case, test, color) do
extra_info("stacktrace:", color) <>
Enum.map_join(stacktrace,
fn(s) -> stacktrace_info format_stacktrace_entry(s, test_case, test), color end)
end
# An entry for the failing test function itself is collapsed to just
# "file:line"; anything else uses the standard Exception formatting.
defp format_stacktrace_entry({test_case, test, _, location}, test_case, test) do
"#{location[:file]}:#{location[:line]}"
end
defp format_stacktrace_entry(s, _test_case, _test) do
format_stacktrace_entry(s)
end
# Builds the cwd-relative "file:line" location of a test from its tags.
defp with_location(tags) do
"#{Path.relative_to_cwd(tags[:file])}:#{tags[:line]}"
end
# A single failure needs no header; multiple failures are numbered 1-based.
defp failure_header([_], _), do: ""
defp failure_header(_, i), do: "\n#{@counter_padding}Failure ##{i+1}\n"
# Right-aligns the failure counter in a 3-character column so numbers up
# to 999 line up, e.g. "  1) ...", " 10) ...", "100) ...".
defp with_counter(counter, msg) when counter < 10, do: "  #{counter}) #{msg}"
defp with_counter(counter, msg) when counter < 100, do: " #{counter}) #{msg}"
defp with_counter(counter, msg), do: "#{counter}) #{msg}"
# Each helper below takes a message and either a formatter function or
# `nil`. With a formatter the message is first run through it (typically
# for colouring) and then re-dispatched to the `nil` clause, which applies
# the plain-text layout (indentation / trailing newline).
defp test_case_info(msg, nil), do: msg <> "failure on setup_all callback, tests invalidated\n"
defp test_case_info(msg, formatter), do: test_case_info(formatter.(:test_case_info, msg), nil)
defp test_info(msg, nil), do: msg <> "\n"
defp test_info(msg, formatter), do: test_info(formatter.(:test_info, msg), nil)
defp test_location(msg, nil), do: "     " <> msg <> "\n"
defp test_location(msg, formatter), do: test_location(formatter.(:location_info, msg), nil)
defp error_info(msg, nil) do
"     " <> String.replace(msg, "\n", "\n     ") <> "\n"
end
defp error_info(msg, formatter), do: error_info(formatter.(:error_info, msg), nil)
defp extra_info(msg, nil), do: "     " <> msg <> "\n"
defp extra_info(msg, formatter), do: extra_info(formatter.(:extra_info, msg), nil)
defp stacktrace_info(msg, nil), do: "       " <> msg <> "\n"
defp stacktrace_info(msg, formatter), do: stacktrace_info(formatter.(:stacktrace_info, msg), nil)
end
|
lib/ex_unit/lib/ex_unit/formatter.ex
| 0.849831 | 0.700511 |
formatter.ex
|
starcoder
|
defmodule BroadwaySQS.Producer do
@moduledoc """
A GenStage producer that continuously polls messages from a SQS queue and
acknowledge them after being successfully processed.
By default this producer uses `BroadwaySQS.ExAwsClient` to talk to SQS but
you can provide your client by implementing the `BroadwaySQS.SQSClient`
behaviour.
For a quick getting started on using Broadway with Amazon SQS, please see
the [Amazon SQS Guide](https://hexdocs.pm/broadway/amazon-sqs.html).
## Options
Aside from `:receive_interval` and `:sqs_client` which are generic and apply to all
producers (regardless of the client implementation), all other options are specific to
the `BroadwaySQS.ExAwsClient`, which is the default client.
#{NimbleOptions.docs(BroadwaySQS.Options.definition())}
## Acknowledgments
You can use the `:on_success` and `:on_failure` options to control how messages are
acked on SQS. You can set these options when starting the SQS producer or change them
for each message through `Broadway.Message.configure_ack/2`. By default, successful
messages are acked (`:ack`) and failed messages are not (`:noop`).
The possible values for `:on_success` and `:on_failure` are:
* `:ack` - acknowledge the message. SQS will delete the message from the queue
and will not redeliver it to any other consumer.
* `:noop` - do not acknowledge the message. SQS will eventually redeliver the message
or remove it based on the "Visibility Timeout" and "Max Receive Count"
configurations. For more information, see:
* ["Visibility Timeout" page on Amazon SQS](https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-visibility-timeout.html)
* ["Dead Letter Queue" page on Amazon SQS](https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-dead-letter-queues.html)
### Batching
Even if you are not interested in working with Broadway batches via the
`handle_batch/3` callback, we recommend all Broadway pipelines with SQS
producers to define a default batcher with `batch_size` set to 10, so
messages can be acknowledged in batches, which improves the performance
and reduces the costs of integrating with SQS.
## Example
Broadway.start_link(MyBroadway,
name: MyBroadway,
producer: [
module: {BroadwaySQS.Producer,
queue_url: "https://sqs.amazonaws.com/0000000000/my_queue",
config: [
access_key_id: "YOUR_AWS_ACCESS_KEY_ID",
secret_access_key: "YOUR_AWS_SECRET_ACCESS_KEY",
region: "us-east-2"
]
}
],
processors: [
default: []
],
batchers: [
default: [
batch_size: 10,
batch_timeout: 2000
]
]
)
The above configuration will set up a producer that continuously receives
messages from `"my_queue"` and sends them downstream.
## Retrieving Metadata
By default the following information is added to the `metadata` field in the
`%Message{}` struct:
* `message_id` - The message id received when the message was sent to the queue
* `receipt_handle` - The receipt handle
* `md5_of_body` - An MD5 digest of the message body
You can access any of that information directly while processing the message:
def handle_message(_, message, _) do
receipt = %{
id: message.metadata.message_id,
receipt_handle: message.metadata.receipt_handle
}
# Do something with the receipt
end
If you want to retrieve `attributes` or `message_attributes`, you need to
configure the `:attributes_names` and `:message_attributes_names` options
accordingly, otherwise, attributes will not be attached to the response and
will not be available in the `metadata` field
producer: [
module: {BroadwaySQS.Producer,
queue_url: "https://sqs.amazonaws.com/0000000000/my_queue",
# Define which attributes/message_attributes you want to be attached
attribute_names: [:approximate_receive_count],
message_attribute_names: ["SomeAttribute"]
}
]
and then in `handle_message`:
def handle_message(_, message, _) do
approximate_receive_count = message.metadata.attributes["approximate_receive_count"]
some_attribute = message.metadata.message_attributes["SomeAttribute"]
# Do something with the attributes
end
For more information on the `:attributes_names` and `:message_attributes_names`
options, see ["AttributeName.N" and "MessageAttributeName.N" on the ReceiveMessage documentation](https://docs.aws.amazon.com/AWSSimpleQueueService/latest/APIReference/API_ReceiveMessage.html)
## Telemetry
This library exposes the following Telemetry events:
* `[:broadway_sqs, :receive_messages, :start]` - Dispatched before receiving
messages from SQS (`c:receive_messages/2`)
* measurement: `%{time: System.monotonic_time}`
* metadata: `%{name: atom, demand: integer}`
* `[:broadway_sqs, :receive_messages, :stop]` - Dispatched after messages have
been received from SQS and "wrapped".
* measurement: `%{duration: native_time}`
* metadata:
```
%{
name: atom,
messages: [Broadway.Message.t],
demand: integer
}
```
* `[:broadway_sqs, :receive_messages, :exception]` - Dispatched after a failure
while receiving messages from SQS.
* measurement: `%{duration: native_time}`
* metadata:
```
%{
name: atom,
demand: integer,
kind: kind,
reason: reason,
stacktrace: stacktrace
}
```
"""
use GenStage
require Logger
alias Broadway.Producer
alias NimbleOptions.ValidationError
@behaviour Producer
# GenStage callback: delegates client-specific option handling to the
# configured SQS client and starts with zero demand and no poll timer.
@impl true
def init(opts) do
receive_interval = opts[:receive_interval]
sqs_client = opts[:sqs_client]
{:ok, client_opts} = sqs_client.init(opts)
{:producer,
%{
demand: 0,
receive_timer: nil,
receive_interval: receive_interval,
sqs_client: {sqs_client, client_opts}
}}
end
# Broadway.Producer callback: validates the producer options before the
# pipeline starts and stores the ack configuration in :persistent_term
# under the pipeline name so the acknowledger can find it.
@impl true
def prepare_for_start(_module, broadway_opts) do
{producer_module, client_opts} = broadway_opts[:producer][:module]
# :queue_name was removed when ex_aws_sqs moved to queue URLs; fail
# loudly rather than silently polling the wrong queue.
if Keyword.has_key?(client_opts, :queue_name) do
Logger.error(
"The option :queue_name has been removed in order to keep compatibility with " <>
"ex_aws_sqs >= v3.0.0. Please set the queue URL using the new :queue_url option."
)
exit(:invalid_config)
end
case NimbleOptions.validate(client_opts, BroadwaySQS.Options.definition()) do
{:error, error} ->
raise ArgumentError, format_error(error)
{:ok, opts} ->
ack_ref = broadway_opts[:name]
:persistent_term.put(ack_ref, %{
queue_url: opts[:queue_url],
config: opts[:config],
on_success: opts[:on_success],
on_failure: opts[:on_failure]
})
broadway_opts_with_defaults =
put_in(broadway_opts, [:producer, :module], {producer_module, opts})
{[], broadway_opts_with_defaults}
end
end
# Turns a NimbleOptions.ValidationError into a readable ArgumentError message.
defp format_error(%ValidationError{keys_path: [], message: message}) do
"invalid configuration given to SQSBroadway.prepare_for_start/2, " <> message
end
defp format_error(%ValidationError{keys_path: keys_path, message: message}) do
"invalid configuration given to SQSBroadway.prepare_for_start/2 for key #{inspect(keys_path)}, " <>
message
end
# Accumulates demand and tries to satisfy it immediately.
@impl true
def handle_demand(incoming_demand, %{demand: demand} = state) do
handle_receive_messages(%{state | demand: demand + incoming_demand})
end
# A :receive_messages tick with no scheduled timer is stale - ignore it.
@impl true
def handle_info(:receive_messages, %{receive_timer: nil} = state) do
{:noreply, [], state}
end
@impl true
def handle_info(:receive_messages, state) do
handle_receive_messages(%{state | receive_timer: nil})
end
@impl true
def handle_info(_, state) do
{:noreply, [], state}
end
# Broadway.Producer callback: stop polling when the pipeline is draining.
@impl Producer
def prepare_for_draining(%{receive_timer: receive_timer} = state) do
receive_timer && Process.cancel_timer(receive_timer)
{:noreply, [], state}
end
# Polls SQS when there is outstanding demand and no timer pending.
# When SQS returned nothing we back off for :receive_interval; when demand
# remains we poll again immediately; when demand is satisfied we wait for
# more demand before scheduling anything.
defp handle_receive_messages(%{receive_timer: nil, demand: demand} = state) when demand > 0 do
messages = receive_messages_from_sqs(state, demand)
new_demand = demand - length(messages)
receive_timer =
case {messages, new_demand} do
{[], _} -> schedule_receive_messages(state.receive_interval)
{_, 0} -> nil
_ -> schedule_receive_messages(0)
end
{:noreply, messages, %{state | demand: new_demand, receive_timer: receive_timer}}
end
defp handle_receive_messages(state) do
{:noreply, [], state}
end
# Fetches messages through the configured client, wrapped in a telemetry
# span that reports the demand and the messages received.
defp receive_messages_from_sqs(state, total_demand) do
%{sqs_client: {client, opts}} = state
metadata = %{name: get_in(opts, [:ack_ref]), demand: total_demand}
:telemetry.span(
[:broadway_sqs, :receive_messages],
metadata,
fn ->
messages = client.receive_messages(total_demand, opts)
{messages, Map.put(metadata, :messages, messages)}
end
)
end
defp schedule_receive_messages(interval) do
Process.send_after(self(), :receive_messages, interval)
end
end
|
lib/broadway_sqs/producer.ex
| 0.896104 | 0.67256 |
producer.ex
|
starcoder
|
defmodule Mix.Tasks.Run do
use Mix.Task
@shortdoc "Runs the given file or expression"
@moduledoc """
Runs the given file or expression in the context of the application.
You can use this task to execute a particular file or command:
mix run -e Hello.world
mix run my_script.exs
This task provides a subset of the functionality available in the
`elixir` executable, including setting up the `System.argv/0` arguments:
mix run my_script.exs arg1 arg2 arg3
You can also use this task to simply start an application and keep
it running without halting:
mix run --no-halt
Before running any command, the task compiles and starts the current
application. Those can be configured with the options below.
You may also pass options specific to the `elixir` executable as follows:
elixir --sname hello -S mix run --no-halt
## Command line options
* `--config`, `-c` - loads the given configuration file
* `--eval`, `-e` - evaluate the given code
* `--require`, `-r` - requires pattern before running the command
* `--parallel`, `-p` - makes all requires parallel
* `--no-compile` - does not compile even if files require compilation
* `--no-deps-check` - does not check dependencies
* `--no-archives-check` - does not check archives
* `--no-halt` - does not halt the system after running the command
* `--no-mixexs` - allows the command to run even if there is no mix.exs
* `--no-start` - does not start applications after compilation
* `--no-elixir-version-check` - does not check the Elixir version from mix.exs
"""
@spec run(OptionParser.argv) :: :ok
def run(args) do
# parse_head! stops at the first bare argument so everything after the
# script name is passed through to the script as its own argv.
{opts, head} = OptionParser.parse_head!(args,
aliases: [r: :require, p: :parallel, e: :eval, c: :config],
strict: [parallel: :boolean, require: :keep, eval: :keep, config: :keep, mixexs: :boolean,
halt: :boolean, compile: :boolean, deps_check: :boolean, start: :boolean,
archives_check: :boolean, elixir_version_check: :boolean, parallel_require: :keep])
run(args, opts, head, &Code.eval_string/1, &Code.require_file/1)
# --no-halt keeps the VM (and started applications) alive forever.
unless Keyword.get(opts, :halt, true), do: Process.sleep(:infinity)
:ok
end
@doc false
# The evaluator functions are injected so tests can stub out actual
# evaluation/requiring of code.
@spec run(OptionParser.argv, keyword, OptionParser.argv,
(String.t -> term()), (String.t -> term())) :: :ok
def run(args, opts, head, expr_evaluator, file_evaluator) do
# TODO: Remove on v2.0
opts =
Enum.flat_map(opts, fn
{:parallel_require, value} ->
IO.warn "the --parallel-require option is deprecated in favour of using " <>
"--parallel to make all requires parallel and --require VAL for requiring"
[require: value, parallel: true]
opt ->
[opt]
end)
# With --eval there is no script file: all head arguments become argv.
# Otherwise the first head argument is the file to run.
{file, argv} =
case {Keyword.has_key?(opts, :eval), head} do
{true, _} -> {nil, head}
{_, [head | tail]} -> {head, tail}
{_, []} -> {nil, []}
end
System.argv(argv)
process_config(opts)
# Start app after rewriting System.argv,
# but before requiring and evaling.
cond do
Mix.Project.get ->
Mix.Task.run "app.start", args
"--no-mixexs" in args ->
:ok
true ->
Mix.Project.get!
end
process_load(opts, expr_evaluator)
if file do
if File.regular?(file) do
file_evaluator.(file)
else
Mix.raise "No such file: #{file}"
end
end
:ok
end
# Loads every --config file, in the order given.
defp process_config(opts) do
Enum.each opts, fn
{:config, value} ->
Mix.Task.run "loadconfig", [value]
_ ->
:ok
end
end
# Processes --require and --eval options in the order they were given.
defp process_load(opts, expr_evaluator) do
require_runner =
if opts[:parallel] do
&Kernel.ParallelRequire.files/1
else
fn(files) -> Enum.each(files, &Code.require_file/1) end
end
Enum.each opts, fn
{:require, value} ->
case filter_patterns(value) do
[] ->
Mix.raise "No files matched pattern #{inspect value} given to --require"
filtered ->
require_runner.(filtered)
end
{:eval, value} ->
expr_evaluator.(value)
_ ->
:ok
end
end
# Expands a wildcard pattern to the unique regular files it matches.
defp filter_patterns(pattern) do
Enum.filter(Enum.uniq(Path.wildcard(pattern)), &File.regular?(&1))
end
end
|
lib/mix/lib/mix/tasks/run.ex
| 0.70202 | 0.519826 |
run.ex
|
starcoder
|
defmodule Contex.BarChart do
@moduledoc """
Draws a barchart from a `Contex.Dataset`.
`Contex.BarChart` will attempt to create reasonable output with minimal input. The defaults are as follows:
- Bars will be drawn vertically (use `orientation/2` to override - options are `:horizontal` and `:vertical`)
- The first column of the dataset is used as the category column (i.e. the bar), and the second
column is used as the value column (i.e. the bar height). These can be overridden
with `set_cat_col_name/2` and `set_val_col_names/2`
- The barchart type defaults to `:stacked`. This doesn't really matter when you only have one series (one value column)
but if you accept the defaults and then add another value column you will see stacked bars rather than grouped. You
can override this with `type/2`
- By default the chart will be annotated with data labels (i.e. the value of a bar will be printed on a bar). This
can be overridden with `data_labels/2`. This override has no effect when there are 4 or more value columns specified.
- By default, the padding between the data series is 2 (how this translates into pixels depends on the plot size you specify
when adding the barchart to a `Contex.Plot`)
By default the BarChart figures out reasonable value axes. In the case of a `:stacked` bar chart it find the maximum
of the sum of the values for each category and the value axis is set to {0, that_max}. For a `:grouped` bar chart the
value axis minimum is set to the minimum value for any category and series, and likewise, the maximum is set to the
maximum value for any category and series. This may not work. For example, in the situation where you want zero to be
shown. You can force the range using `force_value_range/2`
"""
import Contex.SVG
alias __MODULE__
alias Contex.{Scale, ContinuousLinearScale, OrdinalScale}
alias Contex.CategoryColourScale
alias Contex.{Dataset, Mapping}
alias Contex.Axis
alias Contex.Utils
# Internal plot state. The scales and series colour assignments are
# recomputed whenever the mapping, size, palette or value range changes.
defstruct [
:dataset,
:mapping,
:options,
:category_scale,
:value_scale,
:series_fill_colours,
:custom_value_formatter,
:phx_event_handler,
:select_item,
:value_range,
axis_label_rotation: :auto,
width: 100,
height: 100,
type: :stacked,
data_labels: true,
orientation: :vertical,
colour_palette: :default,
padding: 2
]
# A bar chart requires exactly one category column and one or more value columns.
@required_mappings [
category_col: :exactly_one,
value_cols: :one_or_more
]
@type t() :: %__MODULE__{}
@type orientation() :: :vertical | :horizontal
@type plot_type() :: :stacked | :grouped
# A selected bar is identified by its category and its series (value column).
@type selected_item() :: %{category: any(), series: any()}
@doc """
Creates a new barchart from a dataset and sets defaults.

If the data in the dataset is stored as a map, the `:mapping` option is
required. This value must be a map of the plot's `:category_col` and
`:value_cols` to keys in the map, such as
`%{category_col: :column_a, value_cols: [:column_b, :column_c]}`. The
value for the `:value_cols` key must be a list.
"""
@spec new(Contex.Dataset.t(), keyword()) :: Contex.BarChart.t()
def new(%Dataset{} = dataset, options \\ [orientation: :vertical]) when is_list(options) do
  chart = %BarChart{
    dataset: dataset,
    mapping: Mapping.new(@required_mappings, Keyword.get(options, :mapping), dataset),
    orientation: get_orientation_from_options(options),
    options: options
  }

  set_default_scales(chart)
end
@doc """
Sets the default category and value scales for the plot based on its
column mapping.
"""
@spec set_default_scales(Contex.BarChart.t()) :: Contex.BarChart.t()
def set_default_scales(%BarChart{mapping: %{column_map: column_map}} = plot) do
  plot
  |> set_cat_col_name(column_map.category_col)
  |> set_val_col_names(column_map.value_cols)
end
@doc """
Specifies whether data labels are shown on the bars.
"""
@spec data_labels(Contex.BarChart.t(), boolean()) :: Contex.BarChart.t()
def data_labels(%BarChart{} = plot, show_labels) do
  %{plot | data_labels: show_labels}
end

@doc """
Specifies whether the bars are drawn stacked or grouped.
"""
@spec type(Contex.BarChart.t(), plot_type()) :: Contex.BarChart.t()
def type(%BarChart{mapping: mapping} = plot, type) do
  # The value scale depends on the type (sum vs min/max), so re-derive it.
  updated = %{plot | type: type}
  set_val_col_names(updated, mapping.column_map.value_cols)
end

@doc """
Specifies whether the bars are drawn horizontally or vertically.
"""
@spec orientation(Contex.BarChart.t(), orientation()) :: Contex.BarChart.t()
def orientation(%BarChart{} = plot, direction) do
  %{plot | orientation: direction}
end
@doc """
Forces the value scale to the given `{min, max}` data range.
"""
@spec force_value_range(Contex.BarChart.t(), {number, number}) :: Contex.BarChart.t()
def force_value_range(%BarChart{mapping: mapping} = plot, {min, max} = value_range)
    when is_number(min) and is_number(max) do
  updated = %{plot | value_range: value_range}
  set_val_col_names(updated, mapping.column_map.value_cols)
end

@doc false
def set_size(%BarChart{mapping: mapping} = plot, width, height) do
  # Re-applying the value and category columns forces the scales to be
  # recalculated for the new size. Only the scale ranges really need to
  # change, so this may do more work than strictly necessary.
  resized = %{plot | width: width, height: height}

  resized
  |> set_val_col_names(mapping.column_map.value_cols)
  |> set_cat_col_name(mapping.column_map.category_col)
end
@doc """
Specifies the label rotation value that will be applied to the bottom axis.

Accepts integer values for degrees of rotation or `:auto` (anything that is
not an integer is treated as `:auto`). Note that manually set rotation
values other than 45 or 90 will be treated as zero. The default value is
`:auto`, which sets the rotation to zero degrees if the number of items on
the axis is greater than eight, 45 degrees otherwise.
"""
@spec axis_label_rotation(Contex.BarChart.t(), integer() | :auto) :: Contex.BarChart.t()
def axis_label_rotation(%BarChart{} = plot, rotation) when is_integer(rotation) do
  %{plot | axis_label_rotation: rotation}
end

def axis_label_rotation(%BarChart{} = plot, _rotation) do
  %{plot | axis_label_rotation: :auto}
end

@doc """
Specifies the padding between the category groups. Defaults to 2.
Specified relative to the plot size.
"""
@spec padding(Contex.BarChart.t(), number) :: Contex.BarChart.t()
def padding(%BarChart{category_scale: %OrdinalScale{} = cat_scale} = plot, padding)
    when is_number(padding) do
  # When a category scale already exists, push the new padding into it.
  %{plot | padding: padding, category_scale: OrdinalScale.padding(cat_scale, padding)}
end

def padding(%BarChart{} = plot, padding) when is_number(padding) do
  %{plot | padding: padding}
end
@doc """
Overrides the default colours.

Colours can either be a named palette defined in
`Contex.CategoryColourScale` or a list of strings representing hex codes
of the colour as per CSS colour hex codes, but without the #. For example:

    barchart = BarChart.colours(barchart, ["fbb4ae", "b3cde3", "ccebc5"])

The colours will be applied to the data series in the same order as the
columns are specified in `set_val_col_names/2`. Any other value resets
the palette to `:default`.
"""
@spec colours(Contex.BarChart.t(), Contex.CategoryColourScale.colour_palette()) ::
        Contex.BarChart.t()
def colours(%BarChart{mapping: mapping} = plot, colour_palette)
    when is_list(colour_palette) or is_atom(colour_palette) do
  %{plot | colour_palette: colour_palette}
  |> set_val_col_names(mapping.column_map.value_cols)
end

def colours(%BarChart{mapping: mapping} = plot, _invalid_palette) do
  %{plot | colour_palette: :default}
  |> set_val_col_names(mapping.column_map.value_cols)
end
@doc """
Optionally specify a LiveView event handler. This attaches a `phx-click`
attribute to each bar element. Note that it may not work with some
browsers (e.g. Safari on iOS).
"""
def event_handler(%BarChart{} = plot, handler) do
  %{plot | phx_event_handler: handler}
end

@doc """
Highlights a selected value based on matching category and series.
"""
@spec select_item(Contex.BarChart.t(), selected_item()) :: Contex.BarChart.t()
def select_item(%BarChart{} = plot, item) do
  %{plot | select_item: item}
end

@doc ~S"""
Allows the axis tick labels to be overridden. For example, if you have a
numeric representation of money and you want the value axis to show it as
millions of dollars you might do something like:

    # Turns 1_234_567.67 into $1.23M
    defp money_formatter_millions(value) when is_number(value) do
      "$#{:erlang.float_to_binary(value/1_000_000.0, [decimals: 2])}M"
    end

    defp show_chart(data) do
      BarChart.new(data)
      |> BarChart.custom_value_formatter(&money_formatter_millions/1)
    end

Passing `nil` removes any custom formatter.
"""
@spec custom_value_formatter(Contex.BarChart.t(), nil | fun) :: Contex.BarChart.t()
def custom_value_formatter(%BarChart{} = plot, formatter)
    when is_function(formatter) or is_nil(formatter) do
  %{plot | custom_value_formatter: formatter}
end
@doc false
# Renders the chart as an iolist of SVG fragments: the axes first (when
# shown), then the bars in a group.
def to_svg(
%BarChart{
category_scale: category_scale,
value_scale: value_scale,
orientation: orientation
} = plot,
options
) do
# Maps the generic x/y axis visibility options onto category/value axes
# depending on orientation.
options = refine_options(options, orientation)
category_axis = get_category_axis(category_scale, orientation, plot)
# The custom tick formatter must be applied before the axis is built so
# tick labels (and bar labels below) pick it up.
value_scale = %{value_scale | custom_tick_formatter: plot.custom_value_formatter}
value_axis = get_value_axis(value_scale, orientation, plot)
plot = %{plot | value_scale: value_scale}
cat_axis_svg = if options.show_cat_axis, do: Axis.to_svg(category_axis), else: ""
val_axis_svg = if options.show_val_axis, do: Axis.to_svg(value_axis), else: ""
[
cat_axis_svg,
val_axis_svg,
"<g>",
get_svg_bars(plot),
"</g>"
]
end
# Reads the :orientation option, treating anything other than :horizontal
# (including a missing option) as :vertical.
defp get_orientation_from_options(options) when is_list(options) do
  if Keyword.get(options, :orientation) == :horizontal do
    :horizontal
  else
    :vertical
  end
end
# Maps the generic x/y axis visibility flags onto category/value axis
# flags: for horizontal bars the category axis is the y-axis; for
# vertical bars it is the x-axis.
defp refine_options(options, :horizontal) do
  Map.merge(options, %{
    show_cat_axis: options.show_y_axis,
    show_val_axis: options.show_x_axis
  })
end

defp refine_options(options, _orientation) do
  Map.merge(options, %{
    show_cat_axis: options.show_x_axis,
    show_val_axis: options.show_y_axis
  })
end
# For horizontal bars the categories run down the left axis, offset by
# the plot width.
defp get_category_axis(category_scale, :horizontal, plot) do
Axis.new_left_axis(category_scale) |> Axis.set_offset(plot.width)
end
# For vertical bars the categories run along the bottom axis. With :auto
# rotation, labels tilt 45 degrees once there are more than 8 ticks.
defp get_category_axis(category_scale, _, plot) do
rotation =
case plot.axis_label_rotation do
:auto ->
if length(Scale.ticks_range(category_scale)) > 8, do: 45, else: 0
degrees ->
degrees
end
category_scale
|> Axis.new_bottom_axis()
|> Axis.set_offset(plot.height)
|> Kernel.struct(rotation: rotation)
end
# The value axis takes whichever side the category axis did not.
defp get_value_axis(value_scale, :horizontal, plot),
do: Axis.new_bottom_axis(value_scale) |> Axis.set_offset(plot.height)
defp get_value_axis(value_scale, _, plot),
do: Axis.new_left_axis(value_scale) |> Axis.set_offset(plot.width)
@doc false
# Vertical stacked charts pass `true` as a second argument to
# Contex.Legend.to_svg/2 — presumably to flip the legend order so it
# matches the visual stacking order; confirm against Contex.Legend.
def get_svg_legend(%BarChart{series_fill_colours: scale, orientation: :vertical, type: :stacked}) do
Contex.Legend.to_svg(scale, true)
end
def get_svg_legend(%BarChart{series_fill_colours: scale}) do
Contex.Legend.to_svg(scale)
end
# Renders every dataset row as bar rects. Series fill colours are resolved
# once up front (one per value column) and shared across all rows.
defp get_svg_bars(%BarChart{mapping: %{column_map: column_map}, dataset: dataset} = plot) do
  fills =
    for column <- column_map.value_cols do
      CategoryColourScale.colour_for_value(plot.series_fill_colours, column)
    end

  Enum.map(dataset.data, &get_svg_bar(&1, plot, fills))
end
# Renders one data row as a set of bar <rect>s (one per value series).
defp get_svg_bar(
       row,
       %BarChart{mapping: mapping, category_scale: category_scale, value_scale: value_scale} =
         plot,
       fills
     ) do
  # Pull the category label and each series' raw value out of the row.
  cat_data = mapping.accessors.category_col.(row)
  series_values = Enum.map(mapping.accessors.value_cols, fn value_col -> value_col.(row) end)
  # Pixel interval occupied by this category along the category axis.
  cat_band = OrdinalScale.get_band(category_scale, cat_data)
  # Scaled {start, end} pixel intervals for each bar segment (stack-aware).
  bar_values = prepare_bar_values(series_values, value_scale, plot.type)
  labels = Enum.map(series_values, fn val -> Scale.get_formatted_tick(value_scale, val) end)
  event_handlers = get_bar_event_handlers(plot, cat_data, series_values)
  opacities = get_bar_opacities(plot, cat_data)
  get_svg_bar_rects(cat_band, bar_values, labels, plot, fills, event_handlers, opacities)
end
# When a non-empty LiveView event handler is configured, build one opts list
# per series pairing the value column with its row value, plus the phx_click
# target. The result aligns element-wise with the series value columns.
defp get_bar_event_handlers(
       %BarChart{phx_event_handler: phx_event_handler, mapping: mapping},
       category,
       series_values
     )
     when is_binary(phx_event_handler) and phx_event_handler != "" do
  for {col, value} <- Enum.zip(mapping.column_map.value_cols, series_values) do
    [category: category, series: col, value: value, phx_click: phx_event_handler]
  end
end

# No handler configured: one empty opts list per series.
defp get_bar_event_handlers(%BarChart{mapping: mapping}, _category, _series_values) do
  for _col <- mapping.column_map.value_cols, do: []
end
@bar_faded_opacity "0.3"
defp get_bar_opacities(
%BarChart{
select_item: %{category: selected_category, series: _selected_series},
mapping: mapping
},
category
)
when selected_category != category do
Enum.map(mapping.column_map.value_cols, fn _ -> @bar_faded_opacity end)
end
defp get_bar_opacities(
%BarChart{
select_item: %{category: _selected_category, series: selected_series},
mapping: mapping
},
_category
) do
Enum.map(mapping.column_map.value_cols, fn col ->
case col == selected_series do
true -> ""
_ -> @bar_faded_opacity
end
end)
end
defp get_bar_opacities(%BarChart{mapping: mapping}, _) do
Enum.map(mapping.column_map.value_cols, fn _ -> "" end)
end
# Transforms the raw value for each series into a list of range tuples the bar has to cover, scaled to the display area
# Stacked bars: each segment starts where the previous one ended, so the
# running domain total is threaded through the reduce and both endpoints are
# mapped through the scale.
defp prepare_bar_values(series_values, scale, :stacked) do
  {results, _last_val} =
    Enum.reduce(series_values, {[], 0}, fn data_val, {points, last_val} ->
      end_val = data_val + last_val
      new = {Scale.domain_to_range(scale, last_val), Scale.domain_to_range(scale, end_val)}
      # Prepend then reverse once at the end (O(1) prepend).
      {[new | points], end_val}
    end)

  Enum.reverse(results)
end

# Grouped bars: every bar starts at the low end of the scale's pixel range.
defp prepare_bar_values(series_values, scale, :grouped) do
  {scale_min, _} = Scale.get_range(scale)

  results =
    Enum.reduce(series_values, [], fn data_val, points ->
      range_val = Scale.domain_to_range(scale, data_val)
      [{scale_min, range_val} | points]
    end)

  Enum.reverse(results)
end
# Renders the <rect> elements (and optional inline labels) for one category
# band. `cat_band` is the pixel interval for the category; `bar_values`,
# `labels`, `fills`, `event_handlers` and `opacities` align element-wise,
# one entry per value series.
defp get_svg_bar_rects(
       {cat_band_min, cat_band_max} = cat_band,
       bar_values,
       labels,
       plot,
       fills,
       event_handlers,
       opacities
     )
     when is_number(cat_band_min) and is_number(cat_band_max) do
  count = length(bar_values)
  indices = 0..(count - 1)

  # For grouped charts each series gets its own slice of the category band;
  # stacked charts reuse the whole band.
  adjusted_bands =
    Enum.map(indices, fn index ->
      adjust_cat_band(cat_band, index, count, plot.type, plot.orientation)
    end)

  rects =
    Enum.zip([bar_values, fills, labels, adjusted_bands, event_handlers, opacities])
    |> Enum.map(fn {bar_value, fill, label, adjusted_band, event_opts, opacity} ->
      {x, y} = get_bar_rect_coords(plot.orientation, adjusted_band, bar_value)
      opts = [fill: fill, opacity: opacity] ++ event_opts
      rect(x, y, title(label), opts)
    end)

  # Inline data labels are only drawn for fewer than 4 series (and only when
  # data_labels is enabled) to avoid clutter.
  texts =
    case count < 4 and plot.data_labels do
      false ->
        []

      _ ->
        Enum.zip([bar_values, labels, adjusted_bands])
        |> Enum.map(fn {bar_value, label, adjusted_band} ->
          get_svg_bar_label(plot.orientation, bar_value, label, adjusted_band, plot)
        end)
    end

  # TODO: Get nicer text with big stacks - maybe limit to two series
  [rects, texts]
end

# Fallback for non-numeric band boundaries (e.g. category not present in the
# scale): render nothing for this row.
defp get_svg_bar_rects(_x, _y, _label, _plot, _fill, _event_handlers, _opacities), do: ""
# Stacked bars occupy the full category band.
defp adjust_cat_band(cat_band, _index, _count, :stacked, _orientation), do: cat_band

# Grouped vertical bars split the band into `count` equal slots, placed
# left-to-right in series declaration order.
defp adjust_cat_band({band_start, band_end}, index, count, :grouped, :vertical) do
  slot = (band_end - band_start) / count
  {band_start + index * slot, band_start + (index + 1) * slot}
end

# Grouped horizontal bars flip the slot index so the first series sits at
# the top of the group.
defp adjust_cat_band({band_start, band_end}, index, count, :grouped, :horizontal) do
  flipped = count - index - 1
  slot = (band_end - band_start) / count
  {band_start + flipped * slot, band_start + (flipped + 1) * slot}
end
# Chooses which interval supplies the x extents and which the y extents:
# horizontal bars grow along x (value), vertical bars along y.
defp get_bar_rect_coords(:horizontal, cat_band, bar_extents), do: {bar_extents, cat_band}
defp get_bar_rect_coords(:vertical, cat_band, bar_extents), do: {cat_band, bar_extents}
# Horizontal bars: the label is centred inside wide bars (>= 50px) and
# placed just past the bar end, left-anchored, for narrow ones.
defp get_svg_bar_label(:horizontal, {_, bar_end} = bar, label, cat_band, _plot) do
  text_y = midpoint(cat_band)

  {text_x, class, anchor} =
    if width(bar) < 50 do
      {bar_end + 2, "exc-barlabel-out", "start"}
    else
      {midpoint(bar), "exc-barlabel-in", "middle"}
    end

  text(text_x, text_y, label, text_anchor: anchor, class: class, dominant_baseline: "central")
end

# Vertical bars: the label is centred inside tall bars (> 20px) and floated
# 10px above the top of short ones.
defp get_svg_bar_label(_orientation, {bar_start, _} = bar, label, cat_band, _plot) do
  text_x = midpoint(cat_band)

  {text_y, class} =
    if width(bar) > 20 do
      {midpoint(bar), "exc-barlabel-in"}
    else
      {bar_start - 10, "exc-barlabel-out"}
    end

  text(text_x, text_y, label, text_anchor: "middle", class: class)
end
@doc """
Sets the category column name. This must exist in the dataset.
This provides the labels for each bar or group of bars
"""
def set_cat_col_name(
%BarChart{dataset: dataset, padding: padding, mapping: mapping} = plot,
cat_col_name
) do
mapping = Mapping.update(mapping, %{category_col: cat_col_name})
categories = Dataset.unique_values(dataset, cat_col_name)
{r_min, r_max} = get_range(:category, plot)
cat_scale =
OrdinalScale.new(categories)
|> Scale.set_range(r_min, r_max)
|> OrdinalScale.padding(padding)
%{plot | category_scale: cat_scale, mapping: mapping}
end
@doc """
Sets the value column names. Each must exist in the dataset.
This provides the value for each bar.
"""
def set_val_col_names(%BarChart{dataset: dataset, mapping: mapping} = plot, val_col_names)
when is_list(val_col_names) do
mapping = Mapping.update(mapping, %{value_cols: val_col_names})
{min, max} =
get_overall_value_domain(plot, dataset, val_col_names, plot.type)
|> Utils.fixup_value_range()
{r_start, r_end} = get_range(:value, plot)
val_scale =
ContinuousLinearScale.new()
|> ContinuousLinearScale.domain(min, max)
|> Scale.set_range(r_start, r_end)
series_fill_colours =
CategoryColourScale.new(val_col_names)
|> CategoryColourScale.set_palette(plot.colour_palette)
%{plot | value_scale: val_scale, series_fill_colours: series_fill_colours, mapping: mapping}
end
def set_val_col_names(%BarChart{} = plot, _), do: plot
# Pixel range for each scale. Ranges that run along the SVG y axis are given
# max-first because SVG's y coordinate grows downward.
defp get_range(:category, %BarChart{orientation: :horizontal} = plot), do: {plot.height, 0}
defp get_range(:category, plot), do: {0, plot.width}
defp get_range(:value, %BarChart{orientation: :horizontal} = plot), do: {0, plot.width}
defp get_range(:value, plot), do: {plot.height, 0}
# An explicit value_range on the plot overrides anything derived from data.
defp get_overall_value_domain(%BarChart{value_range: {min, max}}, _, _, _), do: {min, max}

# Stacked bars: domain runs from zero up to the largest per-row sum.
defp get_overall_value_domain(_plot, dataset, col_names, :stacked) do
  {_min, max} = Dataset.combined_column_extents(dataset, col_names)
  {0, max}
end

# Grouped bars: fold the individual column extents together, nil-safe.
defp get_overall_value_domain(_plot, dataset, col_names, :grouped) do
  col_names
  |> Enum.map(&Dataset.column_extents(dataset, &1))
  |> Enum.reduce({nil, nil}, fn {inner_min, inner_max}, {acc_min, acc_max} ->
    {Utils.safe_min(acc_min, inner_min), Utils.safe_max(acc_max, inner_max)}
  end)
end
# Midpoint of a numeric interval, always returned as a float.
defp midpoint({lo, hi}), do: (lo + hi) / 2.0

# Absolute extent of an interval, regardless of endpoint order.
defp width({lo, hi}), do: abs(lo - hi)
end
|
lib/chart/barchart.ex
| 0.952353 | 0.837354 |
barchart.ex
|
starcoder
|
defprotocol Cat.Effect.Spawn do
  @moduledoc """
  Spawn defines
  * `start(t(a)) :: t(Fiber.t(t, a))`
  * `never(t(any)) :: t(none)`
  * `cede(t(any)) :: t(no_return)`
  * `background(t(a)) :: Resource.t(t, t(MonadCancel.outcome(a)))`
  * `race_pair(t(a), t(b)) :: t(race_pair_out(a, b))`
  * `race(t(a), t(b)) :: t(Either.t(a, b))`
  * `both(t(a), t(b)) :: t({a, b})`
  **It must also be `MonadCancel`, `MonadError`, `Monad`, `Applicative` and `Functor`.**
  Default implementations (at `Spawn.Default`):
  * `background(t(a)) :: Resource.t(t, t(MonadCancel.outcome(a)))`
  * `race(t(a), t(b)) :: t(Either.t(a, b))`
  * `both(t(a), t(b)) :: t({a, b})`
  """

  @type t(_x) :: term

  @doc "Starts `ta` concurrently, yielding a handle to the running fiber."
  @spec start(t(a)) :: t(Fiber.t(t, a)) when a: var
  def start(ta)

  @doc "An effect that never completes. `example` only selects the implementation."
  @spec never(t(any)) :: t(none)
  def never(example)

  @doc "Yields execution to other fibers. `example` only selects the implementation."
  @spec cede(t(any)) :: t(no_return)
  def cede(example)

  @doc "Runs `ta` on a background fiber scoped to a `Resource`."
  @spec background(t(a)) :: Resource.t(t, t(MonadCancel.outcome(a))) when a: var
  def background(ta)

  # Outcome of racing two fibers: the winner's outcome paired with the
  # still-running loser's fiber handle.
  # NOTE(review): `Fiber.t/1` is used here while `start/1`'s spec uses
  # `Fiber.t/2` — confirm the intended arity.
  @typep race_pair_out(a, b) :: Either.t(
    {MonadCancel.outcome(a), Fiber.t(b)},
    {Fiber.t(a), MonadCancel.outcome(b)}
  )

  @doc "Races `ta` against `tb`, returning the winner's outcome and the loser's fiber."
  @spec race_pair(t(a), t(b)) :: t(race_pair_out(a, b)) when a: var, b: var
  def race_pair(ta, tb)

  @doc "Races `ta` against `tb`, returning the first successful result and canceling the loser."
  @spec race(t(a), t(b)) :: t(Either.t(a, b)) when a: var, b: var
  def race(ta, tb)

  @doc "Runs `ta` and `tb` concurrently, returning both results as a tuple."
  @spec both(t(a), t(b)) :: t({a, b}) when a: var, b: var
  def both(ta, tb)
end
alias Cat.{Applicative, Either, Monad, MonadError}
alias Cat.Effect.{Fiber, MonadCancel, Resource, Spawn}
defmodule Cat.Effect.Spawn.Default do
  @moduledoc """
  Default implementations of `Spawn.background/1`, `Spawn.race/2` and
  `Spawn.both/2`, expressed in terms of `Spawn.start/1` and
  `Spawn.race_pair/2` (mirroring cats-effect's `GenSpawn` defaults).
  """

  @spec background(Spawn.t(a)) :: Resource.t(Spawn.t, Spawn.t(MonadCancel.outcome(a))) when a: var
  def background(ta) do
    # Start the action as a fiber, guarantee cancellation on resource
    # release, and expose joining the fiber as the resource's value.
    Resource.new acquire: Spawn.start(ta),
                 release: &Fiber.cancel/1,
                 map: &Fiber.join/1
  end

  @spec race(Spawn.t(a), Spawn.t(b)) :: Spawn.t(Either.t(a, b)) when a: var, b: var
  def race(ta, tb) do
    # Bug fix: the previous version called `Functor.map(ta, &Either.left/1)`
    # (and mirrors), which discarded the already-available outcome value,
    # re-used the original effect instead of the race result, wrapped the
    # wrong side in one branch, and referenced `Functor`, which is not
    # aliased in this file. We now lift the concrete outcome value with
    # `Applicative.pure/2`, consistent with `both/2` below.
    MonadCancel.uncancelable fn poll ->
      Monad.flat_map poll.(Spawn.race_pair(ta, tb)), fn
        # `ta` won with a value: cancel the loser, return Left(a).
        %Either.Left{v: {{:ok, a}, fb}} ->
          Applicative.product_r Fiber.cancel(fb), Applicative.pure(ta, Either.left(a))

        # `ta` won with an error: cancel the loser, re-raise.
        %Either.Left{v: {{:error, error}, fb}} ->
          Applicative.product_r Fiber.cancel(fb), MonadError.raise(ta, error)

        # `ta` was canceled: fall back to `tb`'s fiber. The join is
        # cancelable (wrapped in `poll`) and cancels the fiber if the join
        # itself is interrupted.
        %Either.Left{v: {:canceled, fb}} ->
          joined = MonadCancel.on_cancel(poll.(Fiber.join(fb)), Fiber.cancel(fb))

          Monad.flat_map joined, fn
            {:ok, b} -> Applicative.pure(tb, Either.right(b))
            {:error, error} -> MonadError.raise(tb, error)
            :canceled -> Applicative.product_r poll.(MonadCancel.canceled(tb)), Spawn.never(tb)
          end

        # Mirror cases: `tb` finished first.
        %Either.Right{v: {fa, {:ok, b}}} ->
          Applicative.product_r Fiber.cancel(fa), Applicative.pure(tb, Either.right(b))

        %Either.Right{v: {fa, {:error, error}}} ->
          Applicative.product_r Fiber.cancel(fa), MonadError.raise(tb, error)

        %Either.Right{v: {fa, :canceled}} ->
          joined = MonadCancel.on_cancel(poll.(Fiber.join(fa)), Fiber.cancel(fa))

          Monad.flat_map joined, fn
            {:ok, a} -> Applicative.pure(ta, Either.left(a))
            {:error, error} -> MonadError.raise(ta, error)
            :canceled -> Applicative.product_r poll.(MonadCancel.canceled(ta)), Spawn.never(ta)
          end
      end
    end
  end

  @spec both(Spawn.t(a), Spawn.t(b)) :: Spawn.t({a, b}) when a: var, b: var
  def both(ta, tb) do
    MonadCancel.uncancelable fn poll ->
      Monad.flat_map poll.(Spawn.race_pair(ta, tb)), fn
        # `ta` finished first with a value: wait for `tb` and pair them up.
        %Either.Left{v: {{:ok, a}, fb}} ->
          joined = MonadCancel.on_cancel(poll.(Fiber.join(fb)), Fiber.cancel(fb))

          Monad.flat_map joined, fn
            {:ok, b} -> Applicative.pure(tb, {a, b})
            {:error, error} -> MonadError.raise(tb, error)
            :canceled -> Applicative.product_r poll.(MonadCancel.canceled(tb)), Spawn.never(tb)
          end

        %Either.Left{v: {{:error, error}, fb}} ->
          Applicative.product_r Fiber.cancel(fb), MonadError.raise(tb, error)

        # One side canceled: cancel the other fiber too (it previously leaked)
        # before self-cancelling, matching cats-effect's both semantics.
        %Either.Left{v: {:canceled, fb}} ->
          Applicative.product_r(
            Fiber.cancel(fb),
            Applicative.product_r(poll.(MonadCancel.canceled(tb)), Spawn.never(tb))
          )

        %Either.Right{v: {fa, {:ok, b}}} ->
          joined = MonadCancel.on_cancel(poll.(Fiber.join(fa)), Fiber.cancel(fa))

          Monad.flat_map joined, fn
            {:ok, a} -> Applicative.pure(ta, {a, b})
            {:error, error} -> MonadError.raise(ta, error)
            :canceled -> Applicative.product_r poll.(MonadCancel.canceled(ta)), Spawn.never(ta)
          end

        %Either.Right{v: {fa, {:error, error}}} ->
          Applicative.product_r Fiber.cancel(fa), MonadError.raise(ta, error)

        %Either.Right{v: {fa, :canceled}} ->
          Applicative.product_r(
            Fiber.cancel(fa),
            Applicative.product_r(poll.(MonadCancel.canceled(ta)), Spawn.never(ta))
          )
      end
    end
  end
end
|
lib/cat/protocols/effect/spawn.ex
| 0.877746 | 0.524577 |
spawn.ex
|
starcoder
|
defmodule ExForce.Client.Tesla do
  @moduledoc """
  HTTP Client for Salesforce REST API using Tesla.
  ## Adapter
  To use different Tesla adapter, set it via Mix configuration.
  ```elixir
  config :tesla, ExForce.Client.Tesla, adapter: Tesla.Adapter.Hackney
  ```
  """

  @behaviour ExForce.Client

  alias ExForce.{
    Request,
    Response
  }

  @default_api_version "42.0"
  @default_user_agent "ex_force"

  @doc """
  Returns Tesla client for ExForce functions
  Options
  - `:headers`: set additional headers; default: `[{"user-agent", "#{@default_user_agent}"}]`
  - `:api_version`: use the given api_version; default: `"#{@default_api_version}"`
  - `:adapter`: use the given adapter with custom opts; default: `nil`, which causes Tesla to use the default adapter or the one set in config.
  """
  @impl ExForce.Client
  def build_client(instance_url_or_map, opts \\ [headers: [{"user-agent", @default_user_agent}]])

  def build_client(%{instance_url: instance_url, access_token: access_token}, opts) do
    # Idiom fix: these are infallible bindings, so a `with` chain of bare
    # `<-` clauses (a Credo warning) is replaced with plain assignments.
    headers = Keyword.get(opts, :headers, [])
    new_headers = [{"authorization", "Bearer " <> access_token} | headers]
    build_client(instance_url, Keyword.put(opts, :headers, new_headers))
  end

  def build_client(instance_url, opts) when is_binary(instance_url) do
    Tesla.client(
      [
        {ExForce.Client.Tesla.Middleware,
         {instance_url, Keyword.get(opts, :api_version, @default_api_version)}},
        {Tesla.Middleware.Compression, format: "gzip"},
        {Tesla.Middleware.JSON, engine: Jason},
        {Tesla.Middleware.Headers, Keyword.get(opts, :headers, [])}
      ],
      Keyword.get(opts, :adapter)
    )
  end

  @doc """
  Returns client for ExForce.OAuth functions
  ### Options
  - `:user_agent`
  """
  @impl ExForce.Client
  def build_oauth_client(url, opts \\ [headers: [{"user-agent", @default_user_agent}]]) do
    Tesla.client(
      [
        {Tesla.Middleware.BaseUrl, url},
        {Tesla.Middleware.Compression, format: "gzip"},
        Tesla.Middleware.FormUrlencoded,
        {Tesla.Middleware.DecodeJson, engine: Jason},
        {Tesla.Middleware.Headers, Keyword.get(opts, :headers, [])}
      ],
      Keyword.get(opts, :adapter)
    )
  end

  @doc """
  Sends a request to Salesforce
  """
  @impl ExForce.Client
  def request(%Tesla.Client{} = client, %Request{} = request) do
    client
    |> Tesla.request(cast_tesla_request(request))
    |> cast_response()
  end

  # Converts an `ExForce.Request` into the keyword list `Tesla.request/2`
  # expects, dropping unset (nil) fields so Tesla's defaults apply.
  defp cast_tesla_request(%Request{} = request) do
    request
    |> convert_struct(Tesla.Env)
    |> Map.to_list()
    |> Enum.reject(fn {_key, value} -> is_nil(value) end)
  end

  # Rebuilds `fields` as `new_struct`, keeping all shared keys.
  defp convert_struct(%_struct{} = fields, new_struct),
    do: struct(new_struct, Map.from_struct(fields))

  # Wraps a successful Tesla response in an `ExForce.Response`; errors pass
  # through unchanged.
  defp cast_response({:ok, %Tesla.Env{} = response}),
    do: {:ok, convert_struct(response, Response)}

  defp cast_response({:error, error}), do: {:error, error}
end
|
lib/ex_force/client/tesla/tesla.ex
| 0.892429 | 0.699806 |
tesla.ex
|
starcoder
|
defmodule JSON.Parser.Array do
  @moduledoc """
  Implements a JSON Array Parser for Bitstring values
  """
  alias JSON.Parser, as: Parser
  require Logger
  import JSON.Logger

  @doc """
  parses a valid JSON array value, returns its elixir list representation

  ## Examples

      iex> JSON.Parser.Array.parse ""
      {:error, :unexpected_end_of_buffer}

      iex> JSON.Parser.Array.parse "[1, 2 "
      {:error, :unexpected_end_of_buffer}

      iex> JSON.Parser.Array.parse "face0ff"
      {:error, {:unexpected_token, "face0ff"}}

      iex> JSON.Parser.Array.parse "[] lala"
      {:ok, [], " lala"}

      iex> JSON.Parser.Array.parse "[]"
      {:ok, [], ""}

      iex> JSON.Parser.Array.parse "[\\\"foo\\\", 1, 2, 1.5] lala"
      {:ok, ["foo", 1, 2, 1.5], " lala"}
  """
  def parse(<<?[, rest::binary>>) do
    log(:debug, fn ->
      # Fixed log wording ("the calling" -> "then calling").
      "#{__MODULE__}.parse(#{inspect(rest)}) trimming string and then calling parse_array_contents()"
    end)

    rest |> String.trim() |> parse_array_contents()
  end

  def parse(<<>>) do
    log(:debug, fn -> "#{__MODULE__}.parse(<<>>) unexpected end of buffer." end)
    {:error, :unexpected_end_of_buffer}
  end

  def parse(json) do
    # Fixed log message: this is the catch-all token clause, not the
    # empty-buffer clause, so it should not claim `parse(<<>>)`.
    log(:debug, fn -> "#{__MODULE__}.parse/1 unexpected token: #{inspect(json)}" end)
    {:error, {:unexpected_token, json}}
  end

  # begin parse array
  defp parse_array_contents(json) when is_binary(json) do
    log(:debug, fn ->
      "#{__MODULE__}.parse_array_contents(#{inspect(json)}) beginning to parse array contents..."
    end)

    parse_array_contents([], json)
  end

  # stop condition: closing bracket. Elements were accumulated in reverse.
  defp parse_array_contents(acc, <<?], rest::binary>>) do
    log(:debug, fn ->
      "#{__MODULE__}.parse_array_contents(#{inspect(acc)}, #{inspect(rest)}) finished parsing array contents."
    end)

    {:ok, Enum.reverse(acc), rest}
  end

  # error condition: buffer exhausted before the closing bracket.
  defp parse_array_contents(_, <<>>) do
    log(:debug, fn ->
      "#{__MODULE__}.parse_array_contents(acc, <<>>) unexpected end of buffer."
    end)

    {:error, :unexpected_end_of_buffer}
  end

  # General case: parse one element, then either continue after a comma or
  # hand the remainder back to the stop/error clauses above.
  defp parse_array_contents(acc, json) do
    json
    |> String.trim()
    |> Parser.parse()
    |> case do
      {:error, error_info} ->
        log(:debug, fn ->
          "#{__MODULE__}.parse_array_contents(#{inspect(acc)}, #{inspect(json)}) generated an error: #{
            inspect(error_info)
          }"
        end)

        {:error, error_info}

      {:ok, value, after_value} ->
        log(:debug, fn ->
          # Fixed typo: "sucessfully" -> "successfully".
          "#{__MODULE__}.parse_array_contents(acc, json) successfully parsed value `#{
            inspect(value)
          }`, with
          after_value=#{inspect(after_value)}"
        end)

        after_value
        |> String.trim()
        |> case do
          <<?,, after_comma::binary>> ->
            trimmed = String.trim(after_comma)

            log(:debug, fn ->
              "#{__MODULE__}.parse_array_contents(acc, json) found a comma, continuing parsing of #{
                inspect(trimmed)
              }"
            end)

            parse_array_contents([value | acc], trimmed)

          rest ->
            log(:debug, fn ->
              "#{__MODULE__}.parse_array_contents(acc, json) continuing parsing of #{
                inspect(rest)
              }"
            end)

            parse_array_contents([value | acc], rest)
        end
    end
  end
end
|
node_modules/@snyk/snyk-hex-plugin/elixirsrc/deps/json/lib/json/parser/array.ex
| 0.831588 | 0.434761 |
array.ex
|
starcoder
|
defmodule Wobserver.Table do
  @moduledoc ~S"""
  Table (ets) information and listing.
  """
  import Wobserver.Util.Helper, only: [string_to_module: 1]

  @doc ~S"""
  Lists all tables with basic information.
  Note: data is not included.
  """
  @spec list :: list(map)
  def list, do: Enum.map(:ets.all(), &info/1)

  @doc """
  Creates an overview of table information based on the given `table` atom or number.
  If `include_data` is set to `true`, it will also contain the table data.
  """
  @spec info(table :: atom | integer, include_data :: boolean) :: map
  def info(table, include_data \\ false)

  def info(table, false) do
    # Concurrency/compression settings are grouped under :meta.
    meta = %{
      read_concurrency: :ets.info(table, :read_concurrency),
      write_concurrency: :ets.info(table, :write_concurrency),
      compressed: :ets.info(table, :compressed)
    }

    %{
      id: table,
      name: :ets.info(table, :name),
      type: :ets.info(table, :type),
      size: :ets.info(table, :size),
      memory: :ets.info(table, :memory),
      owner: :ets.info(table, :owner),
      protection: :ets.info(table, :protection),
      meta: meta
    }
  end

  def info(table, true) do
    table
    |> info(false)
    |> Map.put(:data, data(table))
  end

  @doc ~S"""
  Sanitizes a `table` name and returns either the table id or name.
  Example:
  ```bash
  iex> Wobserver.Table.sanitize :code
  :code
  ```
  ```bash
  iex> Wobserver.Table.sanitize 1
  1
  ```
  ```bash
  iex> Wobserver.Table.sanitize "code"
  :code
  ```
  ```bash
  iex> Wobserver.Table.sanitize "1"
  1
  ```
  """
  @spec sanitize(table :: atom | integer | String.t()) :: atom | integer
  def sanitize(table) when is_atom(table), do: table
  def sanitize(table) when is_integer(table), do: table

  def sanitize(table) when is_binary(table) do
    # Fully-numeric strings become table ids; everything else is resolved
    # to a module/atom name.
    case Integer.parse(table) do
      {number, ""} -> number
      _other -> string_to_module(table)
    end
  end

  # Helpers

  # Reads all rows of a table; private tables are unreadable from here, so
  # they yield an empty list.
  defp data(table) do
    if :ets.info(table, :protection) == :private do
      []
    else
      table
      |> :ets.match(:"$1")
      |> Enum.map(&data_row/1)
    end
  end

  # Formats one matched row: each tuple element rendered via ~tp.
  defp data_row([row]) do
    row
    |> Tuple.to_list()
    |> Enum.map(fn cell -> to_string(:io_lib.format("~tp", [cell])) end)
  end
end
|
lib/wobserver/util/table.ex
| 0.830525 | 0.843251 |
table.ex
|
starcoder
|
defmodule Ecto.Model.Validations do
  @moduledoc ~S"""
  Conveniences for defining module-level validations in models.
  This module provides two macros `validate` and `validatep` that
  wrap around `Ecto.Validator`. Let's see an example:
      defmodule User do
        use Ecto.Model
        queryable "users" do
          field :name, :string
          field :age, :string
          field :filename, :string
          field :format, :string
        end
        validate user,
          name: present(),
          age: present(message: "must be present"),
          age: greater_than(18),
          also: validate_attachments
        validatep validate_attachments(user),
          filename: has_format(~r/\w+/),
          format: member_of(~w(jpg gif png))
      end
  By calling `validate user`, a `validate(user)` function is defined
  that validates each attribute according to the given predicates.
  A special attribute called `:also` is supported, useful to wire
  different validations together.
  The validations can be executed by calling the `validate` function:
      User.validate(User.new)
      #=> [name: "can't be blank", age: "must be present"]
  This function returns a list with the validation errors, with the
  attribute as key and the error message as value. You can match on
  an empty list to know if there were validation errors or not:
      case User.validate(user) do
        [] -> # no errors
        errors -> # got errors
      end
  `validatep` works the same as `validate` but defines a private
  function. Note both macros can pass a function name as first
  argument which is the function to be defined. For `validatep`, we
  defined a `validate_attachments` function. All validation functions
  must receive the current entity as argument. We can call the
  `validate_attachments/1` locally as:
      validate_attachments(user)
  ## Predicates
  Validations are executed via a series of predicates:
      validate user,
        name: present(),
        age: present(message: "must be present"),
        age: greater_than(18),
        also: validate_attachments
  Each predicate above is going to receive the attribute being validated
  and its current value as argument. For example, the `present` predicate
  above is going to be called as:
      present(:name, user.name)
      present(:age, user.age, message: "must be present")
  Note that predicates can be chained together with `and`. The following
  is equivalent to the example above:
      validate user,
        name: present(),
        age: present(message: "must be present") and greater_than(18),
        also: validate_attachments
  The predicate given to `:also` is special as it simply receives the
  current record as argument. In this example, `validate_attachments`
  will be invoked as:
      validate_attachments(user)
  Which matches the API of the private `validate_attachments(user)`
  function we have defined below. Note all predicates must return a
  keyword list, with the attribute error as key and the validation
  message as value.
  ## Custom predicates
  By using `Ecto.Model.Validations`, all predicates defined at
  `Ecto.Validator.Predicates` are automatically imported into your
  model.
  However, defining custom predicates is easy. As we have seen in
  the previous section, a custom predicate is simply a function that
  receives a particular set of arguments. For example, imagine we want
  to change the predicates below:
      validatep validate_attachments(user),
        filename: has_format(~r/\w+/),
        format: member_of(~w(jpg gif png))
  To a custom predicate for image attachments:
      validatep validate_attachments(user),
        filename: image_attachment()
  It could be implemented as:
      def image_attachments(attr, value, opts \\ []) do
        if Path.extname(value) in ~w(jpg gif png) do
          []
        else
          [{ attr, opts[:message] || "is not an image attachment" }]
        end
      end
  Note that predicates can also be called over remote functions as
  long as it complies with the predicate API:
      validatep validate_attachments(user),
        filename: Image.valid_attachment
  ## Function scope
  Note that calling `validate` and `validatep` starts a new function,
  with its own scope. That said, the following is invalid:
      values = ~w(jpg gif png)
      validatep validate_attachments(user),
        filename: has_format(~r/\w+/),
        format: member_of(values)
  You can use module attributes instead:
      @values ~w(jpg gif png)
      validatep validate_attachments(user),
        filename: has_format(~r/\w+/),
        format: member_of(@values)
  On the plus side, it means you can also call other functions from
  the validator:
      validatep validate_attachments(user),
        filename: has_format(~r/\w+/),
        format: member_of(valid_formats)
      defp valid_formats(), do: ~w(jpg gif png)
  or even receive arguments:
      validatep validate_attachments(user, valid_formats \\ ~w(jpg gif png)),
        filename: has_format(~r/\w+/),
        format: member_of(valid_formats)
  or:
      validatep validate_attachments(user, validate_format),
        filename: has_format(~r/\w+/),
        format: member_of(~w(jpg gif png)) when validate_format
  """

  @doc false
  defmacro __using__(_) do
    quote do
      require Ecto.Validator
      import Ecto.Validator.Predicates
      import Ecto.Model.Validations
    end
  end

  @doc """
  Defines a public function that runs the given validations.
  """
  defmacro validate(function, keywords) do
    do_validate(:def, function, keywords, Module.get_attribute(__CALLER__.module, :ecto_entity))
  end

  @doc """
  Defines a private function that runs the given validations.
  """
  defmacro validatep(function, keywords) do
    do_validate(:defp, function, keywords, Module.get_attribute(__CALLER__.module, :ecto_entity))
  end

  # A bare variable (e.g. `validate user, ...`) is sugar for defining a
  # function named `validate/1` taking that variable.
  defp do_validate(kind, { _, _, context } = var, keywords, entity) when is_atom(context) do
    do_validate(kind, { :validate, [], [var] }, keywords, entity)
  end

  # The validation function must take at least the record being validated.
  defp do_validate(_kind, { _, _, [] }, _keywords, _entity) do
    raise ArgumentError, message: "validate and validatep expects a function with at least one argument"
  end

  # Defines the requested function whose body delegates to
  # Ecto.Validator.record/2 with the first argument (the record).
  defp do_validate(kind, { _, _, [h|_] } = signature, keywords, entity) do
    do_validate_var(h)
    quote do
      unquote(do_validate_opt(kind, signature, keywords, entity))
      Kernel.unquote(kind)(unquote(signature)) do
        Ecto.Validator.record unquote(h), unquote(keywords)
      end
    end
  end

  # When no ecto entity is registered on the module, no extra clause is
  # generated.
  defp do_validate_opt(_kind, _signature, _keywords, nil) do
    nil
  end

  # Generates an additional clause whose first argument is matched against
  # the module's ecto entity.
  # NOTE(review): `unquote(entity)[]` is pre-1.0 record/entity access syntax
  # from early Ecto — confirm against the Ecto version this targets.
  defp do_validate_opt(kind, { fun, meta, [h|t] }, keywords, entity) do
    signature = { fun, meta, [quote(do: unquote(h) = unquote(entity)[])|t] }
    quote do
      Kernel.unquote(kind)(unquote(signature)) do
        Ecto.Validator.record unquote(h), unquote(keywords)
      end
    end
  end

  # The first argument must be a plain variable, not an arbitrary expression.
  defp do_validate_var({ _, _, context }) when is_atom(context), do: :ok
  defp do_validate_var(expr) do
    raise ArgumentError, message: "validate and validatep expects a function with a var " <>
                                  "as first argument, got: #{Macro.to_string(expr)}"
  end
end
|
lib/ecto/model/validations.ex
| 0.893501 | 0.659624 |
validations.ex
|
starcoder
|
defmodule EctoSchemaStore.BuildQueries do
  @moduledoc false

  # Each `build_ecto_query/3,4` clause expands to an Ecto `from` expression
  # applying one where-filter to `query`. `key` must expand to an atom field
  # name; `value` is injected as written by the caller (callers pin with `^`).
  defmacro build_ecto_query(query, :is_nil, key) do
    quote do
      from q in unquote(query),
        where: is_nil(field(q, ^unquote(key)))
    end
  end
  defmacro build_ecto_query(query, :not_nil, key) do
    quote do
      from q in unquote(query),
        where: not is_nil(field(q, ^unquote(key)))
    end
  end
  defmacro build_ecto_query(query, :like, key, value) do
    quote do
      from q in unquote(query),
        where: like(field(q, ^unquote(key)), unquote(value))
    end
  end
  defmacro build_ecto_query(query, :ilike, key, value) do
    quote do
      from q in unquote(query),
        where: ilike(field(q, ^unquote(key)), unquote(value))
    end
  end
  defmacro build_ecto_query(query, :eq, key, value) do
    quote do
      from q in unquote(query),
        where: field(q, ^unquote(key)) == unquote(value)
    end
  end
  defmacro build_ecto_query(query, :not, key, value) do
    quote do
      from q in unquote(query),
        where: field(q, ^unquote(key)) != unquote(value)
    end
  end
  defmacro build_ecto_query(query, :lt, key, value) do
    quote do
      from q in unquote(query),
        where: field(q, ^unquote(key)) < unquote(value)
    end
  end
  defmacro build_ecto_query(query, :lte, key, value) do
    quote do
      from q in unquote(query),
        where: field(q, ^unquote(key)) <= unquote(value)
    end
  end
  defmacro build_ecto_query(query, :gt, key, value) do
    quote do
      from q in unquote(query),
        where: field(q, ^unquote(key)) > unquote(value)
    end
  end
  defmacro build_ecto_query(query, :gte, key, value) do
    quote do
      from q in unquote(query),
        where: field(q, ^unquote(key)) >= unquote(value)
    end
  end
  defmacro build_ecto_query(query, :in, key, value) do
    quote do
      from q in unquote(query),
        where: field(q, ^unquote(key)) in unquote(value)
    end
  end

  # Injects `build_query/1`, `build_query!/1` and friends into a store module
  # for the given schema. `keys`/`assocs` are resolved at compile time from
  # the expanded schema module.
  defmacro build(schema) do
    keys = EctoSchemaStore.Utils.keys(Macro.expand(schema, __CALLER__), false)
    assocs = EctoSchemaStore.Utils.keys(Macro.expand(schema, __CALLER__), true)
    quote do
      # Tagged-tuple filters select a comparison operator; a bare value (or
      # nil) falls through to equality / is_nil at the bottom.
      defp build_keyword_query(query, field_name, {:like, value}) do
        {:ok, EctoSchemaStore.BuildQueries.build_ecto_query(query, :like, field_name, ^value)}
      end
      defp build_keyword_query(query, field_name, {:ilike, value}) do
        {:ok, EctoSchemaStore.BuildQueries.build_ecto_query(query, :ilike, field_name, ^value)}
      end
      defp build_keyword_query(query, field_name, {:in, value}) do
        {:ok, EctoSchemaStore.BuildQueries.build_ecto_query(query, :in, field_name, ^value)}
      end
      defp build_keyword_query(query, field_name, {:>=, value}) do
        {:ok, EctoSchemaStore.BuildQueries.build_ecto_query(query, :gte, field_name, ^value)}
      end
      defp build_keyword_query(query, field_name, {:>, value}) do
        {:ok, EctoSchemaStore.BuildQueries.build_ecto_query(query, :gt, field_name, ^value)}
      end
      defp build_keyword_query(query, field_name, {:<=, value}) do
        {:ok, EctoSchemaStore.BuildQueries.build_ecto_query(query, :lte, field_name, ^value)}
      end
      defp build_keyword_query(query, field_name, {:<, value}) do
        {:ok, EctoSchemaStore.BuildQueries.build_ecto_query(query, :lt, field_name, ^value)}
      end
      # nil comparisons need is_nil/not is_nil rather than ==/!= in SQL.
      defp build_keyword_query(query, field_name, {:!=, nil}) do
        {:ok, EctoSchemaStore.BuildQueries.build_ecto_query(query, :not_nil, field_name)}
      end
      defp build_keyword_query(query, field_name, {:==, nil}) do
        {:ok, EctoSchemaStore.BuildQueries.build_ecto_query(query, :is_nil, field_name)}
      end
      defp build_keyword_query(query, field_name, {:!=, value}) do
        {:ok, EctoSchemaStore.BuildQueries.build_ecto_query(query, :not, field_name, ^value)}
      end
      defp build_keyword_query(query, field_name, {:==, value}) do
        {:ok, EctoSchemaStore.BuildQueries.build_ecto_query(query, :eq, field_name, ^value)}
      end
      defp build_keyword_query(query, field_name, nil) do
        {:ok, EctoSchemaStore.BuildQueries.build_ecto_query(query, :is_nil, field_name)}
      end
      defp build_keyword_query(query, field_name, value) do
        {:ok, EctoSchemaStore.BuildQueries.build_ecto_query(query, :eq, field_name, ^value)}
      end

      def schema_fields, do: unquote(keys)
      def schema_associations, do: unquote(assocs)

      defp build_query(%Ecto.Query{} = query, []), do: {:ok, query}
      # Schema name only, convert into query to avoid errors.
      defp build_query(query, []), do: {:ok, from(q in query)}
      # Fold each {field, filter} pair into the accumulated query.
      defp build_query(query, [{key, value} | t]) do
        case build_keyword_query(query, key, value) do
          {:ok, query} -> build_query(query, t)
        end
      end
      # Map filters are normalised to a keyword-style list (order not
      # guaranteed, as documented below).
      defp build_query(query, %{} = filters) do
        build_query(query, Enum.into(filters, []))
      end

      @doc """
      Build an `Ecto.Query` from the provided fields and values map. A keyword list builds
      a query in the order of the provided keys. Maps do not guarantee an order.
      Available fields: `#{inspect(unquote(keys))}`
      """
      def build_query(filters \\ [])
      def build_query(filters) do
        build_query(unquote(schema), alias_filters(filters))
      end

      @doc """
      Build an `Ecto.Query` from the provided fields and values map. Returns the values or throws an error.
      Available fields: `#{inspect(unquote(keys))}`
      """
      def build_query!(filters \\ [])
      def build_query!(filters) do
        case build_query(filters) do
          {:ok, query} -> query
        end
      end
    end
  end
end
|
lib/ecto_schema_store/build_queries.ex
| 0.639624 | 0.609844 |
build_queries.ex
|
starcoder
|
defmodule Crawly.Pipelines.WriteToFile do
  @moduledoc """
  Stores a given item into Filesystem

  Pipeline Lifecycle:
  1. When run (by `Crawly.Utils.pipe`), creates a file descriptor if not already created.
  2. Performs the write operation
  3. File descriptor is reused by passing it through the pipeline state with `:write_to_file_fd`

  > Note: `File.close` is not necessary due to the file descriptor being
  > automatically closed upon the end of the parent process.
  >
  > Refer to https://github.com/oltarasenko/crawly/pull/19#discussion_r350599526 for relevant discussion.

  ### Options
  - `:folder`, optional. The folder in which the file will be created. Defaults
    to the current working directory (`"./"`). If the provided folder does not
    exist it is created.
  - `:extension`, optional. The file extension with which the file will be
    created. Defaults to `jl`.
  - `:include_timestamp`, boolean, optional, true by default. Allows to add a
    timestamp to the filename.

  ### Example Declaration
  ```
  pipelines: [
    Crawly.Pipelines.JSONEncoder,
    {Crawly.Pipelines.WriteToFile, folder: "/tmp", extension: "csv"}
  ]
  ```

  ### Example Output
  ```
  iex> item = %{my: "item"}
  iex> WriteToFile.run(item, %{}, folder: "/tmp", extension: "csv")
  { %{my: "item"} , %{write_to_file_fd: #PID<0.123.0>} }
  ```
  """
  @behaviour Crawly.Pipeline

  require Logger

  @impl Crawly.Pipeline
  @spec run(
          item :: any,
          state :: %{
            optional(:write_to_file_fd) => pid | {:file_descriptor, atom, any}
          },
          opts :: [
            folder: String.t(),
            extension: String.t(),
            include_timestamp: boolean()
          ]
        ) ::
          {item :: any,
           state :: %{write_to_file_fd: pid | {:file_descriptor, atom, any}}}
  def run(item, state, opts \\ [])

  # A file descriptor already lives in the pipeline state — reuse it.
  def run(item, %{write_to_file_fd: fd} = state, _opts) do
    :ok = write(fd, item)
    {item, state}
  end

  # No active FD: resolve options, open the file once, stash the FD in state.
  def run(item, state, opts) do
    opts = Enum.into(opts, %{folder: nil, extension: nil, include_timestamp: true})

    # BUGFIX: the defaults seeded into `opts` above guarantee the keys exist
    # (with value `nil`), so `Map.get(opts, :folder, "./")` never applied its
    # default — `folder` was `nil` and `Path.join/1` crashed; likewise the
    # extension default never kicked in. Use `||` so real defaults apply.
    folder = opts.folder || "./"
    :ok = maybe_create_folder(folder)

    extension = opts.extension || "jl"

    filename =
      if opts.include_timestamp do
        # Timestamp is sanitized so it is safe inside a filename.
        ts_string =
          NaiveDateTime.utc_now()
          |> NaiveDateTime.to_string()
          |> String.replace(~r/( |-|:|\.)/, "_")

        "#{inspect(state.spider_name)}_#{ts_string}.#{extension}"
      else
        "#{inspect(state.spider_name)}.#{extension}"
      end

    fd = open_fd(folder, filename)
    :ok = write(fd, item)
    {item, Map.put(state, :write_to_file_fd, fd)}
  end

  # Opens (and truncates) the output file, returning an io_device for writes.
  # `:delayed_write` batches small writes for throughput.
  defp open_fd(folder, filename) do
    {:ok, io_device} =
      File.open(
        Path.join([folder, filename]),
        [:binary, :write, :delayed_write, :utf8]
      )

    io_device
  end

  # Writes one item plus a trailing newline. Failures are logged, never raised,
  # so a single bad item cannot crash the whole pipeline; `:ok` either way to
  # satisfy the `:ok = write(...)` assertions in `run/3`.
  @spec write(io, item) :: :ok
        when io: File.io_device(),
             item: any()
  defp write(io, item) do
    IO.write(io, item)
    IO.write(io, "\n")
    :ok
  catch
    error, reason ->
      Logger.error(
        "Could not write item: #{inspect(item)} to io: #{inspect(io)}\n#{
          Exception.format(error, reason, __STACKTRACE__)
        }"
      )

      :ok
  end

  # Creates the destination folder when missing. Returns `:ok`, or the
  # `{:error, _}` from `File.mkdir_p/1`, which the caller asserts against.
  defp maybe_create_folder(path) do
    if File.exists?(path) do
      :ok
    else
      File.mkdir_p(path)
    end
  end
end
|
lib/crawly/pipelines/write_to_file.ex
| 0.745861 | 0.792464 |
write_to_file.ex
|
starcoder
|
defmodule AWS.Rekognition do
@moduledoc """
This is the Amazon Rekognition API reference.
"""
@doc """
Compares a face in the *source* input image with each of the 100 largest
faces detected in the *target* input image.
<note> If the source image contains multiple faces, the service detects the
largest face and compares it with each face detected in the target image.
</note> You pass the input and target images either as base64-encoded image
bytes or as a references to images in an Amazon S3 bucket. If you use the
Amazon CLI to call Amazon Rekognition operations, passing image bytes is
not supported. The image must be either a PNG or JPEG formatted file.
In response, the operation returns an array of face matches ordered by
similarity score in descending order. For each face match, the response
provides a bounding box of the face, facial landmarks, pose details (pitch,
roll, and yaw), quality (brightness and sharpness), and confidence value
(indicating the level of confidence that the bounding box contains a face).
The response also provides a similarity score, which indicates how closely
the faces match.
<note> By default, only faces with a similarity score of greater than or
equal to 80% are returned in the response. You can change this value by
specifying the `SimilarityThreshold` parameter.
</note> `CompareFaces` also returns an array of faces that don't match the
source image. For each face, it returns a bounding box, confidence value,
landmarks, pose details, and quality. The response also returns information
about the face in the source image, including the bounding box of the face
and confidence value.
If the image doesn't contain Exif metadata, `CompareFaces` returns
orientation information for the source and target images. Use these values
to display the images with the correct image orientation.
If no faces are detected in the source or target images, `CompareFaces`
returns an `InvalidParameterException` error.
<note> This is a stateless API operation. That is, data returned by this
operation doesn't persist.
</note> For an example, see `faces-compare-images`.
This operation requires permissions to perform the
`rekognition:CompareFaces` action.
"""
def compare_faces(client, input, options \\ []),
  do: request(client, "CompareFaces", input, options)
@doc """
Creates a collection in an AWS Region. You can add faces to the collection
using the operation.
For example, you might create collections, one for each of your application
users. A user can then index faces using the `IndexFaces` operation and
persist results in a specific collection. Then, a user can search the
collection for faces in the user-specific container.
<note> Collection names are case-sensitive.
</note> This operation requires permissions to perform the
`rekognition:CreateCollection` action.
"""
def create_collection(client, input, options \\ []),
  do: request(client, "CreateCollection", input, options)
@doc """
Creates an Amazon Rekognition stream processor that you can use to detect
and recognize faces in a streaming video.
Rekognition Video is a consumer of live video from Amazon Kinesis Video
Streams. Rekognition Video sends analysis results to Amazon Kinesis Data
Streams.
You provide as input a Kinesis video stream (`Input`) and a Kinesis data
stream (`Output`) stream. You also specify the face recognition criteria in
`Settings`. For example, the collection containing faces that you want to
recognize. Use `Name` to assign an identifier for the stream processor. You
use `Name` to manage the stream processor. For example, you can start
processing the source video by calling with the `Name` field.
After you have finished analyzing a streaming video, use to stop
processing. You can delete the stream processor by calling .
"""
def create_stream_processor(client, input, options \\ []),
  do: request(client, "CreateStreamProcessor", input, options)
@doc """
Deletes the specified collection. Note that this operation removes all
faces in the collection. For an example, see `delete-collection-procedure`.
This operation requires permissions to perform the
`rekognition:DeleteCollection` action.
"""
def delete_collection(client, input, options \\ []),
  do: request(client, "DeleteCollection", input, options)
@doc """
Deletes faces from a collection. You specify a collection ID and an array
of face IDs to remove from the collection.
This operation requires permissions to perform the
`rekognition:DeleteFaces` action.
"""
def delete_faces(client, input, options \\ []),
  do: request(client, "DeleteFaces", input, options)
@doc """
Deletes the stream processor identified by `Name`. You assign the value for
`Name` when you create the stream processor with . You might not be able to
use the same name for a stream processor for a few seconds after calling
`DeleteStreamProcessor`.
"""
def delete_stream_processor(client, input, options \\ []),
  do: request(client, "DeleteStreamProcessor", input, options)
@doc """
Provides information about a stream processor created by . You can get
information about the input and output streams, the input parameters for
the face recognition being performed, and the current status of the stream
processor.
"""
def describe_stream_processor(client, input, options \\ []),
  do: request(client, "DescribeStreamProcessor", input, options)
@doc """
Detects faces within an image that is provided as input.
`DetectFaces` detects the 100 largest faces in the image. For each face
detected, the operation returns face details including a bounding box of
the face, a confidence value (that the bounding box contains a face), and a
fixed set of attributes such as facial landmarks (for example, coordinates
of eye and mouth), gender, presence of beard, sunglasses, etc.
The face-detection algorithm is most effective on frontal faces. For
non-frontal or obscured faces, the algorithm may not detect the faces or
might detect faces with lower confidence.
You pass the input image either as base64-encoded image bytes or as a
reference to an image in an Amazon S3 bucket. If you use the Amazon CLI to
call Amazon Rekognition operations, passing image bytes is not supported.
The image must be either a PNG or JPEG formatted file.
<note> This is a stateless API operation. That is, the operation does not
persist any data.
</note> For an example, see `procedure-detecting-faces-in-images`.
This operation requires permissions to perform the
`rekognition:DetectFaces` action.
"""
def detect_faces(client, input, options \\ []),
  do: request(client, "DetectFaces", input, options)
@doc """
Detects instances of real-world entities within an image (JPEG or PNG)
provided as input. This includes objects like flower, tree, and table;
events like wedding, graduation, and birthday party; and concepts like
landscape, evening, and nature. For an example, see `images-s3`.
<note> `DetectLabels` does not support the detection of activities.
However, activity detection is supported for label detection in videos. For
more information, see .
</note> You pass the input image as base64-encoded image bytes or as a
reference to an image in an Amazon S3 bucket. If you use the Amazon CLI to
call Amazon Rekognition operations, passing image bytes is not supported.
The image must be either a PNG or JPEG formatted file.
For each object, scene, and concept the API returns one or more labels.
Each label provides the object name, and the level of confidence that the
image contains the object. For example, suppose the input image has a
lighthouse, the sea, and a rock. The response will include all three
labels, one for each object.
`{Name: lighthouse, Confidence: 98.4629}`
`{Name: rock,Confidence: 79.2097}`
` {Name: sea,Confidence: 75.061}`
In the preceding example, the operation returns one label for each of the
three objects. The operation can also return multiple labels for the same
object in the image. For example, if the input image shows a flower (for
example, a tulip), the operation might return the following three labels.
`{Name: flower,Confidence: 99.0562}`
`{Name: plant,Confidence: 99.0562}`
`{Name: tulip,Confidence: 99.0562}`
In this example, the detection algorithm more precisely identifies the
flower as a tulip.
In response, the API returns an array of labels. In addition, the response
also includes the orientation correction. Optionally, you can specify
`MinConfidence` to control the confidence threshold for the labels
returned. The default is 50%. You can also add the `MaxLabels` parameter to
limit the number of labels returned.
<note> If the object detected is a person, the operation doesn't provide
the same facial details that the `DetectFaces` operation provides.
</note> This is a stateless API operation. That is, the operation does not
persist any data.
This operation requires permissions to perform the
`rekognition:DetectLabels` action.
"""
def detect_labels(client, input, options \\ []),
  do: request(client, "DetectLabels", input, options)
@doc """
Detects explicit or suggestive adult content in a specified JPEG or PNG
format image. Use `DetectModerationLabels` to moderate images depending on
your requirements. For example, you might want to filter images that
contain nudity, but not images containing suggestive content.
To filter images, use the labels returned by `DetectModerationLabels` to
determine which types of content are appropriate. For information about
moderation labels, see `moderation`.
You pass the input image either as base64-encoded image bytes or as a
reference to an image in an Amazon S3 bucket. If you use the Amazon CLI to
call Amazon Rekognition operations, passing image bytes is not supported.
The image must be either a PNG or JPEG formatted file.
"""
def detect_moderation_labels(client, input, options \\ []),
  do: request(client, "DetectModerationLabels", input, options)
@doc """
Detects text in the input image and converts it into machine-readable text.
Pass the input image as base64-encoded image bytes or as a reference to an
image in an Amazon S3 bucket. If you use the AWS CLI to call Amazon
Rekognition operations, you must pass it as a reference to an image in an
Amazon S3 bucket. For the AWS CLI, passing image bytes is not supported.
The image must be either a .png or .jpeg formatted file.
The `DetectText` operation returns text in an array of elements,
`TextDetections`. Each `TextDetection` element provides information about a
single word or line of text that was detected in the image.
A word is one or more ISO basic latin script characters that are not
separated by spaces. `DetectText` can detect up to 50 words in an image.
A line is a string of equally spaced words. A line isn't necessarily a
complete sentence. For example, a driver's license number is detected as a
line. A line ends when there is no aligned text after it. Also, a line ends
when there is a large gap between words, relative to the length of the
words. This means, depending on the gap between words, Amazon Rekognition
may detect multiple lines in text aligned in the same direction. Periods
don't represent the end of a line. If a sentence spans multiple lines, the
`DetectText` operation returns multiple lines.
To determine whether a `TextDetection` element is a line of text or a word,
use the `TextDetection` object `Type` field.
To be detected, text must be within +/- 30 degrees orientation of the
horizontal axis.
For more information, see `text-detection`.
"""
def detect_text(client, input, options \\ []),
  do: request(client, "DetectText", input, options)
@doc """
Gets the name and additional information about a celebrity based on his or
her Rekognition ID. The additional information is returned as an array of
URLs. If there is no additional information about the celebrity, this list
is empty. For more information, see `get-celebrity-info-procedure`.
This operation requires permissions to perform the
`rekognition:GetCelebrityInfo` action.
"""
def get_celebrity_info(client, input, options \\ []),
  do: request(client, "GetCelebrityInfo", input, options)
@doc """
Gets the celebrity recognition results for a Rekognition Video analysis
started by .
Celebrity recognition in a video is an asynchronous operation. Analysis is
started by a call to which returns a job identifier (`JobId`). When the
celebrity recognition operation finishes, Rekognition Video publishes a
completion status to the Amazon Simple Notification Service topic
registered in the initial call to `StartCelebrityRecognition`. To get the
results of the celebrity recognition analysis, first check that the status
value published to the Amazon SNS topic is `SUCCEEDED`. If so, call
`GetCelebrityDetection` and pass the job identifier (`JobId`) from the
initial call to `StartCelebrityDetection`. For more information, see
`video`.
`GetCelebrityRecognition` returns detected celebrities and the time(s) they
are detected in an array (`Celebrities`) of objects. Each
`CelebrityRecognition` contains information about the celebrity in a object
and the time, `Timestamp`, the celebrity was detected.
By default, the `Celebrities` array is sorted by time (milliseconds from
the start of the video). You can also sort the array by celebrity by
specifying the value `ID` in the `SortBy` input parameter.
The `CelebrityDetail` object includes the celebrity identifier and
additional information URLs. If you don't store the additional information
URLs, you can get them later by calling with the celebrity identifier.
No information is returned for faces not recognized as celebrities.
Use MaxResults parameter to limit the number of labels returned. If there
are more results than specified in `MaxResults`, the value of `NextToken`
in the operation response contains a pagination token for getting the next
set of results. To get the next page of results, call
`GetCelebrityDetection` and populate the `NextToken` request parameter with
the token value returned from the previous call to
`GetCelebrityRecognition`.
"""
def get_celebrity_recognition(client, input, options \\ []),
  do: request(client, "GetCelebrityRecognition", input, options)
@doc """
Gets the content moderation analysis results for a Rekognition Video
analysis started by .
Content moderation analysis of a video is an asynchronous operation. You
start analysis by calling . which returns a job identifier (`JobId`). When
analysis finishes, Rekognition Video publishes a completion status to the
Amazon Simple Notification Service topic registered in the initial call to
`StartContentModeration`. To get the results of the content moderation
analysis, first check that the status value published to the Amazon SNS
topic is `SUCCEEDED`. If so, call `GetCelebrityDetection` and pass the job
identifier (`JobId`) from the initial call to `StartCelebrityDetection`.
For more information, see `video`.
`GetContentModeration` returns detected content moderation labels, and the
time they are detected, in an array, `ModerationLabels`, of objects.
By default, the moderated labels are returned sorted by time, in
milliseconds from the start of the video. You can also sort them by
moderated label by specifying `NAME` for the `SortBy` input parameter.
Since video analysis can return a large number of results, use the
`MaxResults` parameter to limit the number of labels returned in a single
call to `GetContentModeration`. If there are more results than specified in
`MaxResults`, the value of `NextToken` in the operation response contains a
pagination token for getting the next set of results. To get the next page
of results, call `GetContentModeration` and populate the `NextToken`
request parameter with the value of `NextToken` returned from the previous
call to `GetContentModeration`.
For more information, see `moderation`.
"""
def get_content_moderation(client, input, options \\ []),
  do: request(client, "GetContentModeration", input, options)
@doc """
Gets face detection results for a Rekognition Video analysis started by .
Face detection with Rekognition Video is an asynchronous operation. You
start face detection by calling which returns a job identifier (`JobId`).
When the face detection operation finishes, Rekognition Video publishes a
completion status to the Amazon Simple Notification Service topic
registered in the initial call to `StartFaceDetection`. To get the results
of the face detection operation, first check that the status value
published to the Amazon SNS topic is `SUCCEEDED`. If so, call and pass the
job identifier (`JobId`) from the initial call to `StartFaceDetection`.
`GetFaceDetection` returns an array of detected faces (`Faces`) sorted by
the time the faces were detected.
Use MaxResults parameter to limit the number of labels returned. If there
are more results than specified in `MaxResults`, the value of `NextToken`
in the operation response contains a pagination token for getting the next
set of results. To get the next page of results, call `GetFaceDetection`
and populate the `NextToken` request parameter with the token value
returned from the previous call to `GetFaceDetection`.
"""
def get_face_detection(client, input, options \\ []),
  do: request(client, "GetFaceDetection", input, options)
@doc """
Gets the face search results for Rekognition Video face search started by .
The search returns faces in a collection that match the faces of persons
detected in a video. It also includes the time(s) that faces are matched in
the video.
Face search in a video is an asynchronous operation. You start face search
by calling to which returns a job identifier (`JobId`). When the search
operation finishes, Rekognition Video publishes a completion status to the
Amazon Simple Notification Service topic registered in the initial call to
`StartFaceSearch`. To get the search results, first check that the status
value published to the Amazon SNS topic is `SUCCEEDED`. If so, call
`GetFaceSearch` and pass the job identifier (`JobId`) from the initial call
to `StartFaceSearch`. For more information, see `collections`.
The search results are returned in an array, `Persons`, of objects.
Each `PersonMatch` element contains details about the matching faces in the
input collection, person information for the matched person, and the time
the person was matched in the video.
By default, the `Persons` array is sorted by the time, in milliseconds from
the start of the video, persons are matched. You can also sort by persons
by specifying `INDEX` for the `SORTBY` input parameter.
"""
def get_face_search(client, input, options \\ []),
  do: request(client, "GetFaceSearch", input, options)
@doc """
Gets the label detection results of a Rekognition Video analysis started by
.
The label detection operation is started by a call to which returns a job
identifier (`JobId`). When the label detection operation finishes, Amazon
Rekognition publishes a completion status to the Amazon Simple Notification
Service topic registered in the initial call to `StartlabelDetection`. To
get the results of the label detection operation, first check that the
status value published to the Amazon SNS topic is `SUCCEEDED`. If so, call
and pass the job identifier (`JobId`) from the initial call to
`StartLabelDetection`.
`GetLabelDetection` returns an array of detected labels (`Labels`) sorted
by the time the labels were detected. You can also sort by the label name
by specifying `NAME` for the `SortBy` input parameter.
The labels returned include the label name, the percentage confidence in
the accuracy of the detected label, and the time the label was detected in
the video.
Use MaxResults parameter to limit the number of labels returned. If there
are more results than specified in `MaxResults`, the value of `NextToken`
in the operation response contains a pagination token for getting the next
set of results. To get the next page of results, call `GetlabelDetection`
and populate the `NextToken` request parameter with the token value
returned from the previous call to `GetLabelDetection`.
"""
def get_label_detection(client, input, options \\ []),
  do: request(client, "GetLabelDetection", input, options)
@doc """
Gets the person tracking results of a Rekognition Video analysis started by
.
The person detection operation is started by a call to
`StartPersonTracking` which returns a job identifier (`JobId`). When the
person detection operation finishes, Rekognition Video publishes a
completion status to the Amazon Simple Notification Service topic
registered in the initial call to `StartPersonTracking`.
To get the results of the person tracking operation, first check that the
status value published to the Amazon SNS topic is `SUCCEEDED`. If so, call
and pass the job identifier (`JobId`) from the initial call to
`StartPersonTracking`.
`GetPersonTracking` returns an array, `Persons`, of tracked persons and the
time(s) they were tracked in the video.
By default, the array is sorted by the time(s) a person is tracked in the
video. You can sort by tracked persons by specifying `INDEX` for the
`SortBy` input parameter.
Use the `MaxResults` parameter to limit the number of items returned. If
there are more results than specified in `MaxResults`, the value of
`NextToken` in the operation response contains a pagination token for
getting the next set of results. To get the next page of results, call
`GetPersonTracking` and populate the `NextToken` request parameter with the
token value returned from the previous call to `GetPersonTracking`.
"""
def get_person_tracking(client, input, options \\ []),
  do: request(client, "GetPersonTracking", input, options)
@doc """
Detects faces in the input image and adds them to the specified collection.
Amazon Rekognition does not save the actual faces detected. Instead, the
underlying detection algorithm first detects the faces in the input image,
and for each face extracts facial features into a feature vector, and
stores it in the back-end database. Amazon Rekognition uses feature vectors
when performing face match and search operations using the and operations.
If you are using version 1.0 of the face detection model, `IndexFaces`
indexes the 15 largest faces in the input image. Later versions of the face
detection model index the 100 largest faces in the input image. To
determine which version of the model you are using, check the value of
`FaceModelVersion` in the response from `IndexFaces`. For more information,
see `face-detection-model`.
If you provide the optional `ExternalImageID` for the input image you
provided, Amazon Rekognition associates this ID with all faces that it
detects. When you call the operation, the response returns the external ID.
You can use this external image ID to create a client-side index to
associate the faces with each image. You can then use the index to find all
faces in an image.
In response, the operation returns an array of metadata for all detected
faces. This includes, the bounding box of the detected face, confidence
value (indicating the bounding box contains a face), a face ID assigned by
the service for each face that is detected and stored, and an image ID
assigned by the service for the input image. If you request all facial
attributes (using the `detectionAttributes` parameter, Amazon Rekognition
returns detailed facial attributes such as facial landmarks (for example,
location of eye and mouth) and other facial attributes such as gender. If you
provide the same image, specify the same collection, and use the same
external ID in the `IndexFaces` operation, Amazon Rekognition doesn't save
duplicate face metadata.
The input image is passed either as base64-encoded image bytes or as a
reference to an image in an Amazon S3 bucket. If you use the Amazon CLI to
call Amazon Rekognition operations, passing image bytes is not supported.
The image must be either a PNG or JPEG formatted file.
This operation requires permissions to perform the `rekognition:IndexFaces`
action.
"""
def index_faces(client, input, options \\ []),
  do: request(client, "IndexFaces", input, options)
@doc """
Returns list of collection IDs in your account. If the result is truncated,
the response also provides a `NextToken` that you can use in the subsequent
request to fetch the next set of collection IDs.
For an example, see `list-collection-procedure`.
This operation requires permissions to perform the
`rekognition:ListCollections` action.
"""
def list_collections(client, input, options \\ []),
  do: request(client, "ListCollections", input, options)
@doc """
Returns metadata for faces in the specified collection. This metadata
includes information such as the bounding box coordinates, the confidence
(that the bounding box contains a face), and face ID. For an example, see
`list-faces-in-collection-procedure`.
This operation requires permissions to perform the `rekognition:ListFaces`
action.
"""
def list_faces(client, input, options \\ []),
  do: request(client, "ListFaces", input, options)
@doc """
Gets a list of stream processors that you have created with .
"""
def list_stream_processors(client, input, options \\ []),
  do: request(client, "ListStreamProcessors", input, options)
@doc """
Returns an array of celebrities recognized in the input image. For more
information, see `celebrities`.
`RecognizeCelebrities` returns the 100 largest faces in the image. It lists
recognized celebrities in the `CelebrityFaces` array and unrecognized faces
in the `UnrecognizedFaces` array. `RecognizeCelebrities` doesn't return
celebrities whose faces are not amongst the largest 100 faces in the image.
For each celebrity recognized, the `RecognizeCelebrities` returns a
`Celebrity` object. The `Celebrity` object contains the celebrity name, ID,
URL links to additional information, match confidence, and a `ComparedFace`
object that you can use to locate the celebrity's face on the image.
Rekognition does not retain information about which images a celebrity has
been recognized in. Your application must store this information and use
the `Celebrity` ID property as a unique identifier for the celebrity. If
you don't store the celebrity name or additional information URLs returned
by `RecognizeCelebrities`, you will need the ID to identify the celebrity
in a call to the operation.
You pass the input image either as base64-encoded image bytes or as a
reference to an image in an Amazon S3 bucket. If you use the Amazon CLI to
call Amazon Rekognition operations, passing image bytes is not supported.
The image must be either a PNG or JPEG formatted file.
For an example, see `celebrities-procedure-image`.
This operation requires permissions to perform the
`rekognition:RecognizeCelebrities` operation.
"""
def recognize_celebrities(client, input, options \\ []),
  do: request(client, "RecognizeCelebrities", input, options)
@doc """
For a given input face ID, searches for matching faces in the collection
the face belongs to. You get a face ID when you add a face to the
collection using the `IndexFaces` operation. The operation compares the
features of the input face with faces in the specified collection.
<note> You can also search faces without indexing faces by using the
`SearchFacesByImage` operation.
</note> The operation response returns an array of faces that match,
ordered by similarity score with the highest similarity first. More
specifically, it is an array of metadata for each face match that is found.
Along with the metadata, the response also includes a `confidence` value
for each face match, indicating the confidence that the specific face
matches the input face.
For an example, see `search-face-with-id-procedure`.
This operation requires permissions to perform the
`rekognition:SearchFaces` action.
"""
def search_faces(client, input, options \\ []),
  do: request(client, "SearchFaces", input, options)
@doc """
Detects the largest face in the input image, then searches the specified
collection for faces that match it, comparing the features of the detected
face with the faces in the collection.

<note> To search for all faces in an input image, you might first call the
operation, and then use the face IDs returned in subsequent calls to the
operation.

You can also call the `DetectFaces` operation and use the bounding boxes in
the response to make face crops, which then you can pass in to the
`SearchFacesByImage` operation.

</note> The input image is passed either as base64-encoded image bytes or as
a reference to an image in an Amazon S3 bucket. Passing image bytes is not
supported when you call Amazon Rekognition operations with the AWS CLI. The
image must be a PNG or JPEG formatted file.

The response is an array of matching faces ordered from highest to lowest
similarity score; each match carries its metadata and a `similarity` value.
The response also includes the bounding box (and a confidence level that the
bounding box contains a face) of the face Amazon Rekognition used from the
input image.

For an example, see `search-face-with-image-procedure`.

This operation requires permissions to perform the
`rekognition:SearchFacesByImage` action.
"""
def search_faces_by_image(client, input, options \\ []),
  do: request(client, "SearchFacesByImage", input, options)
@doc """
Starts asynchronous recognition of celebrities in a stored video.

Rekognition Video can detect celebrities in a video; the video must be
stored in an Amazon S3 bucket. Use `Video` to specify the bucket name and
the filename of the video. `StartCelebrityRecognition` returns a job
identifier (`JobId`) which you use to get the results of the analysis. When
celebrity recognition analysis is finished, Rekognition Video publishes a
completion status to the Amazon Simple Notification Service topic that you
specify in `NotificationChannel`. To get the results of the celebrity
recognition analysis, first check that the status value published to the
Amazon SNS topic is `SUCCEEDED`. If so, call and pass the job identifier
(`JobId`) from the initial call to `StartCelebrityRecognition`. For more
information, see `celebrities`.
"""
def start_celebrity_recognition(client, input, options \\ []) do
request(client, "StartCelebrityRecognition", input, options)
end
@doc """
Starts asynchronous detection of explicit or suggestive adult content in a
stored video.

Rekognition Video can moderate content in a video stored in an Amazon S3
bucket. Use `Video` to specify the bucket name and the filename of the
video. `StartContentModeration` returns a job identifier (`JobId`) which you
use to get the results of the analysis. When content moderation analysis is
finished, Rekognition Video publishes a completion status to the Amazon
Simple Notification Service topic that you specify in `NotificationChannel`.

To get the results, first check that the status value published to the
Amazon SNS topic is `SUCCEEDED`; if so, call and pass the job identifier
(`JobId`) from the initial call to `StartContentModeration`. For more
information, see `moderation`.
"""
def start_content_moderation(client, input, options \\ []),
  do: request(client, "StartContentModeration", input, options)
@doc """
Starts asynchronous detection of faces in a stored video.

Rekognition Video can detect faces in a video stored in an Amazon S3
bucket. Use `Video` to specify the bucket name and the filename of the
video. `StartFaceDetection` returns a job identifier (`JobId`) that you use
to get the results of the operation. When face detection is finished,
Rekognition Video publishes a completion status to the Amazon Simple
Notification Service topic that you specify in `NotificationChannel`. To
get the results of the face detection operation, first check that the
status value published to the Amazon SNS topic is `SUCCEEDED`. If so, call
and pass the job identifier (`JobId`) from the initial call to
`StartFaceDetection`. For more information, see `faces-video`.
"""
def start_face_detection(client, input, options \\ []) do
request(client, "StartFaceDetection", input, options)
end
@doc """
Starts the asynchronous search for faces in a collection that match the
faces of persons detected in a stored video.

The video must be stored in an Amazon S3 bucket; use `Video` to specify the
bucket name and the filename of the video. `StartFaceSearch` returns a job
identifier (`JobId`) which you use to get the search results once the
search has completed. When searching is finished, Rekognition Video
publishes a completion status to the Amazon Simple Notification Service
topic that you specify in `NotificationChannel`. To get the search results,
first check that the status value published to the Amazon SNS topic is
`SUCCEEDED`; if so, call and pass the job identifier (`JobId`) from the
initial call to `StartFaceSearch`. For more information, see
`collections-search-person`.
"""
def start_face_search(client, input, options \\ []),
  do: request(client, "StartFaceSearch", input, options)
@doc """
Starts asynchronous detection of labels in a stored video.

Rekognition Video can detect labels in a video. Labels are instances of
real-world entities: objects like flower, tree, and table; events like
wedding, graduation, and birthday party; concepts like landscape, evening,
and nature; and activities like a person getting out of a car or a person
skiing.

The video must be stored in an Amazon S3 bucket; use `Video` to specify the
bucket name and the filename of the video. `StartLabelDetection` returns a
job identifier (`JobId`) which you use to get the results of the operation.
When label detection is finished, Rekognition Video publishes a completion
status to the Amazon Simple Notification Service topic that you specify in
`NotificationChannel`.

To get the results of the label detection operation, first check that the
status value published to the Amazon SNS topic is `SUCCEEDED`; if so, call
and pass the job identifier (`JobId`) from the initial call to
`StartLabelDetection`.
"""
def start_label_detection(client, input, options \\ []),
  do: request(client, "StartLabelDetection", input, options)
@doc """
Starts the asynchronous tracking of persons in a stored video.

Rekognition Video can track persons in a video stored in an Amazon S3
bucket. Use `Video` to specify the bucket name and the filename of the
video. `StartPersonTracking` returns a job identifier (`JobId`) which you
use to get the results of the operation. When person tracking is finished,
Amazon Rekognition publishes a completion status to the Amazon Simple
Notification Service topic that you specify in `NotificationChannel`.

To get the results of the person tracking operation, first check that the
status value published to the Amazon SNS topic is `SUCCEEDED`. If so, call
and pass the job identifier (`JobId`) from the initial call to
`StartPersonTracking`.
"""
def start_person_tracking(client, input, options \\ []) do
request(client, "StartPersonTracking", input, options)
end
@doc """
Starts processing a stream processor.

To tell `StartStreamProcessor` which stream processor to start, use the
value of the `Name` field specified in the call to `CreateStreamProcessor`.
"""
def start_stream_processor(client, input, options \\ []),
  do: request(client, "StartStreamProcessor", input, options)
@doc """
Stops a running stream processor (see `start_stream_processor/3`).
"""
def stop_stream_processor(client, input, options \\ []),
  do: request(client, "StopStreamProcessor", input, options)
@spec request(map(), binary(), map(), list()) ::
{:ok, Poison.Parser.t | nil, Poison.Response.t} |
{:error, Poison.Parser.t} |
{:error, HTTPoison.Error.t}
# Signs and POSTs a Rekognition JSON-1.1 request, decoding the response:
#   - 200 with empty body  -> {:ok, nil, response}
#   - 200 with a JSON body -> {:ok, decoded, response}
#   - non-200              -> {:error, {exception_type, message}}
#   - transport failure    -> {:error, %HTTPoison.Error{}}
defp request(client, action, input, options) do
  client = %{client | service: "rekognition"}
  host = get_host("rekognition", client)
  url = get_url(host, client)

  headers = [
    {"Host", host},
    {"Content-Type", "application/x-amz-json-1.1"},
    {"X-Amz-Target", "RekognitionService.#{action}"}
  ]

  payload = Poison.Encoder.encode(input, [])
  headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)

  case HTTPoison.post(url, payload, headers, options) do
    {:ok, %HTTPoison.Response{status_code: 200, body: ""} = response} ->
      {:ok, nil, response}

    {:ok, %HTTPoison.Response{status_code: 200, body: body} = response} ->
      {:ok, Poison.Parser.parse!(body), response}

    {:ok, %HTTPoison.Response{body: body}} ->
      error = Poison.Parser.parse!(body)
      {:error, {error["__type"], error["message"]}}

    {:error, %HTTPoison.Error{} = error} ->
      {:error, error}
  end
end
# Builds the service hostname; the special "local" region maps to localhost
# for development against a local endpoint.
defp get_host(endpoint_prefix, client) do
  case client.region do
    "local" -> "localhost"
    region -> "#{endpoint_prefix}.#{region}.#{client.endpoint}"
  end
end
# Assembles the request URL from the client's protocol scheme and port.
defp get_url(host, %{proto: proto, port: port}) do
  "#{proto}://#{host}:#{port}/"
end
end
|
lib/aws/rekognition.ex
| 0.961116 | 0.927166 |
rekognition.ex
|
starcoder
|
defmodule Nebulex.Adapter do
@moduledoc """
This module specifies the adapter API that a Cache adapter is required to
implement.
"""
# Convenience aliases for the cache-facing types used by every callback.
@type t :: module
@type cache :: Nebulex.Cache.t()
@type key :: Nebulex.Cache.key()
@type object :: Nebulex.Object.t()
@type opts :: Nebulex.Cache.opts()
@doc """
The callback invoked in case the adapter needs to inject code.
"""
@macrocallback __before_compile__(env :: Macro.Env.t()) :: Macro.t()
@doc """
Initializes the adapter supervision tree by returning the child specs
to be supervised.
"""
@callback init(opts) :: {:ok, [:supervisor.child_spec() | {module(), term()} | module()]}
@doc """
Retrieves a single object from cache.
See `c:Nebulex.Cache.get/2`.
"""
@callback get(cache, key, opts) :: object | nil
@doc """
Returns a map with the objects for all specified keys. For every key that
does not hold a value or does not exist, that key is simply ignored.
Because of this, the operation never fails.
See `c:Nebulex.Cache.get_many/2`.
"""
@callback get_many(cache, [key], opts) :: map
@doc """
Sets the given `object` under `key` into the cache.
If the object already exists, it is overwritten. Any previous time to live
associated with the key is discarded on successful `set` operation.
## Options
Besides the "Shared options" section in `Nebulex.Cache` documentation,
it accepts:
* `:action` - It may be one of `:add`, `:replace`, `:set` (the default).
See the "Actions" section for more information.
## Actions
The `:action` option supports the following values:
* `:add` - Only set the `key` if it does not already exist. If it does,
`nil` is returned.
* `:replace` - Alters the object stored under `key`, but only if the object
already exists into the cache.
* `:set` - Set `key` to hold the given `object` (default).
See `c:Nebulex.Cache.set/3`, `c:Nebulex.Cache.add/3`, `c:Nebulex.Cache.replace/3`.
"""
@callback set(cache, object, opts) :: boolean
@doc """
Sets the given `objects`, replacing existing ones, just as regular `set`.
Returns `:ok` if the all objects were successfully set, otherwise
`{:error, failed_keys}`, where `failed_keys` contains the keys that
could not be set.
Ideally, this operation should be atomic, so all given keys are set at once.
But it depends purely on the adapter's implementation and the backend used
internally by the adapter. Hence, it is recommended to checkout the
adapter's documentation.
See `c:Nebulex.Cache.set_many/2`.
"""
@callback set_many(cache, [object], opts) :: :ok | {:error, failed_keys :: [key]}
@doc """
Deletes a single object from cache.
See `c:Nebulex.Cache.delete/2`.
"""
@callback delete(cache, key, opts) :: :ok
@doc """
Returns and removes the object with key `key` in the cache.
See `c:Nebulex.Cache.take/2`.
"""
@callback take(cache, key, opts) :: object | nil
@doc """
Returns whether the given `key` exists in cache.
See `c:Nebulex.Cache.has_key?/1`.
"""
@callback has_key?(cache, key) :: boolean
@doc """
Returns the information associated with `attr` for the given `key`,
or returns `nil` if `key` doesn't exist.
See `c:Nebulex.Cache.object_info/2`.
"""
@callback object_info(cache, key, attr :: :ttl | :version) :: any | nil
@doc """
Returns the expiry timestamp for the given `key`, if the timeout `ttl`
(in seconds) is successfully updated.
If `key` doesn't exist, `nil` is returned.
See `c:Nebulex.Cache.expire/2`.
"""
@callback expire(cache, key, ttl :: timeout) :: timeout | nil
@doc """
Updates (increment or decrement) the counter mapped to the given `key`.
See `c:Nebulex.Cache.update_counter/3`.
"""
@callback update_counter(cache, key, incr :: integer, opts) :: integer
@doc """
Returns the total number of cached entries.
See `c:Nebulex.Cache.size/0`.
"""
@callback size(cache) :: integer
@doc """
Flushes the cache, removing all entries.
See `c:Nebulex.Cache.flush/0`.
"""
@callback flush(cache) :: :ok
end
|
lib/nebulex/adapter.ex
| 0.891952 | 0.439386 |
adapter.ex
|
starcoder
|
defmodule Hui.Encode do
@moduledoc """
Utilities for encoding Solr query and update data structures.
"""
alias Hui.Query.Update
@type query :: Hui.Query.solr_query()
@type options :: Hui.Encode.Options.t()
# Delimiter pairs as {key/value separator, entry separator}.
@url_delimiters {"=", "&"}
# NOTE(review): attribute name is misspelled ("delimters"); it is internal
# only, so renaming is cosmetic and deliberately not done here.
@json_delimters {":", ","}
# Order in which the groups of an Update struct are encoded.
@update_encoding_sequence [:doc, :delete_id, :delete_query, :commit, :optimize, :rollback]
# Field order within each update group.
@update_field_sequence %{
:commit => [:commit, :expungeDeletes, :waitSearcher],
:doc => [:commitWithin, :overwrite, :doc],
:delete_id => [:delete_id],
:delete_query => [:delete_query],
:optimize => [:optimize, :maxSegments, :waitSearcher],
:rollback => [:rollback]
}
defmodule Options do
# Encoding options: output `format`, optional per-field name, key prefix.
defstruct [:per_field, :prefix, format: :url]
@type t :: %__MODULE__{
format: :url | :json,
per_field: binary,
prefix: binary
}
end
@doc """
Encodes keywords list to IO data.
"""
@spec encode(list(keyword), options) :: iodata
def encode(query, opts \\ %Options{})
# JSON "add" message: one {commitWithin, overwrite, doc} object per document.
def encode([commitWithin: c, overwrite: o, doc: d], %{format: :json} = opts) do
docs = if is_list(d), do: d, else: [d]
for doc <- docs do
[
"\"add\"",
":",
"{",
_encode({:commitWithin, c}, opts, {":", ","}),
_encode({:overwrite, o}, opts, {":", ","}),
_encode({:doc, doc}, opts, {":", ""}),
"}",
","
]
end
|> List.flatten |> Enum.reverse() |> tl() |> Enum.reverse() # drop the trailing "," separator
end
# JSON "commit" message; the separator after expungeDeletes is only
# emitted when waitSearcher will follow it.
def encode([commit: true, expungeDeletes: e, waitSearcher: w], %{format: :json} = opts) do
sep = unless is_nil(w), do: elem(@json_delimters, 1), else: ""
[
"\"commit\"",
":",
"{",
_encode({:expungeDeletes, e}, opts, {":", sep}),
_encode({:waitSearcher, w}, opts, {":", ""}),
"}"
]
end
# JSON "optimize" message; same separator handling as "commit" above.
def encode([optimize: true, maxSegments: m, waitSearcher: w], %{format: :json} = opts) do
sep = unless is_nil(w), do: elem(@json_delimters, 1), else: ""
[
"\"optimize\"",
":",
"{",
_encode({:maxSegments, m}, opts, {":", sep}),
_encode({:waitSearcher, w}, opts, {":", ""}),
"}"
]
end
# Generic keyword-list encoding; delimiters depend on the output format.
def encode(query, opts) when is_list(query) do
delimiters = if opts.format == :json, do: @json_delimters, else: @url_delimiters
query
|> remove_fields()
|> _encode(opts, delimiters)
end
# The last element gets no trailing separator.
defp _encode([h | []], %{format: :url} = opts, _), do: [_encode(h, opts, {"=", ""})]
defp _encode([h | []], %{format: :json} = opts, _), do: [_encode(h, opts, {":", ""})]
defp _encode([h | t], opts, del), do: [_encode(h, opts, del) | _encode(t, opts, del)]
# do not render nil valued or empty keyword
defp _encode({_, nil}, _, _), do: ""
defp _encode([], _, _), do: ""
# when value is also a struct, e.g. %Hui.Query.FacetRange/Interval{}
defp _encode({_, %{__struct__: _} = v}, _, _) when is_map(v), do: [Hui.Encoder.encode(v)]
# encodes fq: [x, y] type keyword to "fq=x&fq=y"
defp _encode({k, v}, opts, {eql, sep}) when is_list(v) do
sep0 = if opts.format == :json, do: elem(@json_delimters, 1), else: elem(@url_delimiters, 1)
cond do
k == :delete and is_binary_list?(v) ->
["\"", to_string(k), "\"", eql, Poison.encode!(v), sep]
true ->
[
v
|> Enum.reject(&(&1 == nil or &1 == ""))
|> Enum.map_join(sep0, &_encode({k, &1}, opts, {eql, ""})),
sep
]
end
end
defp _encode({k, v}, %{format: :url}, {eql, sep}),
do: [to_string(k), eql, URI.encode_www_form(to_string(v)), sep]
# JSON rollback renders as an empty object: "rollback":{}
defp _encode({:rollback, true}, %{format: :json}, {eql, sep}),
do: ["\"", "rollback", "\"", eql, "{", "}", sep]
defp _encode({k, v}, %{format: :json}, {eql, sep}) when k == :delete and is_tuple(v) do
value = _encode(v, %{format: :json}, {eql, sep})
["\"", to_string(k), "\"", eql, "{", value, "}", sep]
end
defp _encode({k, v}, %{format: :json}, {eql, sep}),
do: ["\"", to_string(k), "\"", eql, Poison.encode!(v), sep]
@doc """
Transforms built-in query structs to keyword list.
This function maps data struct according to Solr syntax,
addressing prefix, per-field requirement, as well as
adding implicit query fields such as `facet=true`, `hl=true`
"""
@spec transform(query, options) :: list(keyword)
def transform(query, opts \\ %Options{})
# Update structs are encoded group-by-group in @update_encoding_sequence order.
def transform(%{__struct__: Update} = query, %{format: :json} = opts) do
for set <- @update_encoding_sequence do
query
|> extract_update_fields(set)
|> _transform(opts)
end
|> Enum.reject(&(&1 == []))
end
def transform(%{__struct__: Update} = _, %{format: f}) when f != :json do
raise "#{f} format is not supported. Hui currently only encodes update message in JSON."
end
def transform(%{__struct__: _} = query, opts) do
query
|> Map.to_list()
|> remove_fields()
|> _transform(opts)
end
# render keywords according to Solr prefix / per field syntax
# e.g. transform `field: "year"` into `"facet.field": "year"`, `f.[field].facet.gap` etc.
defp _transform([], _), do: []
defp _transform([h | []], opts), do: [_transform(h, opts)]
defp _transform([h | t], opts), do: [_transform(h, opts) | _transform(t, opts)]
defp _transform({k, v}, %{prefix: k_prefix, per_field: per_field_field}) do
cond do
k_prefix && String.ends_with?(k_prefix, to_string(k)) -> {:"#{k_prefix}", v}
k_prefix && per_field_field == nil -> {:"#{k_prefix}.#{k}", v}
k_prefix && per_field_field != nil -> {:"f.#{per_field_field}.#{k_prefix}.#{k}", v}
k == :delete_id and is_list(v) -> {:delete, v |> Enum.map(&{:id, &1})}
k == :delete_id and is_binary(v) -> {:delete, {:id, v}}
k == :delete_query and is_list(v) -> {:delete, v |> Enum.map(&{:query, &1})}
k == :delete_query and is_binary(v) -> {:delete, {:query, v}}
true -> {k, v}
end
end
# Drops empty/nil values and struct bookkeeping keys before encoding.
defp remove_fields(query) do
query
|> Enum.reject(fn {k, v} ->
is_nil(v) or v == "" or v == [] or k == :__struct__ or k == :per_field
end)
end
# Returns the {field, value} pairs for one update group in its declared
# field order, or [] when the group's main field is absent or false.
defp extract_update_fields(%{__struct__: _} = q, group) do
sequence = @update_field_sequence[group]
main_fl = Map.get(q, group)
if main_fl != false and main_fl != nil do
for fl <- sequence do
{fl, q |> Map.get(fl)}
end
else
[]
end
end
# True when v is a list whose first element is a binary (string list check).
defp is_binary_list?(v) do
is_list(v) && is_binary(List.first(v))
end
end
|
lib/hui/encode.ex
| 0.755727 | 0.487063 |
encode.ex
|
starcoder
|
defmodule Nat do
  @moduledoc """
  Tiny natural-language helpers: tokenizing text and embedding words.
  """

  @doc """
  Splits `str` on spaces into a token list, first detaching any `.` so a
  sentence-final period becomes its own token.
  """
  def corpus(str) do
    str
    |> String.replace(".", " .")
    |> String.split(" ")
  end

  @doc """
  Maps each character of `x` to `(codepoint - 96) / 26`, so the lowercase
  letters `a`..`z` land on 1/26 .. 1.0.
  """
  def wordvec(x) do
    for ch <- String.to_charlist(x), do: (ch - 96) / 26
  end
end
defmodule Rnn do
  @moduledoc """
  Forward pass of a simple (Elman-style) recurrent network, unrolled over a
  list of inputs.

  Weights are supplied as a flat list `[wx, wh, b | rest]` — one triple per
  step. Each step computes `tanh(x*wx + h*wh + b)` and feeds the result
  forward as the next hidden state. Results are accumulated newest-first.
  """

  # Weight list exhausted: return the accumulated hidden states.
  def forward(_, _, [], res), do: res

  def forward([x | xs], h, [wx, wh, b | ls], res) do
    dt = Cmatrix.add(Cmatrix.add(Cmatrix.mult(x, wx), Cmatrix.mult(h, wh)), b)
    dt1 = Cmatrix.apply_function(dt, fn x -> DP.tanh(x) end)
    forward(xs, dt1, ls, [dt1 | res])
  end

  @doc """
  Forward pass used by backpropagation.

  The original implementation was a verbatim duplicate of `forward/4`;
  it now delegates to avoid the two copies drifting apart. Behavior is
  unchanged.
  """
  def forward_for_back(xs, h, ls, res), do: forward(xs, h, ls, res)
end
# LSTM
defmodule Lstm do
  @moduledoc """
  Forward and backward passes for an unrolled LSTM.

  Weights are supplied as a flat list `[wx, wh, b | rest]`, one triple per
  step of the unrolled network. The combined affine output `dt` is sliced
  column-wise into the four gates by `partf/1`, `partg/1`, `parti/1` and
  `parto/1`.
  """

  # Weight list exhausted: return the accumulated hidden states (newest first).
  def forward(_, _, _, [], res), do: res

  def forward([x | xs], h, c, [wx, wh, b | ls], res) do
    dt = Cmatrix.add(Cmatrix.add(Cmatrix.mult(x, wx), Cmatrix.mult(h, wh)), b)
    f = dt |> partf |> Cmatrix.apply_function(fn x -> DP.sigmoid(x) end)
    g = dt |> partg |> Cmatrix.apply_function(fn x -> DP.tanh(x) end)
    i = dt |> parti |> Cmatrix.apply_function(fn x -> DP.sigmoid(x) end)
    o = dt |> parto |> Cmatrix.apply_function(fn x -> DP.sigmoid(x) end)
    c1 = Cmatrix.add(Cmatrix.emult(c, f), Cmatrix.emult(g, i))
    h1 = Cmatrix.emult(o, Cmatrix.apply_function(c1, fn x -> DP.tanh(x) end))
    # BUG FIX: the updated hidden state h1 must feed the next step. The
    # previous version passed the stale `h` while correctly passing `c1`
    # (compare Rnn.forward/4, which propagates its `dt1`).
    forward(xs, h1, c1, ls, [h1 | res])
  end

  # Forward pass that additionally records per-step intermediates
  # [c1, h1, x1, f, g, i, o] for consumption by backpropagation/4.
  def forward_for_back(_, _, _, [], res), do: res

  def forward_for_back([x, x1 | xs], h, c, [wx, wh, b | ls], res) do
    dt = Cmatrix.add(Cmatrix.add(Cmatrix.mult(x, wx), Cmatrix.mult(h, wh)), b)
    f = dt |> partf |> Cmatrix.apply_function(fn x -> DP.sigmoid(x) end)
    g = dt |> partg |> Cmatrix.apply_function(fn x -> :math.tanh(x) end)
    i = dt |> parti |> Cmatrix.apply_function(fn x -> DP.sigmoid(x) end)
    o = dt |> parto |> Cmatrix.apply_function(fn x -> DP.sigmoid(x) end)
    c1 = Cmatrix.add(Cmatrix.emult(c, f), Cmatrix.emult(g, i))
    h1 = Cmatrix.emult(o, Cmatrix.apply_function(c1, fn x -> :math.tanh(x) end))
    # BUG FIX: propagate h1 (see forward/5).
    # NOTE(review): this clause requires at least two remaining inputs, so
    # the weight list is expected to run out first — confirm with callers.
    forward_for_back([x1 | xs], h1, c1, ls, [[c1, h1, x1, f, g, i, o] | res])
  end

  # LSTM backpropagation
  # l = loss vector
  # e = list of expanded LSTM. Each element is [wx,wh,b]
  # 3rd argument is saved middle predict data (from forward_for_back/5)
  # 4th argument is gradient of [wx,wh,b] list
  def backpropagation(_, [], _, res), do: res

  def backpropagation(
        l,
        [[_, wh, _] | es],
        [[c1, _, _, f1, g1, i1, o1], [c2, h2, x2, f2, g2, i2, o2] | us],
        res
      ) do
    # NOTE(review): the gate gradients below (df from f1, dg from i1,
    # di from g1) reproduce the original implementation; verify against a
    # reference LSTM gradient derivation before relying on them.
    dc = l |> Cmatrix.emult(Cmatrix.apply_function(o1, fn x -> DP.dtanh(x) end))
    df = dc |> Cmatrix.emult(f1) |> Cmatrix.apply_function(fn x -> DP.dsigmoid(x) end)
    dg = dc |> Cmatrix.emult(i1) |> Cmatrix.apply_function(fn x -> DP.dtanh(x) end)
    di = dc |> Cmatrix.emult(g1) |> Cmatrix.apply_function(fn x -> DP.dsigmoid(x) end)

    do0 =
      l
      |> Cmatrix.emult(Cmatrix.apply_function(c1, fn x -> DP.tanh(x) end))
      |> Cmatrix.apply_function(fn x -> DP.dsigmoid(x) end)

    # Recombine the per-gate gradients into one matrix, then push the
    # weight gradients for this step and the loss for the previous step.
    d = Cmatrix.stick(df, dg, di, do0)
    b1 = d
    wh1 = Cmatrix.mult(Cmatrix.transpose(h2), d)
    l1 = Cmatrix.mult(d, Cmatrix.transpose(wh))
    wx1 = Cmatrix.mult(Cmatrix.transpose(x2), d)
    backpropagation(l1, es, [[c2, h2, x2, f2, g2, i2, o2] | us], [[wx1, wh1, b1] | res])
  end

  # The four helpers below slice the gate matrix into equal column quarters.
  def partf(m) do
    {r, c} = m[:size]
    size = div(c, 4)
    Cmatrix.part(m, 1, 1, r, size)
  end

  def partg(m) do
    {r, c} = m[:size]
    size = div(c, 4)
    Cmatrix.part(m, 1, size + 1, r, size)
  end

  def parti(m) do
    {r, c} = m[:size]
    size = div(c, 4)
    Cmatrix.part(m, 1, size * 2 + 1, r, size)
  end

  def parto(m) do
    {r, c} = m[:size]
    size = div(c, 4)
    Cmatrix.part(m, 1, size * 3 + 1, r, size)
  end
end
|
lib/natlang.ex
| 0.590661 | 0.612397 |
natlang.ex
|
starcoder
|
defmodule Kalevala.Character.Foreman do
@moduledoc """
Session Foreman
Manages data flowing from the player into the game.
"""
use GenServer
require Logger
alias Kalevala.Character.Conn
alias Kalevala.Event
alias Kalevala.Character.Foreman.Channel
@type t() :: %__MODULE__{}
defstruct [
:callback_module,
:character,
:communication_module,
:controller,
:supervisor_name,
processing_action: nil,
action_queue: [],
private: %{},
session: %{},
flash: %{}
]
@doc """
Start a new foreman for a connecting player
"""
def start_player(protocol_pid, options) do
options =
Keyword.merge(options,
callback_module: Kalevala.Character.Foreman.Player,
protocol: protocol_pid
)
DynamicSupervisor.start_child(options[:supervisor_name], {__MODULE__, options})
end
@doc """
Start a new foreman for a non-player (character run by the world)
"""
def start_non_player(options) do
options = Keyword.merge(options, callback_module: Kalevala.Character.Foreman.NonPlayer)
DynamicSupervisor.start_child(options[:supervisor_name], {__MODULE__, options})
end
@doc false
def start_link(opts) do
GenServer.start_link(__MODULE__, opts, [])
end
# Controller initialization is deferred to handle_continue(:init_controller, ...)
# so init/1 returns quickly.
@impl true
def init(opts) do
opts = Enum.into(opts, %{})
state = %__MODULE__{
callback_module: opts.callback_module,
communication_module: opts.communication_module,
controller: opts.initial_controller,
supervisor_name: opts.supervisor_name
}
state = opts.callback_module.init(state, opts)
{:ok, state, {:continue, :init_controller}}
end
@doc false
def new_conn(state) do
%Conn{
character: state.character,
session: state.session,
flash: state.flash,
private: %Conn.Private{
request_id: Conn.Private.generate_request_id()
}
}
end
@impl true
def handle_continue(:init_controller, state) do
new_conn(state)
|> state.controller.init()
|> handle_conn(state)
end
# Incoming text from the connection is handed to the current controller.
@impl true
def handle_info({:recv, :text, data}, state) do
new_conn(state)
|> state.controller.recv(data)
|> handle_conn(state)
end
def handle_info({:recv, :event, event}, state) do
new_conn(state)
|> state.controller.recv_event(event)
|> handle_conn(state)
end
def handle_info(event = %Event{}, state) do
new_conn(state)
|> state.controller.event(event)
|> handle_conn(state)
end
# Route events: attach the event to a fresh conn and let handle_conn/2
# forward it, without involving the controller.
def handle_info({:route, event = %Event{}}, state) do
new_conn(state)
|> Map.put(:events, [event])
|> handle_conn(state)
end
def handle_info(event = %Event.Delayed{}, state) do
event = Event.Delayed.to_event(event)
new_conn(state)
|> Map.put(:events, [event])
|> handle_conn(state)
end
def handle_info(event = %Event.Display{}, state) do
new_conn(state)
|> state.controller.display(event)
|> handle_conn(state)
end
# Runs the action previously scheduled by handle_actions/1; any other
# action is a stale timer and is ignored.
def handle_info({:process_action, action}, state) do
case state.processing_action == action do
true ->
Logger.info(
"Processing #{inspect(action.type)}, #{Enum.count(state.action_queue)} left in the queue.",
request_id: action.request_id
)
state = Map.put(state, :processing_action, nil)
new_conn(state)
|> action.type.run(action.params)
|> handle_conn(state)
false ->
Logger.warn("Character tried processing an action that was not next", type: :foreman)
{:noreply, state}
end
end
def handle_info(:terminate, state) do
state.callback_module.terminating(state)
DynamicSupervisor.terminate_child(state.supervisor_name, self())
{:noreply, state}
end
@doc """
Handle the conn struct after processing
Flushes the conn's channels/options/output/events, merges its session and
flash back into state, queues its actions, and either terminates (when the
conn was halted) or continues with possibly-updated character/controller.
"""
def handle_conn(conn, state) do
conn
|> Channel.handle_channels(state)
|> send_options(state)
|> send_output(state)
|> send_events()
session = Map.merge(state.session, conn.session)
flash = Map.merge(state.flash, conn.flash)
state =
state
|> Map.put(:session, session)
|> Map.put(:flash, flash)
|> Map.put(:action_queue, state.action_queue ++ conn.private.actions)
case conn.private.halt? do
true ->
state.callback_module.terminate(state)
{:noreply, state}
false ->
state
|> handle_actions()
|> update_character(conn)
|> update_controller(conn)
end
end
# Schedules the next queued action only when none is currently in flight;
# the action fires later as a {:process_action, action} message.
defp handle_actions(state = %{processing_action: nil, action_queue: [action | actions]}) do
Logger.info(
"Delaying #{inspect(action.type)} for #{action.delay}ms with #{inspect(action.params)}",
request_id: action.request_id
)
Process.send_after(self(), {:process_action, action}, action.delay)
state
|> Map.put(:processing_action, action)
|> Map.put(:action_queue, actions)
end
defp handle_actions(state), do: state
defp send_options(conn, state) do
state.callback_module.send_options(state, conn.options)
conn
end
defp send_output(conn, state) do
state.callback_module.send_output(state, conn.output)
conn
end
@doc false
def send_events(conn) do
# Plain events go to the event router now; delayed events are re-sent to
# this process after their delay.
{events, delayed_events} =
Enum.split_with(conn.events, fn event ->
match?(%Kalevala.Event{}, event)
end)
Enum.each(delayed_events, fn delayed_event ->
Process.send_after(self(), delayed_event, delayed_event.delay)
end)
case Conn.event_router(conn) do
nil ->
conn
event_router ->
Enum.each(events, fn event ->
send(event_router, event)
end)
conn
end
end
defp update_character(state, conn) do
case is_nil(conn.private.update_character) do
true ->
state
false ->
state.callback_module.track_presence(state, conn)
%{state | character: conn.private.update_character}
end
end
# Switching controllers re-runs the :init_controller continue.
defp update_controller(state, conn) do
case is_nil(conn.private.next_controller) do
true ->
{:noreply, state}
false ->
state =
state
|> Map.put(:controller, conn.private.next_controller)
|> Map.put(:flash, conn.private.next_controller_flash)
{:noreply, state, {:continue, :init_controller}}
end
end
end
defmodule Kalevala.Character.Foreman.Callbacks do
@moduledoc """
Callbacks for integrating with the character foreman process
"""
alias Kalevala.Character.Conn
alias Kalevala.Character.Foreman
@type state() :: Foreman.t()
@typedoc "Options for starting the foreman process"
@type opts() :: Keyword.t()
@doc """
Fill in state with any passed in options
"""
@callback init(state(), opts()) :: state()
@doc """
Called when the foreman process is halted through a conn
Perform whatever actions are required to start terminating.
"""
@callback terminate(state()) :: :ok
@doc """
The process is terminating from a `:terminate` message
Perform whatever is required before terminating.
"""
@callback terminating(state()) :: :ok
@doc """
Send options to a connection process
"""
@callback send_options(state(), list()) :: :ok
@doc """
Send text output to a connection process
"""
@callback send_output(state(), list()) :: :ok
@doc """
The character updated and presence should be tracked
"""
@callback track_presence(state, Conn.t()) :: :ok
end
defmodule Kalevala.Character.Foreman.Player do
@moduledoc """
Callbacks for a player character
"""
alias Kalevala.Character.Conn
alias Kalevala.Character.Foreman
alias Kalevala.Event
@behaviour Kalevala.Character.Foreman.Callbacks
# Private foreman state for a player: the protocol (connection) pid, the
# presence tracker module, and the {view, template} used on quit.
defstruct [:protocol, :presence_module, :quit_view]
@impl true
def init(state, opts) do
private = %__MODULE__{
protocol: opts.protocol,
presence_module: opts.presence_module,
quit_view: opts.quit_view
}
%{state | private: private}
end
# Halting: ask the protocol process to shut down.
@impl true
def terminate(state) do
send(state.private.protocol, :terminate)
end
@impl true
def terminating(%{character: nil}), do: :ok
# Announce the character leaving via a movement event before shutdown.
def terminating(state) do
{quit_view, quit_template} = state.private.quit_view
conn = Foreman.new_conn(state)
event = %Event{
topic: Event.Movement,
data: %Event.Movement{
character: Conn.Private.character(conn),
direction: :from,
reason: quit_view.render(quit_template, %{character: state.character}),
room_id: state.character.room_id
}
}
conn
|> Map.put(:events, [event])
|> Foreman.send_events()
end
# Options and output are forwarded to the protocol process as {:send, _}.
@impl true
def send_options(state, options) do
Enum.each(options, fn option ->
send(state.private.protocol, {:send, option})
end)
end
@impl true
def send_output(state, text) do
Enum.each(text, fn line ->
send(state.private.protocol, {:send, line})
end)
end
@impl true
def track_presence(state, conn) do
state.private.presence_module.track(Conn.character(conn, trim: true))
end
end
defmodule Kalevala.Character.Foreman.NonPlayer do
  @moduledoc """
  Callbacks for a non-player character

  A world-run character has no connection process, so the option/output
  callbacks are no-ops.
  """

  require Logger

  alias Kalevala.Character.Conn
  alias Kalevala.Character.Foreman
  alias Kalevala.Event

  @behaviour Kalevala.Character.Foreman.Callbacks

  # Private foreman state: the {view, template} rendered when the
  # character quits.
  defstruct [:quit_view]

  @impl true
  def init(state, opts) do
    Logger.info("Character starting - #{opts.character.id}")

    private = %__MODULE__{
      quit_view: opts.quit_view
    }

    %{state | character: %{opts.character | pid: self()}, private: private}
  end

  # Nothing to tear down for a world-run character. Returns :ok to satisfy
  # the `terminate/1` callback contract — the previous version returned
  # `state`, which violated the behaviour's `:: :ok` spec; the foreman
  # ignores the return value either way.
  @impl true
  def terminate(_state), do: :ok

  @impl true
  def terminating(%{character: nil}), do: :ok

  # Announce the character leaving via a movement event before shutdown.
  def terminating(state) do
    {quit_view, quit_template} = state.private.quit_view
    conn = Foreman.new_conn(state)

    event = %Event{
      topic: Event.Movement,
      data: %Event.Movement{
        character: Conn.Private.character(conn),
        direction: :from,
        reason: quit_view.render(quit_template, %{character: state.character}),
        room_id: state.character.room_id
      }
    }

    conn
    |> Map.put(:events, [event])
    |> Foreman.send_events()
  end

  @impl true
  def send_options(_state, _options), do: :ok

  @impl true
  def send_output(_state, _output), do: :ok

  @impl true
  def track_presence(_state, _conn), do: :ok
end
|
lib/kalevala/character/foreman.ex
| 0.811825 | 0.404743 |
foreman.ex
|
starcoder
|
defmodule IceCream do
# Debug-print helper in the spirit of Python's `icecream` package: `ic/0..2`
# echoes an expression together with a label derived from its source text.
defmacro __using__(_opts) do
quote do
require IceCream
import IceCream
end
end
@doc """
Prints the calling filename, line number, and parent module/function. It returns an `:ok` atom.
```elixir
# lib/foo.ex
defmodule Foo do
import IceCream
def bar do
ic()
end
end
# running Foo.bar()
Foo.bar() # ic| lib/foo.ex:5 in Elixir.Foo.bar/0
:ok
```
"""
defmacro ic() do
quote do
IceCream.build_label("", __ENV__, function: true, location: true)
|> IO.puts()
end
end
@doc """
Prints the term with itself as a label. Returns the evaluated term.
## Examples
#### Variables
```
foo = "abc"
ic(foo) # ic| foo: "abc"
"abc"
```
#### Module Function Argument calls
```
ic(:math.pow(2,3)) # ic| :math.pow(2,3): 8.0
8.0
```
It also works with pipes
```
2
|> :math.pow(3)
|> ic() # ic| :math.pow(2,3): 8.0`
8.0
```
## Options
Accepts the same options as the Inspect protocol. (see: [`Inspect.Opts`](https://hexdocs.pm/elixir/Inspect.Opts.html)), with some additions:
* `:location` - when truthy, will add the file name and line number.
* `:function` - when truthy, will print out the module name with the function name and arity.
```
# lib/foo.ex
defmodule Foo do
import IceCream
def bar(baz) do
ic(baz, location: true, function: true)
end
end
# running Foo.bar()
Foo.bar(1.0) # ic| lib/foo.ex:5 in Elixir.Foo.bar/1 baz: 1.0
1.0
```
"""
defmacro ic(term, opts \\ []) do
# The label is the source text of the expression, with any nested ic()
# wrappers stripped so piping through ic/1 labels the underlying call.
label_io_list = [Macro.to_string(replace_ic(term))]
quote do
label = IceCream.build_label(unquote(label_io_list), __ENV__, unquote(opts))
inspect_opts = Keyword.merge([label: label], unquote(opts))
IO.inspect(unquote(term), inspect_opts)
end
end
@doc false
def build_label(term_string, env, opts) do
# Explicit opts take precedence over application-wide :ice_cream config.
opts = Keyword.merge(Application.get_all_env(:ice_cream), opts)
[term_string]
|> maybe_prepend_function(Keyword.get(opts, :function, false), env)
|> maybe_prepend_location(Keyword.get(opts, :location, false), env)
|> prepend_ic()
end
# Recursively drops ic/1..2 wrappers from the AST used for the label.
defp replace_ic({:ic, _meta, args}), do: replace_ic(List.first(args))
defp replace_ic({f, m, args}) when is_list(args), do: {f, m, Enum.map(args, &replace_ic(&1))}
defp replace_ic(ast), do: ast
# Prepends "in Module.fun/arity " when requested and available.
defp maybe_prepend_function(label_io_list, prepend?, env)
defp maybe_prepend_function(label_io_list, false, _), do: label_io_list
defp maybe_prepend_function(label_io_list, true, %{function: nil}), do: label_io_list
defp maybe_prepend_function(label_io_list, true, env) do
%{function: {func, arity}, module: module} = env
[
"in ",
String.replace_leading(to_string(module), "Elixir.", ""),
".",
to_string(func),
"/",
to_string(arity),
" " | label_io_list
]
end
# Prepends "path/to/file.ex:LINE " when requested.
defp maybe_prepend_location(label_io_list, prepend?, env)
defp maybe_prepend_location(label_io_list, false, _), do: label_io_list
defp maybe_prepend_location(label_io_list, true, env) do
%{file: file, line: line} = env
file = Path.relative_to_cwd(file)
[file, ":", to_string(line), " " | label_io_list]
end
defp prepend_ic(label_io_list), do: ["ic| " | label_io_list]
end
|
lib/ice_cream.ex
| 0.820721 | 0.792143 |
ice_cream.ex
|
starcoder
|
defmodule InfinityOne.TabBar do
  @moduledoc """
  Manage the TabBar data store.

  Keeps button definitions and per-user/per-channel ftab state in a public,
  named ETS table.
  """

  # Name of the ETS table backing the store.
  @name :tabbar

  @doc """
  Initialize the TabBar data store.
  """
  def initialize do
    :ets.new(@name, [:public, :named_table])
  end

  @doc """
  Insert an entry into the data store
  """
  def insert(key, value) do
    :ets.insert(@name, {key, value})
  end

  @doc """
  Lookup a value from the data store
  """
  def lookup(key) do
    :ets.lookup(@name, key)
  end

  @doc """
  Add a button to the button store
  ## Examples
      iex> InfinityOne.TabBar.add_button %{id: "one", name: "B1"}
      true
  """
  def add_button(config) do
    insert({:button, config.id}, config)
  end

  @doc """
  Get a button from the button store
  ## Examples
      iex> InfinityOne.TabBar.add_button %{id: "one", name: "B1"}
      iex> InfinityOne.TabBar.get_button "one"
      %{id: "one", name: "B1"}
  """
  def get_button(key) do
    case lookup({:button, key}) do
      [{_key, config}] -> config
      _ -> nil
    end
  end

  @doc """
  Get a button from the button store
  ## Examples
      iex> InfinityOne.TabBar.add_button %{id: "one", name: "B1"}
      iex> InfinityOne.TabBar.get_button! "one"
      %{id: "one", name: "B1"}
  """
  def get_button!(key) do
    get_button(key) || raise("invalid button #{key}")
  end

  @doc """
  Get all buttons from the button store
  ## Examples
      iex> InfinityOne.TabBar.add_button %{id: "one", name: "B1", display: true}
      iex> InfinityOne.TabBar.get_buttons
      [%{id: "one", name: "B1", display: true}]
  """
  def get_buttons() do
    @name
    |> :ets.match({{:button, :"_"}, :"$2"})
    |> List.flatten()
    |> Enum.filter(fn config -> config.display end)
    |> Enum.sort(fn a, b -> a.order < b.order end)
  end

  # Replaces one field of a stored button and writes it back.
  def update_button(key, field, value) do
    key
    |> get_button()
    |> Map.put(field, value)
    |> add_button()
  end

  # Marks the button visible.
  def show_button(key), do: update_button(key, :display, true)

  # Marks the button hidden.
  def hide_button(key), do: update_button(key, :display, false)

  @doc """
  Add a ftab from the ftab store
  ## Examples
      iex> InfinityOne.TabBar.open_ftab 1, 2, "test", nil
      true
  """
  def open_ftab(user_id, channel_id, name, nil) do
    # With no explicit view, fall back to a previously stored view (if any).
    case get_view(user_id, channel_id, name) do
      nil -> insert({:ftab, {user_id, channel_id}}, {name, nil})
      view -> insert({:ftab, {user_id, channel_id}}, {name, view})
    end
  end

  def open_ftab(user_id, channel_id, name, view) do
    insert({:ftab, {user_id, channel_id}}, {name, view})
    open_view(user_id, channel_id, name, view)
  end

  @doc """
  Get a ftab from the ftab store
  ## Examples
      iex> InfinityOne.TabBar.open_ftab 1, 2, "test", nil
      iex> InfinityOne.TabBar.get_ftab 1, 2
      {"test", nil}
      iex> InfinityOne.TabBar.open_ftab 1, 2, "test", %{one: 1}
      iex> InfinityOne.TabBar.get_ftab 1, 2
      {"test", %{one: 1}}
  """
  def get_ftab(user_id, channel_id) do
    case lookup({:ftab, {user_id, channel_id}}) do
      [{_key, data}] -> data
      _ -> nil
    end
  end

  @doc """
  Get the open view from the ftab store.
  ## Examples
      iex> InfinityOne.TabBar.insert {:ftab_view, {1, 2, "test"}}, %{one: 1}
      iex> InfinityOne.TabBar.get_view 1, 2, "test"
      %{one: 1}
  """
  def get_view(user_id, channel_id, name) do
    case lookup({:ftab_view, {user_id, channel_id, name}}) do
      [{_key, data}] -> data
      _ -> nil
    end
  end

  @doc """
  Inserts a ftab view into the store.
  ## Examples
      iex> InfinityOne.TabBar.open_view 1, 2, "other", %{two: 2}
      iex> InfinityOne.TabBar.get_view 1, 2, "other"
      %{two: 2}
  """
  def open_view(user_id, channel_id, name, view) do
    insert({:ftab_view, {user_id, channel_id, name}}, view)
  end

  @doc """
  Removes a ftab view from the store.
  ## Examples
      iex> InfinityOne.TabBar.open_view 1, 2, "other", %{two: 2}
      iex> InfinityOne.TabBar.close_view 1, 2, "other"
      iex> InfinityOne.TabBar.get_view 1, 2, "other"
      nil
  """
  def close_view(user_id, channel_id, name) do
    :ets.delete(@name, {:ftab_view, {user_id, channel_id, name}})
    insert({:ftab, {user_id, channel_id}}, {name, nil})
  end

  @doc """
  Close a ftab
  Removes the ftab entry
  ## Examples
      iex> InfinityOne.TabBar.open_ftab 1, 2, "test", nil
      iex> InfinityOne.TabBar.get_ftab 1, 2
      {"test", nil}
      iex> InfinityOne.TabBar.close_ftab 1, 2
      iex> InfinityOne.TabBar.get_ftab 1, 2
      nil
  """
  def close_ftab(user_id, channel_id) do
    :ets.delete(@name, {:ftab, {user_id, channel_id}})
  end

  @doc """
  Get all ftabs
  ## Examples
      iex> InfinityOne.TabBar.open_ftab 1, 2, "test", nil
      iex> InfinityOne.TabBar.open_ftab 1, 3, "other", %{one: 1}
      iex> InfinityOne.TabBar.get_ftabs |> Enum.sort
      [[{1, 2}, {"test", nil}], [{1, 3}, {"other", %{one: 1}}]]
  """
  def get_ftabs() do
    :ets.match(@name, {{:ftab, :"$1"}, :"$2"})
  end

  @doc """
  Get all views
  ## Examples
  """
  def get_views() do
    :ets.match(@name, {{:ftab_view, :"$1"}, :"$2"})
  end

  @doc """
  Get all tabs for a given user.
  ## Examples
      iex> InfinityOne.TabBar.open_ftab 1, 2, "test", nil
      iex> InfinityOne.TabBar.open_ftab 1, 3, "other", %{one: 1}
      iex> InfinityOne.TabBar.open_ftab 2, 3, "other", %{one: 2}
      iex> InfinityOne.TabBar.get_ftabs(1) |> Enum.sort
      [{"other", %{one: 1}}, {"test", nil}]
  """
  def get_ftabs(user_id) do
    @name
    |> :ets.match({{:ftab, {user_id, :"_"}}, :"$2"})
    |> List.flatten()
  end

  @doc """
  Get all views for a given user
  """
  def get_views(user_id) do
    @name
    |> :ets.match({{:ftab_view, {user_id, :"_", :"$1"}}, :"$2"})
    |> List.flatten()
  end

  @doc """
  Close all ftabs for a given user.
  ## Examples
      iex> InfinityOne.TabBar.open_ftab 1, 2, "test", nil
      iex> InfinityOne.TabBar.open_ftab 1, 3, "other", %{one: 1}
      iex> InfinityOne.TabBar.open_ftab 2, 3, "other", %{one: 2}
      iex> InfinityOne.TabBar.close_user_ftabs 1
      iex> InfinityOne.TabBar.get_ftabs
      [[{2, 3}, {"other", %{one: 2}}]]
  """
  def close_user_ftabs(user_id) do
    :ets.match_delete(@name, {{:ftab, {user_id, :"_"}}, :"_"})
  end

  @doc """
  Close all ftabs for a given channel.
  ## Examples
      iex> InfinityOne.TabBar.open_ftab 1, 2, "test", nil
      iex> InfinityOne.TabBar.open_ftab 1, 3, "other", %{one: 1}
      iex> InfinityOne.TabBar.open_ftab 2, 3, "other", %{one: 2}
      iex> InfinityOne.TabBar.close_channel_ftabs 3
      iex> InfinityOne.TabBar.get_ftabs
      [[{1, 2}, {"test", nil}]]
  """
  def close_channel_ftabs(channel_id) do
    :ets.match_delete(@name, {{:ftab, {:"_", channel_id}}, :"_"})
  end

  @doc """
  Delete all ftabs
  ## Examples
      iex> InfinityOne.TabBar.open_ftab 1, 2, "test", nil
      iex> InfinityOne.TabBar.open_ftab 1, 3, "other", %{one: 1}
      iex> InfinityOne.TabBar.open_ftab 2, 3, "other", %{one: 2}
      iex> InfinityOne.TabBar.add_button %{id: "one", name: "B1", display: true}
      iex> InfinityOne.TabBar.delete_ftabs
      iex> InfinityOne.TabBar.get_ftabs
      []
      iex> InfinityOne.TabBar.get_buttons
      [%{id: "one", name: "B1", display: true}]
  """
  def delete_ftabs do
    :ets.match_delete(@name, {{:ftab, :"_"}, :"_"})
  end

  @doc """
  Delete all ftabs
  ## Examples
      iex> InfinityOne.TabBar.open_ftab 1, 2, "test", nil
      iex> InfinityOne.TabBar.add_button %{id: "one", name: "B1"}
      iex> InfinityOne.TabBar.add_button %{id: "two", name: "B2"}
      iex> InfinityOne.TabBar.delete_buttons
      iex> InfinityOne.TabBar.get_buttons
      []
      iex> InfinityOne.TabBar.get_ftabs
      [[{1, 2}, {"test", nil}]]
  """
  def delete_buttons do
    :ets.match_delete(@name, {{:button, :"_"}, :"_"})
  end

  @doc """
  Get all entries
  ## Examples
      iex> InfinityOne.TabBar.open_ftab 1, 2, "test", nil
      iex> InfinityOne.TabBar.add_button %{id: "one", name: "B1"}
      iex> InfinityOne.TabBar.add_button %{id: "two", name: "B2"}
      iex> InfinityOne.TabBar.get_all |> Enum.sort
      [[{{:button, "one"}, %{id: "one", name: "B1"}}],
      [{{:button, "two"}, %{id: "two", name: "B2"}}],
      [{{:ftab, {1, 2}}, {"test", nil}}]]
  """
  def get_all do
    :ets.match(@name, :"$1")
  end

  @doc """
  Delete all entries
  ## Examples
      iex> InfinityOne.TabBar.open_ftab 1, 2, "test", nil
      iex> InfinityOne.TabBar.add_button %{id: "one", name: "B1"}
      iex> InfinityOne.TabBar.add_button %{id: "two", name: "B2"}
      iex> InfinityOne.TabBar.delete_all
      iex> InfinityOne.TabBar.get_all
      []
  """
  def delete_all do
    :ets.match_delete(@name, :"$1")
  end
end
|
lib/infinity_one/tab_bar.ex
| 0.831691 | 0.487063 |
tab_bar.ex
|
starcoder
|
defmodule ReIntegrations.Orulo.TagMapper do
  @moduledoc """
  Module to map orulo's tags into our tags.
  """
  alias ReIntegrations.{
    Orulo.BuildingPayload
  }

  @doc """
  Maps the `"features"` list of an Orulo building payload to internal tag
  slugs. Unknown features are dropped and duplicate slugs removed; any other
  input yields `[]`.
  """
  def map_tags(%BuildingPayload{} = %{payload: %{"features" => features}}) do
    features
    |> Enum.reduce([], &convert_tag(&1, &2))
    # BUGFIX: Enum.dedup/1 only collapses *consecutive* duplicates, but
    # features mapping to the same slug need not be adjacent (e.g. "Fitness"
    # and "Fitness ao ar livre" separated by another feature). Enum.uniq/1
    # removes every duplicate regardless of position.
    |> Enum.uniq()
  end

  def map_tags(_), do: []

  # One clause per known Orulo feature name; unknown features fall through to
  # the catch-all clause and are dropped.
  defp convert_tag("Fitness", acc), do: ["academia" | acc]
  defp convert_tag("Fitness ao ar livre", acc), do: ["academia" | acc]
  defp convert_tag("Churrasqueira condominial", acc), do: ["churrasqueira" | acc]
  defp convert_tag("Espaço gourmet", acc), do: ["espaco-gourmet" | acc]
  defp convert_tag("Jardim", acc), do: ["espaco-verde" | acc]
  defp convert_tag("Praça", acc), do: ["espaco-verde" | acc]
  defp convert_tag("Trilhas e bosque", acc), do: ["espaco-verde" | acc]
  defp convert_tag("Piscina adulto", acc), do: ["piscina" | acc]
  defp convert_tag("Piscina aquecida", acc), do: ["piscina" | acc]
  defp convert_tag("Piscina com raia", acc), do: ["piscina" | acc]
  defp convert_tag("Piscina infantil", acc), do: ["piscina" | acc]
  defp convert_tag("Piscina térmica", acc), do: ["piscina" | acc]
  defp convert_tag("Playground", acc), do: ["playground" | acc]
  defp convert_tag("Quadra futebol sete", acc), do: ["quadra" | acc]
  defp convert_tag("Quadra paddle", acc), do: ["quadra" | acc]
  defp convert_tag("Quadra poliesportiva", acc), do: ["quadra" | acc]
  defp convert_tag("Quadra tênis", acc), do: ["quadra" | acc]
  defp convert_tag("Quadra volei", acc), do: ["quadra" | acc]
  defp convert_tag("Salão de festas", acc), do: ["salao-de-festas" | acc]
  defp convert_tag("Sala de jogos", acc), do: ["salao-de-jogos" | acc]
  defp convert_tag("Sauna", acc), do: ["sauna" | acc]
  defp convert_tag("Terraço", acc), do: ["terraco" | acc]
  defp convert_tag("Terraço coletivo", acc), do: ["terraco" | acc]
  defp convert_tag("Bicicletário", acc), do: ["bicicletario" | acc]
  defp convert_tag("Espaço kids", acc), do: ["brinquedoteca" | acc]
  defp convert_tag("Portaria 24 horas", acc), do: ["portaria-24-horas" | acc]
  defp convert_tag("Portaria", acc), do: ["portaria-horario-comercial" | acc]
  defp convert_tag("Porteiro eletrônico", acc), do: ["portaria-eletronica" | acc]
  defp convert_tag(_, acc), do: acc
end
|
apps/re_integrations/lib/orulo/tag_mapper.ex
| 0.513425 | 0.448245 |
tag_mapper.ex
|
starcoder
|
defmodule Gas do
  @moduledoc """
  Module for updating the current gas value and calculating
  the additional costs for the opcodes, based on dynamic data
  """
  use Bitwise
  require OpCodesUtil
  require GasCodes

  @doc """
  Subtract a given `gas_cost` from the current gas in the state
  """
  @spec update_gas(integer(), map()) :: map() | {:error, String.t(), map()}
  def update_gas(gas_cost, state) do
    curr_gas = State.gas(state)

    if curr_gas >= gas_cost do
      gas_after = curr_gas - gas_cost
      State.set_gas(gas_after, state)
    else
      # NOTE(review): despite the @spec, the error path is signalled via
      # `throw/1`, not a return value — callers apparently catch
      # {:error, "out_of_gas", state}. TODO: confirm and align spec or callers.
      throw({:error, "out_of_gas", state})
    end
  end

  @doc """
  Get the initial gas cost for a given `op_code`
  """
  @spec op_gas_cost(char()) :: integer()
  def op_gas_cost(op_code) do
    # The static gas price is the 4th element of the opcode table entry.
    {_name, _pushed, _popped, op_gas_price} = OpCodesUtil.opcode(op_code)
    op_gas_price
  end

  @doc """
  Calculate the fee for the expanded memory
  """
  @spec memory_gas_cost(map(), map()) :: integer()
  def memory_gas_cost(state_with_ops, state_without) do
    # Charge only when the op expanded memory: cost(after) - cost(before),
    # where the per-size cost is GMEMORY * words + floor(words^2 / 512)
    # (presumably the yellow-paper memory fee formula — confirm).
    words1 = Memory.memory_size_words(state_with_ops)

    case Memory.memory_size_words(state_without) do
      ^words1 ->
        # Memory size unchanged: no extra fee.
        0

      words2 ->
        first = round(GasCodes._GMEMORY() * words1 + Float.floor(words1 * words1 / 512))
        second = round(GasCodes._GMEMORY() * words2 + Float.floor(words2 * words2 / 512))
        first - second
    end
  end

  @doc """
  Calculate gas cost for a given opcode, based on some dynamic data
  """
  @spec dynamic_gas_cost(String.t(), map()) :: integer()
  def dynamic_gas_cost("CALL", state) do
    dynamic_call_cost(state)
  end

  def dynamic_gas_cost("DELEGATECALL", state) do
    dynamic_call_cost(state)
  end

  # Copy ops: GCOPY per 32-byte word copied; the byte length is taken from the
  # stack (3rd element for CALLDATACOPY/CODECOPY, 4th for EXTCODECOPY).
  def dynamic_gas_cost("CALLDATACOPY", state) do
    GasCodes._GCOPY() * round(Float.ceil(Stack.peek(2, state) / 32))
  end

  def dynamic_gas_cost("CODECOPY", state) do
    GasCodes._GCOPY() * round(Float.ceil(Stack.peek(2, state) / 32))
  end

  def dynamic_gas_cost("EXTCODECOPY", state) do
    GasCodes._GCOPY() * round(Float.ceil(Stack.peek(3, state) / 32))
  end

  # LOG0..LOG4: GLOGDATA per byte of logged data (length from the stack).
  def dynamic_gas_cost("LOG0", state) do
    GasCodes._GLOGDATA() * Stack.peek(1, state)
  end

  def dynamic_gas_cost("LOG1", state) do
    GasCodes._GLOGDATA() * Stack.peek(1, state)
  end

  def dynamic_gas_cost("LOG2", state) do
    GasCodes._GLOGDATA() * Stack.peek(1, state)
  end

  def dynamic_gas_cost("LOG3", state) do
    GasCodes._GLOGDATA() * Stack.peek(1, state)
  end

  def dynamic_gas_cost("LOG4", state) do
    GasCodes._GLOGDATA() * Stack.peek(1, state)
  end

  def dynamic_gas_cost("SHA3", state) do
    # GSHA3WORD per 32-byte word of hashed input.
    peeked = Stack.peek(1, state)
    GasCodes._GSHA3WORD() * round(Float.ceil(peeked / 32))
  end

  def dynamic_gas_cost("SSTORE", state) do
    # Writing a non-zero value into an empty slot costs GSSET; every other
    # write (update or clear) costs GSRESET.
    address = Stack.peek(0, state)
    value = Stack.peek(1, state)
    curr_storage = Storage.sload(address, state)

    if value != 0 && curr_storage === 0 do
      GasCodes._GSSET()
    else
      GasCodes._GSRESET()
    end
  end

  def dynamic_gas_cost("EXP", state) do
    # GEXPBYTE per byte of the exponent; `1 + log(peeked)` is the exponent's
    # byte length (see log/1 below). A zero exponent has no dynamic cost.
    case Stack.peek(1, state) do
      0 -> 0
      peeked -> GasCodes._GEXPBYTE() * (1 + log(peeked))
    end
  end

  # All other opcodes have no dynamic component.
  def dynamic_gas_cost(_op_name, _state) do
    0
  end

  # Determine the gas cost for a CALL instruction
  defp dynamic_call_cost(state) do
    # Base CALL cost, plus GCALLVALUE when the call transfers a non-zero value.
    gas_cost_0 = GasCodes._GCALL()
    gas_state = State.gas(state)
    gas = Stack.peek(0, state)
    value = Stack.peek(2, state)

    gas_cost_1 =
      gas_cost_0 +
        if value !== 0 do
          GasCodes._GCALLVALUE()
        else
          0
        end

    # Add the gas forwarded to the callee: the requested amount, capped at
    # "all but one 64th" of what remains after the base cost (presumably the
    # EIP-150 rule — confirm).
    gas_cost_1 +
      if gas_state >= gas_cost_1 do
        gas_one_64_substracted = substract_one_64(gas_state - gas_cost_1)

        if gas < gas_one_64_substracted do
          gas
        else
          gas_one_64_substracted
        end
      else
        gas
      end
  end

  # Returns floor(log_256(value)), i.e. the byte length of `value` minus one;
  # implemented by counting 8-bit right shifts until the value reaches zero.
  defp log(value) when is_integer(value) do
    log(value, -1)
  end

  defp log(0, num), do: num

  defp log(value, num) do
    log(Bitwise.bsr(value, 8), num + 1)
  end

  # value - floor(value / 64): keeps all but one 64th of `value`.
  defp substract_one_64(value) do
    one_64th = value / 64
    rounded_64th = one_64th |> Float.floor() |> round()
    value - rounded_64th
  end
end
|
apps/aevm/lib/gas.ex
| 0.771757 | 0.645267 |
gas.ex
|
starcoder
|
defmodule Ecto.Query.Util do
  @moduledoc """
  This module provides utility functions on queries.
  """

  alias Ecto.Query

  @doc """
  Look up a source with a variable.
  """
  def find_source(sources, {:&, _, [ix]}) when is_tuple(sources) do
    elem(sources, ix)
  end

  def find_source(sources, {:&, _, [ix]}) when is_list(sources) do
    Enum.at(sources, ix)
  end

  @doc """
  Look up the expression where the variable was bound.
  """
  def source_expr(%Query{from: from}, {:&, _, [0]}) do
    from
  end

  def source_expr(%Query{joins: joins}, {:&, _, [ix]}) do
    # Variable 0 is the `from`; joins are offset by one.
    Enum.at(joins, ix - 1)
  end

  @doc "Returns the source from a source tuple."
  def source({source, _model}), do: source

  @doc "Returns model from a source tuple or nil if there is none."
  def model({_source, model}), do: model

  # Converts internal type format to "typespec" format
  @doc false
  def type_to_ast({type, inner}), do: {type, [], [type_to_ast(inner)]}
  def type_to_ast(type) when is_atom(type), do: {type, [], nil}

  # Closed set of primitive types known to the query type system.
  @doc false
  defmacro types do
    ~w(boolean string integer float decimal binary datetime date time interval virtual)a
  end

  # Polymorphic (parameterized) types.
  @doc false
  defmacro poly_types do
    ~w(array)a
  end

  # Takes an elixir value and returns its ecto type
  @doc false
  def value_to_type(value, fun \\ nil)
  def value_to_type(nil, _fun), do: {:ok, nil}
  def value_to_type(value, _fun) when is_boolean(value), do: {:ok, :boolean}
  def value_to_type(value, _fun) when is_binary(value), do: {:ok, :string}
  def value_to_type(value, _fun) when is_integer(value), do: {:ok, :integer}
  def value_to_type(value, _fun) when is_float(value), do: {:ok, :float}
  def value_to_type(%Decimal{}, _fun), do: {:ok, :decimal}

  def value_to_type(%Ecto.DateTime{} = dt, fun) do
    values = Map.delete(dt, :__struct__) |> Map.values
    types = Enum.map(values, &value_to_type(&1, fun))

    # BUGFIX: the catch-all clause was missing its `_ ->` head, so the error
    # tuple was dead code appended to the previous clause body and any element
    # type other than :integer raised FunctionClauseError. Mirrors the
    # (correct) %Ecto.Interval{} clause below.
    res = Enum.find_value(types, fn
      {:ok, :integer} -> nil
      {:error, _} = err -> err
      _ -> {:error, "all datetime elements have to be a literal of integer type"}
    end)

    res || {:ok, :datetime}
  end

  def value_to_type(%Ecto.Date{} = d, fun) do
    values = Map.delete(d, :__struct__) |> Map.values
    types = Enum.map(values, &value_to_type(&1, fun))

    # BUGFIX: same missing `_ ->` head as the datetime clause.
    res = Enum.find_value(types, fn
      {:ok, :integer} -> nil
      {:error, _} = err -> err
      _ -> {:error, "all date elements have to be a literal of integer type"}
    end)

    res || {:ok, :date}
  end

  def value_to_type(%Ecto.Time{} = t, fun) do
    values = Map.delete(t, :__struct__) |> Map.values
    types = Enum.map(values, &value_to_type(&1, fun))

    # BUGFIX: same missing `_ ->` head as the datetime clause.
    res = Enum.find_value(types, fn
      {:ok, :integer} -> nil
      {:error, _} = err -> err
      _ -> {:error, "all time elements have to be a literal of integer type"}
    end)

    res || {:ok, :time}
  end

  def value_to_type(%Ecto.Interval{} = dt, fun) do
    values = Map.delete(dt, :__struct__) |> Map.values
    types = Enum.map(values, &value_to_type(&1, fun))

    res = Enum.find_value(types, fn
      {:ok, :integer} -> nil
      {:error, _} = err -> err
      _ -> {:error, "all interval elements have to be a literal of integer type"}
    end)

    if res do
      res
    else
      {:ok, :interval}
    end
  end

  def value_to_type(%Ecto.Binary{value: binary}, fun) do
    case value_to_type(binary, fun) do
      {:ok, :binary} -> {:ok, :binary}
      {:ok, :string} -> {:ok, :binary}
      {:error, _} = err -> err
      _ -> {:error, "binary/1 argument has to be a literal of binary type"}
    end
  end

  def value_to_type(%Ecto.Array{value: list, type: type}, fun) do
    # BUGFIX: the invalid-type error used to be computed with `unless` but its
    # result was discarded, so invalid element types slipped through. Return
    # the error explicitly instead.
    if type in types or (list == [] and nil?(type)) do
      elem_types = Enum.map(list, &value_to_type(&1, fun))

      res = Enum.find_value(elem_types, fn
        {:ok, elem_type} ->
          unless type_eq?(type, elem_type) do
            {:error, "all elements in array have to be of same type"}
          end
        {:error, _} = err ->
          err
      end)

      if res do
        res
      else
        {:ok, {:array, type}}
      end
    else
      {:error, "invalid type given to `array/2`: `#{inspect type}`"}
    end
  end

  # BUGFIX: removed a stray leading backtick that unbalanced the message.
  def value_to_type(value, nil), do: {:error, "unknown type of value `#{inspect value}`"}
  def value_to_type(expr, fun), do: fun.(expr)

  # Returns true if value is a query literal
  @doc false
  def literal?(nil), do: true
  def literal?(value) when is_boolean(value), do: true
  def literal?(value) when is_binary(value), do: true
  def literal?(value) when is_integer(value), do: true
  def literal?(value) when is_float(value), do: true
  def literal?(%Decimal{}), do: true
  def literal?(%Ecto.DateTime{}), do: true
  def literal?(%Ecto.Date{}), do: true
  def literal?(%Ecto.Time{}), do: true
  def literal?(%Ecto.Interval{}), do: true
  def literal?(%Ecto.Binary{}), do: true
  def literal?(%Ecto.Array{}), do: true
  def literal?(_), do: false

  # Returns true if the two types are considered equal by the type system
  # Note that this does not consider casting
  @doc false
  def type_eq?(_, :any), do: true
  def type_eq?(:any, _), do: true
  def type_eq?({outer, inner1}, {outer, inner2}), do: type_eq?(inner1, inner2)
  def type_eq?(type, type), do: true
  def type_eq?(_, _), do: false

  # Returns true if another type can be casted to the given type
  @doc false
  def type_castable_to?(:binary), do: true
  def type_castable_to?({:array, _}), do: true
  def type_castable_to?(_), do: false

  # Tries to cast the given value to the specified type.
  # If value cannot be casted just return it.
  @doc false
  def try_cast(binary, :binary) when is_binary(binary) do
    %Ecto.Binary{value: binary}
  end

  def try_cast(list, {:array, inner}) when is_list(list) do
    %Ecto.Array{value: list, type: inner}
  end

  def try_cast(value, _) do
    value
  end

  # Get var for given model in query
  def model_var(query, model) do
    sources = tuple_to_list(query.sources)
    pos = Enum.find_index(sources, &(model(&1) == model))
    {:&, [], [pos]}
  end

  # Find var in select clause. Returns a list of tuple and list indicies to
  # find the var.
  def locate_var({left, right}, var) do
    # Normalize a 2-tuple to the generic `{:{}, _, list}` tuple AST form.
    locate_var({:{}, [], [left, right]}, var)
  end

  def locate_var({:{}, _, list}, var) do
    locate_var(list, var)
  end

  def locate_var({:assoc, _, [left, _right]}, var) do
    if left == var, do: []
  end

  def locate_var(list, var) when is_list(list) do
    list = Stream.with_index(list)

    res = Enum.find_value(list, fn {elem, ix} ->
      if poss = locate_var(elem, var) do
        {poss, ix}
      else
        nil
      end
    end)

    case res do
      {poss, pos} -> [pos|poss]
      nil -> nil
    end
  end

  def locate_var(expr, var) do
    if expr == var, do: []
  end
end
|
lib/ecto/query/util.ex
| 0.852245 | 0.447641 |
util.ex
|
starcoder
|
defmodule Mix.Tasks.Surface.Init.ExPatcher do
  @moduledoc false
  # Wraps a Sourceror zipper together with the original source text and a
  # patch result (:unpatched | :patched | :cannot_patch | :file_not_found |
  # :cannot_read_file | ...), so navigation moves and a final patch can be
  # chained in a pipeline that short-circuits once a result is decided.

  alias Sourceror.Zipper, as: Z
  alias Mix.Tasks.Surface.Init.ExPatcher.Move

  @derive {Inspect, only: [:code, :result, :node]}
  defstruct [:zipper, :node, :code, :moves, :result]

  # Graphemes treated as line terminators by find_position/4.
  @line_break ["\n", "\r\n", "\r"]

  # Parses source text into a patcher positioned at the root of the AST.
  def parse_string!(code) do
    zipper = code |> Sourceror.parse_string!() |> Z.zip()

    %__MODULE__{
      code: code,
      result: :unpatched,
      zipper: zipper,
      node: Z.node(zipper)
    }
  end

  # Reads and parses a file; missing/unreadable files yield a patcher whose
  # result already records the failure.
  def parse_file!(file) do
    case File.read(file) do
      {:ok, code} ->
        parse_string!(code)

      {:error, :enoent} ->
        %__MODULE__{result: :file_not_found}

      {:error, _reason} ->
        %__MODULE__{result: :cannot_read_file}
    end
  end

  def zipper(%__MODULE__{zipper: zipper}) do
    zipper
  end

  def result(%__MODULE__{result: result}) do
    result
  end

  def to_node(%__MODULE__{node: node}) do
    node
  end

  # A patcher is "valid" while it still points at a node.
  def valid?(%__MODULE__{node: node}) do
    node != nil
  end

  def node_to_string(patcher, opts \\ []) do
    patcher |> to_node() |> Sourceror.to_string(opts)
  end

  # Debug helpers: print code / current node / zipper and return the patcher
  # unchanged so they can be dropped into a pipeline.
  def inspect_code(%__MODULE__{code: code} = patcher, label \\ "CODE") do
    IO.puts("--- BEGIN #{label} ---")
    IO.puts(code)
    IO.puts("--- END #{label} ---")
    patcher
  end

  def inspect_node(patcher, label \\ "NODE") do
    IO.puts("--- BEGIN #{label} ---")
    patcher |> to_node() |> IO.inspect()
    IO.puts("--- END #{label} ---")
    patcher
  end

  def inspect_zipper(patcher, label \\ "ZIPPER") do
    IO.puts("--- BEGIN #{label} ---")
    patcher |> zipper() |> IO.inspect()
    IO.puts("--- END #{label} ---")
    patcher
  end

  # Applies a navigation move to the zipper. Once the patcher carries any
  # result other than :unpatched it becomes a no-op; a move that returns nil
  # marks the patcher :cannot_patch.
  defp move(%__MODULE__{result: result} = patcher, _move) when result != :unpatched do
    patcher
  end

  defp move(%__MODULE__{zipper: zipper} = patcher, move) do
    zipper = move.(zipper)

    {node, result} =
      if zipper do
        {Z.node(zipper), result(patcher)}
      else
        {nil, :cannot_patch}
      end

    %__MODULE__{patcher | zipper: zipper, node: node, result: result}
  end

  # Navigation API: thin wrappers delegating to the Move module.
  def find_code(patcher, string) do
    move(patcher, &Move.find_code(&1, string))
  end

  def find_code_containing(patcher, string) do
    move(patcher, &Move.find_code_containing(&1, string))
  end

  def find_call(patcher, name, predicate \\ fn _ -> true end) do
    move(patcher, &Move.find_call(&1, name, predicate))
  end

  def enter_call(patcher, name, predicate \\ fn _ -> true end) do
    move(patcher, &Move.enter_call(&1, name, predicate))
  end

  def find_call_with_args_and_opt(patcher, name, args, opt) do
    move(patcher, &Move.find_call_with_args_and_opt(&1, name, args, opt))
  end

  def find_call_with_args(patcher, name, predicate) do
    move(patcher, &Move.find_call_with_args(&1, name, predicate))
  end

  def find_def(patcher, name, predicate \\ fn _ -> true end) do
    move(patcher, &Move.find_def(&1, name, predicate))
  end

  def find_defp(patcher, name, predicate \\ fn _ -> true end) do
    move(patcher, &Move.find_defp(&1, name, predicate))
  end

  def find_defp_with_args(patcher, name, predicate) do
    move(patcher, &Move.find_defp_with_args(&1, name, predicate))
  end

  # Convenience: find a def/defp/defmodule and descend into its body.
  def enter_def(patcher, name) do
    patcher
    |> find_def(name)
    |> body()
  end

  def enter_defp(patcher, name) do
    patcher
    |> find_defp(name)
    |> body()
  end

  def enter_defmodule(patcher) do
    enter_call(patcher, :defmodule)
  end

  def enter_defmodule(patcher, module) do
    patcher
    |> find_call_with_args_and_opt(:defmodule, [inspect(module)], [:do])
    |> body()
  end

  def last_arg(patcher) do
    move(patcher, &Move.last_arg(&1))
  end

  def body(patcher) do
    move(patcher, &Move.body(&1))
  end

  def find_keyword(patcher, keys) do
    move(patcher, &Move.find_keyword(&1, keys))
  end

  def find_keyword_value(patcher, keys) do
    patcher
    |> find_keyword(keys)
    |> value()
  end

  def value(patcher) do
    move(patcher, &Move.value(&1))
  end

  def down(patcher) do
    move(patcher, &Z.down(&1))
  end

  def last_child(patcher) do
    move(patcher, &Move.last_child(&1))
  end

  def find_child_with_code(patcher, string) do
    move(patcher, &Move.find_child_with_code(&1, string))
  end

  def find_list_item_with_code(patcher, string) do
    move(patcher, &Move.find_list_item_with_code(&1, string))
  end

  def find_list_item_containing(patcher, string) do
    move(patcher, &Move.find_list_item_containing(&1, string))
  end

  # Replaces the current node: `fun` receives the node's re-rendered source;
  # the binary form replaces it with literal code.
  def replace(patcher, fun) when is_function(fun) do
    patch(patcher, fn zipper ->
      zipper
      |> Z.node()
      |> Sourceror.to_string()
      |> fun.()
    end)
  end

  def replace(patcher, code) when is_binary(code) do
    patch(patcher, fn _zipper -> code end)
  end

  # Like replace/2, but hands `fun` the *original* source slice (by range)
  # instead of the re-rendered node, preserving the author's formatting.
  def replace_code(%__MODULE__{code: code} = patcher, fun) when is_function(fun) do
    patch(patcher, [preserve_indentation: false], fn zipper ->
      node = Z.node(zipper)
      range = Sourceror.get_range(node, include_comments: true)
      code_to_replace = get_code_by_range(code, range)
      fun.(code_to_replace)
    end)
  end

  # Inserts `string` as a sibling right after the current node's first child.
  def insert_after(patcher, string) do
    node = Sourceror.parse_string!(string)

    patch(patcher, fn zipper ->
      zipper
      |> Z.down()
      |> Z.insert_right(node)
      |> Z.up()
      |> Z.node()
      |> Sourceror.to_string()
    end)
  end

  # Prepends `key: value` to the keyword list at the current node.
  def insert_keyword(patcher, key, value) do
    keyword = build_keyword_node(key, value)

    patch(patcher, [preserve_indentation: false], fn zipper ->
      zipper
      |> Z.insert_child(keyword)
      |> Z.node()
      |> Sourceror.to_string(format: :splicing)
      |> String.trim()
    end)
  end

  # Appends `key: value` to the keyword list at the current node.
  def append_keyword(patcher, key, value) do
    keyword = build_keyword_node(key, value)

    patch(patcher, fn zipper ->
      zipper
      |> Z.down()
      |> Z.append_child(keyword)
      |> Z.up()
      |> Z.node()
      |> Sourceror.to_string()
    end)
  end

  # Appends a parsed item to the list at the current node.
  def append_list_item(patcher, string, opts \\ []) do
    opts = Keyword.merge([preserve_indentation: false], opts)
    node = Sourceror.parse_string!(string)

    patch(patcher, opts, fn zipper ->
      zipper
      |> Z.down()
      |> Z.append_child(node)
      |> Z.up()
      |> Z.node()
      |> Sourceror.to_string(to_string_opts())
    end)
  end

  # Sets `result` when `predicate` matches the patcher (e.g. "already
  # patched"); no-op if a result has already been decided. A predicate that
  # yields nil/false or an empty-node patcher leaves the patcher unchanged.
  def halt_if(%__MODULE__{result: result} = patcher, _predicate, _new_status) when result != :unpatched do
    patcher
  end

  def halt_if(patcher, predicate, result) do
    case predicate.(patcher) do
      %__MODULE__{node: nil} -> patcher
      nil -> patcher
      false -> patcher
      _ -> set_result(patcher, result)
    end
  end

  def set_result(patcher, status) do
    %__MODULE__{patcher | result: status}
  end

  def append_child(patcher, string) do
    patch(patcher, fn zipper ->
      # If possible, we try to replace the last child so the whole block
      # doesn't have to be formatted when using `Z.append_child/2`
      case Move.last_child(zipper) do
        nil ->
          node = Sourceror.parse_string!(string)

          zipper
          |> Z.append_child(node)
          |> Z.node()
          |> Sourceror.to_string()

        {{:., _, _}, _} ->
          # We can't get the range of the dot call in a qualified call like
          # `foo.bar()`, so we apply the patch to the parent. We get into this
          # situation when the qualified call has no arguments: the first child
          # will be a dot call of the form `{:., meta, [left, identifier]}`
          # where `identifier` is a bare atom, like `:compilers`. The line
          # metadata for the identifier lives in the parent call, making it
          # impossible to generate a patch for the child call alone.
          append_child_patch(zipper, string)

        last_child_zipper ->
          append_child_patch(last_child_zipper, string)
      end
    end)
  end

  # Builds a %{change, range} patch that appends `string` after the node at
  # `zipper`, re-rendering only that node.
  defp append_child_patch(zipper, string) do
    node = Z.node(zipper)
    range = Sourceror.get_range(node, include_comments: true)
    updated_code = Sourceror.parse_string!(Sourceror.to_string(node) <> string)

    change =
      zipper
      |> Z.replace(updated_code)
      |> Z.node()
      |> Sourceror.to_string()

    %{change: change, range: range}
  end

  # Core patching primitive: `fun` returns either replacement source (a
  # binary, patched over the current node's range) or a ready-made
  # %{change, range} patch; the string is patched into `code` and the patcher
  # is marked :patched. No-op once a result has been decided.
  def patch(patcher, opts \\ [], fun)

  def patch(%__MODULE__{result: result} = patcher, _opts, _fun) when result != :unpatched do
    patcher
  end

  def patch(patcher, opts, fun) do
    zipper = zipper(patcher)

    patch =
      case fun.(zipper) do
        change when is_binary(change) ->
          range = zipper |> Z.node() |> Sourceror.get_range(include_comments: true)
          Map.merge(%{change: change, range: range}, Map.new(opts))

        patch ->
          patch
      end

    updated_code = patcher |> code() |> Sourceror.patch_string([patch])
    %__MODULE__{patcher | code: updated_code, result: :patched}
  end

  def code(%__MODULE__{code: code}) do
    code
  end

  # Parses "[key: value]" and unwraps the single keyword pair node.
  defp build_keyword_node(key, value) do
    {:__block__, _, [[keyword]]} = Sourceror.parse_string!(~s([#{key}: #{value}]))
    keyword
  end

  # Reuses the project's mix.exs formatter line length when rendering.
  defp to_string_opts() do
    "mix.exs"
    |> Mix.Tasks.Format.formatter_opts_for_file()
    |> Keyword.take([:line_length])
  end

  # Extracts the source slice covered by a Sourceror range (1-based
  # line/column coordinates).
  defp get_code_by_range(code, range) do
    {_, text_after} = split_at(code, range.start[:line], range.start[:column])
    line = range.end[:line] - range.start[:line] + 1
    {text, _} = split_at(text_after, line, range.end[:column])
    text
  end

  defp split_at(code, line, col) do
    pos = find_position(code, line, col, {0, 1, 1})
    String.split_at(code, pos)
  end

  # Walks the text grapheme by grapheme, translating a line/column pair into
  # a flat character offset; stops early at end of input.
  defp find_position(_text, line, col, {pos, line, col}) do
    pos
  end

  defp find_position(text, line, col, {pos, current_line, current_col}) do
    case String.next_grapheme(text) do
      {grapheme, rest} ->
        {new_pos, new_line, new_col} =
          if grapheme in @line_break do
            if current_line == line do
              # this is the line we're lookin for
              # but it's shorter than expected
              {pos, current_line, col}
            else
              {pos + 1, current_line + 1, 1}
            end
          else
            {pos + 1, current_line, current_col + 1}
          end

        find_position(rest, line, col, {new_pos, new_line, new_col})

      nil ->
        pos
    end
  end
end
|
lib/mix/tasks/surface/surface.init/ex_patcher.ex
| 0.711832 | 0.474509 |
ex_patcher.ex
|
starcoder
|
defmodule Authoritex do
@moduledoc "Elixir authority lookup behavior"
@type authority :: {module(), String.t(), String.t()}
@type fetch_result :: %{
id: String.t(),
label: String.t(),
qualified_label: String.t(),
hint: String.t() | nil
}
@type search_result :: %{id: String.t(), label: String.t(), hint: String.t() | nil}
@doc "Returns true if the module can resolve the given identifier"
@callback can_resolve?(String.t()) :: true | false
@doc "Returns the unique short code for the authority"
@callback code() :: String.t()
@doc "Returns a human-readable description of the authority"
@callback description() :: String.t()
@doc "Fetches a label (and optional hint string) for a specified resource"
@callback fetch(String.t()) :: {:ok, :fetch_result} | {:error, term()}
@doc "Returns a list of search results (and optional hints) matching a query"
@callback search(String.t(), integer()) :: {:ok, list(:search_result)} | {:error, term()}
@doc """
Returns a label given an id.
Examples:
```
iex> Authoritex.fetch("http://id.loc.gov/authorities/names/no2011087251")
{:ok, "<NAME>"}
iex> Authoritex.fetch("http://id.loc.gov/authorities/names/unknown-id")
{:error, 404}
iex> Authoritex.fetch("http://fake.authority.org/not-a-real-thing")
{:error, :unknown_authority}
```
"""
@spec fetch(binary()) :: {:ok, fetch_result()} | {:error, term()}
def fetch(id) do
case authority_for(id) do
nil -> {:error, :unknown_authority}
{authority, _, _} -> authority.fetch(id)
end
end
@doc """
Returns search results for a given query.
Examples:
```
iex> Authoritex.search("lcnaf", "valim")
{:ok,
 [
   %{id: "info:lc/authorities/names/n2013200729", label: "<NAME>"},
   %{id: "info:lc/authorities/names/nb2006000541", label: "<NAME>"},
   %{id: "info:lc/authorities/names/n88230271", label: "<NAME>, 1919-"},
   %{id: "info:lc/authorities/names/no2019037344", label: "<NAME>"},
   %{id: "info:lc/authorities/names/no2012078919", label: "<NAME>"},
   %{id: "info:lc/authorities/names/no2001072420", label: "Lucisano-Valim, <NAME>"},
   %{id: "info:lc/authorities/names/no2011087251", label: "<NAME>"},
   %{id: "info:lc/authorities/names/no2019110111", label: "<NAME>"},
   %{id: "info:lc/authorities/names/n2014206721", label: "<NAME>"},
   %{id: "info:lc/authorities/names/no2009021335", label: "<NAME>"}
 ]}
iex> Authoritex.search("lcnaf", "blergh")
{:ok, []}
iex> Authoritex.search("blergh", "valim")
{:error, "Unknown authority: blergh"}
```
"""
@spec search(binary(), binary()) :: {:ok, list(search_result())} | {:error, term()}
def search(authority_code, query) do
  # find_authority/1 yields the implementation module or nil.
  if authority = find_authority(authority_code) do
    authority.search(query)
  else
    {:error, "Unknown authority: #{authority_code}"}
  end
end
@doc "Like `Authoritex.search/2` but with a specific maximum number of results"
@spec search(binary(), binary(), integer()) :: {:ok, list(search_result())} | {:error, term()}
def search(authority_code, query, max_results) do
  # Same dispatch as search/2, forwarding the result-count cap.
  if authority = find_authority(authority_code) do
    authority.search(query, max_results)
  else
    {:error, "Unknown authority: #{authority_code}"}
  end
end
@doc """
Lists the available authorities, returning a list of
{implementation_module, authority_code, authority_description}
Example:
```
iex> Authoritex.authorities()
[
  {Authoritex.FAST.CorporateName, "fast-corporate-name", "Faceted Application of Subject Terminology -- Corporate Name"},
  {Authoritex.FAST.EventName, "fast-event-name", "Faceted Application of Subject Terminology -- Event Name"},
  {Authoritex.FAST.Form, "fast-form", "Faceted Application of Subject Terminology -- Form/Genre"},
  {Authoritex.FAST.Geographic, "fast-geographic", "Faceted Application of Subject Terminology -- Geographic"},
  {Authoritex.FAST.Personal, "fast-personal", "Faceted Application of Subject Terminology -- Personal"},
  {Authoritex.FAST.Topical, "fast-topical", "Faceted Application of Subject Terminology -- Topical"},
  {Authoritex.FAST.UniformTitle, "fast-uniform-title", "Faceted Application of Subject Terminology -- Uniform Title"},
  {Authoritex.FAST, "fast", "Faceted Application of Subject Terminology"},
  {Authoritex.GeoNames, "geonames", "GeoNames geographical database"},
  {Authoritex.Getty.AAT, "aat", "Getty Art & Architecture Thesaurus (AAT)"},
  {Authoritex.Getty.TGN, "tgn", "Getty Thesaurus of Geographic Names (TGN)"},
  {Authoritex.Getty.ULAN, "ulan", "Getty Union List of Artist Names (ULAN)"},
  {Authoritex.Getty, "getty", "Getty Vocabularies"},
  {Authoritex.LOC.Languages, "lclang", "Library of Congress MARC List for Languages"},
  {Authoritex.LOC.Names, "lcnaf", "Library of Congress Name Authority File"},
  {Authoritex.LOC.SubjectHeadings, "lcsh", "Library of Congress Subject Headings"},
  {Authoritex.LOC, "loc", "Library of Congress Linked Data"}
]
```
"""
@spec authorities() :: list(authority())
def authorities do
  # The configured list contains bare modules; expand each into the
  # {module, code, description} triple used throughout this module.
  :authoritex
  |> Application.get_env(:authorities, [])
  |> Enum.map(&{&1, &1.code(), &1.description()})
end
# Finds the {module, code, description} tuple of the first configured
# authority able to resolve the given id; nil when none can.
@spec authority_for(binary()) :: authority() | nil
def authority_for(id) do
  Enum.find(authorities(), fn {module, _code, _description} ->
    module.can_resolve?(id)
  end)
end
# Looks up the implementation module registered under the given authority
# code; nil when no configured authority uses that code.
defp find_authority(code) do
  Enum.find_value(authorities(), fn {module, authority_code, _description} ->
    if authority_code == code, do: module
  end)
end
end
|
lib/authoritex.ex
| 0.89129 | 0.694277 |
authoritex.ex
|
starcoder
|
defmodule HTS221 do
  @moduledoc """
  Functions for working with the HTS221

  The functionality is useful for use cases where you need complete control
  over the sensor or to debug the registers. If you just want to get started
  quickly see the `HTS221.Server` module.

  To read registers you will need to provide a `HTS221.Transport.t()`.

  ```
  {:ok, transport} = HTS221.Transport.init(HTS221.Transport.I2C, bus_name: "i2c-1")
  ```

  ## Reading Registers

  After opening the transport you can read or write registers.

  ```
  {:ok, %HTS221.Temperature{} = temp} = HTS221.read_temperature(transport)
  ```

  While this library provides some common helper functions to read particular
  registers it does not provide all the registers currently. However, the
  library does provide the `HTS221.Register` protocol and
  `HTS221.read_register/2` that will allow you to provide support for any
  register.

  ## Writing Registers

  To write a register you will need to use `HTS221.write_register/2`.

  ```
  # this will provide the default register values
  ctrl_reg1 = %HTS221.CTRLReg1{}

  HTS221.write_register(transport, ctrl_reg1)
  ```

  ## Calibration

  Each HTS221 is calibrated at the factory, and the calibration that is stored
  in non-volatile memory is specific to each sensor. The calibration contains
  data about how to calculate the temperature and humidity, so you will need
  the calibration values to make those calculations.

  This library provides functionality for reading the calibration and using it
  to calculate the temperature and humidity.

  ```
  {:ok, %HTS221.Calibration{} = calibration} = HTS221.read_calibration(transport)
  {:ok, %HTS221.Temperature{} = temp} = HTS221.read_temperature(transport)

  temp_in_celsius = HTS221.calculate_temperature(temp, calibration)
  ```

  _the same steps are required for calculating humidity_
  """

  alias HTS221.{AVConf, Calibration, CTRLReg1, Humidity, Register, Temperature, Transport}

  @typedoc """
  Signed 16-bit integer
  """
  @type s16() :: -32_768..32_767

  @typedoc """
  The scale in which the temperature is calculated (default `:celsius`)
  """
  @type scale() :: :celsius | :fahrenheit | :kelvin

  @type opt() :: {:scale, scale()}

  @doc """
  Read the calibration on the HTS221

  This is useful for checking the calibration on the hardware itself or to
  fetch the calibration after any other register initialization and store it
  for future calculations.

  ```elixir
  {:ok, calibration} = HTS221.read_calibration(hts221)

  %HTS221{hts221 | calibration: calibration}
  ```
  """
  @spec read_calibration(Transport.t()) :: {:ok, Calibration.t()} | {:error, any()}
  def read_calibration(transport) do
    read_and_decode(transport, %Calibration{}, &Calibration.from_binary/1)
  end

  @doc """
  Read the `CTRL_REG1` register

  See the `HTS221.CTRLReg1` module for more information.
  """
  @spec read_ctrl_reg1(Transport.t()) :: {:ok, CTRLReg1.t()} | {:error, any()}
  def read_ctrl_reg1(transport) do
    read_and_decode(transport, %CTRLReg1{}, &CTRLReg1.from_binary/1)
  end

  @doc """
  Read the `AV_CONF` register

  See the `HTS221.AVConf` module for more information.
  """
  @spec read_av_conf(Transport.t()) :: {:ok, AVConf.t()} | {:error, any()}
  def read_av_conf(transport) do
    read_and_decode(transport, %AVConf{}, &AVConf.from_binary/1)
  end

  @doc """
  Read the values of the temperature registers

  This function does not provide the final calculation of the temperature;
  it only reads the raw values in the register.
  """
  @spec read_temperature(Transport.t()) :: {:ok, Temperature.t()} | {:error, any()}
  def read_temperature(transport) do
    read_and_decode(transport, %Temperature{}, &Temperature.from_binary/1)
  end

  @doc """
  Read the values of the humidity registers

  This function does not provide the final calculation of the humidity;
  it only reads the raw values in the register.
  """
  @spec read_humidity(Transport.t()) :: {:ok, Humidity.t()} | {:error, any()}
  def read_humidity(transport) do
    read_and_decode(transport, %Humidity{}, &Humidity.from_binary/1)
  end

  @doc """
  Read any register that implements the `HTS221.Register` protocol
  """
  @spec read_register(Transport.t(), Register.t()) :: {:ok, binary()} | {:error, any()}
  def read_register(transport, register) do
    case Register.read(register) do
      {:ok, io_request} -> Transport.send(transport, io_request)
      error -> error
    end
  end

  @doc """
  Write any register that implements the `HTS221.Register` protocol
  """
  @spec write_register(Transport.t(), Register.t()) :: :ok | {:error, any()}
  def write_register(transport, register) do
    case Register.write(register) do
      {:ok, io_request} -> Transport.send(transport, io_request)
      error -> error
    end
  end

  @doc """
  Calculate the temperature from the `HTS221.Temperature` register values

  This requires the `HTS221.Calibration`, as the temperature register values
  are the raw reading from the ADC. Each HTS221 is calibrated during
  manufacturing and contains the coefficients required to convert the ADC
  values into degrees celsius (default).
  """
  @spec calculate_temperature(Temperature.t(), Calibration.t(), [opt()]) :: float()
  def calculate_temperature(temperature, calibration, opts \\ []) do
    scale = Keyword.get(opts, :scale, :celsius)

    t0 = Calibration.t0(calibration)
    t1 = Calibration.t1(calibration)

    # Linear interpolation between the two factory calibration points
    # (t0_out, t0) and (t1_out, t1).
    slope = (t1 - t0) / (calibration.t1_out - calibration.t0_out)
    offset = t0 - slope * calibration.t0_out

    calc_temp(slope * temperature.raw + offset, scale)
  end

  @doc """
  Calculate the humidity from the `HTS221.Humidity` register values

  This requires the `HTS221.Calibration`, as the humidity register values
  are the raw reading from the ADC. Each HTS221 is calibrated during
  manufacturing and contains the coefficients required to convert the ADC
  values into percent.
  """
  @spec calculate_humidity(Humidity.t(), Calibration.t()) :: float()
  def calculate_humidity(humidity, %Calibration{} = calibration) do
    h0 = Calibration.h0(calibration)
    h1 = Calibration.h1(calibration)

    # Linear interpolation between the two factory calibration points
    # (h0_t0_out, h0) and (h1_t0_out, h1).
    slope = (h1 - h0) / (calibration.h1_t0_out - calibration.h0_t0_out)
    offset = h0 - slope * calibration.h0_t0_out

    slope * humidity.raw + offset
  end

  # Convert a temperature in degrees celsius to the requested scale.
  defp calc_temp(temp, :celsius), do: temp
  defp calc_temp(temp, :fahrenheit), do: temp * 1.8 + 32
  defp calc_temp(temp, :kelvin), do: temp + 273.15

  # Shared read-then-decode flow: issue the register read over the transport
  # and, on success, decode the raw binary with the given decoder function.
  # Errors from the transport are passed through untouched.
  defp read_and_decode(transport, register, decode) do
    case read_register(transport, register) do
      {:ok, binary} -> {:ok, decode.(binary)}
      error -> error
    end
  end
end
|
lib/hts221.ex
| 0.922255 | 0.939969 |
hts221.ex
|
starcoder
|
use Dogma.RuleBuilder

defrule Dogma.Rule.InfixOperatorPadding,
        [fn_arrow_padding: false, elixir: ">= 1.1.0"] do
  @moduledoc """
  A rule that ensures that all infix operators, except the range operator `..`,
  are surrounded by spaces.

  This rule is only enabled for Elixir v1.1 or greater.

  Good:

      foo = bar
      foo - bar
      foo || bar
      foo |> bar
      foo..bar

  Bad:

      foo=bar
      foo-bar
      foo||bar
      foo|>bar

  By default, no space is required between the `fn` and `->` of an anonymous
  function. A space can be required by setting the `fn_arrow_padding` option to
  `true`.
  """

  # Tokenizer token types that represent infix operators which must be
  # surrounded by whitespace.
  @operators [
    :comp_op,
    :comp_op2,
    :dual_op,
    :mult_op,
    :two_op,
    :arrow_op,
    :rel_op,
    :rel_op2,
    :and_op,
    :or_op,
    :match_op,
    :in_match_op,
    :assoc_op,
    :stab_op,
    :pipe_op
  ]

  # Token types that may appear as an operand of a binary minus; used to tell
  # subtraction (needs padding) apart from unary negation (does not).
  # The original list contained the :")" entry twice; duplicates are
  # redundant for `in` membership checks.
  @subtracters [
    :number,
    :identifier,
    :")"
  ]

  # Operator values exempt from the padding requirement.
  @ignore_ops [
    :-,
    :..
  ]

  def test(rule, script) do
    script.tokens
    |> Enum.map(&normalize_token/1)
    |> check_operators(rule)
  end

  # Normalize the tokenizer's variable-arity token tuples into a uniform
  # {type, line, start_col, end_col, value} shape.
  defp normalize_token({a, {b, c, d}}), do: {a, b, c, d, nil}
  defp normalize_token({a, {b, c, d}, e}), do: {a, b, c, d, e}
  defp normalize_token({a, {b, c, d}, e, _}), do: {a, b, c, d, e}
  defp normalize_token({a, {b, c, d}, e, _, _}), do: {a, b, c, d, e}

  defp check_operators(tokens, rule, acc \\ [])

  defp check_operators([], _rule, acc), do: Enum.reverse(acc)

  # Skip captures such as `&Mod.fun/1`: the `/1` looks like unpadded
  # division but is arity syntax.
  defp check_operators([
         {token, line, _, _, _},
         {:identifier, line, _, _, _},
         {:mult_op, line, _, _, :/}
         | rest], rule, acc)
       when token == :capture_op or token == :. do
    check_operators(rest, rule, acc)
  end

  # Binary minus with no space before it: the left operand's end column
  # equals the operator's start column.
  defp check_operators([
         {token1, line, _, column, _},
         {:dual_op, line, column, _, :-},
         {token3, line, _, _, _}
         | rest], rule, acc)
       when (token1 in @subtracters or token1 == :")")
       and (token3 in @subtracters or token3 == :"(") do
    check_operators(rest, rule, [error(line) | acc])
  end

  # Binary minus with no space after it: the operator's end column equals
  # the right operand's start column.
  defp check_operators([
         {token1, line, _, _, _},
         {:dual_op, line, _, column, :-},
         {token3, line, column, _, _}
         | rest], rule, acc)
       when (token1 in @subtracters or token1 == :")")
       and (token3 in @subtracters or token3 == :"(") do
    check_operators(rest, rule, [error(line) | acc])
  end

  # `fn->` is only an error when the fn_arrow_padding option is enabled.
  defp check_operators([
         {:fn, line, _, column, _},
         {:stab_op, line, column, _, _}
         | rest], rule, acc) do
    if rule.fn_arrow_padding do
      check_operators(rest, rule, [error(line) | acc])
    else
      check_operators(rest, rule, acc)
    end
  end

  # Generate one clause per operator type: operator immediately followed by
  # the next token (no trailing space).
  for operator <- @operators do
    defp check_operators([
           {unquote(operator), line, _, column, value},
           {token2, line, column, _, _}
           | rest], rule, acc)
         when not value in @ignore_ops and token2 != :eol do
      check_operators(rest, rule, [error(line) | acc])
    end
  end

  # Operator immediately preceded by the previous token (no leading space).
  for operator <- @operators do
    defp check_operators([
           {_, line, _, column, _},
           {unquote(operator), line, column, _, value}
           | rest], rule, acc)
         when not value in @ignore_ops do
      check_operators(rest, rule, [error(line) | acc])
    end
  end

  defp check_operators([_ | rest], rule, acc) do
    check_operators(rest, rule, acc)
  end

  defp error(line) do
    %Error{
      rule: __MODULE__,
      message: "Infix operators should be surrounded by whitespace.",
      line: line,
    }
  end
end
|
lib/dogma/rule/infix_operator_padding.ex
| 0.778986 | 0.489137 |
infix_operator_padding.ex
|
starcoder
|
defmodule HandimanApi.GuardianHooks do
  use Guardian.Hooks

  @moduledoc """
  HandimanApi.GuardianHooks is a simple module that hooks into guardian to prevent playback of tokens.

  In vanilla Guardian, tokens aren't tracked, so the main mechanism that exists to make a token inactive is to set the expiry and wait until it arrives.
  HandimanApi.GuardianHooks takes an active role and stores each token in the database, verifying its presence (based on its jti) when Guardian verifies the token.
  If the token is not present in the DB, the Guardian token cannot be verified.

  Provides a simple database storage and check for Guardian tokens.
  - When generating a token, the token is stored in a database.
  - When tokens are verified (channel, session or header) the database is checked for an entry that matches. If none is found, verification results in an error.
  - When logging out, or revoking the token, the corresponding entry is removed.
  """

  defmodule Token do
    @moduledoc """
    A very simple model for storing tokens generated by guardian.
    """
    use Ecto.Model

    # `jti` (JWT ID) uniquely identifies a token, so it serves as the
    # primary key instead of an autogenerated id.
    @primary_key {:jti, :string, autogenerate: false }
    schema "guardian_tokens" do
      field :aud, :string
      field :iss, :string
      field :sub, :string
      field :exp, :integer
      field :jwt, :string
      field :claims, :string
      timestamps
    end

    @doc """
    Create a new token based on the JWT and decoded claims
    """
    def create!(claims, jwt) do
      prepared_claims = claims |> Dict.put("jwt", jwt)
      HandimanApi.GuardianHooks.repo.insert cast(%Token{}, prepared_claims, [], [:jti, :aud, :iss, :sub, :exp, :jwt])
    end

    @doc """
    Purge any tokens that are expired. This should be done periodically to keep your DB table clean of clutter
    """
    def purge_expired_tokens! do
      timestamp = Guardian.Utils.timestamp
      from(t in Token, where: t.exp < ^timestamp) |> HandimanApi.GuardianHooks.repo.delete_all
    end
  end

  # Fail fast at compile time if the required repo is not configured.
  if !Dict.get(Application.get_env(:guardian_db, HandimanApi.GuardianHooks), :repo), do: raise "HandimanApi.GuardianHooks requires a repo"

  @doc """
  After the JWT is generated, stores the various fields of it in the DB for tracking
  """
  def after_encode_and_sign(resource, type, claims, jwt) do
    case Token.create!(claims, jwt) do
      { :error, _ } -> { :error, :token_storage_failure }
      _ -> { :ok, { resource, type, claims, jwt } }
    end
  end

  @doc """
  When a token is verified, check to make sure that it is present in the DB.
  If the token is found, the verification continues; if not, an error is returned.
  """
  def on_verify(claims, jwt) do
    case repo.get_by(Token, jti: Dict.get(claims, "jti")) do
      nil -> { :error, :unauthorized }
      _token -> { :ok, { claims, jwt } }
    end
  end

  @doc """
  When logging out, or revoking a token, removes it from the database so the token may no longer be used
  """
  def on_revoke(claims, jwt) do
    jti = Dict.get(claims, "jti")
    model = repo.get_by(Token, jti: jti)
    if model do
      case repo.delete(model) do
        { :error, _ } -> { :error, :could_not_revoke_token }
        # NOTE(review): repo.delete/1 is not expected to return nil; this
        # clause looks unreachable but is kept as a defensive fallback —
        # confirm against the Ecto version in use before removing.
        nil -> { :error, :could_not_revoke_token }
        _ -> { :ok, { claims, jwt } }
      end
    else
      { :ok, { claims, jwt } }
    end
  end

  # Fetches the configured repo at runtime.
  def repo do
    Dict.get(Application.get_env(:guardian_db, HandimanApi.GuardianHooks), :repo)
  end
end
|
lib/handiman_api/guardian_hooks.ex
| 0.652131 | 0.5083 |
guardian_hooks.ex
|
starcoder
|
defmodule Rummage.Phoenix.PaginateController do
  @moduledoc """
  `PaginateController` is a controller helper in `Rummage.Phoenix` which holds
  helpers for the Paginate hook in `Rummage`. It formats params before the
  `index` action into the format expected by `Rummage.Ecto`'s default
  paginate hook, `Rummage.Ecto.Paginate`.
  """

  @doc """
  This function formats params into `rumamge` params, that are expected by
  `Rummage.Ecto`'s default paginate hook:
  ## Examples
  When `rummage` passed is an empty `Map`, it returns
  and empty `Map`:
      iex> alias Rummage.Phoenix.PaginateController
      iex> rummage = %{}
      iex> PaginateController.rummage(rummage)
      %{}
  When `rummage` passed is not an empty `Map`, but
  doesn't have a `"paginate"` key, it returns
  and empty `Map`:
      iex> alias Rummage.Phoenix.PaginateController
      iex> rummage = %{"pizza" => "eat"}
      iex> PaginateController.rummage(rummage)
      %{}
  When `rummage` passed is not an empty `Map`, but
  the value corresponding to `"paginate"` key is an empty `String`,
  it returns and empty `Map`:
      iex> alias Rummage.Phoenix.PaginateController
      iex> rummage = %{"paginate" => ""}
      iex> PaginateController.rummage(rummage)
      %{}
  When `rummage` passed is not an empty `Map`, but
  the value corresponding to `"paginate"` key is a non-empty `String`,
  it decodes the value returns it:
      iex> alias Rummage.Phoenix.PaginateController
      iex> rummage = %{"paginate" => "1"}
      iex> PaginateController.rummage(rummage)
      1
  When `rummage` passed is not an empty `Map`, but
  the value corresponding to `"paginate"` key is a `Map`,
  it returns the `Map` itself:
      iex> alias Rummage.Phoenix.PaginateController
      iex> rummage = %{"paginate" => %{"h" => "i"}}
      iex> PaginateController.rummage(rummage)
      %{"h" => "i"}
  """
  def rummage(rummage) do
    rummage
    |> Map.get("paginate")
    |> normalize()
  end

  # Normalizes the raw "paginate" value: missing or blank becomes an empty
  # map, a JSON string is decoded, and anything else passes through as-is.
  defp normalize(nil), do: %{}
  defp normalize(""), do: %{}
  defp normalize(params) when is_binary(params), do: Poison.decode!(params)
  defp normalize(params), do: params
end
|
lib/rummage_phoenix/hooks/controllers/paginate_controller.ex
| 0.738669 | 0.708112 |
paginate_controller.ex
|
starcoder
|
defmodule Exenv.Adapter do
  @moduledoc """
  Defines an Exenv adapter.

  An Exenv adapter is simply a module that adheres to the callbacks required. It
  can be as simple as:

      defmodule MyAdapter do
        use Exenv.Adapter

        @impl true
        def load(opts) do
          # load some system env vars
          :ok
        end
      end

  Some adapters may be simple and do not require a process on their own. But if
  some form of state is needed, we can also make our adapter process-based.
  If we define our adapter within the normal Exenv startup flow, this process
  will then be automatically started and supervised. Below is an example:

      defmodule MyAdapter do
        use Exenv.Adapter
        use GenServer

        @impl true
        def start_link(opts) do
          GenServer.start_link(__MODULE__, opts, name: __MODULE__)
        end

        @impl true
        def init(opts) do
          {:ok, opts}
        end

        @impl true
        def load(opts) do
          # load some system env vars
          GenServer.call(__MODULE__, {:load, opts})
        end

        @impl true
        def handle_call({:load, opts}, _from, config) do
          # load some system env vars
          {:reply, :ok, config}
        end
      end

  And that's it! We can now start using our new adapter.

  ## Reading Files

  If your adapter reads files in order to load env vars, it is recommended that
  `Exenv.read_file/2` is used. This will enable support for secrets encryption.
  If a user passes one of the following with your options, the file will
  be automatically decrypted.

      # Decrypts the file using MASTER_KEY env var
      [encryption: true]

      # Decrypts the file using a master key file
      [encryption: [master_key: "/path/to/master.key"]]
  """

  @doc """
  Starts the adapter process if required.
  """
  @callback start_link(opts :: keyword()) :: GenServer.on_start()

  @doc """
  Loads the system env vars using the adapter and options provided.
  """
  @callback load(opts :: keyword()) :: result()

  @type t :: module()

  @type config :: {Exenv.Adapter.t(), keyword()}

  @type result :: :ok | {:error, term()}

  defmacro __using__(_) do
    quote do
      @behaviour Exenv.Adapter

      # Default no-op: adapters without their own process are skipped by
      # the supervisor (`:ignore` is a valid child start result).
      @doc false
      def start_link(_opts) do
        :ignore
      end

      @doc false
      def child_spec(config) do
        %{
          id: __MODULE__,
          start: {__MODULE__, :start_link, [config]}
        }
      end

      # Default load/1 signals that the adapter has not implemented the
      # required callback.
      @doc false
      def load(_opts) do
        {:error, :not_implemented}
      end

      defoverridable Exenv.Adapter
    end
  end
end
|
lib/exenv/adapter.ex
| 0.718298 | 0.400573 |
adapter.ex
|
starcoder
|
defmodule Filtrex.Condition do
  @moduledoc """
  `Filtrex.Condition` is an abstract module for parsing conditions.
  To implement your own condition, add `use Filtrex.Condition` in your module and implement the three callbacks:
  * `parse/2` - produce a condition struct from a configuration and attributes
  * `type/0` - the description of the condition that must match the underscore version of the module's last namespace
  * `comparators/0` - the list of used query comparators for parsing params
  """

  # Built-in condition implementations. The effective list can be overridden
  # via the :conditions key of the :filtrex application env — see
  # condition_modules/0 below.
  @modules [
    Filtrex.Condition.Text,
    Filtrex.Condition.Date,
    Filtrex.Condition.DateTime,
    Filtrex.Condition.Boolean,
    Filtrex.Condition.Number
  ]

  @callback parse(Filtrex.Type.Config.t, %{inverse: boolean, column: String.t, value: any, comparator: String.t}) :: {:ok, any} | {:error, any}
  @callback type :: Atom.t
  @callback comparators :: [String.t]

  # Base fields shared by all condition structs.
  defstruct column: nil, comparator: nil, value: nil

  defmacro __using__(_) do
    quote do
      import Filtrex.Utils.Encoder
      alias Filtrex.Condition
      # parse/2 is excluded from the import so the using module can define
      # its own parse/2 callback without clashing with this dispatcher.
      import unquote(__MODULE__), except: [parse: 2]
      @behaviour Filtrex.Condition
      defstruct type: nil, column: nil, comparator: nil, value: nil, inverse: false
    end
  end

  @doc """
  Parses a condition by dynamically delegating to modules
  It delegates based on the type field of the options map (e.g. `Filtrex.Condition.Text` for the type `"text"`).
  Example Input:
  config:
  ```
  Filtrex.Condition.parse([
    %Filtrex.Type.Config{type: :text, keys: ~w(title comments)}
  ], %{
    type: string,
    column: string,
    comparator: string,
    value: string,
    inverse: boolean # inverts the comparator logic
  })
  ```
  """
  def parse(configs, options = %{type: type}) do
    case condition_module(type) do
      nil ->
        {:error, "Unknown filter condition '#{type}'"}
      module ->
        # to_existing_atom is safe here: condition_module/1 matched the type
        # string against to_string(module.type), so the atom already exists.
        type_atom = String.to_existing_atom(type)
        config = Filtrex.Type.Config.configs_for_type(configs, type_atom)
          |> Filtrex.Type.Config.config(options[:column])
        if config do
          module.parse(config, Map.delete(options, :type))
        else
          {:error, "Unknown column '#{options[:column]}'"}
        end
    end
  end

  @doc "Parses a params key into the condition type, column, and comparator"
  def param_key_type(configs, key_with_comparator) do
    result = Enum.find_value(condition_modules(), fn (module) ->
      Enum.find_value(module.comparators, fn (comparator) ->
        # Comparators appear in param keys with spaces underscored and a
        # leading underscore, e.g. "title" + "not equals" -> "title_not_equals".
        normalized = "_" <> String.replace(comparator, " ", "_")
        key = String.replace_trailing(key_with_comparator, normalized, "")
        config = Filtrex.Type.Config.config(configs, key)
        if !is_nil(config) and key in config.keys and config.type == module.type do
          {:ok, module, config, key, comparator}
        end
      end)
    end)
    if result, do: result, else: {:error, "Unknown filter key '#{key_with_comparator}'"}
  end

  @doc "Helper method to validate that a comparator is in list"
  @spec validate_comparator(atom, binary, List.t) :: {:ok, binary} | {:error, binary}
  def validate_comparator(type, comparator, comparators) do
    if comparator in comparators do
      {:ok, comparator}
    else
      {:error, parse_error(comparator, :comparator, type)}
    end
  end

  @doc "Helper method to validate whether a value is in a list"
  @spec validate_in(any, List.t) :: nil | any
  def validate_in(nil, _), do: nil
  def validate_in(_, nil), do: nil
  def validate_in(value, list) do
    cond do
      value in list -> value
      true -> nil
    end
  end

  @doc "Helper method to validate whether a value is a binary"
  @spec validate_is_binary(any) :: nil | String.t
  def validate_is_binary(value) when is_binary(value), do: value
  def validate_is_binary(_), do: nil

  @doc "Generates an error description for a generic parse error"
  @spec parse_error(any, Atom.t, Atom.t) :: String.t
  def parse_error(value, type, filter_type) do
    "Invalid #{to_string(filter_type)} #{to_string(type)} '#{value}'"
  end

  @doc "Generates an error description for a parse error resulting from an invalid value type"
  @spec parse_value_type_error(any, Atom.t) :: String.t
  def parse_value_type_error(column, filter_type) when is_binary(column) do
    "Invalid #{to_string(filter_type)} value for #{column}"
  end
  def parse_value_type_error(column, filter_type) do
    # Render the non-binary value via the Inspect protocol, then truncate
    # long representations so the error message stays readable.
    opts = struct(Inspect.Opts, [])
    iodata = Inspect.Algebra.to_doc(column, opts)
      |> Inspect.Algebra.format(opts.width)
      |> Enum.join
    if String.length(iodata) <= 15 do
      parse_value_type_error("'#{iodata}'", filter_type)
    else
      # Keep the first 13 and last 3 characters around an ellipsis.
      "'#{String.slice(iodata, 0..12)}...#{String.slice(iodata, -3..-1)}'"
      |> parse_value_type_error(filter_type)
    end
  end

  @doc "List out the available condition modules"
  def condition_modules do
    Application.get_env(:filtrex, :conditions, @modules)
  end

  # Resolves a type string (e.g. "text") to its condition module, or nil.
  defp condition_module(type) do
    Enum.find(condition_modules(), fn (module) ->
      type == to_string(module.type)
    end)
  end
end
|
lib/filtrex/condition.ex
| 0.874614 | 0.834474 |
condition.ex
|
starcoder
|
defmodule Day2 do
  @moduledoc """
  Solution to Day 2 Advent of Code puzzle 2021: Dive!

  Commands are parsed into `%{direction: d, magnitude: m}` maps, where the
  direction is the first letter of the command ("f", "d" or "u"). Magnitudes
  of any number of digits are supported (the original implementation only
  handled single-digit values correctly).
  """

  @doc """
  Reads the puzzle input from `filepath`, analyzes it both with and without
  aim, and prints the resulting horizontal/vertical position changes.
  """
  @spec run(Path.t()) :: :ok
  def run(filepath) do
    data = get_data(filepath)

    output = analyze(data, 0, 0)
    IO.puts("Change in horizontal position: #{output.forward}")
    IO.puts("Change in vertical position: #{output.depth}")

    IO.puts("------------ Calculations with aim ------------")

    output = analyze_aim(data, 0, 0, 0)
    IO.puts("Change in horizontal position: #{output.forward}")
    IO.puts("Change in vertical position: #{output.depth}")
  end

  @doc """
  Reads command data from a file and converts it into a list of command maps.
  The expected format is one command per line, e.g. "forward 5".
  """
  def get_data(filepath) do
    {:ok, data} = File.read(filepath)

    data
    |> String.split("\n", trim: true)
    |> parse_data()
  end

  @doc "Parses a list of command strings into command maps."
  def parse_data(list), do: Enum.map(list, &parse_datum/1)

  @doc """
  Parses a single command string such as "forward 5" into
  `%{direction: "f", magnitude: 5}`.

  Fix: the original sliced only the trailing characters of the string, which
  silently mis-parsed multi-digit magnitudes; splitting on the space handles
  any magnitude.
  """
  def parse_datum(bitstring) do
    [command, magnitude] = String.split(bitstring, " ", trim: true)
    %{direction: String.first(command), magnitude: String.to_integer(magnitude)}
  end

  @doc """
  Converts a list of decimal strings to integers, accumulating onto
  `int_list`. Returns a list of Integers.
  """
  def bins_to_ints(string_list, int_list) do
    string_list
    |> Enum.reduce(int_list, fn s, acc -> [String.to_integer(s) | acc] end)
    |> Enum.reverse()
  end

  @doc """
  Analyzes command data to determine the total vertical and horizontal
  position changes. Returns `%{depth: _, forward: _}`.
  """
  def analyze([], depth, forward), do: %{depth: depth, forward: forward}

  def analyze([%{direction: "f", magnitude: m} | rest], depth, forward),
    do: analyze(rest, depth, forward + m)

  def analyze([%{direction: "d", magnitude: m} | rest], depth, forward),
    do: analyze(rest, depth + m, forward)

  def analyze([%{direction: "u", magnitude: m} | rest], depth, forward),
    do: analyze(rest, depth - m, forward)

  @doc """
  Like `analyze/3`, but "up"/"down" adjust the aim, and "forward" moves
  horizontally while diving by `aim * magnitude` (part two of the puzzle).
  """
  def analyze_aim([], depth, forward, _aim), do: %{depth: depth, forward: forward}

  def analyze_aim([%{direction: "d", magnitude: m} | rest], depth, forward, aim),
    do: analyze_aim(rest, depth, forward, aim + m)

  def analyze_aim([%{direction: "u", magnitude: m} | rest], depth, forward, aim),
    do: analyze_aim(rest, depth, forward, aim - m)

  def analyze_aim([%{direction: "f", magnitude: m} | rest], depth, forward, aim),
    do: analyze_aim(rest, depth + aim * m, forward + m, aim)
end
|
lib/day_2/day_2.ex
| 0.825449 | 0.751489 |
day_2.ex
|
starcoder
|
defmodule Solution do
  @moduledoc """
  Solving the next problem:
  https://www.hackerrank.com/challenges/flatland-space-stations/problem
  """

  @doc """
  Hackerrank entry point: reads the problem input from stdin and prints the
  maximum distance from any city to its nearest space station.
  """
  def main do
    [n, m] = IO.gets("") |> String.trim() |> String.split(" ") |> Enum.map(&String.to_integer/1)
    cities_with_sst = IO.gets("") |> String.trim() |> String.split(" ") |> Enum.map(&String.to_integer/1)
    IO.puts(solution([n, m], cities_with_sst))
  end

  @doc """
  Dev entry point to solve the problem locally with explicit arguments.

  Receives `[n, m]` and the list of cities with stations, where `n` is the
  total number of cities and `m` is the number of cities that contain a
  space station.

  GOAL: determine the maximum distance from any city to its nearest space
  station and print it.
  """
  def main_dev([total_cities, num_cities_with_sst], cities_with_sst) do
    IO.puts(solution([total_cities, num_cities_with_sst], cities_with_sst))
  end

  # The answer is the maximum, over all cities, of the distance to the
  # nearest station. Cities that host a station contribute 0, so no special
  # membership check is needed (the original built an unused tuple field and
  # a redundant Enum.member? lookup for this).
  defp solution([total_cities, _num_cities_with_sst], cities_with_sst) do
    0..(total_cities - 1)
    |> Enum.map(&nearest_station_distance(&1, cities_with_sst))
    |> Enum.max()
  end

  # Distance from `city` to the closest station (0 when the city has one).
  defp nearest_station_distance(city, cities_with_sst) do
    cities_with_sst
    |> Enum.map(&abs(city - &1))
    |> Enum.min()
  end
end
# Solution.main() — uncomment to run in HackerRank mode (reads from stdin)

# Expected output: 2 (city 2 is two units from the stations at 0 and 4)
Solution.main_dev(
  [5, 2],
  # 0 and 4 cities have a space station
  [0, 4]
)

# Expected output: 0 (every city has its own space station)
Solution.main_dev(
  [6, 6],
  [0, 1, 2, 4, 3, 5]
)
|
hackerank_elixir/easy/flatland_space_stations.ex
| 0.794505 | 0.62042 |
flatland_space_stations.ex
|
starcoder
|
defmodule NavigationHistory.Tracker do
  @moduledoc """
  A plug to track user navigation history.

  Visited paths are stored in the session by the plug and can then be read
  back with `NavigationHistory.last_path` and `NavigationHistory.last_paths`.
  The session must already be fetched with `Plug.Conn.fetch_session/1`.

  ## Options

  * `excluded_paths` - The list of paths which should not be tracked.
    For example, `/login` or similar for a lot of apps.
    Defaults to `[]`
  * `included_paths` - Limits the list of paths to be tracked when set.
    `excluded_paths` is ignored if set.
  * `methods` - The list of methods which should be tracked.
    Defaults to `["GET"]`
  * `history_size` - The number of history entries to track in `last_paths`.
    Defaults to `10`.
  * `key` - The key used to track the navigation.
    It can also be passed to `last_path` and `last_paths` to retrieve the
    paths for the relevant key.
    Defaults to `"default"`.

  ## Examples

  ```elixir
  plug NavigationHistory.Tracker, excluded_paths: ["/login", ~r(/admin.*)], history_size: 5
  ```
  """

  @behaviour Plug

  def init(opts) do
    opts
    |> Keyword.put_new(:excluded_paths, [])
    |> Keyword.put_new(:methods, ~w(GET))
    |> Keyword.put_new(:history_size, 10)
  end

  def call(conn, opts) do
    path = full_path(conn)

    if trackable?(conn.method, path, opts) do
      track(conn, path, opts)
    else
      conn
    end
  end

  # Request path plus the query string, when one is present.
  defp full_path(%{request_path: path, query_string: ""}), do: path
  defp full_path(%{request_path: path, query_string: query}), do: path <> "?" <> query

  defp trackable?(method, path, opts),
    do: method in opts[:methods] and path_allowed?(path, opts)

  # included_paths acts as a whitelist and takes precedence; otherwise any
  # path not matching excluded_paths is tracked.
  defp path_allowed?(path, opts) do
    case opts[:included_paths] do
      nil -> not matches_any?(path, opts[:excluded_paths])
      included -> matches_any?(path, included)
    end
  end

  # Patterns may be literal strings (exact match) or regexes.
  defp matches_any?(path, patterns) do
    Enum.any?(patterns, fn
      pattern when is_bitstring(pattern) -> path == pattern
      pattern -> String.match?(path, pattern)
    end)
  end

  defp track(conn, path, opts) do
    case NavigationHistory.last_paths(conn, opts) do
      # Most recent entry is already this path: re-save history unchanged.
      [^path | _] = last_paths ->
        NavigationHistory.Session.save_paths(conn, last_paths, opts)

      last_paths ->
        paths = trim([path | last_paths], opts[:history_size])
        NavigationHistory.Session.save_paths(conn, paths, opts)
    end
  end

  # Drop the oldest entry when the history grows past the configured size.
  defp trim(paths, max) when length(paths) > max, do: List.delete_at(paths, length(paths) - 1)
  defp trim(paths, _max), do: paths
end
|
lib/navigation_history/tracker.ex
| 0.853211 | 0.859605 |
tracker.ex
|
starcoder
|
defmodule Asteroid.Token.DeviceCode do
  import Asteroid.Utils
  alias Asteroid.Context
  alias Asteroid.Client
  alias Asteroid.OAuth2
  alias Asteroid.Token

  @moduledoc """
  Device code structure

  Note that this token is searched using 2 keys:
  - the device code (when polling)
  - the user code, to mark this token as `"granted"` or `"denied"` upon completion of
  the user web flow

  Thus, even though the primary key is the device code id, the user code should probably be
  indexed as well.

  ## Field naming

  The `data` field holds the token data. The following field names are standard and are used
  by Asteroid:
  - `"exp"`: the expiration unix timestamp of the device code
  - `"clid"`: the `t:Asteroid.Client.id()` of the device code
  - `"sjid"`: the `t:Asteroid.Subject.id()` of the user that has accepted the request,
  after entering the user code in the web flow
  - `"requested_scopes"`: a list of `OAuth2Utils.Scope.scope()` requested scopes
  - `"granted_scopes"`: a list of `OAuth2Utils.Scope.scope()` granted scopes
  - `"status"`: a `String.t()` for the status of the device code. Mandatory, one of:
    - `"authorization_pending"`: the user has not yet granted or denied the request (the default
    value upon the token's creation)
    - `"granted"`: the user has granted the request
    - `"denied"`: the user has denied the request
  """

  @enforce_keys [:id, :user_code, :serialization_format, :data]

  defstruct [:id, :user_code, :serialization_format, :data]

  @type t :: %__MODULE__{
          id: OAuth2.DeviceAuthorization.device_code(),
          user_code: binary() | nil,
          serialization_format: Asteroid.Token.serialization_format(),
          data: map()
        }

  @doc ~s"""
  Creates a new device code

  ## Options

  - `:id`: `String.t()` id, **mandatory**
  - `:user_code`: the `t:Asteroid.OAuth2.DeviceAuthorization.user_code/0` associated to the
  device code. **Mandatory**
  - `:data`: a data `map()`
  - `:serialization_format`: an `t:Asteroid.Token.serialization_format/0` atom, defaults to
  `:opaque`
  """
  @spec new(Keyword.t()) :: t()
  def new(opts) do
    %__MODULE__{
      # Both :id and :user_code are mandatory; missing values raise immediately.
      id: opts[:id] || raise("Missing device code"),
      user_code: opts[:user_code] || raise("Missing user code"),
      data: opts[:data] || %{},
      serialization_format: opts[:serialization_format] || :opaque
    }
  end

  @doc """
  Generates a new device code

  ## Options

  - `:user_code`: the user code to be presented to the user. **Mandatory**
  - `:serialization_format`: an `t:Asteroid.Token.serialization_format/0` atom, defaults to
  `:opaque`
  """
  @spec gen_new(Keyword.t()) :: t()
  def gen_new(opts \\ []) do
    %__MODULE__{
      # Unlike new/1, the id is generated here rather than supplied by the caller.
      id: secure_random_b64(),
      user_code: opts[:user_code] || raise("Missing user code"),
      data: %{},
      serialization_format:
        if(opts[:serialization_format], do: opts[:serialization_format], else: :opaque)
    }
  end

  @doc """
  Gets a device code from the device code store

  ## Options

  - `:check_active`: determines whether the validity of the device code should be checked.
  Defaults to `true`. For validity checking details, see `active?/1`
  """
  @spec get(OAuth2.DeviceAuthorization.device_code(), Keyword.t()) ::
          {:ok, t()}
          | {:error, Exception.t()}
  def get(device_code_id, opts \\ [check_active: true]) do
    # NOTE(review): the [check_active: true] default only applies when `opts`
    # is omitted entirely; a caller passing e.g. `[]` skips the active check
    # because opts[:check_active] is then nil — confirm this is intended.
    code_store_module = astrenv(:object_store_device_code)[:module]
    code_store_opts = astrenv(:object_store_device_code)[:opts] || []

    case code_store_module.get(device_code_id, code_store_opts) do
      {:ok, device_code} when not is_nil(device_code) ->
        if opts[:check_active] != true or active?(device_code) do
          {:ok, device_code}
        else
          {:error,
           Token.InvalidTokenError.exception(
             sort: "device code",
             reason: "expired code",
             id: device_code_id
           )}
        end

      {:ok, nil} ->
        {:error,
         Token.InvalidTokenError.exception(
           sort: "device code",
           reason: "not found in the token store",
           id: device_code_id
         )}

      {:error, error} ->
        {:error, error}
    end
  end

  @doc """
  Gets a device code from the device code store from its associated user code

  ## Options

  - `:check_active`: determines whether the validity of the device code should be checked.
  Defaults to `true`. For validity checking details, see `active?/1`
  """
  @spec get_from_user_code(OAuth2.DeviceAuthorization.user_code(), Keyword.t()) ::
          {:ok, t()}
          | {:error, Exception.t()}
  def get_from_user_code(user_code, opts \\ [check_active: true]) do
    # NOTE(review): same default-option caveat as get/2 above.
    code_store_module = astrenv(:object_store_device_code)[:module]
    code_store_opts = astrenv(:object_store_device_code)[:opts] || []

    case code_store_module.get_from_user_code(user_code, code_store_opts) do
      {:ok, device_code} when not is_nil(device_code) ->
        if opts[:check_active] != true or active?(device_code) do
          {:ok, device_code}
        else
          {:error,
           Token.InvalidTokenError.exception(
             sort: "device code",
             reason: "inactive token",
             id: device_code.id
           )}
        end

      {:ok, nil} ->
        {:error,
         Token.InvalidTokenError.exception(
           sort: "device code",
           reason: "invalid user code",
           id: user_code
         )}

      {:error, error} ->
        {:error, error}
    end
  end

  @doc """
  Stores a device code
  """
  @spec store(t(), Context.t()) :: {:ok, t()} | {:error, any()}
  def store(device_code, ctx) do
    code_store_module = astrenv(:object_store_device_code)[:module]
    code_store_opts = astrenv(:object_store_device_code)[:opts] || []

    # Application-configured callback may transform the token before persisting it.
    device_code = astrenv(:object_store_device_code_before_store_callback).(device_code, ctx)

    case code_store_module.put(device_code, code_store_opts) do
      :ok ->
        {:ok, device_code}

      {:error, _} = error ->
        error
    end
  end

  @doc """
  Deletes a device code
  """
  @spec delete(t() | OAuth2.DeviceAuthorization.device_code()) :: :ok | {:error, any()}
  def delete(%__MODULE__{id: id}) do
    delete(id)
  end

  def delete(device_code_id) do
    code_store_module = astrenv(:object_store_device_code)[:module]
    code_store_opts = astrenv(:object_store_device_code)[:opts] || []

    code_store_module.delete(device_code_id, code_store_opts)
  end

  @doc """
  Puts a value into the `data` field of a device code

  If the value is `nil`, the device code is not changed and the filed is not added.
  """
  @spec put_value(t(), any(), any()) :: t()
  def put_value(device_code, _key, nil), do: device_code

  def put_value(device_code, key, val) do
    %{device_code | data: Map.put(device_code.data, key, val)}
  end

  @doc """
  Removes a value from the `data` field of a device code

  If the value does not exist, does nothing.
  """
  @spec delete_value(t(), any()) :: t()
  def delete_value(device_code, key) do
    %{device_code | data: Map.delete(device_code.data, key)}
  end

  @doc """
  Serializes the device code, using its inner `t:Asteroid.Token.serialization_format/0`
  information

  Supports serialization to `:opaque`.
  """
  @spec serialize(t()) :: String.t()
  def serialize(%__MODULE__{id: id, serialization_format: :opaque}) do
    # Opaque serialization is simply the token id; no other format is handled,
    # so any other serialization_format raises FunctionClauseError.
    id
  end

  @doc """
  Returns `true` if the token is active, `false` otherwise

  The following data, *when set*, are used to determine that a token is active:
  - `"exp"`: must be higher than current time
  """
  @spec active?(t()) :: boolean()
  def active?(device_code) do
    # A token with no "exp" field never expires.
    is_nil(device_code.data["exp"]) or device_code.data["exp"] > now()
  end

  @doc """
  Returns the device code lifetime

  ## Processing rules
  - If the client has the
  `"__asteroid_oauth2_flow_device_authorization_device_code_lifetime"` set to an integer,
  returns this value
  - Otherwise, if the
  #{Asteroid.Config.link_to_option(:oauth2_flow_device_authorization_device_code_lifetime)}
  configuration option is set, return this value
  - Otherwise returns `0`
  """
  @spec lifetime(Context.t()) :: non_neg_integer()
  def lifetime(%{client: client}) do
    attr = "__asteroid_oauth2_flow_device_authorization_device_code_lifetime"

    client = Client.fetch_attributes(client, [attr])

    case client.attrs[attr] do
      lifetime when is_integer(lifetime) ->
        lifetime

      _ ->
        # Global configuration fallback, defaulting to 0 when unset.
        astrenv(:oauth2_flow_device_authorization_device_code_lifetime, 0)
    end
  end
end
|
lib/asteroid/token/device_code.ex
| 0.910386 | 0.523055 |
device_code.ex
|
starcoder
|
defmodule CSQuery.OperatorOption do
  @moduledoc """
  A struct representing an option for one of the structured query syntax
  operators.

  During expression construction, options that are not recognized for a given
  operator will either be discarded (if already a `CSQuery.OperatorOption`
  struct) or be treated as a named field (if part of a keyword list). Options
  that do not have a string value will be discarded.
  """

  @typedoc "Valid option names."
  @type names :: :boost | :distance | :field

  @typedoc "Valid option values."
  @type values :: nil | String.t()

  @typedoc "The struct for `CSQuery.OperatorOption`."
  @type t :: %__MODULE__{name: names, value: values}

  @enforce_keys [:name, :value]
  defstruct [:name, :value]

  @names [:boost, :distance, :field]

  @doc """
  Provide a `CSQuery.OperatorOption` struct (or `nil`).

  `new/1` is mostly used during expression list construction. See `new/2` for
  more information.
  """
  @spec new(t) :: t
  def new(%__MODULE__{} = option), do: option

  @spec new({names, any}) :: t | nil
  def new({name, value}), do: new(name, value)

  @doc """
  Return a `CSQuery.OperatorOption` struct, or `nil` based on the `name` and
  `value` provided.

  `new/2` may return `nil` if:

  * the `name` is not in `t:names/0`;
  * the `value` is `nil`;
  * or the `value` does not conform to the `String.Chars` protocol.
  """
  def new(name, value)

  @spec new(names, nil) :: nil
  def new(_name, nil), do: nil

  @spec new(atom, any) :: nil
  def new(name, _value) when name not in @names, do: nil

  @spec new(names, any) :: t | nil
  def new(name, value) do
    # Values are normalized to strings; anything without a String.Chars
    # implementation is discarded instead of raising.
    %__MODULE__{name: name, value: to_string(value)}
  rescue
    Protocol.UndefinedError -> nil
  end

  @doc """
  Return a string value representation of the `CSQuery.OperatorOption` struct.

  The response format will be `"name=value"`. If the struct `value` is `nil`
  or does not conform to the `String.Chars` protocol, the response will be
  `""`.
  """
  def to_value(%__MODULE__{value: nil}), do: ""

  def to_value(%__MODULE__{name: name, value: value}) do
    "#{name}=#{value}"
  rescue
    Protocol.UndefinedError -> ""
  end
end
|
lib/csquery/operator_option.ex
| 0.850965 | 0.609001 |
operator_option.ex
|
starcoder
|
defmodule Ecto.Model.Schema do
  @moduledoc """
  Defines a schema for a model. A schema is a struct with associated
  meta data that is persisted to a repository.

  Every schema model is also a struct, that means that you work with models just
  like you would work with structs, to set the default values for the struct
  fields the `default` option is set in the `field` options.

  When used, it allows the following options:

  * `:primary_key` - Sets the primary key, if this option is not set a primary
    key named *id* of type *integer* will be generated. If
    set to `false` no primary key will be generated, to set
    a custom primary key give `{name, type, opts}` to the option.

  ## Reflection

  Any schema module will generate the `__schema__` function that can be used for
  runtime introspection of the schema.

  * `__schema__(:source)` - Returns the source as given to `schema/2`;
  * `__schema__(:field, field)` - Returns the options for the given field;
  * `__schema__(:field_type, field)` - Returns the type of the given field;
  * `__schema__(:field_names)` - Returns a list of all field names;
  * `__schema__(:associations)` - Returns a list of all association field names;
  * `__schema__(:association, field)` - Returns the given field's association
    reflection;
  * `__schema__(:primary_key)` - Returns the field that is the primary key or
    `nil` if there is none;
  * `__schema__(:allocate, values)` - Creates a new model struct from the given
    field values;
  * `__schema__(:keywords, model)` - Return a keyword list of all non-virtual
    fields and their values;

  ## Example

      defmodule User do
        use Ecto.Model.Schema

        schema "users" do
          field :name, :string
          field :age, :integer, default: 0
          has_many :posts, Post
        end
      end

  This module also automatically imports `from/2` from `Ecto.Query`
  as a convenience.

  ## Schema defaults

  When using the block syntax, the created model uses the usual default
  of a primary key named `:id`, of type `:integer`. This can be customized
  by passing `primary_key: false` to schema:

      schema "weather", primary_key: false do
        ...
      end

  Or by passing a tuple in the format `{field, type, opts}`:

      schema "weather", primary_key: {:custom_field, :string, []} do
        ...
      end

  Global defaults can be specified via the `@schema_defaults` attribute.
  This is useful if you want to use a different default primary key
  through your entire application.

  The supported options are:

  * `primary_key` - either `false`, or a `{field, type, opts}` tuple
  * `foreign_key_type` - sets the type for any belongs_to associations.
    This can be overrided using the `:type` option
    to the `belongs_to` statement. Defaults to
    type `:integer`

  ## Example

      defmodule MyApp.Model do
        defmacro __using__(_) do
          quote do
            @schema_defaults primary_key: {:uuid, :string, []},
                             foreign_key_type: :string
            use Ecto.Model
          end
        end
      end

      defmodule MyApp.Post do
        use MyApp.Model
        schema "posts" do
          has_many :comments, MyApp.Comment
        end
      end

      defmodule MyApp.Comment do
        use MyApp.Model
        schema "comments" do
          belongs_to :post, MyApp.Comment
        end
      end

  Any models using `MyApp.Model` will get the `:uuid` field, with type `:string`
  as the primary key.

  The `belongs_to` association on `MyApp.Comment` will also now require
  that `:post_id` be of `:string` type to reference the `:uuid` of a
  `MyApp.Post` model.
  """

  require Ecto.Query.Util, as: Util

  @doc false
  defmacro __using__(_) do
    quote do
      import Ecto.Query, only: [from: 2]
      import Ecto.Model, only: [primary_key: 1, put_primary_key: 2]
      import Ecto.Model.Schema, only: [schema: 2, schema: 3]
    end
  end

  @doc """
  Defines a schema with a source name and field definitions.
  """
  defmacro schema(source, opts \\ [], do: block)

  defmacro schema(source, opts, [do: block]) do
    quote do
      # Merge module-wide @schema_defaults with the options given to this
      # particular schema call; explicit options win.
      opts = (Module.get_attribute(__MODULE__, :schema_defaults) || [])
             |> Keyword.merge(unquote(opts))

      # Accumulator attributes filled in by field/has_many/has_one/belongs_to
      # while the schema block below is expanded.
      @ecto_fields []
      @struct_fields []
      @ecto_primary_key nil
      @ecto_source unquote(source)
      Module.register_attribute(__MODULE__, :ecto_assocs, accumulate: true)
      @ecto_foreign_key_type opts[:foreign_key_type]

      case opts[:primary_key] do
        nil ->
          Ecto.Model.Schema.field(:id, :integer, primary_key: true)
        false ->
          :ok
        {name, type, opts} ->
          Ecto.Model.Schema.field(name, type, Keyword.put(opts, :primary_key, true))
        other ->
          # NOTE(review): `other` is bound but unused here, which emits a
          # compiler warning; `_other` would silence it.
          raise ArgumentError, message: ":primary_key must be false or {name, type, opts}"
      end

      import Ecto.Model.Schema, only: [field: 1, field: 2, field: 3, has_many: 2,
        has_many: 3, has_one: 2, has_one: 3, belongs_to: 2, belongs_to: 3]

      unquote(block)

      # Un-import the DSL so the macros are not available after the block.
      import Ecto.Model.Schema, only: []

      all_fields = @ecto_fields |> Enum.reverse
      assocs = @ecto_assocs |> Enum.reverse

      # Virtual fields live on the struct but are never persisted.
      fields = Enum.filter(all_fields, fn {_, opts} -> opts[:type] != :virtual end)

      def __schema__(:source), do: @ecto_source

      # Generate the struct, queryable and __schema__ reflection clauses.
      Module.eval_quoted __MODULE__, [
        Ecto.Model.Schema.ecto_struct(@struct_fields),
        Ecto.Model.Schema.ecto_queryable(@ecto_source, __MODULE__),
        Ecto.Model.Schema.ecto_fields(fields),
        Ecto.Model.Schema.ecto_assocs(assocs, @ecto_primary_key, fields),
        Ecto.Model.Schema.ecto_primary_key(@ecto_primary_key),
        Ecto.Model.Schema.ecto_helpers(fields, all_fields, @ecto_primary_key) ]
    end
  end

  ## API

  @doc """
  Defines a field on the model schema with given name and type, will also create
  a struct field. If the type is `:virtual` it wont be persisted.

  ## Options

  * `:default` - Sets the default value on the schema and the struct;
  * `:primary_key` - Sets the field to be the primary key, the default
    primary key have to be overridden by setting its name to `nil`;
  """
  defmacro field(name, type \\ :string, opts \\ []) do
    quote do
      Ecto.Model.Schema.__field__(__MODULE__, unquote(name), unquote(type), unquote(opts))
    end
  end

  @doc ~S"""
  Indicates a one-to-many association with another model, where the current
  model has zero or more records of the other model. The other model often
  has a `belongs_to` field with the reverse association.

  Creates a virtual field called `name`. The association can be accessed via
  this field, see `Ecto.Associations.HasMany` for more information. See the
  examples to see how to perform queries on the association and
  `Ecto.Query.join/3` for joins.

  ## Options

  * `:foreign_key` - Sets the foreign key, this should map to a field on the
    other model, defaults to: `:"#{model}_id"`;
  * `:references` - Sets the key on the current model to be used for the
    association, defaults to the primary key on the model;

  ## Examples

      defmodule Post do
        schema "posts" do
          has_many :comments, Comment
        end
      end

      # Get all comments for a given post
      post = Repo.get(Post, 42)
      comments = Repo.all(post.comments)

      # The comments can come preloaded on the post struct
      [post] = Repo.all(from(p in Post, where: p.id == 42, preload: :comments))
      post.comments.all #=> [ %Comment{...}, ... ]

      # Or via an association join
      [post] = Repo.all(from(p in Post,
                      where: p.id == 42,
                  left_join: c in p.comments,
                     select: assoc(p, c)))
      post.comments.all #=> [ %Comment{...}, ... ]
  """
  defmacro has_many(name, queryable, opts \\ []) do
    quote do
      Ecto.Model.Schema.__has_many__(__MODULE__, unquote(name), unquote(queryable), unquote(opts))
    end
  end

  @doc ~S"""
  Indicates a one-to-one association with another model, where the current model
  has zero or one records of the other model. The other model often has a
  `belongs_to` field with the reverse association.

  Creates a virtual field called `name`. The association can be accessed via
  this field, see `Ecto.Associations.HasOne` for more information. Check the
  examples to see how to perform queries on the association and
  `Ecto.Query.join/3` for joins.

  ## Options

  * `:foreign_key` - Sets the foreign key, this should map to a field on the
    other model, defaults to: `:"#{model}_id"`;
  * `:references` - Sets the key on the current model to be used for the
    association, defaults to the primary key on the model;

  ## Examples

      defmodule Post do
        schema "posts" do
          has_one :permalink, Permalink
        end
      end

      # The permalink can come preloaded on the post record
      [post] = Repo.all(from(p in Post, where: p.id == 42, preload: :permalink))
      post.permalink.get #=> %Permalink{...}

      # Or via an association join
      [post] = Repo.all(from(p in Post,
                      where: p.id == 42,
                  left_join: pl in p.permalink,
                     select: assoc(p, pl)))
      post.permalink.get #=> %Permalink{...}
  """
  defmacro has_one(name, queryable, opts \\ []) do
    quote do
      Ecto.Model.Schema.__has_one__(__MODULE__, unquote(name), unquote(queryable), unquote(opts))
    end
  end

  @doc ~S"""
  Indicates a one-to-one association with another model, the current model
  belongs to zero or one records of the other model. The other model
  often has a `has_one` or a `has_many` field with the reverse association.
  Compared to `has_one` this association should be used where you would place
  the foreign key on an SQL table.

  Creates a virtual field called `name`. The association can be accessed via
  this field, see `Ecto.Associations.BelongsTo` for more information. Check the
  examples to see how to perform queries on the association and
  `Ecto.Query.join/3` for joins.

  ## Options

  * `:foreign_key` - Sets the foreign key field name, defaults to:
    `:"#{other_model}_id"`;
  * `:references` - Sets the key on the other model to be used for the
    association, defaults to: `:id`;
  * `:type` - Sets the type of `:foreign_key`. Defaults to: `:integer`;

  ## Examples

      defmodule Comment do
        schema "comments" do
          belongs_to :post, Post
        end
      end

      # The post can come preloaded on the comment record
      [comment] = Repo.all(from(c in Comment, where: c.id == 42, preload: :post))
      comment.post.get #=> %Post{...}

      # Or via an association join
      [comment] = Repo.all(from(c in Comment,
                         where: c.id == 42,
                     left_join: p in c.post,
                        select: assoc(c, p)))
      comment.post.get #=> %Post{...}
  """
  defmacro belongs_to(name, queryable, opts \\ []) do
    quote do
      Ecto.Model.Schema.__belongs_to__(__MODULE__, unquote(name), unquote(queryable), unquote(opts))
    end
  end

  ## Callbacks

  # TODO: Check that the opts are valid for the given type,
  # especially check the default value
  @doc false
  def __field__(mod, name, type, opts) do
    check_type!(type)
    fields = Module.get_attribute(mod, :ecto_fields)

    if opts[:primary_key] do
      # Only one primary key per schema is allowed.
      if pk = Module.get_attribute(mod, :ecto_primary_key) do
        raise ArgumentError, message: "primary key already defined as `#{pk}`"
      else
        Module.put_attribute(mod, :ecto_primary_key, name)
      end
    end

    clash = Enum.any?(fields, fn {prev, _} -> name == prev end)
    if clash do
      raise ArgumentError, message: "field `#{name}` was already set on schema"
    end

    struct_fields = Module.get_attribute(mod, :struct_fields)
    Module.put_attribute(mod, :struct_fields, struct_fields ++ [{name, opts[:default]}])

    # :default and :primary_key are struct-level concerns and are not kept in
    # the field metadata.
    # NOTE(review): `Dict` is deprecated in modern Elixir; kept as-is here.
    opts = Enum.reduce([:default, :primary_key], opts, &Dict.delete(&2, &1))
    Module.put_attribute(mod, :ecto_fields, [{name, [type: type] ++ opts}|fields])
  end

  @doc false
  def __has_many__(mod, name, queryable, opts) do
    # The association is exposed as a virtual field whose default value is the
    # association proxy.
    assoc = Ecto.Associations.HasMany.Proxy.__assoc__(:new, name, mod)
    __field__(mod, name, :virtual, default: assoc)

    opts = [type: :has_many, queryable: queryable] ++ opts
    Module.put_attribute(mod, :ecto_assocs, {name, opts})
  end

  @doc false
  def __has_one__(mod, name, queryable, opts) do
    assoc = Ecto.Associations.HasOne.Proxy.__assoc__(:new, name, mod)
    __field__(mod, name, :virtual, default: assoc)

    opts = [type: :has_one, queryable: queryable] ++ opts
    Module.put_attribute(mod, :ecto_assocs, {name, opts})
  end

  @doc false
  def __belongs_to__(mod, name, queryable, opts) do
    opts = opts
           |> Keyword.put_new(:references, :id)
           |> Keyword.put_new(:foreign_key, :"#{name}_id")

    foreign_key_type =
      opts[:type] || Module.get_attribute(mod, :ecto_foreign_key_type) || :integer

    # belongs_to also defines the foreign-key field itself on this schema.
    __field__(mod, opts[:foreign_key], foreign_key_type, [])

    assoc = Ecto.Associations.BelongsTo.Proxy.__assoc__(:new, name, mod)
    __field__(mod, name, :virtual, default: assoc)

    opts = [type: :belongs_to, queryable: queryable] ++ opts
    Module.put_attribute(mod, :ecto_assocs, {name, opts})
  end

  ## Helpers

  @doc false
  def ecto_struct(struct_fields) do
    quote do
      defstruct unquote(Macro.escape(struct_fields))
    end
  end

  @doc false
  def ecto_queryable(source, module) do
    quote do
      def __queryable__ do
        %Ecto.Query{from: {unquote(source), unquote(module)}}
      end
    end
  end

  @doc false
  def ecto_fields(fields) do
    quoted = Enum.map(fields, fn {name, opts} ->
      quote do
        def __schema__(:field, unquote(name)), do: unquote(opts)
        def __schema__(:field_type, unquote(name)), do: unquote(opts[:type])
      end
    end)

    field_names = Enum.map(fields, &elem(&1, 0))

    # Catch-all clauses must come last so the per-field clauses win.
    quoted ++ [ quote do
      def __schema__(:field, _), do: nil
      def __schema__(:field_type, _), do: nil
      def __schema__(:field_names), do: unquote(field_names)
    end ]
  end

  @doc false
  def ecto_assocs(assocs, primary_key, fields) do
    quoted = Enum.map(assocs, fn {name, opts} ->
      quote bind_quoted: [name: name, opts: opts, primary_key: primary_key, fields: fields] do
        pk = opts[:references] || primary_key

        # NOTE(review): `nil?/1` is pre-1.0 Elixir; modern code uses is_nil/1.
        if nil?(pk) do
          raise ArgumentError, message: "need to set `references` option for " <>
            "association when model has no primary key"
        end

        if opts[:type] in [:has_many, :has_one] do
          unless Enum.any?(fields, fn {name, _} -> pk == name end) do
            raise ArgumentError, message: "`references` option on association " <>
              "doesn't match any field on the model"
          end
        end

        refl = Ecto.Associations.create_reflection(opts[:type], name,
          __MODULE__, pk, opts[:queryable], opts[:foreign_key])

        def __schema__(:association, unquote(name)) do
          unquote(Macro.escape(refl))
        end
      end
    end)

    quote do
      def __schema__(:associations), do: unquote(Keyword.keys(assocs))
      unquote(quoted)
      def __schema__(:association, _), do: nil
    end
  end

  @doc false
  def ecto_primary_key(primary_key) do
    quote do
      def __schema__(:primary_key), do: unquote(primary_key)
    end
  end

  @doc false
  def ecto_helpers(fields, all_fields, primary_key) do
    field_names = Enum.map(fields, &elem(&1, 0))
    all_field_names = Enum.map(all_fields, &elem(&1, 0))

    quote do
      # TODO: This can be optimized
      def __schema__(:allocate, values) do
        zip = Enum.zip(unquote(field_names), values)
        pk = Dict.get(zip, unquote(primary_key))
        model = struct(__MODULE__, zip)

        if pk, do: model = Ecto.Model.put_primary_key(model, pk)
        model
      end

      def __schema__(:keywords, model, opts \\ []) do
        keep_pk = Keyword.get(opts, :primary_key, true)
        primary_key = unquote(primary_key)

        values = Map.take(model, unquote(all_field_names))

        # Keep only persisted (non-virtual) fields, optionally dropping the pk.
        Enum.filter(values, fn {field, _} ->
          __schema__(:field, field) && (keep_pk or field != primary_key)
        end)
      end
    end
  end

  defp check_type!({outer, inner}) when outer in Util.poly_types and inner in Util.types, do: :ok

  defp check_type!(type) when type in Util.types, do: :ok

  defp check_type!(type) do
    raise ArgumentError, message: "`#{Macro.to_string(type)}` is not a valid field type"
  end
end
|
lib/ecto/model/schema.ex
| 0.897914 | 0.484014 |
schema.ex
|
starcoder
|
defmodule Spotify.Albums do
  @moduledoc """
  For manipulating albums.

  [Spotify Docs](https://beta.developer.spotify.com/documentation/web-api/reference/albums/)
  """

  # This module declares only shared typespecs for album payloads; it defines
  # no functions. The concrete structs live in the aliased modules below.
  alias Spotify.Albums.AlbumFull
  alias Spotify.Artists.ArtistSimple
  alias Spotify.Tracks.TrackSimple
  alias Spotify.Pagings.Paging
  alias Spotify.{Copyright, ExternalUrls, ExternalIds, Image, Timestamp}

  @typedoc """
  The type of the album: one of `album` , `single` , or `compilation`.
  """
  @type album_type :: String.t

  @typedoc """
  The artists of the album. Each artist object includes a link in href to more detailed information about the artist.
  """
  @type artists :: [ArtistSimple.t]

  @typedoc """
  The markets in which the album is available: ISO 3166-1 alpha-2 country codes. Note that an album is
  considered available in a market when at least 1 of its tracks is available in that market.
  """
  @type available_markets :: [String.t] | nil

  @typedoc """
  The copyright statements of the album.
  """
  @type copyrights :: [Copyright.t]

  @typedoc """
  The copyright statements of the album.
  """
  @type external_ids :: ExternalIds.t

  @typedoc """
  Known external URLs for this album.
  """
  @type external_urls :: ExternalUrls.t

  @typedoc """
  A list of the genres used to classify the album. For example: `Prog Rock` , `Post-Grunge`.
  (If not yet classified, the array is empty.)
  """
  @type genres :: [String.t]

  @typedoc """
  A link to the Web API endpoint providing full details of the album.
  """
  @type href :: String.t

  @typedoc """
  The Spotify ID for the album.
  """
  @type id :: String.t

  @typedoc """
  The cover art for the album in various sizes, widest first.
  """
  @type images :: [Image.t]

  @typedoc """
  The label for the album.
  """
  @type label :: String.t

  @typedoc """
  The name of the album. In case of an album takedown, the value may be an empty string.
  """
  @type name :: String.t

  @typedoc """
  The popularity of the album. The value will be between 0 and 100, with 100 being the most popular.
  The popularity is calculated from the popularity of the album’s individual tracks.
  """
  @type popularity :: integer

  @typedoc """
  The date the album was first released, for example `1981-12-15`.
  Depending on the precision, it might be shown as `1981` or `1981-12`.
  """
  @type release_date :: String.t

  @typedoc """
  The precision with which release_date value is known: `year` , `month` , or `day`.
  """
  @type release_date_precision :: String.t

  @typedoc """
  The tracks of the album.
  """
  @type tracks :: Paging.t(TrackSimple.t)

  @typedoc """
  The object type: `album`
  """
  @type type :: String.t

  @typedoc """
  The Spotify URI for the album.
  """
  @type uri :: String.t

  @typedoc """
  The date and time the album was saved.
  """
  @type added_at :: Timestamp.t

  @typedoc """
  Information about the album.
  """
  @type album :: AlbumFull.t
end
|
lib/spotify/models/albums/albums.ex
| 0.857604 | 0.704961 |
albums.ex
|
starcoder
|
defmodule Absinthe.Blueprint.Execution do
  @moduledoc """
  Blueprint Execution Data

  The `%Absinthe.Blueprint.Execution{}` struct holds on to the core values that
  drive a document's execution.

  Here's how the execution flow works. Given a document like:

  ```
  {
    posts {
      title
      author { name }
    }
  }
  ```

  After all the validation happens, and we're actually going to execute this document,
  an `%Execution{}` struct is created. This struct is passed to each plugin's
  `before_resolution` callback, so that plugins can set initial values in the accumulator
  or context.

  Then the resolution phase walks the document until it hits the `posts` field.
  To resolve the posts field, an `%Absinthe.Resolution{}` struct is created from
  the `%Execution{}` struct. This resolution struct powers the normal middleware
  resolution process. When a field has resolved, the `:acc`, `:context`, and `:field_cache`
  values within the resolution struct are pulled out and used to update the execution.
  """

  alias Absinthe.Phase

  @type acc :: map

  defstruct [
    :adapter,
    :schema,
    fragments: %{},
    fields_cache: %{},
    validation_errors: [],
    result: nil,
    acc: %{},
    context: %{},
    root_value: %{}
  ]

  # NOTE(review): `Result.Object`/`Result.List`/`Result.Leaf` below are not
  # aliased in this module, so these specs do not resolve to
  # `Absinthe.Blueprint.Result.*` — confirm whether an
  # `alias Absinthe.Blueprint.Result` is missing.
  @type t :: %__MODULE__{
          validation_errors: [Phase.Error.t()],
          result: nil | Result.Object.t(),
          acc: acc
        }

  @type node_t ::
          Result.Object
          | Result.List
          | Result.Leaf

  @doc """
  Returns the execution struct for a blueprint, initializing it on first use.

  When the blueprint's execution has no result yet, a fresh root result object
  is built for `operation` and the adapter, schema and fragments (indexed by
  name) are copied over from the blueprint. Otherwise the existing execution
  is returned unchanged.
  """
  def get(%{execution: %{result: nil} = exec} = bp_root, operation) do
    result = %Absinthe.Blueprint.Result.Object{
      root_value: exec.root_value,
      emitter: operation
    }

    %{
      exec
      | result: result,
        adapter: bp_root.adapter,
        schema: bp_root.schema,
        fragments: Map.new(bp_root.fragments, &{&1.name, &1})
    }
  end

  def get(%{execution: exec}, _) do
    exec
  end

  @doc """
  Returns the execution's result, building a root result object for
  `operation` when none has been set yet.
  """
  def get_result(%__MODULE__{result: nil, root_value: root_value}, operation) do
    %Absinthe.Blueprint.Result.Object{
      root_value: root_value,
      emitter: operation
    }
  end

  # Bug fix: this fallback clause was previously declared with arity 3
  # (`def get_result(%{result: result}, _, _)`), which made it a *different
  # function* from `get_result/2` above — any 2-argument call on an execution
  # whose result was already set raised FunctionClauseError, and the /3 head
  # was effectively unreachable for the documented call shape. It now matches
  # `get_result/2`.
  def get_result(%{result: result}, _operation) do
    result
  end

  @doc """
  Writes a field's post-resolution `result`, `context` and accumulator back
  into the resolution/execution struct.
  """
  def update(resolution, result, context, acc) do
    %{resolution | context: context, result: result, acc: acc}
  end
end
|
lib/absinthe/blueprint/execution.ex
| 0.859796 | 0.835282 |
execution.ex
|
starcoder
|
defmodule Benchee.Conversion.Memory do
  @moduledoc """
  Unit scaling for memory converting from bytes to kilobytes and others.
  Only benchee plugins should use this code.
  """
  alias Benchee.Conversion.{Format, Scale, Unit}

  @behaviour Scale
  @behaviour Format

  # Binary (base-1024) prefixes: each unit is 1024x the previous one.
  @bytes_per_kilobyte 1024
  @bytes_per_megabyte @bytes_per_kilobyte * @bytes_per_kilobyte
  @bytes_per_gigabyte @bytes_per_megabyte * @bytes_per_kilobyte
  @bytes_per_terabyte @bytes_per_gigabyte * @bytes_per_kilobyte

  # Lookup table of all supported units, keyed by their atom name.
  @units %{
    terabyte: %Unit{
      name: :terabyte,
      magnitude: @bytes_per_terabyte,
      label: "TB",
      long: "Terabytes"
    },
    gigabyte: %Unit{
      name: :gigabyte,
      magnitude: @bytes_per_gigabyte,
      label: "GB",
      long: "Gigabytes"
    },
    megabyte: %Unit{
      name: :megabyte,
      magnitude: @bytes_per_megabyte,
      label: "MB",
      long: "Megabytes"
    },
    kilobyte: %Unit{
      name: :kilobyte,
      magnitude: @bytes_per_kilobyte,
      label: "KB",
      long: "Kilobytes"
    },
    byte: %Unit{
      name: :byte,
      magnitude: 1,
      label: "B",
      long: "Bytes"
    }
  }

  @type unit_atom :: :byte | :kilobyte | :megabyte | :gigabyte | :terabyte
  @type any_unit :: unit_atom | Unit.t()

  @doc """
  Converts a value for a specified %Unit or unit atom and converts it to the equivalent of another unit of measure.
  ## Examples
  iex> {value, unit} = Benchee.Conversion.Memory.convert({1024, :kilobyte}, :megabyte)
  iex> value
  1.0
  iex> unit.name
  :megabyte
  iex> current_unit = Benchee.Conversion.Memory.unit_for :kilobyte
  iex> {value, unit} = Benchee.Conversion.Memory.convert({1024, current_unit}, :megabyte)
  iex> value
  1.0
  iex> unit.name
  :megabyte
  """
  @spec convert({number, any_unit}, any_unit) :: Scale.scaled_number()
  def convert(number_and_unit, desired_unit) do
    Scale.convert(number_and_unit, desired_unit, __MODULE__)
  end

  # Scaling functions

  @doc """
  Scales a memory value in bytes into a larger unit if appropriate
  ## Examples
  iex> {value, unit} = Benchee.Conversion.Memory.scale(1)
  iex> value
  1.0
  iex> unit.name
  :byte
  iex> {value, unit} = Benchee.Conversion.Memory.scale(1_234)
  iex> value
  1.205078125
  iex> unit.name
  :kilobyte
  iex> {value, unit} = Benchee.Conversion.Memory.scale(11_234_567_890.123)
  iex> value
  10.463006692121736
  iex> unit.name
  :gigabyte
  iex> {value, unit} = Benchee.Conversion.Memory.scale(1_111_234_567_890.123)
  iex> value
  1.0106619519229962
  iex> unit.name
  :terabyte
  """
  def scale(memory) do
    scale_with_unit(memory, fitting_unit_atom(memory))
  end

  # Picks the largest unit whose magnitude the value reaches.
  defp fitting_unit_atom(memory) when memory >= @bytes_per_terabyte, do: :terabyte
  defp fitting_unit_atom(memory) when memory >= @bytes_per_gigabyte, do: :gigabyte
  defp fitting_unit_atom(memory) when memory >= @bytes_per_megabyte, do: :megabyte
  defp fitting_unit_atom(memory) when memory >= @bytes_per_kilobyte, do: :kilobyte
  defp fitting_unit_atom(_memory), do: :byte

  # Returns a {value, unit} tuple; missing measurements map to {nil, nil}.
  defp scale_with_unit(nil, _unit_atom), do: {nil, nil}
  defp scale_with_unit(value, unit_atom), do: {scale(value, unit_atom), unit_for(unit_atom)}

  @doc """
  Get a unit by its atom representation. If handed already a %Unit{} struct it
  just returns it.
  ## Examples
  iex> Benchee.Conversion.Memory.unit_for :gigabyte
  %Benchee.Conversion.Unit{
  name: :gigabyte,
  magnitude: 1_073_741_824,
  label: "GB",
  long: "Gigabytes"
  }
  iex> Benchee.Conversion.Memory.unit_for(%Benchee.Conversion.Unit{
  ...> name: :gigabyte,
  ...> magnitude: 1_073_741_824,
  ...> label: "GB",
  ...> long: "Gigabytes"
  ...>})
  %Benchee.Conversion.Unit{
  name: :gigabyte,
  magnitude: 1_073_741_824,
  label: "GB",
  long: "Gigabytes"
  }
  """
  def unit_for(unit) do
    Scale.unit_for(@units, unit)
  end

  @doc """
  Scales a memory value in bytes into a value in the specified unit
  ## Examples
  iex> Benchee.Conversion.Memory.scale(12345, :kilobyte)
  12.0556640625
  iex> Benchee.Conversion.Memory.scale(12345, :megabyte)
  0.011773109436035156
  iex> Benchee.Conversion.Memory.scale(123_456_789, :gigabyte)
  0.11497809458523989
  """
  def scale(count, unit) do
    Scale.scale(count, unit, __MODULE__)
  end

  @doc """
  Finds the best unit for a list of memory units. By default, chooses the most common
  unit. In case of tie, chooses the largest of the most common units.
  Pass `[strategy: :smallest]` to always return the smallest unit in the list.
  Pass `[strategy: :largest]` to always return the largest unit in the list.
  Pass `[strategy: :best]` to always return the most frequent unit in the list.
  Pass `[strategy: :none]` to always return :byte.
  ## Examples
  iex> Benchee.Conversion.Memory.best([23, 23_000, 34_000, 2_340_000]).name
  :kilobyte
  iex> Benchee.Conversion.Memory.best([23, 23_000, 34_000, 2_340_000, 3_450_000]).name
  :megabyte
  iex> Benchee.Conversion.Memory.best([23, 23_000, 34_000, 2_340_000], strategy: :smallest).name
  :byte
  iex> Benchee.Conversion.Memory.best([23, 23_000, 34_000, 2_340_000], strategy: :largest).name
  :megabyte
  """
  def best(list, opts \\ [strategy: :best]) do
    Scale.best_unit(list, __MODULE__, opts)
  end

  @doc """
  The most basic unit in which memory occur, byte.
  ## Examples
  iex> Benchee.Conversion.Memory.base_unit.name
  :byte
  """
  def base_unit do
    unit_for(:byte)
  end

  @doc """
  Formats a number as a string, with a unit label. To specify the unit, pass
  a tuple of `{value, unit_atom}` like `{1_234, :kilobyte}`
  ## Examples
  iex> Benchee.Conversion.Memory.format(45_678.9)
  "44.61 KB"
  iex> Benchee.Conversion.Memory.format(45.6789)
  "45.68 B"
  iex> Benchee.Conversion.Memory.format({45.6789, :kilobyte})
  "45.68 KB"
  iex> Benchee.Conversion.Memory.format {45.6789,
  ...> %Benchee.Conversion.Unit{
  ...> long: "Kilobytes", magnitude: 1024, label: "KB"}
  ...> }
  "45.68 KB"
  """
  def format(memory) do
    Format.format(memory, __MODULE__)
  end
end
|
lib/benchee/conversion/memory.ex
| 0.88054 | 0.537527 |
memory.ex
|
starcoder
|
defmodule BlueBird.Controller do
@moduledoc """
Defines macros used to add documentation to api routes.
## Usage
Use `api/3` in your controllers. Optionally add the `apigroup/1` or
`apigroup/2` macro to your controllers.
defmodule MyApp.Web.UserController do
use BlueBird.Controller
alias MyApp.Accounts
apigroup "Customers", "These are the routes that we'll talk about."
api :GET, "users" do
title "List users"
description "Lists all active users"
end
def index(conn, _params) do
users = Accounts.list_users()
render(conn, "index.html", users: users)
end
end
Instead of adding `use BlueBird.Controller` to every controller module, you
can also add it to the `web.ex` controller function to make it available
in every controller.
def controller do
quote do
...
use BlueBird.Controller
...
end
end
"""
alias BlueBird.{Parameter, Route}
# `use BlueBird.Controller` imports only the documentation macros into the
# calling controller; nothing else leaks into its namespace.
defmacro __using__(_) do
quote do
import BlueBird.Controller, only: [api: 3, apigroup: 1, apigroup: 2]
end
end
@doc """
Describes a route.
```
api <method> <url> do ... end
```
- `method`: HTTP method (GET, POST, PUT etc.)
- `url`: Route as defined in the Phoenix router
- `title` (optional): Title for the action
- `description` (optional): Description of the route
- `note` (optional): Note
- `warning` (optional): Warning
- `parameter` (optional): Used for path and query parameters. It takes the
name as defined in the route and the type. The third parameter is an
optional keyword list with additional options. See `BlueBird.Parameter`
for descriptions of the available options.
## Example
api :GET, "user/:id/posts/:slug" do
title "Show post"
description "Show post by user ID and post slug"
note "You should really know this."
warning "Please don't ever do this."
parameter :id, :integer
parameter :slug, :string, [
description: "This is the post slug.",
example: "whatever"
]
end
"""
defmacro api(method, path, do: block) do
# All extraction happens at compile time: the do-block AST is flattened
# into {option, args} pairs, and a clause of `api_doc/2` is generated
# that returns the %Route{} description for this method/path pair.
method_str = method_to_string(method)
metadata = extract_metadata(block)
title = extract_option(metadata, :title)
description = extract_option(metadata, :description)
note = extract_option(metadata, :note)
warning = extract_option(metadata, :warning)
parameters = extract_parameters(metadata)
quote do
def api_doc(unquote(method_str), unquote(path)) do
%Route{
title: unquote(title),
description: unquote(description),
note: unquote(note),
method: unquote(method_str),
warning: unquote(warning),
path: unquote(path),
parameters: unquote(Macro.escape(parameters))
}
end
end
end
@doc """
Defines the name and an optional description for a resource group.
BlueBird defines groups by the controller. By default, the group name
is taken from the controller name. If you want to specify a different name,
you can use this macro. You can also add a group description as a second
parameter.
## Example
apigroup "resource group name"
or
apigroup "resource group name", "description"
"""
defmacro apigroup(name, description \\ "") do
name = to_string(name)
description = to_string(description)
quote do
def api_group do
%{
name: unquote(name),
description: unquote(description)
}
end
end
end
# Normalizes the HTTP method (atom or string, any case) to an uppercase
# string, e.g. :get -> "GET".
@spec method_to_string(String.t() | atom) :: String.t()
defp method_to_string(method) when is_binary(method) or is_atom(method) do
method
|> to_string
|> String.upcase()
end
# Flattens the api do-block AST into a keyword list of {option_name, args}.
# A multi-statement block arrives as {:__block__, _, statements}; a single
# statement arrives bare; an empty block arrives as nil.
@spec extract_metadata(
{:__block__, any, {String.t(), any, [any]}}
| {String.t(), any, [any]}
| nil
) :: [{atom, any}]
defp extract_metadata({:__block__, _, data}) do
Enum.map(data, fn {name, _line, params} ->
{name, params}
end)
end
defp extract_metadata({key, _, data}), do: [{key, data}]
defp extract_metadata(nil), do: []
# Fetches a single-valued option (title, description, ...) from the
# extracted metadata. Returns nil when absent; raises when the option was
# given more than one argument.
@spec extract_option([{atom, any}], atom) :: nil | any
defp extract_option(metadata, key) do
values = metadata |> Keyword.get(key)
cond do
is_nil(values) ->
nil
length(values) == 1 ->
List.first(values)
true ->
raise ArgumentError,
"Expected single value for #{key}, got #{length(values)}"
end
end
# Collects every `parameter` declaration, preserving declaration order.
@spec extract_parameters([{atom, any}]) :: [Parameter.t()]
defp extract_parameters(metadata) do
metadata
|> Keyword.get_values(:parameter)
|> Enum.reduce([], fn param, list -> [param_to_map(param) | list] end)
|> Enum.reverse()
end
# Converts one `parameter` declaration's argument list into a %Parameter{}.
# Three-argument form: name, type, and a keyword list of extra options that
# are merged onto the struct.
@spec param_to_map([any]) :: Parameter.t()
defp param_to_map([name, type, options]) when is_list(options) do
Map.merge(
%Parameter{
name: to_string(name),
type: to_string(type)
},
Enum.into(options, %{})
)
end
# Two-argument form: just name and type.
defp param_to_map([name, type]) do
%Parameter{
name: to_string(name),
type: to_string(type)
}
end
# Three arguments where the third is not a keyword list: reject explicitly.
defp param_to_map([_, _, _]) do
raise ArgumentError,
"The parameter macro expects a keyword list as " <> "third argument."
end
# Any other arity is a usage error.
defp param_to_map(_) do
raise ArgumentError, """
Wrong number of arguments for parameter option.
Expected either two or three arguments: The name, the type
and an optional keyword list. Correct usage:
parameter :name, :type
or
parameter :name, :type, [description: "description",
optional: true]
"""
end
end
|
lib/blue_bird/controller.ex
| 0.856107 | 0.623176 |
controller.ex
|
starcoder
|
defmodule Engine.Ethereum.RootChain.AbiEventSelector do
  @moduledoc """
  We define Solidity Event selectors that help us decode returned values from function calls.
  Function names are to be used as inputs to Event Fetcher.
  Function names describe the type of the event Event Fetcher will retrieve.
  """

  @spec exit_started() :: ABI.FunctionSelector.t()
  def exit_started() do
    event_selector(
      "ExitStarted",
      ["owner", "exit_id", "utxo_pos", "output_tx"],
      [true, false, false, false],
      <<190, 31, 206, 232>>,
      [:address, {:uint, 168}, {:uint, 256}, :bytes]
    )
  end

  @spec in_flight_exit_started() :: ABI.FunctionSelector.t()
  def in_flight_exit_started() do
    event_selector(
      "InFlightExitStarted",
      ["initiator", "tx_hash", "in_flight_tx", "input_utxos_pos", "in_flight_tx_witnesses", "input_txs"],
      [true, true, false, false, false, false],
      <<150, 80, 84, 111>>,
      [
        :address,
        {:bytes, 32},
        :bytes,
        {:array, {:uint, 256}},
        {:array, :bytes},
        {:array, :bytes}
      ]
    )
  end

  @spec deposit_created() :: ABI.FunctionSelector.t()
  def deposit_created() do
    event_selector(
      "DepositCreated",
      ["depositor", "blknum", "token", "amount"],
      [true, true, true, false],
      <<24, 86, 145, 34>>,
      [:address, {:uint, 256}, :address, {:uint, 256}]
    )
  end

  @spec in_flight_exit_input_piggybacked() :: ABI.FunctionSelector.t()
  def in_flight_exit_input_piggybacked() do
    event_selector(
      "InFlightExitInputPiggybacked",
      ["exit_target", "tx_hash", "input_index"],
      [true, true, false],
      <<169, 60, 14, 155>>,
      [:address, {:bytes, 32}, {:uint, 16}]
    )
  end

  @spec in_flight_exit_output_piggybacked() :: ABI.FunctionSelector.t()
  def in_flight_exit_output_piggybacked() do
    event_selector(
      "InFlightExitOutputPiggybacked",
      ["exit_target", "tx_hash", "output_index"],
      [true, true, false],
      <<110, 205, 142, 121>>,
      [:address, {:bytes, 32}, {:uint, 16}]
    )
  end

  # Builds an event-type %ABI.FunctionSelector{}; all selectors share
  # `returns: []` and `type: :event`, so only the per-event fields vary.
  defp event_selector(function, input_names, inputs_indexed, method_id, types) do
    %ABI.FunctionSelector{
      function: function,
      input_names: input_names,
      inputs_indexed: inputs_indexed,
      method_id: method_id,
      returns: [],
      type: :event,
      types: types
    }
  end
end
|
apps/engine/lib/engine/ethereum/root_chain/abi_event_selectors.ex
| 0.710226 | 0.411909 |
abi_event_selectors.ex
|
starcoder
|
defmodule DBux.Protocol do
require Logger
@type endianness :: :little_endian | :big_endian
# Compile-time debug switch: set the DBUX_DEBUG environment variable when
# *compiling* to enable verbose unmarshalling logs.
@debug !is_nil(System.get_env("DBUX_DEBUG"))
@doc """
Unmarshalls given bitstring while interpreting data using given endianness
and signature (where signature comes in D-Bus format).
It returns `{:ok, {list_of_values, rest}}` on success.
If `unwrap_values` is `false`, `list_of_values` will contain a nested list of
`DBux.Value` structs, otherwise it will contain plain values.
For example, signature "i(ss)" will map to the following result:
`[%DBux.Value{type: :int32, value: 123},
%DBux.Value{type: :struct, value: [
%DBux.Value{type: :string, value: "sample1"},
%DBux.Value{type: :string, value: "sample2"}]]`
if unwrap_values is set to `false`, but it will map to:
`[123, {"sample1", "sample2"}]` if it is set to `true`.
`rest` will contain remaining part of the bitstring.
It returns `{:error, reason}` in case of failure.
Specifically, it returns `{:error, :bitstring_too_short}` if given bitstring
was not long enough.
"""
@spec unmarshall_bitstring(binary, endianness, String.t(), boolean) :: {:ok, {DBux.Value.list_of_values, binary}} | {:error, any}
def unmarshall_bitstring(bitstring, endianness, signature, unwrap_values) when is_binary(bitstring) and is_atom(endianness) and is_binary(signature) do
case DBux.Type.type_from_signature(signature) do
{:ok, signature_as_list} ->
unmarshall_bitstring_step(bitstring, endianness, signature_as_list, unwrap_values, [], 0)
{:error, reason} ->
{:error, reason}
end
end
@doc """
Unmarshalls given bitstring while interpreting data using given endianness
and signature (where signature comes in as nested list of types, such as
one generated by `DBux.Type.type_from_signature/1`).
It handles padding between values if they require some alignment.
It returns `{:ok, {list_of_values, rest}}` on success.
If `unwrap_values` is `false`, `list_of_values` will contain a nested list of
`DBux.Value` structs, otherwise it will contain plain values.
For example, signature "i(ss)" will map to the following result:
`[%DBux.Value{type: :int32, value: 123},
%DBux.Value{type: :struct, value: [
%DBux.Value{type: :string, value: "sample1"},
%DBux.Value{type: :string, value: "sample2"}]]`
if unwrap_values is set to `false`, but it will map to:
`[123, {"sample1", "sample2"}]` if it is set to `true`.
`rest` will contain remaining part of the bitstring.
It returns `{:error, reason}` in case of failure.
Specifically, it returns `{:error, :bitstring_too_short}` if given bitstring
was not long enough.
"""
@spec unmarshall_bitstring(binary, endianness, DBux.Type.list_of_types, boolean) :: {:ok, {DBux.Value.list_of_values, binary}} | {:error, any}
def unmarshall_bitstring(bitstring, endianness, signature, unwrap_values) when is_binary(bitstring) and is_atom(endianness) and is_list(signature) do
unmarshall_bitstring_step(bitstring, endianness, signature, unwrap_values, [], 0)
end
@doc """
Marshalls given list of values using given endianness.
It returns `{:ok, bitstring}`.
It applies padding between values if they require some alignment.
"""
@spec marshall_bitstring(DBux.Value.list_of_values, endianness) :: {:ok, binary}
def marshall_bitstring(values, endianness) when is_list(values) and is_atom(endianness) do
marshall_bitstring_step(values, endianness, << >>)
end
# Marshalls each value in turn, letting DBux.Value.marshall append the wire
# representation (including any alignment padding) to the accumulator.
defp marshall_bitstring_step([], _endianness, bitstring_acc) do
{:ok, bitstring_acc}
end
defp marshall_bitstring_step([head|tail], endianness, bitstring_acc) do
marshall_bitstring_step(tail, endianness, bitstring_acc |> DBux.Value.marshall(head, endianness))
end
# Signature fully consumed: return accumulated values plus leftover bytes.
defp unmarshall_bitstring_step(bitstring, _endianness, [], _unwrap_values, values_acc, _position_acc) do
{:ok, {values_acc, bitstring}}
end
# Bytes exhausted while types remain: the input was truncated.
defp unmarshall_bitstring_step(<< >>, _endianness, [_signature_head|_signature_rest], _unwrap_values, _values_acc, _position_acc) do
{:error, :bitstring_too_short}
end
# Skips the alignment padding required by the next type (computed from the
# running byte position), unmarshalls one value, then recurses with the
# position advanced by however many bytes were consumed (padding + value).
defp unmarshall_bitstring_step(bitstring, endianness, [signature_head|signature_rest], unwrap_values, values_acc, position_acc) do
if @debug, do: Logger.debug("unmarshall_bitstring_step: bitstring = #{inspect(bitstring)}, signature_head = #{inspect(signature_head)}, values_acc = #{inspect(values_acc)}, position_acc = #{inspect(position_acc)}")
padding_size = DBux.Type.compute_padding_size(position_acc, signature_head)
<< padding :: binary-size(padding_size), rest_after_padding :: binary >> = bitstring
if @debug, do: Logger.debug("unmarshall_bitstring_step: padding = #{inspect(padding)}, rest_after_padding = #{inspect(rest_after_padding)}")
case DBux.Value.unmarshall(rest_after_padding, endianness, signature_head, unwrap_values, 0) do
{:ok, {value, rest_after_parse}} ->
unmarshall_bitstring_step(rest_after_parse, endianness, signature_rest, unwrap_values, values_acc ++ [value], position_acc + (byte_size(bitstring) - byte_size(rest_after_parse)))
{:error, reason} ->
{:error, reason}
end
end
end
|
lib/protocol.ex
| 0.743354 | 0.680567 |
protocol.ex
|
starcoder
|
defmodule Result do
  @moduledoc """
  Documentation for Result.
  """

  @type t(error, value) :: Result.Error.t(error) | Result.Ok.t(value)

  @doc """
  See `Result.Ok.of/1`
  """
  defdelegate ok(value), to: Result.Ok, as: :of

  @doc """
  See `Result.Error.of/1`
  """
  defdelegate error(value), to: Result.Error, as: :of

  @doc """
  See `Result.Operators.from/2`
  """
  defdelegate from(arg1, arg2), to: Result.Operators

  # Operators

  @doc """
  See `Result.Operators.fold/1`
  """
  defdelegate fold(result), to: Result.Operators

  @doc """
  See `Result.Operators.map/2`
  """
  defdelegate map(result, f), to: Result.Operators

  @doc """
  See `Result.Operators.map2/3`
  """
  defdelegate map2(result1, result2, f), to: Result.Operators

  @doc """
  See `Result.Operators.map_error/2`
  """
  defdelegate map_error(result, f), to: Result.Operators

  @doc """
  See `Result.Operators.catch_error/3`
  """
  defdelegate catch_error(result, expected_error, f), to: Result.Operators

  @doc """
  See `Result.Operators.catch_all_errors/2`
  """
  defdelegate catch_all_errors(result, f), to: Result.Operators

  @doc """
  See `Result.Operators.perform/2`
  """
  defdelegate perform(result, f), to: Result.Operators

  @doc """
  See `Result.Operators.and_then/2`
  """
  defdelegate and_then(result, f), to: Result.Operators

  @doc """
  See `Result.Operators.and_then_x/2`
  """
  defdelegate and_then_x(results, f), to: Result.Operators

  @doc """
  See `Result.Operators.with_default/2`
  """
  defdelegate with_default(result, default), to: Result.Operators

  @doc """
  See `Result.Operators.resolve/1`
  """
  defdelegate resolve(result), to: Result.Operators

  @doc """
  See `Result.Operators.retry/4`
  """
  defdelegate retry(result, f, count, timeout \\ 1000), to: Result.Operators

  @doc """
  See `Result.Operators.error?/1`
  """
  defdelegate error?(result), to: Result.Operators

  @doc """
  See `Result.Operators.ok?/1`
  """
  defdelegate ok?(result), to: Result.Operators

  # Calculations

  @doc """
  See `Result.Calc.r_and/2`
  """
  defdelegate r_and(r1, r2), to: Result.Calc

  @doc """
  See `Result.Calc.r_or/2`
  """
  defdelegate r_or(r1, r2), to: Result.Calc

  @doc """
  See `Result.Calc.product/1`
  """
  defdelegate product(list), to: Result.Calc

  @doc """
  See `Result.Calc.sum/1`
  """
  defdelegate sum(list), to: Result.Calc

  @doc """
  Wraps an arbitrary term into a result tuple.

  `{:ok, _}` and `{:error, _}` tuples pass through unchanged; `nil` and
  `:error` become the empty error `{:error, nil}`; any other value is
  wrapped as an ok result.
  """
  def wrap({:ok, _} = ok_result), do: ok_result
  def wrap({:error, _} = error_result), do: error_result
  def wrap(nil), do: error()
  def wrap(:error), do: error()
  def wrap(value), do: ok(value)

  @doc """
  Recursively extracts the innermost value from nested `{:ok, _}` /
  `{:error, _}` tuples; any other term is returned as-is.
  """
  def unwrap({tag, inner}) when tag in [:ok, :error], do: unwrap(inner)
  def unwrap(value), do: value

  @doc """
  The empty error result, `{:error, nil}`.
  """
  def error(), do: {:error, nil}

  @doc """
  Returns `true` for an `{:ok, _}` tuple, `false` for anything else.
  """
  def is_ok({:ok, _}), do: true
  def is_ok(_other), do: false

  @doc """
  Returns `true` for an `{:error, _}` tuple, `false` for anything else.
  """
  def is_error({:error, _}), do: true
  def is_error(_other), do: false
end
|
lib/result.ex
| 0.827584 | 0.585931 |
result.ex
|
starcoder
|
defmodule Inspecto do
  @moduledoc """
  `Inspecto` is a utility for inspecting Ecto schemas to view their field names,
  data types, and default values.
  Ecto schema modules do not contain full information about your database
  schemas: they only contain enough information to act as a viable intermediary
  for the Elixir layer. You cannot, for example, know character length limits or
  input constraints by merely inspecting Ecto schemas. Although Ecto _migrations_
  contain a lot more of this information, they too aren't great for the purpose
  because migrations are additive with changes spread out over time, and importantly,
  there's not requirement that a database be defined via migrations.
  ## Usage
  The expected usage of this package is to call `Inspecto.summarize/2` from within
  a `@moduledoc` tag somewhere inside your app, whereever you wish a summary of
  your Ecto schemas to appear, e.g. something like this:
  ```
  defmodule MyApp.MyModel do
  @moduledoc \"\"\"
  Here is a summary of my Ecto schemas:
  \#\{ MyApp.MyModel |> Inspecto.modules() |> Inspecto.summarize(format: :html)\}
  \"\"\"
  end
  ```
  This will render a series of HTML tables at compile-time.
  > #### Warning {: .warning}
  >
  > When you call a function from inside a `@moduledoc` tag, it is evaluated
  > at *compile-time*. `Inspecto` _should_ filter out problems and avoid raising
  > errors, but if it generates a problem for any reason, be aware that you may
  > need to remove any calls to its functions from your `@moduledoc` tags.
  """
  alias Inspecto.Schema
  require EEx

  @local_path Application.app_dir(:inspecto, ["priv/resources"])

  @doc """
  Summarizes the given Ecto schema modules using the format provided.
  It is necessary to supply a list of modules.
  ## Options
  - `:format` controls the format of the output. Supported values: `:html`, `:raw` (default).
  The `:raw` format returns information as a list of `Inspecto.Schema` structs.
  The `:html` format returns information as an HTML table. This is suitable for
  use inside a `@moduledoc` attribute.
  Other formats may be supported in the future (e.g. Mermaid JS, but it's not yet
  compatible with how documentation generation strips out newlines).
  ## Examples
  iex> MyApp.Schemas |> Inspecto.modules() |> Inspecto.summarize(format: :raw)
  [
  %Inspecto.Schema{...},
  ...
  ]
  """
  @spec summarize(modules :: [module()], opts :: Keyword.t()) :: String.t() | [Schema.t()]
  def summarize(modules, opts \\ []) when is_list(modules) do
    format = Keyword.get(opts, :format, :raw)

    modules
    |> Enum.flat_map(fn module ->
      # Silently drop anything that is not an inspectable Ecto schema.
      case Schema.inspect(module) do
        {:ok, schema} -> [schema]
        {:invalid_module, _} -> []
      end
    end)
    |> do_format(format)
  end

  defp do_format(schemas, :raw), do: schemas
  defp do_format(schemas, :html), do: table_wrapper(schemas)

  EEx.function_from_file(
    :defp,
    :table_wrapper,
    "#{@local_path}/table_wrapper.eex",
    [:schemas]
  )

  EEx.function_from_file(:defp, :table, "#{@local_path}/table.eex", [:schema])

  # Renders a module name without the leading "Elixir." prefix.
  defp stringify(module) do
    module
    |> to_string()
    |> String.trim_leading("Elixir.")
  end

  @doc """
  This is a convenience function which retrieves a list of modules in the given
  "namespace". This is a useful way of gathering up modules that define Ecto
  schemas.
  ## Examples
  iex> Inspecto.modules(MyApp)
  [MyApp.Foo, MyApp.Bar, ...]
  """
  def modules(namespace) when is_atom(namespace) do
    prefix = to_string(namespace)

    for {name, _file, _loaded} <- :code.all_available(),
        String.starts_with?(to_string(name), prefix) do
      List.to_atom(name)
    end
  end
end
|
lib/inspecto.ex
| 0.853088 | 0.841891 |
inspecto.ex
|
starcoder
|
defmodule Versioning.Adapter.Semantic do
@moduledoc """
A versioning adapter for semantic-based versions.
Under the hood, this adapter uses the `Version` module. For details on the rules
that are used for parsing and comparison, please see the `Version` module.
## Example
defmodule MyApp.Versioning do
use Versioning.Schema, adapter: Versioning.Adapter.Semantic
version "1.0.0" do
type "Post" do
change(MyApp.Change)
end
end
end
"""
@behaviour Versioning.Adapter
@doc """
Parses semantic based versions.
## Example
iex> Versioning.Adapter.Semantic.parse("1.0.0")
{:ok, #Version<1.0.0>}
iex> Versioning.Adapter.Semantic.parse("foo")
:error
"""
@impl Versioning.Adapter
@spec parse(binary() | Version.t()) :: :error | {:ok, Version.t()}
def parse(version) when is_binary(version) do
Version.parse(version)
end
def parse(%Version{} = version) do
{:ok, version}
end
def parse(_) do
:error
end
@doc """
Compares semantic based versions.
Returns `:gt` if the first verison is greater than the second, and `:lt` for
vice-versa. If the two versions are equal, `:eq` is returned. Returns `:error`
if the version cannot be parsed.
## Example
iex> Versioning.Adapter.Semantic.compare("1.0.1", "1.0.0")
:gt
iex> Versioning.Adapter.Semantic.compare("1.0.0", "1.0.1")
:lt
iex> Versioning.Adapter.Semantic.compare("1.0.1", "1.0.1")
:eq
iex> Versioning.Adapter.Semantic.compare("foo", "bar")
:error
"""
@impl Versioning.Adapter
@spec compare(binary() | Version.t(), binary() | Version.t()) :: :eq | :error | :gt | :lt
def compare(version1, version2) when is_binary(version1) and is_binary(version2) do
# A failed parse falls through the `with` and is returned as :error.
with {:ok, version1} <- parse(version1),
{:ok, version2} <- parse(version2) do
compare(version1, version2)
end
end
def compare(%Version{} = version1, %Version{} = version2) do
Version.compare(version1, version2)
rescue
# Normalize any error raised by Version.compare/2 into the :error atom
# promised by this adapter's contract.
_ -> :error
end
def compare(_version1, _version2) do
:error
end
end
|
lib/versioning/adapter/semantic.ex
| 0.880887 | 0.438244 |
semantic.ex
|
starcoder
|
defmodule AWS.IoT do
@moduledoc """
AWS IoT
AWS IoT provides secure, bi-directional communication between
Internet-connected things (such as sensors, actuators, embedded devices, or
smart appliances) and the AWS cloud. You can discover your custom IoT-Data
endpoint to communicate with, configure rules for data processing and
integration with other services, organize resources associated with each
thing (Thing Registry), configure logging, and create and manage policies
and credentials to authenticate things.
For more information about how AWS IoT works, see the [Developer
Guide](http://docs.aws.amazon.com/iot/latest/developerguide/aws-iot-how-it-works.html).
"""
@doc """
Accepts a pending certificate transfer. The default state of the
certificate is INACTIVE.
To check for pending certificate transfers, call `ListCertificates` to
enumerate your certificates.
"""
def accept_certificate_transfer(client, certificate_id, input, options \\ []) do
url = "/accept-certificate-transfer/#{URI.encode(certificate_id)}"
headers = []
request(client, :patch, url, headers, input, options, nil)
end
@doc """
Adds a thing to a thing group.
"""
def add_thing_to_thing_group(client, input, options \\ []) do
url = "/thing-groups/addThingToThingGroup"
headers = []
request(client, :put, url, headers, input, options, nil)
end
@doc """
Associates a group with a continuous job. The following criteria must be
met:
<ul> <li> The job must have been created with the `targetSelection` field
set to "CONTINUOUS".
</li> <li> The job status must currently be "IN_PROGRESS".
</li> <li> The total number of targets associated with a job must not
exceed 100.
</li> </ul>
"""
def associate_targets_with_job(client, job_id, input, options \\ []) do
url = "/jobs/#{URI.encode(job_id)}/targets"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Attaches a policy to the specified target.
"""
def attach_policy(client, policy_name, input, options \\ []) do
url = "/target-policies/#{URI.encode(policy_name)}"
headers = []
request(client, :put, url, headers, input, options, nil)
end
@doc """
Attaches the specified policy to the specified principal (certificate or
other credential).
**Note:** This API is deprecated. Please use `AttachPolicy` instead.
"""
def attach_principal_policy(client, policy_name, input, options \\ []) do
  url = "/principal-policies/#{URI.encode(policy_name)}"
  # The principal travels in a request header, not in the body. The rebinding
  # must use the `if` *expression result*: variables bound inside an `if`
  # block do not leak to the enclosing scope, so the original in-block
  # assignments to `headers`/`input` were silently discarded and the header
  # was never sent.
  {headers, input} =
    if Dict.has_key?(input, "principal") do
      {[{"x-amzn-iot-principal", input["principal"]}], Dict.delete(input, "principal")}
    else
      {[], input}
    end

  request(client, :put, url, headers, input, options, nil)
end
@doc """
Attaches the specified principal to the specified thing.
"""
def attach_thing_principal(client, thing_name, input, options \\ []) do
  url = "/things/#{URI.encode(thing_name)}/principals"
  # The principal travels in a request header, not in the body. The rebinding
  # must use the `if` *expression result*: variables bound inside an `if`
  # block do not leak to the enclosing scope, so the original in-block
  # assignments to `headers`/`input` were silently discarded and the header
  # was never sent.
  {headers, input} =
    if Dict.has_key?(input, "principal") do
      {[{"x-amzn-principal", input["principal"]}], Dict.delete(input, "principal")}
    else
      {[], input}
    end

  request(client, :put, url, headers, input, options, nil)
end
@doc """
Cancels a pending transfer for the specified certificate.
**Note** Only the transfer source account can use this operation to cancel
a transfer. (Transfer destinations can use `RejectCertificateTransfer`
instead.) After transfer, AWS IoT returns the certificate to the source
account in the INACTIVE state. After the destination account has accepted
the transfer, the transfer cannot be cancelled.
After a certificate transfer is cancelled, the status of the certificate
changes from PENDING_TRANSFER to INACTIVE.
"""
def cancel_certificate_transfer(client, certificate_id, input, options \\ []) do
url = "/cancel-certificate-transfer/#{URI.encode(certificate_id)}"
headers = []
request(client, :patch, url, headers, input, options, nil)
end
@doc """
Cancels a job.
"""
def cancel_job(client, job_id, input, options \\ []) do
url = "/jobs/#{URI.encode(job_id)}/cancel"
headers = []
request(client, :put, url, headers, input, options, nil)
end
@doc """
Clears the default authorizer.
"""
def clear_default_authorizer(client, input, options \\ []) do
url = "/default-authorizer"
headers = []
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Creates an authorizer.
"""
def create_authorizer(client, authorizer_name, input, options \\ []) do
url = "/authorizer/#{URI.encode(authorizer_name)}"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Creates an X.509 certificate using the specified certificate signing
request.
**Note:** The CSR must include a public key that is either an RSA key with
a length of at least 2048 bits or an ECC key from NIST P-256 or NIST P-384
curves.
**Note:** Reusing the same certificate signing request (CSR) results in a
distinct certificate.
You can create multiple certificates in a batch by creating a directory,
copying multiple .csr files into that directory, and then specifying that
directory on the command line. The following commands show how to create a
batch of certificates given a batch of CSRs.
Assuming a set of CSRs are located inside of the directory
my-csr-directory:
On Linux and OS X, the command is:
$ ls my-csr-directory/ | xargs -I {} aws iot create-certificate-from-csr
--certificate-signing-request file://my-csr-directory/{}
This command lists all of the CSRs in my-csr-directory and pipes each CSR
file name to the aws iot create-certificate-from-csr AWS CLI command to
create a certificate for the corresponding CSR.
The aws iot create-certificate-from-csr part of the command can also be run
in parallel to speed up the certificate creation process:
$ ls my-csr-directory/ | xargs -P 10 -I {} aws iot
create-certificate-from-csr --certificate-signing-request
file://my-csr-directory/{}
On Windows PowerShell, the command to create certificates for all CSRs in
my-csr-directory is:
> ls -Name my-csr-directory | %{aws iot create-certificate-from-csr
--certificate-signing-request file://my-csr-directory/$_}
On a Windows command prompt, the command to create certificates for all
CSRs in my-csr-directory is:
> forfiles /p my-csr-directory /c "cmd /c aws iot
create-certificate-from-csr --certificate-signing-request file://@path"
"""
def create_certificate_from_csr(client, input, options \\ []) do
url = "/certificates"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Creates a job.
"""
def create_job(client, job_id, input, options \\ []) do
url = "/jobs/#{URI.encode(job_id)}"
headers = []
request(client, :put, url, headers, input, options, nil)
end
@doc """
Creates a 2048-bit RSA key pair and issues an X.509 certificate using the
issued public key.
**Note** This is the only time AWS IoT issues the private key for this
certificate, so it is important to keep it in a secure location.
"""
def create_keys_and_certificate(client, input, options \\ []) do
url = "/keys-and-certificate"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Creates an AWS IoT OTAUpdate on a target group of things or groups.
"""
def create_o_t_a_update(client, ota_update_id, input, options \\ []) do
url = "/otaUpdates/#{URI.encode(ota_update_id)}"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Creates an AWS IoT policy.
The created policy is the default version for the policy. This operation
creates a policy version with a version identifier of **1** and sets **1**
as the policy's default version.
"""
def create_policy(client, policy_name, input, options \\ []) do
url = "/policies/#{URI.encode(policy_name)}"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Creates a new version of the specified AWS IoT policy. To update a policy,
create a new policy version. A managed policy can have up to five versions.
If the policy has five versions, you must use `DeletePolicyVersion` to
delete an existing version before you create a new one.
Optionally, you can set the new version as the policy's default version.
The default version is the operative version (that is, the version that is
in effect for the certificates to which the policy is attached).
"""
def create_policy_version(client, policy_name, input, options \\ []) do
url = "/policies/#{URI.encode(policy_name)}/version"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Creates a role alias.
"""
def create_role_alias(client, role_alias, input, options \\ []) do
url = "/role-aliases/#{URI.encode(role_alias)}"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Creates a stream for delivering one or more large files in chunks over
MQTT. A stream transports data bytes in chunks or blocks packaged as MQTT
messages from a source like S3. You can have one or more files associated
with a stream. The total size of a file associated with the stream cannot
exceed more than 2 MB. The stream will be created with version 0. If a
stream is created with the same streamID as a stream that existed and was
deleted within last 90 days, we will resurrect that old stream by
incrementing the version by 1.
"""
def create_stream(client, stream_id, input, options \\ []) do
url = "/streams/#{URI.encode(stream_id)}"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Creates a thing record in the thing registry.
"""
def create_thing(client, thing_name, input, options \\ []) do
url = "/things/#{URI.encode(thing_name)}"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Create a thing group.
"""
def create_thing_group(client, thing_group_name, input, options \\ []) do
url = "/thing-groups/#{URI.encode(thing_group_name)}"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Creates a new thing type.
"""
def create_thing_type(client, thing_type_name, input, options \\ []) do
url = "/thing-types/#{URI.encode(thing_type_name)}"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Creates a rule. Creating rules is an administrator-level action. Any user
who has permission to create rules will be able to access data processed by
the rule.
"""
def create_topic_rule(client, rule_name, input, options \\ []) do
url = "/rules/#{URI.encode(rule_name)}"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Deletes an authorizer.
"""
def delete_authorizer(client, authorizer_name, input, options \\ []) do
url = "/authorizer/#{URI.encode(authorizer_name)}"
headers = []
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Deletes a registered CA certificate.
"""
def delete_c_a_certificate(client, certificate_id, input, options \\ []) do
url = "/cacertificate/#{URI.encode(certificate_id)}"
headers = []
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Deletes the specified certificate.
A certificate cannot be deleted if it has a policy attached to it or if its
status is set to ACTIVE. To delete a certificate, first use the
`DetachPrincipalPolicy` API to detach all policies. Next, use the
`UpdateCertificate` API to set the certificate to the INACTIVE status.
"""
def delete_certificate(client, certificate_id, input, options \\ []) do
url = "/certificates/#{URI.encode(certificate_id)}"
headers = []
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Delete an OTA update.
"""
def delete_o_t_a_update(client, ota_update_id, input, options \\ []) do
url = "/otaUpdates/#{URI.encode(ota_update_id)}"
headers = []
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Deletes the specified policy.
A policy cannot be deleted if it has non-default versions or it is attached
to any certificate.
To delete a policy, use the DeletePolicyVersion API to delete all
non-default versions of the policy; use the DetachPrincipalPolicy API to
detach the policy from any certificate; and then use the DeletePolicy API
to delete the policy.
When a policy is deleted using DeletePolicy, its default version is deleted
with it.
"""
def delete_policy(client, policy_name, input, options \\ []) do
url = "/policies/#{URI.encode(policy_name)}"
headers = []
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Deletes the specified version of the specified policy. You cannot delete
the default version of a policy using this API. To delete the default
version of a policy, use `DeletePolicy`. To find out which version of a
policy is marked as the default version, use ListPolicyVersions.
"""
def delete_policy_version(client, policy_name, policy_version_id, input, options \\ []) do
url = "/policies/#{URI.encode(policy_name)}/version/#{URI.encode(policy_version_id)}"
headers = []
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Deletes a CA certificate registration code.
"""
def delete_registration_code(client, input, options \\ []) do
url = "/registrationcode"
headers = []
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Deletes a role alias
"""
def delete_role_alias(client, role_alias, input, options \\ []) do
url = "/role-aliases/#{URI.encode(role_alias)}"
headers = []
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Deletes a stream.
"""
def delete_stream(client, stream_id, input, options \\ []) do
url = "/streams/#{URI.encode(stream_id)}"
headers = []
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Deletes the specified thing.
"""
def delete_thing(client, thing_name, input, options \\ []) do
url = "/things/#{URI.encode(thing_name)}"
headers = []
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Deletes a thing group.
"""
def delete_thing_group(client, thing_group_name, input, options \\ []) do
url = "/thing-groups/#{URI.encode(thing_group_name)}"
headers = []
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Deletes the specified thing type . You cannot delete a thing type if it has
things associated with it. To delete a thing type, first mark it as
deprecated by calling `DeprecateThingType`, then remove any associated
things by calling `UpdateThing` to change the thing type on any associated
thing, and finally use `DeleteThingType` to delete the thing type.
"""
def delete_thing_type(client, thing_type_name, input, options \\ []) do
url = "/thing-types/#{URI.encode(thing_type_name)}"
headers = []
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Deletes the rule.
"""
def delete_topic_rule(client, rule_name, input, options \\ []) do
url = "/rules/#{URI.encode(rule_name)}"
headers = []
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Deletes a logging level.
"""
def delete_v2_logging_level(client, input, options \\ []) do
url = "/v2LoggingLevel"
headers = []
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Deprecates a thing type. You can not associate new things with deprecated
thing type.
"""
def deprecate_thing_type(client, thing_type_name, input, options \\ []) do
url = "/thing-types/#{URI.encode(thing_type_name)}/deprecate"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Describes an authorizer.
"""
def describe_authorizer(client, authorizer_name, options \\ []) do
url = "/authorizer/#{URI.encode(authorizer_name)}"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Describes a registered CA certificate.
"""
def describe_c_a_certificate(client, certificate_id, options \\ []) do
url = "/cacertificate/#{URI.encode(certificate_id)}"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Gets information about the specified certificate.
"""
def describe_certificate(client, certificate_id, options \\ []) do
url = "/certificates/#{URI.encode(certificate_id)}"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Describes the default authorizer.
"""
def describe_default_authorizer(client, options \\ []) do
url = "/default-authorizer"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Returns a unique endpoint specific to the AWS account making the call.
"""
def describe_endpoint(client, options \\ []) do
url = "/endpoint"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Describes event configurations.
"""
def describe_event_configurations(client, options \\ []) do
url = "/event-configurations"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Describes a search index.
"""
def describe_index(client, index_name, options \\ []) do
url = "/indices/#{URI.encode(index_name)}"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Describes a job.
"""
def describe_job(client, job_id, options \\ []) do
url = "/jobs/#{URI.encode(job_id)}"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Describes a job execution.
"""
def describe_job_execution(client, job_id, thing_name, options \\ []) do
url = "/things/#{URI.encode(thing_name)}/jobs/#{URI.encode(job_id)}"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Describes a role alias.
"""
def describe_role_alias(client, role_alias, options \\ []) do
url = "/role-aliases/#{URI.encode(role_alias)}"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Gets information about a stream.
"""
def describe_stream(client, stream_id, options \\ []) do
url = "/streams/#{URI.encode(stream_id)}"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Gets information about the specified thing.
"""
def describe_thing(client, thing_name, options \\ []) do
url = "/things/#{URI.encode(thing_name)}"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Describe a thing group.
"""
def describe_thing_group(client, thing_group_name, options \\ []) do
url = "/thing-groups/#{URI.encode(thing_group_name)}"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Describes a bulk thing provisioning task.
"""
def describe_thing_registration_task(client, task_id, options \\ []) do
url = "/thing-registration-tasks/#{URI.encode(task_id)}"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Gets information about the specified thing type.
"""
def describe_thing_type(client, thing_type_name, options \\ []) do
url = "/thing-types/#{URI.encode(thing_type_name)}"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Detaches a policy from the specified target.
"""
def detach_policy(client, policy_name, input, options \\ []) do
url = "/target-policies/#{URI.encode(policy_name)}"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Removes the specified policy from the specified certificate.
**Note:** This API is deprecated. Please use `DetachPolicy` instead.
"""
def detach_principal_policy(client, policy_name, input, options \\ []) do
url = "/principal-policies/#{URI.encode(policy_name)}"
headers = []
if Dict.has_key?(input, "principal") do
headers = [{"x-amzn-iot-principal", input["principal"]}|headers]
input = Dict.delete(input, "principal")
end
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Detaches the specified principal from the specified thing.
"""
def detach_thing_principal(client, thing_name, input, options \\ []) do
url = "/things/#{URI.encode(thing_name)}/principals"
headers = []
if Dict.has_key?(input, "principal") do
headers = [{"x-amzn-principal", input["principal"]}|headers]
input = Dict.delete(input, "principal")
end
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Disables the rule.
"""
def disable_topic_rule(client, rule_name, input, options \\ []) do
url = "/rules/#{URI.encode(rule_name)}/disable"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Enables the rule.
"""
def enable_topic_rule(client, rule_name, input, options \\ []) do
url = "/rules/#{URI.encode(rule_name)}/enable"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Gets effective policies.
"""
def get_effective_policies(client, input, options \\ []) do
url = "/effective-policies"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Gets the search configuration.
"""
def get_indexing_configuration(client, options \\ []) do
url = "/indexing/config"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Gets a job document.
"""
def get_job_document(client, job_id, options \\ []) do
url = "/jobs/#{URI.encode(job_id)}/job-document"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Gets the logging options.
"""
def get_logging_options(client, options \\ []) do
url = "/loggingOptions"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Gets an OTA update.
"""
def get_o_t_a_update(client, ota_update_id, options \\ []) do
url = "/otaUpdates/#{URI.encode(ota_update_id)}"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Gets information about the specified policy with the policy document of the
default version.
"""
def get_policy(client, policy_name, options \\ []) do
url = "/policies/#{URI.encode(policy_name)}"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Gets information about the specified policy version.
"""
def get_policy_version(client, policy_name, policy_version_id, options \\ []) do
url = "/policies/#{URI.encode(policy_name)}/version/#{URI.encode(policy_version_id)}"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Gets a registration code used to register a CA certificate with AWS IoT.
"""
def get_registration_code(client, options \\ []) do
url = "/registrationcode"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Gets information about the rule.
"""
def get_topic_rule(client, rule_name, options \\ []) do
url = "/rules/#{URI.encode(rule_name)}"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Gets the fine grained logging options.
"""
def get_v2_logging_options(client, options \\ []) do
url = "/v2LoggingOptions"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the policies attached to the specified thing group.
"""
def list_attached_policies(client, target, input, options \\ []) do
url = "/attached-policies/#{URI.encode(target)}"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Lists the authorizers registered in your account.
"""
def list_authorizers(client, options \\ []) do
url = "/authorizers"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the CA certificates registered for your AWS account.
The results are paginated with a default page size of 25. You can use the
returned marker to retrieve additional results.
"""
def list_c_a_certificates(client, options \\ []) do
url = "/cacertificates"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the certificates registered in your AWS account.
The results are paginated with a default page size of 25. You can use the
returned marker to retrieve additional results.
"""
def list_certificates(client, options \\ []) do
url = "/certificates"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
List the device certificates signed by the specified CA certificate.
"""
def list_certificates_by_c_a(client, ca_certificate_id, options \\ []) do
url = "/certificates-by-ca/#{URI.encode(ca_certificate_id)}"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the search indices.
"""
def list_indices(client, options \\ []) do
url = "/indices"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the job executions for a job.
"""
def list_job_executions_for_job(client, job_id, options \\ []) do
url = "/jobs/#{URI.encode(job_id)}/things"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the job executions for the specified thing.
"""
def list_job_executions_for_thing(client, thing_name, options \\ []) do
url = "/things/#{URI.encode(thing_name)}/jobs"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists jobs.
"""
def list_jobs(client, options \\ []) do
url = "/jobs"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists OTA updates.
"""
def list_o_t_a_updates(client, options \\ []) do
url = "/otaUpdates"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists certificates that are being transferred but not yet accepted.
"""
def list_outgoing_certificates(client, options \\ []) do
url = "/certificates-out-going"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists your policies.
"""
def list_policies(client, options \\ []) do
url = "/policies"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the principals associated with the specified policy.
**Note:** This API is deprecated. Please use `ListTargetsForPolicy`
instead.
"""
def list_policy_principals(client, policy_name \\ nil, options \\ []) do
url = "/policy-principals"
headers = []
if !is_nil(policy_name) do
headers = [{"x-amzn-iot-policy", policy_name}|headers]
end
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the versions of the specified policy and identifies the default
version.
"""
def list_policy_versions(client, policy_name, options \\ []) do
url = "/policies/#{URI.encode(policy_name)}/version"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the policies attached to the specified principal. If you use an
Cognito identity, the ID must be in [AmazonCognito Identity
format](http://docs.aws.amazon.com/cognitoidentity/latest/APIReference/API_GetCredentialsForIdentity.html#API_GetCredentialsForIdentity_RequestSyntax).
**Note:** This API is deprecated. Please use `ListAttachedPolicies`
instead.
"""
def list_principal_policies(client, principal \\ nil, options \\ []) do
url = "/principal-policies"
headers = []
if !is_nil(principal) do
headers = [{"x-amzn-iot-principal", principal}|headers]
end
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the things associated with the specified principal.
"""
def list_principal_things(client, principal \\ nil, options \\ []) do
url = "/principals/things"
headers = []
if !is_nil(principal) do
headers = [{"x-amzn-principal", principal}|headers]
end
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the role aliases registered in your account.
"""
def list_role_aliases(client, options \\ []) do
url = "/role-aliases"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists all of the streams in your AWS account.
"""
def list_streams(client, options \\ []) do
url = "/streams"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
List targets for the specified policy.
"""
def list_targets_for_policy(client, policy_name, input, options \\ []) do
url = "/policy-targets/#{URI.encode(policy_name)}"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
List the thing groups in your account.
"""
def list_thing_groups(client, options \\ []) do
url = "/thing-groups"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
List the thing groups to which the specified thing belongs.
"""
def list_thing_groups_for_thing(client, thing_name, options \\ []) do
url = "/things/#{URI.encode(thing_name)}/thing-groups"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the principals associated with the specified thing.
"""
def list_thing_principals(client, thing_name, options \\ []) do
url = "/things/#{URI.encode(thing_name)}/principals"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Information about the thing registration tasks.
"""
def list_thing_registration_task_reports(client, task_id, options \\ []) do
url = "/thing-registration-tasks/#{URI.encode(task_id)}/reports"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
List bulk thing provisioning tasks.
"""
def list_thing_registration_tasks(client, options \\ []) do
url = "/thing-registration-tasks"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the existing thing types.
"""
def list_thing_types(client, options \\ []) do
url = "/thing-types"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists your things. Use the **attributeName** and **attributeValue**
parameters to filter your things. For example, calling `ListThings` with
attributeName=Color and attributeValue=Red retrieves all things in the
registry that contain an attribute **Color** with the value **Red**.
"""
def list_things(client, options \\ []) do
url = "/things"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the things in the specified group.
"""
def list_things_in_thing_group(client, thing_group_name, options \\ []) do
url = "/thing-groups/#{URI.encode(thing_group_name)}/things"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the rules for the specific topic.
"""
def list_topic_rules(client, options \\ []) do
url = "/rules"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists logging levels.
"""
def list_v2_logging_levels(client, options \\ []) do
url = "/v2LoggingLevel"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Registers a CA certificate with AWS IoT. This CA certificate can then be
used to sign device certificates, which can be then registered with AWS
IoT. You can register up to 10 CA certificates per AWS account that have
the same subject field. This enables you to have up to 10 certificate
authorities sign your device certificates. If you have more than one CA
certificate registered, make sure you pass the CA certificate when you
register your device certificates with the RegisterCertificate API.
"""
def register_c_a_certificate(client, input, options \\ []) do
url = "/cacertificate"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Registers a device certificate with AWS IoT. If you have more than one CA
certificate that has the same subject field, you must specify the CA
certificate that was used to sign the device certificate being registered.
"""
def register_certificate(client, input, options \\ []) do
url = "/certificate/register"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Provisions a thing.
"""
def register_thing(client, input, options \\ []) do
url = "/things"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Rejects a pending certificate transfer. After AWS IoT rejects a certificate
transfer, the certificate status changes from **PENDING_TRANSFER** to
**INACTIVE**.
To check for pending certificate transfers, call `ListCertificates` to
enumerate your certificates.
This operation can only be called by the transfer destination. After it is
called, the certificate will be returned to the source's account in the
INACTIVE state.
"""
def reject_certificate_transfer(client, certificate_id, input, options \\ []) do
url = "/reject-certificate-transfer/#{URI.encode(certificate_id)}"
headers = []
request(client, :patch, url, headers, input, options, nil)
end
@doc """
Remove the specified thing from the specified group.
"""
def remove_thing_from_thing_group(client, input, options \\ []) do
url = "/thing-groups/removeThingFromThingGroup"
headers = []
request(client, :put, url, headers, input, options, nil)
end
@doc """
Replaces the rule. You must specify all parameters for the new rule.
Creating rules is an administrator-level action. Any user who has
permission to create rules will be able to access data processed by the
rule.
"""
def replace_topic_rule(client, rule_name, input, options \\ []) do
url = "/rules/#{URI.encode(rule_name)}"
headers = []
request(client, :patch, url, headers, input, options, nil)
end
@doc """
The query search index.
"""
def search_index(client, input, options \\ []) do
url = "/indices/search"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Sets the default authorizer. This will be used if a websocket connection is
made without specifying an authorizer.
"""
def set_default_authorizer(client, input, options \\ []) do
url = "/default-authorizer"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Sets the specified version of the specified policy as the policy's default
(operative) version. This action affects all certificates to which the
policy is attached. To list the principals the policy is attached to, use
the ListPrincipalPolicy API.
"""
def set_default_policy_version(client, policy_name, policy_version_id, input, options \\ []) do
url = "/policies/#{URI.encode(policy_name)}/version/#{URI.encode(policy_version_id)}"
headers = []
request(client, :patch, url, headers, input, options, nil)
end
@doc """
Sets the logging options.
"""
def set_logging_options(client, input, options \\ []) do
url = "/loggingOptions"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Sets the logging level.
"""
def set_v2_logging_level(client, input, options \\ []) do
url = "/v2LoggingLevel"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Sets the logging options for the V2 logging service.
"""
def set_v2_logging_options(client, input, options \\ []) do
url = "/v2LoggingOptions"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Creates a bulk thing provisioning task.
"""
def start_thing_registration_task(client, input, options \\ []) do
url = "/thing-registration-tasks"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Cancels a bulk thing provisioning task.
"""
def stop_thing_registration_task(client, task_id, input, options \\ []) do
url = "/thing-registration-tasks/#{URI.encode(task_id)}/cancel"
headers = []
request(client, :put, url, headers, input, options, nil)
end
@doc """
Test custom authorization.
"""
def test_authorization(client, input, options \\ []) do
url = "/test-authorization"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Invoke the specified custom authorizer for testing purposes.
"""
def test_invoke_authorizer(client, authorizer_name, input, options \\ []) do
url = "/authorizer/#{URI.encode(authorizer_name)}/test"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Transfers the specified certificate to the specified AWS account.
You can cancel the transfer until it is acknowledged by the recipient.
No notification is sent to the transfer destination's account. It is up to
the caller to notify the transfer target.
The certificate being transferred must not be in the ACTIVE state. You can
use the UpdateCertificate API to deactivate it.
The certificate must not have any policies attached to it. You can use the
DetachPrincipalPolicy API to detach them.
"""
def transfer_certificate(client, certificate_id, input, options \\ []) do
url = "/transfer-certificate/#{URI.encode(certificate_id)}"
headers = []
request(client, :patch, url, headers, input, options, nil)
end
@doc """
Updates an authorizer.
"""
def update_authorizer(client, authorizer_name, input, options \\ []) do
url = "/authorizer/#{URI.encode(authorizer_name)}"
headers = []
request(client, :put, url, headers, input, options, nil)
end
@doc """
Updates a registered CA certificate.
"""
def update_c_a_certificate(client, certificate_id, input, options \\ []) do
url = "/cacertificate/#{URI.encode(certificate_id)}"
headers = []
request(client, :put, url, headers, input, options, nil)
end
@doc """
Updates the status of the specified certificate. This operation is
idempotent.
Moving a certificate from the ACTIVE state (including REVOKED) will not
disconnect currently connected devices, but these devices will be unable to
reconnect.
The ACTIVE state is required to authenticate devices connecting to AWS IoT
using a certificate.
"""
def update_certificate(client, certificate_id, input, options \\ []) do
url = "/certificates/#{URI.encode(certificate_id)}"
headers = []
request(client, :put, url, headers, input, options, nil)
end
@doc """
Updates the event configurations.
"""
def update_event_configurations(client, input, options \\ []) do
url = "/event-configurations"
headers = []
request(client, :patch, url, headers, input, options, nil)
end
@doc """
Updates the search configuration.
"""
def update_indexing_configuration(client, input, options \\ []) do
url = "/indexing/config"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Updates a role alias.
"""
def update_role_alias(client, role_alias, input, options \\ []) do
url = "/role-aliases/#{URI.encode(role_alias)}"
headers = []
request(client, :put, url, headers, input, options, nil)
end
@doc """
Updates an existing stream. The stream version will be incremented by one.
"""
def update_stream(client, stream_id, input, options \\ []) do
url = "/streams/#{URI.encode(stream_id)}"
headers = []
request(client, :put, url, headers, input, options, nil)
end
@doc """
Updates the data for a thing.
"""
def update_thing(client, thing_name, input, options \\ []) do
url = "/things/#{URI.encode(thing_name)}"
headers = []
request(client, :patch, url, headers, input, options, nil)
end
@doc """
Update a thing group.
"""
def update_thing_group(client, thing_group_name, input, options \\ []) do
url = "/thing-groups/#{URI.encode(thing_group_name)}"
headers = []
request(client, :patch, url, headers, input, options, nil)
end
@doc """
Updates the groups to which the thing belongs.
"""
def update_thing_groups_for_thing(client, input, options \\ []) do
url = "/thing-groups/updateThingGroupsForThing"
headers = []
request(client, :put, url, headers, input, options, nil)
end
# Builds and signs the HTTP request, then dispatches it. A `nil`
# `success_status_code` means "accept any of the default 2xx codes".
defp request(client, method, url, headers, input, options, success_status_code) do
  signing_client = %{client | service: "execute-api"}
  host = get_host("iot", signing_client)
  full_url = get_url(host, url, signing_client)

  base_headers = [{"Host", host}, {"Content-Type", "application/x-amz-json-1.1"}]
  payload = encode_payload(input)
  signed_headers = AWS.Request.sign_v4(signing_client, method, full_url, base_headers ++ headers, payload)

  perform_request(method, full_url, payload, signed_headers, options, success_status_code)
end
# No expected status code: 200 with an empty body is a bare success, and any
# of 200/202/204 with a body is decoded as JSON. Anything else is treated as
# an error whose JSON body carries a "message" field.
defp perform_request(method, url, payload, headers, options, nil) do
  case HTTPoison.request(method, url, payload, headers, options) do
    {:ok, %HTTPoison.Response{status_code: 200, body: ""} = response} ->
      {:ok, response}

    {:ok, %HTTPoison.Response{status_code: code, body: body} = response}
    when code in [200, 202, 204] ->
      {:ok, Poison.Parser.parse!(body), response}

    {:ok, %HTTPoison.Response{body: body}} ->
      {:error, Poison.Parser.parse!(body)["message"]}

    {:error, %HTTPoison.Error{reason: reason}} ->
      {:error, %HTTPoison.Error{reason: reason}}
  end
end

# Caller supplied an explicit success status code: only that exact code is
# treated as success (empty body maps to `nil`).
defp perform_request(method, url, payload, headers, options, success_status_code) do
  case HTTPoison.request(method, url, payload, headers, options) do
    {:ok, %HTTPoison.Response{status_code: ^success_status_code, body: ""} = response} ->
      {:ok, nil, response}

    {:ok, %HTTPoison.Response{status_code: ^success_status_code, body: body} = response} ->
      {:ok, Poison.Parser.parse!(body), response}

    {:ok, %HTTPoison.Response{body: body}} ->
      {:error, Poison.Parser.parse!(body)["message"]}

    {:error, %HTTPoison.Error{reason: reason}} ->
      {:error, %HTTPoison.Error{reason: reason}}
  end
end
# Resolves the host name for the service; the special region "local" routes
# everything to localhost (useful for development/testing).
defp get_host(_endpoint_prefix, %{region: "local"}), do: "localhost"

defp get_host(endpoint_prefix, client) do
  "#{endpoint_prefix}.#{client.region}.#{client.endpoint}"
end
# Builds the absolute request URL from the client's protocol and port.
# NOTE(review): the trailing "/" appended after the path looks suspicious
# (it turns "/things/foo" into "/things/foo/"); confirm the service accepts
# the trailing slash before changing it.
defp get_url(host, url, %{:proto => proto, :port => port}) do
  "#{proto}://#{host}:#{port}#{url}/"
end
# Serializes the request body as JSON; a nil input becomes an empty payload.
defp encode_payload(nil), do: ""
defp encode_payload(input), do: Poison.Encoder.encode(input, [])
end
|
lib/aws/iot.ex
| 0.776708 | 0.416737 |
iot.ex
|
starcoder
|
defmodule CsvParser do
  alias CsvParser.{Csv, Xlsx, Memory}

  @doc """
  Creates a new object to be passed into reduce!/3. new and reduce exist separately
  so that errors in your files can be handled explicitly.

  opts:
    type: :csv | :xlsx
      Defaults to nil, which will auto-detect
    map: true | false | :lower | :upper | fun/1
      Whether you want rows as a list or map. Defaults to false, which keeps rows
      as lists. When :lower, keys will be lowercased. When :upper, keys will be
      uppercased. When true, keys will be kept as-is. When fun/1 is passed, the
      row, as a list, will be given to the function and a list must be returned
      (the function maps the input keys to output keys).
    sheet_index: int
      Index of the sheet to parse. Defaults to 1 (only applicable to xlsx)
    separator: ?C
      Field separator. Defaults to ?, (only applicable to csv)
  """
  def new(path, opts \\ []) do
    opts = Keyword.put_new(opts, :validate_row_length, false)

    # Fail fast with {:error, posix_reason} when the file cannot be read.
    with {:ok, _} <- File.stat(path) do
      case opts[:type] do
        :csv ->
          Csv.new(path, opts)

        :xlsx ->
          Xlsx.new(path, opts)

        nil ->
          # Auto-detect: xlsx has a strict binary format, so try it first and
          # fall back to csv when the format does not match.
          case Xlsx.new(path, opts) do
            {:error, :invalid_format} -> Csv.new(path, opts)
            ok_or_error -> ok_or_error
          end

        _ ->
          {:error, :unknown_type}
      end
    end
  end

  @doc """
  Builds an in-memory source (see `CsvParser.Memory`) from already-loaded data.
  """
  def memory(data, opts \\ []) do
    Memory.new(data, opts)
  end

  @doc """
  Same as new/2 but raises if path represents an invalid file.
  """
  def new!(path, opts \\ []) do
    case new(path, opts) do
      {:ok, obj} ->
        obj

      # Errors can be exception structs or plain terms (e.g. the :enoent atom
      # from File.stat/1 or :invalid_format). `raise` on a bare atom crashes
      # with UndefinedFunctionError, so normalize non-exceptions to a string.
      {:error, err} when is_exception(err) ->
        raise err

      {:error, err} ->
        raise to_string(err)
    end
  end

  @doc """
  Parses an enumerable of raw CSV lines instead of a file on disk.
  """
  def lines(lines, opts \\ []) do
    opts = Keyword.put_new(opts, :validate_row_length, false)
    Csv.lines(lines, opts)
  end

  @doc """
  Same as lines/2 but raises on error.
  """
  def lines!(lines, opts \\ []) do
    case lines(lines, opts) do
      {:ok, csv} -> csv
      {:error, err} when is_exception(err) -> raise err
      {:error, err} -> raise to_string(err)
    end
  end

  @doc """
  Reads the file returning {:ok, rows} where rows is a list. See new/2 for
  valid opts.
  """
  def read(path, opts \\ []) do
    with {:ok, obj} <- new(path, opts) do
      result =
        obj
        |> reduce!([], fn row, acc -> [row | acc] end)
        |> Enum.reverse()

      {:ok, result}
    end
  end

  @doc """
  Reads the file returning a list of rows, or raises on error.
  """
  def read!(path, opts \\ []) do
    case read(path, opts) do
      {:ok, data} -> data
      {:error, err} when is_exception(err) -> raise err
      {:error, err} -> raise to_string(err)
    end
  end

  @doc """
  Reduces over the parsed file, calling fun/2 for each row.

  Example:
      csv = CsvParser.new!("sample.xlsx")
      rows = CsvParser.reduce!(csv, [], fn row, rows -> [row | rows] end)
  """
  def reduce!(%Csv{} = csv, acc, fun), do: Csv.reduce(csv, acc, fun)
  def reduce!(%Xlsx{} = xlsx, acc, fun), do: Xlsx.reduce(xlsx, acc, fun)
  def reduce!(%Memory{} = csv, acc, fun), do: Memory.reduce(csv, acc, fun)

  @doc """
  Like calling new!/2 then reduce!/3
  """
  def reduce!(file, acc, fun, opts \\ []) do
    reduce!(new!(file, opts), acc, fun)
  end
end
|
lib/parser.ex
| 0.613584 | 0.47792 |
parser.ex
|
starcoder
|
defmodule Sourceror.Patch do
  @moduledoc """
  Functions that generate patches for common operations.

  Functions in this module assume that the AST was parsed using Sourceror
  functions and that it wasn't modified. If you changed the tree before calling
  `Sourceror.Patch` functions, then the patch ranges are not guaranteed to match
  1:1 with the original source code.
  """

  # Every valid single-letter sigil name: "a".."z" and "A".."Z".
  @sigil_letters for letter <- [?a..?z, ?A..?Z] |> Enum.flat_map(&Enum.to_list/1), do: <<letter>>

  @doc """
  Renames a qualified or unqualified function call.

      iex> original = "String.to_atom(foo)"
      iex> ast = Sourceror.parse_string!(original)
      iex> patches = Sourceror.Patch.rename_call(ast, :to_existing_atom)
      iex> Sourceror.patch_string(original, patches)
      "String.to_existing_atom(foo)"

  If the call is a sigil, you only need to provide the replacement letter:

      iex> original = "~H(foo)"
      iex> ast = Sourceror.parse_string!(original)
      iex> patches = Sourceror.Patch.rename_call(ast, :F)
      iex> Sourceror.patch_string(original, patches)
      "~F(foo)"
  """
  @spec rename_call(call :: Macro.t(), new_name :: atom | String.t()) :: [Sourceror.patch()]
  # Qualified call (e.g. `String.to_atom(foo)`): the outer meta points at the
  # function name after the dot, so the patch range covers only that name.
  def rename_call({{:., _, [_, call]}, meta, _}, new_name) do
    new_name = to_string(new_name)
    start_pos = [line: meta[:line], column: meta[:column]]
    # Range width equals the length of the current call name.
    end_pos = [line: meta[:line], column: meta[:column] + String.length(to_string(call))]
    range = %{start: start_pos, end: end_pos}
    [%{range: range, change: new_name}]
  end

  # Sigil call (e.g. `~H(foo)`): the args are a binary node plus the modifier
  # list, which distinguishes sigils from ordinary local calls.
  def rename_call({call, meta, [{:<<>>, _, _}, modifiers]}, new_name)
      when is_atom(call) and is_list(modifiers) do
    new_name = to_string(new_name)

    # Accept either a full "sigil_X" name or a bare letter; anything else is
    # not a valid single-letter sigil name.
    letter =
      case new_name do
        "sigil_" <> letter when letter in @sigil_letters -> letter
        letter when letter in @sigil_letters -> letter
        _ -> raise ArgumentError, "The sigil name must be a single letter character"
      end

    # Replace only the letter that follows the `~` (columns +1 to +2).
    start_pos = [line: meta[:line], column: meta[:column] + 1]
    end_pos = [line: meta[:line], column: meta[:column] + 2]
    range = %{start: start_pos, end: end_pos}
    [%{range: range, change: letter}]
  end

  # Unqualified local call (e.g. `foo(bar)`).
  def rename_call({call, meta, args}, new_name) when is_atom(call) and is_list(args) do
    new_name = to_string(new_name)
    start_pos = [line: meta[:line], column: meta[:column]]
    end_pos = [line: meta[:line], column: meta[:column] + String.length(to_string(call))]
    range = %{start: start_pos, end: end_pos}
    [%{range: range, change: new_name}]
  end

  @doc """
  Renames an identifier (ie a variable name).

  ## Examples
      iex> original = "foo"
      iex> ast = Sourceror.parse_string!(original)
      iex> patches = Sourceror.Patch.rename_identifier(ast, :bar)
      iex> Sourceror.patch_string(original, patches)
      "bar"
  """
  @spec rename_identifier(identifier :: Macro.t(), new_name :: atom | String.t()) :: [
          Sourceror.patch()
        ]
  # Variables are `{name, meta, context}` triples where context is an atom.
  def rename_identifier({identifier, meta, context}, new_name) when is_atom(context) do
    new_name = to_string(new_name)
    start_pos = [line: meta[:line], column: meta[:column]]
    end_pos = [line: meta[:line], column: meta[:column] + String.length(to_string(identifier))]
    range = %{start: start_pos, end: end_pos}
    [%{range: range, change: new_name}]
  end

  @doc """
  Generates patches that rename the keys of a keyword list.

  The replacements is a keyword list, with the keys to replace as keys, and the
  replacement as the value.

  ## Examples
      iex> original = "[a: b, c: d, e: f]"
      iex> ast = Sourceror.parse_string!(original)
      iex> patches = Sourceror.Patch.rename_kw_keys(ast, a: :foo, e: :bar)
      iex> Sourceror.patch_string(original, patches)
      "[foo: b, c: d, bar: f]"
  """
  @spec rename_kw_keys(keyword :: Macro.t(), replacements :: keyword) :: [Sourceror.patch()]
  def rename_kw_keys({:__block__, _, [items]}, replacements) when is_list(items) do
    # Only keys printed in `key: value` format are patched; keys absent from
    # `replacements` (new_key == nil) are left untouched.
    for {{_, meta, [key]} = quoted, _} <- items,
        meta[:format] == :keyword,
        new_key = replacements[key],
        new_key != nil,
        do: patch_for_kw_key(quoted, new_key)
  end

  defp patch_for_kw_key(quoted, new_key) do
    # NOTE(review): shrinking the end column by 1 appears to exclude the
    # trailing `:` of the keyword key from the replaced range — confirm
    # against the range Sourceror.get_range/1 reports for keyword keys.
    range =
      quoted
      |> Sourceror.get_range()
      |> update_in([:end, :column], &(&1 - 1))

    %{range: range, change: to_string(new_key)}
  end
end
|
lib/sourceror/patch.ex
| 0.866401 | 0.583085 |
patch.ex
|
starcoder
|
defmodule Erlef.Accounts.Member do
  use Erlef.Schema

  @moduledoc """
  Erlef.Accounts.Member provides a schema and helper functions for working with erlef members.

  Members are a "concrete" representation of an associated external resource, namely wildapricot,
  and as such it should be duly noted that this application is not the source of truth for members.

  The schema allows us to cache member attributes, such as the member's name. This is useful to
  avoid querying wildapricot, per their strict api rate limits. Additionally, this allows us to
  properly associate and constrain other schemas within the system to a member; as well as keeping
  the rest of the application completely ignorant in regards to wildapricot.

  See `Erlef.Accounts.External` for details on how fields are mapped between the two resources.
  """

  # Membership levels that correspond to a paid (supporting) membership.
  @paid_member_levels [
    :annual,
    :lifetime,
    :board,
    :fellow,
    :contributor
  ]

  # Maps the display names used by the external system to internal atoms.
  @membership_level_str_map %{
    "Basic Membership" => :basic,
    "Annual Supporting Membership" => :annual,
    "Lifetime Supporting Membership" => :lifetime,
    "Board" => :board,
    "Fellow" => :fellow,
    "Managing and Contributing" => :contributor
  }

  # Inverse of @membership_level_str_map: internal atom => display string.
  @membership_level_map Map.new(@membership_level_str_map, fn {k, v} -> {v, k} end)

  @derive {Jason.Encoder, only: [:id]}
  schema "members" do
    field(:avatar, :map, virtual: true)
    field(:avatar_url, :string)
    field(:name, :string, redact: true)
    field(:first_name, :string, redact: true)
    field(:last_name, :string, redact: true)
    field(:email, :string, redact: true)
    field(:erlef_email_address, :string, redact: true)
    field(:roles, {:array, Ecto.Enum}, values: [:app_admin])
    field(:member_since, :date)
    field(:membership_enabled, :boolean)
    field(:membership_level, Ecto.Enum, values: Map.values(@membership_level_str_map))
    field(:suspended_member, :boolean, default: false)
    field(:terms_of_use_accepted, :boolean)
    field(:is_app_admin, :boolean, default: false)
    field(:is_archived, :boolean)
    field(:is_donor, :boolean)
    field(:has_email_box, :boolean, default: false)
    field(:has_email_alias, :boolean, default: false)
    field(:has_email_address, :boolean, default: false)
    field(:has_requested_slack_invite, :boolean, default: false)
    field(:requested_slack_invite_at, :utc_datetime)
    field(:deactivated_at, :utc_datetime)

    embeds_one(:external, Erlef.Accounts.External, on_replace: :update)

    timestamps()
  end

  # All castable fields for changeset/2.
  @fields [
    :avatar_url,
    :name,
    :first_name,
    :last_name,
    :email,
    :erlef_email_address,
    :roles,
    :has_email_alias,
    :has_email_box,
    :has_email_address,
    :has_requested_slack_invite,
    :is_app_admin,
    :is_archived,
    :is_donor,
    :member_since,
    :membership_enabled,
    :membership_level,
    :requested_slack_invite_at,
    :suspended_member,
    :terms_of_use_accepted,
    :deactivated_at
  ]

  # Subset of @fields that must be present for a valid member.
  @required_fields [
    :name,
    :first_name,
    :last_name,
    :email,
    :roles,
    :has_email_alias,
    :has_email_box,
    :has_email_address,
    :is_app_admin,
    :is_archived,
    :is_donor,
    :member_since,
    :membership_enabled,
    :membership_level,
    :suspended_member,
    :terms_of_use_accepted
  ]

  @doc false
  def changeset(member, attrs) do
    member
    |> cast(attrs, @fields)
    |> cast_embed(:external)
    |> validate_required(@required_fields)
    |> validate_email(:email)
    |> validate_email(:erlef_email_address)
  end

  @doc """
  Returns a query matching the member whose embedded `external` resource has
  the given id (the id assigned by the external system).
  """
  def by_external_id(id) do
    from(m in __MODULE__,
      where: fragment("(external->>'id' = ?)", ^id)
    )
  end

  @doc """
  Returns `true` when the member's membership level is one of the paid
  (supporting) levels.
  """
  def is_paying?(%Member{membership_level: level}) when is_atom(level) do
    level in @paid_member_levels
  end

  def is_paying?(_), do: false

  @doc """
  Returns the human-readable display string for the member's membership
  level (as used by the external system).
  """
  def membership_level(%Member{membership_level: level}, humanize: true) do
    @membership_level_map[level]
  end
end
|
lib/erlef/accounts/member.ex
| 0.575946 | 0.482246 |
member.ex
|
starcoder
|
defmodule SistemaFinanceiro.AccountService do
  @moduledoc """
  A lightweight service to orchestrate incoming calls/requests/actions from our controller.
  """

  alias Repository.AccountRepository
  alias Repository.ExchangeRateRepository

  @doc """
  Lists all accounts.

  ## Examples:

      iex> SistemaFinanceiro.AccountService.list()
      {
        :ok,
        [
          Account.new("1", "<NAME>", 50, :BRL),
          Account.new("2", "<NAME>", 50, :BRL),
          Account.new("3", "<NAME>", 25, :USD),
          Account.new("4", "<NAME>", 100, :USD),
          Account.new("5", "<NAME>", 25, :USD),
          Account.new("6", "<NAME>", 50, :JPY),
          Account.new("7", "<NAME>", 25, :JPY)
        ]
      }
  """
  def list do
    case AccountRepository.all() do
      {:ok, accounts} -> {:ok, accounts}
      {:error, error} -> {:error, error.message}
    end
  end

  @doc """
  Splits `Money` into accounts given an amount and ratios.

  ## Examples:

      iex> SistemaFinanceiro.AccountService.split_money(["1", "5"], "0.05", [3,7])
      {:error, "Monies with different currencies. Got USD and BRL"}

      iex> SistemaFinanceiro.AccountService.split_money(["1", "2"], "0.05", [3,7])
      {:ok,
        [
          %Account{
            balance: %Money{amount: 5002, currency: :BRL},
            code: "1",
            owner: "<NAME>"
          },
          %Account{
            balance: %Money{amount: 5003, currency: :BRL},
            code: "2",
            owner: "<NAME>"
          }
        ]
      }
  """
  def split_money(accounts_code, amount, ratios) do
    {:ok, find_accounts_by_code(accounts_code) |> do_split!(amount, ratios)}
  rescue
    # Boundary: any exception raised below (unknown account, currency
    # mismatch, repository failure) is converted to {:error, message}.
    e -> {:error, e.message}
  end

  @doc """
  Converts the balances of the given accounts to the currency identified by
  `to_currency_code`, using the stored exchange rates. Returns
  `{:ok, accounts}` or `{:error, message}`.
  """
  def exchange_money(accounts_code, to_currency_code) do
    {:ok, find_accounts_by_code(accounts_code) |> do_exchange!(to_currency_code)}
  rescue
    e -> {:error, e.message}
  end

  # Loads every account by code, raising ArgumentError on any miss so the
  # public functions above can rescue into an {:error, message} tuple.
  defp find_accounts_by_code(accounts_code) do
    Enum.map(accounts_code, fn code ->
      case AccountRepository.find(code) do
        {:ok, nil} ->
          raise ArgumentError,
            message: "Account code #{code} not found"

        {:ok, account} ->
          account

        {:error, reason} ->
          raise ArgumentError, message: reason
      end
    end)
  end

  # Divides the parsed amount according to `ratios` and allocates one share
  # per account, in order.
  defp do_split!(accounts, amount, ratios) do
    money = Money.parse(amount)
    monies = Money.divide(money, ratios)
    allocate_money_to_accounts(accounts, monies)
  end

  # Walks both lists in lockstep, adding each allocated money value to the
  # matching account's balance.
  # NOTE(review): assumes both lists have the same length — a mismatch (or an
  # empty accounts list) raises FunctionClauseError; confirm callers always
  # pass one ratio per account.
  defp allocate_money_to_accounts([head_acc | tail_acc], [head_m | tail_m]) do
    new_balance = Money.add(head_acc.balance, head_m)

    if tail_acc !== [] do
      [
        Account.update(head_acc, %{balance: new_balance})
        | allocate_money_to_accounts(tail_acc, tail_m)
      ]
    else
      [Account.update(head_acc, %{balance: new_balance})]
    end
  end

  # Converts each account's balance to the target currency. The `||` lets
  # same_currency/2 short-circuit the repository lookup with a rate of 1 when
  # no conversion is needed.
  defp do_exchange!(accounts, to_convert_currency_code) do
    Enum.map(accounts, fn acc ->
      m = acc.balance
      from_currency = Currency.find!(m.currency)
      to_currency = Currency.find!(to_convert_currency_code)

      result =
        same_currency(to_currency.alpha_code, from_currency.alpha_code) ||
          ExchangeRateRepository.find(%{
            to: to_currency.alpha_code,
            from: from_currency.alpha_code
          })

      case result do
        {:ok, nil} ->
          raise ArgumentError,
            message:
              "Exchange Rate from #{from_currency.alpha_code} to #{to_currency.alpha_code} not found"

        {:ok, exchange_rate} ->
          Account.update(acc, %{
            balance: Money.multiply(m, exchange_rate.rate, to_currency.alpha_code)
          })

        {:error, reason} ->
          raise ArgumentError, message: reason
      end
    end)
  end

  # Returns {:ok, %{rate: 1}} for identical currency codes (identity
  # conversion), or false so the caller falls through to a rate lookup.
  defp same_currency(to, from) do
    cond do
      to === from -> {:ok, %{rate: 1}}
      true -> false
    end
  end
end
|
lib/service/account_service.ex
| 0.889463 | 0.709824 |
account_service.ex
|
starcoder
|
defmodule ChexDigits.Helper do
  @moduledoc """
  This module contains all helper functions to specify the algorithms for
  check digit calculation.

  The main function is `checksum/1`, which adds a lot of flexibility to the
  generic algorithm:

  1 - add all digits, each multiplied by its respective weight
  2 - calculate a remainder, which may or may not be subtracted from the
  module itself (e.g. `11 - mod(x, 11)`)
  """

  alias ChexDigits.Rule

  @doc """
  Computes the check digit described by the given `ChexDigits.Rule`:

    * `digits`: the list of digits for which the checksum will be calculated
    * `input_alphabet`: translation table applied to each input digit before
      summing (see `map_onto/2`)
    * `output_alphabet`: translation table applied to the final remainder
    * `module`: the module for the remainder calculation; if `nil`, the
      weighted sum is returned untouched (see `mod/3`)
    * `module_type`: `:standard` or `:module_minus` (see `mod/3`)
    * `weights`: the weights for the weighted sum (see `dot/4`)
    * `weight_alignment`: `:left` or `:right` alignment of the weights
    * `per_term_function`: applied to each term of the weighted sum
  """
  @spec checksum(%Rule{}) :: integer | String.t()
  def checksum(%Rule{
        digits: digits,
        input_alphabet: input_alphabet,
        output_alphabet: output_alphabet,
        module: module,
        module_type: module_type,
        weights: weights,
        weight_alignment: weight_alignment,
        per_term_function: per_term_function
      }) do
    digits
    |> map_onto(input_alphabet)
    |> Enum.map(&to_integer/1)
    |> dot(weights, per_term_function, weight_alignment)
    |> mod(module, module_type)
    |> map_onto(output_alphabet)
    |> Enum.at(0)
    # NOTE(review): if `output_alphabet` maps the remainder to a string (e.g.
    # `%{10 => "X"}`), `Integer.to_string/1` raises — confirm output
    # alphabets only yield integers here.
    |> Integer.to_string()
  end

  @doc """
  Performs the dot-product `u . v`, aligning `v` over `u` depending on
  `alignment` (`:left` aligns the first elements, `:right` the last ones).

  For example:

      u = [1, 2, 3, 4]
      v = [1, 2]
      alignment = :right

  pairs `v` with the last two elements of `u`.

  For each multiplication term, the function `fun/1` is applied afterwards.

  NOTE(review): `Enum.zip/2` drops the unpaired elements of the longer list,
  which is equivalent to zero-padding `v` only when `fun.(0) == 0` — confirm
  that all supplied per-term functions satisfy this.
  """
  def dot(u, v, fun, alignment) do
    alignment
    |> case do
      :left ->
        Enum.zip(u, v)

      :right ->
        # Reversing both lists aligns the weights with the tail of `u`.
        u
        |> Enum.reverse()
        |> Enum.zip(Enum.reverse(v))
    end
    |> Enum.reduce(0, fn {x, y}, acc ->
      fun.(x * y) + acc
    end)
  end

  @doc """
  Performs the replacements specified by `alphabet` on `digits`.
  If a given `digit` is not in the `alphabet`, it is returned unchanged.
  """
  @spec map_onto(list, Map.t()) :: list
  def map_onto(digits, alphabet) do
    digits
    # List.wrap/1 lets this accept a single remainder as well as a list.
    |> List.wrap()
    |> Enum.map(fn digit ->
      if Map.has_key?(alphabet, digit) do
        Map.get(alphabet, digit)
      else
        digit
      end
    end)
  end

  @doc """
  Performs the module/remainder calculation as such:

  If `module_type` == `:standard`:
      rem(value, module)

  If `module_type` == `:module_minus`:
      module - rem(value, module)

  If `module` == `nil`, then the value is returned untouched.
  """
  @spec mod(integer, integer | nil, atom) :: integer
  def mod(value, nil, _), do: value

  def mod(value, module, :standard) do
    rem(value, module)
  end

  def mod(value, module, :module_minus) do
    module - rem(value, module)
  end

  @doc """
  Zero-padding to comply to length.

  The third argument defines if the padding occurs from the `:left` or from
  the `:right`. When `len <= length(digits)`, `digits` is returned untouched.
  """
  @spec pad(list, non_neg_integer, atom) :: list
  def pad(digits, len, direction) do
    if len <= length(digits) do
      digits
    else
      # List.duplicate/2 replaces the previous Stream.cycle/Enum.take
      # construction — identical output, idiomatic and single-pass.
      padding = List.duplicate(0, len - length(digits))

      case direction do
        :right -> digits ++ padding
        :left -> padding ++ digits
      end
    end
  end

  # Coerces a digit given as a string to an integer; integers pass through.
  def to_integer(x) when is_integer(x), do: x
  def to_integer(x) when is_binary(x), do: String.to_integer(x)

  # Splits a string into a list of single-character strings; lists pass
  # through unchanged; any other value yields an error tuple.
  def to_list(l) when is_binary(l) do
    String.codepoints(l)
  end

  def to_list(l) when is_list(l), do: l
  def to_list(value), do: {:error, {:invalid_value, value}}
end
|
lib/helper.ex
| 0.926918 | 0.894375 |
helper.ex
|
starcoder
|
defmodule Journey.Process do
  @moduledoc """
  Defines a process (an ordered collection of steps) and functions for
  registering it and starting new executions of it.
  """

  @derive Jason.Encoder
  @enforce_keys [:process_id, :steps]
  defstruct [:process_id, steps: []]

  @typedoc ~S"""
  Holds the definition of a process.
  """
  @type t :: %Journey.Process{process_id: String.t(), steps: list(Journey.Step.t())}

  # Generates a random lower-case identifier of `length` characters.
  defp random_string(length) do
    # Base32 (lower case, no padding) only emits characters in [a-z2-7], so
    # no further character replacement is needed (the previous
    # `String.replace(["+", "/"], "m")` was dead code left over from a
    # base64 implementation).
    :crypto.strong_rand_bytes(length)
    |> Base.encode32(case: :lower, padding: false)
    |> binary_part(0, length)
  end

  @doc ~S"""
  Starts a new execution of the process.

  ## Example

      iex> process = %Journey.Process{
      ...>  process_id: "horoscopes-r-us",
      ...>  steps: [
      ...>    %Journey.Step{name: :first_name},
      ...>    %Journey.Step{name: :birth_month},
      ...>    %Journey.Step{name: :birth_day},
      ...>    %Journey.Step{
      ...>      name: :astrological_sign,
      ...>      func: fn _values ->
      ...>        # Everyone is a Taurus!
      ...>        {:ok, "taurus"}
      ...>      end,
      ...>      blocked_by: [
      ...>        %Journey.BlockedBy{step_name: :birth_month, condition: :provided},
      ...>        %Journey.BlockedBy{step_name: :birth_day, condition: :provided}
      ...>      ]
      ...>    },
      ...>    %Journey.Step{
      ...>      name: :horoscope,
      ...>      func: fn values ->
      ...>        name = values[:first_name].value
      ...>        sign = values[:astrological_sign].value
      ...>        {
      ...>          :ok,
      ...>          "#{name}! You are a #{sign}! Now is the perfect time to smash the racist patriarchy!"
      ...>        }
      ...>      end,
      ...>      blocked_by: [
      ...>        %Journey.BlockedBy{step_name: :first_name, condition: :provided},
      ...>        %Journey.BlockedBy{step_name: :astrological_sign, condition: :provided}
      ...>      ]
      ...>    }
      ...>  ]
      ...> }
      iex>
      iex> # Start a new execution of the process.
      iex> execution = Journey.Process.execute(process)
      iex>
      iex>
      iex> {:ok, execution} = Journey.Execution.update_value(execution, :first_name, "Luigi")
      iex> {:not_computed, _} = Journey.Execution.read_value(execution, :astrological_sign)
      iex> {:ok, execution} = Journey.Execution.update_value(execution, :birth_month, 4)
      iex> {:ok, execution} = Journey.Execution.update_value(execution, :birth_day, 29)
      iex> :timer.sleep(100) # Give :astrological_sign's function a bit of time to run.
      iex> {:computed, "taurus"} = execution.execution_id |> Journey.Execution.load!() |> Journey.Execution.read_value(:astrological_sign)
      iex> :timer.sleep(200) # Give :horoscope's function a bit of time to run.
      iex> {:computed, horoscope} = execution.execution_id |> Journey.Execution.load!() |> Journey.Execution.read_value(:horoscope)
      iex> horoscope
      "Luigi! You are a taurus! Now is the perfect time to smash the racist patriarchy!"
  """
  @spec execute(Journey.Process.t()) :: Journey.Execution.t()
  def execute(process) do
    process = process |> register_process()

    # Persist a fresh execution with blank values, then record the start
    # time through the regular update path so it is tracked like any step.
    execution =
      %Journey.Execution{
        execution_id: random_string(10),
        process_id: process.process_id,
        values: blank_values(process)
      }
      |> Journey.ExecutionStore.Postgres.put()

    {:ok, execution} = Journey.Execution.update_value(execution.execution_id, :started_at, System.os_time(:second))
    execution
  end

  @doc """
  Registers the process in the catalog, prepending the built-in
  `:started_at` step.
  """
  def register_process(process) do
    process = %{process | steps: [%Journey.Step{name: :started_at}] ++ process.steps}
    Journey.ProcessCatalog.register(process)

    # Ensure every step name exists as an atom so later lookups succeed.
    # NOTE(review): String.to_atom/1 creates atoms from step names — safe
    # only while process definitions come from trusted code, since atoms are
    # never garbage collected.
    Enum.each(process.steps, fn step ->
      if not is_atom(step.name), do: String.to_atom(step.name)
    end)

    process
  end

  # Builds the initial values map: one blank Journey.Value per step, keyed by
  # step name (first occurrence wins on duplicate names).
  defp blank_values(process) do
    process
    |> Map.get(:steps, [])
    |> Enum.reduce(
      %{},
      fn step, acc ->
        Map.put_new(acc, step.name, %Journey.Value{name: step.name})
      end
    )
  end
end
|
lib/process.ex
| 0.695752 | 0.566978 |
process.ex
|
starcoder
|
defmodule MarcoPolo.Protocol.Types do
  @moduledoc false

  alias MarcoPolo.Document
  alias MarcoPolo.BinaryRecord
  alias MarcoPolo.Protocol.RecordSerialization

  # Provides the `short`, `int` and `long` bitstring modifiers used below.
  import MarcoPolo.Protocol.BinaryHelpers

  @type encodable_term ::
          boolean
          | nil
          | binary
          | integer
          | iolist
          | {:short, integer}
          | {:int, integer}
          | {:long, integer}
          | {:raw, binary}
          | Document.t
          | BinaryRecord.t

  @doc """
  Encodes a given term according to the binary protocol.

  The type of `term` is usually inferred by its value but in some cases it can
  be specified by using a tagged tuple. For example, to force encodng of an
  integer as an OrientDB short, you can pass `{:short, n}`.
  """
  # Made public for testing.
  @spec encode(encodable_term) :: iodata
  def encode(term)

  # Booleans.
  def encode(true), do: <<1>>
  def encode(false), do: <<0>>

  # nil is encoded as a length of -1.
  def encode(nil), do: encode({:int, -1})

  # Strings and bytes: 4-byte length prefix followed by the raw bytes.
  def encode(str) when is_binary(str), do: encode({:int, byte_size(str)}) <> str

  # Encoding an Elixir integer defaults to encoding an OrientDB int (4 bytes).
  def encode(i) when is_integer(i), do: encode({:int, i})

  # Typed integers (short, int and long) have to be tagged.
  def encode({:short, i}), do: <<i :: short>>
  def encode({:int, i}), do: <<i :: int>>
  def encode({:long, i}), do: <<i :: long>>

  # A list is assumed to be iodata: length prefix computed without flattening.
  def encode(data) when is_list(data), do: [encode(IO.iodata_length(data)), data]

  # Raw bytes (that have no leading length, just the bytes).
  def encode({:raw, bytes}) when is_binary(bytes) or is_list(bytes), do: bytes

  # An entire document is serialized first, then length-prefixed like bytes.
  def encode(%Document{} = record), do: encode(RecordSerialization.encode(record))

  # A binary record (BLOB).
  def encode(%BinaryRecord{contents: bytes}), do: encode(bytes)

  @doc """
  Encdes a list of terms.
  """
  @spec encode_list([MarcoPolo.Protocol.Types.encodable_term]) :: iodata
  def encode_list(list) when is_list(list) do
    Enum.map(list, &encode/1)
  end

  @doc """
  Decodes an instance of `type` from `data`.

  Returns a `{value, rest}` tuple or the `:incomplete` atom if `data` doesn't
  contain a full instance of `type`.
  """
  @spec decode(binary, atom) :: {term, binary} | :incomplete
  def decode(data, type)

  # A length of -1 encodes a null string/bytes value (see encode(nil)).
  def decode(<<-1 :: int, rest :: binary>>, type) when type in [:string, :bytes] do
    {nil, rest}
  end

  def decode(<<length :: int, data :: binary>>, type) when type in [:string, :bytes] do
    # The payload may be shorter than the declared length when the packet is
    # split across TCP frames; report :incomplete so the caller can buffer.
    case data do
      <<parsed :: bytes-size(length), rest :: binary>> -> {parsed, rest}
      _ -> :incomplete
    end
  end

  def decode(<<byte, rest :: binary>>, :byte), do: {byte, rest}
  def decode(<<i :: short, rest :: binary>>, :short), do: {i, rest}
  def decode(<<i :: int, rest :: binary>>, :int), do: {i, rest}
  def decode(<<i :: long, rest :: binary>>, :long), do: {i, rest}

  # Not enough bytes for the requested fixed-size type.
  def decode(_data, _type) do
    :incomplete
  end
end
|
lib/marco_polo/protocol/types.ex
| 0.904349 | 0.50415 |
types.ex
|
starcoder
|
defmodule Component do
  @moduledoc """
  The Component behaviour specification.

  This is heavily inspired by plug. I wanted the same type of functionality
  without being locked into the Plug.Conn struct.

  #### Function components

  A function component is any function that receives a conn and a set of
  options and returns a conn. Its type signature must be:

      (Component.conn, Component.opts) :: Component.conn

  #### Module components

  Module components function a little bit differently then Module plugs. They
  have two functions, `call/2` and `respond/2`. `call/2` functions are designed
  to be executed in the order they are defined in a pipeline. `respond/2`
  functions are executed in reverse order after all `call/2` functions have been
  executed. `respond/2` can be thought of as similar to the
  [Plug.Conn#register_before_send/2](https://hexdocs.pm/plug/Plug.Conn.html#register_before_send/2)
  function.

  A module component must export:

  - a `call/2` function with the signature above
  - an `init/1` function which takes a set of options and initializes it
  - a `respond/2` function with the signature above

  Conn should always be of the same type going out as it is coming in. While it
  is possible to not follow this pattern, failure to do so will likely make this
  all super confusing.
  """

  @doc false
  defmacro __using__(_opts) do
    quote do
      @behaviour Component

      # Default implementations: each callback is an identity pass-through
      # (returns conn untouched) when opts is a proper list, and raises for
      # any other opts shape. All are overridable so using modules only
      # define the callbacks they care about.
      def call(conn, opts \\ [])
      def call(conn, opts) when is_list(opts), do: conn

      def call(_conn, _opts) do
        raise ArgumentError, message: "opts must be a list"
      end

      def init(opts \\ [])
      def init(opts) when is_list(opts), do: opts

      def init(_opts) do
        raise ArgumentError, message: "opts must be a list"
      end

      def respond(conn, opts \\ [])
      def respond(conn, opts) when is_list(opts), do: conn

      def respond(_conn, _opts) do
        raise ArgumentError, message: "opts must be a list"
      end

      defoverridable [call: 2, init: 1, respond: 2]
    end
  end

  @type conn :: binary | tuple | list | map | struct
  @type opts :: [{atom, any}]

  @doc """
  Called at the top of the stack.
  """
  @callback call(conn, opts) :: conn | {:halt, conn}

  @doc """
  Called at the bottom of the stack.
  """
  @callback respond(conn, opts) :: conn
end
|
lib/component.ex
| 0.77518 | 0.582194 |
component.ex
|
starcoder
|
defmodule Kojin.Pod.PodPackageSet do
  @moduledoc """
  Models a collection of related packages which may refer to
  types with `dot notation` paths.
  """

  use TypedStruct

  alias Kojin.Pod.{PodPackage, PodPackageSet, PodTypeRef, PodArray, PodType, PodObject, PodEnum}

  @typedoc """
  Models a collection of related packages which may refer to
  types with `dot notation` paths.
  """
  typedstruct enforce: true do
    field(:id, atom)
    field(:doc, binary)
    field(:packages, list(PodPackage.t()))
    field(:enums, list(PodEnum.t()))
    field(:enums_map, %{atom => list(PodEnum.t())})
    field(:objects, list(PodObject.t()))
    field(:objects_map, %{atom => list(PodObject.t())})
    field(:predefined_types, list(atom), default: [])
  end

  @doc """
  Creates a collection of related packages which may refer to
  types with `dot notation` paths.

  Options:

    * `:predefined_types` - list of predefined type atoms (defaults to `[]`)
  """
  def pod_package_set(id, doc, packages, opts \\ []) when is_list(packages) do
    defaults = [predefined_types: []]
    opts = Kojin.check_args(defaults, opts)

    # Collect {package_id, enum} / {package_id, object} pairs across all
    # packages. Enum.flat_map/2 replaces the previous
    # `Enum.map |> List.flatten` combination (same result, single pass).
    enums =
      Enum.flat_map(packages, fn package ->
        Enum.map(package.pod_enums, fn e -> {package.id, e} end)
      end)

    objects =
      Enum.flat_map(packages, fn package ->
        Enum.map(package.pod_objects, fn o -> {package.id, o} end)
      end)

    %PodPackageSet{
      id: id,
      doc: doc,
      packages: packages,
      enums: enums,
      enums_map: Enum.group_by(enums, fn {_pkg, e} -> e.id end),
      objects: objects,
      objects_map: Enum.group_by(objects, fn {_pkg, o} -> o.id end),
      predefined_types: opts[:predefined_types]
    }
  end

  @doc """
  Finds the `{id, entries}` map entry for the enum or object named `id`
  (enums searched first); returns `nil` when no such item exists.
  """
  def find_item_id(%PodPackageSet{} = pod_package_set, id) when is_atom(id) do
    Enum.find(pod_package_set.enums_map, fn {e_id, _list} -> e_id == id end) ||
      Enum.find(pod_package_set.objects_map, fn {o_id, _list} -> o_id == id end)
  end

  @doc """
  Returns the `PodPackage` with the given id, or `nil`.
  """
  def find_pod_package(%PodPackageSet{} = pod_package_set, package_id) when is_atom(package_id) do
    Enum.find(pod_package_set.packages, fn package -> package.id == package_id end)
  end

  @doc """
  Resolves `pod_type_ref` to a `PodObject` by searching each package in turn.
  """
  def find_object(%PodPackageSet{} = pod_package_set, %PodTypeRef{} = pod_type_ref) do
    Enum.find_value(pod_package_set.packages, fn package ->
      PodPackage.find_object(package, pod_type_ref)
    end)
  end

  @doc """
  Resolves `pod_type_ref` to a `PodEnum` by searching each package in turn.
  """
  def find_enum(%PodPackageSet{} = pod_package_set, %PodTypeRef{} = pod_type_ref) do
    Enum.find_value(pod_package_set.packages, fn package ->
      PodPackage.find_enum(package, pod_type_ref)
    end)
  end

  @doc """
  Returns the `MapSet` union of all types across all packages.
  """
  def all_types(%PodPackageSet{} = pod_package_set) do
    Enum.reduce(pod_package_set.packages, MapSet.new(), fn pod_package, acc ->
      MapSet.union(acc, PodPackage.all_types(pod_package))
    end)
  end

  @doc """
  Filters `all_types/1` down to plain `PodType` entries.
  """
  def all_pod_types(%PodPackageSet{} = pod_package_set) do
    for {_package, %PodType{}} = elm <- all_types(pod_package_set), do: elm
  end

  @doc """
  Filters `all_types/1` down to `PodTypeRef` entries.
  """
  def all_ref_types(%PodPackageSet{} = pod_package_set) do
    for {_package, %PodTypeRef{}} = elm <- all_types(pod_package_set), do: elm
  end

  @doc """
  Filters `all_types/1` down to `PodArray` entries.
  """
  def all_array_types(%PodPackageSet{} = pod_package_set) do
    for {_package, %PodArray{}} = elm <- all_types(pod_package_set), do: elm
  end

  @doc """
  Pretty-prints the package set to stdout.
  """
  def info(%PodPackageSet{} = pod_package_set) do
    IO.puts(inspect(pod_package_set, pretty: true))
  end
end
|
lib/kojin/pod/pod_package_set.ex
| 0.759225 | 0.431225 |
pod_package_set.ex
|
starcoder
|
defmodule NewRelic do
  @moduledoc """
  New Relic Agent - Public API

  Entry points for instrumenting an application at runtime: naming and
  annotating Transactions, connecting Distributed Traces, sampling
  processes, and reporting custom events. Every public function here
  delegates to an internal agent module.
  """

  @doc """
  Set the name of the current transaction.

  The first segment will be treated as the Transaction namespace,
  and commonly contains the name of the framework.

  In the following example, you will see `/custom/transaction/name`
  in the Transaction list.

  ```elixir
  NewRelic.set_transaction_name("Plug/custom/transaction/name")
  ```
  """
  defdelegate set_transaction_name(name), to: NewRelic.Transaction.Reporter

  @doc """
  Report a custom attribute on the current transaction.

  ```elixir
  NewRelic.add_attributes(foo: "bar")
  ```
  """
  defdelegate add_attributes(custom_attributes), to: NewRelic.Transaction.Reporter

  @doc false
  defdelegate incr_attributes(attrs), to: NewRelic.Transaction.Reporter

  @doc """
  Store information about the type of work the current span is doing.

  Options:
  - `:generic, custom: attributes`
  - `:http, url: url, method: method, component: component`
  - `:datastore, statement: statement, instance: instance, address: address, hostname: hostname, component: component`
  """
  defdelegate set_span(type, attributes), to: NewRelic.DistributedTrace

  @doc """
  You must manually instrument outgoing HTTP calls to connect them to a Distributed Trace.

  The agent will automatically read request headers and detect if the request is a part
  of a Distributed Trace, but outgoing requests need an extra header:

  ```elixir
  dt_header_payload = NewRelic.create_distributed_trace_payload(:http)
  HTTPoison.get(url, ["x-api-key": "secret"] ++ dt_header_payload)
  ```

  **Notes:**

  * Call `NewRelic.create_distributed_trace_payload` immediately before making the
    request since calling the function marks the "start" time of the request.
  """
  defdelegate create_distributed_trace_payload(type), to: NewRelic.DistributedTrace

  @doc """
  To get detailed information about a particular process, you can install a Process sampler.
  You must tell the Agent about your process from within the process.

  For a `GenServer`, this function call should be made in the `init` function:

  ```elixir
  defmodule ImportantProcess do
    use GenServer
    def init(:ok) do
      NewRelic.sample_process
      {:ok, %{}}
    end
  end
  ```

  Once installed, the agent will report `ElixirSample` events with:

  * `category = "Process"`
  * `message_queue_length`
  * `reductions`
  * `memory_kb`
  """
  defdelegate sample_process, to: NewRelic.Sampler.Process

  @doc """
  Report a Custom event to NRDB.

  ```elixir
  NewRelic.report_custom_event("EventType", %{"foo" => "bar"})
  ```
  """
  defdelegate report_custom_event(type, attributes),
    to: NewRelic.Harvest.Collector.CustomEvent.Harvester

  @doc false
  defdelegate report_aggregate(meta, values), to: NewRelic.Aggregate.Reporter

  @doc false
  defdelegate report_sample(category, values), to: NewRelic.Sampler.Reporter

  @doc false
  defdelegate report_span(span), to: NewRelic.Harvest.Collector.SpanEvent.Harvester

  @doc false
  defdelegate report_metric(identifier, values), to: NewRelic.Harvest.Collector.Metric.Harvester

  @doc false
  defdelegate log(level, message), to: NewRelic.Logger

  @doc """
  Will gracefully complete and shut down the agent harvest cycle.

  To ensure a harvest at shutdown, you can add a hook to your application:

  ```elixir
  System.at_exit(fn(_) ->
    NewRelic.manual_shutdown()
  end)
  ```
  """
  defdelegate manual_shutdown(), to: NewRelic.Harvest.Supervisor
end
|
lib/new_relic.ex
| 0.884551 | 0.84228 |
new_relic.ex
|
starcoder
|
defmodule ExHealth.HealthServer do
  @moduledoc """
  The HealthServer is a GenServer that is responsible for running all health
  checks and determining system health.

  All other integrations must communicate to the HealthServer to get
  information about the latest checks.
  """
  use GenServer

  @doc """
  Start the HealthServer for a given state of type `ExHealth.Status.t`.
  """
  def start_link(state) do
    GenServer.start_link(__MODULE__, state, name: __MODULE__)
  end

  @impl true
  def init(state) do
    # Kick off the periodic check loop using the configured interval.
    state
    |> Map.get(:interval_ms)
    |> schedule_update()

    {:ok, state}
  end

  @impl true
  def handle_call(:status, _from, %ExHealth.Status{} = state) do
    {:reply, state, state}
  end

  @impl true
  def handle_info(:perform_check, %ExHealth.Status{} = state) do
    new_state = get_status(state)

    # Schedule next call
    new_state
    |> Map.get(:interval_ms)
    |> schedule_update()

    {:noreply, new_state}
  end

  # Wraps the raw check results together with the overall verdict.
  @spec build_result(list()) :: %{msg: atom(), check_results: list()}
  defp build_result(check_results) do
    %{msg: determine_status(check_results), check_results: check_results}
  end

  # The system is healthy when every check returned `true` or `:ok`;
  # an empty check list counts as healthy. `Enum.all?/2` short-circuits
  # on the first failing result, like the original recursion did.
  @spec determine_status(list()) :: atom()
  defp determine_status(check_results) do
    if Enum.all?(check_results, fn {_name, result} -> result in [true, :ok] end) do
      :healthy
    else
      :unhealthy
    end
  end

  # Runs all checks and stores the result with a fresh timestamp.
  @spec get_status(ExHealth.Status.t()) :: ExHealth.Status.t()
  defp get_status(%ExHealth.Status{checks: checks} = status) do
    Map.merge(status, %{
      last_check: DateTime.utc_now(),
      result: build_result(perform_checks(checks))
    })
  end

  @spec schedule_update(non_neg_integer) :: reference()
  defp schedule_update(interval_ms) do
    Process.send_after(self(), :perform_check, interval_ms)
  end

  # Executes each check's MFA, preserving order. The original hand-rolled
  # recursion appended with `results ++ [res]` (O(n²)); `Enum.map/2` is O(n).
  @spec perform_checks(list()) :: list()
  defp perform_checks(checks) do
    Enum.map(checks, fn %ExHealth.Check{name: name, mfa: {m, f, a}} ->
      {name, apply(m, f, a)}
    end)
  end
end
|
lib/ex_health/health_server.ex
| 0.852399 | 0.443299 |
health_server.ex
|
starcoder
|
defmodule StellarBase.XDR.OfferEntry do
  @moduledoc """
  Representation of Stellar `OfferEntry` type.

  An offer is the building block of the offer book, they are automatically
  claimed by payments when the price set by the owner is met.

  For example an Offer is selling 10A where 1A is priced at 1.5B
  """
  alias StellarBase.XDR.{AccountID, Asset, Int64, Price}
  alias StellarBase.XDR.Ext

  @behaviour XDR.Declaration

  @struct_spec XDR.Struct.new(
                 seller_id: AccountID,
                 offer_id: Int64,
                 selling: Asset,
                 buying: Asset,
                 amount: Int64,
                 price: Price,
                 ext: Ext
               )

  @type t :: %__MODULE__{
          seller_id: AccountID.t(),
          offer_id: Int64.t(),
          selling: Asset.t(),
          buying: Asset.t(),
          amount: Int64.t(),
          price: Price.t(),
          ext: Ext.t()
        }

  defstruct [:seller_id, :offer_id, :selling, :buying, :amount, :price, :ext]

  @doc """
  Builds an `OfferEntry` from its already-typed XDR components.
  """
  @spec new(
          seller_id :: AccountID.t(),
          offer_id :: Int64.t(),
          selling :: Asset.t(),
          buying :: Asset.t(),
          amount :: Int64.t(),
          price :: Price.t(),
          ext :: Ext.t()
        ) ::
          t()
  def new(
        %AccountID{} = seller_id,
        %Int64{} = offer_id,
        %Asset{} = selling,
        %Asset{} = buying,
        %Int64{} = amount,
        %Price{} = price,
        %Ext{} = ext
      ),
      do: %__MODULE__{
        seller_id: seller_id,
        offer_id: offer_id,
        selling: selling,
        buying: buying,
        amount: amount,
        price: price,
        ext: ext
      }

  @impl true
  def encode_xdr(%__MODULE__{} = offer_entry) do
    offer_entry
    |> components()
    |> XDR.Struct.new()
    |> XDR.Struct.encode_xdr()
  end

  @impl true
  def encode_xdr!(%__MODULE__{} = offer_entry) do
    offer_entry
    |> components()
    |> XDR.Struct.new()
    |> XDR.Struct.encode_xdr!()
  end

  @impl true
  def decode_xdr(bytes, struct \\ @struct_spec)

  def decode_xdr(bytes, struct) do
    case XDR.Struct.decode_xdr(bytes, struct) do
      {:ok,
       {%XDR.Struct{
          components: [
            seller_id: seller_id,
            offer_id: offer_id,
            selling: selling,
            buying: buying,
            amount: amount,
            price: price,
            ext: ext
          ]
        }, rest}} ->
        {:ok, {new(seller_id, offer_id, selling, buying, amount, price, ext), rest}}

      error ->
        error
    end
  end

  @impl true
  def decode_xdr!(bytes, struct \\ @struct_spec)

  def decode_xdr!(bytes, struct) do
    {%XDR.Struct{
       components: [
         seller_id: seller_id,
         offer_id: offer_id,
         selling: selling,
         buying: buying,
         amount: amount,
         price: price,
         ext: ext
       ]
     }, rest} = XDR.Struct.decode_xdr!(bytes, struct)

    {new(seller_id, offer_id, selling, buying, amount, price, ext), rest}
  end

  # Keyword-list view of the struct in XDR wire order; the order must match
  # `@struct_spec`. Shared by `encode_xdr/1` and `encode_xdr!/1`, which
  # previously duplicated this construction inline.
  @spec components(t()) :: Keyword.t()
  defp components(%__MODULE__{
         seller_id: seller_id,
         offer_id: offer_id,
         selling: selling,
         buying: buying,
         amount: amount,
         price: price,
         ext: ext
       }) do
    [
      seller_id: seller_id,
      offer_id: offer_id,
      selling: selling,
      buying: buying,
      amount: amount,
      price: price,
      ext: ext
    ]
  end
end
|
lib/xdr/ledger_entries/offer_entry.ex
| 0.859516 | 0.430506 |
offer_entry.ex
|
starcoder
|
defmodule DiodeClient.Transaction do
  @moduledoc """
  Ethereum-style transactions for the Diode network: construction,
  RLP (de)serialization, secp256k1 signing/recovery, and hashing.
  """
  # alias DiodeClient.TransactionReceipt
  alias DiodeClient.{Base16, Hash, Rlp, Rlpx, Secp256k1, Transaction, Wallet}

  @enforce_keys [:chain_id]
  defstruct nonce: 1,
            gasPrice: 0,
            gasLimit: 0,
            to: nil,
            value: 0,
            chain_id: nil,
            signature: nil,
            init: nil,
            data: nil

  @type t :: %Transaction{}

  # Simple field accessors; a nil `:data` normalizes to "".
  def nonce(%Transaction{nonce: nonce}), do: nonce
  def data(%Transaction{data: nil}), do: ""
  def data(%Transaction{data: data}), do: data
  def gas_price(%Transaction{gasPrice: gas_price}), do: gas_price
  def gas_limit(%Transaction{gasLimit: gas_limit}), do: gas_limit
  def value(%Transaction{value: val}), do: val
  def signature(%Transaction{signature: sig}), do: sig

  # Contract creations (to: nil) carry code in `:init`; calls carry it in `:data`.
  def payload(%Transaction{to: nil, init: nil}), do: ""
  def payload(%Transaction{to: nil, init: init}), do: init
  def payload(%Transaction{data: nil}), do: ""
  def payload(%Transaction{data: data}), do: data

  # For contract creations the destination is the derived contract address.
  def to(%Transaction{to: nil} = tx), do: new_contract_address(tx)
  def to(%Transaction{to: to}), do: to
  def chain_id(%Transaction{chain_id: chain_id}), do: chain_id

  @doc """
  Decodes an RLP-encoded transaction binary into a `Transaction` struct.
  """
  @spec from_rlp(binary()) :: Transaction.t()
  def from_rlp(bin) do
    [nonce, gas_price, gas_limit, to, value, init, rec, r, s] = Rlp.decode!(bin)
    to = Rlpx.bin2addr(to)

    %Transaction{
      nonce: Rlpx.bin2uint(nonce),
      gasPrice: Rlpx.bin2uint(gas_price),
      gasLimit: Rlpx.bin2uint(gas_limit),
      to: to,
      value: Rlpx.bin2uint(value),
      init: if(to == nil, do: init, else: nil),
      data: if(to != nil, do: init, else: nil),
      signature: Secp256k1.rlp_to_bitcoin(rec, r, s),
      chain_id: Secp256k1.chain_id(rec)
    }
  end

  @doc """
  Prints a human-readable summary of the transaction to stdout.
  """
  @spec print(DiodeClient.Transaction.t()) :: :ok
  def print(tx) do
    hash = Base16.encode(hash(tx))
    from = Base16.encode(from(tx))
    to = Base16.encode(to(tx))
    type = Atom.to_string(type(tx))
    value = value(tx)
    code = Base16.encode(payload(tx))

    # Bug fix: the original `if` had no `else` branch, so any payload of
    # 40 characters or fewer became `nil` and printed as an empty string.
    code =
      if byte_size(code) > 40 do
        binary_part(code, 0, 37) <> "... [#{byte_size(code)}]"
      else
        code
      end

    IO.puts("")
    IO.puts("\tTransaction: #{hash} Type: #{type}")
    IO.puts("\tFrom: #{from} To: #{to}")
    IO.puts("\tValue: #{value} Code: #{code}")
    # rlp = to_rlp(tx) |> Rlp.encode!()
    # IO.puts("\tRLP: #{Base16.encode(rlp)}")
    :ok
  end

  @spec valid?(DiodeClient.Transaction.t()) :: boolean()
  def valid?(tx) do
    validate(tx) == true
  end

  @spec type(DiodeClient.Transaction.t()) :: :call | :create
  def type(tx) do
    if contract_creation?(tx) do
      :create
    else
      :call
    end
  end

  # Returns `true`, or `{step_number, failing_value}` for the first failed check.
  @spec validate(DiodeClient.Transaction.t()) :: true | {non_neg_integer(), any()}
  def validate(tx) do
    with {1, %Transaction{}} <- {1, tx},
         {2, 65} <- {2, byte_size(signature(tx))},
         {4, true} <- {4, value(tx) >= 0},
         {5, true} <- {5, gas_price(tx) >= 0},
         {6, true} <- {6, gas_limit(tx) >= 0},
         {7, true} <- {7, byte_size(payload(tx)) >= 0} do
      true
    else
      {nr, error} -> {nr, error}
    end
  end

  @spec contract_creation?(DiodeClient.Transaction.t()) :: boolean()
  def contract_creation?(%Transaction{to: to}) do
    to == nil
  end

  # Derives the created contract's address: keccak256(rlp([sender, nonce]))
  # truncated to an address. Returns nil for plain calls.
  @spec new_contract_address(DiodeClient.Transaction.t()) :: binary()
  def new_contract_address(%Transaction{to: to}) when to != nil do
    nil
  end

  def new_contract_address(%Transaction{nonce: nonce} = tx) do
    address = Wallet.address!(origin(tx))

    Rlp.encode!([address, nonce])
    |> Hash.keccak_256()
    |> Hash.to_address()
  end

  @spec to_rlp(DiodeClient.Transaction.t()) :: [...]
  def to_rlp(tx) do
    [tx.nonce, gas_price(tx), gas_limit(tx), tx.to, tx.value, payload(tx)] ++
      Secp256k1.bitcoin_to_rlp(tx.signature, tx.chain_id)
  end

  @spec from(DiodeClient.Transaction.t()) :: <<_::160>>
  def from(tx) do
    Wallet.address!(origin(tx))
  end

  @spec recover(DiodeClient.Transaction.t()) :: binary()
  def recover(tx) do
    Secp256k1.recover!(signature(tx), to_message(tx), :kec)
  end

  # Fake signatures embed the sender address directly (unsigned/test txs).
  @spec origin(DiodeClient.Transaction.t()) :: Wallet.t()
  def origin(%Transaction{signature: {:fake, pubkey}}) do
    Wallet.from_address(pubkey)
  end

  def origin(tx) do
    recover(tx) |> Wallet.from_pubkey()
  end

  @spec sign(DiodeClient.Transaction.t(), <<_::256>>) :: DiodeClient.Transaction.t()
  def sign(tx = %Transaction{}, priv) do
    %{tx | signature: Secp256k1.sign(priv, to_message(tx), :kec)}
  end

  # Fake-signed transactions cannot be RLP-encoded, so hash the message form.
  @spec hash(Transaction.t()) :: binary()
  def hash(tx = %Transaction{signature: {:fake, _pubkey}}) do
    to_message(tx) |> Hash.sha3_256()
  end

  def hash(tx) do
    to_rlp(tx) |> Rlp.encode!() |> Hash.sha3_256()
  end

  @spec to_message(DiodeClient.Transaction.t()) :: binary()
  def to_message(tx = %Transaction{chain_id: chain_id}) when chain_id in [nil, 0] do
    # pre EIP-155 encoding; the original had two byte-identical clauses for
    # `nil` and `0`, merged here behind a guard.
    [tx.nonce, gas_price(tx), gas_limit(tx), tx.to, tx.value, payload(tx)]
    |> Rlp.encode!()
  end

  def to_message(tx = %Transaction{chain_id: chain_id}) do
    # EIP-155 encoding
    [tx.nonce, gas_price(tx), gas_limit(tx), tx.to, tx.value, payload(tx), chain_id, 0, 0]
    |> Rlp.encode!()
  end
end
|
lib/diode_client/transaction.ex
| 0.718199 | 0.476275 |
transaction.ex
|
starcoder
|
defmodule Cocktail.RuleState do
  @moduledoc false

  alias Cocktail.{Rule, Validation, Validation.Shift}

  @type t :: %__MODULE__{
          count: pos_integer | nil,
          until: Cocktail.time() | nil,
          validations: [Validation.t(), ...],
          current_time: Cocktail.time() | nil
        }

  @enforce_keys [:validations]
  defstruct count: nil,
            until: nil,
            validations: [],
            current_time: nil

  # Canonical evaluation order for a rule's validations.
  @validation_order [
    :base_sec,
    :second_of_minute,
    :base_min,
    :minute_of_hour,
    :base_hour,
    :hour_of_day,
    :time_of_day,
    :time_range,
    :base_wday,
    :day,
    :interval
  ]

  @spec new(Rule.t()) :: t
  def new(%Rule{} = rule) do
    %__MODULE__{
      count: rule.count,
      until: rule.until,
      validations: rule.validations |> sort_validations()
    }
  end

  # Orders validations by @validation_order, dropping absent keys. The
  # `validation = validations_map[key]` generator filter already rejects
  # `nil` (a missing key binds `nil`, which is falsy), so the original
  # extra `!is_nil(validation)` filter was redundant and is removed.
  @spec sort_validations(Validation.validations_map()) :: [Validation.t(), ...]
  defp sort_validations(validations_map) do
    for key <- @validation_order, validation = validations_map[key], do: validation
  end

  @spec next_time(t, Cocktail.time(), Cocktail.time()) :: t
  def next_time(%__MODULE__{validations: validations} = rule_state, current_time, start_time) do
    time = do_next_time(validations, current_time, start_time)
    new_state(rule_state, time)
  end

  # Re-runs the whole validation pipeline until one full pass makes no
  # change, i.e. the time is a fixed point satisfying every validation.
  @spec do_next_time([Validation.t()], Cocktail.time(), Cocktail.time()) :: Cocktail.time()
  defp do_next_time(validations, time, start_time) do
    case Enum.reduce(validations, {:no_change, time}, &next_time_for_validation(&1, &2, start_time)) do
      {:no_change, new_time} ->
        new_time

      {:change, new_time} ->
        do_next_time(validations, new_time, start_time)
    end
  end

  @spec next_time_for_validation(Validation.t(), Shift.result(), Cocktail.time()) :: Shift.result()
  defp next_time_for_validation(%mod{} = validation, {change, time}, start_time) do
    validation
    |> mod.next_time(time, start_time)
    |> mark_change(change)
  end

  # Clears current_time once the candidate time passes the rule's `:until` bound.
  @spec new_state(t, Cocktail.time()) :: t
  defp new_state(%__MODULE__{until: nil} = rule_state, time), do: %{rule_state | current_time: time}

  defp new_state(%__MODULE__{until: until} = rule_state, time) do
    if Timex.compare(until, time) == -1 do
      %{rule_state | current_time: nil}
    else
      %{rule_state | current_time: time}
    end
  end

  # `:change` is sticky: once either the current shift or the accumulated
  # state reports a change, the combined result is `:change`. This replaces
  # the original four-clause truth-table enumeration.
  @spec mark_change(Shift.result(), Shift.change_type()) :: Shift.result()
  defp mark_change({:change, time}, _previous), do: {:change, time}
  defp mark_change({:no_change, time}, :change), do: {:change, time}
  defp mark_change({:no_change, time}, :no_change), do: {:no_change, time}
end
|
lib/cocktail/rule_state.ex
| 0.790369 | 0.475971 |
rule_state.ex
|
starcoder
|
defmodule SoftRepo do
  @moduledoc """
  Contains get/delete functions to do a soft delete.

  Read functions exclude rows whose `deleted_at` is set unless the
  `with_thrash: true` option is given; `delete/2` and `delete_all/2`
  set `deleted_at` instead of removing rows unless `force: true` is given.
  """
  import Ecto.Changeset, only: [change: 2]
  import Ecto.Queryable, only: [to_query: 1]
  require Ecto.Query

  # NOTE(review): repo is resolved at compile time; config changes require
  # recompilation. Kept as-is to preserve existing behavior.
  @repo SoftRepo.Client.repo()

  @doc """
  Like `c:Ecto.Repo.all/2`, excluding soft-deleted rows unless
  `with_thrash: true` is passed.
  """
  def all(queryable, opts \\ []) do
    {include_thrash, opts} = pop_thrash_option(opts)

    queryable
    |> exclude_thrash(!include_thrash)
    |> @repo.all(opts)
  end

  @doc """
  Like `c:Ecto.Repo.get/3`, excluding soft-deleted rows unless
  `with_thrash: true` is passed.
  """
  def get(queryable, id, opts \\ []) do
    {include_thrash, opts} = pop_thrash_option(opts)

    queryable
    |> exclude_thrash(!include_thrash)
    |> @repo.get(id, opts)
  end

  @doc """
  Soft-deletes `struct` by setting `deleted_at`; pass `force: true`
  to delete the row for real.
  """
  def delete(struct, opts \\ []) do
    # The original only honored `:force` when it was the *sole* option
    # (`opts = [force: true]` exact match); `Keyword.pop/3` honors it in
    # any option list.
    {force, opts} = Keyword.pop(opts, :force, false)

    if force do
      @repo.delete(struct, opts)
    else
      @repo.update(change(struct, deleted_at: DateTime.utc_now()))
    end
  end

  @doc """
  Soft-deletes all rows matched by `queryable`; pass `force: true`
  to delete them for real.
  """
  def delete_all(queryable, opts \\ []) do
    {force, opts} = Keyword.pop(opts, :force, false)

    if force do
      @repo.delete_all(queryable, opts)
    else
      @repo.update_all(queryable, set: [deleted_at: DateTime.utc_now()])
    end
  end

  @doc """
  Restores a soft-deleted record by clearing `deleted_at`.
  """
  def restore(queryable, id) do
    changeset = change(@repo.get!(queryable, id), deleted_at: nil)
    @repo.update(changeset)
  end

  defdelegate aggregate(queryable, aggregate, field, opts), to: @repo
  defdelegate config(), to: @repo
  defdelegate delete!(struct, opts \\ []), to: @repo
  defdelegate get!(queryable, id, opts \\ []), to: @repo
  defdelegate get_by!(queryable, clauses, opts \\ []), to: @repo

  @doc """
  Like `c:Ecto.Repo.get_by/3`, excluding soft-deleted rows unless
  `with_thrash: true` is passed.
  """
  def get_by(queryable, clauses, opts \\ []) do
    {include_thrash, opts} = pop_thrash_option(opts)

    queryable
    |> exclude_thrash(!include_thrash)
    |> @repo.get_by(clauses, opts)
  end

  defdelegate in_transaction?(), to: @repo
  defdelegate init(arg0, config), to: @repo
  defdelegate insert!(struct, opts \\ []), to: @repo
  defdelegate insert(struct, opts \\ []), to: @repo
  defdelegate insert_all(schema_or_source, entries, opts \\ []), to: @repo
  defdelegate insert_or_update!(changeset, opts \\ []), to: @repo
  defdelegate insert_or_update(changeset, opts \\ []), to: @repo
  defdelegate one!(queryable, opts \\ []), to: @repo

  @doc """
  Like `c:Ecto.Repo.one/2`, excluding soft-deleted rows unless
  `with_thrash: true` is passed.
  """
  def one(queryable, opts \\ []) do
    {include_thrash, opts} = pop_thrash_option(opts)

    queryable
    |> exclude_thrash(!include_thrash)
    |> @repo.one(opts)
  end

  @doc """
  Scrivener pagination, excluding soft-deleted rows unless
  `with_thrash: true` is passed.
  """
  def paginate(queryable, opts \\ []) do
    {include_thrash, opts} = pop_thrash_option(opts)

    queryable
    |> exclude_thrash(!include_thrash)
    |> @repo.paginate(opts)
  end

  defdelegate preload(structs_or_struct_or_nil, preloads, opts \\ []), to: @repo
  defdelegate rollback(value), to: @repo
  defdelegate start_link(opts \\ []), to: @repo
  defdelegate stop(pid, timeout \\ 5000), to: @repo
  defdelegate transaction(fun_or_multi, opts \\ []), to: @repo
  defdelegate stream(queryable, opts), to: @repo
  defdelegate update!(struct, opts \\ []), to: @repo
  defdelegate update(struct, opts \\ []), to: @repo
  defdelegate update_all(queryable, updates, opts \\ []), to: @repo

  # Splits the `:with_thrash` flag (default false) from the remaining
  # repo options. Replaces the duplicated two-clause pattern that each
  # read function previously carried.
  defp pop_thrash_option(opts) do
    {Keyword.get(opts, :with_thrash, false), Keyword.drop(opts, [:with_thrash])}
  end

  defp schema_fields(%{from: {_source, schema}}) when schema != nil,
    do: schema.__schema__(:fields)

  # Schemaless queries expose no fields; the original raised
  # FunctionClauseError for them instead of passing the query through.
  defp schema_fields(_query), do: []

  defp field_exists?(queryable, column) do
    queryable
    |> to_query()
    |> schema_fields()
    |> Enum.member?(column)
  end

  # Adds the `deleted_at IS NULL` filter only when asked to exclude thrash
  # and the schema actually has a `deleted_at` column.
  defp exclude_thrash(queryable, exclude) do
    if exclude and field_exists?(queryable, :deleted_at) do
      Ecto.Query.where(queryable, fragment("deleted_at IS NULL"))
    else
      queryable
    end
  end
end
|
lib/soft_repo.ex
| 0.631026 | 0.491578 |
soft_repo.ex
|
starcoder
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.