code
stringlengths 114
1.05M
| path
stringlengths 3
312
| quality_prob
float64 0.5
0.99
| learning_prob
float64 0.2
1
| filename
stringlengths 3
168
| kind
stringclasses 1
value |
---|---|---|---|---|---|
defmodule Tarams.Schema do
  @moduledoc """
  This is only a specification to define a schema to use with `Tarams.Params.cast` and `Tarams.Contract.validate`

  Schema is just a map or keyword list that follow some simple conventions. Map's key is the field name and the value is a keyword list of field specifications.

  **Example**

  ```elixir
  %{
    name: [type: :string, format: ~r/\d{4}/],
    age: [type: :integer, number: [min: 15, max: 50]],
    skill: [type: {:array, :string}, length: [min: 1, max: 10]]
  }
  ```

  If you only specify type of data, you can write it shorthand style like this

  ```elixir
  %{
    name: :string,
    age: :integer,
    skill: {:array, :string}
  }
  ```

  ## I. Field type

  **Built-in types**

  A type could be any of built-in supported types:

  - `boolean`
  - `string` | `binary`
  - `integer`
  - `float`
  - `number` (integer or float)
  - `date`
  - `time`
  - `datetime` | `utc_datetime`: date time with time zone
  - `naive_datetime`: date time without time zone
  - `map`
  - `keyword`
  - `{array, type}` array of built-in type, all item must be the same type

  **Other types**

  Custom type may be supported depends on module.

  **Nested types**

  Nested types could be a another **schema** or list of **schema**

  ```elixir
  %{
    user: [type: %{
      name: [type: :string]
    }]
  }
  ```

  Or list of schema

  ```elixir
  %{
    users: [type: {:array, %{
      name: [type: :string]
    }} ]
  }
  ```

  **Alias**

  Alias allow set a new key for value when using with `Taram.cast`

  ```elixir
  schema = %{
    email: [type: :string, as: :user_email]
  }
  Tarams.cast(%{email: "<EMAIL>"}, schema)
  #> {:ok, %{user_email: "<EMAIL>"}}
  ```

  ## II. Field casting and default value

  These specifications is used for casting data with `Tarams.Params.cast`

  ### 1. Default value

  Is used when the given field is missing or nil.

  - Default could be a value

  ```elixir
  %{
    status: [type: :string, default: "active"]
  }
  ```

  - Or a `function/0`, this function will be invoke each time data is `casted`

  ```elixir
  %{
    published_at: [type: :datetime, default: &DateTime.utc_now/0]
  }
  ```

  ### 2. Custom cast function

  You can provide a function to cast field value instead of using default casting function by using
  `cast_func: <function/1>`

  ```elixir
  %{
    published_at: [type: :datetime, cast_func: &DateTime.from_iso8601/1]
  }
  ```

  ## III. Field validation

  **These validation are supported by `Tarams.Validator`**

  ### 1. Type validation

  Type specification above could be used for validating or casting data.

  ### 2. Numeric validation

  Support validating number value. These are list of supported validations:

  - `equal_to`
  - `greater_than_or_equal_to` | `min`
  - `greater_than`
  - `less_than`
  - `less_than_or_equal_to` | `max`

  Define validation: `number: [<name>: <value>, ...]`

  **Example**

  ```elixir
  %{
    age: [type: :integer, number: [min: 1, max: 100]]
  }
  ```

  ### 3. Length validation

  Validate length of supported types include `string`, `array`, `map`, `tuple`, `keyword`.
  Length condions are the same with **Numeric validation**

  Define validation: `length: [<name>: <value>, ...]`

  **Example**

  ```elixir
  %{
    skills: [type: {:array, :string}, length: [min: 0, max: 5]]
  }
  ```

  ### 4. Format validation

  Check if a string match a given pattern.

  Define validation: `format: <Regex>`

  **Example**

  ```elixir
  %{
    year: [type: :string, format: ~r/year:\s\d{4}/]
  }
  ```

  ### 5. Inclusion and exclusion validation

  Check if value is included or not included in given enumerable (`array`, `map`, or `keyword`)

  Define validation: `in: <enumerable>` or `not_in: <enumerable>`

  **Example**

  ```elixir
  %{
    status: [type: :string, in: ["active", "inactive"]],
    selected_option: [type: :integer, not_in: [2,3]]
  }
  ```

  ### 6. Custom validation function

  You can provide a function to validate the value.

  Define validation: `func: <function>`

  Function must be follow this signature

  ```elixir
  @spec func(value::any()) :: :ok | {:error, message::String.t()}
  ```
  """

  @doc """
  Expand short-hand type syntax to full syntax

      field: :string -> field: [type: :string]
      field: {:array, :string} -> field: [type: {:array, :string}]
      field: %{#embedded} -> field: [type: %{#embedded}]
  """
  @spec expand(map()) :: map()
  def expand(schema) do
    # Expand every field spec independently, then rebuild the schema as a map.
    schema
    |> Enum.map(&expand_field/1)
    |> Enum.into(%{})
  end

  # Shorthand: `field: :string` or `field: %{...}` (nested schema) is wrapped
  # into `field: [type: ...]` and re-processed by the generic clause below.
  defp expand_field({field, type}) when is_atom(type) or is_map(type) do
    expand_field({field, [type: type]})
  end

  # Shorthand: `field: {:array, type}` — the inner type may itself be a nested
  # schema, so it is expanded too.
  defp expand_field({field, {:array, type}}) do
    {field, [type: {:array, expand_type(type)}]}
  end

  # Full keyword-list spec: expand a nested schema under :type (if present)
  # and normalize the :default option.
  defp expand_field({field, attrs}) do
    attrs =
      if attrs[:type] do
        Keyword.put(attrs, :type, expand_type(attrs[:type]))
      else
        attrs
      end

    # NOTE(review): :default is written unconditionally, so fields without an
    # explicit default end up with `default: nil` after expansion.
    attrs = Keyword.put(attrs, :default, expand_default(attrs[:default]))
    {field, attrs}
  end

  # expand nested schema
  defp expand_type(%{} = type) do
    expand(type)
  end

  defp expand_type(type), do: type

  # NOTE(review): a function/0 default is invoked once here, at expansion
  # time — not on every cast as the moduledoc suggests. Confirm which
  # behavior is intended before relying on per-cast defaults.
  defp expand_default(default) when is_function(default, 0) do
    default.()
  end

  defp expand_default(default), do: default
end
|
lib/schema.ex
| 0.892027 | 0.967625 |
schema.ex
|
starcoder
|
require Record
defmodule KidsChain.KChain do
  @moduledoc """
  A set of functions for working with invite tree.

  ## Example

      iex> alias KidsChain.KChain
      iex> KChain.insert(1, 0)
      {:ok, 1}
      iex> KChain.insert(2, 1)
      {:ok, 2}
      iex> KChain.insert(3, 1)
      {:ok, 2}
      iex> KChain.insert(4, 3)
      {:ok, 3}
      iex> KChain.leader()
      {:ok, 4, 3}
      iex> KChain.leader(2)
      {:ok, 2, 2}
      iex> KChain.chain()
      {:ok, [4, 3, 1]}
      iex> KChain.chain(2)
      {:ok, [2, 1]}
  """
  @type id :: integer
  @type parent :: id
  @type leader :: id
  @type depth :: integer

  use GenServer

  # Server state: the external "kchain" port, a FIFO queue of callers waiting
  # for a reply, a sequence counter, and a buffer accumulating partial lines
  # read from the port (filled by the :noeol clause of handle_info/2).
  Record.defrecordp(:state, port: nil, pids: :queue.new(), seq: 0, data: "")

  @doc """
  Starts a KChain process linked to the current process.
  """
  def start_link(_opts) do
    GenServer.start_link(__MODULE__, [], name: __MODULE__)
  end

  @doc false
  def init(_opts) do
    # NOTE(review): System.find_executable/1 returns nil when "kchain" is not
    # on PATH, which makes Port.open/2 raise — confirm that crashing at
    # startup is the desired behavior. The port speaks a line-oriented
    # protocol (lines up to 256 bytes).
    path = System.find_executable("kchain")
    port = Port.open({:spawn_executable, path}, [:binary, {:line, 256}])
    {:ok, state(port: port)}
  end

  @doc """
  Inserts a node into invite tree.
  """
  @spec insert(id, parent) :: {:ok, depth} | :error
  def insert(id, parent) do
    command(["i", id, parent], fn [depth] -> {:ok, depth} end)
  end

  @doc """
  Returns the current chain leader by given `id`.
  """
  @spec leader(id) :: {:ok, leader, depth} | :error
  def leader(id \\ 0) do
    command(["l", id], fn [leader, depth] -> {:ok, leader, depth} end)
  end

  @doc """
  Returns the current chain by given `id`.
  """
  @spec chain(id, integer) :: {:ok, list} | :error
  def chain(id \\ 0, limit \\ 25) do
    # A result id of 0 is a filler emitted by the external program, so it is
    # dropped from the returned chain.
    command(["c", id, limit], fn ids -> {:ok, List.delete(ids, 0)} end)
  end

  @doc false
  def handle_call({:cmd, cmd}, from, state(port: port, pids: pids) = s) do
    # The port replies asynchronously, so the caller is parked in the queue
    # and answered from handle_info/2 when a full line arrives. Replies are
    # matched to callers in FIFO order.
    if Port.command(port, cmd) do
      {:noreply, state(s, pids: :queue.in(from, pids))}
    else
      {:reply, :error, s}
    end
  end

  @doc false
  def handle_info({port, {:data, {:eol, value}}}, state(port: port, pids: pids, data: data) = s) do
    # A complete line: prepend any buffered partial data, then parse it as
    # space-separated integers. A leading 0 marks success; anything else is
    # an error. NOTE(review): String.to_integer/1 raises on non-numeric
    # output from the port — confirm the external program can never emit it.
    res =
      case String.split(data <> value, " ") |> Enum.map(&String.to_integer/1) do
        [0 | items] -> {:ok, items}
        _ -> :error
      end

    {{:value, pid}, pids} = :queue.out(pids)
    GenServer.reply(pid, res)
    {:noreply, state(s, pids: pids, data: "")}
  end

  @doc false
  def handle_info({port, {:data, {:noeol, value}}}, state(port: port, data: data) = s) do
    # Partial line (longer than the 256-byte line buffer): keep accumulating.
    {:noreply, state(s, data: data <> value)}
  end

  # Makes a synchronous command call to the server and waits for its reply.
  # If command success, the `fun` is executed with result, returning its result
  defp command(args, fun) when is_list(args) and is_function(fun) do
    cmd = Enum.join(args, " ") <> "\n"

    with {:ok, items} <- GenServer.call(__MODULE__, {:cmd, cmd}) do
      fun.(items)
    end
  end
end
|
lib/kids_chain/kchain.ex
| 0.861844 | 0.475605 |
kchain.ex
|
starcoder
|
defmodule AWS.CloudTrail do
  @moduledoc """
  AWS CloudTrail

  This is the CloudTrail API Reference. It provides descriptions of actions,
  data types, common parameters, and common errors for CloudTrail.

  CloudTrail is a web service that records AWS API calls for your AWS account
  and delivers log files to an Amazon S3 bucket. The recorded information
  includes the identity of the user, the start time of the AWS API call, the
  source IP address, the request parameters, and the response elements
  returned by the service.

  <note> As an alternative to the API, you can use one of the AWS SDKs, which
  consist of libraries and sample code for various programming languages and
  platforms (Java, Ruby, .NET, iOS, Android, etc.). The SDKs provide a
  convenient way to create programmatic access to AWSCloudTrail. For example,
  the SDKs take care of cryptographically signing requests, managing errors,
  and retrying requests automatically. For information about the AWS SDKs,
  including how to download and install them, see the [Tools for Amazon Web
  Services page](http://aws.amazon.com/tools/).

  </note> See the [AWS CloudTrail User
  Guide](http://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudtrail-user-guide.html)
  for information about the data that is included with each AWS API call
  listed in the log files.
  """

  # Every public function below is a thin wrapper that forwards its `input`
  # map to the shared `request/4` helper with the matching CloudTrail action
  # name; only the documentation differs between them.

  @doc """
  Adds one or more tags to a trail, up to a limit of 50. Tags must be unique
  per trail. Overwrites an existing tag's value when a new value is specified
  for an existing tag key. If you specify a key without a value, the tag will
  be created with the specified key and a value of null. You can tag a trail
  that applies to all regions only from the region in which the trail was
  created (that is, from its home region).
  """
  def add_tags(client, input, options \\ []) do
    request(client, "AddTags", input, options)
  end

  @doc """
  Creates a trail that specifies the settings for delivery of log data to an
  Amazon S3 bucket. A maximum of five trails can exist in a region,
  irrespective of the region in which they were created.
  """
  def create_trail(client, input, options \\ []) do
    request(client, "CreateTrail", input, options)
  end

  @doc """
  Deletes a trail. This operation must be called from the region in which the
  trail was created. `DeleteTrail` cannot be called on the shadow trails
  (replicated trails in other regions) of a trail that is enabled in all
  regions.
  """
  def delete_trail(client, input, options \\ []) do
    request(client, "DeleteTrail", input, options)
  end

  @doc """
  Retrieves settings for the trail associated with the current region for
  your account.
  """
  def describe_trails(client, input, options \\ []) do
    request(client, "DescribeTrails", input, options)
  end

  @doc """
  Describes the settings for the event selectors that you configured for your
  trail. The information returned for your event selectors includes the
  following:

  <ul> <li> The S3 objects that you are logging for data events.

  </li> <li> If your event selector includes management events.

  </li> <li> If your event selector includes read-only events, write-only
  events, or all.

  </li> </ul> For more information, see [Configuring Event Selectors for
  Trails](http://docs.aws.amazon.com/awscloudtrail/latest/userguide/create-event-selectors-for-a-trail.html)
  in the *AWS CloudTrail User Guide*.
  """
  def get_event_selectors(client, input, options \\ []) do
    request(client, "GetEventSelectors", input, options)
  end

  @doc """
  Returns a JSON-formatted list of information about the specified trail.
  Fields include information on delivery errors, Amazon SNS and Amazon S3
  errors, and start and stop logging times for each trail. This operation
  returns trail status from a single region. To return trail status from all
  regions, you must call the operation on each region.
  """
  def get_trail_status(client, input, options \\ []) do
    request(client, "GetTrailStatus", input, options)
  end

  @doc """
  Returns all public keys whose private keys were used to sign the digest
  files within the specified time range. The public key is needed to validate
  digest files that were signed with its corresponding private key.

  <note> CloudTrail uses different private/public key pairs per region. Each
  digest file is signed with a private key unique to its region. Therefore,
  when you validate a digest file from a particular region, you must look in
  the same region for its corresponding public key.

  </note>
  """
  def list_public_keys(client, input, options \\ []) do
    request(client, "ListPublicKeys", input, options)
  end

  @doc """
  Lists the tags for the trail in the current region.
  """
  def list_tags(client, input, options \\ []) do
    request(client, "ListTags", input, options)
  end

  @doc """
  Looks up API activity events captured by CloudTrail that create, update, or
  delete resources in your account. Events for a region can be looked up for
  the times in which you had CloudTrail turned on in that region during the
  last seven days. Lookup supports the following attributes:

  <ul> <li> Event ID

  </li> <li> Event name

  </li> <li> Resource name

  </li> <li> Resource type

  </li> <li> User name

  </li> </ul> All attributes are optional. The default number of results
  returned is 10, with a maximum of 50 possible. The response includes a
  token that you can use to get the next page of results.

  <important> The rate of lookup requests is limited to one per second per
  account. If this limit is exceeded, a throttling error occurs.

  </important> <important> Events that occurred during the selected time
  range will not be available for lookup if CloudTrail logging was not
  enabled when the events occurred.

  </important>
  """
  def lookup_events(client, input, options \\ []) do
    request(client, "LookupEvents", input, options)
  end

  @doc """
  Configures an event selector for your trail. Use event selectors to specify
  the type of events that you want your trail to log. When an event occurs in
  your account, CloudTrail evaluates the event selectors in all trails. For
  each trail, if the event matches any event selector, the trail processes
  and logs the event. If the event doesn't match any event selector, the
  trail doesn't log the event.

  Example

  <ol> <li> You create an event selector for a trail and specify that you
  want write-only events.

  </li> <li> The EC2 `GetConsoleOutput` and `RunInstances` API operations
  occur in your account.

  </li> <li> CloudTrail evaluates whether the events match your event
  selectors.

  </li> <li> The `RunInstances` is a write-only event and it matches your
  event selector. The trail logs the event.

  </li> <li> The `GetConsoleOutput` is a read-only event but it doesn't match
  your event selector. The trail doesn't log the event.

  </li> </ol> The `PutEventSelectors` operation must be called from the
  region in which the trail was created; otherwise, an
  `InvalidHomeRegionException` is thrown.

  You can configure up to five event selectors for each trail. For more
  information, see [Configuring Event Selectors for
  Trails](http://docs.aws.amazon.com/awscloudtrail/latest/userguide/create-event-selectors-for-a-trail.html)
  in the *AWS CloudTrail User Guide*.
  """
  def put_event_selectors(client, input, options \\ []) do
    request(client, "PutEventSelectors", input, options)
  end

  @doc """
  Removes the specified tags from a trail.
  """
  def remove_tags(client, input, options \\ []) do
    request(client, "RemoveTags", input, options)
  end

  @doc """
  Starts the recording of AWS API calls and log file delivery for a trail.
  For a trail that is enabled in all regions, this operation must be called
  from the region in which the trail was created. This operation cannot be
  called on the shadow trails (replicated trails in other regions) of a trail
  that is enabled in all regions.
  """
  def start_logging(client, input, options \\ []) do
    request(client, "StartLogging", input, options)
  end

  @doc """
  Suspends the recording of AWS API calls and log file delivery for the
  specified trail. Under most circumstances, there is no need to use this
  action. You can update a trail without stopping it first. This action is
  the only way to stop recording. For a trail enabled in all regions, this
  operation must be called from the region in which the trail was created, or
  an `InvalidHomeRegionException` will occur. This operation cannot be called
  on the shadow trails (replicated trails in other regions) of a trail
  enabled in all regions.
  """
  def stop_logging(client, input, options \\ []) do
    request(client, "StopLogging", input, options)
  end

  @doc """
  Updates the settings that specify delivery of log files. Changes to a trail
  do not require stopping the CloudTrail service. Use this action to
  designate an existing bucket for log delivery. If the existing bucket has
  previously been a target for CloudTrail log files, an IAM policy exists for
  the bucket. `UpdateTrail` must be called from the region in which the trail
  was created; otherwise, an `InvalidHomeRegionException` is thrown.
  """
  def update_trail(client, input, options \\ []) do
    request(client, "UpdateTrail", input, options)
  end

  # Signs and posts one JSON request to the CloudTrail endpoint, then decodes
  # the response:
  #   * 200 with empty body  -> {:ok, nil, response}
  #   * 200 with JSON body   -> {:ok, decoded, response}
  #   * any other status     -> {:error, {exception_type, message}} taken from
  #     the JSON error body ("__type" / "message" keys)
  #   * transport failure    -> {:error, %HTTPoison.Error{}}
  # NOTE(review): non-200 bodies are decoded with Poison.Parser.parse!/1,
  # which raises if the service ever returns a non-JSON error body.
  @spec request(map(), binary(), map(), list()) ::
          {:ok, Poison.Parser.t | nil, Poison.Response.t} |
          {:error, Poison.Parser.t} |
          {:error, HTTPoison.Error.t}
  defp request(client, action, input, options) do
    client = %{client | service: "cloudtrail"}
    host = get_host("cloudtrail", client)
    url = get_url(host, client)

    # CloudTrail uses the JSON 1.1 protocol; the action is carried in the
    # X-Amz-Target header, not in the URL.
    headers = [{"Host", host},
               {"Content-Type", "application/x-amz-json-1.1"},
               {"X-Amz-Target", "com.amazonaws.cloudtrail.v20131101.CloudTrail_20131101.#{action}"}]
    payload = Poison.Encoder.encode(input, [])
    headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)

    case HTTPoison.post(url, payload, headers, options) do
      {:ok, response=%HTTPoison.Response{status_code: 200, body: ""}} ->
        {:ok, nil, response}
      {:ok, response=%HTTPoison.Response{status_code: 200, body: body}} ->
        {:ok, Poison.Parser.parse!(body), response}
      {:ok, _response=%HTTPoison.Response{body: body}} ->
        error = Poison.Parser.parse!(body)
        exception = error["__type"]
        message = error["message"]
        {:error, {exception, message}}
      {:error, %HTTPoison.Error{reason: reason}} ->
        {:error, %HTTPoison.Error{reason: reason}}
    end
  end

  # Builds the endpoint host; the special region "local" short-circuits to
  # "localhost" (presumably for local development — verify against callers).
  defp get_host(endpoint_prefix, client) do
    if client.region == "local" do
      "localhost"
    else
      "#{endpoint_prefix}.#{client.region}.#{client.endpoint}"
    end
  end

  # Builds the request URL from the host plus the client's proto/port.
  defp get_url(host, %{:proto => proto, :port => port}) do
    "#{proto}://#{host}:#{port}/"
  end
end
|
lib/aws/cloud_trail.ex
| 0.888662 | 0.714267 |
cloud_trail.ex
|
starcoder
|
defmodule Ffaker.EnUs.PhoneNumber do
  @moduledoc"""
  Functions for US PhoneNumber data in English
  """
  use Ffaker
  import Ffaker, only: [numerify: 1]

  @doc"""
  Returns US phone number

  https://en.wikipedia.org/wiki/List_of_North_American_Numbering_Plan_area_codes

  ## Examples

      iex> Ffaker.EnUs.PhoneNumber.phone_number
      "1-201-345-6789 x235"
  """
  @spec phone_number() :: String.t
  def phone_number do
    # Optionally appends an extension (" x###" .. " x#####") and optionally
    # prefixes the country code "1-".
    suffix = [" x###", " x####", " x#####", ""] |> Enum.random |> numerify
    prefix = Enum.random(["1-", ""])
    "#{prefix}#{short_phone_number()}#{suffix}"
  end

  @doc"""
  Returns US area code

  https://en.wikipedia.org/wiki/List_of_North_American_Numbering_Plan_area_codes

  ## Examples

      iex> Ffaker.EnUs.PhoneNumber.area_code
      201
  """
  @spec area_code() :: non_neg_integer
  def area_code do
    # Picks from 201..999, excluding codes ending in 11 (e.g. 211, 311, 911).
    201..999
    |> Enum.filter(fn code -> rem(code, 100) != 11 end)
    |> Enum.random
  end

  @doc"""
  Returns US exchange code

  https://en.wikipedia.org/wiki/North_American_Numbering_Plan#Numbering_system

  ## Examples

      iex> Ffaker.EnUs.PhoneNumber.exchange_code
      201
  """
  @spec exchange_code() :: non_neg_integer
  def exchange_code do
    Enum.random(201..999)
  end

  @doc"""
  Returns short US short phone number

  ## Examples

      iex> Ffaker.EnUs.PhoneNumber.short_phone_number
      "201-345-6789"
  """
  @spec short_phone_number() :: String.t
  def short_phone_number do
    "#{area_code()}-#{exchange_code()}-#{numerify("####")}"
  end

  @doc"""
  Returns US phone calling code

  ## Examples

      iex> Ffaker.EnUs.PhoneNumber.phone_calling_code
      "+91"
  """
  @spec phone_calling_code() :: String.t
  def phone_calling_code do
    # ~F is a Ffaker sigil loading the "phone_calling_codes" data list.
    Enum.random(~F(phone_calling_codes))
  end

  @doc"""
  Returns US imei

  `numbers` supplies the serial portion appended to the fixed "00124500"
  prefix; a value whose string form is 6 digits yields a 15-digit IMEI.

  ## Examples

      iex> Ffaker.EnUs.PhoneNumber.imei(123456)
      "001245001234569"
  """
  @spec imei(non_neg_integer) :: String.t
  def imei(numbers) do
    base_digits = "00124500#{numbers}"

    # NOTE(review): this check digit is Luhn-like but nonstandard — it
    # doubles even-VALUED digits rather than digits in alternating
    # positions, so results differ from a true Luhn/IMEI check digit.
    check_digit =
      base_digits |> String.graphemes |> Enum.reduce(0, &base_reducer/2)

    "#{base_digits}#{10 - check_digit}"
  end

  # Folds one digit grapheme into the running check value: even-valued
  # digits are doubled, then the sum is reduced mod 10 with 0 mapped to 10
  # (keeping the accumulator in 1..10).
  defp base_reducer(x, acc) do
    num =
      case String.to_integer(x) do
        n when rem(n, 2) == 0 -> n * 2
        n -> n
      end

    case rem(acc + num, 10) do
      0 -> 10
      n -> n
    end
  end
end
|
lib/ffaker/en_us/phone_number.ex
| 0.778186 | 0.415254 |
phone_number.ex
|
starcoder
|
defmodule Oath.Stepper do
  @moduledoc false
  # A cursor over an enumerable used while conforming sequences against a
  # spec: `enum` holds the remaining (unconsumed) items and `index` counts
  # how many items have been consumed so far. The index is threaded through
  # the conform "route" so error problems can point at the offending element.
  import Oath.Utils, only: [proper_list?: 1, is_proper_list: 1]
  alias Oath.{ConformError, Spec}

  defstruct enum: [],
            index: 0

  @type t :: %__MODULE__{
          enum: Enumerable.t(),
          index: non_neg_integer
        }

  # Builds a stepper positioned at `index` over `enum`.
  @spec new(Enumerable.t(), non_neg_integer) :: t
  def new(enum \\ [], index \\ 0) do
    %__MODULE__{
      enum: enum,
      index: index
    }
  end

  # Wraps a value in a stepper (if it is not one already) and pushes the
  # current index onto the route.
  @spec init(t | Enumerable.t(), Spec.route()) :: {t, Spec.route()}
  def init(%__MODULE__{index: i} = stepper, route) do
    {stepper, [i | route]}
  end

  def init(enum, route) do
    {new(enum, 0), [0 | route]}
  end

  # Advances past the current element.
  # NOTE(review): `tl/1` assumes `enum` is a non-empty proper list at this
  # point; callers must not `inc/1` an empty stepper.
  @spec inc(t) :: t
  def inc(stepper) do
    stepper
    |> Map.update!(:index, &(&1 + 1))
    |> Map.update!(:enum, &tl/1)
  end

  # Finishes stepping: materializes the remaining items and pops the index
  # that `init/2` pushed onto the route.
  @spec terminate(t, Spec.route()) :: {list, Spec.route()}
  def terminate(stepper, [_ | route]) do
    {to_list(stepper), route}
  end

  @spec to_list(t | Enumerable.t()) :: list
  def to_list(stepper) do
    Enum.to_list(stepper.enum)
  end

  @spec empty?(t | Enumerable.t()) :: boolean
  def empty?(%__MODULE__{enum: enum}), do: Enum.empty?(enum)
  def empty?(enum), do: Enum.empty?(enum)

  # Repeatedly conforms elements of `rest` against `spec`, letting `fun`
  # decide after each step whether to continue, halt keeping/discarding the
  # last result, or propagate an error. Conformed values accumulate in
  # reverse and are re-reversed on exit.
  @spec conform_while(
          Spec.t(),
          Spec.path(),
          Spec.via(),
          Spec.route(),
          Spec.value(),
          (Spec.result(), Spec.t(), list -> :halt | {:cont | :halt, Spec.result()}),
          list
        ) :: Spec.result()
  def conform_while(spec, path, via, route, rest, fun, acc \\ []) do
    case fun.(conform(spec, path, via, route, rest), spec, acc) do
      # Stop without consuming the last result.
      :halt ->
        {:ok, :lists.reverse(acc), rest}

      # Stop, keeping the last conformed value.
      {:halt, {:ok, conformed, rest}} ->
        {:ok, :lists.reverse([conformed | acc]), rest}

      # Keep going with the advanced stepper in `rest`.
      {:cont, {:ok, conformed, rest}} ->
        conform_while(spec, path, via, route, rest, fun, [conformed | acc])

      # Error from either a :cont or :halt decision propagates as-is.
      {_, {:error, ps}} ->
        {:error, ps}
    end
  end

  # Conforms the NEXT element of `val` against `spec`. Non-list,
  # non-stepper values (including improper lists) produce a problem.
  @spec conform(Spec.t(), Spec.path(), Spec.via(), Spec.route(), Spec.value()) :: Spec.result()
  def conform(spec, path, via, route, %__MODULE__{} = val) do
    conform_impl(spec, path, via, init(val, route))
  end

  def conform(spec, path, via, route, val) when is_proper_list(val) do
    conform_impl(spec, path, via, init(val, route))
  end

  # Improper list: report against the proper_list?/1 predicate.
  def conform(_spec, path, via, route, val) when is_list(val) do
    {:error, [ConformError.new_problem(&proper_list?/1, path, via, route, val)]}
  end

  def conform(_spec, path, via, route, val) do
    {:error, [ConformError.new_problem(&is_list/1, path, via, route, val)]}
  end

  # Running out of input while the spec still expects an element is its own
  # problem (:insufficient_data), reported at the parent route.
  @spec conform_impl(Spec.t(), Spec.path(), Spec.via(), {t, Spec.route()}) :: Spec.result()
  defp conform_impl(spec, path, via, {%__MODULE__{enum: []}, [_ | route]}) do
    {:error, [ConformError.new_problem(spec, path, via, route, [], :insufficient_data)]}
  end

  defp conform_impl(spec, path, via, {stepper, route}) do
    case Spec.conform(spec, path, via, route, hd(stepper.enum)) do
      {:ok, conformed, _} -> {:ok, conformed, inc(stepper)}
      {:error, ps} -> {:error, ps}
    end
  end

  defimpl Inspect do
    # Render a stepper as its underlying enumerable.
    def inspect(stepper, opts) do
      @protocol.inspect(stepper.enum, opts)
    end
  end
end
|
lib/oath/stepper.ex
| 0.76895 | 0.460835 |
stepper.ex
|
starcoder
|
defmodule DiscoveryApi.Stats.Completeness do
  @moduledoc """
  Calculates data completeness by folding rows of data into an accumulator of
  per-field non-empty counts, recursing through nested schema hierarchies.
  """

  @doc """
  A reducing function which accumulates completeness statistics for a row in a dataset.

  ## Parameters

    - dataset: The [SmartCity Dataset](https://github.com/smartcitiesdata/smart_city_data) for which stats are being accumulated
    - row: A single row of data as a map, e.g. `%{"id" => 1, "name" => "<NAME>"}`
    - dataset_stats: The statistics accumulator. This stores the running totals
      and is intended to be the accumulator in a reduce over many rows.
  """
  def calculate_stats_for_row(dataset, row, dataset_stats) do
    updated_fields = count_fields(dataset_stats, dataset, row)

    dataset_stats
    |> Map.update(:record_count, 1, &(&1 + 1))
    |> Map.put(:fields, updated_fields)
  end

  # Folds one row into the per-field counters held under :fields.
  defp count_fields(stats, dataset, row) do
    initial = Map.get(stats, :fields, %{})

    dataset.schema
    |> flattened_fields()
    |> Enum.reduce(initial, fn field, acc -> tally_field(acc, field, row) end)
  end

  # Depth-first, dot-joined list of %{name: "a.b.c", required: boolean} for
  # every entry (leaf and branch) in the schema.
  defp flattened_fields(schema) do
    schema
    |> Enum.flat_map(&collect_fields(&1, ""))
    |> Enum.map(&strip_leading_dot/1)
  end

  # Names are built as ".parent.child"; drop the leading separator.
  defp strip_leading_dot(%{name: name} = field) do
    %{field | name: String.slice(name, 1..(String.length(name) - 1))}
  end

  # Returns a flat list of entries for `field` and all of its descendants,
  # children first (depth-first), then the field itself for branch nodes.
  defp collect_fields(field, prefix) do
    name = prefix <> "." <> Map.get(field, :name)
    entry = %{name: name, required: Map.get(field, :required, false)}

    case Map.get(field, :subSchema) do
      nil -> [entry]
      sub_schema -> Enum.flat_map(sub_schema, &collect_fields(&1, name)) ++ [entry]
    end
  end

  # Adds this row's presence (0 or 1) for one field into the accumulator.
  defp tally_field(acc, %{name: name, required: required}, row) do
    increment = presence(row, lookup_path(name))

    Map.update(
      acc,
      name,
      %{required: required, count: increment},
      fn %{count: count} = existing -> %{existing | count: count + increment} end
    )
  end

  # "Parent.Child" -> ["parent", "child"] for get_in/2.
  defp lookup_path(name) do
    name
    |> String.split(".")
    |> Enum.map(&String.downcase/1)
  end

  # 1 when the field holds a meaningful value, 0 for nil or blank strings.
  defp presence(row, path) do
    case get_in(row, path) do
      nil -> 0
      value when is_binary(value) -> if String.trim(value) == "", do: 0, else: 1
      _ -> 1
    end
  end
end
|
apps/discovery_api/lib/discovery_api/stats/completeness.ex
| 0.756987 | 0.698933 |
completeness.ex
|
starcoder
|
defmodule Swiss.DateTime do
  @moduledoc """
  Extra helpers for working with `DateTime` values that are missing from the
  standard library and from Timex.
  """

  @doc """
  Helper method for getting "now" with second precision.
  """
  @spec second_utc_now() :: DateTime.t()
  def second_utc_now do
    DateTime.utc_now() |> DateTime.truncate(:second)
  end

  @doc """
  Returns the biggest (latest) of two dates.

  ## Examples

      iex> Swiss.DateTime.max(DateTime.from_unix!(1_577_664_000), DateTime.from_unix!(1_464_096_368))
      ~U[2019-12-30 00:00:00Z]
  """
  @spec max(DateTime.t(), DateTime.t()) :: DateTime.t()
  def max(date_1, date_2), do: max([date_1, date_2])

  @doc """
  Returns the biggest (latest) of the given list of dates.

  `nil` entries are tolerated and sorted last; a list of only `nil`s yields `nil`.

  ## Examples

      iex> Swiss.DateTime.max([DateTime.from_unix!(1_577_664_000), DateTime.from_unix!(2_464_096_360), DateTime.from_unix!(1_464_096_368)])
      ~U[2048-01-31 15:12:40Z]

      iex> Swiss.DateTime.max([DateTime.from_unix!(2_464_096_360), nil])
      ~U[2048-01-31 15:12:40Z]

      iex> Swiss.DateTime.max([nil, nil, ~U[2020-11-09 09:00:50Z]])
      ~U[2020-11-09 09:00:50Z]
  """
  @spec max([DateTime.t()]) :: DateTime.t()
  def max(dates) when is_list(dates) do
    dates
    |> Enum.sort(&latest_first/2)
    |> List.first()
  end

  @doc """
  Returns the smallest (earliest) of two dates.

  ## Examples

      iex> Swiss.DateTime.min(DateTime.from_unix!(1_577_664_000), DateTime.from_unix!(1_464_096_368))
      ~U[2016-05-24 13:26:08Z]
  """
  @spec min(DateTime.t(), DateTime.t()) :: DateTime.t()
  def min(date_1, date_2), do: min([date_1, date_2])

  @doc """
  Returns the smallest (earliest) of the given list of dates.

  `nil` entries are tolerated and sorted last; a list of only `nil`s yields `nil`.

  ## Examples

      iex> Swiss.DateTime.min([DateTime.from_unix!(1_577_664_000), DateTime.from_unix!(2_464_096_360), DateTime.from_unix!(1_464_096_368)])
      ~U[2016-05-24 13:26:08Z]

      iex> Swiss.DateTime.min([DateTime.from_unix!(2_464_096_360), nil])
      ~U[2048-01-31 15:12:40Z]

      iex> Swiss.DateTime.min([nil, nil, ~U[2020-11-09 09:00:50Z]])
      ~U[2020-11-09 09:00:50Z]
  """
  @spec min([DateTime.t()]) :: DateTime.t()
  def min(dates) when is_list(dates) do
    dates
    |> Enum.sort(&earliest_first/2)
    |> List.first()
  end

  @doc """
  Converts a ISO 8601 date into a DateTime and offset.

  This is a wrapper around `DateTime.from_iso8601/2` that raises on error.

  ## Examples

      iex> Swiss.DateTime.from_iso8601!("2015-01-23T23:50:07Z")
      {~U[2015-01-23 23:50:07Z], 0}
  """
  @spec from_iso8601!(String.t()) :: {DateTime.t(), integer()}
  def from_iso8601!(iso_date, calendar \\ Calendar.ISO) do
    with {:ok, dt, offset} <- DateTime.from_iso8601(iso_date, calendar) do
      {dt, offset}
    else
      {:error, error} -> raise error
    end
  end

  # Sort comparator placing later dates first and nils last.
  defp latest_first(_earlier, nil), do: true
  defp latest_first(nil, _later), do: false
  defp latest_first(a, b), do: DateTime.compare(a, b) != :lt

  # Sort comparator placing earlier dates first and nils last.
  defp earliest_first(_earlier, nil), do: true
  defp earliest_first(nil, _later), do: false
  defp earliest_first(a, b), do: DateTime.compare(a, b) == :lt
end
|
lib/swiss/date_time.ex
| 0.906814 | 0.447158 |
date_time.ex
|
starcoder
|
defmodule Conversor.ApiCurrency.Exchange do
  @moduledoc """
  Module responsible for making exchange between currencies.
  The most used function is `exchange/3`, which actually performs the exchanges.
  """
  use Tesla

  plug Tesla.Middleware.Headers, [{"content-type", "application/json"}]
  plug Tesla.Middleware.JSON

  plug Tesla.Middleware.Retry,
    delay: 500,
    max_retries: 5,
    max_delay: 10_000

  plug Tesla.Middleware.Timeout, timeout: 100_000

  alias Conversor.ApiCurrency.CurrencyList
  alias Conversor.Error
  alias Tesla.Env

  # NOTE(review): module attributes freeze these env vars at compile time;
  # read them at runtime instead if they may differ between environments.
  @api_key System.get_env("API_KEY", "<KEY>")
  @base_url System.get_env("BASE_URL", "http://apilayer.net/api/live")

  @doc """
  exchanges between two provided currencies.

  ## Parameters

    - from: origin currency.
    - to: destiny currency.
    - amount: the amount of money to be exchanged.

  ## Errors

  `%Conversor.Error{ error: "amount -1 must be equal or greater than 1",type: :bad_request}` = when amount is less than 1.

  `%Conversor.Error{error: "Currency XYZ is invalid", type: :bad_request}` = when currencies provided are not supported.

  ## Example

      iex> Exchange.exchange("BRL", "USD", 1)
      %{destiny_currency: 0.18406423400012442, rate: 0.18406423400012442}
  """
  @spec exchange(String.t(), String.t(), integer) :: map() | %Error{}
  def exchange(_, _, amount) when amount <= 0,
    do: Error.build("amount #{amount} must be equal or greater than 1", :bad_request)

  # Fast path: the API quotes everything against USD, so USD->BRL needs no
  # cross-rate arithmetic — the quote is the rate.
  def exchange("USD", "BRL", amount) do
    %{"USDBRL" => brl} = request_api("USD", "BRL")
    %{destiny_currency: brl * amount, rate: rate(brl * amount, amount)}
  end

  def exchange(from, to, amount) do
    result = request_api(from, to)

    case result do
      %Error{} = error ->
        error

      value ->
        # Cross rate via USD: (USD->to) / (USD->from) * amount.
        result = convert(value["USD#{from}"], value["USD#{to}"], amount)
        %{destiny_currency: result, rate: rate(result, amount)}
    end
  end

  @spec convert(number(), number(), number()) :: float()
  defp convert(from, to, amount), do: to / from * amount

  @spec rate(number(), number()) :: float()
  defp rate(value, amount), do: value / amount

  # Validates both currencies, then fetches USD-based quotes for them (plus
  # BRL, which the USD->BRL fast path relies on).
  @spec request_api(String.t(), String.t()) :: map() | %Error{}
  defp request_api(from, to) do
    case CurrencyList.validate_currencies(from, to) do
      %Error{} = error ->
        error

      :ok ->
        # Bug fix: the query string previously read "¤cies=" — the "&curren"
        # of "&currencies" had been swallowed by HTML-entity mangling — so the
        # currency list was never sent to the API.
        "#{@base_url}?access_key=#{@api_key}&currencies=#{from},#{to},BRL&source=USD&format=1"
        |> get()
        |> handle_request_api()
    end
  end

  # API-level errors arrive with a 200 status and an "error" body key.
  defp handle_request_api({:ok, %Env{body: %{"error" => error}}}),
    do: Error.build(error, :bad_request)

  defp handle_request_api({:ok, %Env{body: %{"quotes" => quotes}}}), do: quotes
end
|
lib/conversor/api_currency/exchange.ex
| 0.880283 | 0.410963 |
exchange.ex
|
starcoder
|
defmodule Hulaaki.Message do
@moduledoc """
Provides the structs and constructors for different kinds of message
packets in the MQTT protocol.
"""
defmodule Connect do
  @moduledoc """
  Struct for Hulaaki Connect

  ## Fields

    * `client_id` : A string(binary) representing the client.
    * `username` : A string(binary) representing the username.
    * `password` : A string(binary) representing the password.
    * `will_topic` : A string(binary) representing the will topic.
    * `will_message` : A string(binary) representing the will message.
    * `will_qos` : An integer of value either 0,1,2 representing the will qos.
    * `will_retain` : An integer of value either 0,1 representing the will retain.
    * `clean_session` : An integer of value either 0,1 representing whether the session is clean.
    * `keep_alive` : An integer representing the keep alive value in seconds.
  """
  # `type: :CONNECT` tags the packet kind and is fixed for every instance.
  defstruct [
    :client_id,
    :username,
    :password,
    :will_topic,
    :will_message,
    :will_qos,
    :will_retain,
    :clean_session,
    :keep_alive,
    type: :CONNECT
  ]
end
@doc """
Creates a Connect struct with the guards applied to the arguments.
"""
def connect(
client_id,
username,
password,
will_topic,
will_message,
will_qos,
will_retain,
clean_session,
keep_alive
)
when is_binary(client_id) and client_id > 0 and is_binary(username) and is_binary(password) and
is_binary(will_topic) and is_binary(will_message) and
(will_qos == 0 or will_qos == 1 or will_qos == 2) and
(will_retain == 0 or will_retain == 1) and (clean_session == 0 or clean_session == 1) and
is_integer(keep_alive) do
%Connect{
client_id: client_id,
username: username,
password: password,
will_topic: will_topic,
will_message: will_message,
will_qos: will_qos,
will_retain: will_retain,
clean_session: clean_session,
keep_alive: keep_alive
}
end
defmodule ConnAck do
@moduledoc """
Struct for Hulaaki ConnAck
## Fields
* `session_present` : An integer of value either 0,1 representing the session present.
* `return_code` : An integer of value either 0,1,2,3,4,5 representing the return code.
"""
defstruct [:session_present, :return_code, type: :CONNACK]
end
@doc """
Creates a ConnAck struct with the guards applied.
"""
def connect_ack(session_present, return_code)
when (session_present == 0 or session_present == 1) and
(return_code == 0 or return_code == 1 or return_code == 2 or return_code == 3 or
return_code == 4 or return_code == 5) do
%ConnAck{session_present: session_present, return_code: return_code}
end
defmodule Publish do
@moduledoc """
Struct for Hulaaki Publish
## Fields
* `packet_id` : An integer of value upto 65535 (2 bytes) representing packet identifier
* `topic` : A string(binary) representing the topic.
* `message` : A string(binary) representing the message.
* `dup` : An integer of value either 0,1 representing the dup bit.
* `qos` : An integer of value either 0,1,2 representing the qos bit.
* `retain` : An integer of value either 0,1 representing the retain bit.
"""
defstruct [:id, :topic, :message, :dup, :qos, :retain, type: :PUBLISH]
end
@doc """
Creates a Publish struct with the guards applied.
"""
def publish(packet_id, topic, message, dup, qos, retain)
when is_integer(packet_id) and packet_id > 0 and packet_id <= 65_535 and is_binary(topic) and
is_binary(message) and (dup == 0 or dup == 1) and (qos == 0 or qos == 1 or qos == 2) and
(retain == 0 or retain == 1) do
case qos do
0 ->
publish(topic, message, dup, qos, retain)
_ ->
%Publish{
id: packet_id,
topic: topic,
message: message,
dup: dup,
qos: qos,
retain: retain
}
end
end
@doc """
Creates a Publish struct with the guards applied.
"""
def publish(topic, message, dup, qos, retain)
when is_binary(topic) and is_binary(message) and (dup == 0 or dup == 1) and qos == 0 and
(retain == 0 or retain == 1) do
%Publish{topic: topic, message: message, dup: dup, qos: qos, retain: retain}
end
defmodule PubAck do
@moduledoc """
Struct for Hulaaki PubAck
## Fields
* `packet_id` : An integer of value upto 65535 (2 bytes) representing packet identifier
"""
defstruct [:id, type: :PUBACK]
end
@doc """
Creates a PubAck struct with the guards applied.
"""
def publish_ack(packet_id)
when is_integer(packet_id) and packet_id > 0 and packet_id <= 65_535 do
%PubAck{id: packet_id}
end
defmodule PubRec do
@moduledoc """
Struct for Hulaaki PubRec
## Fields
* `packet_id` : An integer of value upto 65535 (2 bytes) representing packet identifier
"""
defstruct [:id, type: :PUBREC]
end
@doc """
Creates a PubRec struct with the guards applied.
"""
def publish_receive(packet_id)
when is_integer(packet_id) and packet_id > 0 do
%PubRec{id: packet_id}
end
defmodule PubRel do
@moduledoc """
Struct for Hulaaki PubRel
## Fields
* `packet_id` : An integer of value upto 65535 (2 bytes) representing packet identifier
"""
defstruct [:id, type: :PUBREL]
end
@doc """
Creates a PubRel struct with the guards applied.
"""
def publish_release(packet_id)
when is_integer(packet_id) and packet_id > 0 and packet_id <= 65_535 do
%PubRel{id: packet_id}
end
defmodule PubComp do
@moduledoc """
Struct for Hulaaki PubComp
## Fields
* `packet_id` : An integer of value upto 65535 (2 bytes) representing packet identifier
"""
defstruct [:id, type: :PUBCOMP]
end
@doc """
Creates a PubComp struct with the guards applied.
"""
def publish_complete(packet_id)
when is_integer(packet_id) and packet_id > 0 and packet_id <= 65_535 do
%PubComp{id: packet_id}
end
defmodule Subscribe do
@moduledoc """
Struct for Hulaaki Subscribe
## Fields
* `packet_id` : An integer of value upto 65535 (2 bytes) representing packet identifier
* `topics` : A list of string(binary) representing various topics.
* `requested_qoses` : A list of integer of value 0,1,2 representing qoses.
"""
defstruct [:id, :topics, :requested_qoses, type: :SUBSCRIBE]
end
@doc """
Creates a Subscribe struct with the guards applied.
"""
def subscribe(packet_id, topics, requested_qoses)
when is_integer(packet_id) and packet_id > 0 and packet_id <= 65_535 and is_list(topics) and
is_list(requested_qoses) and length(requested_qoses) == length(topics) do
clean_topics = Enum.filter(topics, fn x -> is_binary(x) end)
valid_qos? = fn x -> x == 0 or x == 1 or x == 2 end
clean_qoses = Enum.filter(requested_qoses, valid_qos?)
%Subscribe{id: packet_id, topics: clean_topics, requested_qoses: clean_qoses}
end
defmodule SubAck do
@moduledoc """
Struct for Hulaaki SubAck
## Fields
* `packet_id` : An integer of value upto 65535 (2 bytes) representing packet identifier
* `granted_qoses` : A list of integer of value 0,1,2,128 representing qoses.
"""
defstruct [:id, :granted_qoses, type: :SUBACK]
end
@doc """
Creates a SubAck struct with the guards applied.
"""
def subscribe_ack(packet_id, granted_qoses)
when is_integer(packet_id) and packet_id > 0 and packet_id <= 65_535 and
is_list(granted_qoses) do
valid_qos? = fn x -> x == 0 or x == 1 or x == 2 or x == 128 end
clean_qoses = Enum.filter(granted_qoses, valid_qos?)
%SubAck{id: packet_id, granted_qoses: clean_qoses}
end
defmodule Unsubscribe do
@moduledoc """
Struct for Hulaaki Unsubscribe
## Fields
* `packet_id` : An integer of value upto 65535 (2 bytes) representing packet identifier
* `topics` : A list of string(binary) representing various topics.
"""
defstruct [:id, :topics, type: :UNSUBSCRIBE]
end
@doc """
Creates a Unsubscribe struct with the guards applied.
"""
def unsubscribe(packet_id, topics)
when is_integer(packet_id) and packet_id > 0 and is_list(topics) do
clean_topics = Enum.filter(topics, fn x -> is_binary(x) end)
%Unsubscribe{id: packet_id, topics: clean_topics}
end
defmodule UnsubAck do
@moduledoc """
Struct for Hulaaki UnsubAck
## Fields
* `packet_id` : An integer of value upto 65535 (2 bytes) representing packet identifier
"""
defstruct [:id, type: :UNSUBACK]
end
@doc """
Creates a UnsubAck struct with the guards applied.
"""
def unsubscribe_ack(packet_id)
when is_integer(packet_id) and packet_id > 0 do
%UnsubAck{id: packet_id}
end
defmodule PingReq do
@moduledoc """
Struct for Hulaaki PingReq
"""
defstruct type: :PINGREQ
end
@doc """
Creates a Pingreq struct.
"""
def ping_request do
%PingReq{}
end
defmodule PingResp do
@moduledoc """
Struct for Hulaaki PingResp
"""
defstruct type: :PINGRESP
end
@doc """
Creates a Pingresp struct.
"""
def ping_response do
%PingResp{}
end
defmodule Disconnect do
@moduledoc """
Struct for Hulaaki Disconnect
"""
defstruct type: :DISCONNECT
end
@doc """
Creates a Disconnect struct.
"""
def disconnect do
%Disconnect{}
end
end
|
lib/hulaaki/message.ex
| 0.835114 | 0.540681 |
message.ex
|
starcoder
|
defmodule MvOpentelemetry.Absinthe do
  @moduledoc false

  # Attaches OpenTelemetry spans to Absinthe's telemetry events: one span per
  # GraphQL operation, and (optionally) one span per field resolution.
  use MvOpentelemetry.SpanTracer,
    name: :graphql,
    events: [
      [:absinthe, :execute, :operation, :start],
      [:absinthe, :execute, :operation, :stop],
      [:absinthe, :execute, :operation, :exception],
      [:absinthe, :resolve, :field, :start],
      [:absinthe, :resolve, :field, :stop],
      [:absinthe, :resolve, :field, :exception]
    ]

  # Field-level events are noisy; they are only attached when the caller opts
  # in via `:include_field_resolution`.
  @field_resolution_events [
    [:absinthe, :resolve, :field, :start],
    [:absinthe, :resolve, :field, :stop],
    [:absinthe, :resolve, :field, :exception]
  ]

  # Attaches the telemetry handlers. Recognized options: `:prefix`, `:name`,
  # `:default_attributes` and `:include_field_resolution` (default `false`).
  def register_tracer(opts) do
    module_opts = __opts__()
    prefix = Access.get(opts, :prefix, module_opts[:name])
    name = Access.get(opts, :name, module_opts[:name])
    tracer_id = :mv_opentelemetry
    default_attributes = Access.get(opts, :default_attributes, [])
    include_field_resolution = Access.get(opts, :include_field_resolution, false)

    opts_with_defaults =
      opts
      |> merge_defaults(
        prefix: prefix,
        name: name,
        tracer_id: tracer_id,
        default_attributes: default_attributes
      )
      |> merge_default(:include_field_resolution, include_field_resolution)

    events =
      if include_field_resolution do
        module_opts[:events]
      else
        module_opts[:events] -- @field_resolution_events
      end

    :telemetry.attach_many(
      {name, __MODULE__},
      events,
      &__MODULE__.handle_event/4,
      opts_with_defaults
    )
  end

  @spec handle_event([atom()], map(), map(), Access.t()) :: :ok
  def handle_event([:absinthe, :resolve, :field, :start], _measurements, meta, opts) do
    event_name = [opts[:prefix]] ++ [:resolve, :field]
    # FIX: `resolution = meta.resolution` was previously bound twice in this
    # clause; the redundant second binding has been removed.
    resolution = meta.resolution

    # Append the field name to the span name when it is available.
    event_name =
      case resolution.definition.name do
        x when is_bitstring(x) ->
          Enum.join(event_name ++ [x], ".")

        _ ->
          Enum.join(event_name, ".")
      end

    attributes = [
      {"graphql.field.name", resolution.definition.name},
      {"graphql.field.schema", resolution.schema}
    ]

    attributes = attributes ++ opts[:default_attributes]

    OpentelemetryTelemetry.start_telemetry_span(opts[:tracer_id], event_name, meta, %{})
    |> Span.set_attributes(attributes)

    :ok
  end

  def handle_event([:absinthe, :execute, :operation, :start], _measurements, meta, opts) do
    event_name = Enum.join([opts[:prefix]] ++ [:execute, :operation], ".")
    attributes = [{"graphql.operation.input", meta.blueprint.input}] ++ opts[:default_attributes]

    OpentelemetryTelemetry.start_telemetry_span(opts[:tracer_id], event_name, meta, %{})
    |> Span.set_attributes(attributes)

    :ok
  end

  def handle_event([:absinthe, :resolve, :field, :stop], _measurements, meta, opts) do
    resolution = meta.resolution
    ctx = OpentelemetryTelemetry.set_current_telemetry_span(opts[:tracer_id], meta)
    attributes = [{"graphql.field.state", resolution.state}]
    Span.set_attributes(ctx, attributes)
    OpentelemetryTelemetry.end_telemetry_span(opts[:tracer_id], meta)
    :ok
  end

  def handle_event([:absinthe, :execute, :operation, :stop], _measurements, meta, opts) do
    ctx = OpentelemetryTelemetry.set_current_telemetry_span(opts[:tracer_id], meta)
    attributes = [{"graphql.operation.schema", meta.blueprint.schema}]
    Span.set_attributes(ctx, attributes)
    OpentelemetryTelemetry.end_telemetry_span(opts[:tracer_id], meta)
    :ok
  end
end
|
lib/mv_opentelemetry/absinthe.ex
| 0.67854 | 0.409988 |
absinthe.ex
|
starcoder
|
defmodule User do
  use Calculus
  require Record

  @moduledoc """
  OOP-like `User` data type example.
  Internal representation of the `state` is record,
  but it's completely hidden inside this module.
  This data type have:
  - public mutable `name` field (`get_name/1`, `set_name/2` methods)
  - protected immutable `id` field (`get_id/1` method)
  - private `balance` field (used internally in `deposit/2` and `withdraw/2` methods)
  """

  # Private record backing the opaque state; never leaks outside this module.
  Record.defrecordp(:user,
    id: nil,
    name: nil,
    balance: nil
  )

  # `defcalculus` (from the Calculus library) defines the message dispatcher:
  # each clause matches a message sent via `eval/2` and yields a `calculus`
  # pair of the (possibly updated) state and a return value.
  defcalculus user(id: id, name: name, balance: balance) = state do
    :get_name ->
      calculus(
        state: state,
        return: name
      )

    # Returns the *previous* name while storing the new one.
    {:set_name, new_name} ->
      calculus(
        state: user(state, name: new_name),
        return: name
      )

    :get_id ->
      calculus(
        state: state,
        return: id
      )

    {:deposit, amount} ->
      calculus(
        state: user(state, balance: balance + amount),
        return: :ok
      )

    # Withdrawal only succeeds while funds cover the amount.
    {:withdraw, amount} when amount <= balance ->
      calculus(
        state: user(state, balance: balance - amount),
        return: :ok
      )

    # Insufficient funds: state is untouched, caller gets an atom flag.
    {:withdraw, _} ->
      calculus(
        state: state,
        return: :insufficient_funds
      )
  end

  @type id :: pos_integer
  @type name :: String.t()

  # Compile-time helper: runs `code` only when `name` matches the
  # capitalized-word format, otherwise raises at runtime.
  defmacrop with_valid_name(name, do: code) do
    quote location: :keep do
      name = unquote(name)

      ~r/^[A-Z][a-z]+$/
      |> Regex.match?(name)
      |> case do
        true -> unquote(code)
        false -> raise("invalid User name #{inspect(name)}")
      end
    end
  end

  # Constructor: builds an opaque User with a zero balance.
  # Raises if the name fails validation.
  @spec new(id: id, name: name) :: t
  def new(id: id, name: name) when is_integer(id) and id > 0 and is_binary(name) do
    with_valid_name name do
      user(id: id, name: name, balance: 0)
      |> construct()
    end
  end

  # Reads the current name (public accessor).
  @spec get_name(t) :: name
  def get_name(it) do
    it
    |> eval(:get_name)
    |> return()
  end

  # Replaces the name after validation; returns the updated opaque value.
  @spec set_name(t, name) :: t
  def set_name(it, name) when is_binary(name) do
    with_valid_name name do
      it
      |> eval({:set_name, name})
    end
  end

  # Reads the immutable id.
  @spec get_id(t) :: id
  def get_id(it) do
    it
    |> eval(:get_id)
    |> return()
  end

  # Adds a positive integer amount to the private balance.
  @spec deposit(t, pos_integer) :: t
  def deposit(it, amount) when is_integer(amount) and amount > 0 do
    it
    |> eval({:deposit, amount})
  end

  # Attempts to subtract from the balance; the dispatcher returns
  # `:insufficient_funds` (via `return/1`) when the balance is too low.
  @spec withdraw(t, pos_integer) :: t
  def withdraw(it, amount) when is_integer(amount) and amount > 0 do
    it
    |> eval({:withdraw, amount})
  end
end
|
test/support/user.ex
| 0.898348 | 0.565029 |
user.ex
|
starcoder
|
defmodule ExStatic.Plug do
  @moduledoc """
  A plug for serving static assets from in-memory BEAM bytecode
  It requires two options on initialization:
  * `:at` - the request path to reach for static assets.
  It must be a string.
  If a static asset cannot be found, `ExStatic.Plug` simply forwards
  the connection to the rest of the pipeline.
  ## Cache mechanisms
  `ExStatic.Plug` uses etags for HTTP caching. This means browsers/clients
  should cache assets on the first request and validate the cache on
  following requests, not downloading the static asset once again if it
  has not changed. The cache-control for etags is specified by the
  `cache_control_for_etags` option and defaults to "public".
  However, `ExStatic.Plug` also support direct cache control by using
  versioned query strings. If the request query string starts with
  "?vsn=", `ExStatic.Plug` assumes the application is versioning assets
  and does not set the `ETag` header, meaning the cache behaviour will
  be specified solely by the `cache_control_for_vsn_requests` config,
  which defaults to "public, max-age=31536000".
  ## Options
  * `:gzip` - given a request for `FILE`, serves `FILE.gz` if it exists
  in the static directory and if the `accept-encoding` header is set
  to allow gzipped content (defaults to `true`)
  * `:cache_control_for_etags` - sets cache header for requests
  that use etags. Defaults to "public".
  * `:cache_control_for_vsn_requests` - sets cache header for requests
  starting with "?vsn=" in the query string. Defaults to
  "public, max-age=31536000"
  """
  # Doc fixes relative to the original: "ehader" typo, the `:gzip` default
  # (the code defaults to `true`, the doc said `false`), and references to
  # `Plug.Static` now name this module.

  @behaviour Plug
  @allowed_methods ~w(GET HEAD)

  import Plug.Conn
  alias Plug.Conn

  @impl true
  def init(opts) do
    at = Keyword.fetch!(opts, :at)
    gzip = Keyword.get(opts, :gzip, true)
    qs_cache = Keyword.get(opts, :cache_control_for_vsn_requests, "public, max-age=31536000")
    et_cache = Keyword.get(opts, :cache_control_for_etags, "public")
    {Plug.Router.Utils.split(at), gzip, qs_cache, et_cache}
  end

  @impl true
  def call(conn = %Conn{method: meth}, {at, gzip, qs_cache, et_cache})
      when meth in @allowed_methods do
    segments = subset(at, conn.path_info) |> Enum.map(&URI.decode/1)

    case segments do
      # Request path is not under `at`: pass the conn through untouched.
      [] ->
        conn

      _ ->
        filepath = Path.join(segments)
        serve_static(conn, ExStatic.exists?(filepath), filepath, gzip, qs_cache, et_cache)
    end
  end

  # Non-GET/HEAD requests are never served from the asset store.
  def call(conn, _opts) do
    conn
  end

  # Asset exists: either serve it (stale cache) or answer 304 (fresh cache).
  defp serve_static(conn, true, filepath, gzip, qs_cache, et_cache) do
    case put_cache_header(conn, qs_cache, et_cache, filepath) do
      {:stale, conn} ->
        content_type = ExStatic.content_type!(filepath)

        conn
        |> put_resp_header("content-type", content_type)
        |> put_resp_header("x-static", "true")
        |> serve_content(filepath, gzip && gzip?(conn))
        |> halt

      {:fresh, conn} ->
        conn
        |> send_resp(304, "")
        |> halt
    end
  end

  # Asset missing: fall through to the rest of the pipeline.
  defp serve_static(conn, false, _filepath, _gzip, _qs_cache, _et_cache) do
    conn
  end

  # Plain (non-gzip) body.
  defp serve_content(conn, filepath, false) do
    conn
    |> put_resp_header("content-length", Integer.to_string(ExStatic.size!(filepath)))
    |> resp(200, ExStatic.contents!(filepath))
  end

  # Gzipped body (client accepts gzip and the option is enabled).
  defp serve_content(conn, filepath, true) do
    conn
    |> put_resp_header("content-encoding", "gzip")
    |> put_resp_header("content-length", Integer.to_string(ExStatic.gzip_size!(filepath)))
    |> resp(200, ExStatic.gzip_contents!(filepath))
  end

  # Versioned query string ("?vsn=…"): long-lived cache header, no ETag.
  defp put_cache_header(%Conn{query_string: "vsn=" <> _} = conn, qs_cache, _et_cache, _filepath)
       when is_binary(qs_cache) do
    {:stale, put_resp_header(conn, "cache-control", qs_cache)}
  end

  # ETag-based caching: fresh when the client's if-none-match matches.
  defp put_cache_header(conn, _qs_cache, et_cache, filepath) when is_binary(et_cache) do
    etag = etag_for_path(filepath)

    conn =
      conn
      |> put_resp_header("cache-control", et_cache)
      |> put_resp_header("etag", etag)

    if etag in get_req_header(conn, "if-none-match") do
      {:fresh, conn}
    else
      {:stale, conn}
    end
  end

  # No usable cache configuration: always serve.
  defp put_cache_header(conn, _, _, _) do
    {:stale, conn}
  end

  # Weak content-derived ETag from size + mtime.
  defp etag_for_path(filepath) do
    size = ExStatic.size!(filepath)
    mtime = ExStatic.mtime!(filepath)
    {size, mtime} |> :erlang.phash2() |> Integer.to_string(16)
  end

  # True when any accept-encoding header entry allows gzip (or "*").
  defp gzip?(conn) do
    gzip_header? = &String.contains?(&1, ["gzip", "*"])

    Enum.any?(get_req_header(conn, "accept-encoding"), fn accept ->
      accept |> Plug.Conn.Utils.list() |> Enum.any?(gzip_header?)
    end)
  end

  # Strips the mount-point prefix; returns [] when the path is not under it.
  defp subset([h | expected], [h | actual]),
    do: subset(expected, actual)

  defp subset([], actual),
    do: actual

  defp subset(_, _),
    do: []
end
|
lib/exstatic/plug.ex
| 0.781872 | 0.503357 |
plug.ex
|
starcoder
|
defmodule HyperEx do
  @moduledoc """
  The root HyperEx module contains all publically exposed functions of this
  package.
  """

  alias HyperEx.Abbreviation
  alias HyperEx.Renderer
  alias HyperEx.Util

  @doc """
  Render an abbreviation.
  ## Examples
      iex> HyperEx.h("div")
      ~s{<div></div>}
      iex> HyperEx.h("img")
      ~s{<img>}
      iex> HyperEx.h("div#foo.bar")
      ~s{<div id="foo" class="bar"></div>}
  """
  # Delegates to `h/2` with empty children (a binary is never a keyword list,
  # so it takes the children branch).
  def h(abbreviation), do: h(abbreviation, "")

  @doc """
  Render an abbreviation with children (a binary or list) or attributes (a
  keyword list).
  ## Examples
      iex> HyperEx.h("div", "Hello world!")
      ~s{<div>Hello world!</div>}
      iex> HyperEx.h("div", ["Hello ", "world!"])
      ~s{<div>Hello world!</div>}
      iex> HyperEx.h("div", [class: "foo"])
      ~s{<div class="foo"></div>}
  """
  def h(abbreviation, attrs_or_children) do
    {tag, base_attrs} = Abbreviation.expand(abbreviation)

    if Keyword.keyword?(attrs_or_children) do
      Renderer.render(tag, Util.merge_attrs(base_attrs, attrs_or_children), "")
    else
      Renderer.render(tag, base_attrs, attrs_or_children)
    end
  end

  @doc """
  Render an abbreviation with attributes (a keyword list) and children (a
  binary or list).
  ## Examples
      iex> HyperEx.h("div.foo", [class: "bar"], "Hello world!")
      ~s{<div class="foo bar">Hello world!</div>}
  """
  def h(abbreviation, attrs, children) do
    {tag, base_attrs} = Abbreviation.expand(abbreviation)
    merged_attrs = Util.merge_attrs(base_attrs, attrs)
    Renderer.render(tag, merged_attrs, children)
  end

  @doc """
  Render an opening tag from an abbreviation.
  ## Examples
      iex> HyperEx.open("div")
      ~s{<div>}
      iex> HyperEx.open("div#foo")
      ~s{<div id="foo">}
  """
  def open(abbreviation) do
    {tag, base_attrs} = Abbreviation.expand(abbreviation)
    Renderer.open(tag, base_attrs)
  end

  @doc """
  Render an opening tag from an abbreviation with attributes.
  ## Examples
      iex> HyperEx.open("div#foo", [class: "bar"])
      ~s{<div id="foo" class="bar">}
  """
  def open(abbreviation, attrs) do
    {tag, base_attrs} = Abbreviation.expand(abbreviation)
    merged_attrs = Util.merge_attrs(base_attrs, attrs)
    Renderer.open(tag, merged_attrs)
  end

  @doc """
  Render an closing tag from an abbreviation.
  ## Examples
      iex> HyperEx.close("div")
      ~s{</div>}
      iex> HyperEx.close("div#foo")
      ~s{</div>}
  """
  # Attributes carried by the abbreviation are irrelevant for a closing tag.
  def close(abbreviation) do
    {tag, _ignored_attrs} = Abbreviation.expand(abbreviation)
    Renderer.close(tag)
  end

  @doc """
  Wrap children (a binary or list) in an abbreviation. Behaves like `h/2` but
  expects children to be the first argument. Useful for piping.
  ## Examples
      iex> HyperEx.h("div#foo") |> HyperEx.wrap("div#bar")
      ~s{<div id="bar"><div id="foo"></div></div>}
  """
  def wrap(contents, abbreviation), do: h(abbreviation, contents)

  @doc """
  Wrap children (a binary or list) in an abbreviation with attributes. Behaves
  like `h/3` but expects children to be the first argument. Useful for piping.
  ## Examples
      iex> HyperEx.h("div#foo") |> HyperEx.wrap("div#bar", [class: "baz"])
      ~s{<div id="bar" class="baz"><div id="foo"></div></div>}
  """
  def wrap(contents, abbreviation, attrs), do: h(abbreviation, attrs, contents)
end
|
lib/hyper_ex.ex
| 0.870831 | 0.451145 |
hyper_ex.ex
|
starcoder
|
defmodule TradeIndicators.RSI do
  # Incremental Relative Strength Index (RSI) calculator over a list of bars.
  # Each call to `step/2` appends one `Item` to the chart's `:list` based on
  # the two most recent bars. All arithmetic uses Decimal for precision.
  use TypedStruct
  alias __MODULE__, as: RSI
  alias __MODULE__.Item
  alias TradeIndicators.MA
  alias TradeIndicators.Util, as: U
  alias Decimal, as: D
  alias Enum, as: E

  # Chart state: accumulated RSI items plus the smoothing period (default 14).
  typedstruct do
    field :list, List.t(), default: []
    field :period, pos_integer(), default: 14
  end

  # One RSI data point. `value`/`avg_gain`/`avg_loss` stay nil until the
  # warm-up window (`period` items) has been filled. `t` is the bar timestamp.
  typedstruct module: Item do
    field :value, D.t() | nil, default: nil
    field :avg_gain, D.t() | nil, default: nil
    field :avg_loss, D.t() | nil, default: nil
    field :gain, D.t() | nil, default: nil
    field :loss, D.t() | nil, default: nil
    field :t, non_neg_integer(), default: 0
  end

  @zero D.new(0)
  @one_hundred D.new(100)

  # Advances the chart by one step using the last two bars (close prices).
  # With a single bar there is no delta yet, so a zero seed item is stored.
  def step(chart = %RSI{}, bars) when is_list(bars) do
    case length(bars) do
      0 -> chart
      1 -> update_rsi_list(chart, bars)
      _ -> update_rsi_list(chart, E.take(bars, -2))
    end
  end

  # First item ever: no gain/loss data, only the timestamp.
  def update_rsi_list(rsi_chart = %RSI{list: []}, [%{t: ts}]),
    do: %{rsi_chart | list: [new_rsi_struct({nil, nil, nil}, @zero, @zero, ts)]}

  # Subsequent items: derive gain/loss from the close-to-close delta, then
  # pick the computation stage by how many items have accumulated:
  #   - fewer than `period`: still warming up, RSI stays nil
  #   - exactly `period`: seed the averages with a simple mean
  #   - beyond `period`: recursive smoothing via `MA.rma/3`
  def update_rsi_list(
        rsi_chart = %RSI{list: rsi_list, period: len},
        [%{c: close_old}, %{c: close_new, t: ts}]
      )
      when is_list(rsi_list) do
    delta = D.sub(close_new, close_old)
    gain_now = delta |> D.max(0)
    loss_now = delta |> D.min(0) |> D.abs()

    new_rsi_item =
      case length(rsi_list) do
        l when l < len -> {nil, nil, nil}
        ^len -> get_initial_gain_loss(rsi_list, {gain_now, loss_now}, len) |> calc_rsi()
        _ -> calc_rs(rsi_list, gain_now, loss_now, len) |> calc_rsi()
      end
      |> new_rsi_struct(gain_now, loss_now, ts)

    %{rsi_chart | list: rsi_list ++ [new_rsi_item]}
  end

  # Packs computed values plus the raw gain/loss of this bar into an Item.
  def new_rsi_struct({rsi, avg_g, avg_l}, gain, loss, ts),
    do: %Item{value: rsi, avg_gain: avg_g, avg_loss: avg_l, gain: gain, loss: loss, t: ts}

  # Smooths the previous average gain/loss with the current bar's values.
  # MA.rma appears to implement Wilder's recursive moving average — confirm
  # in the MA module.
  def calc_rs(rsi_list, gain_now, loss_now, len) do
    %Item{avg_gain: gain_last, avg_loss: loss_last} = E.at(rsi_list, -1)
    {MA.rma(gain_last, gain_now, len), MA.rma(loss_last, loss_now, len)}
  end

  # RSI = 100 - 100 / (1 + avg_gain / avg_loss), with the two degenerate
  # cases handled first: no losses -> 100, no gains -> 0.
  # NOTE(review): when both averages are zero this returns 100 (the zero-loss
  # branch wins) — verify that is the intended convention.
  def calc_rsi({avg_gain = %D{}, avg_loss = %D{}}) do
    cond do
      D.eq?(@zero, avg_loss) -> {@one_hundred, avg_gain, avg_loss}
      D.eq?(@zero, avg_gain) -> {@zero, avg_gain, avg_loss}
      true -> {D.sub(100, D.div(100, D.add(1, D.div(avg_gain, avg_loss)))), avg_gain, avg_loss}
    end
  end

  # Seed averages at the end of the warm-up window: simple mean of all
  # accumulated gains/losses (including the current bar) over `period`.
  def get_initial_gain_loss(rsi_list, {gain_now, loss_now}, period)
      when is_list(rsi_list) and is_integer(period) and period > 1 do
    E.reduce(rsi_list, {0, 0}, fn %{gain: gain, loss: loss}, {total_gain, total_loss} ->
      {D.add(total_gain, U.dec(gain)), D.add(total_loss, U.dec(loss))}
    end)
    |> (fn {g, l} -> {D.div(D.add(g, gain_now), period), D.div(D.add(l, loss_now), period)} end).()
  end
end
|
lib/rsi.ex
| 0.64232 | 0.44565 |
rsi.ex
|
starcoder
|
defmodule Logger.Translator do
  @moduledoc """
  Default translation for Erlang log messages.
  Logger allows developers to rewrite log messages provided by
  Erlang applications into a format more compatible to Elixir
  log messages by providing translator.
  A translator is simply a tuple containing a module and a function
  that can be added and removed via the `add_translator/1` and
  `remove_translator/1` functions and is invoked for every Erlang
  message above the minimum log level with four arguments:
    * `min_level` - the current Logger level
    * `level` - the level of the message being translator
    * `kind` - if the message is a report or a format
    * `data` - the data to format. If it is a report, it is a tuple
      with `{report_type, report_data}`, if it is a format, it is a
      tuple with `{format_message, format_args}`
  The function must return:
    * `{:ok, iodata}` - if the message was translated with its translation
    * `:skip` - if the message is not meant to be translated nor logged
    * `:none` - if there is no translation, which triggers the next translator
  See the function `translate/4` in this module for an example implementation
  and the default messages translated by Logger.
  """

  def translate(min_level, :error, :format, message) do
    case message do
      {'** Generic server ' ++ _, [name, last, state, reason]} ->
        msg = "GenServer #{inspect name} terminating\n"

        # FIX: the debug details were previously appended with `msg = msg <> …`
        # *inside* the `if` block; since rebindings inside `if` do not leak
        # out of the block, the extra lines were silently discarded. Binding
        # the result of the `if` expression preserves them.
        msg =
          if min_level == :debug do
            msg <> "Last message: #{inspect last}\n" <> "State: #{inspect state}\n"
          else
            msg
          end

        {:ok, msg <> "** (exit) " <> Exception.format_exit(reason)}

      {'** gen_event handler ' ++ _, [name, manager, last, state, reason]} ->
        msg = "GenEvent handler #{inspect name} installed in #{inspect manager} terminating\n"

        # Same fix as above: keep the debug-only details.
        msg =
          if min_level == :debug do
            msg <> "Last message: #{inspect last}\n" <> "State: #{inspect state}\n"
          else
            msg
          end

        {:ok, msg <> "** (exit) " <> Exception.format_exit(reason)}

      {'** Task ' ++ _, [name, starter, function, args, reason]} ->
        msg = "Task #{inspect name} started from #{inspect starter} terminating\n" <>
              "Function: #{inspect function}\n" <>
              "    Args: #{inspect args}\n" <>
              "** (exit) " <> Exception.format_exit(reason)
        {:ok, msg}

      _ ->
        :none
    end
  end

  def translate(_min_level, :info, :report,
                {:std_info, [application: app, exited: reason, type: _type]}) do
    {:ok, "Application #{app} exited with reason #{Exception.format_exit(reason)}"}
  end

  # Anything else: no translation, let the next translator try.
  def translate(_min_level, _level, _kind, _message) do
    :none
  end
end
|
lib/logger/lib/logger/translator.ex
| 0.761272 | 0.41561 |
translator.ex
|
starcoder
|
defmodule StathamLogger do
  @moduledoc ~S"""
  Elixir Logger backend with Datadog integration and extensible formatting.
  Code is mostly borrowed from built-in Elixir [:console Logger backend](https://github.com/elixir-lang/elixir/blob/master/lib/logger/lib/logger/backends/console.ex)
  ## Options
    * `:level` - the level to be logged by this backend.
      Note that messages are filtered by the general
      `:level` configuration for the `:logger` application first.
    * `:metadata` - the metadata to be printed by `$metadata`.
      Defaults to an empty list (no metadata).
      Setting `:metadata` to `:all` prints all metadata. See
      the "Metadata" section for more information.
    * `:sanitize_options` - options, passed as a second argument to
      `StathamLogger.Loggable.sanitize/2` implementations.
      Built-in implementations use `:filter_keys` and `:max_string_size` options:
      * `:filter_keys` - specify confidential keys, to hide corresponding values from metadata.
        Defaults to `[]`\
        For example, given metadata:
        ```elixir
        [
          request: %{user_id: "id"}
        ]
        ```
        and `filter_keys: {:discard, [:user_id]}`
        resulting JSON would be
        ```json
        {
          "request": {"user_id" => "[FILTERED]"}
        }
        ```
      * `:max_string_size` - maximum length of string values. Defaults to `nil`.\
        For example, given `max_string_size: 10` => "Lorem ipsu...".
    * `:device` - the device to log error messages to. Defaults to
      `:user` but can be changed to something else such as `:standard_error`.
    * `:max_buffer` - maximum events to buffer while waiting
      for a confirmation from the IO device (default: 32).
      Once the buffer is full, the backend will block until
      a confirmation is received.
  """

  @behaviour :gen_event

  # Backend state:
  #   buffer/buffer_size - events formatted while an async IO request is in
  #     flight; flushed once the device confirms.
  #   ref - monitor ref of the outstanding IO request (nil when idle).
  #   output - the iodata last sent, kept for retry on encoding errors.
  defstruct buffer: [],
            buffer_size: 0,
            device: nil,
            level: nil,
            max_buffer: nil,
            metadata: nil,
            sanitize_options: [],
            output: nil,
            ref: nil

  @impl true
  def init(__MODULE__) do
    config = Application.get_env(:logger, StathamLogger, [])
    device = Keyword.get(config, :device, :user)

    # Refuse to start when the IO device process is not registered yet.
    if Process.whereis(device) do
      {:ok, init(config, %__MODULE__{})}
    else
      {:error, :ignore}
    end
  end

  def init({__MODULE__, opts}) when is_list(opts) do
    config = configure_merge(Application.get_env(:logger, StathamLogger, []), opts)
    {:ok, init(config, %__MODULE__{})}
  end

  @impl true
  def handle_call({:configure, options}, state) do
    {:ok, :ok, configure(options, state)}
  end

  @impl true
  def handle_event({level, _gl, {Logger, msg, ts, md}}, state) do
    %{level: log_level, ref: ref, buffer_size: buffer_size, max_buffer: max_buffer} = state

    # Either log immediately (no IO in flight), buffer (IO in flight, room
    # left), or buffer-then-block until the device confirms (buffer full).
    cond do
      not meet_level?(level, log_level) ->
        {:ok, state}

      is_nil(ref) ->
        {:ok, log_event(level, msg, ts, md, state)}

      buffer_size < max_buffer ->
        {:ok, buffer_event(level, msg, ts, md, state)}

      buffer_size === max_buffer ->
        state = buffer_event(level, msg, ts, md, state)
        {:ok, await_io(state)}
    end
  end

  def handle_event(:flush, state) do
    {:ok, flush(state)}
  end

  def handle_event(_, state) do
    {:ok, state}
  end

  @impl true
  def handle_info({:io_reply, ref, msg}, %{ref: ref} = state) do
    {:ok, handle_io_reply(msg, state)}
  end

  def handle_info({:DOWN, ref, _, pid, reason}, %{ref: ref}) do
    raise "device #{inspect(pid)} exited: " <> Exception.format_exit(reason)
  end

  def handle_info(_, state) do
    {:ok, state}
  end

  @impl true
  def code_change(_old_vsn, state, _extra) do
    {:ok, state}
  end

  @impl true
  @spec terminate(any, any) :: :ok
  def terminate(_reason, _state) do
    :ok
  end

  ## Helpers

  # nil minimum level means "log everything".
  defp meet_level?(_lvl, nil), do: true

  defp meet_level?(lvl, min) do
    Logger.compare_levels(lvl, min) != :lt
  end

  defp configure(options, state) do
    config = configure_merge(Application.get_env(:logger, StathamLogger, []), options)
    Application.put_env(:logger, StathamLogger, config)
    init(config, state)
  end

  # Applies config onto the existing state (shared by both init paths).
  defp init(config, state) do
    level = Keyword.get(config, :level, :debug)
    device = Keyword.get(config, :device, :user)
    max_buffer = Keyword.get(config, :max_buffer, 32)
    sanitize_options = Keyword.get(config, :sanitize_options, [])

    metadata =
      config
      |> Keyword.get(:metadata, [])
      |> configure_metadata()

    %{
      state
      | metadata: metadata,
        level: level,
        device: device,
        max_buffer: max_buffer,
        sanitize_options: sanitize_options
    }
  end

  defp configure_metadata(:all), do: :all
  defp configure_metadata(metadata), do: Enum.reverse(metadata)

  # New options win over the application environment.
  defp configure_merge(env, options), do: Keyword.merge(env, options, fn _key, _v1, v2 -> v2 end)

  # Formats and sends one event asynchronously; `ref` tracks the IO request.
  defp log_event(level, msg, ts, md, %{device: device} = state) do
    output = format_event(level, msg, ts, md, state)
    %{state | ref: async_io(device, output), output: output}
  end

  # Appends a formatted event to the pending iodata buffer.
  defp buffer_event(level, msg, ts, md, state) do
    %{buffer: buffer, buffer_size: buffer_size} = state
    buffer = [buffer | format_event(level, msg, ts, md, state)]
    %{state | buffer: buffer, buffer_size: buffer_size + 1}
  end

  defp async_io(name, output) when is_atom(name) do
    case Process.whereis(name) do
      device when is_pid(device) ->
        async_io(device, output)

      nil ->
        raise "no device registered with the name #{inspect(name)}"
    end
  end

  # Erlang IO protocol: send the request, monitor the device, and expect an
  # `:io_reply` carrying the same ref.
  defp async_io(device, output) when is_pid(device) do
    ref = Process.monitor(device)
    send(device, {:io_request, self(), ref, {:put_chars, :unicode, output}})
    ref
  end

  # Blocks until the in-flight IO request is confirmed (or the device dies).
  defp await_io(%{ref: nil} = state), do: state

  defp await_io(%{ref: ref} = state) do
    receive do
      {:io_reply, ^ref, :ok} ->
        handle_io_reply(:ok, state)

      {:io_reply, ^ref, error} ->
        error
        |> handle_io_reply(state)
        |> await_io()

      {:DOWN, ^ref, _, pid, reason} ->
        raise "device #{inspect(pid)} exited: " <> Exception.format_exit(reason)
    end
  end

  # Builds one JSON log line: raw metadata is kept for the Datadog formatter,
  # while a filtered + sanitized copy is embedded in the event.
  defp format_event(level, message, timestamp, metadata, state) do
    %{metadata: metadata_keys, sanitize_options: sanitize_options} = state

    raw_metadata =
      metadata
      |> Map.new()

    sanitized_metadata =
      raw_metadata
      |> take_metadata(metadata_keys)
      |> StathamLogger.Loggable.sanitize(sanitize_options)

    event = StathamLogger.DatadogFormatter.format_event(level, message, timestamp, raw_metadata, sanitized_metadata)

    [Jason.encode_to_iodata!(event) | "\n"]
  end

  defp take_metadata(metadata, :all) do
    metadata
  end

  defp take_metadata(metadata, keys) do
    Map.take(metadata, keys)
  end

  # Sends any buffered events once the previous IO request has completed.
  defp log_buffer(%{buffer_size: 0, buffer: []} = state), do: state

  defp log_buffer(state) do
    %{device: device, buffer: buffer} = state
    %{state | ref: async_io(device, buffer), buffer: [], buffer_size: 0, output: buffer}
  end

  defp handle_io_reply(:ok, %{ref: ref} = state) do
    Process.demonitor(ref, [:flush])
    log_buffer(%{state | ref: nil, output: nil})
  end

  defp handle_io_reply({:error, {:put_chars, :unicode, _} = error}, state) do
    retry_log(error, state)
  end

  defp handle_io_reply({:error, :put_chars}, %{output: output} = state) do
    retry_log({:put_chars, :unicode, output}, state)
  end

  defp handle_io_reply({:error, error}, _) do
    raise "failure while logging console messages: " <> inspect(error)
  end

  # On a unicode error, either report the malformed data or prune it down to
  # the valid prefix and resend.
  defp retry_log(error, %{device: device, ref: ref, output: dirty} = state) do
    Process.demonitor(ref, [:flush])

    try do
      :unicode.characters_to_binary(dirty)
    rescue
      ArgumentError ->
        clean = ["failure while trying to log malformed data: ", inspect(dirty), ?\n]
        %{state | ref: async_io(device, clean), output: clean}
    else
      {_, good, bad} ->
        clean = [good | Logger.Formatter.prune(bad)]
        %{state | ref: async_io(device, clean), output: clean}

      _ ->
        # A well behaved IO device should not error on good data
        raise "failure while logging consoles messages: " <> inspect(error)
    end
  end

  # Drains all outstanding and buffered IO before returning.
  defp flush(%{ref: nil} = state), do: state

  defp flush(state) do
    state
    |> await_io()
    |> flush()
  end
end
|
lib/statham_logger.ex
| 0.836771 | 0.771176 |
statham_logger.ex
|
starcoder
|
defmodule GrowthBook do
  @external_resource "README.md"
  @moduledoc "README.md"
             |> File.read!()
             |> String.split("<!-- MDOC !-->")
             |> Enum.fetch!(1)

  alias GrowthBook.Condition
  alias GrowthBook.Context
  alias GrowthBook.Feature
  alias GrowthBook.Experiment
  alias GrowthBook.ExperimentResult
  alias GrowthBook.FeatureResult
  alias GrowthBook.FeatureRule
  alias GrowthBook.Helpers

  require Logger

  @typedoc """
  Bucket range

  A tuple that describes a range of the numberline between `0` and `1`.

  The tuple has 2 parts, both floats - the start of the range and the end. For example:

  ```
  {0.3, 0.7}
  ```
  """
  @type bucket_range() :: {float(), float()}

  @typedoc """
  Feature key

  A key for a feature. This is a string that references a feature.
  """
  @type feature_key() :: String.t()

  @typedoc """
  Namespace

  A tuple that specifies what part of a namespace an experiment includes. If two experiments are
  in the same namespace and their ranges don't overlap, they will be mutually exclusive.

  The tuple has 3 parts:

  1. The namespace id (`String.t()`)
  2. The beginning of the range (`float()`, between `0` and `1`)
  3. The end of the range (`float()`, between `0` and `1`)

  For example:

  ```
  {"namespace1", 0, 0.5}
  ```
  """
  @type namespace() :: {String.t(), float(), float()}

  @doc false
  # Builds a FeatureResult for `value`, with both legacy (`on`/`off`) and
  # question-mark variants of the boolean flags.
  @spec get_feature_result(
          term(),
          FeatureResult.source(),
          Experiment.t() | nil,
          ExperimentResult.t() | nil
        ) :: FeatureResult.t()
  def get_feature_result(value, source, experiment \\ nil, experiment_result \\ nil) do
    %FeatureResult{
      value: value,
      on: Helpers.cast_boolish(value),
      on?: Helpers.cast_boolish(value),
      off: not Helpers.cast_boolish(value),
      off?: not Helpers.cast_boolish(value),
      source: source,
      experiment: experiment,
      experiment_result: experiment_result
    }
  end

  @doc false
  # NOTE: This is called "getResult" in the JS SDK, but the guide says "getExperimentResult"
  @spec get_experiment_result(Context.t(), Experiment.t() | nil, integer(), boolean()) ::
          ExperimentResult.t()
  def get_experiment_result(
        %Context{} = context,
        %Experiment{} = experiment,
        variation_id \\ 0,
        in_experiment? \\ false
      ) do
    hash_attribute = experiment.hash_attribute || "id"

    # Clamp out-of-range variation ids to the first variation. Valid indices
    # are 0..length-1, so an id *equal* to the variation count is also
    # invalid (the previous `>` check let it through, which made the
    # `Enum.at/2` lookup below return nil).
    variation_id =
      if variation_id < 0 or variation_id >= length(experiment.variations),
        do: 0,
        else: variation_id

    %ExperimentResult{
      value: Enum.at(experiment.variations, variation_id),
      variation_id: variation_id,
      in_experiment?: in_experiment?,
      hash_attribute: hash_attribute,
      hash_value: Map.get(context.attributes, hash_attribute) || ""
    }
  end

  @doc """
  Determine feature state for a given context

  This function takes a context and a feature key, and returns a `GrowthBook.FeatureResult` struct.
  """
  @spec feature(Context.t(), feature_key()) :: FeatureResult.t()
  def feature(%Context{features: features} = context, feature_id)
      when is_map_key(features, feature_id) do
    %{^feature_id => %Feature{} = feature} = features

    cond do
      # No rules, using default value
      feature.rules == [] -> get_feature_result(feature.default_value, :default_value)
      true -> find_matching_feature_rule(context, feature, feature_id)
    end
  end

  def feature(%Context{} = context, missing_feature_id) do
    Logger.debug(
      "No feature with id: #{missing_feature_id}, known features are: #{inspect(Map.keys(context.features))}"
    )

    get_feature_result(nil, :unknown_feature)
  end

  @doc false
  # Walks the feature's rules in order and returns the result of the first
  # rule that applies; falls back to the feature's default value.
  @spec find_matching_feature_rule(Context.t(), Feature.t(), feature_key()) :: FeatureResult.t()
  def find_matching_feature_rule(%Context{} = context, %Feature{} = feature, feature_id) do
    Enum.find_value(feature.rules, fn %FeatureRule{} = rule ->
      cond do
        # Skip this rule if the condition doesn't evaluate to true
        rule.condition && not Condition.eval_condition(context.attributes, rule.condition) ->
          Logger.debug(
            "#{feature_id}: Skipping rule #{rule.key} because condition evaluated to false"
          )

          false

        # Feature being forced with coverage
        not is_nil(rule.force) and not is_nil(rule.coverage) ->
          hash_value = Map.get(context.attributes, rule.hash_attribute || "id")

          # If the hash value is empty, or if the rule is excluded because of coverage, skip
          cond do
            hash_value in [nil, ""] ->
              Logger.debug("#{feature_id}: Skipping rule #{rule.key} because hash value is empty")
              false

            Helpers.hash(hash_value <> feature_id) > rule.coverage ->
              Logger.debug(
                "#{feature_id}: Skipping rule #{rule.key} because it's outside coverage"
              )

              false

            true ->
              Logger.debug("#{feature_id}: Force value from rule #{rule.key}")
              get_feature_result(rule.force, :force)
          end

        # Feature being forced without coverage
        not is_nil(rule.force) ->
          Logger.debug("#{feature_id}: Force value from rule #{rule.key}")
          get_feature_result(rule.force, :force)

        # Skip invalid rule
        rule.variations in [[], nil] ->
          Logger.debug("#{feature_id}: Skipping rule #{rule.key} because it has no variations")
          false

        # Run the experiment
        true ->
          experiment = %Experiment{
            key: rule.key || feature_id,
            variations: rule.variations,
            coverage: rule.coverage,
            weights: rule.weights,
            hash_attribute: rule.hash_attribute,
            namespace: rule.namespace
          }

          %ExperimentResult{} = experiment_result = run(context, experiment)

          if experiment_result.in_experiment? do
            get_feature_result(
              experiment_result.value,
              :experiment,
              experiment,
              experiment_result
            )
          else
            Logger.debug(
              "#{feature_id}: Skipping rule #{rule.key} because it is not in the experiment"
            )

            false
          end
      end
    end) || get_feature_result(feature.default_value, :default_value)
  end

  @doc """
  Run an experiment for the given context

  This function takes a context and an experiment, and returns an `GrowthBook.ExperimentResult` struct.
  """
  @spec run(Context.t(), Experiment.t()) :: ExperimentResult.t()
  def run(context, experiment)

  # 2. When the context is disabled
  def run(%Context{enabled?: false} = context, %Experiment{} = experiment) do
    Logger.debug("Experiment is disabled")
    get_experiment_result(context, experiment)
  end

  # 1. If experiment has less than 2 variations
  def run(%Context{} = context, %Experiment{variations: variations} = experiment)
      when length(variations) < 2 do
    Logger.debug("Experiment is invalid: has less than 2 variations")
    get_experiment_result(context, experiment)
  end

  def run(%Context{} = context, %Experiment{key: key, variations: variations} = experiment) do
    variations_count = length(variations)

    # 2.5. Merge in experiment overrides from context
    experiment = Experiment.merge_with_overrides(experiment, context.overrides)

    query_string_override =
      not is_nil(context.url) &&
        Helpers.get_query_string_override(key, context.url, variations_count)

    hash_value = Map.get(context.attributes, experiment.hash_attribute || "id")

    # 9. Get bucket ranges and choose variation
    bucket_ranges =
      Helpers.get_bucket_ranges(variations_count, experiment.coverage || 1.0, experiment.weights)

    hash = if hash_value, do: Helpers.hash(hash_value <> key)
    assigned_variation = Helpers.choose_variation(hash, bucket_ranges)

    cond do
      # 3. If a variation is forced from a query string, return forced variation
      query_string_override ->
        Logger.debug("#{key}: Forced variation from query string: #{query_string_override}")
        get_experiment_result(context, experiment, query_string_override)

      # 4. If a variation is forced in the context, return forced variation
      is_map_key(context.forced_variations, key) ->
        Logger.debug("#{key}: Forced variation from context: #{context.forced_variations[key]}")
        get_experiment_result(context, experiment, context.forced_variations[key])

      # 5. Exclude if experiment is inactive or in draft
      experiment.active? == false or experiment.status == "draft" ->
        Logger.debug("#{key}: Experiment is inactive (or in draft)")
        get_experiment_result(context, experiment)

      # 6. Skip if hash value is empty
      hash_value in [nil, ""] ->
        Logger.debug("#{key}: Skipping experiment because hash value is empty")
        get_experiment_result(context, experiment)

      # 7. Exclude if user not in experiment's namespace
      experiment.namespace && not Helpers.in_namespace?(hash_value, experiment.namespace) ->
        Logger.debug("#{key}: Skipping experiment because user is not in namespace")
        get_experiment_result(context, experiment)

      # 8. Exclude if condition is set and it doesn't evaluate to true
      experiment.condition &&
          not Condition.eval_condition(context.attributes, experiment.condition) ->
        Logger.debug("#{key}: Skipping experiment because condition evaluated to false")
        get_experiment_result(context, experiment)

      # NOTE: Legacy URL and Group targetting is omitted in favor of conditions

      # 10. Exclude if not in experiment
      assigned_variation < 0 ->
        Logger.debug("#{key}: Skipping experiment because user is not assigned to variation")
        get_experiment_result(context, experiment)

      # 11. If experiment has forced variation
      experiment.force ->
        Logger.debug("#{key}: Forced variation from experiment: #{experiment.force}")
        get_experiment_result(context, experiment, experiment.force)

      # 12. Exclude if in QA mode
      context.qa_mode? ->
        Logger.debug("#{key}: Skipping experiment because QA mode is enabled")
        get_experiment_result(context, experiment)

      # 12.5. Exclude if experiment is stopped
      experiment.status == "stopped" ->
        get_experiment_result(context, experiment)

      # 13. Experiment is active
      true ->
        Logger.debug("#{key}: Experiment is active")
        get_experiment_result(context, experiment, assigned_variation, true)
    end
  end
end
|
lib/growth_book.ex
| 0.869645 | 0.70912 |
growth_book.ex
|
starcoder
|
defmodule Blurhash.Encoder do
  import Blurhash.Utils
  alias Blurhash.Base83
  use Bitwise

  # A valid pixel buffer holds exactly width * height RGB triplets
  # (3 bytes per pixel).
  defp validate_pixel_count(pixels, width, height) when byte_size(pixels) == width * height * 3 do
    :ok
  end

  defp validate_pixel_count(_, _, _) do
    {:error, :malformed_pixels}
  end

  # Blurhash supports between 1 and 9 components per axis.
  defp validate_component_count(x, y) do
    cond do
      x > 9 or y > 9 -> {:error, :too_many_components}
      x < 1 or y < 1 -> {:error, :too_little_components}
      true -> :ok
    end
  end

  # Walks the pixel binary once, accumulating each pixel's (r, g, b)
  # contribution to the (component_x, component_y) cosine basis function.
  # The accumulator is {{r, g, b}, pixel_index}.
  defp calculate_component(pixels, component_x, component_y, width, height, acc \\ {{0, 0, 0}, 0})

  defp calculate_component(
         <<r::8, g::8, b::8, rest::binary>>,
         component_x,
         component_y,
         width,
         height,
         {{acc_r, acc_g, acc_b}, index}
       ) do
    # Pixel coordinates from the running index (row-major order). `index`
    # only ever holds non-negative integers, so integer division replaces
    # the previous float `floor(index / width)`.
    pixel_x = rem(index, width)
    pixel_y = div(index, width)

    # The DC component (0, 0) is unscaled; every AC component is scaled by 2.
    normalization_factor =
      if component_x == 0 and component_y == 0,
        do: 1,
        else: 2

    basis =
      normalization_factor * :math.cos(:math.pi() * pixel_x * component_x / width) *
        :math.cos(:math.pi() * pixel_y * component_y / height)

    linear_r = srgb_to_linear(r)
    linear_g = srgb_to_linear(g)
    linear_b = srgb_to_linear(b)

    acc = {
      {acc_r + basis * linear_r, acc_g + basis * linear_g, acc_b + basis * linear_b},
      index + 1
    }

    calculate_component(rest, component_x, component_y, width, height, acc)
  end

  defp calculate_component(_, _, _, width, height, {{r, g, b}, _}) do
    # Average the accumulated contributions over the full pixel count.
    scale = 1 / (width * height)
    {r * scale, g * scale, b * scale}
  end

  # Computes every component; (0, 0) — the DC component — comes first.
  defp calculate_components(pixels, x, y, width, height) do
    for y <- 0..(y - 1),
        x <- 0..(x - 1) do
      {{x, y}, calculate_component(pixels, x, y, width, height)}
    end
  end

  # Both component counts are packed into a single base83 digit.
  defp encode_size_flag(x, y) do
    Base83.encode_number(x - 1 + (y - 1) * 9, 1)
  end

  # The DC (average colour) is stored as a packed 24-bit sRGB value.
  defp encode_dc({_, {linear_r, linear_g, linear_b}}) do
    r = linear_to_srgb(linear_r)
    g = linear_to_srgb(linear_g)
    b = linear_to_srgb(linear_b)
    Base83.encode_number(bsl(r, 16) + bsl(g, 8) + b, 4)
  end

  # No AC components: quantized maximum is 0 and there is no payload.
  defp encode_ac([]) do
    {Base83.encode_number(0, 1), ""}
  end

  defp encode_ac(ac) do
    max_ac = Enum.reduce(ac, -2, fn {_, {r, g, b}}, max_ac -> Enum.max([max_ac, r, g, b]) end)

    # Quantize the maximum AC value into 0..82 (one base83 digit). The inner
    # floor/1 already yields an integer, so the previous outer floor was a
    # no-op and has been dropped.
    quantized_max_ac = max(0, min(82, floor(max_ac * 166 - 0.5)))
    max_ac_for_quantization = (quantized_max_ac + 1) / 166
    encoded_max_ac = Base83.encode_number(quantized_max_ac, 1)

    # Each AC component is quantized per channel and packed into two base83
    # digits; map_join replaces a reduce that rebuilt the string with <>.
    encoded_components =
      Enum.map_join(ac, "", fn {_, {r, g, b}} ->
        r = quantize_color(r, max_ac_for_quantization)
        g = quantize_color(g, max_ac_for_quantization)
        b = quantize_color(b, max_ac_for_quantization)
        Base83.encode_number(r * 19 * 19 + g * 19 + b, 2)
      end)

    {encoded_max_ac, encoded_components}
  end

  # Assembles the final hash: size flag, quantized max AC, DC, AC payload.
  defp encode_blurhash([dc | ac], x, y) do
    size_flag = encode_size_flag(x, y)
    dc = encode_dc(dc)
    {max_ac, ac} = encode_ac(ac)
    size_flag <> max_ac <> dc <> ac
  end

  @doc """
  Encodes a raw RGB pixel binary (3 bytes per pixel, row-major) into a
  blurhash string using `components_x` by `components_y` components.

  Returns `{:ok, hash}`, or `{:error, :malformed_pixels}` /
  `{:error, :too_many_components}` / `{:error, :too_little_components}`
  when validation fails.
  """
  def encode(pixels, width, height, components_x, components_y) do
    with :ok <- validate_pixel_count(pixels, width, height),
         :ok <- validate_component_count(components_x, components_y) do
      components = calculate_components(pixels, components_x, components_y, width, height)
      {:ok, encode_blurhash(components, components_x, components_y)}
    end
  end
end
|
lib/blurhash/encoder.ex
| 0.542136 | 0.584212 |
encoder.ex
|
starcoder
|
defmodule TimeZoneInfo.Updater.Impl do
  @moduledoc false
  # Handles the automatic update and the initial setup.

  @behaviour TimeZoneInfo.Updater

  alias TimeZoneInfo.{
    DataConfig,
    DataPersistence,
    DataStore,
    Downloader,
    ExternalTermFormat,
    FileArchive,
    IanaParser,
    Listener,
    Transformer,
    UtcDateTime
  }

  require Logger

  # The distinct phases of the update state machine driven by do_update/1.
  @type step ::
          :run
          | :initial
          | :force
          | :check
          | :disabled
          | :finally
          | :maybe

  @seconds_per_day 24 * 60 * 60
  @default_lookahead 5

  @doc """
  Updates the TimeZoneInfo data.

  With `opt` `:run` a normal update process is started. The `opt` `:force`
  forces an update process.

  Returns
  - `:ok` if automatic updates are disabled.
  - `{:next, seconds}` where `seconds` is the time to wait until the next update.
  - `{:error, reason}` in case of an error.
  """
  @spec update(opt :: :run | :force) :: :ok | {:next, Calendar.second()} | {:error, term()}
  def update(step \\ :run) do
    # Only errors are intercepted here (to notify listeners); any other
    # result from do_update/1 falls through the `with` unchanged.
    with {:error, _} = error <- step |> step() |> do_update() do
      Listener.on_update(error)
      error
    end
  end

  # Initial setup: load persisted data, re-apply the current configuration,
  # and branch on whether the persisted data still matches that config.
  defp do_update(:initial) do
    Listener.on_update(:initial)

    with {:ok, data} <- DataPersistence.fetch(),
         {:ok, data_config} <- fetch_data_config(),
         {:ok, data} <- DataConfig.update_time_zones(data, data_config[:time_zones]) do
      do_update({:initial, data, DataConfig.equal?(data, data_config)})
    else
      # Persisted data lacks required time zones: download fresh data.
      {:error, {:time_zones_not_found, _}} = error ->
        force_update(error)

      # No persisted data yet: download fresh data.
      {:error, :enoent} = error ->
        force_update(error)

      error ->
        error
    end
  end

  # Persisted data matches the config: install it and run the regular check.
  defp do_update({:initial, data, true}) do
    with :ok <- DataStore.put(data) do
      do_update(:check)
    end
  end

  # Config changed since the data was persisted: force a fresh download.
  # When updates are disabled, fall back to the existing persisted data.
  defp do_update({:initial, data, false}) do
    Listener.on_update(:config_changed)

    with :disabled <- force_update(:disabled) do
      DataStore.put(data)
    end
  end

  defp do_update(:force), do: force_update(:ok)

  # Regular check: dispatch on the configured update interval.
  defp do_update(:check) do
    Listener.on_update(:check)
    with {:ok, interval} <- fetch_env(:update), do: do_update(interval)
  end

  defp do_update(:disabled), do: :ok

  # Daily interval: only attempt an update when the last one is older than
  # a day; otherwise report when the next attempt is due.
  defp do_update(:daily) do
    with {:ok, last_update} <- DataPersistence.fetch_last_update() do
      now = UtcDateTime.now(:unix)

      case last_update + @seconds_per_day - now do
        next when next > 0 ->
          Listener.on_update(:not_required)
          {:next, now + next}

        _next ->
          with :ok <- do_update(:maybe),
               :ok <- DataPersistence.put_last_update(now) do
            {:next, now + @seconds_per_day}
          end
      end
    end
  end

  # Downloads data and compares checksums; only installs data that changed.
  defp do_update(:maybe) do
    with {:ok, checksum_persistence} <- DataPersistence.checksum(),
         {:ok, data} when not is_atom(data) <- download(),
         {:ok, checksum_download} <- ExternalTermFormat.checksum(data) do
      case checksum_persistence == checksum_download do
        true ->
          Listener.on_update(:up_to_date)
          :ok

        false ->
          do_update(:finally, data)
      end
    else
      {:ok, :not_modified} ->
        Listener.on_update(:up_to_date)
        :ok

      error ->
        error
    end
  end

  # Final step: restrict the data to the configured time zones and write it
  # to both the runtime store and the persistence layer.
  defp do_update(:finally, data) do
    Listener.on_update(:update)

    with {:ok, time_zones} <- fetch_env(:time_zones),
         {:ok, data} <- DataConfig.update_time_zones(data, time_zones),
         :ok <- DataStore.put(data),
         :ok <- DataPersistence.put(data) do
      :ok
    end
  end

  # Unconditionally downloads and installs fresh data. `on_disabled` is the
  # value returned when the configuration disables updates.
  defp force_update(on_disabled) do
    Listener.on_update(:force)
    now = UtcDateTime.now(:unix)

    with {:ok, update} when update != :disabled <- fetch_env(:update),
         {:ok, data} when not is_atom(data) <- download(),
         :ok <- do_update(:finally, data) do
      {:next, now + @seconds_per_day}
    else
      {:ok, :not_modified} ->
        up_to_date(now)

      {:ok, :disabled} ->
        on_disabled

      error ->
        error
    end
  end

  # Records a successful "nothing changed" check and schedules the next one.
  defp up_to_date(timestamp) do
    with :ok <- DataPersistence.put_last_update(timestamp) do
      Listener.on_update(:up_to_date)
      {:next, timestamp + @seconds_per_day}
    end
  end

  # Downloads data in one of three modes: raw IANA data (parsed and
  # transformed here), pre-built ETF data (decoded directly), or a 304
  # "not modified" response.
  defp download do
    Listener.on_update(:download)

    with {:ok, files} <- files(),
         {:ok, time_zones} <- fetch_env(:time_zones),
         {:ok, lookahead} <- fetch_env(:lookahead) do
      opts = [files: files, time_zones: time_zones, lookahead: lookahead]

      # Pass the current checksum so the server can answer with 304.
      opts =
        case DataPersistence.checksum() do
          {:ok, checksum} -> Keyword.put(opts, :checksum, checksum)
          _ -> opts
        end

      case Downloader.download(opts) do
        {:ok, :iana, {200, data}} ->
          transform(data, opts)

        {:ok, mode, {200, data}} when mode in [:etf, :ws] ->
          ExternalTermFormat.decode(data)

        {:ok, _mode, {304, _body}} ->
          {:ok, :not_modified}

        {:ok, _mode, response} ->
          {:error, response}

        error ->
          error
      end
    end
  end

  # Parses downloaded IANA data and transforms it into TimeZoneInfo data.
  defp transform(data, opts) do
    with {:ok, version, content} <- extract(data),
         {:ok, parsed} <- IanaParser.parse(content) do
      {:ok, Transformer.transform(parsed, version, opts)}
    end
  end

  # Extracts the archive and splits off the "version" file from the rest.
  defp extract(data) do
    with {:ok, files} <- files(),
         {:ok, contents} <- FileArchive.extract(data, files) do
      {version, contents} = Map.pop(contents, "version")
      {:ok, String.trim(version), join(contents)}
    end
  end

  defp join(files) do
    files |> Enum.map(fn {_name, content} -> content end) |> Enum.join("\n")
  end

  # An empty store means this is the first run: do the initial setup.
  defp step(:run) do
    case DataStore.empty?() do
      true -> :initial
      false -> :check
    end
  end

  defp step(step), do: step

  # The "version" file is always requested alongside the configured files.
  defp files do
    with {:ok, files} <- fetch_env(:files) do
      {:ok, ["version" | files]}
    end
  end

  defp fetch_data_config do
    with {:ok, time_zones} <- fetch_env(:time_zones),
         {:ok, lookahead} <- fetch_env(:lookahead),
         {:ok, files} <- fetch_env(:files) do
      {:ok, [time_zones: time_zones, lookahead: lookahead, files: files]}
    end
  end

  # fetch_env/1 wraps Application config with validation; each clause
  # returns {:ok, value} or {:error, {:invalid_config, ...}}.
  defp fetch_env(:files) do
    with :error <- Application.fetch_env(:time_zone_info, :files) do
      {:error, {:invalid_config, :files}}
    end
  end

  defp fetch_env(:update) do
    case Application.fetch_env(:time_zone_info, :update) do
      {:ok, value} when value in [:disabled, :daily] ->
        {:ok, value}

      {:ok, value} ->
        {:error, {:invalid_config, [update: value]}}

      :error ->
        {:error, {:invalid_config, :update}}
    end
  end

  defp fetch_env(:lookahead) do
    case Application.get_env(:time_zone_info, :lookahead, @default_lookahead) do
      value when value > 0 ->
        {:ok, value}

      value ->
        {:error, {:invalid_config, [lookahead: value]}}
    end
  end

  defp fetch_env(:time_zones) do
    case Application.get_env(:time_zone_info, :time_zones, :all) do
      :all ->
        {:ok, :all}

      [] ->
        {:error, {:invalid_config, [time_zones: []]}}

      list when is_list(list) ->
        case Enum.all?(list, fn item -> is_binary(item) end) do
          true ->
            {:ok, list}

          false ->
            {:error, {:invalid_config, [time_zones: list]}}
        end

      value ->
        {:error, {:invalid_config, [time_zones: value]}}
    end
  end
end
|
lib/time_zone_info/updater/impl.ex
| 0.849862 | 0.5119 |
impl.ex
|
starcoder
|
defmodule ExDash.Injector do
  @moduledoc """
  Injector sets Dash Anchors and makes style tweaks to passed ExDoc HTML files.

  Dash Anchors are used within Dash to build the Table of Contents per page.

  Currently supported:

  - Types
  - Functions
  - Macros
  - Callbacks

  Style tweaks include hiding the sidebar and padding tweaks to match Dash's Hex Doc styles.
  """

  alias Floki
  alias ExDash.Injector.{Type, Function, Callback, Macro}

  @type doc_path :: String.t()
  @type id :: String.t()
  @type html_content :: String.t()

  @dash_anchor_injectors [Type, Function, Callback, Macro]

  # Injector callbacks

  @callback find_ids(html_content) :: [id]
  @callback match_and_anchor(id) :: [{String.t(), String.t()}]

  @doc """
  inject_all/1 takes a path to an html file and runs some injections over it.

  The updated html is then re-written to the same path.
  """
  # Fix: the previous spec declared `none` (never returns), but this
  # function returns the `:ok` of `File.write!/2`.
  @spec inject_all(doc_path) :: :ok
  def inject_all(doc_path) do
    updated_content =
      doc_path
      |> File.read!()
      |> inject_dash_anchors()
      |> inject_style_tweaks()

    File.write!(doc_path, updated_content)
  end

  # Runs every anchor injector over the HTML, replacing each matched
  # element with its Dash-anchored version.
  # (Spec widened to the full arity, since the default argument also
  # generates a two-argument version of this function.)
  @spec inject_dash_anchors(html_content, [module]) :: html_content
  defp inject_dash_anchors(html_content, dash_anchor_injectors \\ @dash_anchor_injectors) do
    dash_anchor_injectors
    |> Enum.reduce(html_content, fn injector, html_content ->
      meta_type_ids = injector.find_ids(html_content)

      # NOTE(review): per the match_and_anchor/1 callback spec each call
      # returns a *list* of pairs, which would make this a list of lists,
      # while find_and_replace_in_html/2 destructures flat pairs. Confirm
      # whether implementations return a single pair (spec wrong) or this
      # should be Enum.flat_map/2.
      match_and_anchors =
        meta_type_ids
        |> Enum.map(&injector.match_and_anchor/1)

      find_and_replace_in_html(html_content, match_and_anchors)
    end)
  end

  # Static string rewrites that hide the sidebar and tighten padding so the
  # page matches Dash's Hex Doc styling.
  @ex_doc_html_match_and_replace [
    {
      "<section class=\"content\"",
      "<section class=\"content\" style=\"padding-left: 0;\""
    },
    {
      "<button class=\"sidebar-toggle\">",
      "<button class=\"sidebar-toggle\" style=\"visibility: hidden\">"
    },
    {
      "<section class=\"sidebar\"",
      "<section class=\"sidebar\" style=\"visibility: hidden\""
    },
    {
      "<div id=\"content\" class=\"content-inner\">",
      "<div id=\"content\" class=\"content-inner\" style=\"margin: 0; margin-top: -28px; padding: 0px 14px;\">"
    }
  ]

  @spec inject_style_tweaks(html_content) :: html_content
  defp inject_style_tweaks(html_content) do
    find_and_replace_in_html(html_content, @ex_doc_html_match_and_replace)
  end

  # Applies each {match, replacement} pair to the HTML in order.
  @spec find_and_replace_in_html(html_content, [{match :: String.t(), replacement :: String.t()}]) ::
          html_content
  defp find_and_replace_in_html(html_content, matches_and_replacements) do
    matches_and_replacements
    |> Enum.reduce(html_content, fn {match, replace}, html_content ->
      String.replace(html_content, match, replace)
    end)
  end

  @doc """
  Returns a list of ids for the given selector and parse function.

  `id_parser` is applied to every node matching `list_selector` and must
  return a list of ids; the results are concatenated.
  """
  # Simplified: the previous version special-cased the empty list and then
  # flattened with `Enum.flat_map(&(&1))` — a single flat_map does both.
  # The parser's return type in the spec is now `[id]` to match that use.
  @spec find_ids_in_list(html_content, String.t(), (html_content -> [id])) :: [id]
  def find_ids_in_list(html_content, list_selector, id_parser) do
    html_content
    |> Floki.find(list_selector)
    |> Enum.flat_map(&id_parser.(&1))
  end
end
|
lib/injector.ex
| 0.68215 | 0.473536 |
injector.ex
|
starcoder
|
defmodule Exsm.Transition do
  @moduledoc """
  Exsm module responsible for control transitions,
  guard functions and callbacks (before and after).
  This is meant to be for internal use only.
  """

  @doc """
  Checks whether the transition from `prev_state` to `next_state` was
  explicitly declared in the `transitions` mapping.
  This is meant to be for internal use only.
  """
  @spec declared_transition?(list, atom, atom) :: boolean
  def declared_transition?(transitions, prev_state, next_state) do
    case Map.fetch(transitions, prev_state) do
      {:ok, declared_states} when is_list(declared_states) -> next_state in declared_states
      :error -> false
    end
  end

  @doc """
  Runs all before_transition callbacks, or falls back to a boilerplate
  behaviour when the module does not implement them.
  This is meant to be for internal use only.
  """
  @spec before_callbacks(struct | map, atom, atom, module) :: {:ok, struct} | {:error, String.t()}
  def before_callbacks(struct, prev_state, next_state, module) do
    result =
      run_or_fallback(
        &module.before_transition/3,
        &before_fallback/4,
        struct,
        prev_state,
        next_state,
        module._field()
      )

    # Assert the callback honoured the {:ok, _} | {:error, _} contract.
    case result do
      {:ok, struct} -> {:ok, struct}
      {:error, reason} -> {:error, reason}
    end
  end

  @doc """
  Runs all after_transition callbacks, or falls back to a boilerplate
  behaviour when the module does not implement them.
  This is meant to be for internal use only.
  """
  @spec after_callbacks(struct | map, atom, atom, module) :: struct
  def after_callbacks(struct, prev_state, next_state, module) do
    run_or_fallback(
      &module.after_transition/3,
      &after_fallback/4,
      struct,
      prev_state,
      next_state,
      module._field()
    )
  end

  @doc """
  Triggers persistence, if declared, for the struct changing state.
  This is meant to be for internal use only.
  """
  @spec persist_struct(struct | map, atom, atom, module) :: struct
  def persist_struct(struct, prev_state, next_state, module) do
    run_or_fallback(
      &module.persist/3,
      &persist_fallback/4,
      struct,
      prev_state,
      next_state,
      module._field()
    )
  end

  @doc """
  Triggers logging of the transition, if declared.
  This is meant to be for internal use only.
  """
  @spec log_transition(struct | map, atom, atom, module) :: struct
  def log_transition(struct, prev_state, next_state, module) do
    run_or_fallback(
      &module.log_transition/3,
      &log_transition_fallback/4,
      struct,
      prev_state,
      next_state,
      module._field()
    )
  end

  # Invokes `func` and, when the target module does not define the optional
  # callback (UndefinedFunctionError) or defines no matching clause
  # (FunctionClauseError), delegates to `callback`, which either supplies
  # the default behaviour or re-raises an unrelated error.
  defp run_or_fallback(func, callback, struct, prev_state, next_state, field) do
    func.(struct, prev_state, next_state)
  rescue
    error in [UndefinedFunctionError, FunctionClauseError] ->
      callback.(struct, next_state, error, field)
  end

  # A missing before_transition/3 defaults to {:ok, struct}; any other
  # error is re-raised.
  defp before_fallback(struct, _state, %{function: :before_transition, arity: 3}, _field) do
    {:ok, struct}
  end

  defp before_fallback(_struct, _state, error, _field), do: raise(error)

  # A missing persist/3 defaults to writing the new state into `field`.
  defp persist_fallback(struct, state, %{function: :persist, arity: 3}, field) do
    Map.put(struct, field, state)
  end

  defp persist_fallback(_struct, _state, error, _field), do: raise(error)

  # A missing log_transition/3 is a no-op.
  defp log_transition_fallback(struct, _state, %{function: :log_transition, arity: 3}, _field) do
    struct
  end

  defp log_transition_fallback(_struct, _state, error, _field), do: raise(error)

  # A missing after_transition/3 is a no-op.
  defp after_fallback(struct, _state, %{function: :after_transition, arity: 3}, _field) do
    struct
  end

  defp after_fallback(_struct, _state, error, _field), do: raise(error)
end
|
lib/exsm/transition.ex
| 0.777596 | 0.421314 |
transition.ex
|
starcoder
|
defmodule ParkingTweets.Garage do
  @moduledoc """
  Struct to represent information about a parking garage
  """

  # capacity: -1 marks "capacity unknown"; utilization defaults to 0.
  defstruct [:id, :name, :updated_at, :status, capacity: -1, utilization: 0, alternates: []]

  def id(%__MODULE__{id: id}), do: id

  # A garage has an explicit status (e.g. "FULL") only when the feed or a
  # capacity override set one.
  def status?(%__MODULE__{status: status}) do
    is_binary(status)
  end

  def free_spaces(%__MODULE__{} = garage) do
    garage.capacity - garage.utilization
  end

  # Integer percentage of used capacity.
  # NOTE(review): with the default capacity of -1 this divides by a
  # negative number — presumably only called once a real capacity is known;
  # confirm against callers.
  def utilization_percent(%__MODULE__{} = garage) do
    div(garage.utilization * 100, garage.capacity)
  end

  # Prefers an explicit status string over the computed percentage.
  def utilization_percent_or_status(%__MODULE__{status: nil} = garage) do
    utilization_percent(garage)
  end

  def utilization_percent_or_status(%__MODULE__{status: status}) do
    status
  end

  @doc "Create a new garage"
  def new(opts) do
    __MODULE__
    |> struct!(opts)
    |> override_capacity()
    |> set_updated_at()
  end

  # One override_capacity/1 clause is generated per configured override.
  # NOTE(review): this reads :capacity_overrides at *compile* time, so
  # changing the config requires recompiling this module.
  for {garage_id, new_capacity} <- Application.get_env(:parking_tweets, :capacity_overrides) do
    defp override_capacity(%{id: unquote(garage_id)} = garage) do
      garage = %{garage | capacity: unquote(new_capacity)}

      # The overridden capacity may be below the reported utilization; in
      # that case the garage is treated as full.
      if garage.utilization >= garage.capacity do
        %{garage | status: "FULL"}
      else
        garage
      end
    end
  end

  # No override configured for this garage id: leave it untouched.
  defp override_capacity(garage) do
    garage
  end

  # Keep an existing timestamp; otherwise stamp with "now".
  defp set_updated_at(%{updated_at: %DateTime{}} = garage) do
    garage
  end

  defp set_updated_at(garage) do
    %{garage | updated_at: DateTime.utc_now()}
  end

  @doc "Convert a JSON-API map to a Garage"
  def from_json_api(%{"id" => id, "attributes" => attributes}) do
    {:ok, updated_at, _} = DateTime.from_iso8601(Map.fetch!(attributes, "updated_at"))

    # Flatten the list of %{"name" => n, "value" => v} properties into a map.
    properties =
      attributes
      |> Map.fetch!("properties")
      |> Enum.reduce(
        %{},
        fn %{"name" => name, "value" => value}, properties ->
          Map.put(properties, name, value)
        end
      )

    new(
      id: id,
      updated_at: updated_at,
      capacity: Map.get(properties, "capacity", -1),
      utilization: Map.get(properties, "utilization", 0),
      status: Map.get(properties, "status", nil)
    )
  end

  @doc """
  Has the garage not been updated recently?

  Uses the :stale_garage_timeout configuration to determine how many seconds a garage is allowed to not be updated.

      iex> zero = DateTime.from_naive!(~N[1970-01-01T00:00:00], "Etc/UTC")
      iex> half_hour = DateTime.from_naive!(~N[1970-01-01T00:30:00], "Etc/UTC")
      iex> two_hour = DateTime.from_naive!(~N[1970-01-01T02:00:00], "Etc/UTC")
      iex> garage = new(updated_at: zero)
      iex> stale?(garage, half_hour)
      false
      iex> stale?(garage, two_hour)
      true
  """
  def stale?(%__MODULE__{updated_at: updated_at}, %DateTime{} = current_time) do
    difference = DateTime.to_unix(current_time) - DateTime.to_unix(updated_at)
    difference > Application.get_env(:parking_tweets, :stale_garage_timeout)
  end

  @doc "Updates the name of a garage"
  def put_name(%__MODULE__{} = garage, name) when is_binary(name) do
    %{garage | name: name}
  end

  @doc "Updates the alternate garages"
  # NOTE(review): the pattern requires a non-empty list of Garage structs,
  # so an empty alternates list raises FunctionClauseError — confirm intended.
  def put_alternates(%__MODULE__{} = garage, [%__MODULE__{} | _] = alternates) do
    %{garage | alternates: alternates}
  end
end
|
lib/parking_tweets/garage.ex
| 0.793746 | 0.478894 |
garage.ex
|
starcoder
|
defmodule Arrow do
  @moduledoc """
  Elixir bindings for [Apache Arrow](https://arrow.apache.org/).

  This is currently an experimental library to bring Arrow and its ecosystem
  to Elixir. It's very early work in progress.

  It uses the Rust implementation via [rustler](https://github.com/rusterlium/rustler/).

  NOTE:
  It is not an offical library of the Apache Arrow project!

  In addition to Arrow, the library comes with support for
  reading and writing [Parquet](https://parquet.apache.org/) files.
  """

  alias Arrow.Conversion

  @doc """
  Creates an Arrow array.

  For example to create an integer array:

      iex> arr = Arrow.array([1, 5, 3, nil, 6])
      #Arrow.Int64Array
      [1, 5, 3, nil, 6]

  Here, the type was inferred for us, but it is also possible to
  provide one explicitly:

      iex> arr = Arrow.array([1, 5, 3, nil, 6], type: {:f, 32})
      #Arrow.Float32Array
      [1.0, 5.0, 3.0, nil, 6.0]
  """
  @doc type: :creation
  # The separate bodiless head was redundant for a single-clause function;
  # the default argument now lives directly on the clause.
  def array(arg, opts \\ []) do
    # Use the explicitly requested type when given, otherwise infer it
    # from the values.
    type = Arrow.Type.normalize!(opts[:type] || Arrow.Type.infer(arg))

    # Coerce the Elixir terms into the representation the native layer
    # expects for the chosen type; unknown types pass through unchanged.
    arr =
      case type do
        {:f, _} -> Enum.map(arg, &Conversion.to_float/1)
        {:u, 1} -> Enum.map(arg, &Conversion.to_bool/1)
        {:u, _} -> Enum.map(arg, &Conversion.to_int/1)
        {:s, _} -> Enum.map(arg, &Conversion.to_int/1)
        {:timestamp_us, _} -> Enum.map(arg, &Conversion.to_timestamp(&1, :us))
        {:date, 32} -> Enum.map(arg, &Conversion.to_days/1)
        _ -> arg
      end

    Arrow.Native.make_array(arr, type)
  end

  @doc """
  Creates a record batch from a schema and a list of columns.

  Each column is coerced according to its field's data type before being
  handed to the native layer.
  """
  def record_batch(schema, columns) do
    columns =
      for {field, column} <- List.zip([schema.fields, columns]),
          do: prepare_column(field, column)

    %Arrow.RecordBatch{reference: Arrow.Native.make_record_batch(schema, columns)}
  end

  # Coerces a single column's values to match its declared data type.
  defp prepare_column(%{data_type: dtype}, column) do
    case dtype do
      {:f, _} -> Enum.map(column, &Conversion.to_float/1)
      {:timestamp_us, 64} -> Enum.map(column, &Conversion.to_timestamp(&1, :us))
      {:date, 32} -> Enum.map(column, &Conversion.to_days/1)
      _ -> column
    end
  end
end
|
lib/arrow.ex
| 0.687105 | 0.666137 |
arrow.ex
|
starcoder
|
# Generated protobuf definitions for TensorFlow's control-flow messages
# (ValuesDef, CondContextDef, WhileContextDef). Do not edit by hand —
# regenerate from the .proto source instead.

defmodule Tensorflow.ValuesDef.ExternalValuesEntry do
  @moduledoc false
  use Protobuf, map: true, syntax: :proto3

  @type t :: %__MODULE__{
          key: String.t(),
          value: String.t()
        }
  defstruct [:key, :value]

  field(:key, 1, type: :string)
  field(:value, 2, type: :string)
end

defmodule Tensorflow.ValuesDef do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          values: [String.t()],
          external_values: %{String.t() => String.t()}
        }
  defstruct [:values, :external_values]

  field(:values, 1, repeated: true, type: :string)

  field(:external_values, 2,
    repeated: true,
    type: Tensorflow.ValuesDef.ExternalValuesEntry,
    map: true
  )
end

defmodule Tensorflow.ControlFlowContextDef do
  @moduledoc false
  use Protobuf, syntax: :proto3

  # `ctxt` is a oneof: either a cond or a while context.
  @type t :: %__MODULE__{
          ctxt: {atom, any}
        }
  defstruct [:ctxt]

  oneof(:ctxt, 0)
  field(:cond_ctxt, 1, type: Tensorflow.CondContextDef, oneof: 0)
  field(:while_ctxt, 2, type: Tensorflow.WhileContextDef, oneof: 0)
end

defmodule Tensorflow.CondContextDef do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          context_name: String.t(),
          pred_name: String.t(),
          pivot_name: String.t(),
          branch: integer,
          values_def: Tensorflow.ValuesDef.t() | nil,
          nested_contexts: [Tensorflow.ControlFlowContextDef.t()]
        }
  defstruct [
    :context_name,
    :pred_name,
    :pivot_name,
    :branch,
    :values_def,
    :nested_contexts
  ]

  field(:context_name, 1, type: :string)
  field(:pred_name, 2, type: :string)
  field(:pivot_name, 3, type: :string)
  field(:branch, 4, type: :int32)
  field(:values_def, 5, type: Tensorflow.ValuesDef)

  field(:nested_contexts, 6,
    repeated: true,
    type: Tensorflow.ControlFlowContextDef
  )
end

defmodule Tensorflow.WhileContextDef do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          context_name: String.t(),
          parallel_iterations: integer,
          back_prop: boolean,
          swap_memory: boolean,
          pivot_name: String.t(),
          pivot_for_pred_name: String.t(),
          pivot_for_body_name: String.t(),
          loop_exit_names: [String.t()],
          loop_enter_names: [String.t()],
          values_def: Tensorflow.ValuesDef.t() | nil,
          maximum_iterations_name: String.t(),
          nested_contexts: [Tensorflow.ControlFlowContextDef.t()]
        }
  defstruct [
    :context_name,
    :parallel_iterations,
    :back_prop,
    :swap_memory,
    :pivot_name,
    :pivot_for_pred_name,
    :pivot_for_body_name,
    :loop_exit_names,
    :loop_enter_names,
    :values_def,
    :maximum_iterations_name,
    :nested_contexts
  ]

  field(:context_name, 1, type: :string)
  field(:parallel_iterations, 2, type: :int32)
  field(:back_prop, 3, type: :bool)
  field(:swap_memory, 4, type: :bool)
  field(:pivot_name, 5, type: :string)
  field(:pivot_for_pred_name, 6, type: :string)
  field(:pivot_for_body_name, 7, type: :string)
  field(:loop_exit_names, 8, repeated: true, type: :string)
  field(:loop_enter_names, 10, repeated: true, type: :string)
  field(:values_def, 9, type: Tensorflow.ValuesDef)
  field(:maximum_iterations_name, 11, type: :string)

  field(:nested_contexts, 12,
    repeated: true,
    type: Tensorflow.ControlFlowContextDef
  )
end
|
lib/tensorflow/core/protobuf/control_flow.pb.ex
| 0.771672 | 0.59752 |
control_flow.pb.ex
|
starcoder
|
defmodule Textmatrix.Buffer do
  @moduledoc """
  `Textmatrix.Buffer{}` encapsulates the characters present in the matrix,
  split up into lines and characters.
  """

  # The default filling character is a space (codepoint 32).
  @default_empty_char 32

  alias Textmatrix.{
    Buffer,
    Line
  }

  @type t :: %Buffer{
          lines: list(Line.t())
        }

  defstruct lines: []

  @doc """
  new/0 returns a new empty buffer.
  """
  @spec new() :: Buffer.t()
  def new do
    %Buffer{}
  end

  @doc """
  ensure_capacity/2 resizes the buffer so that line index `max_index` is
  addressable. If the buffer is already of or above the required size,
  it returns the buffer as is.
  """
  @spec ensure_capacity(Buffer.t(), integer()) :: Buffer.t()
  def ensure_capacity(buffer, max_index) when is_number(max_index) do
    if length(buffer.lines) <= max_index do
      missing_lines = max_index - length(buffer.lines)
      # 0..missing_lines is an inclusive range, so this appends
      # missing_lines + 1 new lines, growing the buffer to exactly
      # max_index + 1 lines (index max_index becomes valid).
      missing_range = Enum.map(0..missing_lines, fn _ -> Line.new() end)
      %Buffer{lines: buffer.lines ++ missing_range}
    else
      buffer
    end
  end

  @doc """
  write/4 takes the buffer, x and y location and a string to write at that
  location. It returns an updated buffer.
  """
  @spec write(Buffer.t(), non_neg_integer(), non_neg_integer(), binary()) :: Buffer.t()
  def write(%Buffer{} = buffer, x, y, string)
      when is_integer(x) and is_integer(y) and is_binary(string) and x >= 0 and y >= 0 do
    buffer = Buffer.ensure_capacity(buffer, y)

    lines =
      List.update_at(buffer.lines, y, fn line ->
        Line.write_string(line, x, string)
      end)

    %Buffer{buffer | lines: lines}
  end

  @doc """
  write_vertical/4 takes the buffer, x and y location and a string to write at
  the given y location going downwards. It returns the updated buffer.
  """
  @spec write_vertical(Buffer.t(), non_neg_integer(), non_neg_integer(), binary()) :: Buffer.t()
  def write_vertical(%Buffer{} = buffer, x, y, string)
      when is_integer(x) and is_integer(y) and is_binary(string) and x >= 0 and y >= 0 do
    # The last grapheme lands on line y + length - 1. The previous
    # `y + String.length(string)` over-allocated by one, leaving a spurious
    # empty trailing line in to_string/2 output.
    buffer = Buffer.ensure_capacity(buffer, y + String.length(string) - 1)

    lines =
      string
      |> String.graphemes()
      |> Enum.with_index(y)
      |> Enum.reduce(buffer.lines, fn {char, index}, acc ->
        List.update_at(acc, index, &Line.write_string(&1, x, char))
      end)

    %Buffer{buffer | lines: lines}
  end

  @doc """
  to_string/2 takes the buffer and converts it into a multiline string,
  filling empty cells with `default_char`.
  """
  # Spec fixed to match the actual arity-2 definition (default argument).
  @spec to_string(Buffer.t(), integer()) :: binary()
  def to_string(buffer, default_char \\ @default_empty_char) do
    buffer.lines
    |> Enum.map(&Line.to_string(&1, default_char))
    |> Enum.join("\n")
  end

  defimpl String.Chars, for: __MODULE__ do
    def to_string(buffer),
      do: Textmatrix.Buffer.to_string(buffer)
  end
end
|
lib/textmatrix/buffer.ex
| 0.792865 | 0.529507 |
buffer.ex
|
starcoder
|
defmodule DryValidation.Types.Integer do
  @moduledoc """
  Integer type for schemas.
  Strings are cast to integers when they parse cleanly; any other value is
  left untouched so that validation can report it.
  ```
  DryValidation.schema do
    required :age, Types.Integer
  end
  ```
  """

  alias DryValidation.Types.Func

  @doc false
  def cast(value) when is_number(value), do: value

  def cast(value) when is_binary(value) do
    # Only a full, clean parse counts; "12abc" stays a string.
    with {parsed, ""} <- Integer.parse(value) do
      parsed
    else
      _ -> value
    end
  end

  def cast(value), do: value

  @doc false
  def valid?(value), do: is_number(value)

  @doc """
  Validates that the input value is greater than the value of the first function argument.
  ```
  DryValidation.schema do
    required :age, Types.Integer.greater_than(18)
  end
  ```
  """
  def greater_than(value),
    do: comparison(fn input -> input > value end, "is not greater than #{value}")

  @doc """
  Validates that the input value is greater than or equal to the value of the first function argument.
  ```
  DryValidation.schema do
    required :age, Types.Integer.greater_than_or_equal(18)
  end
  ```
  """
  def greater_than_or_equal(value),
    do: comparison(fn input -> input >= value end, "is not greater than or equal to #{value}")

  @doc """
  Validates that the input value is less than the value of the first function argument.
  ```
  DryValidation.schema do
    required :age, Types.Integer.less_than(100)
  end
  ```
  """
  def less_than(value),
    do: comparison(fn input -> input < value end, "is not less than #{value}")

  @doc """
  Validates that the input value is less than or equal to the value of the first function argument.
  ```
  DryValidation.schema do
    required :age, Types.Integer.less_than_or_equal(100)
  end
  ```
  """
  def less_than_or_equal(value),
    do: comparison(fn input -> input <= value end, "is not less than or equal to #{value}")

  # Wraps a predicate and its failure message in a `Func` validator struct.
  defp comparison(predicate, message) do
    %Func{
      fn: predicate,
      type: __MODULE__,
      error_message: message
    }
  end
end
|
lib/types/integer.ex
| 0.885372 | 0.892093 |
integer.ex
|
starcoder
|
defmodule TeslaMate.Vehicles.Vehicle.Summary do
@moduledoc """
Flattens a `TeslaApi.Vehicle` response plus process-level attributes (state,
health, geofence, ...) into a single summary struct. Imperial units from the
API are converted to metric (km, km/h) on the way in.
"""
import TeslaMate.Convert, only: [miles_to_km: 2, mph_to_kmh: 1]
alias TeslaApi.Vehicle.State.{Drive, Charge, VehicleState}
alias TeslaApi.Vehicle
alias TeslaMate.Log.Car
defstruct ~w(
car display_name state since healthy latitude longitude heading battery_level usable_battery_level
ideal_battery_range_km est_battery_range_km rated_battery_range_km charge_energy_added
speed outside_temp inside_temp is_climate_on is_preconditioning locked sentry_mode
plugged_in scheduled_charging_start_time charge_limit_soc charger_power windows_open doors_open
odometer shift_state charge_port_door_open time_to_full_charge charger_phases
charger_actual_current charger_voltage version update_available update_version is_user_present geofence
model trim_badging exterior_color wheel_type spoiler_type trunk_open frunk_open elevation
)a
# No vehicle data yet (startup): expose only static car attributes and mark
# the summary as :unavailable.
def into(nil, %{state: :start, healthy?: healthy?, car: car}) do
%__MODULE__{
state: :unavailable,
healthy: healthy?,
trim_badging: get_car_attr(car, :trim_badging),
exterior_color: get_car_attr(car, :exterior_color),
spoiler_type: get_car_attr(car, :spoiler_type),
wheel_type: get_car_attr(car, :wheel_type),
model: get_car_attr(car, :model),
car: car
}
end
# Merge the formatted API vehicle with process attributes; attrs fields win
# over anything format_vehicle/1 produced.
def into(vehicle, attrs) do
%{
state: state,
since: since,
healthy?: healthy?,
car: car,
elevation: elevation,
geofence: gf
} = attrs
%__MODULE__{
format_vehicle(vehicle)
| state: format_state(state),
since: since,
healthy: healthy?,
elevation: elevation,
geofence: gf,
trim_badging: get_car_attr(car, :trim_badging),
exterior_color: get_car_attr(car, :exterior_color),
spoiler_type: get_car_attr(car, :spoiler_type),
wheel_type: get_car_attr(car, :wheel_type),
model: get_car_attr(car, :model),
car: car
}
end
# Collapse internal state tuples (state machine data) down to a single atom
# for display; driving while the API reports offline shows as :offline.
defp format_state({:driving, {:offline, _}, _id}), do: :offline
defp format_state({:driving, _state, _id}), do: :driving
defp format_state({state, _, _}) when is_atom(state), do: state
defp format_state({state, _}) when is_atom(state), do: state
defp format_state(state) when is_atom(state), do: state
# Nil-safe accessors for static car attributes (car may be nil at startup).
defp get_car_attr(%Car{exterior_color: v}, :exterior_color), do: v
defp get_car_attr(%Car{spoiler_type: v}, :spoiler_type), do: v
defp get_car_attr(%Car{trim_badging: v}, :trim_badging), do: v
defp get_car_attr(%Car{wheel_type: v}, :wheel_type), do: v
defp get_car_attr(%Car{model: v}, :model), do: v
defp get_car_attr(nil, _key), do: nil
# Maps the raw API struct onto the summary, converting miles -> km and
# mph -> km/h. Missing sub-structs yield nil fields via get_in_struct/2.
defp format_vehicle(%Vehicle{} = vehicle) do
%__MODULE__{
# General
display_name: vehicle.display_name,
# Drive State
latitude: get_in_struct(vehicle, [:drive_state, :latitude]),
longitude: get_in_struct(vehicle, [:drive_state, :longitude]),
speed: speed(vehicle),
shift_state: get_in_struct(vehicle, [:drive_state, :shift_state]),
heading: get_in_struct(vehicle, [:drive_state, :heading]),
# Charge State
plugged_in: plugged_in(vehicle),
battery_level: charge(vehicle, :battery_level),
usable_battery_level: charge(vehicle, :usable_battery_level),
charge_energy_added: charge(vehicle, :charge_energy_added),
charge_limit_soc: charge(vehicle, :charge_limit_soc),
charge_port_door_open: charge(vehicle, :charge_port_door_open),
charger_actual_current: charge(vehicle, :charger_actual_current),
charger_phases: charge(vehicle, :charger_phases),
charger_power: charge(vehicle, :charger_power),
charger_voltage: charge(vehicle, :charger_voltage),
est_battery_range_km: charge(vehicle, :est_battery_range) |> miles_to_km(2),
ideal_battery_range_km: charge(vehicle, :ideal_battery_range) |> miles_to_km(2),
rated_battery_range_km: charge(vehicle, :battery_range) |> miles_to_km(2),
time_to_full_charge: charge(vehicle, :time_to_full_charge),
scheduled_charging_start_time:
charge(vehicle, :scheduled_charging_start_time) |> to_datetime(),
# Climate State
is_climate_on: get_in_struct(vehicle, [:climate_state, :is_climate_on]),
is_preconditioning: get_in_struct(vehicle, [:climate_state, :is_preconditioning]),
outside_temp: get_in_struct(vehicle, [:climate_state, :outside_temp]),
inside_temp: get_in_struct(vehicle, [:climate_state, :inside_temp]),
# Vehicle State
odometer: get_in_struct(vehicle, [:vehicle_state, :odometer]) |> miles_to_km(2),
locked: get_in_struct(vehicle, [:vehicle_state, :locked]),
sentry_mode: get_in_struct(vehicle, [:vehicle_state, :sentry_mode]),
windows_open: window_open(vehicle),
doors_open: doors_open(vehicle),
trunk_open: trunk_open(vehicle),
frunk_open: frunk_open(vehicle),
is_user_present: get_in_struct(vehicle, [:vehicle_state, :is_user_present]),
version: version(vehicle),
update_available: update_available(vehicle),
update_version: update_version(vehicle)
}
end
# Shorthand for nil-safe access into the charge_state sub-struct.
defp charge(vehicle, key), do: get_in_struct(vehicle, [:charge_state, key])
defp speed(%Vehicle{drive_state: %Drive{speed: s}}) when not is_nil(s), do: mph_to_kmh(s)
defp speed(_vehicle), do: nil
defp plugged_in(%Vehicle{charge_state: nil}), do: nil
defp plugged_in(%Vehicle{vehicle_state: nil}), do: nil
defp plugged_in(%Vehicle{charge_state: %Charge{charge_port_door_open: :unknown}}), do: :unknown
# Without cold-weather mode, "plugged in" means the latch is engaged while
# the charge port is open.
defp plugged_in(%Vehicle{charge_state: %Charge{charge_port_cold_weather_mode: false} = c}) do
c.charge_port_latch == "Engaged" and c.charge_port_door_open
end
# With cold-weather mode, rely on the charging state string instead.
defp plugged_in(%Vehicle{charge_state: %Charge{charge_port_cold_weather_mode: true} = c}) do
c.charging_state != "Disconnected"
end
defp plugged_in(_vehicle), do: nil
# Window/door positions are numeric; any value > 0 counts as open.
# Returns nil when the API did not report all four positions.
defp window_open(%Vehicle{vehicle_state: vehicle_state}) do
case vehicle_state do
%VehicleState{fd_window: fd, fp_window: fp, rd_window: rd, rp_window: rp}
when is_number(fd) and is_number(fp) and is_number(rd) and is_number(rp) ->
fd > 0 or fp > 0 or rd > 0 or rp > 0
_ ->
nil
end
end
defp doors_open(%Vehicle{vehicle_state: vehicle_state}) do
case vehicle_state do
%VehicleState{df: df, pf: pf, dr: dr, pr: pr}
when is_number(df) and is_number(pf) and is_number(dr) and is_number(pr) ->
df > 0 or pf > 0 or dr > 0 or pr > 0
_ ->
nil
end
end
defp trunk_open(%Vehicle{vehicle_state: %VehicleState{rt: rt}}) when is_number(rt), do: rt > 0
defp trunk_open(_vehicle), do: nil
defp frunk_open(%Vehicle{vehicle_state: %VehicleState{ft: ft}}) when is_number(ft), do: ft > 0
defp frunk_open(_vehicle), do: nil
# car_version looks like "2021.4.6 deadbeef"; keep only the version number.
defp version(vehicle) do
with %Vehicle{vehicle_state: %VehicleState{car_version: v}} when is_binary(v) <- vehicle,
[version | _] <- String.split(v, " ") do
version
else
_ -> nil
end
end
defp update_available(vehicle) do
case get_in_struct(vehicle, [:vehicle_state, :software_update, :status]) do
status when status in ["available", "downloading", "downloading_wifi_wait"] -> true
status when is_binary(status) -> false
nil -> nil
end
end
defp update_version(vehicle) do
case get_in_struct(vehicle, [:vehicle_state, :software_update, :version]) do
version when is_binary(version) -> List.first(String.split(version, " "))
nil -> nil
end
end
defp to_datetime(val) when val in [nil, :unknown], do: val
defp to_datetime(ts), do: DateTime.from_unix!(ts)
# Nil-tolerant nested struct access: stops and returns nil as soon as any
# intermediate value is nil (structs don't support Access/get_in directly).
defp get_in_struct(struct, keys) do
Enum.reduce(keys, struct, fn key, acc -> if acc, do: Map.get(acc, key) end)
end
end
|
lib/teslamate/vehicles/vehicle/summary.ex
| 0.730001 | 0.463869 |
summary.ex
|
starcoder
|
defmodule Multiverses.Supervisor do
@moduledoc """
This module is intended to be a drop-in replacement for `Supervisor`.
It launches the supervisor and the supervisor *unconditionally*
inherits the `:"$caller"` of whoever launched it.
## Usage
This module should only be used when you are creating a custom
[module-based Supervisor](https://hexdocs.pm/elixir/master/Supervisor.html#module-module-based-supervisors).
### Example:
```elixir
defmodule MyApp.CustomSupervisor do
use Multiverses, with: Supervisor
use Supervisor
def start_link(arg, opts) do
Supervisor.start_link(__MODULE__, arg, opts)
end
@impl true
def init(_arg) do
children = [
... supervised children
]
Supervisor.init(children, strategy: :one_for_one)
end
end
```
"""
# Re-exports the Supervisor API, except start_link which is replaced below.
use Multiverses.Clone,
module: Supervisor,
except: [start_link: 2, start_link: 3]
defmacro __using__(opts) do
quote do
@behaviour Supervisor
# implements child_spec in the same way that GenServer does.
def child_spec(init_arg) do
default = %{
id: __MODULE__,
start: {__MODULE__, :start_link, [init_arg]},
type: :supervisor
}
Supervisor.child_spec(default, unquote(Macro.escape(opts)))
end
defoverridable child_spec: 1
end
end
@doc "See `Supervisor.start_link/2`."
def start_link(module, init_state, opts \\ []) do
__MODULE__.do_start(:link, module, init_state, opts)
end
# inject a startup function equivalent to GenServer's do_start.
# note that, here we're going to inject a custom "init_it" function
# into this selfsame module (instead of using the :gen_server) init_it
# which will catch the callers parameters that we're sending over.
require Multiverses
@doc false
def do_start(link, module, arg, options) do
# Smuggle the current multiverse caller chain into the child's start
# options; init_it/6 below extracts it inside the new process.
portal = [callers: Multiverses.link()]
case Keyword.pop(options, :name) do
{nil, opts} ->
init_arg = {self(), module, arg}
:gen.start(__MODULE__, link, :supervisor, init_arg, opts ++ portal)
# Registered names (atom/global/via) are global to the node, which would
# leak across multiverse shards, so all three are rejected.
{atom, _opts} when is_atom(atom) ->
raise ArgumentError, "atom name not supported with multiverses"
{{:global, _term}, _opts} ->
raise ArgumentError, "global not supported with multiverses"
{{:via, via_module, _term}, _opts} when is_atom(via_module) ->
raise ArgumentError, "via not supported with multiverses"
{other, _} ->
# trick dialyzer into not complaining about non-local returns.
# NOTE(review): this message lists atom/global/via as acceptable, but the
# clauses above reject them — the wording predates the restriction.
case :erlang.phash2(1, 1) do
0 ->
raise ArgumentError, """
expected :name option to be one of the following:
* nil
* atom
* {:global, term}
* {:via, module, term}
Got: #{inspect(other)}
"""
1 ->
:ignore
end
end
end
# inject the init_it function that trampolines to gen_server init_it.
# since this callback is called inside of the spawned gen_server
# process, we can paint this process with the call chain that
# lets us identify the correct, sharded universe.
@doc false
def init_it(starter, self_param, name, module, args, options!) do
Multiverses.port(options![:callers])
# Strip the smuggled callers entry before handing off to OTP.
options! = Keyword.delete(options!, :callers)
# dirty little secret: Supervisors are just GenServers under the hood.
:gen_server.init_it(starter, self_param, name, module, args, options!)
end
end
|
lib/multiverses/supervisor.ex
| 0.825765 | 0.794026 |
supervisor.ex
|
starcoder
|
defmodule Stripe.Charges do
@moduledoc """
Functions for working with charges at Stripe. Through this API you can:
* create a charge,
* update a charge,
* get a charge,
* list charges,
* count charges,
* refund a charge,
* partially refund a charge.
Stripe API reference: https://stripe.com/docs/api#charges
"""
@endpoint "charges"
@doc """
Create a charge.
Creates a charge for a customer or card using amount and params. `params`
must include a source.
Returns `{:ok, charge}` tuple.
## Examples
params = [
source: [
object: "card",
number: "4111111111111111",
exp_month: 10,
exp_year: 2020,
country: "US",
name: "<NAME>",
cvc: 123
],
description: "1000 Widgets"
]
{:ok, charge} = Stripe.Charges.create(1000, params)
"""
def create(amount, params) do
create amount, params, Stripe.config_or_env_key
end
@doc """
Create a charge. Accepts Stripe API key.
Creates a charge for a customer or card using amount and params. `params`
must include a source.
Returns `{:ok, charge}` tuple.
## Examples
{:ok, charge} = Stripe.Charges.create(1000, params, key)
"""
def create(amount, params, key) do
#default currency
params = Keyword.put_new params, :currency, "USD"
#drop in the amount
params = Keyword.put_new params, :amount, amount
Stripe.make_request_with_key(:post, @endpoint, key, params)
|> Stripe.Util.handle_stripe_response
end
@doc """
Get a list of charges.
Gets a list of charges.
Accepts the following parameters:
* `limit` - a limit of items to be returned (optional; defaults to 10).
Returns a `{:ok, charges}` tuple, where `charges` is a list of charges.
## Examples
{:ok, charges} = Stripe.Charges.list() # Get a list of 10 charges
{:ok, charges} = Stripe.Charges.list(20) # Get a list of 20 charges
"""
# Bodiless head declares the default; the clauses below dispatch on the
# argument being an integer limit vs. a params keyword list.
def list(params \\ [])
def list(limit) when is_integer(limit) do
list Stripe.config_or_env_key, limit
end
# NOTE(review): @doc between clauses of the same function re-documents
# list/1 and triggers a compiler warning; consider merging both @doc blocks.
@doc """
Get a list of charges.
Gets a list of charges.
Accepts the following parameters:
* `params` - a list of params supported by Stripe (optional; defaults to []). Available parameters are:
`customer`, `ending_before`, `limit` and `source`.
Returns a `{:ok, charges}` tuple, where `charges` is a list of charges.
## Examples
{:ok, charges} = Stripe.Charges.list(source: "card") # Get a list of charges for cards
"""
def list(params) do
list(Stripe.config_or_env_key, params)
end
@doc """
Get a list of charges. Accepts Stripe API key.
Gets a list of charges.
Accepts the following parameters:
* `limit` - a limit of items to be returned (optional; defaults to 10).
Returns a `{:ok, charges}` tuple, where `charges` is a list of charges.
## Examples
{:ok, charges} = Stripe.Charges.list("my_key") # Get a list of up to 10 charges
{:ok, charges} = Stripe.Charges.list("my_key", 20) # Get a list of up to 20 charges
"""
def list(key, limit) when is_integer(limit) do
Stripe.make_request_with_key(:get, "#{@endpoint}?limit=#{limit}", key)
|> Stripe.Util.handle_stripe_response
end
@doc """
Get a list of charges. Accepts Stripe API key.
Gets a list of charges.
Accepts the following parameters:
* `params` - a list of params supported by Stripe (optional; defaults to
`[]`). Available parameters are: `customer`, `ending_before`, `limit` and
`source`.
Returns a `{:ok, charges}` tuple, where `charges` is a list of charges.
## Examples
{:ok, charges} = Stripe.Charges.list("my_key", source: "card") # Get a list of charges for cards
"""
def list(key, params) do
Stripe.make_request_with_key(:get, "#{@endpoint}", key, %{}, %{}, [params: params])
|> Stripe.Util.handle_stripe_response
end
@doc """
Update a charge.
Updates a charge with changeable information.
Accepts the following parameters:
* `params` - a list of params to be updated (optional; defaults to `[]`).
Available parameters are: `description`, `metadata`, `receipt_email`,
`fraud_details` and `shipping`.
Returns a `{:ok, charge}` tuple.
## Examples
params = [
description: "Changed charge"
]
{:ok, charge} = Stripe.Charges.change("charge_id", params)
"""
def change(id, params) do
change id, params, Stripe.config_or_env_key
end
@doc """
Update a charge. Accepts Stripe API key.
Updates a charge with changeable information.
Accepts the following parameters:
* `params` - a list of params to be updated (optional; defaults to `[]`).
Available parameters are: `description`, `metadata`, `receipt_email`,
`fraud_details` and `shipping`.
Returns a `{:ok, charge}` tuple.
## Examples
params = [
description: "Changed charge"
]
{:ok, charge} = Stripe.Charges.change("charge_id", params, "my_key")
"""
def change(id, params, key) do
Stripe.make_request_with_key(:post, "#{@endpoint}/#{id}", key, params)
|> Stripe.Util.handle_stripe_response
end
@doc """
Capture a charge.
Captures a charge that is currently pending.
Note: you can default a charge to be automatically captured by setting `capture: true` in the charge create params.
Returns a `{:ok, charge}` tuple.
## Examples
{:ok, charge} = Stripe.Charges.capture("charge_id")
"""
def capture(id) do
capture id, Stripe.config_or_env_key
end
@doc """
Capture a charge. Accepts Stripe API key.
Captures a charge that is currently pending.
Note: you can default a charge to be automatically captured by setting `capture: true` in the charge create params.
Returns a `{:ok, charge}` tuple.
## Examples
{:ok, charge} = Stripe.Charges.capture("charge_id", "my_key")
"""
def capture(id,key) do
Stripe.make_request_with_key(:post, "#{@endpoint}/#{id}/capture", key)
|> Stripe.Util.handle_stripe_response
end
@doc """
Get a charge.
Gets a charge.
Returns a `{:ok, charge}` tuple.
## Examples
{:ok, charge} = Stripe.Charges.get("charge_id")
"""
def get(id) do
get id, Stripe.config_or_env_key
end
@doc """
Get a charge. Accepts Stripe API key.
Gets a charge.
Returns a `{:ok, charge}` tuple.
## Examples
{:ok, charge} = Stripe.Charges.get("charge_id", "my_key")
"""
def get(id, key) do
Stripe.make_request_with_key(:get, "#{@endpoint}/#{id}", key)
|> Stripe.Util.handle_stripe_response
end
@doc """
Refund a charge.
Refunds a charge completely.
Note: use `refund_partial` if you just want to perform a partial refund.
Returns a `{:ok, charge}` tuple.
## Examples
{:ok, charge} = Stripe.Charges.refund("charge_id")
"""
def refund(id) do
refund id, Stripe.config_or_env_key
end
@doc """
Refund a charge. Accepts Stripe API key.
Refunds a charge completely.
Note: use `refund_partial` if you just want to perform a partial refund.
Returns a `{:ok, charge}` tuple.
## Examples
{:ok, charge} = Stripe.Charges.refund("charge_id", "my_key")
"""
def refund(id, key) do
Stripe.make_request_with_key(:post, "#{@endpoint}/#{id}/refunds", key)
|> Stripe.Util.handle_stripe_response
end
@doc """
Partially refund a charge.
Refunds a charge partially.
Accepts the following parameters:
* `amount` - amount to be refunded (required).
Returns a `{:ok, charge}` tuple.
## Examples
{:ok, charge} = Stripe.Charges.refund_partial("charge_id", 500)
"""
def refund_partial(id, amount) do
refund_partial id, amount, Stripe.config_or_env_key
end
@doc """
Partially refund a charge. Accepts Stripe API key.
Refunds a charge partially.
Accepts the following parameters:
* `amount` - amount to be refunded (required).
Returns a `{:ok, charge}` tuple.
## Examples
{:ok, charge} = Stripe.Charges.refund_partial("charge_id", 500, "my_key")
"""
def refund_partial(id, amount, key) do
params = [amount: amount]
Stripe.make_request_with_key(:post, "#{@endpoint}/#{id}/refunds", key, params)
|> Stripe.Util.handle_stripe_response
end
@doc """
Get total number of charges.
Gets total number of charges.
Returns `{:ok, count}` tuple.
## Examples
{:ok, count} = Stripe.Charges.count()
"""
def count do
count Stripe.config_or_env_key
end
@doc """
Get total number of charges. Accepts Stripe API key.
Gets total number of charges.
Returns `{:ok, count}` tuple.
## Examples
{:ok, count} = Stripe.Charges.count("key")
"""
def count(key) do
Stripe.Util.count "#{@endpoint}", key
end
end
|
lib/stripe/charges.ex
| 0.884863 | 0.636861 |
charges.ex
|
starcoder
|
defmodule Kino.ETS do
@moduledoc """
A widget for interactively viewing an ETS table.
## Examples
tid = :ets.new(:users, [:set, :public])
Kino.ETS.new(tid)
Kino.ETS.new(:elixir_config)
"""
@behaviour Kino.Table
alias Kino.Utils
@type t :: Kino.Table.t()
@doc """
Starts a widget process representing the given ETS table.
Note that private tables cannot be read by an arbitrary process,
so the given table must have either public or protected access.
"""
@spec new(:ets.tid()) :: t()
def new(tid) do
# Fail fast with a descriptive error before spawning the widget process.
case :ets.info(tid, :protection) do
:private ->
raise ArgumentError,
"the given table must be either public or protected, but a private one was given"
:undefined ->
raise ArgumentError,
"the given table identifier #{inspect(tid)} does not refer to an existing ETS table"
_ ->
:ok
end
Kino.Table.new(__MODULE__, {tid})
end
@impl true
def init({tid}) do
table_name = :ets.info(tid, :name)
name = "ETS #{inspect(table_name)}"
info = %{name: name, features: [:refetch, :pagination]}
{:ok, info, %{tid: tid}}
end
@impl true
def get_data(rows_spec, state) do
records = get_records(state.tid, rows_spec)
rows = Enum.map(records, &record_to_row/1)
total_rows = :ets.info(state.tid, :size)
# Column set is derived from the fetched page of records, so columns may
# vary between pages if record shapes differ.
columns = records |> Utils.Table.keys_for_records() |> Utils.Table.keys_to_columns()
{:ok, %{columns: columns, rows: rows, total_rows: total_rows}, state}
end
# Pages through the table with a :qlc cursor: skip `offset` answers, take
# `limit`. The cursor must be explicitly deleted to free its resources.
defp get_records(tid, rows_spec) do
query = :ets.table(tid)
cursor = :qlc.cursor(query)
if rows_spec.offset > 0 do
:qlc.next_answers(cursor, rows_spec.offset)
end
records = :qlc.next_answers(cursor, rows_spec.limit)
:qlc.delete_cursor(cursor)
records
end
# Renders an ETS record (a tuple) as a row map keyed by element index, with
# every value inspected to a string for display.
defp record_to_row(record) do
fields =
record
|> Tuple.to_list()
|> Enum.with_index()
|> Map.new(fn {val, idx} -> {idx, inspect(val)} end)
%{fields: fields}
end
end
|
lib/kino/ets.ex
| 0.81772 | 0.485722 |
ets.ex
|
starcoder
|
defmodule Phoenix.Socket do
@moduledoc ~S"""
A socket implementation that multiplexes messages over channels.
`Phoenix.Socket` is used as a module for establishing and maintaining
the socket state via the `Phoenix.Socket` struct.
Once connected to a socket, incoming and outgoing events are routed to
channels. The incoming client data is routed to channels via transports.
It is the responsibility of the socket to tie transports and channels
together.
By default, Phoenix supports both websockets and longpoll when invoking
`Phoenix.Endpoint.socket/3` in your endpoint:
socket "/socket", MyApp.Socket, websocket: true, longpoll: false
The command above means incoming socket connections can be made via
a WebSocket connection. Events are routed by topic to channels:
channel "room:lobby", MyApp.LobbyChannel
See `Phoenix.Channel` for more information on channels.
## Socket Behaviour
Socket handlers are mounted in Endpoints and must define two callbacks:
* `connect/3` - receives the socket params, connection info if any, and
authenticates the connection. Must return a `Phoenix.Socket` struct,
often with custom assigns
* `id/1` - receives the socket returned by `connect/3` and returns the
id of this connection as a string. The `id` is used to identify socket
connections, often to a particular user, allowing us to force disconnections.
For sockets requiring no authentication, `nil` can be returned
## Examples
defmodule MyApp.UserSocket do
use Phoenix.Socket
channel "room:*", MyApp.RoomChannel
def connect(params, socket, _connect_info) do
{:ok, assign(socket, :user_id, params["user_id"])}
end
def id(socket), do: "users_socket:#{socket.assigns.user_id}"
end
# Disconnect all user's socket connections and their multiplexed channels
MyApp.Endpoint.broadcast("users_socket:" <> user.id, "disconnect", %{})
## Socket fields
* `:id` - The string id of the socket
* `:assigns` - The map of socket assigns, default: `%{}`
* `:channel` - The current channel module
* `:channel_pid` - The channel pid
* `:endpoint` - The endpoint module where this socket originated, for example: `MyApp.Endpoint`
* `:handler` - The socket module where this socket originated, for example: `MyApp.UserSocket`
* `:joined` - If the socket has effectively joined the channel
* `:join_ref` - The ref sent by the client when joining
* `:ref` - The latest ref sent by the client
* `:pubsub_server` - The registered name of the socket's pubsub server
* `:topic` - The string topic, for example `"room:123"`
* `:transport` - An identifier for the transport, used for logging
* `:transport_pid` - The pid of the socket's transport process
* `:serializer` - The serializer for socket messages
## Logging
Logging for socket connections is set via the `:log` option, for example:
use Phoenix.Socket, log: :debug
Defaults to the `:info` log level. Pass `false` to disable logging.
## Garbage collection
It's possible to force garbage collection in the transport process after
processing large messages. For example, to trigger such from your channels,
run:
send(socket.transport_pid, :garbage_collect)
## Client-server communication
The encoding of server data and the decoding of client data is done
according to a serializer, defined in `Phoenix.Socket.Serializer`.
By default, JSON encoding is used to broker messages to and from
clients with `Phoenix.Socket.V2.JSONSerializer`.
The serializer `decode!` function must return a `Phoenix.Socket.Message`
which is forwarded to channels except:
* `"heartbeat"` events in the "phoenix" topic - should just emit an OK reply
* `"phx_join"` on any topic - should join the topic
* `"phx_leave"` on any topic - should leave the topic
Each message also has a `ref` field which is used to track responses.
The server may send messages or replies back. For messages, the
ref uniquely identifies the message. For replies, the ref matches
the original message. Both data-types also include a join_ref that
uniquely identifes the currently joined channel.
The `Phoenix.Socket` implementation may also sent special messages
and replies:
* `"phx_error"` - in case of errors, such as a channel process
crashing, or when attempting to join an already joined channel
* `"phx_close"` - the channel was gracefully closed
Phoenix ships with a JavaScript implementation of both websocket
and long polling that interacts with Phoenix.Socket and can be
used as reference for those interested in implementing custom clients.
## Custom sockets and transports
See the `Phoenix.Socket.Transport` documentation for more information on
writing your own socket that does not leverage channels or for writing
your own transports that interacts with other sockets.
## Custom channels
You can list any module as a channel as long as it implements
a `start_link/1` function that receives a tuple with three elements:
{auth_payload, from, socket}
A custom channel implementation MUST invoke
`GenServer.reply(from, reply_payload)` during its initialization
with a custom `reply_payload` that will be sent as a reply to the
client. Failing to do so will block the socket forever.
A custom channel receives `Phoenix.Socket.Message` structs as regular
messages from the transport. Replies to those messages and custom
messages can be sent to the socket at any moment by building an
appropriate `Phoenix.Socket.Reply` and `Phoenix.Socket.Message`
structs, encoding them with the serializer and dispatching the
serialized result to the transport.
For example, to handle "phx_leave" messages, which is recommended
to be handled by all channel implementations, one may do:
def handle_info(
%Message{topic: topic, event: "phx_leave"} = message,
%{topic: topic, serializer: serializer, transport_pid: transport_pid} = socket
) do
send transport_pid, serializer.encode!(build_leave_reply(message))
{:stop, {:shutdown, :left}, socket}
end
We also recommend all channels to monitor the `transport_pid`
on `init` and exit if the transport exits. We also advise to rewrite
`:normal` exit reasons (usually due to the socket being closed)
to the `{:shutdown, :closed}` to guarantee links are broken on
the channel exit (as a `:normal` exit does not break links):
def handle_info({:DOWN, _, _, transport_pid, reason}, %{transport_pid: transport_pid} = socket) do
reason = if reason == :normal, do: {:shutdown, :closed}, else: reason
{:stop, reason, socket}
end
Any process exit is treated as an error by the socket layer unless
a `{:socket_close, pid, reason}` message is sent to the socket before
shutdown.
Custom channel implementations cannot be tested with `Phoenix.ChannelTest`
and are currently considered experimental. The underlying API may be
changed at any moment.
**Note:** in future Phoenix versions we will require custom channels
to provide a custom `child_spec/1` function instead of `start_link/1`.
Since the default behaviour of `child_spec/1` is to invoke `start_link/1`,
this behaviour should be backwards compatible in almost all cases.
"""
require Logger
require Phoenix.Endpoint
alias Phoenix.Socket
alias Phoenix.Socket.{Broadcast, Message, Reply}
@doc """
Receives the socket params and authenticates the connection.
## Socket params and assigns
Socket params are passed from the client and can
be used to verify and authenticate a user. After
verification, you can put default assigns into
the socket that will be set for all channels, ie
{:ok, assign(socket, :user_id, verified_user_id)}
To deny connection, return `:error`.
See `Phoenix.Token` documentation for examples in
performing token verification on connect.
"""
@callback connect(params :: map, Socket.t) :: {:ok, Socket.t} | :error
@callback connect(params :: map, Socket.t, connect_info :: map) :: {:ok, Socket.t} | :error
@doc ~S"""
Identifies the socket connection.
Socket IDs are topics that allow you to identify all sockets for a given user:
def id(socket), do: "users_socket:#{socket.assigns.user_id}"
Would allow you to broadcast a `"disconnect"` event and terminate
all active sockets and channels for a given user:
MyApp.Endpoint.broadcast("users_socket:" <> user.id, "disconnect", %{})
Returning `nil` makes this socket anonymous.
"""
@callback id(Socket.t) :: String.t | nil
@optional_callbacks connect: 2, connect: 3
# Exception carrying only a `:message` field, used to signal that an
# incoming socket message could not be decoded/understood.
defmodule InvalidMessageError do
  @moduledoc """
  Raised when the socket message is invalid.
  """
  defexception [:message]
end
# Fields of %Phoenix.Socket{}. The struct is created by the transport on
# connect and carried through every channel join on that connection.
defstruct assigns: %{},
          channel: nil,
          channel_pid: nil,
          endpoint: nil,
          handler: nil,
          id: nil,
          joined: false,
          join_ref: nil,
          private: %{},
          pubsub_server: nil,
          ref: nil,
          serializer: nil,
          topic: nil,
          transport: nil,
          transport_pid: nil

# Typespec fixes relative to the struct defaults above:
#   * removed the trailing comma after the last field (syntax error)
#   * id is a String.t once `c:id/1` returns one, nil otherwise
#   * private is an arbitrary map, not the literal empty map
#   * join_ref added to mirror the struct
@type t :: %Socket{
        assigns: map,
        channel: atom,
        channel_pid: pid,
        endpoint: atom,
        handler: atom,
        id: String.t | nil,
        joined: boolean,
        join_ref: term,
        ref: term,
        private: map,
        pubsub_server: atom,
        serializer: atom,
        topic: String.t,
        transport: atom,
        transport_pid: pid
      }
# Injected into any module that does `use Phoenix.Socket`. Accepts a
# `:log` option controlling the connect log level (defaults to :info).
defmacro __using__(opts) do
  quote do
    ## User API

    # Bring in channel/3 and friends, and declare both contracts this
    # module satisfies: the user-facing Phoenix.Socket behaviour and the
    # lower-level Phoenix.Socket.Transport behaviour.
    import Phoenix.Socket
    @behaviour Phoenix.Socket
    @before_compile Phoenix.Socket
    # Accumulate every channel/3 declaration; compiled into __channel__/1
    # clauses by __before_compile__/1.
    Module.register_attribute(__MODULE__, :phoenix_channels, accumulate: true)
    @phoenix_log Keyword.get(unquote(opts), :log, :info)

    ## Callbacks

    @behaviour Phoenix.Socket.Transport

    # All transport callbacks delegate to the shared implementations
    # defined in Phoenix.Socket below.
    @doc false
    def child_spec(opts) do
      Phoenix.Socket.__child_spec__(__MODULE__, opts)
    end

    @doc false
    def connect(map), do: Phoenix.Socket.__connect__(__MODULE__, map, @phoenix_log)

    @doc false
    def init(state), do: Phoenix.Socket.__init__(state)

    @doc false
    def handle_in(message, state), do: Phoenix.Socket.__in__(message, state)

    @doc false
    def handle_info(message, state), do: Phoenix.Socket.__info__(message, state)

    @doc false
    def terminate(reason, state), do: Phoenix.Socket.__terminate__(reason, state)
  end
end
## USER API
@doc """
Adds a key/value pair to socket assigns.
## Examples
iex> socket.assigns[:token]
nil
iex> socket = assign(socket, :token, "bar")
iex> socket.assigns[:token]
"bar"
"""
def assign(socket = %Socket{}, key, value) do
put_in socket.assigns[key], value
end
@doc """
Defines a channel matching the given topic and transports.
* `topic_pattern` - The string pattern, for example `"room:*"`, `"users:*"`,
or `"system"`
* `module` - The channel module handler, for example `MyApp.RoomChannel`
* `opts` - The optional list of options, see below
## Options
* `:assigns` - the map of socket assigns to merge into the socket on join
## Examples
channel "topic1:*", MyChannel
## Topic Patterns
The `channel` macro accepts topic patterns in two flavors. A splat (the `*`
character) argument can be provided as the last character to indicate a
`"topic:subtopic"` match. If a plain string is provided, only that topic will
match the channel handler. Most use-cases will use the `"topic:*"` pattern to
allow more versatile topic scoping.
See `Phoenix.Channel` for more information
"""
defmacro channel(topic_pattern, module, opts \\ []) do
# Tear the alias to simply store the root in the AST.
# This will make Elixir unable to track the dependency between
# endpoint <-> socket and avoid recompiling the endpoint
# (alongside the whole project) whenever the socket changes.
module = tear_alias(module)
quote do
@phoenix_channels {unquote(topic_pattern), unquote(module), unquote(opts)}
end
end
# Splits a compile-time alias so only its root segment is embedded in the
# stored AST; the full module name is reassembled at runtime with
# Module.concat/1. This is what breaks the compile-time dependency
# described in the comment inside channel/3.
defp tear_alias({:__aliases__, meta, [h|t]}) do
  alias = {:__aliases__, meta, [h]}
  quote do
    Module.concat([unquote(alias)|unquote(t)])
  end
end

# Anything that is not an alias node (atoms, variables, ...) passes through.
defp tear_alias(other), do: other
# Deprecated no-op kept for backwards compatibility with socket modules
# that still call transport/3; it only emits a compile-time warning.
@doc false
defmacro transport(_name, _module, _config \\ []) do
  quote do
    IO.warn "transport/3 in Phoenix.Socket is deprecated and has no effect"
  end
end
# Compiles every accumulated channel/3 declaration into a __channel__/1
# clause mapping a topic match to {module, opts}, plus a final catch-all
# clause that returns nil for unregistered topics.
defmacro __before_compile__(env) do
  channels = Module.get_attribute(env.module, :phoenix_channels)

  channel_defs =
    for {topic_pattern, module, opts} <- channels do
      topic_pattern
      |> to_topic_match()
      |> defchannel(module, opts)
    end

  quote do
    unquote(channel_defs)
    # The catch-all must come after all pattern clauses.
    def __channel__(_topic), do: nil
  end
end
# Compiles a topic pattern into the match used by __channel__/1.
#
# A trailing "*" becomes a binary prefix match (e.g. "room:*" matches any
# topic starting with "room:"); a plain string matches only itself.
# Raises if "*" appears anywhere other than at the end.
defp to_topic_match(pattern) do
  case String.split(pattern, "*") do
    [prefix, ""] ->
      quote do
        <<unquote(prefix) <> _rest>>
      end

    [exact] ->
      exact

    _misplaced_splat ->
      raise ArgumentError, "channels using splat patterns must end with *"
  end
end
# Builds one __channel__/1 clause for a compiled topic match; opts are
# escaped so the literal keyword list is embedded in the generated code.
defp defchannel(topic_match, channel_module, opts) do
  quote do
    def __channel__(unquote(topic_match)), do: unquote({channel_module, Macro.escape(opts)})
  end
end
## CALLBACKS IMPLEMENTATION
# Builds the supervision spec for the socket's channel-server pool.
# Requires an :endpoint option; :shutdown and :partitions are optional.
#
# NOTE(review): Supervisor.Spec (worker/supervisor) is deprecated on
# modern Elixir; left as-is here to preserve the exact spec shape the
# pool supervisor consumes.
def __child_spec__(handler, opts) do
  import Supervisor.Spec
  endpoint = Keyword.fetch!(opts, :endpoint)
  shutdown = Keyword.get(opts, :shutdown, 5_000)
  # Default to one pool partition per scheduler.
  partitions = Keyword.get(opts, :partitions) || System.schedulers_online()
  worker_opts = [shutdown: shutdown, restart: :temporary]
  worker = worker(Phoenix.Channel.Server, [], worker_opts)
  args = {endpoint, handler, partitions, worker}
  supervisor(Phoenix.Socket.PoolSupervisor, [args], id: handler)
end
# Shared connect implementation for sockets defined with `use Phoenix.Socket`.
#
# Negotiates a serializer for the client's requested protocol version
# (the "vsn" param, defaulting to "1.0.0"), then runs the user's connect
# callback, instrumented through the endpoint. Returns the
# `{:ok, {state, socket}} | :error` shape the transport expects.
def __connect__(user_socket, map, log) do
  %{
    endpoint: endpoint,
    options: options,
    transport: transport,
    params: params,
    connect_info: connect_info
  } = map

  vsn = params["vsn"] || "1.0.0"
  meta = Map.merge(map, %{vsn: vsn, user_socket: user_socket, log: log})

  Phoenix.Endpoint.instrument(endpoint, :phoenix_socket_connect, meta, fn ->
    case negotiate_serializer(Keyword.fetch!(options, :serializer), vsn) do
      {:ok, serializer} ->
        user_socket
        |> user_connect(endpoint, transport, serializer, params, connect_info)
        |> log_connect_result(user_socket, log)

      :error -> :error
    end
  end)
end
# Logs the outcome of a connect attempt at the configured level and
# passes the result straight through. Logging is disabled entirely when
# the level is `false`.
defp log_connect_result(result, _user_socket, false = _level) do
  result
end

defp log_connect_result({:ok, _} = result, user_socket, level) do
  Logger.log(level, fn -> "Replied #{inspect(user_socket)} :ok" end)
  result
end

defp log_connect_result(:error, user_socket, level) do
  Logger.log(level, fn -> "Replied #{inspect(user_socket)} :error" end)
  :error
end
# Transport init: subscribe to the socket's id topic (when identified) so
# a broadcast "disconnect" reaches this process, and record our own pid
# as the transport_pid on the socket.
def __init__({state, %{id: id, endpoint: endpoint} = socket}) do
  _ = id && endpoint.subscribe(id, link: true)
  {:ok, {state, %{socket | transport_pid: self()}}}
end
# Incoming frame from the transport: decode it with the negotiated
# serializer and dispatch based on whether the target topic already has a
# joined channel in state.channels.
def __in__({payload, opts}, {state, socket}) do
  %{topic: topic} = message = socket.serializer.decode!(payload, opts)
  handle_in(Map.get(state.channels, topic), message, state, socket)
end
# A monitored channel process exited: drop it from the state and push a
# "phx_error" frame so the client knows its channel crashed.
def __info__({:DOWN, ref, _, pid, reason}, {state, socket}) do
  case state.channels_inverse do
    %{^pid => {topic, join_ref}} ->
      state = delete_channel(state, pid, topic, ref)
      {:push, encode_on_exit(socket, topic, join_ref, reason), {state, socket}}

    %{} ->
      {:ok, {state, socket}}
  end
end

# Broadcast on the socket's id topic (see `c:id/1` docs): stop the socket.
def __info__(%Broadcast{event: "disconnect"}, state) do
  {:stop, {:shutdown, :disconnected}, state}
end

# Pre-encoded frame from a channel process: forward it to the transport.
def __info__({:socket_push, opcode, payload}, state) do
  {:push, {opcode, payload}, state}
end

# A channel announced a graceful close: demonitor it and push "phx_close".
def __info__({:socket_close, pid, _reason}, {state, socket}) do
  case state.channels_inverse do
    %{^pid => {topic, join_ref}} ->
      {^pid, monitor_ref} = Map.fetch!(state.channels, topic)
      state = delete_channel(state, pid, topic, monitor_ref)
      {:push, encode_close(socket, topic, join_ref), {state, socket}}

    %{} ->
      {:ok, {state, socket}}
  end
end

# Explicit GC hook for the socket process.
def __info__(:garbage_collect, state) do
  :erlang.garbage_collect(self())
  {:ok, state}
end

# Catch-all: ignore unknown messages rather than crashing the socket.
def __info__(_, state) do
  {:ok, state}
end
# No explicit cleanup is performed when the transport terminates.
def __terminate__(_reason, _state_socket) do
  :ok
end
# Picks the first configured serializer whose version requirement matches
# the client's requested transport version `vsn`.
#
# Returns {:ok, serializer}, or logs the mismatch / parse failure and
# returns :error.
defp negotiate_serializer(serializers, vsn) when is_list(serializers) do
  case Version.parse(vsn) do
    {:ok, parsed} ->
      found =
        Enum.find(serializers, :error, fn {_serializer, requirement} ->
          Version.match?(parsed, requirement)
        end)

      case found do
        {serializer, _requirement} ->
          {:ok, serializer}

        :error ->
          Logger.error "The client's requested transport version \"#{parsed}\" " <>
            "does not match server's version requirements of #{inspect serializers}"
          :error
      end

    :error ->
      Logger.error "Client sent invalid transport version \"#{vsn}\""
      :error
  end
end
# Runs the user's connect callback (connect/3 when exported, otherwise
# connect/2) and, on success, asks `c:id/1` for the socket identifier.
# Returns {:ok, {state, socket}} or :error, logging invalid id values and
# invalid connect return values.
defp user_connect(handler, endpoint, transport, serializer, params, connect_info) do
  # The information in the Phoenix.Socket goes to userland and channels.
  socket = %Socket{
    handler: handler,
    endpoint: endpoint,
    pubsub_server: endpoint.__pubsub_server__,
    serializer: serializer,
    transport: transport
  }

  # The information in the state is kept only inside the socket process.
  state = %{
    channels: %{},
    channels_inverse: %{}
  }

  connect_result =
    if function_exported?(handler, :connect, 3) do
      handler.connect(params, socket, connect_info)
    else
      handler.connect(params, socket)
    end

  case connect_result do
    {:ok, %Socket{} = socket} ->
      case handler.id(socket) do
        nil ->
          # nil id means an anonymous socket; no id topic is set.
          {:ok, {state, socket}}

        id when is_binary(id) ->
          {:ok, {state, %{socket | id: id}}}

        invalid ->
          Logger.error "#{inspect handler}.id/1 returned invalid identifier " <>
            "#{inspect invalid}. Expected nil or a string."
          :error
      end

    :error ->
      :error

    invalid ->
      connect_arity = if function_exported?(handler, :connect, 3), do: "connect/3", else: "connect/2"
      Logger.error "#{inspect handler}. #{connect_arity} returned invalid value #{inspect invalid}. " <>
        "Expected {:ok, socket} or :error"
      :error
  end
end
# Transport-level heartbeat: reply :ok directly, no channel involved.
defp handle_in(_, %{ref: ref, topic: "phoenix", event: "heartbeat"}, state, socket) do
  reply = %Reply{
    ref: ref,
    topic: "phoenix",
    status: :ok,
    payload: %{}
  }

  {:reply, :ok, encode_reply(socket, reply), {state, socket}}
end

# Join on a topic with no channel yet: look up the channel module
# registered for the topic (via __channel__/1), start it, and track its
# pid on success.
defp handle_in(nil, %{event: "phx_join", topic: topic, ref: ref} = message, state, socket) do
  case socket.handler.__channel__(topic) do
    {channel, opts} ->
      case Phoenix.Channel.Server.join(socket, channel, message, opts) do
        {:ok, reply, pid} ->
          reply = %Reply{join_ref: ref, ref: ref, topic: topic, status: :ok, payload: reply}
          state = put_channel(state, pid, topic, ref)
          {:reply, :ok, encode_reply(socket, reply), {state, socket}}

        {:error, reply} ->
          reply = %Reply{join_ref: ref, ref: ref, topic: topic, status: :error, payload: reply}
          {:reply, :error, encode_reply(socket, reply), {state, socket}}
      end

    _ ->
      # No channel registered for this topic.
      {:reply, :error, encode_ignore(socket, message), {state, socket}}
  end
end

# Duplicate join: shut down the existing channel, then retry as a fresh join.
defp handle_in({pid, ref}, %{event: "phx_join", topic: topic} = message, state, socket) do
  Logger.debug fn ->
    "Duplicate channel join for topic \"#{topic}\" in #{inspect(socket.handler)}. " <>
      "Closing existing channel for new join."
  end

  :ok = shutdown_duplicate_channel(pid)
  state = delete_channel(state, pid, topic, ref)
  handle_in(nil, message, state, socket)
end

# Any other event on a joined topic: forward the message to the channel.
defp handle_in({pid, _ref}, message, state, socket) do
  send(pid, message)
  {:ok, {state, socket}}
end

# Any other event on an unjoined topic: reply with an error.
defp handle_in(nil, message, state, socket) do
  {:reply, :error, encode_ignore(socket, message), {state, socket}}
end
# Registers a joined channel under both lookup directions:
# channels: topic => {pid, monitor_ref} and channels_inverse:
# pid => {topic, join_ref}. Also starts monitoring the channel pid.
defp put_channel(state, pid, topic, join_ref) do
  monitor_ref = Process.monitor(pid)

  state
  |> Map.update!(:channels, &Map.put(&1, topic, {pid, monitor_ref}))
  |> Map.update!(:channels_inverse, &Map.put(&1, pid, {topic, join_ref}))
end
# Removes a channel from both lookup maps and flushes its monitor so no
# stale :DOWN message is left behind in the mailbox.
defp delete_channel(state, pid, topic, monitor_ref) do
  Process.demonitor(monitor_ref, [:flush])

  state
  |> Map.update!(:channels, &Map.delete(&1, topic))
  |> Map.update!(:channels_inverse, &Map.delete(&1, pid))
end
# Encodes the "phx_error" frame pushed when a channel exits abnormally.
defp encode_on_exit(socket, topic, ref, _reason) do
  message = %Message{join_ref: ref, ref: ref, topic: topic, event: "phx_error", payload: %{}}
  encode_reply(socket, message)
end
# Encodes an error reply for a topic no channel is registered for,
# logging a warning with the offending topic and handler.
defp encode_ignore(%{handler: handler} = socket, %{ref: ref, topic: topic}) do
  Logger.warn fn -> "Ignoring unmatched topic \"#{topic}\" in #{inspect(handler)}" end
  reply = %Reply{ref: ref, topic: topic, status: :error, payload: %{reason: "unmatched topic"}}
  encode_reply(socket, reply)
end
# Runs a message through the socket's serializer and returns the
# {opcode, payload} tuple the transport pushes to the client. The match
# is intentionally assertive: a serializer returning anything else is a bug.
defp encode_reply(%{serializer: serializer}, message) do
  {:socket_push, opcode, payload} = serializer.encode!(message)
  {opcode, payload}
end
# Encodes the "phx_close" frame pushed when a channel closes gracefully.
defp encode_close(socket, topic, join_ref) do
  message = %Message{join_ref: join_ref, ref: join_ref, topic: topic, event: "phx_close", payload: %{}}
  encode_reply(socket, message)
end
# Asks a duplicated channel to shut down and waits for its :DOWN message.
# Escalates to a brutal :kill if the channel has not exited after 5s, then
# waits again (the kill guarantees a :DOWN will arrive).
defp shutdown_duplicate_channel(pid) do
  ref = Process.monitor(pid)
  Process.exit(pid, {:shutdown, :duplicate_join})

  receive do
    {:DOWN, ^ref, _, _, _} -> :ok
  after
    5_000 ->
      Process.exit(pid, :kill)
      receive do: ({:DOWN, ^ref, _, _, _} -> :ok)
  end
end
end
|
lib/phoenix/socket.ex
| 0.928894 | 0.591959 |
socket.ex
|
starcoder
|
defmodule Spherical.R2.Rectangle do
  @moduledoc ~S"""
  Represents a closed axis-aligned rectangle in two-dimensional space (R2).

  A rectangle is stored as one `Spherical.R1.Interval` per axis, so every
  point it contains is addressed by an (x, y) coordinate pair.
  """

  alias __MODULE__
  alias Spherical.R1.Interval
  alias Spherical.R2.Point

  defstruct x: %Interval{}, y: %Interval{}

  @type t :: %Rectangle{x: Interval.t, y: Interval.t}

  # API

  @doc "Returns an empty rectangle."
  def new do
    %Rectangle{}
  end

  @doc "Returns a rectangle with the given intervals."
  def new(%Interval{} = x, %Interval{} = y) do
    %Rectangle{x: x, y: y}
  end

  @doc """
  Returns the smallest rectangle that contains the given `points`.

  The list must contain at least one point.
  """
  def from_points([%Point{} = first | others]) do
    # Seed with a degenerate rectangle at the first point, then grow it to
    # cover the rest. (Fix: the original piped into `Enum.reduce` without
    # parentheses, which is ambiguous and rejected by modern Elixir.)
    rectangle = %Rectangle{x: %Interval{lo: first.x, hi: first.x},
                           y: %Interval{lo: first.y, hi: first.y}}

    Enum.reduce(others, rectangle, fn point, r -> add_point(r, point) end)
  end

  @doc """
  Returns a rectangle with the given `center` and `size`.

  Both dimensions of size **must** be non-negative.
  """
  def from_center(%Point{} = center, %Point{} = size) do
    new(Interval.new(center.x - size.x / 2, center.x + size.x / 2),
        Interval.new(center.y - size.y / 2, center.y + size.y / 2))
  end

  @doc "Checks whether the `rectangle` is empty."
  def is_empty?(%Rectangle{} = rectangle) do
    # For a valid rectangle both intervals are empty together (see
    # is_valid?/1), so checking the x-interval suffices.
    Interval.is_empty?(rectangle.x)
  end

  @doc """
  Checks whether the `rectangle` is valid.

  This requires the width to be empty if the height is empty.
  """
  def is_valid?(%Rectangle{} = rectangle) do
    Interval.is_empty?(rectangle.x) == Interval.is_empty?(rectangle.y)
  end

  @doc "Returns the center of the `rectangle` in R2."
  def center(%Rectangle{x: x, y: y}) do
    Point.new(Interval.center(x), Interval.center(y))
  end

  @doc """
  Returns all four vertices of the `rectangle`.

  Vertices are returned in CCW direction starting with the lower left
  corner.
  """
  def vertices(%Rectangle{x: x, y: y}) do
    [Point.new(x.lo, y.lo),
     Point.new(x.hi, y.lo),
     Point.new(x.hi, y.hi),
     Point.new(x.lo, y.hi)]
  end

  @doc """
  Returns the width and height of this `rectangle` in (x,y)-space.

  Empty rectangles have a negative width and height.
  """
  def size(%Rectangle{x: x, y: y}) do
    Point.new(Interval.length(x), Interval.length(y))
  end

  @doc """
  Checks whether the `rectangle` contains the given `point`.

  Rectangles are closed regions, i.e. they contain their boundary.
  """
  def contains_point?(%Rectangle{} = rectangle, %Point{} = point) do
    Interval.contains?(rectangle.x, point.x) &&
      Interval.contains?(rectangle.y, point.y)
  end

  @doc """
  Returns true if the given `point` is contained in the interior of the
  `rectangle` (i.e. the region excluding its boundary).
  """
  def interior_contains_point?(%Rectangle{} = rectangle, %Point{} = point) do
    Interval.interior_contains?(rectangle.x, point.x) &&
      Interval.interior_contains?(rectangle.y, point.y)
  end

  @doc "Checks whether the `first` rectangle contains the `second`."
  def contains?(%Rectangle{} = first, %Rectangle{} = second) do
    Interval.contains?(first.x, second.x) &&
      Interval.contains?(first.y, second.y)
  end

  @doc """
  Checks whether the interior of the `first` rectangle contains all of
  the points of the `second` (including its boundary).
  """
  def interior_contains?(%Rectangle{} = first, %Rectangle{} = second) do
    Interval.interior_contains?(first.x, second.x) &&
      Interval.interior_contains?(first.y, second.y)
  end

  @doc """
  Checks whether the `first` rectangle and the `second` have any
  points in common.
  """
  def intersects?(%Rectangle{} = first, %Rectangle{} = second) do
    Interval.intersects?(first.x, second.x) &&
      Interval.intersects?(first.y, second.y)
  end

  @doc """
  Checks whether the interior of the `first` rectangle intersects any
  point (including the boundary) of the `second`.
  """
  def interior_intersects?(%Rectangle{} = first, %Rectangle{} = second) do
    Interval.interior_intersects?(first.x, second.x) &&
      Interval.interior_intersects?(first.y, second.y)
  end

  @doc """
  Expands the `rectangle` to include the given `point`.

  The rectangle is expanded by the minimum amount possible.
  """
  def add_point(%Rectangle{} = rectangle, %Point{} = point) do
    # TODO: Is this an R2 function instead?
    %{rectangle | x: Interval.add_point(rectangle.x, point.x),
                  y: Interval.add_point(rectangle.y, point.y)}
  end

  @doc """
  Expands the `first` rectangle to include the `second`.

  This is the same as replacing the one rectangle by the union of the
  two rectangles, but is more efficient.
  """
  def add_rectangle(%Rectangle{} = first, %Rectangle{} = second) do
    union(first, second)
  end

  @doc """
  Returns the closest point in the `rectangle` to the given `point`.

  The `rectangle` must be non-empty.
  """
  def clamp_point(%Rectangle{} = rectangle, %Point{} = point) do
    Point.new(Interval.clamp_point(rectangle.x, point.x),
              Interval.clamp_point(rectangle.y, point.y))
  end

  @doc """
  Returns a `rectangle` that has been expanded in the x-direction by
  `margin.x` and in y-direction by `margin.y`.

  A negative margin shrinks the interval on the corresponding sides
  instead, so the resulting rectangle may be empty. Any expansion of an
  empty rectangle remains empty.
  """
  def expanded(%Rectangle{} = rectangle, %Point{} = margin) do
    x = Interval.expanded(rectangle.x, margin.x)
    y = Interval.expanded(rectangle.y, margin.y)

    if Interval.is_empty?(x) || Interval.is_empty?(y) do
      Rectangle.new()
    else
      %{rectangle | x: x, y: y}
    end
  end

  @doc """
  Returns a `rectangle` that has been expanded by the amount on all
  sides by `margin`.
  """
  def expanded_by_margin(%Rectangle{} = rectangle, margin) when is_number(margin) do
    expanded(rectangle, Point.new(margin))
  end

  @doc """
  Returns the smallest rectangle containing the union of the `first`
  and `second` rectangles.
  """
  def union(%Rectangle{} = first, %Rectangle{} = second) do
    Rectangle.new(Interval.union(first.x, second.x),
                  Interval.union(first.y, second.y))
  end

  @doc """
  Returns the smallest rectangle containing the intersection of the
  `first` and `second` rectangles.
  """
  def intersection(%Rectangle{} = first, %Rectangle{} = second) do
    x = Interval.intersection(first.x, second.x)
    y = Interval.intersection(first.y, second.y)

    # An empty result on either axis means the rectangles do not overlap.
    if Interval.is_empty?(x) || Interval.is_empty?(y) do
      Rectangle.new()
    else
      Rectangle.new(x, y)
    end
  end

  @doc """
  Returns true if the x- and y-intervals of the two rectangles are the
  same up to the given tolerance.
  """
  def approx_equals(%Rectangle{} = first, %Rectangle{} = second) do
    Interval.approx_equal(first.x, second.x) &&
      Interval.approx_equal(first.y, second.y)
  end
end
|
lib/spherical/r2/rectangle.ex
| 0.927888 | 0.936401 |
rectangle.ex
|
starcoder
|
defmodule Readability do
  @moduledoc """
  Readability library for extracting & curating articles.

  ## Example

  ```elixir
  @type html :: binary

  # Just pass url
  %Readability.Summary{title: title, authors: authors, article_html: article} = Readability.summarize(url)

  # Extract title
  Readability.title(html)

  # Extract authors.
  Readability.authors(html)

  # Extract only text from article
  article = html
  |> Readability.article
  |> Readability.readable_text

  # Extract article with transformed html
  article = html
  |> Readability.article
  |> Readability.raw_html
  ```
  """

  alias Readability.TitleFinder
  alias Readability.AuthorFinder
  alias Readability.ArticleBuilder
  alias Readability.Summary
  alias Readability.Helper

  # Tunable extraction knobs; callers may override any of them through the
  # `opts` keyword list accepted by the public functions.
  @default_options [retry_length: 250,
                    min_text_length: 125,
                    remove_unlikely_candidates: true,
                    weight_classes: true,
                    clean_conditionally: true,
                    remove_empty_nodes: true,
                    min_image_width: 130,
                    min_image_height: 80,
                    ignore_image_format: [],
                    blacklist: nil,
                    whitelist: nil,
                    page_url: nil
                   ]

  # Precompiled heuristic regexes used for candidate scoring, HTML
  # normalization, attribute protection and video detection.
  @regexes [unlikely_candidate: ~r/hidden|^hid$| hid$| hid |^hid |banner|combx|comment|community|disqus|extra|foot|header|hidden|lightbox|modal|menu|meta|nav|remark|rss|shoutbox|sidebar|sidebar-item|aside|sponsor|ad-break|agegate|pagination|pager|popup|ad-wrapper|advertisement|social|popup|yom-remote|share|social|mailmunch|relatedposts|sharedaddy|sumome-share/i,
            ok_maybe_its_a_candidate: ~r/and|article|body|column|main|shadow/i,
            positive: ~r/article|body|content|entry|hentry|main|page|pagination|post|text|blog|story|article/i,
            negative: ~r/hidden|^hid|combx|comment|com-|contact|foot|footer|footnote|link|masthead|media|meta|outbrain|promo|related|scroll|shoutbox|sidebar|sponsor|shopping|tags|tool|utility|widget|modal/i,
            div_to_p_elements: ~r/<(a|blockquote|dl|div|img|ol|p|pre|table|ul)/i,
            replace_brs: ~r/(<br[^>]*>[ \n\r\t]*){2,}/i,
            replace_fonts: ~r/<(\/?)font[^>]*>/i,
            replace_xml_version: ~r/<\?xml.*\?>/i,
            normalize: ~r/\s{2,}|(<hr\/?>){2,}/,
            video: ~r/\/\/(www\.)?(dailymotion|youtube|youtube-nocookie|player\.vimeo)\.com/i,
            protect_attrs: ~r/^(?!id|rel|for|summary|title|href|data-src|src|srcdoc|height|width|class)/i
           ]

  @type html_tree :: tuple | list
  @type raw_html :: binary
  @type url :: binary
  @type options :: list

  @doc """
  Summarizes the primary readable content of a webpage.

  Fetches the given `url` over HTTP and returns a `Readability.Summary`
  with the extracted title, authors, article HTML and article text.
  """
  @spec summarize(url, options) :: Summary.t
  def summarize(url, opts \\ []) do
    opts = @default_options
           |> Keyword.merge(opts)
           |> Keyword.merge([page_url: url])

    httpoison_options = Application.get_env :readability, :httpoison_options, []
    %{status_code: _, body: raw_html, headers: headers} = HTTPoison.get!(url, [], httpoison_options)

    html_tree = Helper.ungzip(raw_html, headers)
                |> Helper.normalize
                |> Helper.remove_attrs(regexes(:protect_attrs))
                |> Helper.to_absolute(url)

    article_tree = html_tree |> ArticleBuilder.build(opts)

    %Summary{title: title(html_tree),
             authors: authors(html_tree),
             article_html: readable_html(article_tree),
             article_text: readable_text(article_tree)
    }
  end

  @doc """
  Like `summarize/2`, but operates on an HTML string the caller has
  already fetched.

  Pass `url: url` in `opts` to resolve relative links against that URL.
  """
  def summarize_source(raw_html, opts \\ []) do
    url = Keyword.get(opts, :url)
    opts = @default_options
           |> Keyword.merge(opts)

    opts = case url do
      nil -> opts
      _ -> opts |> Keyword.merge([page_url: url])
    end

    html_tree = raw_html
                |> Helper.normalize
                |> Helper.remove_attrs(regexes(:protect_attrs))

    # Absolute-link rewriting is only possible when a base URL was given.
    html_tree = case url do
      nil -> html_tree
      _ ->
        html_tree |> Helper.to_absolute(url)
    end

    article_tree = html_tree |> ArticleBuilder.build(opts)

    %Summary{title: title(html_tree),
             authors: authors(html_tree),
             article_html: readable_html(article_tree),
             article_text: readable_text(article_tree)
    }
  end

  @doc """
  Extract title

  ## Example

      iex> title = Readability.title(html_str)
      "Some title in html"
  """
  @spec title(binary | html_tree) :: binary
  def title(raw_html) when is_binary(raw_html) do
    raw_html
    |> Helper.normalize
    |> title
  end
  def title(html_tree), do: TitleFinder.title(html_tree)

  @doc """
  Extract authors

  ## Example

      iex> authors = Readability.authors(html_str)
      ["<NAME>", "chrismccord"]
  """
  @spec authors(binary | html_tree) :: list[binary]
  def authors(html) when is_binary(html), do: html |> parse |> authors
  def authors(html_tree), do: AuthorFinder.find(html_tree)

  @doc """
  Using a variety of metrics (content score, classname, element types), find the content that is
  most likely to be the stuff a user wants to read.

  ## Example

      iex> article_tree = Readability.article(html_str)
      # returns article that is tuple
  """
  @spec article(binary, options) :: html_tree
  def article(raw_html, opts \\ []) do
    opts = Keyword.merge(@default_options, opts)
    raw_html
    |> Helper.normalize
    |> Helper.remove_attrs(regexes(:protect_attrs))
    |> ArticleBuilder.build(opts)
  end

  @doc """
  return attributes, tags cleaned html
  """
  @spec readable_html(html_tree) :: binary
  def readable_html(html_tree) do
    html_tree
    |> Helper.remove_attrs(regexes(:protect_attrs))
    |> raw_html
  end

  @doc """
  return only text binary from html_tree
  """
  @spec readable_text(html_tree) :: binary
  def readable_text(html_tree) do
    # TODO: Remove image caption when extract only text
    # Block-level closing tags become newlines so text keeps its paragraphs.
    tags_to_br = ~r/<\/(p|div|article|h\d)/i
    html_str = html_tree |> raw_html
    # NOTE(review): String.strip/1 is deprecated in favor of String.trim/1
    # on Elixir >= 1.3 — confirm the supported Elixir range before changing.
    Regex.replace(tags_to_br, html_str, &("\n#{&1}"))
    |> Floki.parse
    |> Floki.text
    |> String.strip
  end

  @doc """
  return raw html binary from html_tree
  """
  @spec raw_html(html_tree) :: binary
  def raw_html(html_tree) do
    html_tree |> Floki.raw_html
  end

  # Parses a raw HTML binary into a Floki html_tree.
  def parse(raw_html) when is_binary(raw_html), do: Floki.parse(raw_html)

  # Fetches one of the precompiled heuristic regexes by key.
  def regexes(key), do: @regexes[key]

  # Exposes the default option set (useful for callers building opts).
  def default_options, do: @default_options
end
|
lib/readability.ex
| 0.715026 | 0.654729 |
readability.ex
|
starcoder
|
defmodule NewRelic.Transaction do
  @moduledoc """
  Transaction Reporting

  To enable Transaction reporting, you must instrument your Plug pipeline with a single line.
  The `NewRelic.Transaction` macro injects the required plugs to wire up automatic
  Transaction reporting.

  Be sure to `use` this as early in your Plug pipeline as possible to ensure the most
  accurate response times.

  ```elixir
  defmodule MyApp do
    use Plug.Router
    use NewRelic.Transaction
    # ...
  end
  ```

  To ignore reporting the current transaction, call:

  ```elixir
  NewRelic.ignore_transaction()
  ```

  Inside a Transaction, the agent will track work across processes that are spawned as
  well as work done inside a Task Supervisor. When using `Task.Supervisor.async_nolink`
  you can signal to the agent not to track the work done inside the Task, which will
  exclude it from the current Transaction. To do this, send in an additional option:

  ```elixir
  Task.Supervisor.async_nolink(
    MyTaskSupervisor,
    fn -> do_work() end,
    new_relic: :no_track
  )
  ```
  """

  # Injects the instrumentation plugs and the error handler into the
  # using module's Plug pipeline.
  defmacro __using__(_) do
    quote do
      plug(NewRelic.Transaction.Plug)
      plug(NewRelic.DistributedTrace.Plug)
      use NewRelic.Transaction.ErrorHandler
    end
  end

  @doc """
  If you send a custom error response in your own `Plug.ErrorHandler`,
  you **MUST** manually alert the agent of the error!

  ```elixir
  defmodule MyPlug do
    # ...
    use Plug.ErrorHandler
    def handle_errors(conn, error) do
      NewRelic.Transaction.handle_errors(conn, error)
      send_resp(conn, 500, "Oops!")
    end
  end
  ```
  """
  def handle_errors(conn, error) do
    # Tear down trace tracking, record final conn attributes, mark the
    # transaction failed, then complete it asynchronously.
    NewRelic.DistributedTrace.Tracker.cleanup(self())
    NewRelic.Transaction.Plug.add_stop_attrs(conn)
    NewRelic.Transaction.Reporter.fail(error)
    NewRelic.Transaction.Reporter.complete(self(), :async)
  end

  # Starts an "Other" (non-web) transaction for the calling process and
  # opens a fresh distributed-trace context for it.
  @doc false
  def start_transaction(category, name) do
    NewRelic.Transaction.Reporter.start_other_transaction(category, name)

    NewRelic.DistributedTrace.generate_new_context()
    |> NewRelic.DistributedTrace.track_transaction(transport_type: "Other")

    :ok
  end

  # Synchronously completes the calling process's current transaction.
  @doc false
  def stop_transaction() do
    NewRelic.Transaction.Reporter.complete(self(), :sync)
    :ok
  end

  # Drops the calling process's current transaction from reporting and
  # cleans up its distributed-trace tracking.
  @doc false
  def ignore_transaction() do
    NewRelic.Transaction.Reporter.ignore_transaction()
    NewRelic.DistributedTrace.Tracker.cleanup(self())
  end
end
|
lib/new_relic/transaction.ex
| 0.817574 | 0.782912 |
transaction.ex
|
starcoder
|
defmodule DiffieHellman do
  @moduledoc """
  Diffie-Hellman key exchange.

  Two users, Alice and Bob, agree on public primes P and G. Each picks a
  private key between 1 and P-1 and derives a public key as
  (G ** private_key) % P; the public keys are exchanged. Each side then
  raises the other's public key to its own private key modulo P — modular
  arithmetic guarantees both compute the same shared secret, while an
  eavesdropper holding only P, G and the public keys cannot recover it
  without one of the private keys.

  A video example is available at:
  https://www.khanacademy.org/computing/computer-science/cryptography/modern-crypt/v/diffie-hellman-key-exchange-part-2
  """

  @doc """
  Given a prime integer `prime_p`, return a random integer between 1 and `prime_p` - 1.
  """
  @spec generate_private_key(prime_p :: integer) :: integer
  def generate_private_key(prime_p) do
    # :rand.uniform/1 already yields a value in 1..n, exactly the
    # 1..(prime_p - 1) range required here.
    :rand.uniform(prime_p - 1)
  end

  @doc """
  Given two prime integers as generators (`prime_p` and `prime_g`), and a private key,
  generate a public key using the mathematical formula:
  (prime_g ** private_key) % prime_p
  """
  @spec generate_public_key(prime_p :: integer, prime_g :: integer, private_key :: integer) ::
          integer
  def generate_public_key(prime_p, prime_g, private_key) do
    # :crypto.mod_pow/3 performs the modular exponentiation efficiently
    # and returns a binary, decoded back into an integer.
    :binary.decode_unsigned(:crypto.mod_pow(prime_g, private_key, prime_p))
  end

  @doc """
  Given a prime integer `prime_p`, user B's public key, and user A's private key,
  generate a shared secret using the mathematical formula:
  (public_key_b ** private_key_a) % prime_p
  """
  @spec generate_shared_secret(
          prime_p :: integer,
          public_key_b :: integer,
          private_key_a :: integer
        ) :: integer
  def generate_shared_secret(prime_p, public_key_b, private_key_a) do
    # Same modular exponentiation as the public key, with the other
    # party's public key as the base.
    :binary.decode_unsigned(:crypto.mod_pow(public_key_b, private_key_a, prime_p))
  end
end
|
diffie-hellman/lib/diffie_hellman.ex
| 0.806358 | 0.693953 |
diffie_hellman.ex
|
starcoder
|
defmodule AWS.CloudHSMV2 do
@moduledoc """
For more information about AWS CloudHSM, see [AWS CloudHSM](http://aws.amazon.com/cloudhsm/) and the [AWS CloudHSM User Guide](https://docs.aws.amazon.com/cloudhsm/latest/userguide/).
"""
alias AWS.Client
alias AWS.Request
# Static service descriptor consumed by AWS.Request when signing and
# routing CloudHSM V2 API calls.
def metadata do
  %AWS.ServiceMetadata{
    abbreviation: "CloudHSM V2",
    api_version: "2017-04-28",
    content_type: "application/x-amz-json-1.1",
    credential_scope: nil,
    endpoint_prefix: "cloudhsmv2",
    global?: false,
    protocol: "json",
    service_id: "CloudHSM V2",
    signature_version: "v4",
    signing_name: "cloudhsm",
    target_prefix: "BaldrApiService"
  }
end
@doc """
Copy an AWS CloudHSM cluster backup to a different region.
"""
def copy_backup_to_region(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CopyBackupToRegion", input, options)
end
@doc """
Creates a new AWS CloudHSM cluster.
"""
def create_cluster(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateCluster", input, options)
end
@doc """
Creates a new hardware security module (HSM) in the specified AWS CloudHSM
cluster.
"""
def create_hsm(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateHsm", input, options)
end
@doc """
Deletes a specified AWS CloudHSM backup.
A backup can be restored up to 7 days after the DeleteBackup request is made.
For more information on restoring a backup, see `RestoreBackup`.
"""
def delete_backup(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteBackup", input, options)
end
@doc """
Deletes the specified AWS CloudHSM cluster.
Before you can delete a cluster, you must delete all HSMs in the cluster. To see
if the cluster contains any HSMs, use `DescribeClusters`. To delete an HSM, use
`DeleteHsm`.
"""
def delete_cluster(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteCluster", input, options)
end
@doc """
Deletes the specified HSM.
To specify an HSM, you can use its identifier (ID), the IP address of the HSM's
elastic network interface (ENI), or the ID of the HSM's ENI. You need to specify
only one of these values. To find these values, use `DescribeClusters`.
"""
def delete_hsm(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteHsm", input, options)
end
@doc """
Gets information about backups of AWS CloudHSM clusters.
This is a paginated operation, which means that each response might contain only
a subset of all the backups. When the response contains only a subset of
backups, it includes a `NextToken` value. Use this value in a subsequent
`DescribeBackups` request to get more backups. When you receive a response with
no `NextToken` (or an empty or null value), that means there are no more backups
to get.
"""
def describe_backups(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeBackups", input, options)
end
@doc """
Gets information about AWS CloudHSM clusters.
This is a paginated operation, which means that each response might contain only
a subset of all the clusters. When the response contains only a subset of
clusters, it includes a `NextToken` value. Use this value in a subsequent
`DescribeClusters` request to get more clusters. When you receive a response
with no `NextToken` (or an empty or null value), that means there are no more
clusters to get.
"""
def describe_clusters(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeClusters", input, options)
end
@doc """
Claims an AWS CloudHSM cluster by submitting the cluster certificate issued by
your issuing certificate authority (CA) and the CA's root certificate.
Before you can claim a cluster, you must sign the cluster's certificate signing
request (CSR) with your issuing CA. To get the cluster's CSR, use
`DescribeClusters`.
"""
def initialize_cluster(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "InitializeCluster", input, options)
end
@doc """
Gets a list of tags for the specified AWS CloudHSM cluster.
This is a paginated operation, which means that each response might contain only
a subset of all the tags. When the response contains only a subset of tags, it
includes a `NextToken` value. Use this value in a subsequent `ListTags` request
to get more tags. When you receive a response with no `NextToken` (or an empty
or null value), that means there are no more tags to get.
"""
def list_tags(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListTags", input, options)
end
@doc """
Modifies attributes for AWS CloudHSM backup.
"""
def modify_backup_attributes(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ModifyBackupAttributes", input, options)
end
@doc """
Modifies AWS CloudHSM cluster.
"""
def modify_cluster(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ModifyCluster", input, options)
end
@doc """
Restores a specified AWS CloudHSM backup that is in the `PENDING_DELETION`
state.
For mor information on deleting a backup, see `DeleteBackup`.
"""
def restore_backup(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "RestoreBackup", input, options)
end
@doc """
Adds or overwrites one or more tags for the specified AWS CloudHSM cluster.
"""
def tag_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "TagResource", input, options)
end
@doc """
Removes the specified tag or tags from the specified AWS CloudHSM cluster.
"""
def untag_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UntagResource", input, options)
end
end
|
lib/aws/generated/cloud_hsm_v2.ex
| 0.824709 | 0.472805 |
cloud_hsm_v2.ex
|
starcoder
|
defmodule Andy.Profiles.Rover.GMDefs.Being do
@moduledoc "The GM definition for :being"
alias Andy.GM.{GenerativeModelDef, Intention, Conjecture, Prediction}
import Andy.GM.Utils
# Builds the generative-model definition: a single :thriving conjecture with a
# prior belief of true and one (non-repeatable) verbal intention.
def gm_def() do
%GenerativeModelDef{
name: :being,
conjectures: [
conjecture(:thriving)
],
contradictions: [],
priors: %{thriving: %{about: :self, values: %{is: true}}},
intentions: %{
express_opinion_about_life: %Intention{
intent_name: :say,
valuator: opinion_about_thriving(),
repeatable: false
}
}
}
end
# Conjectures
defp conjecture(:thriving) do
%Conjecture{
name: :thriving,
self_activated: true,
activator: opinion_activator(:self),
predictors: [
no_change_predictor(:safe, default: %{is: true}),
sated_predictor(),
free_predictor()
],
valuator: thriving_belief_valuator(),
intention_domain: [:express_opinion_about_life]
}
end
# Conjecture predictors (these build %Prediction{} structs; the activator is
# set above via opinion_activator/1)
# Predicts :sated only while perceived as safe; returns nil (no prediction)
# otherwise.
defp sated_predictor() do
fn conjecture_activation, [round | _previous_rounds] ->
about = conjecture_activation.about
safe? = current_perceived_value(round, about, :safe, :is, default: true)
if safe? do
%Prediction{
conjecture_name: :sated,
about: about,
expectations: current_perceived_values(round, about, :sated, default: %{is: true})
}
else
nil
end
end
end
# Predicts :free only while perceived as both safe and sated; returns nil
# (no prediction) otherwise.
defp free_predictor() do
fn conjecture_activation, [round | _previous_rounds] ->
about = conjecture_activation.about
safe? = current_perceived_value(round, about, :safe, :is, default: true)
sated? = current_perceived_value(round, about, :sated, :is, default: true)
if safe? and sated? do
%Prediction{
conjecture_name: :free,
about: about,
expectations: current_perceived_values(round, about, :free, default: %{is: true})
}
else
nil
end
end
end
# Conjecture belief valuators
# Thriving is believed iff currently perceived as safe AND sated AND free.
# Note the asymmetric defaults: :sated/:safe default to false here, :free to true.
defp thriving_belief_valuator() do
fn conjecture_actuation, [round | _previous_rounds] ->
about = conjecture_actuation.about
sated? = current_perceived_value(round, about, :sated, :is, default: false)
safe? = current_perceived_value(round, about, :safe, :is, default: false)
free? = current_perceived_value(round, about, :free, :is, default: true)
%{is: safe? and sated? and free?}
end
end
# Intention valuators
# Speaks only when thriving; nil suppresses the intention.
defp opinion_about_thriving() do
fn %{is: thriving?} ->
if thriving?, do: saying("Life is good"), else: nil
end
end
end
|
lib/andy/profiles/rover/gm_defs/being.ex
| 0.627837 | 0.401658 |
being.ex
|
starcoder
|
defmodule ExJSONPath do
@moduledoc """
This module implements a JSONPath evaluator.
"""
alias ExJSONPath.ParsingError
@opaque path_token :: String.t() | pos_integer()
@opaque op :: :> | :>= | :< | :<= | :== | :!=
@opaque compiled_path ::
list(
{:access, path_token()}
| {:access, {op(), compiled_path(), term()}}
| {:recurse, path_token()}
| {:slice, non_neg_integer(), non_neg_integer(), non_neg_integer()}
| :wildcard
)
@doc """
Evaluate JSONPath on given input.
Returns `{:ok, [result1 | results]}` on success, {:error, %ExJSONPath.ParsingError{}} otherwise.
## Examples
iex> ExJSONPath.eval(%{"a" => %{"b" => 42}}, "$.a.b")
{:ok, [42]}
iex> ExJSONPath.eval([%{"v" => 1}, %{"v" => 2}, %{"v" => 3}], "$[?(@.v > 1)].v")
{:ok, [2, 3]}
iex> ExJSONPath.eval(%{"a" => %{"b" => 42}}, "$.x.y")
{:ok, []}
"""
@spec eval(term(), String.t() | compiled_path()) ::
{:ok, list(term())} | {:error, ParsingError.t()}
def eval(input, jsonpath)
# A binary path is compiled first; compile errors propagate unchanged.
def eval(input, path) when is_binary(path) do
with {:ok, compiled} <- compile(path) do
eval(input, compiled)
end
end
# An already-compiled path is evaluated directly; evaluation itself never fails,
# non-matching paths just yield an empty result list.
def eval(input, compiled_path) when is_list(compiled_path) do
{:ok, recurse(input, compiled_path)}
end
@doc """
Parse and compile a path.
Returns a {:ok, compiled_path} on success, {:error, reason} otherwise.
"""
@spec compile(String.t()) :: {:ok, compiled_path()} | {:error, ParsingError.t()}
def compile(path) when is_binary(path) do
# Lexer and parser are generated Erlang modules (leex/yecc); their error
# descriptors are formatted into a human-readable ParsingError message.
with charlist = String.to_charlist(path),
{:ok, tokens, _} <- :jsonpath_lexer.string(charlist),
{:ok, compiled} <- :jsonpath_parser.parse(tokens) do
{:ok, compiled}
else
{:error, {_line, :jsonpath_lexer, error_desc}, _} ->
message_string =
error_desc
|> :jsonpath_lexer.format_error()
|> List.to_string()
{:error, %ParsingError{message: message_string}}
{:error, {_line, :jsonpath_parser, message}} ->
message_string =
message
|> :jsonpath_parser.format_error()
|> List.to_string()
{:error, %ParsingError{message: message_string}}
end
end
# Exhausted path: the current item is a match.
defp recurse(item, []),
do: [item]
# Filter expression `[?(@.path op value)]`: keep entries whose value at `path`
# satisfies the comparison, then continue down the remaining path `t`.
defp recurse(enumerable, [{:access, {op, path, value}} | t])
when is_list(enumerable) or is_map(enumerable) do
results =
Enum.reduce(enumerable, [], fn entry, acc ->
# Maps enumerate as {key, value} pairs; lists enumerate items directly.
# (These case-clause bindings do not leak; `value` below still refers to
# the comparison operand from the pattern above.)
item =
case entry do
{_key, value} -> value
value -> value
end
# NOTE(review): both `recurse(item, path)` and `recurse(item, t)` are
# matched against a single-element list. If either yields two or more
# results, no `else` clause matches and a WithClauseError is raised —
# TODO confirm the single-result assumption is intended here.
with [value_at_path] <- recurse(item, path),
true <- compare(op, value_at_path, value),
[leaf_value] <- recurse(item, t) do
[leaf_value | acc]
else
[] -> acc
false -> acc
end
end)
Enum.reverse(results)
end
# Plain key access on a map.
defp recurse(map, [{:access, a} | t]) when is_map(map) do
case Map.fetch(map, a) do
{:ok, next_item} -> recurse(next_item, t)
:error -> []
end
end
# Index access on a list; non-integer tokens fall through to the catch-all.
defp recurse(array, [{:access, a} | t]) when is_list(array) and is_integer(a) do
case Enum.fetch(array, a) do
{:ok, next_item} -> recurse(next_item, t)
:error -> []
end
end
# Access on a scalar (or index access on a map): no match.
defp recurse(_any, [{:access, _a} | _t]),
do: []
# Recursive descent `..a`: apply the same path to every child (depth-first),
# then try `a` at the current level; current-level matches come first.
defp recurse(enumerable, [{:recurse, a} | t] = path)
when is_map(enumerable) or is_list(enumerable) do
descent_results =
Enum.reduce(enumerable, [], fn
{_key, item}, acc ->
acc ++ recurse(item, path)
item, acc ->
acc ++ recurse(item, path)
end)
case safe_fetch(enumerable, a) do
{:ok, item} -> recurse(item, t) ++ descent_results
:error -> descent_results
end
end
defp recurse(_any, [{:recurse, _a} | _t]),
do: []
# Slices only apply to arrays, never maps.
defp recurse(map, [{:slice, _first, _last, _step} | _t]) when is_map(map),
do: []
# `:last` resolves to the enumerable's length, making the slice end-exclusive.
defp recurse(enumerable, [{:slice, first, :last, step} | t]),
do: recurse(enumerable, [{:slice, first, Enum.count(enumerable), step} | t])
# Empty slice (first == last).
defp recurse(_enumerable, [{:slice, index, index, _step} | _t]),
do: []
defp recurse(enumerable, [{:slice, first, last, step} | t]) do
enumerable
|> Enum.slice(Range.new(first, last - 1))
|> Enum.take_every(step)
|> Enum.reduce([], fn item, acc -> acc ++ recurse(item, t) end)
end
# Union `[a,b,...]`: evaluate each member independently and concatenate.
defp recurse(enumerable, [{:union, union_list} | t]) do
Enum.reduce(union_list, [], fn union_item, acc ->
acc ++ recurse(enumerable, [union_item | t])
end)
end
# Wildcard `*`: continue into every value of a map / every element of a list.
defp recurse(%{} = map, [:wildcard | t]) do
Map.values(map)
|> Enum.reduce([], fn item, acc -> acc ++ recurse(item, t) end)
end
defp recurse(list, [:wildcard | t]) when is_list(list) do
Enum.reduce(list, [], fn item, acc -> acc ++ recurse(item, t) end)
end
# Fetch that tolerates a key/index of the wrong kind for the container.
defp safe_fetch(list, index) when is_list(list) and is_integer(index),
do: Enum.fetch(list, index)
defp safe_fetch(list, _index) when is_list(list),
do: :error
defp safe_fetch(%{} = map, key),
do: Map.fetch(map, key)
# Comparison uses Erlang term ordering, so mixed-type operands never raise.
defp compare(op, value1, value2) do
case op do
:> ->
value1 > value2
:>= ->
value1 >= value2
:< ->
value1 < value2
:<= ->
value1 <= value2
:== ->
value1 == value2
:!= ->
value1 != value2
end
end
end
|
lib/ex_json_path.ex
| 0.728941 | 0.466603 |
ex_json_path.ex
|
starcoder
|
defmodule Vex.Validators.By do
  @moduledoc """
  Ensure a value meets a custom criteria.
  Provide a function that will accept a value and return a true/false result.
  ## Options
  None, a function with arity 1 must be provided.
  * `:function`: The function to check. Should have an arity of 1 and return true/false.
  * `:message`: Optional. A custom error message. May be in EEx format
  and use the fields described in "Custom Error Messages," below.
  The function can be provided in place of the keyword list if no other options are needed.
  ## Examples
  iex> Vex.Validators.By.validate(2, &(&1 == 2))
  :ok
  iex> Vex.Validators.By.validate(3, &(&1 == 2))
  {:error, "must be valid"}
  iex> Vex.Validators.By.validate(["foo", "foo"], &is_list/1)
  :ok
  iex> Vex.Validators.By.validate("sgge", fn (word) -> word |> String.reverse == "eggs" end)
  :ok
  iex> Vex.Validators.By.validate(nil, [function: &is_list/1, allow_nil: true])
  :ok
  iex> Vex.Validators.By.validate({}, [function: &is_list/1, allow_blank: true])
  :ok
  iex> Vex.Validators.By.validate([1], [function: &is_list/1, message: "must be a list"])
  :ok
  iex> Vex.Validators.By.validate("a", [function: &is_list/1, message: "must be a list"])
  {:error, "must be a list"}
  ## Custom Error Messages
  Custom error messages (in EEx format), provided as :message, can use the following values:
  iex> Vex.Validators.By.__validator__(:message_fields)
  [value: "The bad value"]
  An example:
  iex> Vex.Validators.By.validate("blah", [function: &is_list/1, message: "<%= inspect value %> isn't a list"])
  {:error, ~S("blah" isn't a list)}
  """
  use Vex.Validator

  @message_fields [value: "The bad value"]

  # A bare function argument is shorthand for the `[function: fun]` option form.
  def validate(value, func) when is_function(func), do: validate(value, function: func)

  def validate(value, options) when is_list(options) do
    # `unless_skipping` (from Vex.Validator) short-circuits for
    # allow_nil/allow_blank before the custom check runs.
    unless_skipping(value, options) do
      checker = Keyword.get(options, :function)

      # Any truthy return counts as valid, mirroring plain `if` semantics.
      case checker.(value) do
        falsy when falsy in [false, nil] ->
          {:error, message(options, "must be valid", value: value)}

        _truthy ->
          :ok
      end
    end
  end
end
|
lib/vex/validators/by.ex
| 0.848109 | 0.53522 |
by.ex
|
starcoder
|
defmodule Scidata.CIFAR10 do
@moduledoc """
Module for downloading the [CIFAR10 dataset](https://www.cs.toronto.edu/~kriz/cifar.html).
"""
require Scidata.Utils
alias Scidata.Utils
@base_url "https://www.cs.toronto.edu/~kriz/"
@dataset_file "cifar-10-binary.tar.gz"
# 50k training / 10k test examples of 3-channel 32x32 images.
@train_images_shape {50000, 3, 32, 32}
@train_labels_shape {50000}
@test_images_shape {10000, 3, 32, 32}
@test_labels_shape {10000}
@doc """
Downloads the CIFAR10 training dataset or fetches it locally.
Returns a tuple of format:
{{images_binary, images_type, images_shape},
{labels_binary, labels_type, labels_shape}}
If you want to one-hot encode the labels, you can:
labels_binary
|> Nx.from_binary(labels_type)
|> Nx.new_axis(-1)
|> Nx.equal(Nx.tensor(Enum.to_list(0..9)))
## Examples
iex> Scidata.CIFAR10.download()
{{<<59, 43, 50, 68, 98, 119, 139, 145, 149, 149, 131, 125, 142, 144, 137, 129,
137, 134, 124, 139, 139, 133, 136, 139, 152, 163, 168, 159, 158, 158, 152,
148, 16, 0, 18, 51, 88, 120, 128, 127, 126, 116, 106, 101, 105, 113, 109,
112, ...>>, {:u, 8}, {50000, 3, 32, 32}},
{<<6, 9, 9, 4, 1, 1, 2, 7, 8, 3, 4, 7, 7, 2, 9, 9, 9, 3, 2, 6, 4, 3, 6, 6, 2,
6, 3, 5, 4, 0, 0, 9, 1, 3, 4, 0, 3, 7, 3, 3, 5, 2, 2, 7, 1, 1, 1, ...>>,
{:u, 8}, {50000}}}
"""
def download() do
download_dataset(:train)
end
@doc """
Downloads the CIFAR10 test dataset or fetches it locally.
Returns data in the same format as `download/0`.
"""
def download_test() do
download_dataset(:test)
end
# Splits a batch file into {images, labels}. Each record is 3073 bytes:
# 1 label byte followed by a 3072-byte (3x32x32) image.
defp parse_images(content) do
{images, labels} =
for <<example::size(3073)-binary <- content>>, reduce: {[], []} do
{images, labels} ->
<<label::size(8)-bitstring, image::size(3072)-binary>> = example
{[image | images], [label | labels]}
end
# Accumulators were built by prepending; reverse once to restore file order.
{Enum.reverse(images), Enum.reverse(labels)}
end
defp download_dataset(dataset_type) do
# Utils.get!/1 fetches (or reads the cached) tarball; `files` is a list of
# {filename_charlist, content} entries.
files = Utils.get!(@base_url <> @dataset_file).body
{images, labels} =
files
# Keep only the batch files for the requested split.
|> Enum.filter(fn {fname, _} ->
String.match?(
List.to_string(fname),
case dataset_type do
:train -> ~r/data_batch/
:test -> ~r/test_batch/
end
)
end)
# Parse each batch file concurrently; order of results matches input order.
|> Enum.map(fn {_, content} -> Task.async(fn -> parse_images(content) end) end)
|> Enum.map(&Task.await(&1, :infinity))
|> Enum.unzip()
images = IO.iodata_to_binary(images)
labels = IO.iodata_to_binary(labels)
{{images, {:u, 8}, if(dataset_type == :test, do: @test_images_shape, else: @train_images_shape)},
{labels, {:u, 8}, if(dataset_type == :test, do: @test_labels_shape, else: @train_labels_shape)}}
end
end
|
lib/scidata/cifar10.ex
| 0.796767 | 0.716268 |
cifar10.ex
|
starcoder
|
defmodule NewRelic do
@moduledoc """
New Relic Agent - Public API
"""
# This module is a thin facade: nearly every function delegates to an
# internal reporter/harvester module.
@doc """
Set the name of the current transaction.
The first segment will be treated as the Transaction namespace,
and commonly contains the name of the framework.
**Notes:**
* At least 2 segments are required to light up the Transactions UI in APM
In the following example, you will see `/custom/transaction/name`
in the Transaction list.
```elixir
NewRelic.set_transaction_name("/Plug/custom/transaction/name")
```
"""
defdelegate set_transaction_name(name), to: NewRelic.Transaction.Reporter
@doc """
Report custom attributes on the current Transaction
Reporting nested data structures is supported by auto-flattening them
into a list of key-value pairs.
```elixir
NewRelic.add_attributes(foo: "bar")
# "foo" => "bar"
NewRelic.add_attributes(map: %{foo: "bar", baz: "qux"})
# "map.foo" => "bar"
# "map.baz" => "qux"
# "map.size" => 2
NewRelic.add_attributes(list: ["a", "b", "c"])
# "list.0" => "a"
# "list.1" => "b"
# "list.2" => "c"
# "list.length" => 3
```
**Notes:**
* Nested Lists and Maps are truncated at 10 items since there are a limited number
of attributes that can be reported on Transaction events
"""
defdelegate add_attributes(custom_attributes), to: NewRelic.Transaction.Reporter
@doc false
defdelegate incr_attributes(attrs), to: NewRelic.Transaction.Reporter
@doc """
Start an "Other" Transaction.
This will begin monitoring the current process as an "Other" Transaction
(ie: Not a "Web" Transaction). The first argument will be considered
the "category", the second is the "name".
Examples:
```elixir
NewRelic.start_transaction("GenStage", "MyConsumer/EventType")
NewRelic.start_transaction("Task", "TaskName")
```
**Notes:**
* Don't use this to track Web Transactions - for that,
`use NewRelic.Transaction` in your Plug pipeline so that we can properly
categorize as Web Transactions in the UI.
* Do _not_ use this for processes that live a very long time, doing so
will risk a memory leak tracking attributes in the transaction!
* You can't start a new transaction within an existing one. Any process
spawned inside a transaction belongs to that transaction.
* If multiple transactions are started in the same Process, you must
call `NewRelic.stop_transaction()` to mark the end of the transaction.
"""
@spec start_transaction(String.t(), String.t()) :: :ok
defdelegate start_transaction(category, name), to: NewRelic.Transaction
@doc """
Stop an "Other" Transaction.
If multiple transactions are started in the same Process, you must
call `NewRelic.stop_transaction()` to mark the end of the transaction.
"""
@spec stop_transaction() :: :ok
defdelegate stop_transaction(), to: NewRelic.Transaction
@doc """
Define an "Other" transaction within the given block. The return value of
the block is returned.
See `start_transaction` and `stop_transaction` for more details.
"""
defmacro other_transaction(category, name, do: block) do
quote do
NewRelic.start_transaction(unquote(category), unquote(name))
# NOTE(review): if the block raises, stop_transaction/0 below is never
# called — consider a try/after wrapper; confirm whether this is intended.
res = unquote(block)
NewRelic.stop_transaction()
res
end
end
@doc """
Call within a transaction to prevent it from reporting.
```elixir
def index(conn, _) do
NewRelic.ignore_transaction()
send_resp(conn, 200, "Health check OK")
end
```
"""
defdelegate ignore_transaction(), to: NewRelic.Transaction
@doc """
Store information about the type of work the current span is doing.
Options:
- `:generic, custom: attributes`
- `:http, url: url, method: method, component: component`
- `:datastore, statement: statement, instance: instance, address: address, hostname: hostname, component: component`
"""
defdelegate set_span(type, attributes), to: NewRelic.DistributedTrace
@doc """
You must manually instrument outgoing HTTP calls to connect them to a Distributed Trace.
The agent will automatically read request headers and detect if the request is a part
of a Distributed Trace, but outgoing requests need an extra header:
```elixir
HTTPoison.get(url, ["x-api-key": "secret"] ++ NewRelic.distributed_trace_headers(:http))
```
**Notes:**
* Call `NewRelic.distributed_trace_headers` immediately before making the
request since calling the function marks the "start" time of the request.
"""
defdelegate distributed_trace_headers(type), to: NewRelic.DistributedTrace
@deprecated "Use distributed_trace_headers/1 instead"
defdelegate create_distributed_trace_payload(type),
to: NewRelic.DistributedTrace,
as: :distributed_trace_headers
@doc """
To get detailed information about a particular process, you can install a Process sampler.
You must tell the Agent about your process from within the process.
For a `GenServer`, this function call should be made in the `init` function:
```elixir
defmodule ImportantProcess do
use GenServer
def init(:ok) do
NewRelic.sample_process
{:ok, %{}}
end
end
```
Once installed, the agent will report `ElixirSample` events with:
* `category = "Process"`
* `message_queue_length`
* `reductions`
* `memory_kb`
"""
defdelegate sample_process, to: NewRelic.Sampler.Process
@doc """
Report a Custom event to NRDB.
```elixir
NewRelic.report_custom_event("EventType", %{"foo" => "bar"})
```
"""
defdelegate report_custom_event(type, attributes),
to: NewRelic.Harvest.Collector.CustomEvent.Harvester
@doc """
Report a Custom metric.
```elixir
NewRelic.report_custom_metric("My/Metric", 123)
```
"""
defdelegate report_custom_metric(name, value),
to: NewRelic.Harvest.Collector.Metric.Harvester
# Internal reporting entry points, hidden from docs but callable.
@doc false
defdelegate report_aggregate(meta, values), to: NewRelic.Aggregate.Reporter
@doc false
defdelegate report_sample(category, values), to: NewRelic.Sampler.Reporter
@doc false
defdelegate report_span(span), to: NewRelic.Harvest.Collector.SpanEvent.Harvester
@doc false
defdelegate report_metric(identifier, values), to: NewRelic.Harvest.Collector.Metric.Harvester
@doc false
defdelegate log(level, message), to: NewRelic.Logger
@doc false
defdelegate manual_shutdown(), to: NewRelic.Harvest.Supervisor
end
|
lib/new_relic.ex
| 0.901707 | 0.847653 |
new_relic.ex
|
starcoder
|
defmodule ScenicStarter.Scene.Splash do
@moduledoc """
Sample splash scene.
This scene demonstrate a very simple animation and transition to another scene.
It also shows how to load a static texture and paint it into a rectangle.
"""
use Scenic.Scene
alias Scenic.Graph
alias Scenic.ViewPort
import Scenic.Primitives, only: [{:rect, 3}, {:update_opts, 2}]
# Texture path and content hash resolved at compile time.
@parrot_path :code.priv_dir(:scenic_starter)
|> Path.join("/static/images/scenic_parrot.png")
@parrot_hash Scenic.Cache.Support.Hash.file!(@parrot_path, :sha)
@parrot_width 62
@parrot_height 114
# Base graph: one rectangle filled with the parrot texture at alpha 0.
@graph Graph.build()
|> rect(
{@parrot_width, @parrot_height},
id: :parrot,
fill: {:image, {@parrot_hash, 0}}
)
# Animation tick interval and the pause before switching scenes.
@animate_ms 30
@finish_delay_ms 1000
# --------------------------------------------------------
# `first_scene` is the scene to transition to once the fade-in completes.
def init(first_scene, opts) do
viewport = opts[:viewport]
# calculate the transform that centers the parrot in the viewport
{:ok, %ViewPort.Status{size: {vp_width, vp_height}}} = ViewPort.info(viewport)
position = {
vp_width / 2 - @parrot_width / 2,
vp_height / 2 - @parrot_height / 2
}
# load the parrot texture into the cache
Scenic.Cache.Static.Texture.load(@parrot_path, @parrot_hash)
# move the parrot into the right location
graph = Graph.modify(@graph, :parrot, &update_opts(&1, translate: position))
# start a very simple animation timer
{:ok, timer} = :timer.send_interval(@animate_ms, :animate)
state = %{
viewport: viewport,
timer: timer,
graph: graph,
first_scene: first_scene,
alpha: 0
}
{:ok, state, push: graph}
end
# --------------------------------------------------------
# A very simple animation. A timer runs, which increments a counter. The counter
# Is applied as an alpha channel to the parrot png.
# When it is fully saturated, transition to the first real scene
# Alpha saturated: cancel the timer and schedule the scene switch.
def handle_info(
:animate,
%{timer: timer, alpha: a} = state
)
when a >= 256 do
:timer.cancel(timer)
Process.send_after(self(), :finish, @finish_delay_ms)
{:noreply, state}
end
def handle_info(:finish, state) do
go_to_first_scene(state)
{:noreply, state}
end
# Regular tick: repaint with the current alpha, then step alpha by 2.
def handle_info(:animate, %{alpha: alpha, graph: graph} = state) do
graph =
Graph.modify(
graph,
:parrot,
&update_opts(&1, fill: {:image, {@parrot_hash, alpha}})
)
{:noreply, %{state | graph: graph, alpha: alpha + 2}, push: graph}
end
# --------------------------------------------------------
# short cut to go right to the new scene on user input
def handle_input({:cursor_button, {_, :press, _, _}}, _context, state) do
go_to_first_scene(state)
{:noreply, state}
end
def handle_input({:key, _}, _context, state) do
go_to_first_scene(state)
{:noreply, state}
end
def handle_input(_input, _context, state), do: {:noreply, state}
# --------------------------------------------------------
defp go_to_first_scene(%{viewport: vp, first_scene: first_scene}) do
ViewPort.set_root(vp, {first_scene, nil})
end
end
|
lib/scenes/splash.ex
| 0.856257 | 0.651594 |
splash.ex
|
starcoder
|
defmodule Coherence.LockableService do
  @moduledoc """
  Lockable disables an account after too many failed login attempts.
  Enabled with the `--lockable` installation option, after 5 failed login
  attempts, the user is locked out of their account for 5 minutes.
  This option adds the following fields to the user schema:
  * :failed_attempts, :integer - The number of failed login attempts.
  * :locked_at, :datetime - The time and date when the account was locked.
  The following configuration is used to customize lockable behavior:
  * :unlock_timeout_minutes (20) - The number of minutes to wait before unlocking the account.
  * :max_failed_login_attempts (5) - The number of failed login attempts before locking the account.
  By default, a locked account will be unlocked after the `:unlock_timeout_minutes` expires or it
  is unlocked using the `unlock!/1` API.
  In addition, the `--unlock-with-token` option can be given to the installer to allow
  a user to unlock their own account by requesting an email be sent with an link containing an
  unlock token.
  With this option installed, the following field is added to the user schema:
  * :unlock_token, :string
  """
  use Coherence.Config
  import Coherence.Authentication.Utils, only: [random_string: 1]
  import Coherence.EmailService
  alias Coherence.{Controller, Messages, Schemas}
  require Logger

  @type changeset :: Ecto.Changeset.t
  @type schema :: Ecto.Schema.t
  @type schema_or_error :: schema | {:error, changeset}

  @doc false
  @spec clear_unlock_values(schema, module) :: nil | :ok | String.t
  def clear_unlock_values(user, user_schema) do
    # `||` (truthiness), not the strict boolean `or`: unlock_token is a string
    # (or nil) and locked_at is a datetime (or nil), and `or` raises
    # BadBooleanError for any non-boolean left operand.
    if user.unlock_token || user.locked_at do
      schema =
        :unlock
        |> Controller.changeset(user_schema, user, %{unlock_token: nil, locked_at: nil})
        |> Schemas.update

      case schema do
        {:error, changeset} ->
          # Log-and-continue: a failed cleanup must not break the caller's flow.
          lockable_failure changeset
        _ ->
          :ok
      end
    end
  end

  @doc """
  Log an error message when lockable update fails.
  """
  @spec lockable_failure(changeset) :: :ok
  def lockable_failure(changeset) do
    Logger.error "Failed to update lockable attributes " <> inspect(changeset.errors)
  end

  @doc """
  Email the unlock instructions (with the user's unlock token link) to the user.

  Returns `{:ok, message}` when the email was dispatched, or `{:error, message}`
  when no mailer is configured.
  """
  @spec send_unlock_email(schema) :: {:ok, String.t} | {:error, String.t}
  def send_unlock_email(user) do
    if Config.mailer?() do
      send_user_email :unlock, user, unlock_url(user.unlock_token)
      {:ok, Messages.backend().unlock_instructions_sent() }
    else
      {:error, Messages.backend().mailer_required() }
    end
  end

  @doc """
  Unlock a user account.
  Clears the `:locked_at` field on the user model and updates the database.
  Returns `{:error, changeset}` when the account is not currently locked.
  """
  @spec unlock!(schema) :: schema_or_error
  def unlock!(user) do
    user_schema = Config.user_schema
    changeset = user_schema.unlock user

    if user_schema.locked?(user) do
      Schemas.update changeset
    else
      changeset = Ecto.Changeset.add_error changeset, :locked_at, Messages.backend().not_locked()
      {:error, changeset}
    end
  end

  @doc """
  Generate a random unlock token and persist it on the user record.
  """
  def unlock_token(user) do
    token = random_string 48
    # Resolve the host application's Schemas module at runtime from config.
    [Config.module, Coherence, Schemas]
    |> Module.concat
    |> apply(:update_user, [user, %{unlock_token: token}])
  end

  # Builds the full unlock URL from the endpoint base URL, the configured
  # unlock path, and the token.
  defp unlock_url(token), do:
    apply(Module.concat(Config.web_module, Endpoint), :url, [])
    <> Config.unlock_path <> "/" <> token
end
|
lib/coherence/services/lockable_service.ex
| 0.730674 | 0.41837 |
lockable_service.ex
|
starcoder
|
defmodule Infusionsoft.Schemas do
@moduledoc false
# Provides functions for transforming field names.
# The "to" functions take lists of Common names and turn them into REST or XML names.
# The "from" functions take lists of REST or XML names and turn them into Common names.
alias Infusionsoft.Schemas.XML.Contact, as: ContactXML
@doc """
Takes a list of Common names and returns XML names.
Names that aren't in the standard set of Common names will be treated as custom field names.
Names that don't match anything trigger an error showing all the names that didn't match.
Inputs are not case sensitive, but be careful to include any spaces.
For example, "fIrSt NaMe" is fine but "Firstname" would fail.
## Examples
iex> Infusionsoft.Schemas.to_xml(["First Name"], "test_token", nil, :contacts)
{:ok, ["FirstName"]}
iex> Infusionsoft.Schemas.to_xml(["lAsT nAmE"], "test_token", nil, :contacts)
{:ok, ["LastName"]}
iex> Infusionsoft.Schemas.to_xml(["Last Name"], "test_token", nil, :not_a_valid_type)
{:error, "The type \\"not_a_valid_type\\" is invalid"}
iex> Infusionsoft.Schemas.to_xml(["Not a valid name"], "test_token", nil, :contacts)
{:error, "The name \\"Not a valid name\\" is not a standard or custom contact field"}
"""
@spec to_xml([String.t()], String.t(), nil | String.t(), atom()) ::
{:ok, list()} | {:error, String.t()}
def to_xml(names, token, app, :contacts) when is_list(names),
do: ContactXML.to(names, token, app)
def to_xml(names, _token, _app, type) when is_list(names),
do: {:error, ~s(The type "#{type}" is invalid)}
@doc """
Takes a map of key / value pairs with Common name keys and returns the map with XML name keys.
Names that aren't in the standard set of Common names will be treated as custom field names.
Names that don't match anything trigger an error showing all the names that didn't match.
"""
@spec keys_to_xml(map(), String.t(), nil | String.t(), :contacts) ::
{:ok, map()} | {:error, binary()}
def keys_to_xml(map, token, app, :contacts) do
# Convert each key individually so every failure can be reported.
pairs = Enum.map(map, fn {k, v} -> {to_xml([k], token, app, :contacts), v} end)
case Enum.filter(pairs, fn {{status, _}, _} -> status == :error end) do
[] ->
{:ok, Enum.into(pairs, %{}, fn {{_, [k]}, v} -> {k, v} end)}
errors ->
# Join every per-key error message into a single error string.
{:error, errors |> Enum.map(fn {{_, message}, _} -> message end) |> Enum.join(", ")}
end
end
@doc """
Takes a list of XML names and returns Common names.
Names that aren't in the standard set of XML names will be treated as custom field names.
Names that don't match anything trigger an error showing all the names that didn't match.
## Examples
iex> Infusionsoft.Schemas.from_xml(["FirstName"], "test_token", nil, :contacts)
{:ok, ["First Name"]}
iex> Infusionsoft.Schemas.from_xml(["LastName"], "test_token", nil, :not_a_valid_type)
{:error, "The type \\"not_a_valid_type\\" is invalid"}
iex> Infusionsoft.Schemas.from_xml(["Not a valid name"], "test_token", nil, :contacts)
{:error, "The name \\"Not a valid name\\" is not a standard or custom contact field"}
"""
@spec from_xml([String.t()], String.t(), nil | String.t(), atom()) ::
{:ok, list()} | {:error, String.t()}
def from_xml(names, token, app, :contacts) when is_list(names),
do: ContactXML.from(names, token, app)
def from_xml(names, _token, _app, type) when is_list(names),
do: {:error, ~s(The type "#{type}" is invalid)}
@doc """
Takes a map of key / value pairs with XML name keys and returns the map with Common name keys.
Names that aren't in the standard set of XML names will be treated as custom field names.
Names that don't match anything trigger an error showing all the names that didn't match.
"""
@spec keys_from_xml(map(), String.t(), nil | String.t(), atom()) ::
{:ok, map()} | {:error, binary()}
def keys_from_xml(map, token, app, :contacts) do
pairs = Enum.map(map, fn {k, v} -> {from_xml([k], token, app, :contacts), v} end)
case Enum.filter(pairs, fn {{status, _}, _} -> status == :error end) do
[] ->
{:ok, Enum.into(pairs, %{}, fn {{_, [k]}, v} -> {k, v} end)}
errors ->
{:error, errors |> Enum.map(fn {{_, message}, _} -> message end) |> Enum.join(", ")}
end
end
end
|
lib/infusionsoft/schemas.ex
| 0.852813 | 0.490114 |
schemas.ex
|
starcoder
|
defmodule Exile.Stream do
@moduledoc """
Defines a `Exile.Stream` struct returned by `Exile.stream!/2`.
"""
alias Exile.Process
alias Exile.Process.Error
# Collectable sink that feeds data into the external process's stdin.
# (`Process` here is Exile.Process, aliased in the enclosing module.)
defmodule Sink do
@moduledoc false
defstruct [:process]
defimpl Collectable do
def into(%{process: process} = stream) do
collector_fun = fn
# Each emitted chunk is written straight to the process's stdin.
:ok, {:cont, x} ->
:ok = Process.write(process, x)
# Input exhausted: close stdin so the command sees EOF.
:ok, :done ->
:ok = Process.close_stdin(process)
stream
# Collection halted early: still close stdin so the command can exit.
:ok, :halt ->
:ok = Process.close_stdin(process)
end
{:ok, collector_fun}
end
end
end
defstruct [:process, :stream_opts]
@type t :: %__MODULE__{}
@doc false
def __build__(cmd_with_args, opts) do
{stream_opts, process_opts} =
Keyword.split(opts, [:exit_timeout, :max_chunk_size, :input, :use_stderr])
with {:ok, stream_opts} <- normalize_stream_opts(stream_opts) do
process_opts = Keyword.put(process_opts, :use_stderr, stream_opts[:use_stderr])
{:ok, process} = Process.start_link(cmd_with_args, process_opts)
start_input_streamer(%Sink{process: process}, stream_opts.input)
%Exile.Stream{process: process, stream_opts: stream_opts}
else
{:error, error} -> raise ArgumentError, message: error
end
end
@doc false
defp start_input_streamer(sink, input) do
case input do
:no_input ->
:ok
{:enumerable, enum} ->
spawn_link(fn ->
Enum.into(enum, sink)
end)
{:collectable, func} ->
spawn_link(fn ->
func.(sink)
end)
end
end
defimpl Enumerable do
def reduce(arg, acc, fun) do
%{process: process, stream_opts: %{use_stderr: use_stderr} = stream_opts} = arg
start_fun = fn -> :normal end
next_fun = fn :normal ->
case Process.read_any(process, stream_opts.max_chunk_size) do
:eof ->
{:halt, :normal}
{:ok, {:stdout, x}} when use_stderr == false ->
{[IO.iodata_to_binary(x)], :normal}
{:ok, {stream, x}} when use_stderr == true ->
{[{stream, IO.iodata_to_binary(x)}], :normal}
{:error, errno} ->
raise Error, "Failed to read from the external process. errno: #{errno}"
end
end
after_fun = fn exit_type ->
try do
# always close stdin before stoping to give the command chance to exit properly
Process.close_stdin(process)
result = Process.await_exit(process, stream_opts.exit_timeout)
case {exit_type, result} do
{_, :timeout} ->
Process.kill(process, :sigkill)
raise Error, "command fail to exit within timeout: #{stream_opts[:exit_timeout]}"
{:normal, {:ok, {:exit, 0}}} ->
:ok
{:normal, {:ok, error}} ->
raise Error, "command exited with status: #{inspect(error)}"
{exit_type, error} ->
Process.kill(process, :sigkill)
raise Error, "command exited with exit_type: #{exit_type}, error: #{inspect(error)}"
end
after
Process.stop(process)
end
end
Stream.resource(start_fun, next_fun, after_fun).(acc, fun)
end
def count(_stream) do
{:error, __MODULE__}
end
def member?(_stream, _term) do
{:error, __MODULE__}
end
def slice(_stream) do
{:error, __MODULE__}
end
end
defp normalize_input(term) do
cond do
is_nil(term) ->
{:ok, :no_input}
!is_function(term) && Enumerable.impl_for(term) ->
{:ok, {:enumerable, term}}
is_function(term, 1) ->
{:ok, {:collectable, term}}
true ->
{:error, "`:input` must be either Enumerable or a function which accepts collectable"}
end
end
defp normalize_max_chunk_size(max_chunk_size) do
case max_chunk_size do
nil ->
{:ok, 65536}
max_chunk_size when is_integer(max_chunk_size) and max_chunk_size > 0 ->
{:ok, max_chunk_size}
_ ->
{:error, ":max_chunk_size must be a positive integer"}
end
end
defp normalize_exit_timeout(timeout) do
case timeout do
nil ->
{:ok, :infinity}
timeout when is_integer(timeout) and timeout > 0 ->
{:ok, timeout}
_ ->
{:error, ":exit_timeout must be either :infinity or an integer"}
end
end
defp normalize_use_stderr(use_stderr) do
case use_stderr do
nil ->
{:ok, false}
use_stderr when is_boolean(use_stderr) ->
{:ok, use_stderr}
_ ->
{:error, ":use_stderr must be a boolean"}
end
end
defp normalize_stream_opts(opts) when is_list(opts) do
with {:ok, input} <- normalize_input(opts[:input]),
{:ok, exit_timeout} <- normalize_exit_timeout(opts[:exit_timeout]),
{:ok, max_chunk_size} <- normalize_max_chunk_size(opts[:max_chunk_size]),
{:ok, use_stderr} <- normalize_use_stderr(opts[:use_stderr]) do
{:ok,
%{
input: input,
exit_timeout: exit_timeout,
max_chunk_size: max_chunk_size,
use_stderr: use_stderr
}}
end
end
defp normalize_stream_opts(_), do: {:error, "stream_opts must be a keyword list"}
end
|
lib/exile/stream.ex
| 0.677901 | 0.408424 |
stream.ex
|
starcoder
|
defmodule Day20 do
  use Aoc2018

  @type direction :: :n | :e | :w | :s
  @type branch :: [direction]
  @type path :: [direction | branch]
  @type vertex :: {integer(), integer()}

  # Part one: length of the longest walk described by the regex-like input.
  @spec part_one(binary()) :: integer()
  def part_one(input) do
    input
    |> String.trim()
    |> parse()
    |> longest_path()
  end

  # Part two: number of rooms at distance >= 1000 from the origin.
  @spec part_two(binary()) :: non_neg_integer()
  def part_two(input) do
    input
    |> String.trim()
    |> parse()
    |> path_to_graph({0, 0})
    |> distances_from_start({0, 0})
    |> Enum.map(fn {_vertex, distance} -> distance end)
    |> Enum.count(fn distance -> distance >= 1000 end)
  end

  @doc """
  Builds a graph from a parsed path, walking each direction from
  `starting_vertex` and recursing into branches (which all restart from the
  vertex where the branch opened).
  """
  @spec path_to_graph(path, vertex) :: Graph.t()
  def path_to_graph(path, starting_vertex) do
    {graph, _} =
      Enum.reduce(path, {Graph.new(), starting_vertex}, fn
        path, {graph, previous_vertex} ->
          case path do
            direction when is_atom(direction) ->
              new_vertex = move(previous_vertex, direction)

              graph =
                Graph.add_vertex(graph, new_vertex,
                  edge_from: previous_vertex,
                  from_edge_label: direction
                )

              {graph, new_vertex}

            branches when is_list(branches) ->
              # Each branch is a sub-path starting at the current vertex;
              # merge the resulting sub-graphs into the accumulator.
              graph =
                [graph | Enum.map(branches, &path_to_graph(&1, previous_vertex))]
                |> Graph.merge()

              {graph, previous_vertex}
          end
      end)

    graph
  end

  @doc """
  BFS from `start_vertex`, keeping the shortest known distance to every
  reachable vertex.
  """
  @spec distances_from_start(Graph.t(), any()) :: [{any(), integer()}]
  def distances_from_start(graph, start_vertex) do
    {distances, _visited} =
      Graph.traverse_breadth_first(graph, start_vertex, %{start_vertex => 0}, fn prev_vertex,
                                                                                 curr_vertex,
                                                                                 distances ->
        new_distance_to_curr = distances[prev_vertex] + 1

        case distances[curr_vertex] do
          nil ->
            Map.put(distances, curr_vertex, new_distance_to_curr)

          best_distance_to_curr_vertex_so_far ->
            if new_distance_to_curr < best_distance_to_curr_vertex_so_far do
              Map.put(distances, curr_vertex, new_distance_to_curr)
            else
              distances
            end
        end
      end)

    distances
  end

  # Grid moves: north decreases y, south increases y, east/west change x.
  @spec move(vertex, direction) :: vertex
  def move({x, y}, :n), do: {x, y - 1}
  def move({x, y}, :s), do: {x, y + 1}
  def move({x, y}, :e), do: {x + 1, y}
  def move({x, y}, :w), do: {x - 1, y}

  @spec longest_path(path) :: integer()
  def longest_path(path) when is_list(path) do
    longest_path(0, path)
  end

  @spec longest_path(integer(), path) :: integer()
  defp longest_path(acc, path) do
    case path do
      [x | rest] when is_atom(x) ->
        longest_path(acc + 1, rest)

      [branches | rest] when is_list(branches) ->
        # we don't need to walk loops - they can be safely skipped
        if [] in branches do
          longest_path(acc, rest)
        else
          longest_branch =
            branches
            |> Enum.map(&longest_path(0, &1))
            |> Enum.max()

          longest_path(acc + longest_branch, rest)
        end

      [] ->
        acc
    end
  end

  # Fixed spec: parse/1 always returns a path — parse_path/3 either returns a
  # list or raises on malformed input, so the `{binary(), [path]}` member
  # declared previously was unreachable.
  @spec parse(binary()) :: path
  def parse(input) when is_binary(input) do
    # Strip the mandatory ^...$ anchors before parsing the body.
    size = byte_size(input) - 2
    <<"^", path::binary-size(size), "$">> = input
    parse_path([], {[], []}, path)
  end

  # Recursive-descent parse of the path body.
  # `stack` holds suspended {acc, branches} contexts, one per open "(".
  # `acc` collects the current alternative (reversed); `branches` collects
  # completed alternatives of the current group.
  @spec parse_path(list(), {list(), [path]}, binary()) :: path
  defp parse_path(stack, {acc, branches}, string)
       when is_list(stack) and is_list(acc) and is_list(branches) and is_binary(string) do
    case string do
      <<>> ->
        if stack != [] || branches != [] do
          raise """
          Reached end of input, but there's still some remaining stuff:
          stack = #{inspect(stack)}
          branches = #{inspect(branches)}.
          """
        end

        Enum.reverse(acc)

      <<?N::utf8, rest::binary>> ->
        parse_path(stack, {[:n | acc], branches}, rest)

      <<?E::utf8, rest::binary>> ->
        parse_path(stack, {[:e | acc], branches}, rest)

      <<?W::utf8, rest::binary>> ->
        parse_path(stack, {[:w | acc], branches}, rest)

      <<?S::utf8, rest::binary>> ->
        parse_path(stack, {[:s | acc], branches}, rest)

      <<?(::utf8, rest::binary>> ->
        # "(" suspends the current context and starts a fresh group.
        parse_path([{acc, branches} | stack], {[], []}, rest)

      <<?)::utf8, rest::binary>> ->
        # ")" finishes the last alternative, restores the outer context and
        # embeds the completed group as a nested list.
        branches = [Enum.reverse(acc) | branches] |> Enum.reverse()
        [{prev_acc, prev_branches} | stack] = stack
        parse_path(stack, {[branches | prev_acc], prev_branches}, rest)

      <<?|::utf8, rest::binary>> ->
        # "|" closes the current alternative and starts the next one.
        parse_path(stack, {[], [Enum.reverse(acc) | branches]}, rest)
    end
  end
end
|
lib/day20.ex
| 0.713631 | 0.583381 |
day20.ex
|
starcoder
|
defmodule Brodex do
  @moduledoc """
  Brodex is a thin wrapper of [`:brod`](https://hex.pm/packages/brod).

  ## Configuration

  See [brod README](https://github.com/klarna/brod) for details.

  ```elixir
  config :brod,
    clients: [
      my_client: [
        endpoints: [{'127.0.0.1', 9092}],
        reconnect_cool_down_seconds: 10
      ]
    ]
  ```

  If you use [mix release](https://hexdocs.pm/mix/Mix.Tasks.Release.html)

  ```elixir
  # config/releases.exs
  config :brod,
    clients: [
      my_client: [
        endpoints: Brodex.parse_endpoints(System.fetch_env!("KAFKA_ENDPOINTS")),
        reconnect_cool_down_seconds: 10
      ]
    ]
  ```
  """

  @typedoc "[`:brod.endpoint`](https://hexdocs.pm/brod/brod.html#type-endpoint)"
  @type endpoint :: {binary() | :inet.hostname(), non_neg_integer}

  @typedoc "[`:brod.client_id`](https://hexdocs.pm/brod/brod.html#type-client_id)"
  @type client_id :: atom

  @typedoc "[`:brod.client`](https://hexdocs.pm/brod/brod.html#type-client)"
  @type client :: client_id | pid

  @typedoc "[`:brod.client_config`](https://hexdocs.pm/brod/brod.html#type-client_config)"
  @type client_config :: :proplists.proplist()

  @typedoc "[`:brod.connection`](https://hexdocs.pm/brod/brod.html#type-connection)"
  @type connection :: pid

  @typedoc "[`:brod.bootstrap`](https://hexdocs.pm/brod/brod.html#type-bootstrap)"
  @type bootstrap :: [endpoint] | {[endpoint], client_config}

  @typedoc "[`:brod.topic`](https://hexdocs.pm/brod/brod.html#type-topic)"
  @type topic :: binary

  @typedoc "[`:brod.partition`](https://hexdocs.pm/brod/brod.html#type-partition)"
  @type partition :: int32

  @typedoc "[`:brod.key`](https://hexdocs.pm/brod/brod.html#type-key)"
  @type key :: :undefined | binary

  @typedoc "[`:brod.offset`](https://hexdocs.pm/brod/brod.html#type-offset)"
  @type offset :: int64

  @typedoc "[`:brod.msg_ts`](https://hexdocs.pm/brod/brod.html#type-msg_ts)"
  @type msg_ts :: int64

  @typedoc "[`:brod.call_ref`](https://hexdocs.pm/brod/brod.html#type-call_ref)"
  @type call_ref ::
          {:brod_call_ref, caller :: :undefined | pid, callee :: :undefined | pid,
           ref :: :undefined | reference}

  @typedoc "[`:brod_producer.config`](https://hexdocs.pm/brod/brod_producer.html#type-config)"
  @type producer_config :: :proplists.proplist()

  @typedoc "[`:brod_consumer.config`](https://hexdocs.pm/brod/brod_consumer.html#type-config)"
  @type consumer_config :: :proplists.proplist()

  @typedoc "[`:brod.value`](https://hexdocs.pm/brod/brod.html#type-value)"
  @type value ::
          :undefined
          | iodata
          | {msg_ts, binary}
          | [{key, value}]
          | [{msg_ts, key, value}]
          | msg_input
          | batch_input

  @typedoc "[`:kpro.headers`](https://hexdocs.pm/kafka_protocol/kpro.html#type-headers)"
  @type headers :: [{binary, binary}]

  @typedoc "[`:kpro.msg_input`](https://hexdocs.pm/kafka_protocol/kpro.html#type-msg_input)"
  @type msg_input :: %{headers: headers, ts: msg_ts, key: key, value: value}

  @typedoc "[`:kpro.batch_input`](https://hexdocs.pm/kafka_protocol/kpro.html#type-batch_input)"
  @type batch_input :: [msg_input]

  @typedoc "[`:brod.partitioner`](https://hexdocs.pm/brod/brod.html#type-partitioner)"
  @type partitioner :: (topic, pos_integer, key, value -> {:ok, partition}) | :random | :hash

  @typedoc "[`:brod.group_id`](https://hexdocs.pm/brod/brod.html#type-group_id)"
  @type group_id :: binary

  @typedoc "[`:brod.group_config`](https://hexdocs.pm/brod/brod.html#type-group_config)"
  @type group_config :: :proplists.proplist()

  @typedoc "[`:kpro.int32`](https://hexdocs.pm/kafka_protocol/kpro.html#type-int32)"
  @type int32 :: -2_147_483_648..2_147_483_647

  @typedoc "[`:kpro.int64`](https://hexdocs.pm/kafka_protocol/kpro.html#type-int64)"
  @type int64 :: -9_223_372_036_854_775_808..9_223_372_036_854_775_807

  @doc """
  Wrapper of [`:brod.start_client/3`](https://hexdocs.pm/brod/brod.html#start_client-3).
  """
  @spec start_client([endpoint], client_id, :brod.client_config()) ::
          :ok | {:error, term}
  def start_client(endpoints, client_id, options \\ []),
    do: :brod.start_client(endpoints, client_id, options)

  @doc """
  Wrapper of [`:brod.stop_client/1`](https://hexdocs.pm/brod/brod.html#stop_client-1).
  """
  @spec stop_client(client) :: :ok
  def stop_client(client),
    do: :brod.stop_client(client)

  @doc """
  Wrapper of [`:brod.start_consumer/3`](https://hexdocs.pm/brod/brod.html#start_consumer-3).
  """
  @spec start_consumer(client, topic, consumer_config) :: :ok
  def start_consumer(client, topic, consumer_config \\ []),
    do: :brod.start_consumer(client, topic, consumer_config)

  @doc """
  Wrapper of [`:brod.start_producer/3`](https://hexdocs.pm/brod/brod.html#start_producer-3).
  """
  @spec start_producer(client, topic, producer_config) :: :ok
  def start_producer(client, topic, producer_config \\ []),
    do: :brod.start_producer(client, topic, producer_config)

  @doc """
  Wrapper of [`:brod.produce/5`](https://hexdocs.pm/brod/brod.html#produce-5).
  """
  @spec produce_async(client, topic, input :: {key, value} | value, partition | partitioner) ::
          {:ok, call_ref} | {:error, term}
  def produce_async(client, topic, input, partition_spec \\ :hash)

  # List/map values (batch or message input) are produced with an empty key.
  def produce_async(client, topic, value, partition_spec) when is_list(value) or is_map(value),
    do: :brod.produce(client, topic, partition_spec, "", value)

  def produce_async(client, topic, {key, value}, partition_spec),
    do: :brod.produce(client, topic, partition_spec, key, value)

  @doc """
  Wrapper of [`:brod.produce_sync/5`](https://hexdocs.pm/brod/brod.html#produce_sync-5).
  """
  @spec produce_sync(client, topic, input :: {key, value} | value, partition | partitioner) ::
          :ok | {:error, term}
  def produce_sync(client, topic, input, partition_spec \\ :hash)

  # List/map values (batch or message input) are produced with an empty key.
  def produce_sync(client, topic, value, partition_spec) when is_list(value) or is_map(value),
    do: :brod.produce_sync(client, topic, partition_spec, "", value)

  def produce_sync(client, topic, {key, value}, partition_spec),
    do: :brod.produce_sync(client, topic, partition_spec, key, value)

  @doc """
  Wrapper of [`:brod.get_metadata/3`](https://hexdocs.pm/brod/brod.html#get_metadata-3)
  """
  @spec get_metadata([endpoint], :all | [topic], :brod.conn_config()) ::
          {:ok, :kpro.struct()} | {:error, term}
  def get_metadata(endpoints, topic, connect_config \\ []),
    do: :brod.get_metadata(endpoints, topic, connect_config)

  @doc """
  Wrapper of [`:brod.get_partitions_count/2`](https://hexdocs.pm/brod/brod.html#get_partitions_count-2)
  """
  @spec get_partitions_count(client, topic) :: {:ok, pos_integer} | {:error, term}
  def get_partitions_count(client, topic), do: :brod.get_partitions_count(client, topic)

  @doc """
  Wrapper of [`:brod.list_all_groups/2`](https://hexdocs.pm/brod/brod.html#list_all_groups-2)
  """
  @spec list_all_consumer_groups([endpoint], :brod.conn_config()) :: [
          {endpoint, [:brod.cg()] | {:error, term}}
        ]
  def list_all_consumer_groups(endpoints, connect_config \\ []),
    do: :brod.list_all_groups(endpoints, connect_config)

  @doc """
  Wrapper of [`:brod.list_groups/2`](https://hexdocs.pm/brod/brod.html#list_groups-2)
  """
  @spec list_consumer_groups(endpoint, :brod.conn_config()) ::
          {:ok, [:brod.cg()]} | {:error, term}
  def list_consumer_groups(endpoint, connect_config \\ []),
    do: :brod.list_groups(endpoint, connect_config)

  @type fetch_opt ::
          {:max_wait_time, non_neg_integer}
          | {:min_bytes, non_neg_integer}
          | {:max_bytes, non_neg_integer}

  @doc """
  Wrapper of [`:brod.fetch/5`](https://hexdocs.pm/brod/brod.html#fetch-5)
  """
  @spec fetch(
          connection | client_id | bootstrap,
          topic,
          partition,
          offset,
          [fetch_opt]
        ) ::
          {:ok, {offset, [Brodex.Message.record()]}} | {:error, term}
  def fetch(conn_spec, topic, partition, offset, options \\ []),
    do: :brod.fetch(conn_spec, topic, partition, offset, Enum.into(options, %{}))

  @doc """
  Parse endpoints.

  ## Examples

      iex> Brodex.parse_endpoints("kafka1:9000,kafka:9001")
      [{'kafka1', 9000}, {'kafka', 9001}]

  """
  @spec parse_endpoints(String.t()) :: [endpoint]
  def parse_endpoints(endpoints)

  def parse_endpoints(endpoints) when is_binary(endpoints) do
    endpoints
    |> String.split(",")
    |> Enum.map(fn host_port ->
      # Each entry must be exactly "host:port" with an integer port;
      # anything else raises a MatchError.
      [host, port] = String.split(host_port, ":")
      {port, ""} = Integer.parse(port)
      {to_charlist(host), port}
    end)
  end
end
|
lib/brodex.ex
| 0.808219 | 0.773131 |
brodex.ex
|
starcoder
|
defmodule Level10.Games do
@moduledoc """
This module is the interface into game logic. All presenters within the web
domain should interface only with this module for controlling games. Its
children shouldn't be touched directly.
Most of the functions in the module are client functions that give
instructions to a game server, but some of them will interact instead with
the distributed registry and supervisor, or with Phoenix Presence or PubSub.
"""
alias Level10.Accounts.User
alias Level10.Presence
alias Level10.Games.{Game, GameRegistry, GameServer, GameSupervisor, Levels, Player, Settings}
require Logger
@typep game_name :: {:via, module, term}
@max_creation_attempts 10
@doc """
Add one or more cards to a group that is already on the table
"""
@spec add_to_table(
Game.join_code(),
Player.id(),
Player.id(),
non_neg_integer(),
Game.cards(),
timeout()
) ::
:ok | :invalid_group | :level_incomplete | :needs_to_draw | :not_your_turn
def add_to_table(join_code, player_id, table_id, position, cards_to_add, timeout \\ 5000) do
GenServer.call(
via(join_code),
{:add_to_table, {player_id, table_id, position, cards_to_add}},
timeout
)
end
@doc """
Get the current count of active games in play.
"""
@spec count() :: non_neg_integer()
def count do
%{active: count} = Supervisor.count_children(GameSupervisor)
count
end
@doc """
Create a new game with the player named as its creator.
"""
@spec create_game(User.t(), Settings.t()) :: {:ok, Game.join_code(), Player.id()} | :error
def create_game(user, settings) do
player = Player.new(user)
do_create_game(player, settings, @max_creation_attempts)
end
@doc """
Returns a Player struct representing the player who created the game.
"""
@spec creator(Game.join_code(), timeout()) :: Player.t()
def creator(join_code, timeout \\ 5000) do
GenServer.call(via(join_code), :creator, timeout)
end
@doc """
Check to see if the current player has drawn a card yet.
## Examples
iex> current_player_has_drawn?("ABCD")
true
"""
@spec current_player_has_drawn?(Game.join_code(), timeout()) :: boolean()
def current_player_has_drawn?(join_code, timeout \\ 5000) do
GenServer.call(via(join_code), :current_turn_drawn?, timeout)
end
@doc """
Delete a game.
## Examples
iex> delete_game("ABCD")
:ok
"""
@spec delete_game(Game.join_code(), reason :: term, timeout) :: :ok
def delete_game(join_code, reason \\ :normal, timeout \\ :infinity) do
GenServer.stop(via(join_code), reason, timeout)
end
@doc """
Deletes the specified player from the game. This is only allowed if the game
is still in the lobby stage.
If the player is currently alone in the game, the game will be deleted as
well.
"""
@spec delete_player(Game.join_code(), Player.id(), timeout()) ::
:ok | :already_started | :deleted
def delete_player(join_code, player_id, timeout \\ 5000) do
result = GenServer.call(via(join_code), {:delete_player, player_id}, timeout)
with :empty_game <- result, do: delete_game(join_code)
end
@doc """
Discard a card from the player's hand
## Examples
iex> discard_card("ABCD", "9c34b9fe-3104-44b3-b21b-28140e2e3624", %Card{color: :green, value: :twelve})
:ok
"""
@spec discard_card(Game.join_code(), Player.id(), Card.t(), timeout()) ::
:ok | :needs_to_draw | :not_your_turn
def discard_card(join_code, player_id, card, timeout \\ 5000) do
GenServer.call(via(join_code), {:discard, {player_id, card}}, timeout)
end
@doc """
Take the top card from either the draw pile or discard pile and add it to the
player's hand
## Examples
iex> draw_card("ABCD", "9c34b9fe-3104-44b3-b21b-28140e2e3624", :draw_pile)
%Card{color: :green, value: :twelve}
iex> draw_card("ABCD", "9c34b9fe-3104-44b3-b21b-28140e2e3624", :discard_pile)
%Card{color: :green, value: :twelve}
"""
@spec draw_card(Game.join_code(), Player.id(), :discard_pile | :draw_pile, timeout()) ::
Card.t() | :already_drawn | :empty_discard_pile | :not_your_turn | :skip
def draw_card(join_code, player_id, source, timeout \\ 5000) do
GenServer.call(via(join_code), {:draw, {player_id, source}}, timeout)
end
@doc """
Returns whether or not a game with the specified join code exists.
## Examples
iex> exists?("ABCD")
true
iex> exists?("ASDF")
false
"""
@spec exists?(Game.join_code()) :: boolean()
def exists?(join_code) do
case Horde.Registry.lookup(GameRegistry, join_code) do
[] -> false
_ -> true
end
end
@doc """
Returns whether or not the specified game is finished.
## Examples
iex> finished?("ABCD")
true
"""
@spec finished?(Game.join_code(), timeout()) :: boolean()
def finished?(join_code, timeout \\ 5000) do
GenServer.call(via(join_code), :finished?, timeout)
end
@doc """
Returns the game with the specified join code.
## Examples
iex> get("ABCD")
%Game{}
"""
@spec get(Game.join_code(), timeout()) :: Game.t()
def get(join_code, timeout \\ 5000) do
GenServer.call(via(join_code), :get, timeout)
end
@doc """
Get the player whose turn it currently is.
## Examples
iex> get_current_turn("ABCD")
%Player{id: "ffe6629a-faff-4053-b7b8-83c3a307400f", name: "Player 1"}
"""
@spec get_current_turn(Game.join_code(), timeout()) :: Player.t()
def get_current_turn(join_code, timeout \\ 5000) do
GenServer.call(via(join_code), :current_player, timeout)
end
@doc """
Get the count of cards in each player's hand.
## Examples
iex> get_hand_counts("ABCD")
%{"179539f0-661e-4b56-ac67-fec916214223" => 10, "000cc69a-bb7d-4d3e-ae9f-e42e3dcac23e" => 3}
"""
@spec get_hand_counts(Game.join_code(), timeout()) :: %{
optional(Player.id()) => non_neg_integer()
}
def get_hand_counts(join_code, timeout \\ 5000) do
GenServer.call(via(join_code), :hand_counts, timeout)
end
@doc """
Get the hand of the specified player.
## Examples
iex> get_hand_for_player("ABCD", "557489d0-1ef2-4763-9b0b-d2ea3c80fd99")
[%Card{color: :green, value: :twelve}, %Card{color: :blue, value: :nine}, ...]
"""
@spec get_hand_for_player(Game.join_code(), Player.id(), timeout()) :: list(Card.t())
def get_hand_for_player(join_code, player_id, timeout \\ 5000) do
GenServer.call(via(join_code), {:hand, player_id}, timeout)
end
@doc """
Get the level information for each player in the game.
## Examples
iex> get_levels("ABCD")
%{
"04ba446e-0b2a-49f2-8dbf-7d9742548842" => [set: 4, run: 4],
"86800484-8e73-4408-bd15-98a57871694f" => [run: 7],
}
"""
@spec get_levels(Game.join_code(), timeout()) :: %{optional(Player.t()) => Levels.level()}
def get_levels(join_code, timeout \\ 5000) do
levels = GenServer.call(via(join_code), :levels, timeout)
for {player_id, level_number} <- levels,
into: %{},
do: {player_id, Levels.by_number(level_number)}
end
@doc """
Get the player whose turn will come after the player specified.
## Examples
iex> get_next_player("ABCD", "103b1a2c-e3fd-4cfb-bdcd-8842cf5c8012")
%Player{id: "27aada8a-a9d4-4b00-a306-92d1e507a3cd"}
"""
@spec get_next_player(Game.join_code(), Player.id(), timeout()) :: Player.t()
def get_next_player(join_code, player_id, timeout \\ 5000) do
GenServer.call(via(join_code), {:next_player, player_id}, timeout)
end
@doc """
Get the list of players in a game.
## Examples
iex> get_players("ABCD")
[
%Player{id: "601a07a1-b229-47e5-ad13-dbe0599c90e9", name: "Player 1"},
%Player{id: "a0d2ef3e-e44c-4a58-b90d-a56d88224700", name: "Player 2"}
]
"""
@spec get_players(Game.join_code(), timeout) :: list(Player.t())
def get_players(join_code, timeout \\ 5000) do
GenServer.call(via(join_code), :players, timeout)
end
@doc """
Gets the set of IDs of players who are ready for the next round to begin.
"""
@spec get_players_ready(Game.join_code(), timeout()) :: MapSet.t(Player.id())
def get_players_ready(join_code, timeout \\ 5000) do
GenServer.call(via(join_code), :players_ready, timeout)
end
@doc """
Get the round number for the current round.
"""
@spec get_round_number(Game.join_code(), timeout()) :: non_neg_integer()
def get_round_number(join_code, timeout \\ 5000) do
GenServer.call(via(join_code), :current_round, timeout)
end
@doc """
Get the scores for all players in a game.
## Examples
iex> get_scores("ABCD")
%{
"e486056e-4a01-4239-9f00-6f7f57ca8d54" => {3, 55},
"38379e46-4d29-4a22-a245-aa7013ec3c33" => {2, 120}
}
"""
@spec get_scores(Game.join_code(), timeout()) :: Game.scores()
def get_scores(join_code, timeout \\ 5000) do
GenServer.call(via(join_code), :scoring, timeout)
end
@doc """
Gets the set of players that will be skipped on their next turn.
## Examples
iex> get_skipped_players("ABCD")
#MapSet<["a66f96e0-dfd9-493e-9bb9-47cb8baed530"]
"""
@spec get_skipped_players(Game.join_code(), timeout()) :: MapSet.t(Player.id())
def get_skipped_players(join_code, timeout \\ 5000) do
GenServer.call(via(join_code), :skipped_players, timeout)
end
@doc """
Get the settings for the game.
## Examples
iex> get_settings("ABCD")
%Level10.Games.Settings{}
"""
@spec get_settings(Game.join_code(), timeout()) :: Settings.t()
def get_settings(join_code, timeout \\ 5000) do
GenServer.call(via(join_code), :settings, timeout)
end
@doc """
Get the table: the cards that have been played to complete levels by each
player.
## Examples
iex> get_table("ABCD")
%{
"12a29ba6-fe6f-4f81-8c89-46ef8aff4b82" => %{
0 => [
%Level10.Games.Card{color: :black, value: :wild},
%Level10.Games.Card{color: :blue, value: :twelve},
%Level10.Games.Card{color: :red, value: :twelve}
],
1 => [
%Level10.Games.Card{color: :black, value: :wild},
%Level10.Games.Card{color: :green, value: :ten},
%Level10.Games.Card{color: :blue, value: :ten}
]
}
}
"""
@spec get_table(Game.join_code(), timeout()) :: Game.table()
def get_table(join_code, timeout \\ 5000) do
GenServer.call(via(join_code), :table, timeout)
end
@doc """
Get the top card from the discard pile.
## Examples
iex> get_top_discarded_card("ABCD")
%Card{color: :green, value: :twelve}
iex> get_top_discarded_card("ABCD")
nil
"""
@spec get_top_discarded_card(Game.join_code(), timeout()) :: Card.t() | nil
def get_top_discarded_card(join_code, timeout \\ 5000) do
GenServer.call(via(join_code), :top_discarded_card, timeout)
end
@doc """
Attempts to join a game. Will return an ok tuple with the player ID for the
new player if joining is successful, or an atom with a reason if not.
## Examples
iex> join_game("ABCD", "Player One")
{:ok, "9bbfeacb-a006-4646-8776-83cca0ad03eb"}
iex> join_game("ABCD", "Player One")
:already_started
iex> join_game("ABCD", "Player One")
:full
iex> join_game("ABCD", "Player One")
:not_found
"""
@spec join_game(Game.join_code(), User.t(), timeout()) ::
:ok | :already_started | :full | :not_found
def join_game(join_code, user, timeout \\ 5000) do
player = Player.new(user)
if exists?(join_code) do
GenServer.call(via(join_code), {:join, player}, timeout)
else
:not_found
end
end
@doc """
Returns a list of all of the join codes for games that are currently active.
This can then be used for things like monitoring and garbage collection.
## Examples
iex> list_join_codes()
["ABCD", "EFGH"]
"""
@spec list_join_codes :: list(Game.join_code())
def list_join_codes do
for {_, pid, _, _} <- Supervisor.which_children(GameSupervisor) do
Horde.Registry.keys(GameRegistry, pid)
end
end
@doc """
Get the list of players currently present in the specified game.
"""
@spec list_presence(Game.join_code()) :: %{optional(Player.id()) => map()}
def list_presence(join_code) do
Presence.list("game:" <> join_code)
end
@doc """
Stores in the game state that the specified player is ready to move on to the
next stage of the game.
"""
@spec mark_player_ready(Game.join_code(), Player.id()) :: :ok
def mark_player_ready(join_code, player_id) do
result = GenServer.cast(via(join_code), {:player_ready, player_id})
with :game_over <- result, do: delete_game(join_code)
end
@doc """
Returns whether or not the specified player exists within the specified game.
"""
@spec player_exists?(Game.t() | Game.join_code(), Player.id(), timeout()) :: boolean()
def player_exists?(join_code_or_game, player_id, timeout \\ 5000)
def player_exists?(join_code, player_id, timeout) when is_binary(join_code) do
GenServer.call(via(join_code), {:player_exists?, player_id}, timeout)
end
def player_exists?(game, player_id, _), do: Game.player_exists?(game, player_id)
@doc """
Returns the set of players that remain in the game.
"""
@spec remaining_players(Game.join_code(), timeout()) :: MapSet.t()
def remaining_players(join_code, timeout \\ 5000) do
GenServer.call(via(join_code), :remaining_players, timeout)
end
@doc """
Removes the specified player from the game after it has already started.
"""
@spec remove_player(Game.join_code(), Player.id()) :: :ok
def remove_player(join_code, player_id) do
GenServer.cast(via(join_code), {:remove_player, player_id})
end
@doc """
Check whether or not the current round has started.
## Examples
iex> round_started?("ABCD")
true
iex> round_started?("EFGH")
false
"""
@spec round_started?(Game.join_code(), timeout()) :: boolean()
def round_started?(join_code, timeout \\ 5000) do
GenServer.call(via(join_code), :round_started?, timeout)
end
@doc """
Returns the player struct representing the player who won the current round.
"""
@spec round_winner(Game.join_code(), timeout()) :: Player.t() | nil
def round_winner(join_code, timeout \\ 5000) do
GenServer.call(via(join_code), :round_winner, timeout)
end
@doc """
Discards a skip card from the player's hand and specify the player whose next
turn should be skipped.
## Examples
iex> skip_player("ABCD", "9c34b9fe-3104-44b3-b21b-28140e2e3624", "4fabf53c-6449-4d18-ab28-11cf642dee24")
:ok
iex> skip_player("ABCD", "9c34b9fe-3104-44b3-b21b-28140e2e3624", "4fabf53c-6449-4d18-ab28-11cf642dee24")
:ok
"""
@spec skip_player(Game.join_code(), Player.id(), Player.id(), timeout()) ::
:ok | :needs_to_draw | :not_your_turn
def skip_player(join_code, player_id, player_to_skip, timeout \\ 5000) do
GenServer.call(via(join_code), {:skip_player, {player_id, player_to_skip}}, timeout)
end
@doc """
Start the game.
"""
@spec start_game(Game.join_code()) :: :ok
def start_game(join_code) do
GenServer.cast(via(join_code), :start_game)
end
@doc """
Check whether or not a game has started.
## Examples
iex> started?("ABCD")
true
iex> started?("EFGH")
false
"""
@spec started?(Game.join_code(), timeout()) :: boolean()
def started?(join_code, timeout \\ 5000) do
GenServer.call(via(join_code), :started?, timeout)
end
@doc """
Susbscribe a process to updates for the specified game.
"""
@spec subscribe(String.t(), Player.id()) :: :ok | {:error, term()}
def subscribe(game_code, player_id) do
topic = "game:" <> game_code
with :ok <- Phoenix.PubSub.subscribe(Level10.PubSub, topic),
{:ok, _} <- Presence.track_player(game_code, player_id) do
if player_id != :display, do: Presence.track_user(player_id, game_code)
:ok
end
end
@doc """
Set the given player's table to the given cards.
"""
@spec table_cards(Game.join_code(), Player.id(), Game.player_table(), timeout()) ::
:ok | :already_set | :needs_to_draw | :not_your_turn
def table_cards(join_code, player_id, player_table, timeout \\ 5000) do
GenServer.call(via(join_code), {:table_cards, {player_id, player_table}}, timeout)
end
@doc """
Unsubscribe a process from updates for the specified game.
"""
@spec unsubscribe(String.t(), Player.id()) :: :ok | {:error, term()}
def unsubscribe(game_code, player_id) do
topic = "game:" <> game_code
with :ok <- Phoenix.PubSub.unsubscribe(Level10.PubSub, topic) do
Presence.untrack(self(), topic, player_id)
end
end
@doc """
Update the specified game using the provided function. This isn't meant to be
used for anything other than administrative debugging.
"""
@spec update(Game.join_code(), (Game.t() -> Game.t())) :: :ok
def update(join_code, fun) do
GenServer.cast(via(join_code), {:update, fun})
end
# Private
@spec do_create_game(Player.t(), Settings.t(), non_neg_integer()) ::
{:ok, Game.join_code()} | :error
defp do_create_game(player, settings, attempts_remaining)
defp do_create_game(_player, _settings, 0) do
:error
end
defp do_create_game(player, settings, attempts_remaining) do
join_code = Game.generate_join_code()
game = %{
id: join_code,
start: {GameServer, :start_link, [{join_code, player, settings}, [name: via(join_code)]]},
shutdown: 1000,
restart: :temporary
}
case Horde.DynamicSupervisor.start_child(GameSupervisor, game) do
{:ok, _pid} ->
Logger.info(["Created game ", join_code])
{:ok, join_code}
{:error, {:already_started, _pid}} ->
do_create_game(player, settings, attempts_remaining - 1)
end
end
# Builds the :via tuple used to address a game process through Horde's
# distributed registry.
@spec via(Game.join_code()) :: game_name()
defp via(join_code), do: {:via, Horde.Registry, {GameRegistry, join_code}}
end
|
lib/level10/games.ex
| 0.843493 | 0.40116 |
games.ex
|
starcoder
|
defmodule Panpipe.AST.Node do
  @moduledoc """
  Behaviour implemented by all nodes of the Panpipe AST.

  The Panpipe AST is a Elixir representation of the
  [Pandoc data structure for a format-neutral representation of documents](http://hackage.haskell.org/package/pandoc-types-1.17.5.4/docs/Text-Pandoc-Definition.html).
  Each of the nodes of this AST data structure is a struct implementing the `Panpipe.AST.Node`
  behaviour and directly matches the respective Pandoc element.
  Each node type implements Elixir's `Enumerable` protocol as a pre-order tree
  traversal.
  """

  # TODO: This attempt to define a type for struct implementing this behaviour is copied from RDF.Graph & co. and won't work probably ...
  @type t :: module

  @doc """
  Returns a list of the children of a node.
  """
  @callback children(t) :: [t]

  @doc """
  Returns the type of child expected for a AST node.
  This function returns either `:block` or `:inline`.
  """
  @callback child_type() :: atom

  @doc """
  Returns if the AST node module represents a block element.
  """
  @callback block?() :: bool

  @doc """
  Returns if the AST node module represents an inline element.
  """
  @callback inline?() :: bool

  @doc """
  Produces the Pandoc AST data structure of a Panpipe AST node.
  """
  @callback to_pandoc(t) :: map

  @doc """
  Transforms an Panpipe AST node recursively.
  see `Panpipe.AST.Node.transform/2`
  """
  @callback transform(t, fun) :: t

  # Struct fields shared by every node type; `parent` is set transiently
  # during traversal/transformation and reset to nil on replacement nodes.
  @shared_fields parent: nil

  @doc """
  Produces the Pandoc AST data structure of the given Panpipe AST `node`.
  ## Examples
      iex> %Panpipe.AST.Header{level: 1, children: [%Panpipe.AST.Str{string: "Example"}]}
      ...> |> Panpipe.AST.Node.to_pandoc()
      %{
        "c" => [1, ["", [], []], [%{"c" => "Example", "t" => "Str"}]],
        "t" => "Header"
      }
  """
  # Dispatches to the node's own module implementation.
  def to_pandoc(%mod{} = node), do: mod.to_pandoc(node)

  @doc """
  Transforms the AST under the given Panpipe AST `node` by applying the given transformation function recursively.
  The given function will be passed all nodes in pre-order and will replace those
  nodes for which the transformation function `fun` returns a non-`nil` replacement
  value.
  A node can also be replaced with a sequence of new nodes by returning a list of
  nodes in the transformation function.
  If you want to remove a node, you can return an empty list or a `Panpipe.AST.Null`
  node.
  The transformation will be applied recursively also on children of the replaced
  values. You can prohibit that by returning the replacement in a halt tuple like
  this: `{:halt, replacement}`.
  ## Examples
      Panpipe.ast!(input: "file.md")
      |> Panpipe.transform(fn
           %Panpipe.AST.Header{} = header ->
             %Panpipe.AST.Header{header | level: header.level + 1}
           _ -> nil
         end)

      Panpipe.ast!(input: "file.md")
      |> Panpipe.transform(fn
           %Panpipe.AST.Header{} = header ->
             {:halt, %Panpipe.AST.Header{header | level: header.level + 1}}
           _ -> nil
         end)
  """
  def transform(%mod{} = node, fun), do: mod.transform(node, fun)

  @doc """
  Returns if the given AST `node` is a block element.
  """
  def block?(node)
  def block?(%mod{}), do: mod.block?()

  @doc """
  Returns if the given AST `node` is an inline element.
  """
  def inline?(node)
  def inline?(%mod{}), do: mod.inline?()

  @doc """
  Returns the type of child expected for the given AST `node`.
  This function returns either `:block` or `:inline`.
  """
  def child_type(node)
  def child_type(%mod{}), do: mod.child_type()

  @doc false
  # Injects the struct, behaviour callbacks, and the Enumerable/Conversion
  # protocol implementations into each concrete node module.
  defmacro __using__(opts) do
    node_type = Keyword.fetch!(opts, :type)
    fields = fields(node_type, Keyword.get(opts, :fields, []))

    quote do
      @behaviour Panpipe.AST.Node
      import Panpipe.AST.Node

      defstruct unquote(fields)

      def children(node)
      def children(%{children: children}), do: children
      def children(_), do: []

      # block?/inline? are decided at compile time from the :type option.
      if unquote(node_type) == :block do
        def block?(), do: true
      else
        def block?(), do: false
      end

      if unquote(node_type) == :inline do
        def inline?(), do: true
      else
        def inline?(), do: false
      end

      def transform(node, fun), do: do_transform(node, fun)

      defimpl Panpipe.Pandoc.Conversion do
        def convert(node, opts) do
          with {:ok, result} <-
                 node
                 |> Panpipe.Document.fragment()
                 |> Panpipe.Document.to_pandoc()
                 |> Jason.encode!()
                 |> Panpipe.Pandoc.call(Keyword.put(opts, :from, :json))
          do
            if result do
              Panpipe.Pandoc.Conversion.Utils.post_process(result, node, opts)
            end
          else
            _ -> nil
          end
        end
      end

      defimpl Enumerable do
        def member?(_node, _), do: {:error, __MODULE__}
        def count(_node), do: {:error, __MODULE__}
        def slice(_node), do: {:error, __MODULE__}

        def reduce(_, {:halt, acc}, _fun), do: {:halted, acc}

        def reduce(node, {:suspend, acc}, fun) do
          {:suspended, acc, &reduce(node, &1, fun)}
        end

        # Pre-order traversal: visit the node, then fold over its children.
        # NOTE: `__CALLER__.module` (not `__MODULE__`) is needed here because
        # inside `defimpl` `__MODULE__` would refer to the impl module, while
        # we need the node module that `use`d Panpipe.AST.Node.
        def reduce(node, {:cont, acc}, fun) do
          unquote(__CALLER__.module).children(node)
          |> Enum.reduce(fun.(node, acc), fn child, result ->
            Enumerable.reduce(%{child | parent: node}, result, fun)
          end)
        end
      end

      defoverridable [children: 1, transform: 2]
    end
  end

  @doc !"""
  This is a general implementation of the `Panpipe.AST.Node.transform/2` function.
  Do not use it directly, but instead call the `Panpipe.AST.Node.transform/2` implementation
  of a node, which might have a different implementation.
  """
  def do_transform(node, fun)

  def do_transform(%{children: children} = node, fun) do
    %{node | children: do_transform_children(children, node, fun)}
  end

  def do_transform(node, _), do: node

  @doc false
  # Applies `fun` to each child (with its `parent` set for context):
  # - {:halt, replacement} -> use replacement(s) without recursing into them
  # - nil                  -> keep the child but recurse into it
  # - replacement          -> use replacement(s) and recurse into them
  # flat_map allows a single child to be replaced by zero or more nodes.
  def do_transform_children(children, node, fun) do
    Enum.flat_map(children, fn child ->
      case fun.(%{child | parent: node}) do
        {:halt, mapped_children} ->
          mapped_children
          |> List.wrap()
          |> Enum.map(fn mapped_child -> %{mapped_child | parent: nil} end)

        nil ->
          transform(child, fun)
          |> List.wrap()

        mapped_children ->
          mapped_children
          |> List.wrap()
          |> Enum.map(fn mapped_child ->
            transform(%{mapped_child | parent: nil}, fun)
          end)
      end
    end)
  end

  # Block nodes additionally get a `children` field.
  # NOTE: `++` binds tighter than `|>`, so the concatenated list is what gets
  # piped into Keyword.merge/2.
  defp fields(:block, fields) do
    [
      children: []
    ]
    ++ @shared_fields
    |> Keyword.merge(to_keywords(fields))
  end

  defp fields(:inline, fields) do
    @shared_fields
    |> Keyword.merge(to_keywords(fields))
  end

  # Normalizes a field spec: plain atoms become `{atom, nil}` keyword entries.
  defp to_keywords(list) do
    if Keyword.keyword?(list) do
      list
    else
      Enum.map list, fn
        {_, _} = keyword -> keyword
        field -> {field, nil}
      end
    end
  end
end
|
lib/panpipe/ast/node.ex
| 0.68637 | 0.732532 |
node.ex
|
starcoder
|
defmodule Countries do
  @moduledoc """
  Lookup and filtering functions over country data that is loaded from YAML
  files once at compile time.
  """

  @doc """
  Returns all countries.
  """
  def all do
    countries()
  end

  @doc """
  Returns one country by given alpha2 country code or name
  ## Examples
      iex> %Countries.Country{name: name} = Countries.get("PL")
      iex> name
      "Poland"
      iex> %Countries.Country{alpha2: alpha2} = Countries.get("Poland")
      iex> alpha2
      "PL"
  """
  # A 16-bit binary is a two-character string, i.e. an alpha2 code.
  def get(attrs) when bit_size(attrs) == 16 do
    # Alpha2 codes are unique; raises MatchError for an unknown code.
    [country] = filter_by(:alpha2, attrs)
    country
  end

  def get(attrs) do
    # Fix: previously only the 0- and 1-result cases were handled, so more
    # than one match raised a CaseClauseError. Now the first match is
    # returned; an empty list still signals "not found".
    case filter_by(:name, attrs) do
      [] -> []
      [country | _rest] -> country
    end
  end

  @doc """
  Filters countries by given attribute.
  Returns a list of `Countries.Country` structs
  ## Examples
      iex> countries = Countries.filter_by(:region, "Europe")
      iex> Enum.count(countries)
      51
      iex> Enum.map(countries, &Map.get(&1, :alpha2)) |> Enum.take(5)
      ["AD", "AL", "AT", "AX", "BA"]
      iex> countries = Countries.filter_by(:unofficial_names, "Reino Unido")
      iex> Enum.count(countries)
      1
      iex> Enum.map(countries, &Map.get(&1, :name)) |> List.first
      "United Kingdom of Great Britain and Northern Ireland"
  """
  def filter_by(attribute, value) do
    Enum.filter(countries(), fn country ->
      country
      |> Map.get(attribute)
      |> equals_or_contains_in_list(value)
    end)
  end

  # Matches either a scalar attribute or any element of a list attribute.
  defp equals_or_contains_in_list(nil, _), do: false
  defp equals_or_contains_in_list([], _), do: false

  defp equals_or_contains_in_list([attribute | rest], value) do
    if equals_or_contains_in_list(attribute, value) do
      true
    else
      equals_or_contains_in_list(rest, value)
    end
  end

  defp equals_or_contains_in_list(attribute, value),
    do: normalize(attribute) == normalize(value)

  # Comparison is case- and whitespace-insensitive; integers compare as strings.
  defp normalize(value) when is_integer(value),
    do: value |> Integer.to_string() |> normalize()

  defp normalize(value) when is_binary(value),
    do: value |> String.downcase() |> String.replace(~r/\s+/, "")

  defp normalize(value), do: value

  @doc """
  Checks if country for specific attribute and value exists.
  Returns boolean
  ## Examples
      iex> Countries.exists?(:name, "Poland")
      true
      iex> Countries.exists?(:name, "Polande")
      false
  """
  def exists?(attribute, value) do
    filter_by(attribute, value) != []
  end

  # -- Load countries from yaml files once on compile time ---
  # Ensure :yamerl is running
  Application.start(:yamerl)
  @countries Countries.Loader.load()

  defp countries do
    @countries
  end
end
|
lib/countries.ex
| 0.828176 | 0.452415 |
countries.ex
|
starcoder
|
defmodule Nats.Connection do
  @moduledoc ~S"""
  This module contains functions to connect to a NATS server.
  The defaults are oriented around getting new people to NATS
  up and running quickly. Please overwrite for your production
  environment.
  """
  require Logger

  @tcp_attrs [:host, :port, :tcp]
  @poolboy_attrs [:pool_size, :pool_max_overflow]
  @client_attrs [:lang, :version, :verbose, :pedantic, :user, :password]

  # Built-in fallbacks used when no application config is provided.
  @incode_defaults [lang: "elixir",
                    version: Nats.Mixfile.version,
                    verbose: false,
                    pedantic: false,
                    host: "localhost",
                    port: 4222,
                    tcp: [:binary, active: false],
                    pool_size: 1,
                    pool_max_overflow: 1]

  @doc """
  Returns all available options for configuring your NATS client
  Accessing the TCP server
    * `:host` - Which IP or Hostname to connect to (default: 'localhost')
    * `:port` - On which port (default: 4222)
    * `:tcp` - Additional TCP options (default: [:binary, active: false])
  Configuring the client
    * `:lang` - The client language, which is "elixir" (should not change)
    * `:version` - The client version, which is found in mix.exs
    * `:verbose` - Do you want verbose outputs? Defaults to false
    * `:pedantic` - Do you have a schtickler or not? Defaults to false
    * `:user` - The user that is accessing the NATS server.
      This is omitted by default
    * `:password` - The user's password for accessing the NATS server.
      This is omitted by default
  Configuring the connection pool (based on poolboy)
    * `:pool_size` - How many connections to maintain (default: 1)
    * `:pool_max_overflow` - How many overflows to support (default: 1)
  """
  def opts(), do: opts([])

  @doc """
  Return all available options for configuring your NATS client, but
  `overwrite` the loaded configurations with the provided `overwrite` map.
  Look at `Nats.opts/0` for more details
  """
  def opts(overwrite), do: _opts(overwrite, @tcp_attrs ++ @poolboy_attrs ++ @client_attrs)

  # Merges caller overrides on top of configured defaults, then keeps only
  # the requested keys.
  defp _opts(overwrite, params), do: default_opts() |> Keyword.merge(overwrite) |> Keyword.take(params)

  @doc """
  Returns all connection pooling options, including
    * `:pool_size` - How many connections to maintain (default: 1)
    * `:pool_max_overflow` - How many overflows to support (default: 1)
  """
  def poolboy_opts(), do: poolboy_opts([])

  @doc """
  Returns all connection pooling options, but
  `overwrite` the loaded configurations with the provided `overwrite` map.
  Look at `Nats.poolboy_opts/0` for more details
  """
  def poolboy_opts(overwrite), do: _opts(overwrite, @poolboy_attrs)

  @doc """
  Returns all NATS server options, including
    * `:host` - Which IP or Hostname to connect to (default: 'localhost')
    * `:port` - On which port (default: 4222)
    * `:tcp` - Additional TCP options (default: [:binary, active: false])
  """
  def tcp_opts(), do: tcp_opts([])

  @doc """
  Returns all NATS server options, but
  `overwrite` the loaded configurations with the provided `overwrite` map.
  Look at `Nats.tcp_opts/0` for more details
  """
  def tcp_opts(overwrite), do: _opts(overwrite, @tcp_attrs)

  @doc """
  Attempt to connect to the NATS tcp server, using the configurations from `Nats.tcp_opts/0`.
  """
  def connect(), do: connect([])

  @doc """
  Attempt to connect to the NATS tcp server, but
  `overwrite` the loaded configurations from `Nats.tcp_opts/1`.
  """
  def connect(overwrite), do: overwrite |> tcp_opts |> _connect

  # Opens the TCP connection. Returns {:ok, socket} | {:error, reason},
  # matching :gen_tcp.connect/3.
  defp _connect(server) do
    Logger.info("Attempting to connect to NATS Server: #{server[:host]}:#{server[:port]}")

    # Fix: was `{ok, socket} = :gen_tcp.connect(...)`, which bound an error
    # tuple by accident and interpolated the raw reason into the log message
    # (crashing for non-String.Chars reasons). `to_charlist/1` replaces the
    # deprecated `to_char_list/1`.
    case :gen_tcp.connect(server[:host] |> to_charlist(), to_i(server[:port]), server[:tcp]) do
      {:ok, _socket} = success ->
        Logger.info("Connected.")
        success

      {:error, reason} = failure ->
        Logger.error("Unable to connect to NATS Server: error (#{inspect(reason)})")
        failure
    end
  end

  # Application config (config :nats, Nats.Connection) layered over the
  # in-code defaults.
  defp default_opts do
    configs = Application.get_env(:nats, __MODULE__)

    case configs do
      nil -> Logger.debug("No configs loaded (config :nats, Nats.Connection), using in code defaults")
      _ -> Logger.debug("NATS configs: #{configs |> inspect}")
    end

    Keyword.merge(@incode_defaults, configs || [])
  end

  # Coerces a port given as a string or integer into an integer.
  defp to_i(str) when is_binary(str), do: to_i(Integer.parse(str))
  defp to_i({as_i, _}), do: as_i
  defp to_i(as_i), do: as_i
end
|
lib/nats/connection.ex
| 0.728169 | 0.453685 |
connection.ex
|
starcoder
|
defmodule OMG.Eth do
  @moduledoc """
  Library for common code of the adapter/port to contracts deployed on Ethereum.
  NOTE: The library code is not intended to be used outside of `OMG.Eth`: use `OMG.Eth.RootChain` and `OMG.Eth.Token` as main
  entrypoints to the contract-interaction functionality.
  NOTE: This wrapper is intended to be as thin as possible, only offering a consistent API to the Ethereum JSONRPC client and contracts.
  Handles other non-contract queries to the Ethereum client.
  Notes on encoding: All APIs of `OMG.Eth` and the submodules with contract APIs always use raw, decoded binaries
  for binaries - never use hex encoded binaries. Such binaries may be passed as is onto `ABI` related functions,
  however they must be encoded/decoded when entering/leaving the `Ethereumex` realm
  """
  import OMG.Eth.Encoding
  require Logger

  @type address :: <<_::160>>
  @type hash :: <<_::256>>
  @type send_transaction_opts() :: [send_transaction_option()]
  @type send_transaction_option() :: {:passphrase, binary()}

  @spec node_ready() :: :ok | {:error, :geth_still_syncing | :geth_not_listening}
  def node_ready do
    case Ethereumex.HttpClient.eth_syncing() do
      {:ok, false} -> :ok
      {:ok, _} -> {:error, :geth_still_syncing}
      {:error, :econnrefused} -> {:error, :geth_not_listening}
    end
  end

  @doc """
  Checks geth syncing status, errors are treated as not synced.
  Returns:
  * false - geth is synced
  * true - geth is still syncing.
  """
  @spec syncing?() :: boolean
  def syncing?, do: node_ready() != :ok

  @doc """
  Send transaction to be signed by a key managed by Ethereum node, geth or parity.
  For geth, account must be unlocked externally.
  If using parity, account passphrase must be provided directly or via config.
  """
  @spec send_transaction(map(), send_transaction_opts()) :: {:ok, hash()} | {:error, any()}
  def send_transaction(txmap, opts \\ []) do
    case backend() do
      :geth ->
        with {:ok, receipt_enc} <- Ethereumex.HttpClient.eth_send_transaction(txmap), do: {:ok, from_hex(receipt_enc)}

      :parity ->
        with {:ok, passphrase} <- get_signer_passphrase(txmap.from),
             opts = Keyword.merge([passphrase: passphrase], opts),
             params = [txmap, Keyword.get(opts, :passphrase, "")],
             {:ok, receipt_enc} <- Ethereumex.HttpClient.request("personal_sendTransaction", params, []) do
          {:ok, receipt_enc |> from_hex()}
        end
    end
  end

  # The Ethereum client flavor (:geth or :parity), read from application config.
  def backend do
    Application.fetch_env!(:omg_eth, :eth_node)
    |> String.to_existing_atom()
  end

  def get_ethereum_height do
    case Ethereumex.HttpClient.eth_block_number() do
      {:ok, height_hex} ->
        {:ok, int_from_hex(height_hex)}

      other ->
        other
    end
  end

  @doc """
  Returns placeholder for non-existent Ethereum address
  """
  @spec zero_address :: address()
  def zero_address, do: <<0::160>>

  # Read-only contract call: ABI-encodes the call, does eth_call, decodes the
  # returned values according to `return_types`.
  def call_contract(contract, signature, args, return_types) do
    data = signature |> ABI.encode(args)

    with {:ok, return} <- Ethereumex.HttpClient.eth_call(%{to: to_hex(contract), data: to_hex(data)}),
         do: decode_answer(return, return_types)
  end

  # Single return value is unwrapped; multiple values become a tuple.
  defp decode_answer(enc_return, return_types) do
    enc_return
    |> from_hex()
    |> ABI.TypeDecoder.decode(return_types)
    |> case do
      [single_return] -> {:ok, single_return}
      other when is_list(other) -> {:ok, List.to_tuple(other)}
    end
  end

  # State-changing contract call, submitted as a transaction.
  @spec contract_transact(address, address, binary, [any], keyword) :: {:ok, hash()} | {:error, any}
  def contract_transact(from, to, signature, args, opts \\ []) do
    data = encode_tx_data(signature, args)

    txmap =
      %{from: to_hex(from), to: to_hex(to), data: data}
      |> Map.merge(Map.new(opts))
      |> encode_all_integer_opts()

    send_transaction(txmap)
  end

  # Hex-encodes every integer-valued option in place (Enum.into/3 overwrites
  # the original entries in the map).
  defp encode_all_integer_opts(opts) do
    opts
    |> Enum.filter(fn {_k, v} -> is_integer(v) end)
    |> Enum.into(opts, fn {k, v} -> {k, to_hex(v)} end)
  end

  def get_bytecode!(path_project_root, contract_name) do
    "0x" <> read_contracts_bin!(path_project_root, contract_name)
  end

  defp encode_tx_data(signature, args) do
    signature
    |> ABI.encode(args)
    |> to_hex()
  end

  defp encode_constructor_params(args, types) do
    args
    |> ABI.TypeEncoder.encode_raw(types)
    # NOTE: we're not using `to_hex` because the `0x` will be appended to the bytecode already
    |> Base.encode16(case: :lower)
  end

  def deploy_contract(addr, bytecode, types, args, opts) do
    # Fix: arguments were passed swapped as `encode_constructor_params(types, args)`,
    # which encoded the type list as data and the data as types.
    enc_args = encode_constructor_params(args, types)

    txmap =
      %{from: to_hex(addr), data: bytecode <> enc_args}
      |> Map.merge(Map.new(opts))
      |> encode_all_integer_opts()

    {:ok, _txhash} = send_transaction(txmap)
  end

  defp read_contracts_bin!(path_project_root, contract_name) do
    path = "_build/contracts/#{contract_name}.bin"

    case File.read(Path.join(path_project_root, path)) do
      {:ok, contract_json} ->
        contract_json

      {:error, reason} ->
        raise(
          RuntimeError,
          "Can't read #{path} because #{inspect(reason)}, try running mix deps.compile plasma_contracts"
        )
    end
  end

  # Topic 0 of an event log is the keccak hash of its signature.
  defp event_topic_for_signature(signature) do
    signature |> ExthCrypto.Hash.hash(ExthCrypto.Hash.kec()) |> to_hex()
  end

  # Drops logs removed by chain reorgs.
  defp filter_not_removed(logs) do
    logs |> Enum.filter(&(not Map.get(&1, "removed", true)))
  end

  def get_ethereum_events(block_from, block_to, signature, contract) do
    topic = event_topic_for_signature(signature)

    # NOTE(review): `catch _ ->` only catches thrown values, not errors such
    # as the MatchError from a failed `{:ok, logs} =` — confirm intent.
    try do
      {:ok, logs} =
        Ethereumex.HttpClient.eth_get_logs(%{
          fromBlock: to_hex(block_from),
          toBlock: to_hex(block_to),
          address: to_hex(contract),
          topics: ["#{topic}"]
        })

      {:ok, filter_not_removed(logs)}
    catch
      _ -> {:error, :failed_to_get_ethereum_events}
    end
  end

  def parse_event(%{"data" => data} = log, {signature, keys}) do
    decoded_values =
      data
      |> from_hex()
      |> ABI.TypeDecoder.decode(ABI.FunctionSelector.decode(signature))

    Enum.zip(keys, decoded_values)
    |> Map.new()
    |> common_parse_event(log)
  end

  # Indexed event fields live in the log topics; non-indexed ones in `data`.
  # Both are decoded and merged into a single map.
  def parse_events_with_indexed_fields(
        %{"data" => data, "topics" => [_event_sig | indexed_data]} = log,
        {non_indexed_keys, non_indexed_key_types},
        {indexed_keys, indexed_keys_types}
      ) do
    decoded_non_indexed_fields =
      data
      |> from_hex()
      |> ABI.TypeDecoder.decode(non_indexed_key_types)

    non_indexed_fields =
      Enum.zip(non_indexed_keys, decoded_non_indexed_fields)
      |> Map.new()

    decoded_indexed_fields =
      for {encoded, type_sig} <- Enum.zip(indexed_data, indexed_keys_types) do
        [decoded] =
          encoded
          |> from_hex()
          |> ABI.TypeDecoder.decode([type_sig])

        decoded
      end

    indexed_fields =
      Enum.zip(indexed_keys, decoded_indexed_fields)
      |> Map.new()

    Map.merge(non_indexed_fields, indexed_fields)
    |> common_parse_event(log)
  end

  @doc """
  Gets the decoded call data of a contract call, based on a particular Ethereum-tx hash and some info on the contract
  function.
  `eth_tx_hash` is expected encoded in raw binary format, as usual
  NOTE: function name and rich information about argument names and types is used, rather than its compact signature
  (like elsewhere) because `ABI.decode` has some issues with parsing signatures in this context.
  """
  @spec get_call_data(binary(), binary(), list(atom), list(atom)) :: map
  def get_call_data(eth_tx_hash, name, arg_names, arg_types) do
    {:ok, %{"input" => eth_tx_input}} = Ethereumex.HttpClient.eth_get_transaction_by_hash(to_hex(eth_tx_hash))
    encoded_input = from_hex(eth_tx_input)

    function_inputs =
      ABI.decode(
        ABI.FunctionSelector.parse_specification_item(%{
          "type" => "function",
          "name" => name,
          "inputs" => Enum.map(arg_types, &%{"type" => to_string(&1)}),
          "outputs" => []
        }),
        encoded_input
      )

    Enum.zip(arg_names, function_inputs)
    |> Map.new()
  end

  # Annotates a parsed event with the Ethereum block height it came from.
  defp common_parse_event(result, %{"blockNumber" => eth_height}) do
    result
    |> Map.put(:eth_height, int_from_hex(eth_height))
  end

  defp get_signer_passphrase("0x00a329c0648769a73afac7f9381e08fb4<PASSWORD>") do
    # Parity coinbase address in dev mode, passphrase is empty
    {:ok, ""}
  end

  defp get_signer_passphrase(_) do
    case System.get_env("SIGNER_PASSPHRASE") do
      nil ->
        _ = Logger.error("Passphrase missing. Please provide the passphrase to Parity managed account.")
        {:error, :passphrase_missing}

      value ->
        {:ok, value}
    end
  end
end
|
apps/omg_eth/lib/eth.ex
| 0.908562 | 0.541348 |
eth.ex
|
starcoder
|
defmodule Iona do
  @moduledoc File.read!("#{__DIR__}/../README.md")

  @type template_opts :: [
          {:path, Path.t()},
          {:include, [Path.t()]},
          {:helpers, [atom]}
        ]
  @type source_opts :: [
          {:path, Path.t()},
          {:include, [Path.t()]}
        ]
  @type supported_format_t :: atom
  @type tex_t :: iodata
  @type eex_tex_t :: binary
  @type executable_t :: binary
  @type processing_opts :: [
          {:preprocess, [executable_t]}
          | {:processor, executable_t}
          | {:processor_env, map}
          | {:preprocessor_env, map}
          | {:compilation_passes, non_neg_integer}
        ]

  @doc """
  Fill in a template with assignments, with TeX escaping support
  ```
  {:ok, template} = [title: "An Article", author: "<NAME>"]
  |> Iona.template(path: "/path/to/article.tex")
  Iona.write(template, "/path/to/article.pdf")
  ```
  """
  @spec template(assigns :: Keyword.t() | map, criteria :: eex_tex_t) ::
          {:ok, Iona.Template.t()} | {:error, term}
  def template(assigns, criteria) when is_binary(criteria) do
    Iona.Template.fill(assigns, %Iona.Template{body: criteria})
  end

  @spec template(assigns :: Keyword.t() | map, criteria :: template_opts) ::
          {:ok, Iona.Template.t()} | {:error, term}
  def template(assigns, criteria) when is_list(criteria) do
    # Shared options were previously fetched in two duplicated struct
    # literals; extract them once.
    include = Keyword.get(criteria, :include, [])
    helpers = Keyword.get(criteria, :helpers, [])

    # Only one of body/body_path is set: an explicit :body wins over :path.
    template =
      case Keyword.get(criteria, :body) do
        nil ->
          %Iona.Template{
            body_path: Keyword.get(criteria, :path),
            include: include,
            helpers: helpers
          }

        body ->
          %Iona.Template{body: body, include: include, helpers: helpers}
      end

    Iona.Template.fill(assigns, template)
  end

  @doc """
  The same as `template/2`, but raises `Iona.ProcessingError` if it fails.
  Returns the template otherwise.
  ```
  [title: "An Article", author: "<NAME>"]
  |> Iona.template!(path: "/path/to/article.tex")
  |> Iona.write("/path/to/article.pdf")
  ```
  """
  @spec template!(assigns :: Keyword.t() | map, criteria :: eex_tex_t | template_opts) ::
          Iona.Template.t()
  def template!(assigns, criteria) do
    case template(assigns, criteria) do
      {:ok, result} -> result
      {:error, err} -> raise Iona.Processing.ProcessingError, message: err
    end
  end

  # Note: The \\ in the example below is escaping to support ExDoc.
  # In the actual LaTeX source, this would be \documentclass
  @doc """
  Define the document source, either as a raw TeX binary or the path to a `.tex` file.
  As raw TeX:
  ```
  Iona.source("\\documentclass[12pt]{article} ...")
  ```
  From a file:
  ```
  Iona.source(path: "/path/to/document.tex")
  ```
  When providing a file path, you can also define additional files needed
  for processing. They will be copied to the temporary directory where processing
  will take place.
  ```elixir
  Iona.source(path: "/path/to/document.tex",
              include: ["/path/to/document.bib",
                        "/path/to/documentclass.sty"])
  ```
  However, when possible, files should be placed in the search path of your TeX
  installation.
  """
  @spec source(criteria :: binary) :: Iona.Source.t()
  def source(criteria) when is_binary(criteria) do
    %Iona.Source{content: criteria}
  end

  @spec source(criteria :: source_opts) :: Iona.Source.t()
  def source(criteria) when is_list(criteria) do
    %Iona.Source{
      path: Keyword.get(criteria, :path, nil),
      content: Keyword.get(criteria, :binary, nil),
      include: Keyword.get(criteria, :include, [])
    }
  end

  @doc """
  Generate a formatted document as a string.
  Without processing options:
  ```
  {:ok, pdf_string} = Iona.source(path: "/path/to/document.tex")
  |> Iona.to(:pdf)
  ```
  With processing options:
  ```
  {:ok, pdf_string} = Iona.source(path: "/path/to/document.tex")
  |> Iona.to(:pdf, processor: "xetex")
  ```
  """
  @spec to(
          input :: Iona.Input.t(),
          format :: supported_format_t,
          opts :: processing_opts
        ) :: {:ok, binary} | {:error, binary}
  def to(input, format, opts \\ []) do
    case Iona.Processing.process(input, format, opts) do
      {:ok, document} -> Iona.Document.read(document)
      {:error, error} -> {:error, error}
    end
  end

  @doc """
  The same as `to/3`, but raises `Iona.ProcessingError` if it fails.
  Returns the document content otherwise.
  ```
  Iona.source(path: "/path/to/document.tex")
  |> Iona.to!(:pdf)
  |> MyModule.do_something_with_pdf_string
  ```
  If writing to a file, see `write/3` and `write/4`, as they are both
  shorter to type and have better performance characteristics.
  """
  @spec to!(
          input :: Iona.Input.t(),
          format :: supported_format_t,
          opts :: processing_opts
        ) :: binary
  def to!(input, format, opts \\ []) do
    case to(input, format, opts) do
      {:ok, result} -> result
      {:error, err} -> raise Iona.Processing.ProcessingError, message: err
    end
  end

  @doc """
  Generate a formatted document to a file path.
  Without processing options:
  ```
  :ok = Iona.source(path: "/path/to/document.tex")
  |> Iona.write("/path/to/document.pdf")
  ```
  With processing options:
  ```
  :ok = Iona.source(path: "/path/to/document.tex")
  |> Iona.write("/path/to/document.pdf",
                processor: "xetex")
  ```
  """
  @spec write(input :: Iona.Input.t(), path :: Path.t(), opts :: processing_opts) ::
          :ok | {:error, term}
  def write(input, path, opts \\ []) do
    # The output format is inferred from the target file extension.
    input
    |> Iona.Processing.process(path |> Iona.Processing.to_format(), opts)
    |> case do
      {:ok, document} ->
        Iona.Document.write(document, path)

      {:error, error} ->
        {:error, error}
    end
  end

  @doc """
  The same as `write/3` but raises `Iona.ProcessingError` if it fails.
  Without processing options:
  ```
  Iona.source(path: "/path/to/document.tex")
  |> Iona.write!("/path/to/document.pdf")
  ```
  With processing options:
  ```
  Iona.source(path: "/path/to/document.tex")
  |> Iona.write!("/path/to/document.pdf", processor: "xetex")
  ```
  """
  @spec write!(input :: Iona.Input.t(), path :: Path.t(), opts :: processing_opts) :: :ok
  def write!(input, path, opts \\ []) do
    case write(input, path, opts) do
      :ok -> :ok
      {:error, err} -> raise Iona.Processing.ProcessingError, message: err
    end
  end

  @doc """
  Generate a directory with a build script that can be run to finalize the build.
  ## Examples
      iex> Iona.source(path: "academic.tex")
      iex> |> Iona.prepare!("/path/to/build/directory", :pdf, preprocess: ~w(latex bibtex latex))
      :ok
  """
  @spec prepare(
          input :: Iona.Input.t(),
          output :: Path.t(),
          format :: Iona.supported_format_t(),
          opts :: processing_opts
        ) :: :ok | {:error, term}
  def prepare(input, output, format, opts \\ []) do
    with {:ok, commands} <- Iona.Processing.prepare(input, output, format, opts),
         :ok <- write_build_script(output, commands) do
      :ok
    else
      {:error, error} ->
        {:error, error}
    end
  end

  @doc """
  The same as `prepare/4` but raises `Iona.ProcessingError` if it fails.
  """
  @spec prepare!(
          input :: Iona.Input.t(),
          output :: Path.t(),
          format :: Iona.supported_format_t(),
          opts :: processing_opts
        ) :: :ok
  def prepare!(input, output, format, opts \\ []) do
    case prepare(input, output, format, opts) do
      :ok -> :ok
      {:error, err} -> raise Iona.Processing.ProcessingError, message: err
    end
  end

  # Writes an executable build.sh into `directory` containing the preprocess
  # and processing commands chained with `&&`.
  @spec write_build_script(Path.t(), [String.t()]) :: :ok | {:error, File.posix()}
  defp write_build_script(directory, commands) do
    script = Path.join(directory, "build.sh")

    with :ok <- File.write(script, script_content(commands)),
         :ok <- File.chmod(script, 0o755) do
      :ok
    else
      {:error, error} ->
        {:error, error}
    end
  end

  @spec script_content([String.t()]) :: iodata
  defp script_content(commands) do
    [
      "#!/bin/sh\n",
      Enum.intersperse(commands, " && \\\n")
    ]
  end
end
|
lib/iona.ex
| 0.802052 | 0.728784 |
iona.ex
|
starcoder
|
defmodule TeslaOAuth2ClientAuth.PrivateKeyJWT do
  @moduledoc """
  Tesla middleware that implements the `"private_key_jwt"` authentication scheme for
  [https://openid.net/specs/openid-connect-core-1_0.html#ClientAuthentication](OpenID Connect clients)
  The client configuration must contain a `"jwks"` member whose value is a list of JWKs,
  including private keys. This middleware will sign the JWTs with the first encryption key
  found that conforms to the signature algorithm.
  To determine the signature algorithm to use, this middleware:
  - uses the client's `"token_endpoint_auth_signing_alg"` value if present, and check it against
    the server metadata `"token_endpoint_auth_signing_alg_values_supported"`
  - otherwise uses the `"token_endpoint_auth_signing_alg_values_supported"` server metadata and
    picks one algorithm that is suitable for signing
  - otherwise raises
  The options of this middleware are:
  - `:jwt_lifetime`: the lifetime of the JWT in seconds. Defaults to `30`
  - `:jwt_jti_callback`: a `(TeslaOAuth2ClientAuth.opts() -> String.t())` function that returns
    the `"jti"` field of the JWT. Defaults to a random 16-bytes base64 encoded string
  - `:jwt_additional_claims`: claims added to the JWT. They have precedence over the default
    claims
  """
  @behaviour Tesla.Middleware

  @assertion_type "urn:ietf:params:oauth:client-assertion-type:jwt-bearer"

  @impl true
  def call(%Tesla.Env{body: %{}} = env, next, opts) do
    client_id = opts[:client_config]["client_id"] || raise "Missing client id"

    # Adds the client authentication parameters to the request body before
    # passing the request down the middleware stack.
    body =
      env.body
      |> Map.put("client_id", client_id)
      |> Map.put("client_assertion_type", @assertion_type)
      |> Map.put("client_assertion", build_assertion(opts))

    %Tesla.Env{env | body: body}
    |> Tesla.run(next)
  end

  # Builds and signs the client assertion JWT (RFC 7523).
  defp build_assertion(opts) do
    client_id = opts[:client_config]["client_id"] || raise "Missing client id"
    jwks = opts[:client_config]["jwks"]["keys"] || raise "Missing jwks`"

    issuer =
      opts[:server_metadata]["token_endpoint"] ||
        raise "Missing token endpoint to be used as the audience from server metadata"

    lifetime = opts[:jwt_lifetime] || 30
    sig_alg = sig_alg(opts[:client_config], opts[:server_metadata])

    jti =
      case opts[:jwt_jti_callback] do
        callback when is_function(callback, 1) ->
          callback.(opts)

        nil ->
          gen_jti()
      end

    # Fix: capture the current time once so that `exp - iat == lifetime`
    # exactly; calling now() twice could straddle a second boundary.
    now = now()

    message =
      %{
        iss: client_id,
        sub: client_id,
        aud: issuer,
        jti: jti,
        exp: now + lifetime,
        iat: now
      }
      |> Map.merge(opts[:jwt_additional_claims] || %{})
      |> Jason.encode!()

    signature_key(jwks, sig_alg)
    |> JOSE.JWK.from_map()
    |> JOSE.JWS.sign(message, %{"alg" => sig_alg})
    |> JOSE.JWS.compact()
    |> elem(1)
  end

  # Picks the signing algorithm per the rules documented in the moduledoc.
  defp sig_alg(client_config, server_metadata) do
    case client_config do
      %{"token_endpoint_auth_signing_alg" => "none"} ->
        raise "illegal `token_endpoint_auth_signing_alg` in client configuration: `none`"

      %{"token_endpoint_auth_signing_alg" => alg} ->
        if alg in (server_metadata["token_endpoint_auth_signing_alg_values_supported"] || []) do
          alg
        else
          raise "client's token endpoint auth algorithm not supported by the authorization server"
        end

      _ ->
        server_metadata["token_endpoint_auth_signing_alg_values_supported"]
        |> Enum.find(fn alg ->
          alg in [
            "Ed25519",
            "Ed448",
            "EdDSA",
            "ES256",
            "ES384",
            "ES512",
            "Poly1305",
            "PS256",
            "PS384",
            "PS512",
            "RS256",
            "RS384",
            "RS512"
          ]
        end)
        |> case do
          alg when is_binary(alg) ->
            alg

          nil ->
            raise "no suitable signature algorithm supported by the authorization server"
        end
    end
  end

  # Returns the first key from the client's JWKS suitable for `sig_alg`.
  defp signature_key(jwks, sig_alg) do
    case JOSEUtils.JWKS.signature_keys(jwks, sig_alg) do
      [jwk | _] ->
        jwk

      _ ->
        raise "no suitable signature key found in client's `jwks`"
    end
  end

  # Random, collision-resistant JWT id.
  defp gen_jti(), do: :crypto.strong_rand_bytes(16) |> Base.encode64(padding: false)

  defp now(), do: System.system_time(:second)
end
|
lib/tesla_oauth2_client_auth/private_key_jwt.ex
| 0.862786 | 0.472744 |
private_key_jwt.ex
|
starcoder
|
defmodule Jeeves.Anonymous do
  @moduledoc """
  Implement an anonymous service.
  ### Usage
  To create the service:
  * Create a module that implements the API you want. This API will be
    expressed as a set of public functions. Each function will be
    defined to accept the current state as its first parameter. If a
    function wants to change the state, it must end with a call to the
    `Jeeves.Common.update_state/2` function (which will have been
    imported into your module automatically).
    For this example, we'll call the module `MyService`.
  * Add the line `use Jeeves.Anonymous` to the top of this module.
  To consume the service:
  * Create an instance of the service with `MyJeeves.run()`. You can pass
    initial state to the service as an optional parameter. This call returns
    a handle to this service instance.
  * Call the API functions in the service, using the handle as a first parameter.
  ### Example
      defmodule Accumulator do
        using Jeeves.Anonymous, state: 0
        def current_value(acc), do: acc
        def increment(acc, by \\ 1) do
          update_state(acc + by)
        end
      end
      with acc = Accumulator.run(10) do
        Accumulator.increment(acc, 3)
        Accumulator.increment(acc, 2)
        Accumulator.current_value(acc) # => 15
      end
  ### Options
  You can pass a keyword list to `use Jeeves.Anonymous:`
  * `state:` _value_
    Set the detail initial state of the service to `value`. This can be
    overridden by passing a different value to the `run` function.
  * `showcode:` _boolean_
    If truthy, dump a representation of the generated code to STDOUT during
    compilation.
  """

  @doc false
  # Delegates code generation to Jeeves.Common; anonymous services have no
  # registered name, hence `_name = nil`.
  defmacro __using__(opts \\ []) do
    Jeeves.Common.generate_common_code(
      __CALLER__.module,
      __MODULE__,
      opts,
      _name = nil)
  end

  @doc false
  defmacro generate_code_callback(_) do
    Jeeves.Common.generate_code(__CALLER__.module, __MODULE__)
  end

  @doc false
  # Generates a public API function whose body forwards to the GenServer.
  def generate_api_call(_options, {call, _body}) do
    quote do
      def(unquote(call), do: unquote(api_body(call)))
    end
  end

  @doc false
  # Builds a GenServer.call on the handle (first arg) with the remaining
  # args packed into a request tuple. `var!` hygiene-escapes the variables
  # bound by call_signature/1 so they refer to the caller's parameters.
  defp api_body(call) do
    { server, request } = call_signature(call)
    quote do
      GenServer.call(unquote(var!(server)), unquote(request))
    end
  end

  @doc false
  # Generates the matching handle_call/3 clause; the first parameter of the
  # original function becomes the GenServer state.
  def generate_handle_call(_options, {call, _body}) do
    { state, request } = call_signature(call)
    quote do
      def handle_call(unquote(request), _, unquote(var!(state))) do
        __MODULE__.Implementation.unquote(call)
        |> Jeeves.Common.create_genserver_response(unquote(var!(state)))
      end
    end
  end

  @doc false
  # The user's original function body is emitted unchanged into the
  # Implementation submodule.
  def generate_implementation(_options, {call, body}) do
    quote do
      def(unquote(call), unquote(body))
    end
  end

  @doc !"only used for pools"
  def generate_delegator(_options, {_call, _body}), do: nil

  # given def fred(store, a, b) return { store, { :fred, a, b }}
  @doc false
  def call_signature({ name, _, [ server | args ] }) do
    {
      var!(server),
      { :{}, [], [ name | Enum.map(args, fn a -> var!(a) end) ] }
    }
  end
end
|
lib/jeeves/anonymous.ex
| 0.772058 | 0.668454 |
anonymous.ex
|
starcoder
|
defmodule GitRekt do
  @moduledoc false

  alias GitRekt.Git

  defmodule GitCommit do
    @moduledoc """
    Represents a Git commit.
    """
    defstruct [:oid, :__ref__]
    @type t :: %__MODULE__{oid: Git.oid, __ref__: Git.commit}

    defimpl Inspect do
      def inspect(commit, _opts), do: "<GitCommit:#{Git.oid_fmt_short(commit.oid)}>"
    end
  end

  defmodule GitRef do
    @moduledoc """
    Represents a Git reference.
    """
    defstruct [:oid, :name, :prefix, :type]
    @type t :: %__MODULE__{oid: Git.oid, name: binary, prefix: binary, type: :branch | :tag}

    defimpl Inspect do
      def inspect(ref, _opts), do: "<GitRef:#{ref.prefix}#{ref.name}>"
    end

    defimpl String.Chars do
      def to_string(ref), do: Path.join(ref.prefix, ref.name)
    end
  end

  defmodule GitTag do
    @moduledoc """
    Represents a Git tag.
    """
    defstruct [:oid, :name, :__ref__]
    # Fix: the tag name is a binary, not the literal atom `:binary` — the
    # original spec (`name: :binary`) typed the field as an atom literal.
    @type t :: %__MODULE__{oid: Git.oid, name: binary, __ref__: Git.tag}

    defimpl Inspect do
      def inspect(tag, _opts), do: "<GitTag:#{tag.name}>"
    end
  end

  defmodule GitBlob do
    @moduledoc """
    Represents a Git blob.
    """
    defstruct [:oid, :__ref__]
    @type t :: %__MODULE__{oid: Git.oid, __ref__: Git.blob}

    defimpl Inspect do
      def inspect(blob, _opts), do: "<GitBlob:#{Git.oid_fmt_short(blob.oid)}>"
    end
  end

  defmodule GitTree do
    @moduledoc """
    Represents a Git tree.
    """
    defstruct [:oid, :__ref__]
    @type t :: %__MODULE__{oid: Git.oid, __ref__: Git.tree}

    defimpl Inspect do
      def inspect(tree, _opts), do: "<GitTree:#{Git.oid_fmt_short(tree.oid)}>"
    end
  end

  defmodule GitTreeEntry do
    @moduledoc """
    Represents a Git tree entry.
    """
    defstruct [:oid, :name, :mode, :type]
    @type t :: %__MODULE__{oid: Git.oid, name: binary, mode: integer, type: :blob | :tree}

    defimpl Inspect do
      def inspect(tree_entry, _opts), do: "<GitTreeEntry:#{tree_entry.name}>"
    end
  end

  defmodule GitIndex do
    @moduledoc """
    Represents a Git index.
    """
    defstruct [:__ref__]
    @type t :: %__MODULE__{__ref__: Git.index}

    defimpl Inspect do
      def inspect(index, _opts), do: "<GitIndex:#{inspect index.__ref__}>"
    end
  end

  defmodule GitIndexEntry do
    @moduledoc """
    Represents a Git index entry.
    """
    # Fields default to :undefined (Erlang convention) rather than nil; the
    # four enforced keys are the minimum needed to stage an entry.
    @enforce_keys [:mode, :oid, :path, :file_size]
    defstruct [
      ctime: :undefined,
      mtime: :undefined,
      dev: :undefined,
      ino: :undefined,
      mode: nil,
      uid: :undefined,
      gid: :undefined,
      file_size: 0,
      oid: nil,
      flags: :undefined,
      flags_extended: :undefined,
      path: nil
    ]
    @type t :: %__MODULE__{
      ctime: pos_integer | :undefined,
      mtime: pos_integer | :undefined,
      dev: pos_integer | :undefined,
      ino: pos_integer | :undefined,
      mode: pos_integer,
      uid: pos_integer | :undefined,
      gid: pos_integer | :undefined,
      file_size: non_neg_integer,
      oid: binary,
      flags: pos_integer | :undefined,
      flags_extended: pos_integer | :undefined,
      path: binary
    }

    defimpl Inspect do
      def inspect(index_entry, _opts), do: "<GitIndexEntry:#{index_entry.path}>"
    end
  end

  defmodule GitDiff do
    @moduledoc """
    Represents a Git diff.
    """
    defstruct [:__ref__]
    @type t :: %__MODULE__{__ref__: Git.diff}

    defimpl Inspect do
      def inspect(diff, _opts), do: "<GitDiff:#{inspect diff.__ref__}>"
    end
  end

  defmodule GitOdb do
    @moduledoc """
    Represents a Git ODB.
    """
    defstruct [:__ref__]
    @type t :: %__MODULE__{__ref__: Git.odb}

    defimpl Inspect do
      def inspect(odb, _opts), do: "<GitOdb:#{inspect odb.__ref__}>"
    end
  end

  defmodule GitWritePack do
    @moduledoc """
    Represents a Git writepack.
    """
    defstruct [:__ref__]
    @type t :: %__MODULE__{__ref__: Git.odb_writepack}

    defimpl Inspect do
      def inspect(writepack, _opts), do: "<GitWritePack:#{inspect writepack.__ref__}>"
    end
  end

  defmodule GitError do
    @moduledoc false
    defexception [:message, :code]
  end
end
|
apps/gitrekt/lib/gitrekt.ex
| 0.756897 | 0.471041 |
gitrekt.ex
|
starcoder
|
defmodule Galaxy.Gossip do
  @moduledoc """
  This clustering strategy uses multicast UDP to gossip node names
  to other nodes on the network. These packets are listened for on
  each node as well, and a connection will be established between the
  two nodes if they are reachable on the network, and share the same
  magic cookie. In this way, a cluster of nodes may be formed dynamically.

  The gossip protocol is extremely simple, with a prelude followed by the node
  name which sent the packet. The node name is parsed from the packet, and a
  connection attempt is made. It will fail if the two nodes do not share a cookie.

  By default, the gossip occurs on port 45892, using the multicast address
  230.1.1.251. A TTL of 1 will limit packets to the local network, and is
  the default TTL.

  Optionally, `delivery_mode: :broadcast` option can be set which disables multicast and
  only uses broadcasting. This limits connectivity to local network but works on in
  scenarios where multicast is not enabled. Use `multicast_addr` as the broadcast address.
  """
  use GenServer

  require Logger

  alias Galaxy.Gossip.Crypto

  @default_ip {0, 0, 0, 0}
  @default_port 45_892
  @default_multicast_addr {230, 1, 1, 251}
  @default_multicast_ttl 1
  @default_delivery_mode :multicast
  @default_security false

  def start_link(options) do
    # :name is for the supervisor/registration; the rest goes to init/1.
    {sup_opts, opts} = Keyword.split(options, [:name])
    GenServer.start_link(__MODULE__, opts, sup_opts)
  end

  @impl true
  def init(options) do
    # :secret_key_base (heartbeat encryption key material) and :topology
    # (module providing members/0 and connect_nodes/1) are mandatory.
    unless secret_key_base = options[:secret_key_base] do
      raise ArgumentError, "expected :secret_key_base option to be given"
    end

    unless topology = options[:topology] do
      raise ArgumentError, "expected :topology option to be given"
    end

    port = Keyword.get(options, :port, @default_port)
    if_addr = Keyword.get(options, :ip, @default_ip)
    multicast_addr = Keyword.get(options, :multicast_addr, @default_multicast_addr)
    force_secure = Keyword.get(options, :force_secure, @default_security)

    opts = [
      :binary,
      reuseaddr: true,
      broadcast: true,
      active: true,
      ip: if_addr,
      add_membership: {multicast_addr, {0, 0, 0, 0}}
    ]

    {:ok, socket} =
      :gen_udp.open(
        port,
        opts ++ multicast_opts(options) ++ reuse_port_opts()
      )

    state = %{
      topology: topology,
      socket: socket,
      port: port,
      multicast_addr: multicast_addr,
      secret_key_base: secret_key_base,
      force_secure: force_secure
    }

    # Send the first heartbeat immediately; subsequent heartbeats are
    # rescheduled with a random delay from handle_info(:heartbeat, _).
    send(self(), :heartbeat)

    {:ok, state}
  end

  # Raw SO_REUSEPORT socket option for BSD-derived systems
  # (level SOL_SOCKET = 0xFFFF, option 0x0200).
  @sol_socket 0xFFFF
  @so_reuseport 0x0200

  defp reuse_port_opts do
    case :os.type() do
      {:unix, os_name} when os_name in [:darwin, :freebsd, :openbsd, :netbsd] ->
        [{:raw, @sol_socket, @so_reuseport, <<1::native-32>>}]

      _ ->
        []
    end
  end

  defp multicast_opts(config) do
    case Keyword.get(config, :delivery_mode, @default_delivery_mode) do
      :broadcast ->
        []

      :multicast ->
        # Multicast-specific options are only applied when an explicit
        # :multicast_if interface is configured.
        if multicast_if = Keyword.get(config, :multicast_if, false) do
          multicast_ttl = Keyword.get(config, :multicast_ttl, @default_multicast_ttl)

          [
            multicast_if: multicast_if,
            multicast_ttl: multicast_ttl,
            multicast_loop: true
          ]
        else
          []
        end
    end
  end

  @impl true
  def handle_info(:heartbeat, state) do
    raw_payload = "heartbeat::" <> :erlang.term_to_binary(%{node: node()})
    {iv, encrypted_payload} = Crypto.encrypt(raw_payload, state.secret_key_base)

    # The 16-byte IV is sent as a prefix of the datagram; receivers split
    # it off in the <<iv::binary-16, data::binary>> clause below.
    :gen_udp.send(
      state.socket,
      state.multicast_addr,
      state.port,
      [iv, encrypted_payload]
    )

    # Random interval (up to 5s) avoids synchronized gossip bursts.
    Process.send_after(self(), :heartbeat, :rand.uniform(5_000))

    {:noreply, state}
  end

  # Plaintext "Peer:" announcement (unencrypted peer discovery).
  def handle_info({:udp, _, _, _, "Peer:" <> name}, state) do
    handle_peer(name, state)
  end

  # Plaintext heartbeat — only honored when force_secure is false.
  def handle_info({:udp, _, _, _, "heartbeat::" <> data}, state) do
    handle_heartbeat({:unsafe, data}, state)
  end

  # Encrypted heartbeat: first 16 bytes are the AES-CBC IV.
  # NOTE(review): any datagram of at least 16 bytes matches this clause;
  # non-heartbeat traffic is rejected by decryption/validation below.
  def handle_info({:udp, _, _, _, <<iv::binary-16, data::binary>>}, state) do
    handle_heartbeat({:safe, iv, data}, state)
  end

  # Ignore anything else arriving on the socket.
  def handle_info({:udp, _, _, _, _}, state) do
    {:noreply, state}
  end

  defp handle_peer(name, %{force_secure: false} = state) do
    name
    |> String.to_atom()
    |> maybe_connect_node(state)

    {:noreply, state}
  end

  defp handle_peer(name, state) do
    Logger.debug(["Gossip refused unsecure node ", name |> to_string(), " to connect"])
    {:noreply, state}
  end

  defp handle_heartbeat({:unsafe, payload}, %{force_secure: false} = state) do
    # Best-effort: an unparsable payload is silently ignored.
    with {:ok, unserialized_payload} <- unserialize_heartbeat_payload(payload) do
      maybe_connect_node(unserialized_payload, state)
    end

    {:noreply, state}
  end

  defp handle_heartbeat({:unsafe, _}, state) do
    Logger.debug("Gossip refused unsecure node to connect")
    {:noreply, state}
  end

  defp handle_heartbeat({:safe, iv, data}, state) do
    # Decrypt, check the "heartbeat::" prelude, then extract the node name.
    # Failures at any step are silently ignored (noise on the wire).
    with {:ok, bin_data} <- Crypto.decrypt(data, iv, state.secret_key_base),
         {:ok, payload} <- validate_heartbeat_message(bin_data),
         {:ok, unserialized_payload} <- unserialize_heartbeat_payload(payload) do
      maybe_connect_node(unserialized_payload, state)
    end

    {:noreply, state}
  end

  def address(ip),
    do: ip |> to_charlist() |> :inet.parse_address()

  defp validate_heartbeat_message("heartbeat::" <> payload), do: {:ok, payload}
  defp validate_heartbeat_message(_), do: {:error, :bad_request}

  @impl true
  def terminate(_reason, %{socket: socket}) do
    :gen_udp.close(socket)
  end

  defp unserialize_heartbeat_payload(payload) do
    # NOTE(review): binary_to_term/1 on network input can create atoms and
    # arbitrary terms; consider :erlang.binary_to_term(payload, [:safe]).
    unserialized_payload =
      payload
      |> :erlang.binary_to_term()
      |> Map.get(:node)

    {:ok, unserialized_payload}
  rescue
    ArgumentError ->
      {:error, :bad_format}
  end

  # Connects to `name` unless it is this node or already a member.
  defp maybe_connect_node(name, state) when is_atom(name) and name != node() do
    unless name in state.topology.members() do
      case state.topology.connect_nodes([name]) do
        {[], _} ->
          :ok

        {[name], _} ->
          Logger.debug(["Gossip connected ", name |> to_string(), " node"])
      end
    end

    :ok
  end

  defp maybe_connect_node(_, _) do
    :ok
  end
end
defmodule Galaxy.Gossip.Crypto do
  @moduledoc false

  # NOTE(review): :crypto.block_encrypt/4 and :crypto.block_decrypt/4 were
  # deprecated in OTP 23 and removed in OTP 24. Running on modern OTP
  # requires migrating to
  # :crypto.crypto_one_time(:aes_256_cbc, key, iv, data, encrypt?).
  # Left unchanged here to avoid silently altering the supported OTP range.

  # Encrypts `data` with AES-256-CBC, deriving the key as SHA-256(secret).
  # Returns {iv, ciphertext}; the random 16-byte IV must accompany the
  # ciphertext (the caller sends it as a prefix).
  def encrypt(data, secret) do
    iv = :crypto.strong_rand_bytes(16)
    key = :crypto.hash(:sha256, secret)
    padded_data = pkcs7_pad(data)
    encrypted_data = :crypto.block_encrypt(:aes_cbc256, key, iv, padded_data)
    {iv, encrypted_data}
  end

  # Decrypts and strips PKCS#7 padding.
  # Returns {:ok, plaintext} | {:error, :cant_decrypt | :malformed}.
  def decrypt(data, iv, secret) do
    with {:ok, padded_data} <- decrypt_block(data, iv, secret) do
      pkcs7_unpad(padded_data)
    end
  end

  defp decrypt_block(data, iv, secret) do
    key = :crypto.hash(:sha256, secret)

    try do
      {:ok, :crypto.block_decrypt(:aes_cbc256, key, iv, data)}
    rescue
      ArgumentError ->
        {:error, :cant_decrypt}
    end
  end

  # Pads to a multiple of the 16-byte AES block; a block-aligned input gets
  # a full extra block of 16, as PKCS#7 requires. Returns iodata.
  defp pkcs7_pad(data) do
    bytes_remaining = rem(byte_size(data), 16)
    padding_size = 16 - bytes_remaining
    [data, :binary.copy(<<padding_size>>, padding_size)]
  end

  defp pkcs7_unpad(<<>>) do
    {:ok, ""}
  end

  defp pkcs7_unpad(data) do
    padding_size = :binary.last(data)

    # Fix: valid PKCS#7 padding is always 1..16 bytes. The previous check
    # (`padding_size <= 16`) also accepted 0, in which case both
    # binary_part/3 slices were empty ("" === "") and any plaintext ending
    # in a 0x00 byte was "unpadded" to itself instead of being rejected.
    if padding_size in 1..16 do
      message_size = byte_size(data)
      left = binary_part(data, message_size, -padding_size)
      right = :binary.copy(<<padding_size>>, padding_size)

      if left === right,
        do: {:ok, binary_part(data, 0, message_size - padding_size)},
        else: {:error, :malformed}
    else
      {:error, :malformed}
    end
  end
end
|
lib/galaxy/gossip.ex
| 0.713631 | 0.589894 |
gossip.ex
|
starcoder
|
defmodule Exshome.Tag.Mapping do
  @moduledoc """
  Computes the tag mapping.
  """

  @enforce_keys [:type, :key, :value]
  defstruct [:type, :key, :child_key, :value]

  @type t() :: %__MODULE__{
          type: :simple | :nested_atom_map | :nested_binary_map,
          key: any(),
          child_key: String.t() | atom(),
          value: module()
        }

  # Expands every {module, tags} declaration into tag entries, groups the
  # entries by tag key, validates each group, and collapses it into either
  # a MapSet (simple tags) or a child_key => module map (nested tags).
  def compute_tag_mapping(params) do
    params
    |> Enum.flat_map(fn {module, tags} ->
      Enum.map(tags, &to_tag_data(module, &1))
    end)
    |> Enum.group_by(& &1.key)
    |> Map.new(fn {key, entries} ->
      {key, entries |> validate_partial_mapping(key) |> values_to_mapping()}
    end)
  end

  # A tag without options is a plain membership tag.
  defp to_tag_data(module, {tag, []}) do
    %__MODULE__{type: :simple, key: tag, value: module}
  end

  # A tag with a `key:` option maps a child key to the module; the child key
  # may be an atom or a binary, and the two kinds must not be mixed.
  defp to_tag_data(module, {parent_key, [key: child_key]})
       when is_atom(child_key) or is_binary(child_key) do
    kind = if is_atom(child_key), do: :nested_atom_map, else: :nested_binary_map

    %__MODULE__{
      type: kind,
      key: parent_key,
      child_key: child_key,
      value: module
    }
  end

  # Ensures all entries for `key` share one type and contain no duplicate
  # modules (nor duplicate child keys for nested tags). Returns the entries
  # unchanged so it can sit inside a pipeline.
  def validate_partial_mapping([%__MODULE__{type: type} | _] = values, key) do
    case Enum.uniq_by(values, & &1.type) do
      [_single_type] ->
        :ok

      mixed ->
        raise "#{key} has mixed types in modules: #{inspect(Enum.map(mixed, & &1.value))}"
    end

    case duplicated_by(values, :value) do
      [] -> :ok
      duplicate_values -> raise "#{key} has duplicate values: #{inspect(duplicate_values)}"
    end

    if type != :simple do
      case duplicated_by(values, :child_key) do
        [] -> :ok
        duplicate_keys -> raise "#{key} has duplicate keys: #{inspect(duplicate_keys)}"
      end
    end

    values
  end

  # Returns every value of `field` that occurs more than once.
  defp duplicated_by(entries, field) do
    for {value, count} <- Enum.frequencies_by(entries, &Map.from_struct(&1)[field]),
        count > 1,
        do: value
  end

  # Simple tags collapse to the set of tagged modules.
  def values_to_mapping([%__MODULE__{type: :simple} | _] = values) do
    MapSet.new(values, & &1.value)
  end

  # Nested tags collapse to a child_key => module map.
  def values_to_mapping([%__MODULE__{type: type} | _] = values)
      when type in [:nested_atom_map, :nested_binary_map] do
    Map.new(values, &{&1.child_key, &1.value})
  end
end
|
lib/exshome/tag/mapping.ex
| 0.801004 | 0.556339 |
mapping.ex
|
starcoder
|
defprotocol Focusable do
  @moduledoc """
  Protocol implemented by optics (lenses, prisms, traversals) that can
  view, modify, or set the part of a data structure they focus on.
  """

  @doc "View the data that an optic focuses on."
  def view(optic, structure)

  @doc "Modify the data that an optic focuses on."
  def over(optic, structure, f)

  @doc "Set the data that an optic focuses on."
  def set(optic, structure, value)
end
defmodule Focus do
  alias Focus.Types

  @moduledoc "Common functions usable by lenses, prisms, and traversals."

  @doc """
  Wrapper around Focusable.view/2

  Arguments can be passed in with either the lens first and data structure second or vice versa.
  Passing the data structure first allows Focus.view/2 to fit neatly in pipeline operations.

  ## Examples

      iex> marge = %{
      ...>   name: "Marge",
      ...>   address: %{
      ...>     street: "123 Fake St.",
      ...>     city: "Springfield"
      ...>   }
      ...> }
      iex> address_lens = Lens.make_lens(:address)
      iex> address_lens
      ...> |> Focus.view(marge)
      %{street: "123 Fake St.", city: "Springfield"}
      iex> marge
      ...> |> Focus.view(address_lens)
      %{street: "123 Fake St.", city: "Springfield"}
  """
  @spec view(Types.optic, Types.traversable) :: any | nil
  @spec view(Types.traversable, Types.optic) :: any | nil
  # The %{get: _, put: _} match identifies which argument is the optic,
  # enabling both (optic, data) and (data, optic) argument orders.
  def view(optic = %{get: _, put: _}, structure), do: Focusable.view(optic, structure)
  def view(structure, optic = %{get: _, put: _}), do: Focusable.view(optic, structure)

  @doc """
  Wrapper around Focusable.over/3

  Arguments can be passed in with either the lens first and data structure second or vice versa.
  Passing the data structure first allows Focus.over/3 to fit neatly in pipeline operations.

  ## Examples

      iex> marge = %{
      ...>   name: "Marge",
      ...>   address: %{
      ...>     street: "123 Fake St.",
      ...>     city: "Springfield"
      ...>   }
      ...> }
      iex> name_lens = Lens.make_lens(:name)
      iex> name_lens
      ...> |> Focus.over(marge, &String.upcase/1)
      %{
        name: "MARGE",
        address: %{
          street: "123 Fake St.",
          city: "Springfield"
        }
      }
      iex> marge
      ...> |> Focus.over(name_lens, &String.upcase/1)
      %{
        name: "MARGE",
        address: %{
          street: "123 Fake St.",
          city: "Springfield"
        }
      }
  """
  @spec over(Types.optic, Types.traversable, ((any) -> any)) :: Types.traversable
  @spec over(Types.traversable, Types.optic, ((any) -> any)) :: Types.traversable
  def over(optic = %{get: _, put: _}, structure, f), do: Focusable.over(optic, structure, f)
  def over(structure, optic = %{get: _, put: _}, f), do: Focusable.over(optic, structure, f)

  @doc """
  Wrapper around Focusable.set/3

  Arguments can be passed in with either the lens first and data structure second or vice versa.
  Passing the data structure first allows Focus.set/3 to fit neatly in pipeline operations.

  ## Examples

      iex> marge = %{
      ...>   name: "Marge",
      ...>   address: %{
      ...>     street: "123 Fake St.",
      ...>     city: "Springfield"
      ...>   }
      ...> }
      iex> name_lens = Lens.make_lens(:name)
      iex> name_lens
      ...> |> Focus.set(marge, "Marjorie")
      %{
        name: "Marjorie",
        address: %{
          street: "123 Fake St.",
          city: "Springfield"
        }
      }
      iex> marge
      ...> |> Focus.set(name_lens, "Marjorie")
      %{
        name: "Marjorie",
        address: %{
          street: "123 Fake St.",
          city: "Springfield"
        }
      }
  """
  @spec set(Types.traversable, Types.optic, (any)) :: Types.traversable
  @spec set(Types.optic, Types.traversable, any) :: Types.traversable
  def set(optic = %{get: _, put: _}, structure, v), do: Focusable.set(optic, structure, v)
  def set(structure, optic = %{get: _, put: _}, v), do: Focusable.set(optic, structure, v)

  @doc """
  Compose with most general lens on the left

  ## Examples

      iex> marge = %{
      ...>   name: "Marge",
      ...>   address: %{
      ...>     street: "123 Fake St.",
      ...>     city: "Springfield"
      ...>   }
      ...> }
      iex> address_lens = Lens.make_lens(:address)
      iex> street_lens = Lens.make_lens(:street)
      iex> composed = Focus.compose(address_lens, street_lens)
      iex> Focus.view(composed, marge)
      "123 Fake St."
  """
  @spec compose(Types.optic, Types.optic) :: Types.optic
  def compose(%{get: get_x, put: set_x}, %{get: get_y, put: set_y}) do
    %Lens{
      # A bad path at the outer lens short-circuits; the error tuple is
      # propagated instead of being passed to the inner lens.
      get: fn s ->
        case get_x.(s) do
          {:error, {:lens, :bad_path}} ->
            {:error, {:lens, :bad_path}}

          x ->
            get_y.(x)
        end
      end,
      put: fn s ->
        fn f ->
          case get_x.(s) do
            {:error, {:lens, :bad_path}} ->
              {:error, {:lens, :bad_path}}

            x ->
              # Write inner-first, then re-embed the updated inner value.
              set_x.(s).(set_y.(x).(f))
          end
        end
      end
    }
  end

  @doc """
  Infix lens composition

  ## Examples

      iex> import Focus
      iex> marge = %{name: "Marge", address: %{
      ...>   local: %{number: 123, street: "Fake St."},
      ...>   city: "Springfield"}
      ...> }
      iex> address_lens = Lens.make_lens(:address)
      iex> local_lens = Lens.make_lens(:local)
      iex> street_lens = Lens.make_lens(:street)
      iex> address_lens ~> local_lens ~> street_lens |> Focus.view(marge)
      "Fake St."
  """
  @spec (Types.optic) ~> (Types.optic) :: Types.optic
  def x ~> y do
    compose(x, y)
  end

  @doc """
  Compose a pair of lenses to operate at the same level as one another.
  Calling Focus.view/2, Focus.over/3, or Focus.set/3 on an alongside composed
  pair returns a two-element tuple of the result.

  ## Examples

      iex> nums = [1,2,3,4,5,6]
      iex> Focus.alongside(Lens.idx(0), Lens.idx(3))
      ...> |> Focus.view(nums)
      {1, 4}
      iex> bart = %{name: "Bart", parents: {"Homer", "Marge"}, age: 10}
      iex> Focus.alongside(Lens.make_lens(:name), Lens.make_lens(:age))
      ...> |> Focus.view(bart)
      {"Bart", 10}
  """
  @spec alongside(Types.optic, Types.optic) :: Types.optic
  def alongside(%{get: get_x, put: set_x}, %{get: get_y, put: set_y}) do
    %Lens{
      get: fn s ->
        {get_x.(s), get_y.(s)}
      end,
      put: fn s ->
        fn f ->
          {set_x.(s).(f), set_y.(s).(f)}
        end
      end
    }
  end

  @doc """
  Given a list of lenses and a structure, apply Focus.view/2 for each lens
  to the structure.

  ## Examples

      iex> homer = %{
      ...>   name: "Homer",
      ...>   job: "Nuclear Safety Inspector",
      ...>   children: ["Bart", "Lisa", "Maggie"]
      ...> }
      iex> lenses = Lens.make_lenses(homer)
      iex> [lenses.name, lenses.children]
      ...> |> Focus.view_list(homer)
      ["Homer", ["Bart", "Lisa", "Maggie"]]
  """
  @spec view_list(list(Types.optic), Types.traversable) :: [any]
  def view_list(lenses, structure) when is_list(lenses) do
    for lens <- lenses do
      Focus.view(lens, structure)
    end
  end

  @doc """
  Check whether an optic's target is present in a data structure.

  ## Examples

      iex> first_elem = Lens.idx(1)
      iex> first_elem |> Focus.has([0])
      false
      iex> name = Lens.make_lens(:name)
      iex> name |> Focus.has(%{name: "Homer"})
      true
  """
  @spec has(Types.optic, Types.traversable) :: boolean
  def has(optic, structure) do
    # Both nil and {:error, _} (bad path) count as "absent".
    case Focus.view(optic, structure) do
      nil -> false
      {:error, _} -> false
      _ -> true
    end
  end

  @doc """
  Check whether an optic's target is not present in a data structure.

  ## Examples

      iex> first_elem = Lens.idx(1)
      iex> first_elem |> Focus.hasnt([0])
      true
      iex> name = Lens.make_lens(:name)
      iex> name |> Focus.hasnt(%{name: "Homer"})
      false
  """
  @spec hasnt(Types.optic, Types.traversable) :: boolean
  def hasnt(optic, structure), do: !has(optic, structure)

  @doc """
  Partially apply a lens to Focus.over/3, fixing the lens argument and
  returning a function that takes a Types.traversable and an update function.

  ## Examples

      iex> upcase_name = Lens.make_lens(:name)
      ...> |> Focus.fix_over(&String.upcase/1)
      iex> %{name: "Bart", parents: {"Homer", "Marge"}}
      ...> |> upcase_name.()
      %{name: "BART", parents: {"Homer", "Marge"}}
      iex> fst = Lens.idx(0)
      iex> states = [:maryland, :texas, :illinois]
      iex> Focus.over(fst, states, &String.upcase(Atom.to_string(&1)))
      ["MARYLAND", :texas, :illinois]
  """
  @spec fix_over(Types.optic, ((any) -> any)) :: ((Types.traversable) -> Types.traversable)
  def fix_over(%{get: _, put: _} = lens, f \\ fn x -> x end) when is_function(f) do
    fn structure ->
      Focus.over(lens, structure, f)
    end
  end

  @doc """
  Partially apply a lens to Focus.set/3, fixing the optic argument and
  returning a function that takes a Types.traversable and a new value.

  ## Examples

      iex> name_setter = Lens.make_lens(:name)
      ...> |> Focus.fix_set
      iex> %{name: "Bart", parents: {"Homer", "Marge"}}
      ...> |> name_setter.("Lisa")
      %{name: "Lisa", parents: {"Homer", "Marge"}}
      iex> fst = Lens.idx(0)
      iex> states = [:maryland, :texas, :illinois]
      iex> Focus.over(fst, states, &String.upcase(Atom.to_string(&1)))
      ["MARYLAND", :texas, :illinois]
  """
  @spec fix_set(Types.optic) :: ((Types.traversable, any) -> Types.traversable)
  def fix_set(%{get: _, put: _} = lens) do
    fn structure, val ->
      Focus.set(lens, structure, val)
    end
  end

  @doc """
  Fix Focus.view/2 on a given optic. This partially applies Focus.view/2 with the given
  optic and returns a function that takes a Types.traversable structure.

  ## Examples

      iex> view_name = Lens.make_lens(:name)
      ...> |> Focus.fix_view
      iex> homer = %{name: "Homer"}
      iex> view_name.(homer)
      "Homer"
      iex> [homer, %{name: "Marge"}, %{name: "Bart"}]
      ...> |> Enum.map(&view_name.(&1))
      ["Homer", "Marge", "Bart"]
  """
  @spec fix_view(Types.optic) :: (Types.traversable -> any)
  def fix_view(%{get: _, put: _} = optic) do
    fn structure ->
      Focus.view(optic, structure)
    end
  end
end
|
lib/focus.ex
| 0.910962 | 0.503845 |
focus.ex
|
starcoder
|
defmodule Still.Compiler.ViewHelpers.ResponsiveImage do
  @moduledoc """
  Generates a set of images to ensure they are responsive.

  See `Still.Preprocessor.Image` for details on these transformations.
  """

  alias Still.Compiler.Incremental
  alias Still.Compiler.ViewHelpers.{ContentTag, UrlFor}

  import Still.Utils

  require Logger

  # Number of width buckets generated when the caller does not pass :sizes.
  @default_nr_of_sizes 4

  @doc """
  Returns an image tag with the `src` and `srcset`.

  If `:sizes` or `:transformations` are present in `opts`, they will be passed
  to `Still.Preprocessor.Image`.

  If `:sizes` is not set, the default will be 25%, 50%, 75% and 100% of the
  input file's width.
  """
  @spec render(file :: String.t(), list()) :: String.t()
  def render(file, opts \\ []) do
    # :sizes/:transformations go to the image pipeline; all remaining
    # options become attributes on the generated <img> tag.
    {image_opts, opts} = Keyword.split(opts, [:sizes, :transformations])

    output_files =
      file
      |> do_render(image_opts)
      |> get_output_files()

    ContentTag.render("img", nil, [
      {:src, render_src(output_files)},
      {:srcset, render_srcset(output_files)} | opts
    ])
  end

  # Renders the file through its incremental compiler node, which produces
  # the resized variants as a side effect and returns their metadata.
  defp do_render(file, image_opts) do
    opts = Map.new(image_opts)

    Incremental.Registry.get_or_create_file_process(file)
    |> Incremental.Node.render(get_render_data(file, opts))
  end

  # Extracts {width, path} pairs from the render metadata, sorted by width
  # ascending (so List.last/1 below yields the widest variant).
  defp get_output_files(%{metadata: %{image_output_files: output_files}}) do
    output_files |> Enum.sort_by(&elem(&1, 0))
  end

  # The plain `src` falls back to the largest generated variant.
  defp render_src(output_files) do
    {_, biggest_output_file} = output_files |> List.last()
    UrlFor.render(biggest_output_file)
  end

  # Builds the srcset attribute: "<url> <width>w" per variant.
  defp render_srcset(output_files) do
    output_files
    |> Enum.map(fn {size, file} ->
      "#{UrlFor.render(file)} #{size}w"
    end)
    |> Enum.join(", ")
  end

  # Caller supplied explicit :sizes — use them as-is.
  defp get_render_data(_, %{sizes: _} = image_opts) do
    %{image_opts: image_opts}
  end

  # No :sizes given: read the source image's width and generate
  # @default_nr_of_sizes evenly spaced widths up to the full width
  # (i.e. 25%, 50%, 75%, 100% for the default of 4).
  defp get_render_data(file, image_opts) do
    {:ok, %{width: width}} =
      file
      |> get_input_path()
      |> get_image_info()

    step_width = Integer.floor_div(width, @default_nr_of_sizes)

    image_opts =
      image_opts
      |> Map.put(
        :sizes,
        1..@default_nr_of_sizes
        |> Enum.map(&(&1 * step_width))
      )

    %{image_opts: image_opts}
  end
end
|
lib/still/compiler/view_helpers/responsive_image.ex
| 0.831349 | 0.425516 |
responsive_image.ex
|
starcoder
|
defmodule Turbo.Ecto.Utils do
  @moduledoc """
  Utils functions.
  """

  @doc """
  Wraps a result in a tagged tuple: `{:error, reason}` passes through
  unchanged, anything else becomes `{:ok, result}`.
  """
  @spec done(term()) :: {:ok, term()} | {:error, term()}
  def done({:error, reason}), do: {:error, reason}
  def done(result), do: {:ok, result}

  @doc """
  Converts all (string) map keys to atoms

  ## Examples

      iex> map = %{"a" => 1, "b" => %{"c" => 3, "d" => 4}}
      iex> Turbo.Ecto.Utils.symbolize_keys(map)
      %{a: 1, b: %{c: 3, d: 4}}
  """
  @spec symbolize_keys(map()) :: map()
  def symbolize_keys(map) do
    # NOTE(review): String.to_atom/1 creates atoms dynamically and atoms are
    # never garbage-collected — do not feed this unbounded external input.
    Enum.reduce(map, %{}, fn {k, v}, m ->
      # Recurse into nested maps so the whole tree is symbolized.
      v = if is_map(v), do: symbolize_keys(v), else: v
      map_atom_put(m, k, v)
    end)
  end

  # Only binary keys are converted; atom (or other) keys are kept as-is.
  defp map_atom_put(m, k, v) do
    if is_binary(k), do: Map.put(m, String.to_atom(k), v), else: Map.put(m, k, v)
  end

  @doc """
  Converts all (atoms) map keys to string.

  ## Example

      iex> map = %{a: 1, b: %{c: 3, d: 4}}
      iex> Turbo.Ecto.Utils.stringify_keys(map)
      %{"a" => 1, "b" => %{"c" => 3, "d" => 4}}
  """
  # Spec widened from `map() :: map()`: the function also recurses into
  # lists and passes any other term through unchanged.
  @spec stringify_keys(term()) :: term()
  def stringify_keys(map = %{}) do
    Enum.into(map, %{}, fn {k, v} -> {to_string(k), stringify_keys(v)} end)
  end

  def stringify_keys([head | rest]) do
    [stringify_keys(head) | stringify_keys(rest)]
  end

  def stringify_keys(not_a_map) do
    not_a_map
  end

  @doc """
  At the map object or list object, delete the key with Value is_nil or == "",
  and recursion is also considered.

  ## Examples

      iex> Turbo.Ecto.Utils.compaction!(%{nil_nil: nil, not_nil: "a value", nested: %{nil_val: nil, other: "other"}})
      %{not_nil: "a value", nested: %{other: "other"}}
      iex> Turbo.Ecto.Utils.compaction!(%{nil_nil: nil, not_nil: "a value", nested: %{nil_val: nil, other: "other", nested_empty: %{}}})
      %{not_nil: "a value", nested: %{other: "other"}}
      iex> Turbo.Ecto.Utils.compaction!([nil, "string", %{nil_nil: nil, not_nil: "a value", nested: %{nil_val: nil, other: "other", nested_empty: %{}}}, ["nested", nil, 2]])
      ["string", %{not_nil: "a value", nested: %{other: "other"}}, ["nested", 2]]
  """
  # Spec fixed: `Map.t()`/`List.t()` are not defined types, and the
  # ArgumentError is raised, not returned.
  @spec compaction!(map() | list()) :: map() | list()
  def compaction!(value)

  def compaction!(value) when is_map(value) do
    compactor = fn {k, v}, acc ->
      cond do
        # Drop empty nested maps entirely.
        is_map(v) and Enum.empty?(v) -> acc
        # Recurse into nested containers before keeping them.
        is_map(v) or is_list(v) -> Map.put_new(acc, k, compaction!(v))
        true -> Map.put_new(acc, k, v)
      end
    end

    value
    |> Enum.reduce(%{}, compactor)
    |> compactify!
  end

  def compaction!(value) when is_list(value) do
    compactor = fn elem, acc ->
      cond do
        is_list(elem) and Enum.empty?(elem) -> acc
        is_list(elem) or is_map(elem) -> acc ++ [compaction!(elem)]
        is_nil(elem) -> acc
        true -> acc ++ [elem]
      end
    end

    value
    |> Enum.reduce([], compactor)
    |> compactify!
  end

  @doc """
  Takes a map or list and removes keys or elements that have nil or empty
  values, or are empty maps.

  ## Examples

      iex> Turbo.Ecto.Utils.compactify!(%{nil_key: nil, not_nil: "nil"})
      %{not_nil: "nil"}
      iex> Turbo.Ecto.Utils.compactify!([1, nil, "string", %{key: :value}])
      [1, "string", %{key: :value}]
      iex> Turbo.Ecto.Utils.compactify!([a: nil, b: 2, c: "string"])
      [b: 2, c: "string"]
      iex> Turbo.Ecto.Utils.compactify!(%{empty: %{}, not: "not"})
      %{not: "not"}
      iex> Turbo.Ecto.Utils.compactify!({"not", "a map"})
      ** (ArgumentError) expecting a map or a list, got: {"not", "a map"}
  """
  def compactify!(map) when is_map(map) do
    map
    |> Enum.reject(fn {_k, v} -> is_nil(v) || empty_string?(v) || empty_map?(v) end)
    |> Enum.into(%{})
  end

  def compactify!(list) when is_list(list) do
    # Keyword lists are filtered by value; plain lists by element.
    list
    |> Keyword.keyword?()
    |> compactify!(list)
  end

  def compactify!(not_map_or_list),
    do:
      raise(ArgumentError, message: "expecting a map or a list, got: #{inspect(not_map_or_list)}")

  # Dispatch helpers for compactify!/1 on lists: first argument is the
  # result of Keyword.keyword?/1.
  def compactify!(true, list), do: Enum.reject(list, fn {_k, v} -> is_nil(v) end)
  def compactify!(false, list), do: Enum.reject(list, fn elem -> is_nil(elem) end)

  # Renamed from `empty_map`/`is_empty_string`: `?` suffix is the Elixir
  # convention for predicates (`is_` is reserved for guard macros).
  defp empty_map?(map),
    do: is_map(map) && not Map.has_key?(map, :__struct__) && Enum.empty?(map)

  defp empty_string?(s), do: s == ""
end
|
lib/turbo_ecto/utils.ex
| 0.833596 | 0.477432 |
utils.ex
|
starcoder
|
defmodule AnalysisPrep.Probability do
@moduledoc """
Provide basic probability functions
"""
import Statistics.Math, only: [factorial: 1, floor: 1]
import AnalysisPrep, only: [is_range: 1, sum_map: 1]
@doc """
The probability of an event, given a sample space of equiprobable outcomes.
The event can be either a set of outcomes, or a predicate (true for outcomes in the event).
Examples
iex> p([1,2], [1,2,3,4])
Ratio.new(2,4)
iex> p([1,2,3], 2..5)
Ratio.new(2,4)
iex> p([:a], %{a: 1, b: 2})
Ratio.new(1,3)
"""
def p(predicate, space) when is_function(predicate) do
event = such_that(predicate, space)
p(event, space)
end
def p(event, space) do
if !is_range(space) && is_map(space) do
space
|> Enum.filter(fn({k, _}) ->
Enum.member?(event, k)
end)
|> Enum.into(%{})
|> sum_map
|> Ratio.new(sum_map(space))
else
event = MapSet.new(event)
space = MapSet.new(space)
numerator = MapSet.intersection(event, space) |> MapSet.size
denominator = MapSet.size(space)
Ratio.new(numerator, denominator)
end
end
@doc """
Filter a data series by a predicate.
Examples
iex> such_that(fn(e) -> rem(e, 2) == 0 end, 1..10) |> Enum.to_list
[2,4,6,8,10]
iex> such_that(& &1 == :a, %{a: 1, b: 2})
%{a: 1}
"""
def such_that(predicate, space) do
cond do
is_range(space) -> such_that(predicate, space, :enumerable)
space |> Enum.filter(& predicate.(&1)) |> MapSet.new
is_map(space) -> such_that(predicate, space, :map)
true -> such_that(predicate, space, :enumerable)
end
end
def such_that(predicate, space, :enumerable) do
space |> Enum.filter(& predicate.(&1)) |> MapSet.new
end
def such_that(predicate, space, :map) do
space
|> Enum.filter(fn({k, _}) ->
predicate.(k)
end)
|> Enum.into(%{})
end
@doc """
Joint probability from two map-like distributions.
Example
iex> joint(%{a: 0.3, b: 0.6}, %{x: 0.25, y: 0.75})
%{{:a, :x} => 0.075, {:a, :y} => 0.22499999999999998, {:b, :x} => 0.15, {:b, :y} => 0.44999999999999996}
"""
def joint(a, b) do
Enum.flat_map(a, fn({k1, v1}) ->
Enum.map(b, fn({k2, v2}) ->
{{k1, k2}, v1 * v2}
end)
end)
|> Enum.into(%{})
end
@doc """
The cross produce of all items from two collections.
Uses arrays for each pair.
Examples
iex> cross(1..2, 4..5)
[[1,4],[1,5],[2,4],[2,5]]
"""
def cross(a, b) do
Enum.flat_map(a, fn(e) ->
Enum.map(b, fn(f) -> [e, f] end)
end)
end
@doc """
Get combinations of n items at a time, returned as combinations
Examples
iex> combinations(1..3)
[[3,2], [3,1], [2,1]]
iex> combinations(1..4, 3)
[[4,3,2], [4,3,1], [4,2,1], [3,2,1]]
"""
def combinations(list, n \\ 2) do
list |> Combination.combine(n)
end
@doc """
Generate samples from a series
Examples
iex> sample(0..5) <= 5
true
iex> sample [42]
42
iex> length sample(0..5, 2)
2
iex> sample []
nil
iex> sample 1..5, 0
nil
"""
def sample(list, n \\ 1)
def sample([], _), do: nil
def sample(_, 0), do: nil
def sample(list, 1) do
set_seed!()
hd get_sample(list, 1)
end
def sample(list, n) do
set_seed!()
get_sample(list, n)
end
defp get_sample(list, n) do
Enum.map(1..n, fn(_) -> Enum.random(list) end)
end
defp set_seed! do
# << i1 :: unsigned-integer-32, i2 :: unsigned-integer-32, i3 :: unsigned-integer-32>> = :crypto.strong_rand_bytes(12)
# :rand.seed(:exsplus, {i1, i2, i3})
end
@doc """
The number of ways to choose c items from a list of n items
Examples
    iex> choose(3,2)
    3
    iex> choose(12,4)
    495
    iex> choose(3,0)
    0
"""
@spec choose(integer,integer) :: integer
# NOTE(review): mathematically C(n, 0) == 1; returning 0 is kept for
# backwards compatibility with the documented behavior — confirm before
# changing.
def choose(_, 0), do: 0
def choose(n, c) do
  # floor/1 truncates the float division back to an integer, matching the
  # @spec. (The previous doctests showed float results — `3.0`, `495.0` —
  # which floor/1 never produces; they are corrected above.)
  floor(factorial(n) / (factorial(n - c) * factorial(c)))
end
end
|
lib/analysis_prep/probability.ex
| 0.830732 | 0.734643 |
probability.ex
|
starcoder
|
defmodule DG do
  @external_resource "README.md"
  # Module docs are sourced from README.md (the part after the <!-- DOC -->
  # marker) so the docs and the readme cannot drift apart.
  @moduledoc File.read!("README.md") |> String.split("<!-- DOC -->") |> List.last()

  # Wraps an Erlang :digraph reference (`dg`, backed by ETS tables and
  # therefore mutable) together with user-supplied options (`opts`).
  defstruct dg: nil, opts: []

  # --- Construction ---

  # Creates an empty graph. `:digraph_opts` (e.g. [:acyclic]) are forwarded
  # to :digraph.new/1; all remaining options are stored on the struct.
  def new(opts \\ []) do
    {digraph_opts, opts} = Keyword.pop(opts, :digraph_opts, [])
    %__MODULE__{dg: :digraph.new(digraph_opts), opts: opts}
  end

  # Creates a graph pre-populated with `vertices` (relies on the Collectable
  # implementation for DG via Enum.into/2).
  def new(vertices, opts) do
    Enum.into(vertices, DG.new(opts))
  end

  # Creates a graph pre-populated with `vertices` and `edges`.
  def new(vertices, edges, opts) do
    Enum.into(edges, new(vertices, opts))
  end

  # --- Option access ---

  # Returns the stored options.
  def options(%__MODULE__{opts: opts}) do
    opts
  end

  # Merges `new_opts` into the stored options, returning an updated struct.
  def options(%__MODULE__{opts: opts} = dg, new_opts) do
    %{dg | opts: Keyword.merge(opts, new_opts)}
  end

  # --- Thin 1:1 wrappers around the Erlang :digraph API ---
  # Each function unwraps the struct and delegates. Return values are the raw
  # :digraph results; mutations happen in place in the underlying ETS tables.

  def add_edge(%__MODULE__{dg: dg}, v1, v2) do
    :digraph.add_edge(dg, v1, v2)
  end
  def add_edge(%__MODULE__{dg: dg}, v1, v2, label) do
    :digraph.add_edge(dg, v1, v2, label)
  end
  def add_edge(%__MODULE__{dg: dg}, e, v1, v2, label) do
    :digraph.add_edge(dg, e, v1, v2, label)
  end
  def add_vertex(%__MODULE__{dg: dg}) do
    :digraph.add_vertex(dg)
  end
  def add_vertex(%__MODULE__{dg: dg}, v) do
    :digraph.add_vertex(dg, v)
  end
  def add_vertex(%__MODULE__{dg: dg}, v, label) do
    :digraph.add_vertex(dg, v, label)
  end
  def del_edge(%__MODULE__{dg: dg}, e) do
    :digraph.del_edge(dg, e)
  end
  def del_edges(%__MODULE__{dg: dg}, edges) do
    :digraph.del_edges(dg, edges)
  end
  def del_path(%__MODULE__{dg: dg}, v1, v2) do
    :digraph.del_path(dg, v1, v2)
  end
  def del_vertex(%__MODULE__{dg: dg}, v) do
    :digraph.del_vertex(dg, v)
  end
  def del_vertices(%__MODULE__{dg: dg}, vertices) do
    :digraph.del_vertices(dg, vertices)
  end
  # Deletes the underlying ETS tables; the struct must not be used afterwards.
  def delete(%__MODULE__{dg: dg}) do
    :digraph.delete(dg)
  end
  def edge(%__MODULE__{dg: dg}, e) do
    :digraph.edge(dg, e)
  end
  def edges(%__MODULE__{dg: dg}) do
    :digraph.edges(dg)
  end
  def edges(%__MODULE__{dg: dg}, v) do
    :digraph.edges(dg, v)
  end
  def get_cycle(%__MODULE__{dg: dg}, v) do
    :digraph.get_cycle(dg, v)
  end
  def get_path(%__MODULE__{dg: dg}, v1, v2) do
    :digraph.get_path(dg, v1, v2)
  end
  def get_short_cycle(%__MODULE__{dg: dg}, v) do
    :digraph.get_short_cycle(dg, v)
  end
  def get_short_path(%__MODULE__{dg: dg}, v1, v2) do
    :digraph.get_short_path(dg, v1, v2)
  end
  def in_degree(%__MODULE__{dg: dg}, v) do
    :digraph.in_degree(dg, v)
  end
  def in_edges(%__MODULE__{dg: dg}, v) do
    :digraph.in_edges(dg, v)
  end
  def in_neighbours(%__MODULE__{dg: dg}, v) do
    :digraph.in_neighbours(dg, v)
  end
  def info(%__MODULE__{dg: dg}) do
    :digraph.info(dg)
  end
  def no_edges(%__MODULE__{dg: dg}) do
    :digraph.no_edges(dg)
  end
  def no_vertices(%__MODULE__{dg: dg}) do
    :digraph.no_vertices(dg)
  end
  def out_degree(%__MODULE__{dg: dg}, v) do
    :digraph.out_degree(dg, v)
  end
  def out_edges(%__MODULE__{dg: dg}, v) do
    :digraph.out_edges(dg, v)
  end
  def out_neighbours(%__MODULE__{dg: dg}, v) do
    :digraph.out_neighbours(dg, v)
  end
  def vertex(%__MODULE__{dg: dg}, v) do
    :digraph.vertex(dg, v)
  end
  def vertices(%__MODULE__{dg: dg}) do
    :digraph.vertices(dg)
  end

  # --- Thin wrappers around :digraph_utils ---
  # Note: for the reachable/reaching family, :digraph_utils takes the vertex
  # list FIRST and the graph second; the wrappers flip the order so the graph
  # stays the first argument (pipe-friendly).

  def arborescence_root(%__MODULE__{dg: dg}) do
    :digraph_utils.arborescence_root(dg)
  end
  def components(%__MODULE__{dg: dg}) do
    :digraph_utils.components(dg)
  end
  def condensation(%__MODULE__{dg: dg}) do
    :digraph_utils.condensation(dg)
  end
  def cyclic_strong_components(%__MODULE__{dg: dg}) do
    :digraph_utils.cyclic_strong_components(dg)
  end
  def is_acyclic(%__MODULE__{dg: dg}) do
    :digraph_utils.is_acyclic(dg)
  end
  def is_arborescence(%__MODULE__{dg: dg}) do
    :digraph_utils.is_arborescence(dg)
  end
  def is_tree(%__MODULE__{dg: dg}) do
    :digraph_utils.is_tree(dg)
  end
  def loop_vertices(%__MODULE__{dg: dg}) do
    :digraph_utils.loop_vertices(dg)
  end
  def postorder(%__MODULE__{dg: dg}) do
    :digraph_utils.postorder(dg)
  end
  def preorder(%__MODULE__{dg: dg}) do
    :digraph_utils.preorder(dg)
  end
  def reachable(%__MODULE__{dg: dg}, vertices) do
    :digraph_utils.reachable(vertices, dg)
  end
  def reachable_neighbours(%__MODULE__{dg: dg}, vertices) do
    :digraph_utils.reachable_neighbours(vertices, dg)
  end
  def reaching(%__MODULE__{dg: dg}, vertices) do
    :digraph_utils.reaching(vertices, dg)
  end
  def reaching_neighbours(%__MODULE__{dg: dg}, vertices) do
    :digraph_utils.reaching_neighbours(vertices, dg)
  end
  def strong_components(%__MODULE__{dg: dg}) do
    :digraph_utils.strong_components(dg)
  end
  def subgraph(%__MODULE__{dg: dg}, vertices) do
    :digraph_utils.subgraph(dg, vertices)
  end
  def subgraph(%__MODULE__{dg: dg}, vertices, options) do
    :digraph_utils.subgraph(dg, vertices, options)
  end
  def topsort(%__MODULE__{dg: dg}) do
    :digraph_utils.topsort(dg)
  end
end
|
lib/dg.ex
| 0.767864 | 0.459864 |
dg.ex
|
starcoder
|
if Code.ensure_loaded?(Fiet) and Code.ensure_loaded?(Timex) do
  defmodule RssWatcher.Feed.Fiet do
    @moduledoc """
    A Fiet + Timex based RSS parser. Used by default in subscriptions.
    Add the following to your dependencies:
    ```
    {:fiet, "~> 0.2.1"},
    {:timex, "~> 3.0"}
    ```
    And add `:timex` to your list of extra_applications.
    """
    @moduledoc since: "0.1.0"
    alias RssWatcher.Feed
    require Logger
    @behaviour RssWatcher.Feed

    @impl true
    @spec parse_feed(String.t(), Keyword.t()) :: {:error, any} | {:ok, RssWatcher.Feed.t()}
    def parse_feed(xml, _) do
      # Parse the raw XML, then require a parseable top-level timestamp;
      # individual items with bad timestamps are dropped (parse_item/1
      # returns nil for them) rather than failing the whole feed.
      with {:ok, parsed} <- Fiet.parse(xml),
           {:ok, updated_at} <- parse_timestamp(parsed.updated_at) do
        items =
          parsed.items
          |> Enum.map(&parse_item/1)
          |> Enum.filter(& &1)

        {:ok,
         %Feed{
           title: trim(parsed.title),
           link: parsed.link,
           description: trim(parsed.description),
           updated_at: updated_at,
           items: items
         }}
      else
        {:error, _reason} = otherwise -> otherwise
      end
    end

    # Converts a parsed Fiet item into a Feed.Item, or nil (logged) when its
    # timestamp cannot be parsed.
    defp parse_item(item) do
      with {:ok, published_at} <- parse_timestamp(item.published_at) do
        %Feed.Item{
          id: item.id,
          title: trim(item.title),
          description: trim(item.description),
          published_at: published_at,
          link: item.link
        }
      else
        _ ->
          # Logger.warn/1 is deprecated; Logger.warning/1 is the supported call.
          Logger.warning("Invalid timestamp for feed item: #{item.published_at}")
          nil
      end
    end

    # Trims surrounding whitespace, passing nil through unchanged.
    def trim(nil), do: nil
    def trim(string) when is_binary(string), do: String.trim(string)

    # Timestamp formats tried in order, most common RSS formats first.
    @timestamp_formats [
      "{RFC822}",
      "{RFC822z}",
      "{RFC1123}",
      "{RFC1123z}",
      "{RFC3339}",
      "{RFC3339z}"
    ]
    defp parse_timestamp(nil), do: {:error, :no_timestamp}
    defp parse_timestamp(timestamp), do: try_parse_timestamp(timestamp, @timestamp_formats)

    # Tries each candidate format until one parses; errors once all fail.
    defp try_parse_timestamp(timestamp, []),
      do: {:error, "Unknown format for timestamp #{timestamp}."}

    defp try_parse_timestamp(timestamp, [format | rest]) do
      case Timex.parse(timestamp, format) do
        {:ok, _result} = outcome -> outcome
        _ -> try_parse_timestamp(timestamp, rest)
      end
    end
  end
end
|
lib/rss_watcher/feed/adapter/feit.ex
| 0.758511 | 0.797004 |
feit.ex
|
starcoder
|
defmodule PokerHandValue do
  @moduledoc """
  A library to rate and compare poker hands. Any hand with 5 or more cards can be rated and is therefore suitable for texas hold em and other > 5 card poker types.
  ## Example
  ```elixir
  hand = "As Ad Ac Js Jd"
  PokerHandValue.rate_hand(hand) # => {:full_house, 7.1411}
  hand2 = "2c 3d 4h 5s 6c"
  PokerHandValue.rate_hand(hand2) # => {:straight, 5.06}
  PokerHandValue.compare_hands(hand, hand2) # => :gt
  ```
  """
  import List, only: [first: 1, last: 1]
  import Map, only: [values: 1]

  import Enum,
    only: [
      map: 2,
      filter: 2,
      sort: 1,
      take: 2,
      sort_by: 2,
      group_by: 2,
      reverse: 1,
      uniq_by: 2,
      member?: 2,
      reduce: 2
    ]

  @doc """
  Compares two hands. returns :gt if the value of the first hand is greater, :lt if lesses and :eq if they have same value
  ## Examples
      iex> PokerHandValue.compare_hands("Js Jh 2h 2c 3h", "Qs 2c 5h Jh 10s")
      :gt
      iex> PokerHandValue.compare_hands("As Ah Ad 7h 3d", "2h 3h 4h 5h 6h")
      :lt
      iex> PokerHandValue.compare_hands("Ks Kh 2d 2h 3d", "Kc Kd 2c 2s 3c")
      :eq
  """
  def compare_hands(first, second) when is_binary(first),
    do: compare_hands(rate_hand(first), second)

  def compare_hands(first, second) when is_binary(second),
    do: compare_hands(first, rate_hand(second))

  def compare_hands(first, second) do
    # Only the numeric score matters; the hand-name atom is discarded.
    {_, first} = first
    {_, second} = second

    cond do
      first > second -> :gt
      second > first -> :lt
      first === second -> :eq
    end
  end

  @doc """
  Converts a hand to a rating
  ## Examples
      iex> PokerHandValue.rate_hand("Qh Jh 2h 7h 3h")
      {:flush, 6.1211070302}
      iex> PokerHandValue.rate_hand("As Ah Ad 7h 3d")
      {:three_of_a_kind, 4.140703}
      iex> PokerHandValue.rate_hand("As Ah Kd 2h 3d")
      {:pair, 2.14130302}
  """
  def rate_hand(hand) when is_binary(hand), do: rate_hand(parse_hand(hand))

  def rate_hand(hand) do
    # The integer part encodes the hand category (straight flush = 9 down to
    # high card = 1); the fractional part breaks ties within a category.
    case get_best_hand(hand, get_hands()) do
      {:straight_flush, score} -> {:straight_flush, 9 + score}
      {:four_of_a_kind, score} -> {:four_of_a_kind, 8 + score}
      {:full_house, score} -> {:full_house, 7 + score}
      {:flush, score} -> {:flush, 6 + score}
      {:straight, score} -> {:straight, 5 + score}
      {:three_of_a_kind, score} -> {:three_of_a_kind, 4 + score}
      {:two_pair, score} -> {:two_pair, 3 + score}
      {:pair, score} -> {:pair, 2 + score}
      {:high_card, score} -> {:high_card, 1 + score}
    end
  end

  @doc """
  Matches every hand
  ## Examples
      iex> PokerHandValue.match_high_card([{:hearts, 4}, {:diamonds, 2}, {:spades, 9}, {:hearts, 11}, {:diamonds, 3}])
      {:high_card, 0.1109040302}
      iex> PokerHandValue.match_high_card([{:hearts, 4}, {:diamonds, 2}, {:spades, 9}, {:hearts, 11}, {:diamonds, 3}, {:clubs, 12}])
      {:high_card, 0.1211090403}
  """
  def match_high_card(hand) when is_binary(hand), do: match_high_card(parse_hand(hand))

  def match_high_card(hand) do
    hand
    |> remove_suits
    |> sort()
    |> reverse()
    |> take(5)
    |> count_score(:high_card)
  end

  @doc """
  Matches a pair
  ## Examples
      iex> PokerHandValue.match_pair([{:hearts, 4}, {:diamonds, 2}, {:spades, 9}, {:hearts, 9}, {:diamonds, 3}])
      {:pair, 0.09040302}
      iex> PokerHandValue.match_pair([{:hearts, 5}, {:diamonds, 2}, {:spades, 13}, {:hearts, 13}, {:diamonds, 3}])
      {:pair, 0.13050302}
      iex> PokerHandValue.match_pair([{:hearts, 5}, {:diamonds, 2}, {:spades, 13}, {:hearts, 13}, {:diamonds, 3}, {:clubs, 7}])
      {:pair, 0.13070503}
      iex> PokerHandValue.match_pair([{:hearts, 5}, {:diamonds, 2}, {:spades, 13}, {:hearts, 7}, {:diamonds, 3}])
      nil
  """
  def match_pair(hand) when is_binary(hand), do: match_pair(parse_hand(hand))

  def match_pair(hand) do
    hand = remove_suits(hand)
    pair = get_highest_pair(hand)

    cond do
      pair != nil ->
        # Score the pair rank followed by the three highest kickers.
        count_score([pair | remove_cards_from_hand(hand, pair) |> take(3)], :pair)

      true ->
        nil
    end
  end

  @doc """
  Matches two pairs
  ## Examples
      iex> PokerHandValue.match_two_pair([{:hearts, 4}, {:diamonds, 4}, {:spades, 9}, {:hearts, 6}, {:diamonds, 6}])
      {:two_pair, 0.060409}
      iex> PokerHandValue.match_two_pair([{:hearts, 4}, {:diamonds, 4}, {:spades, 9}, {:hearts, 6}, {:diamonds, 6}, {:hearts, 2}])
      {:two_pair, 0.060409}
      iex> PokerHandValue.match_two_pair([{:hearts, 4}, {:diamonds, 2}, {:spades, 9}, {:hearts, 6}, {:diamonds, 6}])
      nil
      iex> PokerHandValue.match_two_pair([{:hearts, 4}, {:diamonds, 2}, {:spades, 9}, {:hearts, 10}, {:diamonds, 6}])
      nil
  """
  def match_two_pair(hand) when is_binary(hand), do: match_two_pair(parse_hand(hand))

  def match_two_pair(hand) do
    hand = remove_suits(hand)
    pair = get_highest_pair(hand)
    hand = remove_cards_from_hand(hand, pair)
    second_pair = get_highest_pair(hand)
    hand = remove_cards_from_hand(hand, second_pair)

    cond do
      pair != nil && second_pair != nil ->
        # Both pair ranks plus the single highest remaining kicker.
        count_score([pair, second_pair] ++ [first(hand)], :two_pair)

      true ->
        nil
    end
  end

  @doc """
  Matches three of a kind
  ## Examples
      iex> PokerHandValue.match_three_of_a_kind([{:hearts, 12}, {:diamonds, 12}, {:spades, 12}, {:hearts, 3}, {:diamonds, 6}])
      {:three_of_a_kind, 0.120603}
      iex> PokerHandValue.match_three_of_a_kind([{:hearts, 12}, {:diamonds, 12}, {:spades, 12}, {:hearts, 3}, {:diamonds, 6}, {:spades, 2}])
      {:three_of_a_kind, 0.120603}
      iex> PokerHandValue.match_three_of_a_kind([{:hearts, 12}, {:diamonds, 12}, {:spades, 10}, {:hearts, 3}, {:diamonds, 6}])
      nil
      iex> PokerHandValue.match_three_of_a_kind([{:hearts, 12}, {:diamonds, 10}, {:spades, 9}, {:hearts, 3}, {:diamonds, 6}])
      nil
  """
  def match_three_of_a_kind(hand) when is_binary(hand),
    do: match_three_of_a_kind(parse_hand(hand))

  def match_three_of_a_kind(hand) do
    hand = remove_suits(hand)
    three_of_a_kind = get_highest_three_of_a_kind(hand)

    cond do
      three_of_a_kind != nil ->
        count_score(
          [three_of_a_kind | remove_cards_from_hand(hand, three_of_a_kind) |> take(2)],
          :three_of_a_kind
        )

      true ->
        nil
    end
  end

  @doc """
  Matches a straight
  ## Examples
      iex> PokerHandValue.match_straight([{:hearts, 4}, {:diamonds, 5}, {:spades, 6}, {:hearts, 7}, {:diamonds, 8}])
      {:straight, 0.08}
      iex> PokerHandValue.match_straight([{:hearts, 14}, {:diamonds, 5}, {:spades, 3}, {:hearts, 4}, {:diamonds, 2}])
      {:straight, 0.05}
      iex> PokerHandValue.match_straight([{:hearts, 14}, {:diamonds, 5}, {:spades, 3}, {:hearts, 4}, {:diamonds, 2}, {:spades, 10}])
      {:straight, 0.05}
      iex> PokerHandValue.match_straight([{:hearts, 14}, {:diamonds, 13}, {:spades, 12}, {:hearts, 11}, {:diamonds, 10}])
      {:straight, 0.14}
      iex> PokerHandValue.match_straight([{:hearts, 2}, {:diamonds, 3}, {:spades, 4}, {:hearts, 5}, {:diamonds, 6}, {:clubs, 7}])
      {:straight, 0.07}
      iex> PokerHandValue.match_straight([{:hearts, 13}, {:diamonds, 5}, {:spades, 9}, {:hearts, 7}, {:diamonds, 8}])
      nil
  """
  def match_straight(hand) when is_binary(hand), do: match_straight(parse_hand(hand))

  def match_straight(hand) do
    hand = remove_suits(hand)
    # An ace (14) also plays low in a wheel (A-2-3-4-5), so prefix a 1.
    hand = prefix_one_if_contains_ace(hand)

    # Candidate starting ranks: every card low enough to begin a 5-card run.
    straight_from =
      hand
      |> sort()
      |> take(length(hand) - 4)
      |> filter(&is_straight_from_number!(&1, hand))

    cond do
      length(straight_from) > 0 ->
        # Bug fix: with more than 5 cards a hand can contain several straights;
        # the hand's value is the HIGHEST one, so take the last (largest)
        # qualifying start instead of the first.
        count_score([last(straight_from) + 4], :straight)

      true ->
        nil
    end
  end

  @doc """
  Matches a flush
  ## Examples
      iex> PokerHandValue.match_flush([{:hearts, 9}, {:hearts, 11}, {:hearts, 14}, {:hearts, 7}, {:hearts, 3}])
      {:flush, 0.1411090703}
      iex> PokerHandValue.match_flush([{:hearts, 9}, {:hearts, 11}, {:hearts, 14}, {:hearts, 7}, {:hearts, 3}, {:hearts, 2}])
      {:flush, 0.1411090703}
      iex> PokerHandValue.match_flush([{:hearts, 9}, {:diamonds, 11}, {:hearts, 2}, {:spades, 7}, {:hearts, 3}])
      nil
  """
  def match_flush(hand) when is_binary(hand), do: match_flush(parse_hand(hand))

  def match_flush(hand) do
    flush =
      hand
      |> sort_by(fn card -> elem(card, 1) end)
      |> group_by(fn card -> elem(card, 0) end)
      |> values()
      |> filter(fn suit -> length(suit) >= 5 end)
      |> last()

    cond do
      flush != nil ->
        count_score(
          flush |> remove_suits |> sort() |> reverse(),
          :flush
        )

      true ->
        nil
    end
  end

  @doc """
  Matches a full house, specifying the three of a kind value
  ## Examples
      iex> PokerHandValue.match_full_house([{:hearts, 9}, {:diamonds, 9}, {:spades, 9}, {:hearts, 3}, {:diamonds, 3}])
      {:full_house, 0.0903}
      iex> PokerHandValue.match_full_house([{:hearts, 2}, {:diamonds, 9}, {:spades, 9}, {:hearts, 3}, {:diamonds, 3}])
      nil
  """
  def match_full_house(hand) when is_binary(hand), do: match_full_house(parse_hand(hand))

  def match_full_house(hand) do
    hand = remove_suits(hand)
    three_of_a_kind = get_highest_three_of_a_kind(hand)
    hand = remove_cards_from_hand(hand, three_of_a_kind)
    pair = get_highest_pair(hand)

    cond do
      three_of_a_kind != nil && pair != nil -> count_score([three_of_a_kind, pair], :full_house)
      true -> nil
    end
  end

  @doc """
  Matches four of a kind, specifying the value
  ## Examples
      iex> PokerHandValue.match_four_of_a_kind([{:hearts, 12}, {:diamonds, 12}, {:spades, 12}, {:hearts, 12}, {:diamonds, 6}])
      {:four_of_a_kind, 0.1206}
      iex> PokerHandValue.match_four_of_a_kind([{:hearts, 12}, {:diamonds, 12}, {:spades, 12}, {:hearts, 12}, {:diamonds, 6}, {:spades, 4}])
      {:four_of_a_kind, 0.1206}
      iex> PokerHandValue.match_four_of_a_kind([{:hearts, 12}, {:diamonds, 11}, {:spades, 12}, {:hearts, 12}, {:diamonds, 6}])
      nil
  """
  def match_four_of_a_kind(hand) when is_binary(hand), do: match_four_of_a_kind(parse_hand(hand))

  def match_four_of_a_kind(hand) do
    hand = remove_suits(hand)
    four_of_a_kind = get_highest_four_of_a_kind(hand)
    hand = remove_cards_from_hand(hand, four_of_a_kind)

    cond do
      four_of_a_kind != nil ->
        count_score([four_of_a_kind] ++ [first(hand)], :four_of_a_kind)

      true ->
        nil
    end
  end

  @doc """
  Matches a straight flush
  ## Examples
      iex> PokerHandValue.match_straight_flush([{:hearts, 9}, {:hearts, 5}, {:hearts, 6}, {:hearts, 7}, {:hearts, 8}])
      {:straight_flush, 0.09}
      iex> PokerHandValue.match_straight_flush([{:hearts, 9}, {:hearts, 5}, {:hearts, 10}, {:hearts, 7}, {:hearts, 8}])
      nil
  """
  def match_straight_flush(hand) when is_binary(hand), do: match_straight_flush(parse_hand(hand))

  def match_straight_flush(hand) do
    # Group by suit and look for a straight inside any suit with >= 5 cards.
    straight_flush =
      hand
      |> uniq_by(fn card -> elem(card, 1) end)
      |> group_by(fn card -> elem(card, 0) end)
      |> values()
      |> filter(fn suit -> length(suit) >= 5 end)
      |> map(&match_straight(&1))
      |> filter(&(&1 != nil))

    cond do
      length(straight_flush) > 0 ->
        {:straight_flush, straight_flush |> first() |> elem(1)}

      true ->
        nil
    end
  end

  @doc """
  Parses a space-separated hand string into {suit, rank} tuples
  ## Examples
      iex> PokerHandValue.parse_hand("Qs 10s 2d Ah 5c")
      [{:spades, 12}, {:spades, 10}, {:diamonds, 2}, {:hearts, 14}, {:clubs, 5}]
      iex> PokerHandValue.parse_hand("Invalid hand")
      ** (RuntimeError) Unable to parse hand
  """
  def parse_hand(hand) do
    hand
    |> String.split()
    |> map(fn card ->
      # The last character is the suit; everything before it is the rank
      # (so "10s" splits into {"10", "s"}).
      card_list = card |> String.split_at(-1)

      try do
        {
          lookup(card_list |> elem(1) |> String.upcase()),
          lookup(card_list |> elem(0) |> String.upcase())
        }
      rescue
        _ -> raise "Unable to parse hand"
      end
    end)
  end

  # Rank / suit lookup tables; numeric ranks fall through to to_integer.
  defp lookup("A"), do: 14
  defp lookup("K"), do: 13
  defp lookup("Q"), do: 12
  defp lookup("J"), do: 11
  defp lookup("S"), do: :spades
  defp lookup("D"), do: :diamonds
  defp lookup("H"), do: :hearts
  defp lookup("C"), do: :clubs

  defp lookup(num) do
    String.to_integer(num)
  end

  # Maps a hand atom (e.g. :flush) to its matcher function name (:match_flush).
  defp get_method_name(hand) do
    String.to_atom("match_" <> to_string(hand))
  end

  # Tries matchers from strongest to weakest; :high_card always matches, so
  # the recursion terminates.
  defp get_best_hand(hand, [hand_to_try | all_hands]) when is_list(hand) do
    best_hand = apply(__MODULE__, get_method_name(hand_to_try), [hand])

    cond do
      best_hand != nil -> best_hand
      true -> get_best_hand(hand, all_hands)
    end
  end

  # True when all 5 ranks number..number+4 are present in the hand.
  # (list -- list removes one occurrence per element, hence the -5 check.)
  defp is_straight_from_number!(number, hand) do
    length(hand -- make_straight_from(number)) - length(hand) == -5
  end

  # Strips suits, leaving a sorted list of ranks.
  defp remove_suits(hand) do
    hand
    |> map(&elem(&1, 1))
    |> sort()
  end

  # Zero-pads single-digit rank strings so every rank occupies two digits
  # in the score string.
  defp make_two_digit(digit) do
    cond do
      String.length(digit) == 1 -> "0" <> digit
      true -> digit
    end
  end

  defp get_highest_four_of_a_kind(hand) do
    get_highest(hand, 4)
  end

  defp get_highest_three_of_a_kind(hand) do
    get_highest(hand, 3)
  end

  defp get_highest_pair(hand) do
    get_highest(hand, 2)
  end

  # Highest rank appearing exactly `amount` times, or nil.
  defp get_highest(hand, amount) do
    pair =
      hand
      |> group_by(& &1)
      |> values()
      |> filter(&(length(&1) == amount))
      |> last()

    cond do
      is_list(pair) -> first(pair)
      true -> nil
    end
  end

  # Encodes up to 5 ranks as a fractional tie-break score: each rank becomes
  # two digits, concatenated most-significant first, then scaled below 1.
  defp count_score(cards, value) do
    cards
    |> take(5)
    |> map(&Integer.to_string(&1))
    |> map(&make_two_digit(&1))
    |> reduce(fn item, acc -> acc <> item end)
    |> make_fractional
    |> (fn score -> {value, score} end).()
  end

  # "1411" -> 0.1411 (divide by 10^length).
  defp make_fractional(score) do
    import String, only: [to_integer: 1, pad_trailing: 3]
    to_integer(score) / to_integer(pad_trailing("1", String.length(score) + 1, "0"))
  end

  defp remove_cards_from_hand(hand, cards) when cards == nil do
    hand
  end

  # Removes ALL copies of the given rank, returning the rest high-to-low.
  defp remove_cards_from_hand(hand, cards) do
    hand
    |> filter(&(&1 != cards))
    |> sort()
    |> reverse()
  end

  # Adds a low ace (1) so A-2-3-4-5 wheels are detected as straights.
  defp prefix_one_if_contains_ace(hand) do
    cond do
      member?(hand, 14) -> [1 | hand]
      true -> hand
    end
  end

  defp make_straight_from(num) do
    num..(num + 4)
    |> reverse()
  end

  # Hand categories ordered strongest first, used by get_best_hand/2.
  defp get_hands do
    [
      :straight_flush,
      :four_of_a_kind,
      :full_house,
      :flush,
      :straight,
      :three_of_a_kind,
      :two_pair,
      :pair,
      :high_card
    ]
  end
end
|
lib/poker_hand_value.ex
| 0.801664 | 0.870377 |
poker_hand_value.ex
|
starcoder
|
defmodule Aoc.Year2020.Day01 do
  @moduledoc """
  Solution to Day 01 of 2020: Report Repair
  ## --- Day 1: Report Repair ---
  After saving Christmas five years in a row, you've decided to take a vacation at
  a nice resort on a tropical island. Surely, Christmas will go on without you.
  The tropical island has its own currency and is entirely cash-only. The gold
  coins used there have a little picture of a starfish; the locals just call them
  *stars*. None of the currency exchanges seem to have heard of them, but somehow,
  you'll need to find fifty of these coins by the time you arrive so you can pay
  the deposit on your room.
  To save your vacation, you need to get all *fifty stars* by December 25th.
  Collect stars by solving puzzles. Two puzzles will be made available on each day
  in the Advent calendar; the second puzzle is unlocked when you complete the
  first. Each puzzle grants *one star*. Good luck!
  Before you leave, the Elves in accounting just need you to fix your *expense
  report* (your puzzle input); apparently, something isn't quite adding up.
  Specifically, they need you to *find the two entries that sum to `2020`* and
  then multiply those two numbers together.
  For example, suppose your expense report contained the following:
  `1721
  979
  366
  299
  675
  1456
  `In this list, the two entries that sum to `2020` are `1721` and `299`.
  Multiplying them together produces `1721 * 299 = 514579`, so the correct answer
  is `*514579*`.
  Of course, your expense report is much larger. *Find the two entries that sum to
  `2020`; what do you get if you multiply them together?*
  """

  @target 2020

  @doc """
  Return the product of the two numbers that sum to 2020
  """
  def part_1(input) do
    sorted = parse_input(input)
    # Two-pointer scan: one list ascending, the other descending.
    # A solution is assumed to exist; otherwise this raises a MatchError.
    {n, m} = find_pair(sorted, Enum.reverse(sorted), @target)
    n * m
  end

  @doc """
  Return the product of the three numbers that sum to 2020
  """
  def part_2(input) do
    # Fix each candidate `h` in turn and run the pair search for 2020 - h
    # over the remaining (sorted) tail. Raises a MatchError if no triple
    # sums to 2020.
    {a, b, c} =
      input
      |> parse_input()
      |> find_triple()

    a * b * c
  end

  # Split the input string into a sorted list of integers
  defp parse_input(input) do
    input
    |> String.split()
    |> Enum.map(&String.to_integer/1)
    |> Enum.sort()
  end

  # Walks the sorted list: for each head `h`, searches the tail for a pair
  # summing to @target - h. Returns {h, n, m} or nil if exhausted.
  defp find_triple([]), do: nil

  defp find_triple([h | t]) do
    case find_pair(t, Enum.reverse(t), @target - h) do
      nil -> find_triple(t)
      {n, m} -> {h, n, m}
    end
  end

  # Sum the smallest and the biggest number of the list
  # - if the sum is bigger than the target, drop the biggest number
  # - if the sum is smaller than the target, drop the smallest number
  # - else, we found the target and return the two numbers in a tuple
  # - if either list runs out, no pair exists and nil is returned
  defp find_pair([], _, _target), do: nil
  defp find_pair(_, [], _target), do: nil
  defp find_pair([hs | _] = s, [hr | tr], target) when hs + hr > target, do: find_pair(s, tr, target)
  defp find_pair([hs | ts], [hr | _] = r, target) when hs + hr < target, do: find_pair(ts, r, target)
  defp find_pair([hs | _], [hr | _], _target), do: {hs, hr}
end
|
lib/aoc/year_2020/day_01.ex
| 0.772874 | 0.698895 |
day_01.ex
|
starcoder
|
defmodule SocialParser do
  @moduledoc """
  SocialParser is used to parse out common social message components
  such as hashtags, mentions and urls.
  """

  @whitespace_chars [?\s, ?\t, ?\n]
  @breaking_chars [?#, ?@, ?+ | @whitespace_chars]

  @doc """
  Returns a list of three element tuples (`{:type, "content", {start_pos, end_pos}}`) containing
  all components found for the given `message`
  Prefixes used
  * `#` for hashtags
  * `@` or `+` for mentions
  * `http://` or `https://` for links
  Usage
      iex> SocialParser.parse("hi @you checkout http://example.com/ that +someone hosted #example")
      [
          {:text, "hi ", {0, 3}},
          {:mention, "@you", {4, 8}},
          {:text, " checkout ", {9, 19}},
          {:link, "http://example.com/", {20, 39}},
          {:text, " that ", {40, 46}},
          {:mention, "+someone", {47, 55}},
          {:text, " hosted ", {56, 64}},
          {:hashtag, "#example", {65, 73}}
      ]
  """
  @spec parse(binary) :: list
  def parse(message) do
    # Components are accumulated in reverse order (and with reversed
    # content); parse/2 and add_to_acc/3 undo both reversals.
    message
    |> parse([])
    |> Enum.reverse
  end

  @doc """
  Returns a map of all components for a given `message`
  Usage
      iex> SocialParser.extract("hi @you checkout http://example.com/ that +someone hosted #example")
      %{
          hashtags: ["#example"],
          mentions: ["@you", "+someone"],
          links: ["http://example.com/"],
          text: ["hi ", " checkout ", " that ", " hosted "]
      }
  """
  @spec extract(binary) :: map
  def extract(message) do
    message
    |> parse
    |> Enum.group_by(&map_key(elem(&1, 0)), &elem(&1, 1))
  end

  @doc """
  Returns a map of all components for a given `message` filtered by a list of
  atoms specified in the `components`
  The available atoms are, `:hashtags`, `:mentions`, `:links` and `:text`
  Usage
      iex> SocialParser.extract("hi @you checkout http://example.com/", [:mentions, :links])
      %{
          mentions: ["@you"],
          links: ["http://example.com/"],
      }
  """
  @spec extract(binary, list) :: map
  def extract(message, components) do
    message
    |> extract
    |> Map.take(components)
  end

  # Pluralizes component tags into the keys used by extract/1.
  defp map_key(:hashtag), do: :hashtags
  defp map_key(:mention), do: :mentions
  defp map_key(:link), do: :links
  defp map_key(key), do: key

  defp parse(<<>>, acc),
    do: acc

  # Link/hashtag/mention prefixes are seeded into the accumulator REVERSED
  # ("//:ptth") because component content is built back-to-front and
  # reversed once when the component is finalized.
  defp parse("http://" <> <<rest::binary>>, acc),
    do: parse_component(rest, acc, "//:ptth", :link)

  defp parse("https://" <> <<rest::binary>>, acc),
    do: parse_component(rest, acc, "//:sptth", :link)

  defp parse(<<?#::utf8, rest::binary>>, acc),
    do: parse_component(rest, acc, "#", :hashtag)

  defp parse(<<?@::utf8, rest::binary>>, acc),
    do: parse_component(rest, acc, "@", :mention)

  defp parse(<<?+::utf8, rest::binary>>, acc),
    do: parse_component(rest, acc, "+", :mention)

  # Bug fix: the matched codepoint must be re-encoded with the utf8 modifier.
  # The previous `<<c>>` truncated codepoints > 255 to a single byte,
  # corrupting any non-ASCII character. (Identical behavior for ASCII.)
  defp parse(<<c::utf8, rest::binary>>, acc),
    do: parse_component(rest, acc, <<c::utf8>>, :text)

  # A link embedded mid-component ends the current component.
  defp parse_component("http://" <> <<rest::binary>>, acc, value, type) do
    acc = add_to_acc(acc, type, value)
    parse_component(rest, acc, "//:ptth", :link)
  end

  defp parse_component("https://" <> <<rest::binary>>, acc, value, type) do
    acc = add_to_acc(acc, type, value)
    parse_component(rest, acc, "//:sptth", :link)
  end

  # Whitespace terminates a link...
  defp parse_component(<<c::utf8, rest::binary>>, acc, value, :link)
       when c in @whitespace_chars do
    acc = add_to_acc(acc, :link, value)
    parse(<<c::utf8>> <> rest, acc)
  end

  # ...but is absorbed into plain text.
  defp parse_component(<<c::utf8, rest::binary>>, acc, value, :text)
       when c in @whitespace_chars do
    parse_component(rest, acc, <<c::utf8>> <> value, :text)
  end

  # A breaking character (#, @, +, whitespace) ends any non-link component.
  defp parse_component(<<c::utf8, rest::binary>>, acc, value, type)
       when type != :link and c in @breaking_chars do
    acc = add_to_acc(acc, type, value)
    parse(<<c::utf8>> <> rest, acc)
  end

  # Default: prepend the codepoint to the (reversed) component content.
  # Uses <<c::utf8>> — see the note on parse/2 above.
  defp parse_component(<<c::utf8, rest::binary>>, acc, value, type) do
    parse_component(rest, acc, <<c::utf8>> <> value, type)
  end

  defp parse_component(<<>>, acc, value, type) do
    add_to_acc(acc, type, value)
  end

  # Finalizes a component: un-reverses the content and records its position
  # as {start, start + grapheme_length}; the next component starts at
  # end + 1 (the convention used by the doctests above).
  defp add_to_acc(acc, key, value) do
    count = get_next_count(acc)
    value = String.reverse(value)
    value_len = String.length(value)
    value_pos = {count, count + value_len}
    [{key, value, value_pos}] ++ acc
  end

  defp get_next_count([]), do: 0
  defp get_next_count([{_, _, {_, count}} | _]), do: count + 1
end
|
lib/social_parser.ex
| 0.739328 | 0.515132 |
social_parser.ex
|
starcoder
|
defmodule Rolodex.Field do
@moduledoc """
Shared logic for parsing parameter fields.
`Rolodex.RequestBody`, `Rolodex.Response`, and `Rolodex.Schema` each use this
module to parse parameter metadata. `new/1` transforms a bare map into a
standardized parameter definition format. `get_refs/1` takes a parameter map
returned by `new/1 and traverses it, searching for any refs to a RequestBody,
Response, or Schema.
"""
alias Rolodex.{Headers, RequestBody, Response, Schema}
@type ref_type :: :headers | :request_body | :response | :schema
@ref_types [:headers, :request_body, :response, :schema]
@doc """
Parses parameter data into maps with a standardized shape.
Every field within the map returned will have a `type`. Some fields, like lists
and objects, have other data nested within. Other fields hold references (called
`refs`) to `Rolodex.RequestBody`, `Rolodex.Response` or `Rolodex.Schema` modules.
You can think of the output as an AST of parameter data that a `Rolodex.Processor`
behaviour can serialize into documentation output.
## Examples
### Parsing primitive data types (e.g. `integer`)
Valid options for a primitive are:
- `enum` - a list of possible values
- `desc`
- `default`
- `format`
- `maximum`
- `minimum`
- `required`
# Creating a simple field with a primitive type
iex> Rolodex.Field.new(:integer)
%{type: :integer}
# With additional options
iex> Rolodex.Field.new(type: :integer, desc: "My count", enum: [1, 2])
%{type: :integer, desc: "My count", enum: [1, 2]}
### OpenAPI string formats
When serializing docs for OpenAPI (i.e. Swagger), the following primitive field
types will be converted into string formats:
- `date`
- `datetime`
- `date-time`
- `password`
- `byte`
- `binary`
- `uuid`
- `email`
- `uri`
For example:
# The following field
iex> Rolodex.Field.new(:date)
%{type: :date}
# Will be serialized like the following for OpenAPI docs
%{type: :string, format: :date}
### Parsing collections: objects and lists
# Create an object
iex> Rolodex.Field.new(type: :object, properties: %{id: :uuid, name: :string})
%{
type: :object,
properties: %{
id: %{type: :uuid},
name: %{type: :string}
}
}
# Shorthand for creating an object: a top-level map or keyword list
iex> Rolodex.Field.new(%{id: :uuid, name: :string})
%{
type: :object,
properties: %{
id: %{type: :uuid},
name: %{type: :string}
}
}
# Create a list
iex> Rolodex.Field.new(type: :list, of: [:string, :uuid])
%{
type: :list,
of: [
%{type: :string},
%{type: :uuid}
]
}
# Shorthand for creating a list: a list of types
iex> Rolodex.Field.new([:string, :uuid])
%{
type: :list,
of: [
%{type: :string},
%{type: :uuid}
]
}
### Arbitrary collections
Use the `one_of` type to describe a field that can be one of the provided types
iex> Rolodex.Field.new(type: :one_of, of: [:string, :uuid])
%{
type: :one_of,
of: [
%{type: :string},
%{type: :uuid}
]
}
### Working with refs
iex> defmodule DemoSchema do
...> use Rolodex.Schema
...>
...> schema "DemoSchema" do
...> field :id, :uuid
...> end
...> end
iex>
iex> # Creating a field with a `Rolodex.Schema` as the top-level type
iex> Rolodex.Field.new(DemoSchema)
%{type: :ref, ref: Rolodex.FieldTest.DemoSchema}
iex>
iex> # Creating a collection field with various members, including a nested schema
iex> Rolodex.Field.new(type: :list, of: [:string, DemoSchema])
%{
type: :list,
of: [
%{type: :string},
%{type: :ref, ref: Rolodex.FieldTest.DemoSchema}
]
}
"""
@spec new(atom() | module() | list() | map()) :: map()
def new(opts)
def new(type) when is_atom(type), do: new(type: type)
def new(opts) when is_list(opts) do
case Keyword.keyword?(opts) do
true ->
opts
|> Map.new()
|> new()
# List shorthand: if a plain list is provided, turn it into a `type: :list` field
false ->
new(%{type: :list, of: opts})
end
end
def new(opts) when is_map(opts) and map_size(opts) == 0, do: %{}
def new(opts) when is_map(opts), do: create_field(opts)
defp create_field(%{type: :object, properties: props} = metadata) do
resolved_props = Map.new(props, fn {k, v} -> {k, new(v)} end)
%{metadata | properties: resolved_props}
end
defp create_field(%{type: :list, of: items} = metadata) do
resolved_items = Enum.map(items, &new/1)
%{metadata | of: resolved_items}
end
defp create_field(%{type: :one_of, of: items} = metadata) do
resolved_items = Enum.map(items, &new/1)
%{metadata | of: resolved_items}
end
defp create_field(%{type: type} = metadata) do
cond do
get_ref_type(type) in @ref_types -> %{type: :ref, ref: type}
true -> metadata
end
end
# Object shorthand: if a map is provided without a reserved `type: <type>`
# identifier, turn it into a `type: :object` field
defp create_field(data) when is_map(data) do
new(%{type: :object, properties: data})
end
@doc """
Traverses a formatted map returned by `new/1` and returns a unique list of all
refs to `Rolodex.Response` and `Rolodex.Schema` modules within.

## Examples

    iex> defmodule NestedSchema do
    ...>   use Rolodex.Schema
    ...>
    ...>   schema "NestedSchema" do
    ...>     field :id, :uuid
    ...>   end
    ...> end
    iex>
    iex> defmodule TopSchema do
    ...>   use Rolodex.Schema
    ...>
    ...>   schema "TopSchema", desc: "An example" do
    ...>     # Atomic field with no description
    ...>     field :id, :uuid
    ...>
    ...>     # Atomic field with a description
    ...>     field :name, :string, desc: "The schema's name"
    ...>
    ...>     # A field that refers to another, nested object
    ...>     field :other, NestedSchema
    ...>
    ...>     # A field that is an array of items of one-or-more types
    ...>     field :multi, :list, of: [:string, NestedSchema]
    ...>
    ...>     # A field that is one of the possible provided types
    ...>     field :any, :one_of, of: [:string, NestedSchema]
    ...>   end
    ...> end
    iex>
    iex> # Searching for refs in a formatted map
    iex> Rolodex.Field.new(type: :list, of: [TopSchema, NestedSchema])
    ...> |> Rolodex.Field.get_refs()
    [Rolodex.FieldTest.NestedSchema, Rolodex.FieldTest.TopSchema]
"""
@spec get_refs(module() | map()) :: [module()]
def get_refs(field)

# A collection of subtypes (`:list`/`:one_of`): gather refs from every member.
# A MapSet accumulator guarantees the returned list is duplicate-free.
def get_refs(%{of: items}) when is_list(items) do
  items
  |> Enum.reduce(MapSet.new(), &collect_refs_for_item/2)
  |> Enum.to_list()
end

# An object field: scan each property value for refs.
def get_refs(%{type: :object, properties: props}) when is_map(props) do
  props
  |> Enum.reduce(MapSet.new(), fn {_, item}, refs -> collect_refs_for_item(item, refs) end)
  |> Enum.to_list()
end

# A ref leaf: the referenced module itself is the result.
def get_refs(%{type: :ref, ref: object}) when is_atom(object) do
  [object]
end

# Any other map: scan all values recursively.
def get_refs(field) when is_map(field) do
  field
  |> Enum.reduce(MapSet.new(), fn {_, value}, refs -> collect_refs_for_item(value, refs) end)
  |> Enum.to_list()
end

# Non-map input cannot contain refs.
def get_refs(_), do: []
# Folds any refs found in `item` into the accumulator set. Unioning with
# an empty set is a no-op, so no special case is needed for items that
# contain no refs.
defp collect_refs_for_item(item, refs) do
  item
  |> get_refs()
  |> MapSet.new()
  |> MapSet.union(refs)
end
@doc """
Takes a module and determines if it is a known shared module ref type: Headers,
RequestBody, Response, or Schema.
"""
@spec get_ref_type(module()) :: ref_type() | :error
def get_ref_type(mod) do
  # Probe each known shared-module kind in turn; returns `:error` when
  # `mod` is none of them (e.g. a plain atom type such as `:string`).
  cond do
    RequestBody.is_request_body_module?(mod) -> :request_body
    Response.is_response_module?(mod) -> :response
    Schema.is_schema_module?(mod) -> :schema
    Headers.is_headers_module?(mod) -> :headers
    true -> :error
  end
end
end
|
lib/rolodex/field.ex
| 0.924849 | 0.647659 |
field.ex
|
starcoder
|
defmodule Asteroid.ObjectStore.DeviceCode.Riak do
  @moduledoc """
  Riak implementation of the `Asteroid.ObjectStore.DeviceCode` behaviour
  ## Initializing a Riak bucket type
  ```console
  $ sudo riak-admin bucket-type create ephemeral_token '{"props":{"datatype":"map", "backend":"leveldb_mult"}}'
  ephemeral_token created
  $ sudo riak-admin bucket-type activate ephemeral_token
  ephemeral_token has been activated
  ```
  ## Options
  The options (`Asteroid.ObjectStore.DeviceCode.opts()`) are:
  - `:bucket_type`: an `String.t()` for the bucket type that must be created beforehand in
  Riak. No defaults, **mandatory**
  - `bucket_name`: a `String.t()` for the bucket name. Defaults to `"device_code"`
  - `:purge_interval`: the `integer()` interval in seconds the purge process will be triggered,
  or `:no_purge` to disable purge. Defaults to `300` (5 minutes)
  - `:rows`: the maximum number of results that a search will return. Defaults to `1_000_000`.
  Search is used by the purge process.
  ## Installation function
  The `install/1` function executes the following actions:
  - it installs a custom schema (`asteroid_object_store_device_code_riak_schema`)
  - it creates a new index (`asteroid_object_store_device_code_riak_index`) on the bucket
  (and not the bucket type - so as to avoid collisions)
  This is necessary to:
  1. Efficiently index expiration timestamp
  2. Disable indexing of raw device code data
  ## Purge process
  The purge process uses the `Singleton` library. Therefore the purge process will be unique
  per cluster (and that's probably what you want if you use Riak).
  """

  require Logger

  @behaviour Asteroid.ObjectStore.DeviceCode

  @impl true
  def install(opts) do
    bucket_type = opts[:bucket_type] || raise "Missing bucket type"
    bucket_name = opts[:bucket_name] || "device_code"

    with :ok <-
           Riak.Search.Schema.create(
             schema_name(),
             (:code.priv_dir(:asteroid) ++ '/riak/object_store_device_code_schema.xml')
             |> File.read!()
           ),
         :ok <- Riak.Search.Index.put(index_name(), schema_name()),
         :ok <- Riak.Search.Index.set({bucket_type, bucket_name}, index_name()) do
      Logger.info(
        "#{__MODULE__}: created device code store `#{bucket_name}` " <>
          "of bucket type `#{bucket_type}`"
      )

      :ok
    else
      e ->
        # BUG FIX: this message was previously built and discarded without
        # ever being logged.
        Logger.error(
          "#{__MODULE__}: failed to create device code store `#{bucket_name}` " <>
            "of bucket type `#{bucket_type}` (reason: #{inspect(e)})"
        )

        {:error, "#{inspect(e)}"}
    end
  catch
    :exit, e ->
      bucket_type = opts[:bucket_type] || raise "Missing bucket type"
      bucket_name = opts[:bucket_name] || "device_code"

      # BUG FIX: same as above — log the failure instead of discarding the
      # message string.
      Logger.error(
        "#{__MODULE__}: failed to create device code store `#{bucket_name}` " <>
          "of bucket type `#{bucket_type}` (reason: #{inspect(e)})"
      )

      {:error, "#{inspect(e)}"}
  end

  @impl true
  def start_link(opts) do
    opts = Keyword.merge([purge_interval: 300], opts)

    # we launch the process anyway because we need to return a process
    # but the singleton will do nothing if the value is `:no_purge`
    Singleton.start_child(__MODULE__.Purge, opts, __MODULE__)
  end

  @impl true
  def get(device_code_id, opts) do
    bucket_type = opts[:bucket_type] || raise "Missing bucket type"
    bucket_name = opts[:bucket_name] || "device_code"

    case Riak.find(bucket_type, bucket_name, device_code_id) do
      res when not is_nil(res) ->
        # The stored value is a base64-encoded, term_to_binary-serialized
        # device code kept in a CRDT map register.
        device_code =
          res
          |> Riak.CRDT.Map.get(:register, "device_code_data_binary")
          |> Base.decode64!(padding: false)
          |> :erlang.binary_to_term()

        Logger.debug(
          "#{__MODULE__}: getting device code `#{device_code_id}`, " <>
            "value: `#{inspect(device_code)}`"
        )

        {:ok, device_code}

      nil ->
        Logger.debug("#{__MODULE__}: getting device code `#{device_code_id}`, " <> "value: `nil`")

        {:ok, nil}
    end
  catch
    :exit, e ->
      {:error, "#{inspect(e)}"}
  end

  @impl true
  def get_from_user_code(user_code, opts) do
    # Escape double quotes for the Solr query syntax.
    query = "user_code:\"#{String.replace(user_code, "\"", "\\\"")}\""

    case search(query, opts) do
      {:ok, [device_code_id]} ->
        get(device_code_id, opts)

      {:ok, [_ | _]} ->
        {:error, "Duplicate user code in the device authorization flow"}

      {:error, _} = error ->
        error
    end
  end

  @impl true
  def put(device_code, opts) do
    bucket_type = opts[:bucket_type] || raise "Missing bucket type"
    bucket_name = opts[:bucket_name] || "device_code"

    riak_map = Riak.CRDT.Map.new()

    device_code_data_binary =
      device_code
      |> :erlang.term_to_binary()
      |> Base.encode64(padding: false)
      |> Riak.CRDT.Register.new()

    riak_map = Riak.CRDT.Map.put(riak_map, "device_code_data_binary", device_code_data_binary)

    riak_map =
      Riak.CRDT.Map.put(
        riak_map,
        "user_code",
        Riak.CRDT.Register.new(device_code.user_code)
      )

    # Expiration is indexed (as a stringified int register) so the purge
    # process can search on it; codes without expiration are only logged.
    riak_map =
      if device_code.data["exp"] != nil do
        Riak.CRDT.Map.put(
          riak_map,
          "exp_int",
          Riak.CRDT.Register.new(to_string(device_code.data["exp"]))
        )
      else
        Logger.warn(
          "Inserting device code with no expiration: #{String.slice(device_code.id, 1..5)}..."
        )

        riak_map
      end

    # NOTE(review): the return value of Riak.update/4 is ignored and `:ok`
    # is returned unconditionally — confirm whether update failures can
    # only surface via the `catch` below.
    Riak.update(riak_map, bucket_type, bucket_name, device_code.id)

    Logger.debug(
      "#{__MODULE__}: stored device code `#{device_code.id}`, " <>
        "value: `#{inspect(device_code)}`"
    )

    :ok
  catch
    :exit, e ->
      {:error, "#{inspect(e)}"}
  end

  @impl true
  def delete(device_code_id, opts) do
    bucket_type = opts[:bucket_type] || raise "Missing bucket type"
    bucket_name = opts[:bucket_name] || "device_code"

    Riak.delete(bucket_type, bucket_name, device_code_id)

    Logger.debug("#{__MODULE__}: deleted device code `#{device_code_id}`")

    :ok
  catch
    :exit, e ->
      {:error, "#{inspect(e)}"}
  end

  @doc """
  Searches in Riak-stored device code
  This function is used internally and made available for user convenience. Device codes are
  stored in the following fields:
  | Field name | Indexed as |
  |-----------------------------------|:-------------:|
  | device_code_data_binary_register | *not indexed* |
  | user_code_register | string |
  | exp_int_register | int |
  Note that you are responsible for escaping values accordingly with Solr escaping.
  """
  @spec search(String.t(), Asteroid.ObjectStore.DeviceCode.opts()) ::
          {:ok, [Asteroid.OAuth2.DeviceAuthorization.device_code()]}
          | {:error, any()}
  def search(search_query, opts) do
    case Riak.Search.query(index_name(), search_query, rows: opts[:rows] || 1_000_000) do
      {:ok, {:search_results, result_list, _, _}} ->
        # "_yz_rk" is the Riak key of each matching document.
        {:ok,
         for {_index_name, attribute_list} <- result_list do
           :proplists.get_value("_yz_rk", attribute_list)
         end}

      {:error, _} = error ->
        error
    end
  end

  @spec schema_name() :: String.t()
  defp schema_name(), do: "asteroid_object_store_device_code_riak_schema"

  @doc false
  @spec index_name() :: String.t()
  def index_name(), do: "asteroid_object_store_device_code_riak_index"
end
|
lib/asteroid/object_store/device_code/riak.ex
| 0.869984 | 0.773794 |
riak.ex
|
starcoder
|
defmodule Comeonin do
  @moduledoc """
  Defines a behaviour for higher-level password hashing functions.
  """

  @type opts :: keyword
  @type password :: binary
  @type user_struct :: map | nil

  @doc """
  Hashes a password and returns the password hash in a map, with the
  password set to nil.
  In the default implementation, the key for the password hash is
  `:password_hash`. A different key can be used by using the `hash_key`
  option.
  ## Example with Ecto
  The `put_pass_hash` function below is an example of how you can use
  `add_hash` to add the password hash to the Ecto changeset.

      defp put_pass_hash(%Ecto.Changeset{valid?: true, changes:
          %{password: password}} = changeset) do
        change(changeset, add_hash(password))
      end

      defp put_pass_hash(changeset), do: changeset

  This function will return a changeset with `%{password_hash: password_hash, password: nil}`
  added to the `changes` map.
  """
  @callback add_hash(password, opts) :: map

  @doc """
  Checks the password by comparing its hash with the password hash found
  in a user struct, or map.
  The first argument to `check_pass` should be a user struct, a regular
  map, or nil.
  In the default implementation, if the input to the first argument,
  the user struct, is nil, then the `no_user_verify` function is run,
  so as to prevent user enumeration. This can be disabled by setting
  the `hide_user` option to false.
  ## Example
  The following is an example of calling this function with no options:

      def verify_user(%{"password" => password} = params) do
        params
        |> Accounts.get_by()
        |> check_pass(password)
      end

  The `Accounts.get_by` function in this example takes the user parameters
  (for example, email and password) as input and returns a user struct or nil.
  """
  @callback check_pass(user_struct, password, opts) :: {:ok, map} | {:error, String.t()}

  @doc """
  Runs the password hash function, but always returns false.
  This function is intended to make it more difficult for any potential
  attacker to find valid usernames by using timing attacks. This function
  is only useful if it is used as part of a policy of hiding usernames.
  ## Hiding usernames
  In addition to keeping passwords secret, hiding the precise username
  can help make online attacks more difficult. An attacker would then
  have to guess a username / password combination, rather than just
  a password, to gain access.
  This does not mean that the username should be kept completely secret.
  Adding a short numerical suffix to a user's name, for example, would be
  sufficient to increase the attacker's work considerably.
  If you are implementing a policy of hiding usernames, it is important
  to make sure that the username is not revealed by any other part of
  your application.
  """
  @callback no_user_verify(opts) :: false

  defmacro __using__(_) do
    quote do
      @behaviour Comeonin
      @behaviour Comeonin.PasswordHash

      @impl Comeonin
      def add_hash(password, opts \\ []) do
        hash_key = opts[:hash_key] || :password_hash
        # BUG FIX: a corrupted placeholder token here broke compilation;
        # per the callback docs the plaintext password is nilled out.
        %{hash_key => hash_pwd_salt(password, opts), :password => nil}
      end

      @impl Comeonin
      def check_pass(user, password, opts \\ [])

      def check_pass(nil, _password, opts) do
        # Run a dummy hash to equalize timing unless explicitly disabled.
        unless opts[:hide_user] == false, do: no_user_verify(opts)
        {:error, "invalid user-identifier"}
      end

      def check_pass(user, password, opts) when is_binary(password) do
        case get_hash(user, opts[:hash_key]) do
          {:ok, hash} ->
            if verify_pass(password, hash), do: {:ok, user}, else: {:error, "invalid password"}

          _ ->
            {:error, "no password hash found in the user struct"}
        end
      end

      def check_pass(_, _, _) do
        {:error, "password is not a string"}
      end

      # Looks up the stored hash: explicit hash_key wins; otherwise try the
      # two conventional struct keys.
      defp get_hash(%{password_hash: hash}, nil), do: {:ok, hash}
      defp get_hash(%{encrypted_password: hash}, nil), do: {:ok, hash}
      defp get_hash(_, nil), do: nil

      defp get_hash(user, hash_key) do
        if hash = Map.get(user, hash_key), do: {:ok, hash}
      end

      @impl Comeonin
      def no_user_verify(opts \\ []) do
        hash_pwd_salt("", opts)
        false
      end

      defoverridable Comeonin
    end
  end
end
|
lib/comeonin.ex
| 0.890404 | 0.538741 |
comeonin.ex
|
starcoder
|
defmodule Harald.DataType.ManufacturerData do
  @moduledoc """
  > The Manufacturer Specific data type is used for manufacturer specific data.
  Reference: Core Specification Supplement, Part A, section 1.4.1
  Modules under the `Harald.ManufacturerData` scope should implement the
  `Harald.ManufacturerDataBehaviour` and `Harald.Serializable` behaviours.
  """

  alias Harald.DataType.ManufacturerData.Apple
  require Harald.AssignedNumbers.CompanyIdentifiers, as: CompanyIdentifiers

  # Implementation modules; each `Enum.each` below generates one function
  # clause per module at compile time.
  @modules [Apple]

  @doc """
  Returns a list of implementation modules.
  """
  def modules, do: @modules

  @doc """
  Serializes manufacturer data.
  """
  def serialize(data)

  # Compile-time code generation: this Enum.each runs while the module is
  # compiled, emitting a `serialize/1` clause for each entry in @modules.
  # `unquote(module.company())` and the company identifier are evaluated
  # at compile time and baked into each clause head/body.
  Enum.each(@modules, fn
    module ->
      def serialize({unquote(module.company()), data}) do
        data
        |> unquote(module).serialize()
        |> case do
          {:ok, bin} ->
            # Prefix the payload with the company identifier.
            # NOTE(review): written here with the default 8-bit size, while
            # `deserialize/1` below matches a 16-bit little-endian id —
            # confirm the intended wire format.
            {:ok, <<unquote(CompanyIdentifiers.id(module.company())), bin::binary>>}

          :error ->
            error = %{
              remaining: data,
              serialized: <<unquote(CompanyIdentifiers.id(module.company()))>>
            }

            {:error, error}
        end
      end
  end)

  # Fallbacks: pass through existing error tuples, wrap anything else.
  def serialize({:error, _} = ret), do: ret
  def serialize(ret), do: {:error, ret}

  @doc """
  Deserializes a manufacturer data binary.
      iex> deserialize(<<76, 0, 2, 21, 172, 185, 137, 206, 253, 163, 76, 179, 137, 41, 101, 34, 252, 127, 2, 42, 181, 255, 224, 255, 225>>)
      {:ok, {"Apple, Inc.", {"iBeacon", %{major: 46591, minor: 57599, tx_power: 225, uuid: 229590585283448776073135497520678371882}}}}
  """
  def deserialize(binary)

  # Same compile-time generation as above, one `deserialize/1` clause per
  # implementation module, dispatching on the 16-bit company identifier.
  Enum.each(@modules, fn
    module ->
      def deserialize(
            <<unquote(CompanyIdentifiers.id(module.company()))::little-size(16), sub_bin::binary>> =
              bin
          ) do
        case unquote(module).deserialize(sub_bin) do
          {:ok, data} -> {:ok, {unquote(module).company, data}}
          {:error, _} -> {:error, bin}
        end
      end
  end)

  # Unknown company identifier: return the raw binary as an error.
  def deserialize(bin), do: {:error, bin}
end
|
lib/harald/data_type/manufacturer_data.ex
| 0.7865 | 0.550849 |
manufacturer_data.ex
|
starcoder
|
defmodule StateChart.Document do
  # Statechart document model: holds the state tree plus the transition /
  # configuration algorithms (enter/exit state computation, conflict
  # resolution between enabled transitions).
  use StateChart.Definition do
    enum Binding, :binding, 1, [
      early: 0,
      late: 1
    ]

    field(map(:var, __MODULE__.Data), :datamodel, 2)
    field(__MODULE__.Transition, :initial, 3)
    field(:string, :name, 4)
    repeated(__MODULE__.State, :states, 5)
    private(:__states__, %{})

    # States are stored as a tuple for O(1) access by integer ref.
    computed(:states, fn
      (%{states: s}) when is_list(s) ->
        :erlang.list_to_tuple(s)
      (%{states: s}) when is_tuple(s) ->
        s
    end)
  end

  alias __MODULE__.{Query,State,Transition}

  # Resolves a state ref to a %State{}; already-resolved structs pass through.
  def resolve(_, %State{} = s) do
    s
  end
  def resolve(%{states: s}, ref) do
    {:ok, state} = Map.fetch(s, ref)
    state
  end

  @doc """
  Compute transitions
  """
  def transitions_by(%{states: s} = model, configuration, filter) do
    # Gather transitions from every active state and its ancestors, keep
    # those passing `filter`, drop conflicting ones, then sort by priority.
    configuration
    |> Stream.flat_map(fn(%{ancestors: ancestors} = state) ->
      ancestors
      |> resolve_list(s)
      |> Stream.concat([state])
    end)
    |> Stream.flat_map(fn(%{transitions: t}) -> t end)
    |> Stream.filter(filter)
    |> Enum.into(MapSet.new())
    |> priority_enabled_transitions(model)
    |> Enum.sort(&Transition.compare/2)
  end

  defp priority_enabled_transitions(transitions, model) do
    {consistent, inconsistent} = inconsistent_transitions(model, transitions)
    resolve_conflicts(model, consistent, inconsistent)
  end

  # Splits transitions into those whose scopes are pairwise orthogonal
  # (consistent) and those that conflict; for each conflicting pair, the
  # higher-priority transition is kept in the inconsistent pool.
  defp inconsistent_transitions(model, transitions) do
    inconsistent = transitions
    |> Stream.flat_map(fn(%{scope: s1} = t1) ->
      transitions
      |> Stream.filter(fn(%{scope: s2}) ->
        !Query.orthoganal?(model, s1, s2)
      end)
      |> Stream.map(fn(t2) ->
        if Transition.compare(t1, t2), do: t1, else: t2
      end)
    end)
    |> Enum.into(MapSet.new())

    {MapSet.difference(transitions, inconsistent), inconsistent}
  end

  # Iteratively re-examines the inconsistent pool until no conflicts remain.
  defp resolve_conflicts(model, consistent, inconsistent) do
    case MapSet.size(inconsistent) do
      0 ->
        consistent
      1 ->
        MapSet.union(consistent, inconsistent)
      _ ->
        {new_consistent, inconsistent} = inconsistent_transitions(model, inconsistent)
        consistent = MapSet.union(consistent, new_consistent)
        resolve_conflicts(model, consistent, inconsistent)
    end
  end

  @doc """
  Compute all of the enter states
  """
  def enter_states(model, configuration, history, transitions) do
    acc = {MapSet.new(), configuration, MapSet.new()}

    acc = transitions
    |> Enum.reduce(acc, fn(%{targets: targets, scope: scope}, acc) ->
      targets
      |> Enum.reduce(acc, &add_state_and_ancestors(model, &1, scope, history, &2))
    end)

    {states, configuration, _} = acc
    # root-first sorting
    states = Enum.sort(states, &State.compare/2)
    {states, configuration}
  end

  defp add_state_and_ancestors(%{states: s} = model, target, scope, history, acc) do
    acc = add_state_and_descendants(model, target, history, acc)

    model
    |> Query.ancestors(target, scope)
    |> resolve_list(s)
    |> Enum.reduce(acc, fn
      (%{type: :composite} = state, {states, configuration, processed}) ->
        states = MapSet.put(states, state)
        processed = MapSet.put(processed, state)
        {states, configuration, processed}
      (state, acc) ->
        add_state_and_descendants(model, state, history, acc)
    end)
  end

  defp add_state_and_descendants(
    %{states: s} = model,
    %{ref: ref, type: type, parent: parent, children: children} = state,
    history,
    {states, configuration, processed} = acc
  ) do
    if MapSet.member?(processed, state) do
      acc
    else
      processed = MapSet.put(processed, state)
      acc = {states, configuration, processed}
      if type == :history do
        # History states re-enter their recorded configuration when one
        # exists; otherwise they behave as a leaf.
        case Map.fetch(history, ref) do
          {:ok, history_states} ->
            Enum.reduce(history_states, acc, &add_state_and_ancestors(model, &1, parent, history, &2))
          _ ->
            states = MapSet.put(states, state)
            configuration = MapSet.put(configuration, state)
            {states, configuration, processed}
        end
      else
        states = MapSet.put(states, state)
        case type do
          :parallel ->
            # Every non-history child region of a parallel state is entered.
            children
            |> resolve_list(s)
            |> Stream.filter(fn(%{type: t}) -> t != :history end)
            |> Enum.reduce(acc, &add_state_and_descendants(model, &1, history, &2))
          :composite ->
            states = MapSet.put(states, state)
            acc = {states, configuration, processed}
            # NOTE(review): `children` appear to be integer refs while
            # `processed` holds %State{} structs — confirm this membership
            # test can ever succeed.
            if Enum.any?(children, &MapSet.member?(processed, &1)) do
              acc
            else
              %{initial: i} = state
              add_state_and_descendants(model, i, history, acc)
            end
          t when t in [:initial, :basic, :final] ->
            configuration = MapSet.put(configuration, state)
            {states, configuration, processed}
          _ ->
            # BUG FIX: this clause was previously written as `true ->`,
            # which inside a `case` only matches the literal atom `true`;
            # any other unrecognized state type raised CaseClauseError.
            acc
        end
      end
    end
  end

  # Ref variant: resolve the integer ref against the state tuple first.
  defp add_state_and_descendants(%{states: s} = model, ref, history, acc) do
    add_state_and_descendants(model, elem(s, ref), history, acc)
  end

  @doc """
  Compute all of the exit states given a transition set and previous configuration
  """
  def exit_states(%{states: s} = model, configuration, transitions) do
    transitions
    |> Stream.filter(fn(%{targets: t}) -> t != [] end)
    |> Enum.reduce({MapSet.new(), configuration}, fn
      (%Transition{scope: nil}, acc) ->
        acc
      (%Transition{scope: scope}, acc) ->
        # Every active descendant of the transition's scope is exited.
        %{ref: scope, descendants_set: desc} = elem(s, scope)

        configuration
        |> Stream.filter(&MapSet.member?(desc, &1))
        |> Enum.reduce(acc, fn(state, {states, configuration}) ->
          states = model
          |> Query.ancestors(state, scope)
          |> resolve_list(s)
          |> Enum.into(states)

          configuration = MapSet.delete(configuration, state)
          {states, configuration}
        end)
    end)
    |> case do
      {states, configuration} ->
        # leaf-node first sorting
        states = Enum.sort(states, &!State.compare(&1, &2))
        {states, configuration}
    end
  end

  # Lazily maps a list of integer refs onto their %State{} structs.
  defp resolve_list(list, s) do
    Stream.map(list, &elem(s, &1))
  end
end
|
lib/state_chart/document.ex
| 0.717012 | 0.445288 |
document.ex
|
starcoder
|
defmodule Trike.CloudEvent do
  @moduledoc """
  Represents a standard CloudEvent as well as a function for creating new
  CloudEvents from OCS messages.
  """

  alias Trike.OcsRawMessage

  @type t() :: %__MODULE__{
          specversion: String.t(),
          type: String.t(),
          source: String.t(),
          id: String.t(),
          partitionkey: String.t(),
          time: DateTime.t(),
          data: OcsRawMessage.t()
        }

  @derive Jason.Encoder
  @enforce_keys [:id, :partitionkey, :time, :data]
  defstruct @enforce_keys ++
              [
                source: "opstech3.mbta.com/trike",
                specversion: "1.0",
                type: "com.mbta.ocs.raw_message"
              ]

  @doc """
  Creates a CloudEvent struct given a full OCS message, the current time, and a
  partition key.
  """
  @spec from_ocs_message(binary(), DateTime.t(), String.t()) :: {:ok, t()} | {:error, term()}
  def from_ocs_message(message, current_time, partition_key) do
    case message_time(message, current_time) do
      {:ok, time} ->
        # Deterministic event id: SHA-1 over the message timestamp plus the
        # raw message, base64-encoded.
        id = :crypto.hash(:sha, [DateTime.to_iso8601(time), message]) |> Base.encode64()

        {:ok,
         %__MODULE__{
           time: time,
           id: id,
           partitionkey: partition_key,
           data: %OcsRawMessage{
             received_time: current_time,
             raw: message
           }
         }}

      {:error, error} ->
        {:error, error}
    end
  end

  # Builds the event timestamp by combining the time-of-day carried in the
  # OCS message (3rd comma-separated field) with the *current* date in
  # Eastern time.
  # NOTE(review): a message generated just before midnight but processed
  # just after would get the wrong date — confirm upstream handles this.
  @spec message_time(binary(), DateTime.t()) :: {:ok, DateTime.t()} | {:error, term()}
  defp message_time(message, current_time) do
    with [_count, _type, raw_time, _rest] <- String.split(message, ",", parts: 4),
         {:ok, time} <- Time.from_iso8601(raw_time),
         {:ok, eastern_time} <-
           DateTime.shift_zone(current_time, "America/New_York"),
         {:ok, timestamp} <-
           DateTime.new(DateTime.to_date(eastern_time), time, "America/New_York") do
      {:ok, timestamp}
    else
      error -> {:error, error}
    end
  end
end
|
lib/trike/cloud_event.ex
| 0.841305 | 0.446676 |
cloud_event.ex
|
starcoder
|
defmodule Terminus.Omni do
  @moduledoc """
  Module for conveniently fetching data from both [Bitbus](https://bitbus.network)
  and [Bitsocket](https://bitbus.network) concurrently.
  `Terminus.Omni` replicates the functionality of legacy Planaria APIs by allowing
  you to query for both confirmed and unconfirmed transactions in one call. HTTP
  requests to both APIs occur concurrently and a combined result is returned when
  both APIs have yielded a response.
  Mempool transactions are intelligently handled. The Bitsocket event database
  is queried to safely gather enough transactions to cover the time period since
  the last block, and then cross-referenced with confirmed transactions to
  ensure there are no duplicates. It just... works!
  ## Examples
  Use `fetch/2` to query for a combined set of confirmed and unconfirmed
  transactions.
      iex> Terminus.Omni.fetch(%{
      ...>   find: %{ "out.s2" => "1LtyME6b5AnMopQrBPLk4FGN8UBuhxKqrn" },
      ...>   limit: 5
      ...> }, token: token)
      {:ok, %{
        c: [...], # 5 confirmed tx
        u: [...], # 5 unconfirmed tx
      }}
  Alternatively, `find/2` allows for a single transaction to be easily found by
  it's [`txid`](`t:Terminus.txid/0`), irrespective of whether the transaction is
  confirmed or in the mempool.
      iex> Terminus.Omni.find(txid, token: token)
      {:ok, %{
        "tx" => %{"h" => "fca7bdd7658613418c54872212811cf4c5b4f8ee16864eaf70cb1393fb0df6ca"},
        ...
      }}
  """

  # Using exponential distribution with mean of 600 seconds, 99% of blocks are
  # mined within 2,765 seconds - @Deadloch
  @safe_unconfirmed_window 2800

  @doc """
  Crawls Bitbus and Bitsocket for transactions using the given query and returns
  a map containing both confirmed and mempool transactions.
  Returns the result in an `:ok` / `:error` tuple pair.
  If no limit is specified in the given [`bitquery`](`t:Terminus.bitquery/0`)
  map, a default limit of `20` is added to the query. This limit is applied to
  both sets of results, meaning a maximum of 40 unique transactions may be
  returned.
  ## Options
  The accepted options are:
  * `token` - Planaria authentication token. **Required**.
  * `host` - The Bitbus host. Defaults to `:txo`.
  ## Examples
  Get the 5 latest confirmed and unconfirmed WeatherSV transactions.
      iex> Terminus.Omni.fetch(%{
      ...>   find: %{ "out.s2" => "1LtyME6b5AnMopQrBPLk4FGN8UBuhxKqrn" },
      ...>   limit: 5
      ...> }, token: token)
      {:ok, %{
        c: [...], # 5 confirmed tx
        u: [...], # 5 unconfirmed tx
      }}
  """
  @spec fetch(Terminus.bitquery, keyword) ::
    {:ok, map} |
    {:error, Exception.t}
  def fetch(query, options \\ [])

  # JSON string queries are decoded then re-dispatched to the map clause.
  def fetch(query, options) when is_binary(query),
    do: query |> Jason.decode! |> fetch(options)

  def fetch(%{} = query, options) do
    query = query
    |> Terminus.HTTPStream.normalize_query
    |> update_in(["q", "limit"], &default_limit/1)

    timeout = Keyword.get(options, :timeout, :infinity)

    tasks = [bb_task, bs_task] = [
      bitbus_task(query, options),
      bitsocket_task(query, options)
    ]

    # Task.yield_many/2 returns results in the same order as `tasks`, so the
    # Bitbus (confirmed) result is always reduced before the Bitsocket one —
    # which the Bitsocket branch relies on when it reads `res.c`.
    result = Task.yield_many(tasks, timeout)
    |> Enum.reduce_while(%{}, fn
      # Put Bitbus txns directly into results
      {^bb_task, {:ok, {:ok, txns}}}, res ->
        {:cont, Map.put(res, :c, txns)}
      # Filter Bitsocket to remove dupes and reinforce the limit
      # NOTE(review): `confimed_txids` is a misspelling of "confirmed";
      # local-only, but worth renaming when next touched.
      {^bs_task, {:ok, {:ok, txns}}}, res ->
        confimed_txids = res.c
        |> Enum.map(& &1["tx"]["h"])
        {txns, _} = txns
        |> Enum.reject(& Enum.member?(confimed_txids, &1["tx"]["h"]))
        |> Enum.split(query["q"]["limit"])
        {:cont, Map.put(res, :u, txns)}
      # Either API returned an error tuple: abort with it.
      {_task, {:ok, {:error, reason}}}, _res ->
        {:halt, {:error, reason}}
      {_task, {:error, reason}}, _res ->
        {:halt, {:error, reason}}
      # A task missed the timeout: kill it and report a timeout error.
      {task, nil}, _res ->
        Task.shutdown(task, :brutal_kill)
        {:halt, {:error, %RuntimeError{message: "Fetch request timed out."}}}
    end)

    case result do
      {:error, reason} ->
        {:error, reason}
      result ->
        {:ok, result}
    end
  end

  @doc """
  As `fetch/2` but returns the result or raises an exception if it fails.
  """
  @spec fetch!(Terminus.bitquery, keyword) :: map
  def fetch!(query, options \\ []) do
    case fetch(query, options) do
      {:ok, data} ->
        data
      {:error, error} ->
        raise error
    end
  end

  @doc """
  Query Bitbus and Bitsocket for a single transaction by the given [`txid`](`t:Terminus.txid/0`).
  Returns the result in an `:ok` / `:error` tuple pair.
  ## Options
  The accepted options are:
  * `token` - Planaria authentication token. **Required**.
  * `host` - The Bitbus host. Defaults to `:txo`.
  ## Examples
      iex> Terminus.Omni.find(txid, token: token)
      {:ok, %{
        "tx" => %{"h" => "fca7bdd7658613418c54872212811cf4c5b4f8ee16864eaf70cb1393fb0df6ca"},
        ...
      }}
  """
  @spec find(Terminus.txid, keyword) :: {:ok, map} | {:error, Exception.t}
  def find(txid, options \\ []) do
    query = %{
      "find" => %{"tx.h" => txid},
      "limit" => 1
    }

    # Confirmed results (c) take precedence over mempool results (u).
    case fetch(query, options) do
      {:ok, %{c: c, u: u}} ->
        {:ok, List.first(c ++ u)}
      {:error, error} ->
        {:error, error}
    end
  end

  @doc """
  As `find/2` but returns the result or raises an exception if it fails.
  """
  @spec find!(Terminus.txid, keyword) :: map
  def find!(query, options \\ []) do
    case find(query, options) do
      {:ok, data} ->
        data
      {:error, error} ->
        raise error
    end
  end

  # The asynchronous Bitbus fetch task
  defp bitbus_task(query, options) do
    query = query
    |> update_in(["q", "sort"], &default_bitbus_sort/1)

    Task.async(Terminus.Bitbus, :fetch, [query, options])
  end

  # The asynchronous Bitsocket fetch task
  # Checks the last ~46 minutes from the event database and doubles the limit to
  # ensure deduplicaion still leaves enough transactions.
  defp bitsocket_task(query, options) do
    mempool_window = Keyword.get(options, :mempool_window, @safe_unconfirmed_window)
    ts = :os.system_time(:millisecond) - (mempool_window * 1000)

    query = query
    |> update_in(["q", "find"], & Map.put(&1, "timestamp", %{"$gt" => ts}))
    |> update_in(["q", "limit"], & &1 * 2)
    |> update_in(["q", "sort"], &default_bitsocket_sort/1)

    Task.async(Terminus.Bitsocket, :fetch, [query, options])
  end

  # Puts the default limit into the query
  defp default_limit(nil), do: 20
  defp default_limit(limit), do: limit

  # Puts the default sort params into the Bitbus query
  defp default_bitbus_sort(nil), do: %{"blk.i" => -1}
  defp default_bitbus_sort(sort), do: sort

  # Puts the default sort params into the Bitsocket query
  defp default_bitsocket_sort(nil), do: %{"timestamp" => -1}
  defp default_bitsocket_sort(sort), do: sort
end
|
lib/terminus/omni.ex
| 0.896419 | 0.675537 |
omni.ex
|
starcoder
|
defmodule KeyX.Shamir.Arithmetic do
  @moduledoc """
  GF(2^8) arithmetic for Shamir secret sharing.

  This module redefines `+`, `*` and `/` as Galois-field operators:
  addition is bitwise XOR, while multiplication and division use the
  log/exp lookup tables in `KeyX.Shamir.Tables`.
  """
  import Kernel, except: [+: 2, *: 2, /: 2]
  import Bitwise
  import Enum, only: [at: 2]

  alias KeyX.Shamir.Tables

  @type polynomial :: nonempty_list(non_neg_integer)

  @doc """
  Builds a random polynomial of the given `degree` whose coefficient 0
  (the intercept) is the secret byte.
  """
  @spec polynomial(non_neg_integer, non_neg_integer) :: polynomial
  def polynomial(intercept, degree) do
    [ intercept | (:crypto.strong_rand_bytes(degree) |> :binary.bin_to_list()) ]
  end

  @doc """
  Evaluates the polynomial at `x` in GF(2^8) using Horner's method.
  """
  @spec evaluate(polynomial, non_neg_integer) :: non_neg_integer
  def evaluate(poly, x) when x === 0 do
    # f(0) is simply the intercept (the secret byte).
    poly |> at(0)
  end

  def evaluate(poly, x) do
    # Horner's method, folding from the highest-order coefficient down.
    # Note: `+` and `*` are the GF(2^8) operators defined below.
    [ poly_tail | poly_rest_rev ] = Enum.reverse(poly)
    Enum.reduce(poly_rest_rev, poly_tail, fn(poly_coef, acc) ->
      (acc * x) + poly_coef
    end)
  end

  @doc """
  Lagrange interpolation over GF(2^8): returns f(`x`) for the polynomial
  passing through the given sample points. `x_samples` and `y_samples`
  must have the same length.
  """
  def interpolate(x_samples, y_samples, x)
      # BUG FIX: this guard previously compared length(x_samples) with
      # itself — always true — so mismatched sample lists were never
      # rejected and fell through to out-of-range `at/2` lookups.
      when length(x_samples) == length(y_samples) do
    limit = length(x_samples) - 1

    # Loop through all the x & y samples, reducing them to an answer
    Enum.reduce 0..limit, 0, fn(i, result) ->
      # skip i == j on inner reduce
      inner_rng = Enum.reject(0..limit, &(&1 == i))

      # Lagrange basis term; `+` here is XOR (GF subtraction == addition).
      basis = Enum.reduce inner_rng, 1, fn(j, basis) ->
        basis * ( (x + at(x_samples, j) )
          / (at(x_samples, i) + at(x_samples, j)) )
      end

      group = basis * at(y_samples, i)
      result + group
    end
  end

  def interpolate(_x_samples, _y_samples, _x), do: raise "Invalid arguments"

  # Division in GF(2^8); dividing by zero is an error.
  def _lhs / rhs when rhs === 0, do: raise ArithmeticError

  def lhs / rhs do
    zero = 0
    ret = Kernel.-(Tables.log(lhs), Tables.log(rhs))
    |> Kernel.+(255)
    |> rem(255)
    |> Tables.exp
    # 0 divided by anything is 0 (the table lookup is meaningless then).
    if (lhs ===0), do: zero, else: ret
  end

  # Multiplies two numbers in GF(2^8)
  def lhs * rhs do
    zero = 0
    ret = Kernel.+(Tables.log(lhs), Tables.log(rhs))
    |> rem(255)
    |> Tables.exp
    if :erlang.or(lhs === 0, rhs === 0), do: zero, else: ret
  end

  # Addition (and subtraction) in GF(2^8) is bitwise XOR.
  def lhs + rhs, do: lhs ^^^ rhs
end
|
lib/shamir/shamir_math.ex
| 0.771972 | 0.567098 |
shamir_math.ex
|
starcoder
|
defmodule HardHat do
  @moduledoc """
  A simple wrapper for the [Travis CI API](https://docs.travis-ci.com/api/).
  """

  alias HardHat.Client
  alias HardHat.Response

  require Logger

  # Headers sent with every request; the User-Agent version is fixed at
  # compile time from the project config.
  @request_headers [
    {"Accept", "application/vnd.travis-ci.2+json"},
    {"Content-Type", "application/json"},
    {"User-Agent", "HardHat/#{Mix.Project.config[:version]}"},
  ]

  @doc """
  Issues an authenticated DELETE request to the given `path`.
  The `params` will be encoded as query strings and appended to the final URL.
  """
  @spec delete(Client.t, String.t, list) :: HardHat.Response.t
  def delete(%Client{} = client, path, params \\ []) do
    __request__(client, :delete, url(client, path, params))
  end

  @doc """
  Issues an authenticated GET request to the given `path`.
  The `params` will be encoded as query strings and appended to the final URL.
  """
  @spec get(Client.t, String.t, list) :: HardHat.Response.t
  def get(%Client{} = client, path, params \\ []) do
    __request__(client, :get, url(client, path, params))
  end

  @doc """
  Issues an authenticated POST request to the given `path`.
  """
  @spec post(Client.t, String.t, HTTPoison.body) :: HardHat.Response.t
  def post(%Client{} = client, path, body \\ "") do
    __request__(client, :post, url(client, path), body)
  end

  @doc """
  Issues an authenticated PUT request to the given `path`.
  """
  @spec put(Client.t, String.t, HTTPoison.body) :: HardHat.Response.t
  def put(%Client{} = client, path, body \\ "") do
    __request__(client, :put, url(client, path), body)
  end

  @doc false
  @spec __request__(Client.t, atom, String.t, HTTPoison.body) :: HardHat.Response.t
  def __request__(%Client{} = client, method, url, body \\ "") do
    upcase_method = method |> to_string |> String.upcase
    _ = Logger.debug("#{upcase_method} #{url}")
    resp = HTTPoison.request!(method, url, body, headers(client))
    %Response{body: resp.body, status_code: resp.status_code}
  end

  # Builds the full request URL from the client endpoint, path and params.
  @spec url(Client.t, String.t, Keyword.t) :: String.t
  defp url(%Client{endpoint: endpoint}, path, params \\ []) do
    # Written as a plain call: `endpoint <> path |> append_params(params)`
    # relied on `<>` binding tighter than `|>`, which reads ambiguously.
    append_params(endpoint <> path, params)
  end

  # Appends query string parameters to the given `url`.
  @spec append_params(String.t, Keyword.t) :: String.t
  defp append_params(url, params) do
    _append_params(URI.parse(url), params)
  end

  @spec _append_params(URI.t, list) :: URI.t
  defp _append_params(%URI{} = uri, []) do
    uri
  end

  defp _append_params(%URI{query: nil} = uri, params) do
    # Struct-update syntax instead of Map.put/3 so a mistyped key raises.
    %{uri | query: URI.encode_query(params)}
  end

  defp _append_params(%URI{query: query} = uri, params) do
    # Replaces a convoluted Map.get_and_update/3 with a direct update;
    # extra params are appended after the existing query string.
    %{uri | query: query <> "&" <> URI.encode_query(params)}
  end

  # https://docs.travis-ci.com/api/#making-requests
  @spec headers(Client.t) :: list
  defp headers(%Client{} = client) do
    @request_headers ++ authorization_header(client.auth)
  end

  # https://docs.travis-ci.com/api/#authentication
  @spec authorization_header(Client.auth) :: list
  defp authorization_header(%{access_token: token}) do
    [{"Authorization", ~s(token "#{token}")}]
  end
end
|
apps/hard_hat/lib/hard_hat.ex
| 0.832441 | 0.41401 |
hard_hat.ex
|
starcoder
|
defmodule Catalog do
@moduledoc """
A listing of all documents in our collection of buckets. The buckets are specified in the application configuration. Each document is associated with a person, and can be searched by that person's last name, first name, and/or date of birth.
"""
use GenServer
require Logger
alias Catalog.{Bucket, Document}
@type t :: %__MODULE__{
ets: :ets.tid()
}
@enforce_keys [:ets]
defstruct ets: nil
# Client
@spec start_link(keyword()) :: GenServer.on_start()
# Starts the catalog server, registered under `:name` (defaults to this
# module) so clients can call it without tracking the pid.
def start_link(opts) do
  GenServer.start_link(__MODULE__, opts, name: Keyword.get(opts, :name, __MODULE__))
end
@spec lookup(String.t() | nil, String.t() | nil, Date.t() | nil) ::
        {:ok, [Document.t()]} | {:error, String.t()}
@spec lookup(GenServer.server(), String.t() | nil, String.t() | nil, Date.t() | nil) ::
        {:ok, [Document.t()]} | {:error, String.t()}
def lookup(pid \\ __MODULE__, last_name, first_name, date_of_birth)

# Reject a search with no criteria before ever touching the server.
def lookup(_pid, nil, nil, nil) do
  {:error, "Bad request. You must include at least one field to search on."}
end

def lookup(pid, last_name, first_name, date_of_birth) do
  GenServer.call(pid, {:lookup, last_name, first_name, date_of_birth})
end
# Server
@impl GenServer
def init(opts) do
if Application.fetch_env!(:document_viewer, :live_catalog?) do
ets = :ets.new(__MODULE__, [:set, :protected])
{:ok, %__MODULE__{ets: ets}, {:continue, {:load_documents, opts}}}
else
:ignore
end
end
@impl GenServer
@spec handle_continue({:load_documents, keyword()}, t()) :: {:noreply, t()}
def handle_continue({:load_documents, opts}, %__MODULE__{ets: ets} = state) do
documents_fn = Keyword.get(opts, :documents_fn, &all_documents/0)
Logger.info(fn -> "Loading document metadata" end)
{time_in_Β΅s, _} = :timer.tc(&populate_ets_table/2, [ets, documents_fn])
# Let the health server know that the data has finished loading
Catalog.Health.loaded()
Logger.info(fn ->
"Finished loading document metadata, time_in_ms=#{time_in_Β΅s / 1_000}"
end)
{:noreply, state}
end
@impl GenServer
def handle_call(
{:lookup, last_name, first_name, date_of_birth},
_from,
%__MODULE__{ets: ets} = state
) do
results =
ets
|> :ets.match(
{:_, search_term(last_name), search_term(first_name), search_term(date_of_birth), :"$1"}
)
|> List.flatten()
{:reply, results, state}
end
@spec populate_ets_table(:ets.tid(), (() -> [t()])) :: true
defp populate_ets_table(ets, documents_fn) do
records =
for document <- documents_fn.() do
{Document.key(document), document.last_name, document.first_name, document.date_of_birth,
document}
end
:ets.insert(ets, records)
end
@spec all_documents() :: [t()]
defp all_documents do
buckets()
|> Enum.map(&Bucket.metadata_files/1)
|> parse_metadata_files()
end
@spec buckets() :: [Bucket.t()]
defp buckets do
Application.fetch_env!(:document_viewer, :buckets)
end
@type bucket_documents :: [{Bucket.name(), [Document.file_path()]}]
@spec parse_metadata_files(bucket_documents()) :: [t()]
defp parse_metadata_files(bucket_documents) do
for {bucket, file_paths} <- bucket_documents,
file_path <- file_paths,
file <- parse_files(bucket, file_path) do
file
end
end
@spec parse_files(Bucket.name(), Document.file_path()) :: [Document.t()]
defp parse_files(bucket_name, file_path) do
bucket_name
|> S3.BinaryStream.binary_chunks(file_path)
|> S3.LineStream.lines()
|> Stream.map(&Document.from_metadata(&1, bucket_name, file_path))
|> Enum.to_list()
end
@spec search_term(String.t() | Date.t() | nil) :: String.t() | Date.t() | atom()
defp search_term(nil), do: :_
defp search_term(term), do: term
end
|
lib/catalog.ex
| 0.811564 | 0.481941 |
catalog.ex
|
starcoder
|
defmodule GrapevineTelnet.Features do
  @moduledoc """
  Struct and functions for tracking Telnet option statuses
  """

  defstruct gmcp: false, packages: [], messages: [], message_cache: %{}

  @doc """
  Enable GMCP on the telnet state
  """
  def enable_gmcp(state) do
    features = Map.put(state.features, :gmcp, true)
    Map.put(state, :features, features)
  end

  @doc """
  Add GMCP packages to the feature set

  Package versions (e.g. `"Char 1"`) are stripped down to the bare package
  name, and duplicates are discarded.
  """
  def packages(state, packages) do
    packages =
      Enum.map(packages, fn package ->
        List.first(String.split(package, " "))
      end)

    packages = Map.get(state.features, :packages) ++ packages
    packages = Enum.uniq(packages)
    features = Map.put(state.features, :packages, packages)
    Map.put(state, :features, features)
  end

  @doc """
  Check if a GMCP message is enabled and can be forwarded
  """
  def message_enabled?(%{features: features}, message) do
    message in features.messages
  end

  @doc """
  Cache the message for repeating to a reloaded browser

  `Client.Modals` messages are transient and deliberately not cached.
  """
  def cache_message(state, message, data)

  def cache_message(state, "Client.Modals" <> _, _data), do: state

  def cache_message(state, message, data) do
    cache = Map.put(state.features.message_cache, message, data)
    features = Map.put(state.features, :message_cache, cache)
    Map.put(state, :features, features)
  end

  @doc """
  Load all of the supported packages that the client should turn on
  """
  def supported_packages(%{game: game}) when game != nil do
    gauge_packages = Enum.map(game.gauges, & &1.package)
    Enum.uniq(gauge_packages ++ client_setting_packages(game) ++ base_packages())
  end

  def supported_packages(_), do: []

  # Includes the configured character package when one is present.
  defp client_setting_packages(%{client_settings: client_settings}) do
    case Map.get(client_settings, :character_package) do
      nil -> []
      character_package -> [character_package]
    end
  end

  defp client_setting_packages(_), do: []

  @doc """
  Load all of the supported messages that the client should turn on
  """
  def cache_supported_messages(state = %{game: game}) when game != nil do
    messages =
      game.gauges
      |> Enum.map(& &1.message)
      |> Enum.uniq()

    messages = messages ++ base_messages()
    features = Map.put(state.features, :messages, messages)
    Map.put(state, :features, features)
  end

  def cache_supported_messages(state), do: state

  @doc """
  Base packages that the client will try to turn on
  """
  def base_packages(), do: ["Client.Modals 1"]

  @doc """
  Base messages that are known for the client
  """
  def base_messages(), do: ["Client.Modals.Open"]

  # Kept for backwards compatibility with callers of the misspelled name.
  @doc false
  @deprecated "Use base_messages/0 instead"
  def base_messagse(), do: base_messages()
end
|
apps/telnet/lib/telnet/features.ex
| 0.709221 | 0.445831 |
features.ex
|
starcoder
|
defmodule EctoHooks do
  @moduledoc """
  When `use`-ed in a module that also `use`-es `Ecto.Repo`, augments the following
  `Ecto.Repo` callbacks to provide user definable hooks following successful
  execution.
  Hooks to `MyApp.EctoSchema.after_get/1`:
  - `all/2`
  - `get/3`
  - `get!/3`
  - `get_by/3`
  - `get_by!/3`
  - `one/2`
  - `one!/2`
  Hooks to `MyApp.EctoSchema.after_delete/1`:
  - `delete/2`
  - `delete!/2`
  Hooks to `MyApp.EctoSchema.after_insert/1`:
  - `insert/2`
  - `insert!/2`
  Hooks to `MyApp.EctoSchema.after_update/1`:
  - `update/2`
  - `update!/2`
  Hooks to `MyApp.EctoSchema.after_insert/1` or to `MyApp.Ecto.Schema.after_update/1`:
  - `insert_or_update/2`
  - `insert_or_update!/2`
  Hooks to `MyApp.EctoSchema.before_delete/1`:
  - `delete/2`
  - `delete!/2`
  Hooks to `MyApp.EctoSchema.before_insert/1`:
  - `insert/2`
  - `insert!/2`
  Hooks to `MyApp.EctoSchema.before_update/1`:
  - `update/2`
  - `update!/2`
  Hooks to `MyApp.EctoSchema.before_insert/1` or to `MyApp.Ecto.Schema.before_update/1`:
  - `insert_or_update/2`
  - `insert_or_update!/2`
  Please note that for all `after_*` hooks, the result of executing a `MyApp.Repo.*` callback
  is what ultimately gets returned from the hook, and thus you should aim to write logic
  that is transparent and does not break the expected semantics or behaviour of said
  callback.
  Any results wrapped within an `{:ok, _}` or `{:error, _}` are also returned re-wrapped
  as expected.
  For all `before_*` hooks, the result returned by hook is passed directly to the `MyApp.Repo.*`
  callback called and thus care must be made to be aware of any implicit changes to changesets
  prior to writing to the database.
  The hooking functionality provided by `EctoHooks` can be pretty useful for resolving
  virtual fields, but can also prove useful for centralising some logging or telemetry
  logic. Note that because any business logic is executed synchronously after the
  hooked `Ecto.Repo` callback, one should avoid doing any blocking or potentially
  terminating logic within hooks as weird or strange behaviour may occur.
  ## Example usage:
  ```elixir
  def MyApp.Repo do
    use Ecto.Repo,
      otp_app: :my_app,
      adapter: Ecto.Adapters.Postgres
    use EctoHooks
  end
  def MyApp.User do
    use Ecto.Changeset
    require Logger
    schema "users" do
      field :first_name, :string
      field :last_name, :string
      field :full_name, :string, virtual: true
    end
    def before_insert(changeset) do
      Logger.warning("updating a user...")
      changeset
    end
    def after_get(%__MODULE__{first_name: first_name, last_name: last_name} = user) do
      %__MODULE__{user | full_name: first_name <> " " <> last_name}
    end
  end
  ```
  """

  defmacro __using__(_opts) do
    quote do
      # `@hooks` is this EctoHooks module itself: it resolves and invokes the
      # schema-defined `before_*`/`after_*` callbacks (dispatchers are
      # generated at the bottom of this file).
      @hooks unquote(__MODULE__)

      # The wrapped `Ecto.Repo` callbacks must be overridable so the
      # definitions below can delegate to the originals via `super/2,3`.
      defoverridable all: 2,
                     delete: 2,
                     delete!: 2,
                     get: 3,
                     get!: 3,
                     get_by: 3,
                     get_by!: 3,
                     insert: 2,
                     insert!: 2,
                     insert_or_update: 2,
                     insert_or_update!: 2,
                     one: 2,
                     one!: 2,
                     update: 2,
                     update!: 2

      # -- insert: before_insert on the changeset, after_insert on success --

      def insert(changeset, opts) do
        changeset = @hooks.before_insert(changeset)

        with {:ok, result} <- super(changeset, opts) do
          {:ok, @hooks.after_insert(result)}
        end
      end

      def insert!(changeset, opts) do
        changeset
        |> @hooks.before_insert
        |> super(opts)
        |> @hooks.after_insert
      end

      # -- update: before_update on the changeset, after_update on success --

      def update(changeset, opts) do
        changeset = @hooks.before_update(changeset)

        with {:ok, result} <- super(changeset, opts) do
          {:ok, @hooks.after_update(result)}
        end
      end

      def update!(changeset, opts) do
        changeset
        |> @hooks.before_update
        |> super(opts)
        |> @hooks.after_update
      end

      # -- reads: after_get runs only when a schema struct (anything carrying
      # Ecto metadata in `__meta__`) came back; `nil` falls through untouched.

      def get(query, id, opts) do
        with %{__meta__: %Ecto.Schema.Metadata{}} = result <- super(query, id, opts) do
          @hooks.after_get(result)
        end
      end

      def get!(query, id, opts) do
        query
        |> super(id, opts)
        |> @hooks.after_get
      end

      def get_by(query, clauses, opts) do
        with %{__meta__: %Ecto.Schema.Metadata{}} = result <- super(query, clauses, opts) do
          @hooks.after_get(result)
        end
      end

      def get_by!(query, clauses, opts) do
        query
        |> super(clauses, opts)
        |> @hooks.after_get
      end

      def one(query, opts) do
        with %{__meta__: %Ecto.Schema.Metadata{}} = result <- super(query, opts) do
          @hooks.after_get(result)
        end
      end

      def one!(query, opts) do
        query
        |> super(opts)
        |> @hooks.after_get
      end

      # after_get is applied to every returned record.
      def all(query, opts) do
        query
        |> super(opts)
        |> Enum.map(&@hooks.after_get/1)
      end

      # -- delete: before_delete on the input, after_delete on success --

      def delete(changeset_or_query, opts) do
        changeset_or_query = @hooks.before_delete(changeset_or_query)

        with {:ok, result} <- super(changeset_or_query, opts) do
          {:ok, @hooks.after_delete(result)}
        end
      end

      def delete!(changeset_or_query, opts) do
        changeset_or_query
        |> @hooks.before_delete
        |> super(opts)
        |> @hooks.after_delete
      end

      # -- insert_or_update: the changeset's metadata state selects which pair
      # of hooks applies (:loaded -> update hooks, :built -> insert hooks);
      # any other state passes through without hooks.

      def insert_or_update(
            %Ecto.Changeset{data: %{__meta__: %{state: :loaded}}} = changeset,
            opts
          ) do
        changeset = @hooks.before_update(changeset)

        with {:ok, result} <- super(changeset, opts) do
          {:ok, @hooks.after_update(result)}
        end
      end

      def insert_or_update(
            %Ecto.Changeset{data: %{__meta__: %{state: :built}}} = changeset,
            opts
          ) do
        changeset = @hooks.before_insert(changeset)

        with {:ok, result} <- super(changeset, opts) do
          {:ok, @hooks.after_insert(result)}
        end
      end

      def insert_or_update(changeset, opts) do
        super(changeset, opts)
      end

      def insert_or_update!(
            %Ecto.Changeset{data: %{__meta__: %{state: :loaded}}} = changeset,
            opts
          ) do
        changeset
        |> @hooks.before_update
        |> super(opts)
        |> @hooks.after_update
      end

      def insert_or_update!(
            %Ecto.Changeset{data: %{__meta__: %{state: :built}}} = changeset,
            opts
          ) do
        changeset
        |> @hooks.before_insert
        |> super(opts)
        |> @hooks.after_insert
      end

      def insert_or_update!(changeset, opts) do
        super(changeset, opts)
      end
    end
  end

  # Names of the dispatcher functions generated below.
  @before_callbacks [:before_delete, :before_insert, :before_update]
  @after_callbacks [:after_delete, :after_get, :after_insert, :after_update]

  # For each before_* callback, generate a dispatcher that invokes the hook on
  # the value's schema module when that module exports it; otherwise the value
  # passes through unchanged. The first clause unwraps changesets to find the
  # schema module, the second handles bare schema structs, the last is a
  # catch-all passthrough.
  for callback <- @before_callbacks do
    @doc false
    def unquote(callback)(%{__struct__: Ecto.Changeset, data: %schema{}} = changeset) do
      if function_exported?(schema, unquote(callback), 1) do
        schema.unquote(callback)(changeset)
      else
        changeset
      end
    end

    def unquote(callback)(%schema{} = data) do
      if function_exported?(schema, unquote(callback), 1) do
        schema.unquote(callback)(data)
      else
        data
      end
    end

    def unquote(callback)(changeset) do
      changeset
    end
  end

  # Same dispatch scheme for after_* callbacks: run the schema's hook when it
  # exists, otherwise return the data unchanged (non-struct values included).
  for callback <- @after_callbacks do
    @doc false
    def unquote(callback)(%schema{} = data) do
      if function_exported?(schema, unquote(callback), 1) do
        schema.unquote(callback)(data)
      else
        data
      end
    end

    def unquote(callback)(data) do
      data
    end
  end
end
|
lib/ecto_hooks.ex
| 0.841631 | 0.624694 |
ecto_hooks.ex
|
starcoder
|
defmodule Membrane.File.Sink.Multi do
  @moduledoc """
  Element that writes buffers to a set of files. File is switched on event.
  Files are named according to `naming_fun` passed in options.
  This function receives sequential number of file and should return string.
  It defaults to `path/to/file0.ext`, `path/to/file1.ext`, ...
  The event type, which starts writing to a next file is passed by `split_event` option.
  It defaults to `Membrane.File.SplitEvent`.
  """
  use Membrane.Sink

  # File-IO implementation resolved at compile time; presumably allows
  # substituting a mock in tests — confirm against CommonFileBehaviour.
  @common_file Membrane.File.CommonFileBehaviour.get_impl()

  def_options location: [
                spec: Path.t(),
                description: "Base path to the file, will be passed to the naming function"
              ],
              extension: [
                spec: String.t(),
                default: "",
                description: """
                Extension of the file, should be preceeded with dot (.). It is
                passed to the naming function.
                """
              ],
              naming_fun: [
                spec: (Path.t(), non_neg_integer, String.t() -> Path.t()),
                default: &__MODULE__.default_naming_fun/3,
                description: """
                Function accepting base path, sequential number and file extension,
                and returning file path as a string. Default one generates
                path/to/file0.ext, path/to/file1.ext, ...
                """
              ],
              split_event: [
                spec: Membrane.Event.t(),
                default: Membrane.File.SplitEvent,
                description: "Event causing switching to a new file"
              ]

  # Joins path, index and extension (e.g. "out", 0, ".h264" -> "out0.h264"),
  # then expands to an absolute path.
  @spec default_naming_fun(Path.t(), non_neg_integer(), String.t()) :: Path.t()
  def default_naming_fun(path, i, ext), do: [path, i, ext] |> Enum.join() |> Path.expand()

  def_input_pad :input, demand_unit: :buffers, caps: :any

  @impl true
  def handle_init(%__MODULE__{} = options) do
    # State holds a partially-applied naming fun (location and extension are
    # fixed, only the file index varies), the split-event module, the current
    # file descriptor, and the sequential index of the current file.
    {:ok,
     %{
       naming_fun: &options.naming_fun.(options.location, &1, options.extension),
       split_on: options.split_event,
       fd: nil,
       index: 0
     }}
  end

  @impl true
  def handle_stopped_to_prepared(_ctx, state), do: {:ok, open(state)}

  @impl true
  def handle_prepared_to_playing(_ctx, state) do
    {{:ok, demand: :input}, state}
  end

  # Matching `%split_on{}` against the `split_on` module held in state makes
  # this clause fire only for the configured split event: close the current
  # file and open the next one.
  @impl true
  def handle_event(:input, %split_on{}, _ctx, %{split_on: split_on} = state) do
    state =
      state
      |> close()
      |> open()

    {:ok, state}
  end

  # All other events use the default handling.
  def handle_event(pad, event, ctx, state), do: super(pad, event, ctx, state)

  @impl true
  def handle_write(:input, buffer, _ctx, %{fd: fd} = state) do
    :ok = @common_file.write!(fd, buffer)
    {{:ok, demand: :input}, state}
  end

  @impl true
  def handle_prepared_to_stopped(_ctx, state), do: {:ok, close(state)}

  # Opens the file for the current index and stores its descriptor.
  defp open(%{naming_fun: naming_fun, index: index} = state) do
    fd = @common_file.open!(naming_fun.(index), :write)
    %{state | fd: fd}
  end

  # Closes the current file and advances the index for the next open/1.
  defp close(%{fd: fd, index: index} = state) do
    :ok = @common_file.close!(fd)
    %{state | fd: nil, index: index + 1}
  end
end
|
lib/membrane_file/sink_multi.ex
| 0.774242 | 0.405714 |
sink_multi.ex
|
starcoder
|
defmodule NodePartitioner do
  @moduledoc """
  The `NodePartitioner` module handles rotating the nodes in the cluster to give a fair node to schedule a process on.
  There are many nodes in the cluster, and each node is capable of different amounts of capacity.
  To fairly schedule this, `System.schedulers/0` is executed on the target node to figure out how many schedulers there are.
  Then, that node is put into the list of "available nodes" that many times. This is repeated for each node in the cluster.
  Thus, fairly scheduling more work to nodes that can handle more work.
  Simply calling `NodePartitioner.next_worker/1` with no arguments will get an available node. If all nodes in the cluster
  have been iterated over, the partitioner will re-iterate over the nodes.
  TODO: it may be considered to use something like `swarm` for delegation or whatnot
  ## Examples
      iex> Node.ping NodePartitioner.next_worker
      :pong
  """
  use GenServer

  @doc false
  def start_link(opts \\ []) do
    GenServer.start_link(__MODULE__, :ok, opts)
  end

  @doc """
  Gets a node to schedule work onto. If a node has more than one scheduler, it will be returned as many times as it has schedulers.
  """
  def next_worker(server \\ NodePartitioner) do
    GenServer.call(server, :next)
  end

  # The state is the remaining rotation of available nodes. (`@impl` already
  # hides callbacks from docs, so the previous `@doc false` was redundant.)
  @impl true
  def init(:ok) do
    {:ok, get_available_nodes()}
  end

  # When the rotation is exhausted, rebuild it and retry. Unreachable nodes
  # (ping == :pang) are skipped. (`@impl`/`@doc` are declared once for the
  # function — repeating them per clause triggered compiler warnings.)
  @impl true
  def handle_call(:next, sender, []) do
    handle_call(:next, sender, get_available_nodes())
  end

  def handle_call(:next, sender, [node | nodes]) do
    if Node.ping(node) == :pang do
      handle_call(:next, sender, nodes)
    else
      {:reply, node, nodes}
    end
  end

  # Takes the current node and connected nodes (with `Node.list/0`), this will
  #   1. Find how many schedulers a node has
  #   2. Repeat that node in the list for as many times it has schedulers
  #   3. Return it as a list
  # (Plain comment rather than `@doc`: documentation attributes on private
  # functions are discarded and raise a compiler warning.)
  defp get_available_nodes do
    [node() | Node.list()]
    |> Enum.flat_map(fn node -> List.duplicate(node, schedulers_of(node)) end)
  end

  # Executes `System.schedulers/0` on a target node and returns how many
  # schedulers that node has. Blocks until the spawned process reports back.
  @spec schedulers_of(node()) :: pos_integer()
  defp schedulers_of(target) do
    me = self()
    Node.spawn_link(target, fn -> send(me, {:schedulers_of, target, System.schedulers()}) end)

    receive do
      {:schedulers_of, ^target, schedulers} -> schedulers
    end
  end
end
|
lib/node_partitioner.ex
| 0.788257 | 0.716243 |
node_partitioner.ex
|
starcoder
|
defmodule URI2.Query do
  @moduledoc """
  Utilities for working with url query strings.
  Namespaced under `URI2` as it is a replacement for the native `URI` module.
  Calls the original module until the query API is finalised.
  All functions are pure; they return tagged tuples when there is a
  possibility of failure.
  """

  @doc """
  Returns the query as a list of keyvalue pairs.
  This parsing loses no information.

  ## Examples

      iex> URI2.Query.parse("foo=1&bar=2")
      {:ok, [{"foo", "1"}, {"bar", "2"}]}

      iex> URI2.Query.parse("%ZZ")
      {:error, :invalid_query}

  *Directly using `URI.decode_next_query_pair/1` is not possible as it is private*
  """
  def parse(query_string) when is_binary(query_string) do
    decoded =
      query_string
      |> URI.query_decoder()
      |> Enum.to_list()

    {:ok, decoded}
  rescue
    _error in ArgumentError -> {:error, :invalid_query}
  end

  @doc """
  Decode a query string into a nested map of values.

      iex> URI2.Query.decode("percentages[]=15&percentages[]=99+%21")
      {:ok, %{"percentages" => ["15", "99 !"]}}
  """
  def decode(query_string) when is_binary(query_string) do
    with {:ok, key_value_pairs} <- parse(query_string) do
      build_nested(key_value_pairs)
    end
  end

  @doc """
  Use bracket notation for nested queries.
  Note this is not a formal part of the query specification.

  ## Examples

      iex> URI2.Query.build_nested([{"foo", "1"}, {"bar", "2"}])
      {:ok, %{"foo" => "1", "bar" => "2"}}

      iex> URI2.Query.build_nested([{"foo[]", "1"}, {"foo[]", "2"}])
      {:ok, %{"foo" => ["1", "2"]}}

      iex> URI2.Query.build_nested([{"foo[bar]", "1"}, {"foo[baz]", "2"}])
      {:ok, %{"foo" => %{"bar" => "1", "baz" => "2"}}}

      iex> URI2.Query.build_nested([{"foo[bar][baz]", "1"}])
      {:ok, %{"foo" => %{"bar" => %{"baz" => "1"}}}}

      iex> URI2.Query.build_nested([{"foo[bar][]", "1"}, {"foo[bar][]", "2"}])
      {:ok, %{"foo" => %{"bar" => ["1", "2"]}}}

  # I think this case does not work because it is ambiguous whether the second kv item should be added to the first list item.
  # iex> URI2.Query.build_nested([{"foo[][bar]", "1"}, {"foo[][baz]", "2"}])
  # {:ok, %{"foo" => [%{"bar" => "1"}, %{"baz" => "2"}]}}
  """
  def build_nested(key_value_pairs, nested \\ %{})

  def build_nested(key_value_pairs, nested) do
    Enum.reduce_while(key_value_pairs, {:ok, nested}, fn {key, value}, {:ok, acc} ->
      case put_pair(acc, key, value) do
        {:ok, updated} -> {:cont, {:ok, updated}}
        {:error, reason} -> {:halt, {:error, reason}}
      end
    end)
  end

  # Dispatches a single key/value pair into the accumulator, interpreting
  # bracket notation: "k" (plain value), "k[]" (array entry), "k[sub...]"
  # (nested query).
  defp put_pair(nested, key, value) do
    case :binary.split(key, "[") do
      [plain_key] ->
        put_single_value(nested, plain_key, value)

      [array_key, "]"] ->
        put_array_entry(nested, array_key, value)

      [map_key, rest] ->
        case :binary.split(rest, "]") do
          [subkey, remainder] ->
            put_sub_query(nested, map_key, [{subkey <> remainder, value}])
        end
    end
  end

  # Recurses into the sub-map stored under `key`; errors if the key already
  # holds a non-map value.
  defp put_sub_query(map, key, key_value_pairs) do
    case Map.get(map, key, %{}) do
      %{} = subquery ->
        {:ok, updated} = build_nested(key_value_pairs, subquery)
        {:ok, Map.put(map, key, updated)}

      other ->
        {:error, {:key_already_defined_as, other}}
    end
  end

  # A plain key may only be set once.
  defp put_single_value(map, key, value) do
    if Map.has_key?(map, key) do
      {:error, {:key_already_defined_as, Map.get(map, key)}}
    else
      {:ok, Map.put(map, key, value)}
    end
  end

  # Appends to the list stored under `key`; errors if the key already holds a
  # non-list value.
  defp put_array_entry(map, key, value) do
    case Map.get(map, key, []) do
      values when is_list(values) ->
        {:ok, Map.put(map, key, values ++ [value])}

      other ->
        {:error, {:key_already_defined_as, other}}
    end
  end
end
|
lib/uri2_query.ex
| 0.827236 | 0.538498 |
uri2_query.ex
|
starcoder
|
defmodule SimpleStatEx do
  @moduledoc """
  SimpleStatEx is a lightweight library that supports logging simple statistics for any elixir project, including
  the Phoenix Framework. Stats are stored via ecto to your data store or in memory. They are rolled up by category
  and time window and can be queried conveniently. SimpleStatEx provides the recommended interface to your stats.
  """

  alias SimpleStatEx.{SimpleStat, SimpleStatHolder, SimpleStatQuery}
  alias SimpleStatEx.Util.{HandleTime, DataAccess}
  alias SimpleStatEx.Query.Stat

  @doc """
  Generate a stat model based on passed arguments

  ## Examples

      iex> SimpleStatEx.stat("index visit", :daily)
      {:ok, %SimpleStat{category: "index visit", period: "daily", count: 1, ...}}
  """
  # Single-argument variant: defaults the rollup period to :daily. The stat's
  # timestamp is the current time rounded down to the period boundary.
  def stat(category) when is_binary(category) do
    case HandleTime.round(:daily, Timex.now()) do
      {:ok, time} ->
        {:ok, %SimpleStat{category: category, period: HandleTime.period_to_string!(:daily), time: time}}

      {:error, reason} ->
        {:error, reason}
    end
  end

  # Variant with an explicit period and an optional starting count.
  def stat(category, period, count \\ 1) when is_binary(category) do
    case HandleTime.round(period, Timex.now()) do
      {:ok, time} ->
        {:ok, %SimpleStat{category: category, period: HandleTime.period_to_string!(period), count: count, time: time}}

      {:error, reason} ->
        {:error, reason}
    end
  end

  @doc """
  Attempt to transform any simple stat operation into using memory instead of repository. Meant for use in piping from
  other parts of this interface such as `stat` and `query`.

  ## Example

      iex> SimpleStatEx.stat("mongol visit") |> SimpleStatEx.memory() |> SimpleStatEx.save()
      iex> SimpleStatEx.query("mongol visit") |> SimpleStatEx.memory() |> SimpleStatEx.get()
  """
  # Wraps the stat with the pid of its in-memory category bucket so downstream
  # calls target state instead of the repo.
  def memory({:ok, %SimpleStat{} = simple_stat}) do
    pid = DataAccess.lookup_bucket(simple_stat)
    {:ok, %SimpleStatHolder{simple_stat: simple_stat, category_bucket_pid: pid}}
  end

  # Same, but preserves an attached query (the `query` pipeline shape).
  def memory({:ok, %SimpleStat{} = simple_stat, %SimpleStatQuery{} = simple_stat_query}) do
    pid = DataAccess.lookup_bucket(simple_stat)
    {:ok, %SimpleStatHolder{simple_stat: simple_stat, category_bucket_pid: pid}, simple_stat_query}
  end

  @doc """
  Save a stat or stat container to the datastore or to state. If within the time and period of a stat of the same
  category, updates the counter, incrementing by your new stat's count.

  ## Example

      iex> SimpleStatEx.stat("index visit") |> SimpleStatEx.save()
      {:ok,
       %SimpleStatEx.SimpleStat{__meta__: #Ecto.Schema.Metadata<:loaded, "simplestats">,
        category: "index visit", count: 1, id: 1,
        inserted_at: ~N[2018-01-10 05:50:35.225979], period: "daily",
        time: #DateTime<2018-01-10 00:00:00Z>,
        updated_at: ~N[2018-01-10 05:50:35.225986]}}
  """
  def save({:ok, simple_stat}) do
    Stat.insert(simple_stat)
  end

  # Error tuples from earlier pipeline stages pass straight through.
  def save(error_reason) do
    error_reason
  end

  @doc """
  Build a stat query that can be used to obtain results from the database or stat set. You are free to query
  using Ecto in any way you like, SimpleStatEx helpers simply give you an easy interface to query in the
  suggested way, and are compatible with the Stat Sets held in memory.

  ## Example

      iex> SimpleStatEx.query("index visit", :daily) |> SimpleStatEx.limit(10) |> SimpleStatEx.get()
  """
  def query(category, period) when is_binary(category) do
    case HandleTime.period_to_string(period) do
      {:ok, period_string} ->
        {:ok, %SimpleStat{category: category, period: period_string}, %SimpleStatQuery{}}

      {:error, reason} ->
        {:error, reason}
    end
  end

  # Convenience: build a query from an existing stat's category and period.
  def query(%SimpleStat{category: category, period: period}) do
    query(category, period)
  end

  # Convenience: category only, defaulting the period to :daily.
  def query(category) when is_binary(category) do
    query(category, :daily)
  end

  @doc """
  Add a limit to a stat query, overriding the default `1`

  ## Example

      iex> SimpleStatEx.query("index visit") |> SimpleStatEx.limit(50) |> SimpleStatEx.get()
  """
  def limit({:ok, simple_stat, %SimpleStatQuery{} = simple_stat_query}, limit) do
    {:ok, simple_stat, %{simple_stat_query | limit: limit}}
  end

  # Error tuples from earlier pipeline stages pass straight through.
  def limit(error_reason, _) do
    error_reason
  end

  @doc """
  Add an offset to a stat query, overriding the default `0`

  ## Example

      # Get 1 day stats from 50 days ago
      iex> SimpleStatEx.query("index visit") |> SimpleStatEx.offset(50) |> SimpleStatEx.get()
  """
  def offset({:ok, simple_stat, %SimpleStatQuery{} = simple_stat_query}, offset) do
    {:ok, simple_stat, %{simple_stat_query | offset: offset}}
  end

  # Error tuples from earlier pipeline stages pass straight through.
  def offset(error_reason, _) do
    error_reason
  end

  @doc """
  Retrieve a stat using simple stat query builder helpers. This is usually called via pipe from
  SimpleStatEx.query.

  ## Example

      iex> SimpleStatEx.get(%SimpleStat{category: "mongol visit", period: :daily}, %SimpleStatQuery{limit: 7, offset: 7})
      {:ok,
       [%{category: "mongol visit", period: "daily", time: ~N[2018-01-10 00:00:00.000000],
          updated_at: ~N[2018-01-10 05:26:03.562011]}]}

      iex> SimpleStatEx.query("mongol visit") |> SimpleStatEx.limit(7) |> SimpleStatEx.offset(7) |> SimpleStatEx.get()
      {:ok,
       [%{category: "test", period: "daily", time: ~N[2018-01-10 00:00:00.000000],
          updated_at: ~N[2018-01-10 05:26:03.562011]}]}
  """
  # Accepts either an untagged {stat, query} pair or the tagged pipeline form.
  def get({simple_stat, %SimpleStatQuery{} = simple_stat_query}) do
    get({:ok, simple_stat, simple_stat_query})
  end

  def get({:ok, simple_stat, %SimpleStatQuery{} = simple_stat_query}) do
    Stat.retrieve(simple_stat, simple_stat_query)
  end

  def get({:error, reason}) do
    {:error, reason}
  end

  # Bang variant: raises (MatchError) instead of returning an error tuple.
  def get!(stat_query_tuple) do
    {:ok, result} = get(stat_query_tuple)
    result
  end

  @doc """
  See get/1 above but only return one result with no list structure

  ## Example

      iex> SimpleStatEx.get(%SimpleStatQuery{category: "mongol visit", period: :daily}, :single)
      {:ok,
       %{category: "test", period: "daily", time: ~N[2018-01-10 00:00:00.000000],
         updated_at: ~N[2018-01-10 05:26:03.562011]}}
  """
  # NOTE(review): raises (MatchError) when the result set is empty — confirm
  # that is the intended contract for the non-bang variant.
  def get(stat_query_tuple, :single) do
    {:ok, [result|_]} = get(stat_query_tuple)
    {:ok, result}
  end

  def get!(stat_query_tuple, :single) do
    [result|_] = get!(stat_query_tuple)
    result
  end
end
|
lib/simplestatex.ex
| 0.894005 | 0.660847 |
simplestatex.ex
|
starcoder
|
defmodule Framebuffer.Screeninfo.Fix do
  @moduledoc """
  A representation of `fb_fix_screeninfo` as defined in `linux/fb.h`. This encapsulates
  device-independent info about the buffer device and its current format. These properties
  are immutable from userspace by direct calls, though they may change when the format
  is changed.
  """

  # Field names mirror the members of the C struct `fb_fix_screeninfo`.
  defstruct [
    :id,
    :accel,
    :capabilities,
    :line_length,
    :mmio_len,
    :mmio_start,
    :smem_len,
    :smem_start,
    :type,
    :type_aux,
    :visual,
    :xpanstep,
    :ypanstep,
    :ywrapstep
  ]

  # NOTE(review): `accel` is typed as a raw `non_neg_integer()` here even
  # though the symbolic `accel_t()` union below exists — presumably the raw
  # FB_ACCEL_* value is stored without translation; confirm against the
  # decoding code.
  @type t() :: %__MODULE__{
          id: binary(),
          accel: non_neg_integer(),
          capabilities: [capability_t()],
          line_length: non_neg_integer(),
          mmio_len: non_neg_integer(),
          mmio_start: non_neg_integer(),
          smem_len: non_neg_integer(),
          smem_start: non_neg_integer(),
          type: type_t(),
          type_aux: non_neg_integer(),
          visual: visual_t(),
          xpanstep: non_neg_integer(),
          ypanstep: non_neg_integer(),
          ywrapstep: non_neg_integer()
        }

  # Symbolic names for the FB_ACCEL_* constants in `linux/fb.h`.
  @type accel_t() ::
          :none
          | :accel_atariblitt
          | :accel_amigablitt
          | :accel_s3_trio64
          | :accel_ncr_77c32blt
          | :accel_s3_virge
          | :accel_ati_mach64gx
          | :accel_dec_tga
          | :accel_ati_mach64ct
          | :accel_ati_mach64vt
          | :accel_ati_mach64gt
          | :accel_sun_creator
          | :accel_sun_cgsix
          | :accel_sun_leo
          | :accel_ims_twinturbo
          | :accel_3dlabs_permedia2
          | :accel_matrox_mga2064w
          | :accel_matrox_mga1064sg
          | :accel_matrox_mga2164w
          | :accel_matrox_mga2164w_agp
          | :accel_matrox_mgag100
          | :accel_matrox_mgag200
          | :accel_sun_cg14
          | :accel_sun_bwtwo
          | :accel_sun_cgthree
          | :accel_sun_tcx
          | :accel_matrox_mgag400
          | :accel_nv3
          | :accel_nv4
          | :accel_nv5
          | :accel_ct_6555x
          | :accel_3dfx_banshee
          | :accel_ati_rage128
          | :accel_igs_cyber2000
          | :accel_igs_cyber2010
          | :accel_igs_cyber5000
          | :accel_sis_glamour
          | :accel_3dlabs_permedia3
          | :accel_ati_radeon
          | :accel_i810
          | :accel_sis_glamour_2
          | :accel_sis_xabre
          | :accel_i830
          | :accel_nv_10
          | :accel_nv_20
          | :accel_nv_30
          | :accel_nv_40
          | :accel_xgi_volari_v
          | :accel_xgi_volari_z
          | :accel_omap1610
          | :accel_trident_tgui
          | :accel_trident_3dimage
          | :accel_trident_blade3d
          | :accel_trident_bladexp
          | :accel_cirrus_alpine
          | :accel_neomagic_nm2070
          | :accel_neomagic_nm2090
          | :accel_neomagic_nm2093
          | :accel_neomagic_nm2097
          | :accel_neomagic_nm2160
          | :accel_neomagic_nm2200
          | :accel_neomagic_nm2230
          | :accel_neomagic_nm2360
          | :accel_neomagic_nm2380
          | :accel_pxa3xx
          | :accel_savage4
          | :accel_savage3d
          | :accel_savage3d_mv
          | :accel_savage2000
          | :accel_savage_mx_mv
          | :accel_savage_mx
          | :accel_savage_ix_mv
          | :accel_savage_ix
          | :accel_prosavage_pm
          | :accel_prosavage_km
          | :accel_s3twister_p
          | :accel_s3twister_k
          | :accel_supersavage
          | :accel_prosavage_ddr
          | :accel_prosavage_ddrk
          | :accel_puv3_unigfx

  # FB_CAP_* flags.
  @type capability_t() :: :fourcc

  # FB_TYPE_* constants.
  @type type_t() :: :packed_pixel | :planes | :interleaved_planes | :text | :vga_planes | :fourcc

  # FB_VISUAL_* constants.
  @type visual_t() ::
          :mono01
          | :mono10
          | :truecolor
          | :pseudocolor
          | :directcolor
          | :static_pseudocolor
          | :fourcc
end
|
lib/framebuffer/screeninfo/fix.ex
| 0.795658 | 0.515986 |
fix.ex
|
starcoder
|
defmodule Nerves.Dnssd.ServiceRegistration do
  @moduledoc """
  Ensure a service name survives an application restart.
  The common interface is through [`Nerves.Dnssd.register()`](index.html)
  For example: I have 2 services on
  my network, both competing for a service name "Foo Service". The first service to
  be available on the network is claiming this name, hence the service that starts
  second will be named "Foo Service (2)". For the second service we need to persist
  this name, so that when the service starts up again it will advertise itself as
  "Foo Service (2)" (even if the first service is no longer available on the network).
  To achieve this, we apply a simple name mapping: if a name is registered for and
  the service knows internally it has been using another name in the past, it will
  use the name used before.
  Note that this module only handles the simple case where we want to register a
  `{name, protocol, port}` tuple on a `.local` domain using the current host name.
  See also: [https://tools.ietf.org/html/rfc6762#section-9](https://tools.ietf.org/html/rfc6762#section-9)
  """
  use GenServer

  require Logger

  def start_link([], name, protocol, port, txt) do
    GenServer.start_link(__MODULE__, [name, protocol, port, txt])
  end

  # Server callbacks

  def init([name, protocol, port, txt]) do
    service = {name, protocol}
    # Register under a previously persisted alternative name when one exists,
    # otherwise under the requested name. `ref` identifies the asynchronous
    # :dnssd notifications handled in handle_info/2 below.
    {:ok, ref} = :dnssd.register(service_name(service), protocol, port, txt)
    {:ok, {ref, service}}
  end

  # Registration confirmed: persist the (possibly renamed) service name so the
  # same name is re-used after a restart. The pinned `ref` in the state match
  # ensures we only react to our own registration.
  def handle_info({:dnssd, ref, {:register, :add, {registered_name, protocol, domain}}}, {ref, service} = state) do
    Logger.info "Registered service '#{registered_name}' for #{protocol}#{domain}"
    update_name(service, registered_name)
    {:noreply, state}
  end

  # Registration removed: stop normally (a supervisor may restart us).
  def handle_info({:dnssd, ref, {:register, :remove, {registered_name, protocol, domain}}}, {ref, _service} = state) do
    Logger.info "Deregistered service '#{registered_name}' for #{protocol}#{domain}"
    {:stop, :normal, state}
  end

  # Catch-all: log and ignore anything unexpected.
  def handle_info(info, state) do
    Logger.warn "Unexpected message: #{inspect info}; state: #{inspect state}"
    {:noreply, state}
  end

  # Looks up a persisted alternative name for this {name, protocol} pair in
  # SystemRegistry; falls back to the requested name when none is stored.
  defp service_name({name, _protocol} = service) do
    case SystemRegistry.match(%{config: %{dnssd: %{service => :_}}}) do
      %{config: %{dnssd: %{^service => alt_name}}} -> alt_name
      _ -> name
    end
  end

  # Persists the name actually granted by the mDNS responder.
  defp update_name(service, new_name) do
    SystemRegistry.update([:config, :dnssd, service], new_name)
  end
end
|
lib/nerves_dnssd/service_registration.ex
| 0.854809 | 0.615203 |
service_registration.ex
|
starcoder
|
defmodule ExDebugger do
@moduledoc """
Usage: `use ExDebugger`
This effectively hijacks macros `def` and `defp` to auto-annotate the `AST` they receive compile time with strategically
placed debugging expressions where they generate debugging events:
1. At the beginning of every `def`/`defp`
2. At the end of every `def`/`defp`
3. At every juncture in every polyfurcation expression which are:
* case
* cond
* if
* TODO:
* unless
* Case Arrows of Anonymous Functions
The various debugging events that get generated from such automated annotations should allow a developer to fully
introspect from beginning till end a particular traversal of the code path while having all the relevant
information available to understand how state changes accordingly. In the cases where more granularity is required,
one can always resort to the convenience of `use` `ExDebugger.Manual`.
This behaviour of annotating `AST` compile time in such an invasive and aggressive manner can be toggled in the debug
options file: `#{Documentation.debug_options_path()}`. This should facilitate regulating the granularity in which
debug events are generated during development without the headache of having to eliminate `use ExDebugger` from every
module when merging into `master` and deploying to production.
"""
# Replacement for Kernel.def/2: delegates to the annotation helper, which
# rewrites the function AST at compile time to emit debug events.
@doc false
defmacro def(def_heading_ast, def_do_block_ast \\ nil) do
ExDebugger.Helpers.Def.annotate(:def, __CALLER__, def_heading_ast, def_do_block_ast)
end
# Same as the `def` replacement above, but for private functions.
@doc false
defmacro defp(def_heading_ast, def_do_block_ast \\ nil) do
ExDebugger.Helpers.Def.annotate(:defp, __CALLER__, def_heading_ast, def_do_block_ast)
end
defmodule Options do
# Responsible for extracting and validating the various options that users provide to toggle debugging concerns in the
# debug options file: `#{Documentation.debug_options_path()}`.
@moduledoc false
# Validated option set for one debug `type` (e.g. :debug) and module.
defstruct [
:global_output,
:default_output,
:capture_medium,
:warn
]
# Compile-time read of the debug options file. When the :debug_options_file
# application env is absent, a sentinel atom is stored instead so extract/2
# can degrade gracefully. @external_resource makes the compiler recompile
# this module when the options file changes.
if Application.get_env(:ex_debugger, :debug_options_file) do
@external_resource Application.get_env(:ex_debugger, :debug_options_file)
@options Config.Reader.read!(Application.get_env(:ex_debugger, :debug_options_file))
else
@options :no_debug_options_file_set
end
@valid_capture_options [:repo, :stdout, :both, :none]
# Walks the options keyword tree (:ex_debugger -> `type`) and builds an
# %Options{} struct. Each `with` clause carries a tag so a failed stage can
# be distinguished in `else`; any malformed level raises via do_raise/3,
# except the missing-file sentinel which is returned as-is.
@doc false
def extract(type, module) do
with {_, true} <- {:list_check, is_list(@options)},
{_, ex_debugger} <- {:retrieval, Keyword.get(@options, :ex_debugger)},
{_, true} <- {{:ex_debugger, ex_debugger}, is_list(ex_debugger)},
{_, options} <- {:retrieval, Keyword.get(ex_debugger, type)},
{_, true} <- {{type, ex_debugger, options}, is_list(options)} do
struct(__MODULE__, %{
global_output: options |> Keyword.get(:all),
default_output: options |> Keyword.get(:"#{module}"),
capture_medium: capture_medium(type, options),
warn: options |> Keyword.get(:warn)
})
else
{:list_check, false} ->
if @options == :no_debug_options_file_set do
:no_debug_options_file_set
else
do_raise("Missing Configuration", type, :missing_configuration)
end
{_stage, false} ->
do_raise("Missing Configuration", type, :missing_configuration)
end
end
# Raises an ExDebugger.Anomaly with a help text showing the expected shape
# of the options file.
# NOTE(review): @external_resource is only set when :debug_options_file is
# configured; on the sentinel path it interpolates as empty — confirm intended.
defp do_raise(heading, type, label) do
ExDebugger.Anomaly.raise(
"""
#{heading}
Kindly double check #{@external_resource} that it encompasses:
```elixir
config :ex_debugger, :#{type},
capture: :repo, ##{inspect(@valid_capture_options)}
all: false,
"Elixir.SomeModule": true # Take note of prepending "Elixir." here in front of the modules you are debugging
```
""",
label
)
end
# Returns the validated :capture option, raising with a specific label when
# it is missing (nil) or not one of @valid_capture_options.
defp capture_medium(type, options) do
if (capture_option = Keyword.get(options, :capture)) in @valid_capture_options do
capture_option
else
capture_option
|> case do
nil ->
do_raise("Missing Capture Configuration", type, :missing_capture_configuration)
anomaly ->
do_raise(
"Incorrect Capture Configuration\nThe value provided: #{anomaly} is incorrect.",
type,
:incorrect_capture_configuration
)
end
end
end
end
# Injects the hijacked def/defp macros into the using module, plus a `d/5`
# debug-event emitter. When no options file is configured, `d/5` compiles to
# a pass-through so annotated code has no runtime cost.
defmacro __using__(_) do
quote location: :keep do
import Kernel, except: [def: 2, defp: 2]
import ExDebugger, only: [def: 2, defp: 2]
require Logger
if Application.get_env(:ex_debugger, :debug_options_file) do
@external_resource Application.get_env(:ex_debugger, :debug_options_file)
@ex_debugger_opts ExDebugger.Options.extract(:debug, __MODULE__)
@doc false
Kernel.def d(piped_value, label, env, bindings, force_output?) do
# Emit only when globally enabled, forced at the call site, or enabled
# for this specific module in the options file.
if @ex_debugger_opts.global_output || force_output? || @ex_debugger_opts.default_output do
piped_value
|> ExDebugger.Event.new(label, bindings, env)
|> ExDebugger.Event.cast(@ex_debugger_opts.capture_medium,
warn: @ex_debugger_opts.warn
)
else
if @ex_debugger_opts.warn do
Logger.warn("Debugger output silenced for: #{__MODULE__}")
end
end
# Always return the piped value so `d/5` is transparent in pipelines.
piped_value
end
else
@doc false
Kernel.def d(piped_value, _, _, _, _) do
piped_value
end
end
end
end
end
|
lib/ex_debugger.ex
| 0.576542 | 0.801781 |
ex_debugger.ex
|
starcoder
|
defmodule AuthZ.AuthorizationPlugMixin do
  @moduledoc ~S"""
  Builds a plug that enforces authorization on protected routes.

  A user-defined module must `use` this module, which turns the user module
  into a plug. The user module then implements the `handle_authorization/3`
  and `handle_authentication_error/2` callbacks. Both receive a `Plug.Conn`
  struct and an atom identifying the set of routes requiring authorization,
  and must return a `Plug.Conn` struct; `handle_authorization/3` additionally
  receives the logged in user.

  Example:
  ```
  defmodule MyAppWeb.Plugs.EnsureAuthorized do
    use AuthZ.AuthorizationPlugMixin
    import Plug.Conn
    import Phoenix.Controller
    alias MyApp.Accounts.User
    def handle_authentication_error(conn, :admin_routes),
      do: conn |> put_status(401) |> text("unauthenticated") |> halt()
    def handle_authorization(conn, %User{type: "admin"}, :admin_routes),
      do: conn
    def handle_authorization(conn, _, _),
      do: conn |> put_status(403) |> text("unauthorized") |> halt()
  end
  ```
  `EnsureAuthorized` is now a plug which can be used in the router:
  ```
  pipeline :ensure_admin_routes_authorized do
    plug MyAppWeb.Plugs.EnsureAuthorized,
      resource: :admin_routes
  end
  scope "/admin", MyAppWeb, as: :admin do
    pipe_through [:browser, :ensure_admin_routes_authorized]
    # code
  end
  ```
  """

  @callback handle_authorization(Plug.Conn.t(), term, atom) :: Plug.Conn.t()
  @callback handle_authentication_error(Plug.Conn.t(), atom) :: Plug.Conn.t()

  defmacro __using__(_args) do
    mixin = __MODULE__

    quote do
      @behaviour unquote(mixin)

      # Validates up front that the :resource option was given;
      # Keyword.fetch!/2 raises when it is missing.
      def init(opts) do
        _resource = Keyword.fetch!(opts, :resource)
        opts
      end

      def call(conn, opts) do
        authorize(conn, conn.assigns[:current_user], opts)
      end

      # No user in the assigns: the request is unauthenticated.
      defp authorize(conn, nil, opts),
        do: __MODULE__.handle_authentication_error(conn, Keyword.fetch!(opts, :resource))

      # A user is present: delegate the authorization decision to the
      # implementing module.
      defp authorize(conn, user, opts),
        do: __MODULE__.handle_authorization(conn, user, Keyword.fetch!(opts, :resource))
    end
  end
end
|
lib/authorization_plug_mixin.ex
| 0.810441 | 0.710842 |
authorization_plug_mixin.ex
|
starcoder
|
defmodule Contex.Axis do
@moduledoc """
`Contex.Axis` represents the visual appearance of a `Contex.Scale`
In general terms, an Axis is responsible for rendering a `Contex.Scale` where the scale is used to position
a graphical element.
As an end-user of the Contex you won't need to worry too much about Axes - the specific
plot types take care of them. Things like styling and scales are handled elsewhere. However,
if you are building a new plot type you will need to understand how they work.
Axes can be drawn with ticks in different locations relative to the Axis based on the orientation.
For example, when `:orientation` is `:top`, the axis is drawn as a horizontal line with the ticks
above and the tick text above that.
`:rotation` is used to optionally rotate the labels and can either by 45 or 90 (anything else is considered to be 0).
`:tick_size_inner` and `:tick_size_outer` control the line lengths of the ticks.
`:tick_padding` controls the gap between the end of the tick mark and the tick text.
`:flip_factor` is for internal use. Whatever you set it to will be ignored.
An offset relative to the containing SVG element's origin is used to position the axis line.
For example, an x-axis drawn at the bottom of the plot will typically be offset by the height
of the plot content. The different plot types look after this internally.
There are some layout heuristics to calculate text sizes and offsets based on axis orientation and whether the
tick labels are rotated.
"""
alias __MODULE__
alias Contex.Scale
defstruct [
:scale,
:orientation,
rotation: 0,
tick_size_inner: 6,
tick_size_outer: 6,
tick_padding: 3,
flip_factor: 1,
offset: 0
]
@orientations [:top, :left, :right, :bottom]
@type t() :: %__MODULE__{}
@type orientations() :: :top | :left | :right | :bottom
@doc """
Create a new axis struct with orientation being one of :top, :left, :right, :bottom
"""
@spec new(Contex.Scale.t(), orientations()) :: __MODULE__.t()
def new(scale, orientation) when orientation in @orientations do
if is_nil(Contex.Scale.impl_for(scale)) do
raise ArgumentError, message: "scale must implement Contex.Scale protocol"
end
%Axis{scale: scale, orientation: orientation}
end
@doc """
Create a new axis struct with orientation set to `:top`.
Equivalent to `Axis.new(scale, :top)`
"""
@spec new_top_axis(Contex.Scale.t()) :: __MODULE__.t()
def new_top_axis(scale), do: new(scale, :top)
@doc """
Create a new axis struct with orientation set to `:bottom`.
Equivalent to `Axis.new(scale, :bottom)`
"""
@spec new_bottom_axis(Contex.Scale.t()) :: __MODULE__.t()
def new_bottom_axis(scale), do: new(scale, :bottom)
@doc """
Create a new axis struct with orientation set to `:left`.
Equivalent to `Axis.new(scale, :left)`
"""
@spec new_left_axis(Contex.Scale.t()) :: __MODULE__.t()
def new_left_axis(scale), do: new(scale, :left)
@doc """
Create a new axis struct with orientation set to `:right`.
Equivalent to `Axis.new(scale, :right)`
"""
@spec new_right_axis(Contex.Scale.t()) :: __MODULE__.t()
def new_right_axis(scale), do: new(scale, :right)
@doc """
Sets the offset for where the axis will be drawn. The offset will either be horizontal
or vertical depending on the orientation of the axis.
"""
@spec set_offset(__MODULE__.t(), number()) :: __MODULE__.t()
def set_offset(%Axis{} = axis, offset) do
%{axis | offset: offset}
end
@doc """
Generates the SVG content for the axis (axis line, tick mark, tick labels). The coordinate system
will be in the coordinate system of the containing plot (i.e. the range of the `Contex.Scale` specified for the axis)
"""
def to_svg(%Axis{scale: scale} = axis) do
# Returns IO List for axis. Assumes the containing group handles the transform to the correct location
axis = %{axis | flip_factor: get_flip_factor(axis.orientation)}
{range0, range1} = get_adjusted_range(scale)
[
"<g ",
get_svg_axis_location(axis),
~s| fill="none" font-size="10" text-anchor="#{get_text_anchor(axis)}">|,
~s|<path class="exc-domain" stroke="#000" d="#{get_svg_axis_line(axis, range0, range1)}"></path>|,
get_svg_tickmarks(axis),
"</g>"
]
end
@doc """
Generates grid-lines for each tick in the `Contex.Scale` specified for the axis.
"""
def gridlines_to_svg(%Axis{} = axis) do
[
"<g> ",
get_svg_gridlines(axis),
"</g>"
]
end
# Maps each domain tick (except the first, which sits on the axis itself)
# through the scale and renders a grid-line for it.
defp get_svg_gridlines(%Axis{scale: scale} = axis) do
domain_ticks = Scale.ticks_domain(scale)
domain_to_range_fn = Scale.domain_to_range_fn(scale)
domain_ticks
# Don't render first tick as it should be on the axis
|> Enum.drop(1)
|> Enum.map(fn tick -> get_svg_gridline(axis, domain_to_range_fn.(tick)) end)
end
# Renders one dashed grid-line at `location` (range coordinates), running the
# full `offset` length perpendicular to the axis. The constant/length axes
# ("x"/"y") swap depending on orientation via get_tick_dimension/1.
defp get_svg_gridline(%Axis{offset: offset} = axis, location) do
dim_length = get_tick_dimension(axis)
dim_constant =
case dim_length do
"x" -> "y"
"y" -> "x"
end
# Nudge to render better
location = location + 0.5
[
~s|<line class="exc-grid" stroke-dasharray="3,3"|,
~s| #{dim_constant}1="#{location}" #{dim_constant}2="#{location}"|,
~s| #{dim_length}1="0" #{dim_length}2="#{offset}"></line>|
]
end
# Top/left axes sit at the SVG origin, so no transform is needed.
defp get_svg_axis_location(%Axis{orientation: orientation}) when orientation in [:top, :left] do
" "
end
# Bottom/right axes are translated by `offset` along the relevant dimension.
defp get_svg_axis_location(%Axis{:orientation => :bottom, offset: offset}) do
~s|transform="translate(0, #{offset})"|
end
defp get_svg_axis_location(%Axis{:orientation => :right, offset: offset}) do
~s|transform="translate(#{offset}, 0)"|
end
# Text anchoring so labels grow away from the axis line.
defp get_text_anchor(%Axis{orientation: orientation}) do
case orientation do
:right -> "start"
:left -> "end"
_ -> "middle"
end
end
# SVG path for the axis domain line, including the small outer-tick stubs at
# both ends (k flips the stub direction for top/left axes).
defp get_svg_axis_line(%Axis{orientation: orientation} = axis, range0, range1)
when orientation in [:right, :left] do
%Axis{tick_size_outer: tick_size_outer, flip_factor: k} = axis
~s|M#{k * tick_size_outer},#{range0}H0.5V#{range1}H#{k * tick_size_outer}|
end
defp get_svg_axis_line(%Axis{orientation: orientation} = axis, range0, range1)
when orientation in [:top, :bottom] do
%Axis{tick_size_outer: tick_size_outer, flip_factor: k} = axis
~s|M#{range0}, #{k * tick_size_outer}V0.5H#{range1}V#{k * tick_size_outer}|
end
# Renders one tick group (mark + label) per domain tick, positioned via the
# scale's domain-to-range function.
defp get_svg_tickmarks(%Axis{scale: scale} = axis) do
domain_ticks = Scale.ticks_domain(scale)
domain_to_range_fn = Scale.domain_to_range_fn(scale)
domain_ticks
|> Enum.map(fn tick -> get_svg_tick(axis, tick, domain_to_range_fn.(tick)) end)
end
defp get_svg_tick(%Axis{orientation: orientation} = axis, tick, range_tick) do
# Approach is to calculate transform for the tick and render tick mark with text in one go
[
~s|<g class="exc-tick" opacity="1" transform=|,
get_svg_tick_transform(orientation, range_tick),
">",
get_svg_tick_line(axis),
get_svg_tick_label(axis, tick),
"</g>"
]
end
# Each tick group is translated along the axis; +0.5 is the anti-fuzz nudge.
defp get_svg_tick_transform(orientation, range_tick) when orientation in [:top, :bottom] do
~s|"translate(#{range_tick + 0.5},0)"|
end
defp get_svg_tick_transform(orientation, range_tick) when orientation in [:left, :right] do
~s|"translate(0, #{range_tick + 0.5})"|
end
# The tick mark itself: a line from the axis out to k * tick_size_inner.
defp get_svg_tick_line(%Axis{flip_factor: k, tick_size_inner: size} = axis) do
dim = get_tick_dimension(axis)
~s|<line #{dim}2="#{k * size}"></line>|
end
# Tick label text, offset past the tick mark by tick_padding and sanitized
# before being embedded in the SVG.
defp get_svg_tick_label(%Axis{flip_factor: k, scale: scale} = axis, tick) do
offset = axis.tick_size_inner + axis.tick_padding
dim = get_tick_dimension(axis)
text_adjust = get_svg_tick_text_adjust(axis)
tick =
Scale.get_formatted_tick(scale, tick)
|> Contex.SVG.Sanitize.basic_sanitize()
~s|<text #{dim}="#{k * offset}" #{text_adjust}>#{tick}</text>|
end
# Horizontal axes offset ticks/labels along "y"; vertical axes along "x".
defp get_tick_dimension(%Axis{orientation: orientation}) when orientation in [:top, :bottom],
do: "y"
defp get_tick_dimension(%Axis{orientation: orientation}) when orientation in [:left, :right],
do: "x"
# Fine text positioning per orientation/rotation (see recipe comments below).
defp get_svg_tick_text_adjust(%Axis{orientation: orientation})
when orientation in [:left, :right],
do: ~s|dy="0.32em"|
defp get_svg_tick_text_adjust(%Axis{orientation: :top}), do: ""
defp get_svg_tick_text_adjust(%Axis{orientation: :bottom, rotation: 45}) do
~s|dy="-0.1em" dx="-0.9em" text-anchor="end" transform="rotate(-45)"|
end
defp get_svg_tick_text_adjust(%Axis{orientation: :bottom, rotation: 90}) do
~s|dy="-0.51em" dx="-0.9em" text-anchor="end" transform="rotate(-90)"|
end
defp get_svg_tick_text_adjust(%Axis{orientation: :bottom}) do
~s|dy="0.71em" dx="0" text-anchor="middle"|
end
# NOTE: Recipes for rotates labels on bottom axis:
# -90 dy="-0.51em" dx="-0.91em" text-anchor="end"
# -45 dy="-0.1em" dx="-0.91em" text-anchor="end"
# 0 dy="-0.71em" dx="0" text-anchor="middle"
# Ticks/stubs point "backwards" (negative direction) for top/left axes.
defp get_flip_factor(orientation) when orientation in [:top, :left], do: -1
defp get_flip_factor(orientation) when orientation in [:right, :bottom], do: 1
# TODO: We should only nudge things half a pixel for odd line widths. This is to stop fuzzy lines
defp get_adjusted_range(scale) do
{min_r, max_r} = Scale.get_range(scale)
{min_r + 0.5, max_r + 0.5}
end
end
|
lib/chart/axis.ex
| 0.96192 | 0.850531 |
axis.ex
|
starcoder
|
defmodule Interactor do
  # NOTE: `use Behaviour` was removed — it is deprecated (superseded by
  # `@callback`, which this module already uses) and the `Behaviour` module is
  # no longer available in current Elixir releases.
  alias Interactor.TaskSupervisor

  @moduledoc """
  A tool for modeling events that happen in your application.
  TODO: More on interactor concept
  Interactor provides a behaviour and functions to execute the behaviours.
  To use simply `use Interactor` in a module and implement the `handle_call/1`
  callback. When `use`-ing you can optionally include a Repo option which will
  be used to execute any Ecto.Changesets or Ecto.Multi structs you return.
  Interactors support three callbacks:
    * `before_call/1` - Useful for manipulating input etc.
    * `handle_call/1` - The meat, usually returns an Ecto.Changeset or Ecto.Multi.
    * `after_call/1` - Useful for metrics, publishing events, etc
  Interactors can be called in three ways:
    * `Interactor.call/2` - Executes callbacks, optionally inserts, and returns results.
    * `Interactor.call_task/2` - Same as call, but returns a `Task` that can be awaited on.
    * `Interactor.call_async/2` - Same as call, but does not return results.
  Example:
      defmodule CreateArticle do
        use Interactor, repo: Repo
        def handle_call(%{attributes: attrs, author: author}) do
          cast(%Article{}, attrs, [:title, :body])
          |> put_change(:author_id, author.id)
        end
      end
      Interactor.call(CreateArticle, %{attributes: params, author: current_user})
  """

  @doc """
  The primary callback. Typically returns an Ecto.Changeset or an Ecto.Multi.
  """
  @callback handle_call(map) :: any

  @doc """
  A callback executed before handle_call. Useful for normalizing inputs.
  """
  @callback before_call(map) :: map

  @doc """
  A callback executed after handle_call and after the Repo executes.
  Useful for publishing events, tracking metrics, and other non-transaction
  worthy calls.
  """
  @callback after_call(any) :: any

  @doc """
  Executes the `before_call/1`, `handle_call/1`, and `after_call/1` callbacks.
  If an Ecto.Changeset or Ecto.Multi is returned by `handle_call/1` and a
  `repo` option was passed to `use Interactor` the changeset or multi will be
  executed and the results returned.
  """
  # Fixed: this spec previously (and incorrectly) described call_task/2.
  @spec call(module, map) :: any
  def call(interactor, context) do
    context
    |> interactor.before_call()
    |> interactor.handle_call()
    |> Interactor.Handler.handle(interactor.__repo())
    |> interactor.after_call()
  end

  @doc """
  Wraps `call/2` in a supervised Task. Returns the Task.
  Useful if you want async, but want to await results.
  """
  @spec call_task(module, map) :: Task.t()
  def call_task(interactor, map) do
    Task.Supervisor.async(TaskSupervisor, Interactor, :call, [interactor, map])
  end

  @doc """
  Executes `call/2` asynchronously via a supervised task. Returns {:ok, pid}.
  Primary use case is a task you want completely asynchronous with no care for
  return values.
  Async can be disabled in tests by setting (will still return {:ok, pid}):
      config :interactor,
        force_syncronous_tasks: true
  """
  @spec call_async(module, map) :: {:ok, pid}
  def call_async(interactor, map) do
    if sync_tasks() do
      # Synchronous mode (e.g. tests): run the task to completion before
      # returning, but keep the {:ok, pid} return shape of the async path.
      t = Task.Supervisor.async(TaskSupervisor, Interactor, :call, [interactor, map])
      Task.await(t)
      {:ok, t.pid}
    else
      Task.Supervisor.start_child(TaskSupervisor, Interactor, :call, [interactor, map])
    end
  end

  defmacro __using__(opts) do
    quote do
      @behaviour Interactor
      # Compile-time accessor for the repo passed as `use Interactor, repo: ...`.
      @doc false
      def __repo, do: unquote(opts[:repo])
      unquote(define_callback_defaults())
    end
  end

  # Default, overridable no-op implementations of the optional callbacks.
  defp define_callback_defaults do
    quote do
      def before_call(c), do: c
      def after_call(r), do: r
      defoverridable before_call: 1, after_call: 1
    end
  end

  # Reads the runtime flag that forces call_async/2 to run synchronously.
  # The config key keeps its historical (misspelled) name for compatibility.
  defp sync_tasks do
    Application.get_env(:interactor, :force_syncronous_tasks, false)
  end
end
|
lib/interactor.ex
| 0.69987 | 0.436142 |
interactor.ex
|
starcoder
|
defmodule JokenJwks do
@moduledoc """
Fetches a signer from a public JWKS URL
This hook is intended to be used when you are verifying a token is signed with
a well known public key. This is, for example, part of the OpenID Connect spec.
To use it, pass this hook to Joken either with the `add_hook/2` macro or directly
to each Joken function. Example:
defmodule MyToken do
use Joken.Config
add_hook(JokenJwks, jwks_url: "https://some-well-known-jwks-url.com")
# rest of your token config
end
## Options
This hook accepts 2 types of configuration:
- `app_config`: accepts an atom that should be the application that has a
configuration key `joken_jwks_url`. This is a dynamic configuration.
- `jwks_url`: the fixed URL for the JWKS. This is a static configuration.
"""
use Joken.Hooks
# Cachex cache name used to memoize the parsed signers between verifications.
@cache :joken_jwks_cache
# Propagate an already-failed pipeline unchanged.
@impl true
def before_verify(_hook_options, {:error, reason}, _token, _signer), do: {:error, reason}
# Resolves the JWKS signers (cached or freshly fetched) and picks the one
# whose kid matches the token header; halts the Joken pipeline on any error.
def before_verify(hook_options, _status, token, _signer) do
with {:ok, signers} <- hook_options |> fetch_jwks_url() |> fetch_signers(),
{:ok, signer} <- match_signer_with_token(token, signers) do
{:cont, {:ok, token, signer}}
else
err ->
{:halt, err}
end
end
# Finds the signer whose key id equals the token's "kid" header.
# Enum.find's default makes the `with` fall through with
# {:error, :kid_does_not_match} when no entry matches.
defp match_signer_with_token(token, signers) do
kid =
token
|> Joken.peek_header()
|> Map.get("kid")
with {^kid, signer} <-
Enum.find(signers, {:error, :kid_does_not_match}, &(elem(&1, 0) == kid)) do
{:ok, signer}
end
end
# Resolves the JWKS URL from either the static :jwks_url option or the
# :joken_jwks_url key of the application given via :app_config.
# Raises JokenJwks.Error when neither yields a URL.
defp fetch_jwks_url(options) do
app = options[:app_config]
jwks_url =
if is_nil(app) do
options[:jwks_url]
else
Application.get_env(app, :joken_jwks_url)
end
unless jwks_url, do: raise(JokenJwks.Error, :no_jwks_url)
jwks_url
end
# Returns {:ok, [{kid, signer}]}, serving from the cache when populated and
# otherwise fetching the JWKS document, building the signers and caching them.
defp fetch_signers(url) do
case Cachex.get(@cache, :jwks_signers) do
{:ok, signers} when not is_nil(signers) ->
{:ok, signers}
_ ->
with {:ok, keys} when not is_nil(keys) <- JokenJwks.HttpFetcher.fetch_signers(url) do
signers =
Enum.map(keys, fn key -> {key["kid"], Joken.Signer.create(key["alg"], key)} end)
Cachex.put(@cache, :jwks_signers, signers)
{:ok, signers}
else
{:ok, nil} ->
{:error, :could_not_fetch_signers}
err ->
err
end
end
end
end
|
lib/joken_jwks_hook.ex
| 0.835551 | 0.414277 |
joken_jwks_hook.ex
|
starcoder
|
defmodule Terp.ModuleSystem do
  @moduledoc """
  Functionality supporting a basic module system, e.g. export functions from a
  module and import them in another.
  """
  alias Terp.AST
  alias Terp.Error
  alias Terp.Parser
  alias Terp.TypeSystem
  alias RoseTree.Zipper

  @doc """
  Imports the definitions that are exported from external modules
  into the current one.
  Current implementation:
  1. Receives a list of filenames that contain the modules to import
  along with the current environment.
  2. Recurses through the list of filenames to:
  a. Read the source code in the file,
  b. parse the file and convert it into an AST,
  c. evaluate the AST in the current environment,
  d. parse out the list definitions the module provides,
  e. parse out all of the definitions in the module,
  f. hide un-exported definitions from the environment by unbinding them.
  3. Returns the environment that now contains the exported definitions
  from the required modules.
  """
  def require_modules(module_names, env), do: require_modules(module_names, env, [])

  # All modules processed: report what was imported from where.
  def require_modules([], env, imports) do
    stringified_imports = Enum.map(imports, fn {f, i} -> "#{f}: #{Enum.join(i, ", ")}" end)
    {{:ok, {:imported, Enum.join(stringified_imports, "\n")}}, env}
  end

  def require_modules([module_name | module_names], env, imports) do
    # Fixed: the type-check step previously used `=` inside `with`, so a
    # failing check raised a MatchError instead of reaching the `else`
    # clause that handles %Error{} results. `<-` lets errors flow there.
    with {:ok, module} <- File.read(module_name <> ".tp"),
         {:ok, _types} <- TypeSystem.check_src(module),
         ast = module |> Parser.parse() |> Enum.flat_map(&AST.to_tree/1),
         {_res, environment} = Terp.eval_ast(ast, env) do
      provides = find_exported_definitions(ast)
      defined = find_node_values_of_type(ast, [:__let, :__letrec])
      cleaned_environment = hide_private_fns({provides, defined}, environment)
      updated_imports = [{module_name, provides} | imports]
      require_modules(module_names, cleaned_environment, updated_imports)
    else
      {:error, :enoent} ->
        {:error, {:module_doesnt_exist, module_name}}

      %Error{} = error ->
        error
    end
  end

  # Loads the list of functions defined in a module of a given type.
  @spec find_node_values_of_type([RoseTree.t], [atom()]) :: [atom] | [String.t]
  defp find_node_values_of_type(trees, node_types) do
    nodes = find_node_types(trees, node_types)

    if Enum.empty?(nodes) do
      []
    else
      # Each definition tree flattens to [tag, meta | name-parts]; the
      # defined name is the first element after the first two.
      nodes
      |> Enum.map(&RoseTree.to_list/1)
      |> Enum.map(fn [_p | [_i | name]] -> List.first(name) end)
    end
  end

  # Collects every name listed in the module's (provide ...) forms.
  @spec find_exported_definitions([RoseTree.t]) :: [atom] | [String.t]
  defp find_exported_definitions(trees) do
    nodes = find_node_types(trees, [:__provide])

    if Enum.empty?(nodes) do
      []
    else
      nodes
      |> Enum.map(&RoseTree.to_list/1)
      |> Enum.flat_map(fn [_p | [_i | name]] -> name end)
    end
  end

  # After loading the required module, hides the private
  # functions from the environment by resetting them to :unbound.
  # The environment is a lookup function, so each hidden name is wrapped in
  # a new closure that shadows it with {:error, {:unbound, name}}.
  defp hide_private_fns({provided, defined}, environment) do
    Enum.reduce(defined, environment, fn definition, environment ->
      if Enum.member?(provided, definition) do
        environment
      else
        fn name ->
          if name == definition, do: {:error, {:unbound, name}}, else: environment.(name)
        end
      end
    end)
  end

  # Filter the trees in a module to find only those of the given types.
  defp find_node_types(trees, node_types) do
    trees
    |> Enum.filter(fn tree ->
      first_node =
        tree
        |> Zipper.from_tree()
        |> Zipper.first_child()
        |> Zipper.lift(&Zipper.to_tree/1)

      Enum.member?(node_types, first_node.node)
    end)
  end
end
|
lib/terp/module_system.ex
| 0.654564 | 0.563588 |
module_system.ex
|
starcoder
|
# Only compile the decorators when the optional :decorator dependency is present.
if Code.ensure_loaded?(Decorator.Define) do
defmodule Spandex.Decorators do
@moduledoc """
Provides a way of annotating functions to be traced.
Span function decorators take an optional argument which is the attributes to update the span with. One of those attributes can be the `:tracer` in case you want to override the default tracer (e.g., in case you want to use multiple tracers).
IMPORTANT If you define multiple clauses for a function, you'll have to decorate all of the ones you want to span.
Note: Decorators don't magically do everything. It often makes a lot of sense to use `Tracer.update_span` from within your function to add details that are only available inside that same function.
defmodule Foo do
use Spandex.Decorators
@decorate trace()
def bar(a) do
a * 2
end
@decorate trace(service: "ecto", type: "sql")
def databaz(a) do
a * 3
end
end
"""
# Default tracer, resolved at COMPILE time from the :spandex application env;
# changing the config later requires recompiling this module. Can be
# overridden per decoration via the :tracer attribute.
@tracer Application.get_env(:spandex, :decorators)[:tracer]
use Decorator.Define, span: 0, span: 1, trace: 0, trace: 1
def trace(body, context) do
trace([], body, context)
end
# Wraps the decorated function body in `tracer.trace/2`, naming the trace
# after the attributes' :name or "Module.fun/arity" by default.
def trace(attributes, body, context) do
name = Keyword.get(attributes, :name, default_name(context))
tracer = Keyword.get(attributes, :tracer, @tracer)
attributes = Keyword.delete(attributes, :tracer)
quote do
require unquote(tracer)
unquote(tracer).trace unquote(name), unquote(attributes) do
unquote(body)
end
end
end
def span(body, context) do
span([], body, context)
end
# Same as trace/3 but emits a span; additionally defaults :resource to the
# span name when the caller did not set one.
def span(attributes, body, context) do
name = Keyword.get(attributes, :name, default_name(context))
tracer = Keyword.get(attributes, :tracer, @tracer)
attributes =
attributes
|> Keyword.delete(:tracer)
|> Keyword.put_new(:resource, name)
quote do
require unquote(tracer)
unquote(tracer).span unquote(name), unquote(attributes) do
unquote(body)
end
end
end
# Builds the default "Module.fun/arity" name from the decorator context,
# stripping the "Elixir." prefix for readability.
defp default_name(%{module: module, name: function, arity: arity}) do
module =
module
|> Atom.to_string()
|> String.trim_leading("Elixir.")
"#{module}.#{function}/#{arity}"
end
end
end
|
lib/decorators.ex
| 0.658198 | 0.512693 |
decorators.ex
|
starcoder
|
defmodule Servy.ServicesSupervisor do
  use Supervisor

  @doc """
  Starts the supervisor for Servy's long-running service processes.

  Restart strategy reference (the strategy is passed to `Supervisor.init/2`):
    * `:one_for_one` - only the terminated child is restarted; siblings are
      untouched.
    * `:one_for_all` - any child terminating causes all children to be
      terminated and restarted together. Use when children's fates are tied.
    * `:rest_for_one` - the terminated child and every child started after it
      are terminated, then all of them are restarted.
    * `:simple_one_for_one` - a single child spec used as a recipe for
      dynamically attached children (e.g. pools of similar workers).

  Additional options: `:max_restarts` (default 3) caps restarts within the
  `:max_seconds` window (default 5), preventing infinite restart loops.
  """
  def start_link(_opts) do
    IO.puts("Starting the services supervisor...")
    Supervisor.start_link(__MODULE__, :ok, name: __MODULE__)
  end

  def init(:ok) do
    service_specs = [
      Servy.PledgeServer,
      {Servy.SensorServer, :timer.minutes(60)},
      Servy.FourOhFourCounter
    ]

    # Alternative: [strategy: :one_for_one, max_restarts: 5, max_seconds: 10]
    sup_opts = [strategy: :one_for_one]
    Supervisor.init(service_specs, sup_opts)
  end
end
|
servy/lib/servy/services_supervisor.ex
| 0.620277 | 0.544196 |
services_supervisor.ex
|
starcoder
|
defmodule JSONPatch.Path do
@moduledoc false
@doc ~S"""
Splits a JSON Pointer (RFC 6901) path into its components.
Path keys are converted to integers if possible, otherwise
remaining strings.
Example:
iex> JSONPatch.Path.split_path("/a/b/22/c")
["a", "b", 22, "c"]
iex> JSONPatch.Path.split_path("")
[]
"""
@spec split_path(String.t()) :: [String.t() | non_neg_integer]
def split_path(path)
def split_path(""), do: []
def split_path(path) do
path
|> String.replace_leading("/", "")
|> String.split("/")
|> Enum.map(&convert_number/1)
end
## Converts string-formatted integers back to integers.
## Returns other inputs unchanged.
defp convert_number("0"), do: 0
defp convert_number(str) do
# array indices with leading zeros are invalid, so don't convert them
if String.match?(str, ~r"^[1-9]+[0-9]*$") do
String.to_integer(str)
else
# ~ escape handling per RFC 6901
str |> String.replace("~1", "/") |> String.replace("~0", "~")
end
end
@doc ~S"""
Traverses `data` according to the given `path`, returning `{:ok, value}`
if a value was found at that path, or `{:error, reason}` otherwise.
"""
@spec get_value_at_path(JSONPatch.json_document(), String.t()) :: JSONPatch.return_value()
def get_value_at_path(data, path) when is_binary(path) do
value_at_path(data, split_path(path))
end
@spec value_at_path(JSONPatch.json_document(), [String.t()]) :: JSONPatch.return_value()
defp value_at_path(data, []), do: {:ok, data}
defp value_at_path(data, [key | rest]) when is_number(key) and is_list(data) do
case Enum.at(data, key, :missing) do
:missing -> {:error, :path_error, "out-of-bounds index #{key}"}
value -> value_at_path(value, rest)
end
end
defp value_at_path(data, ["-" | rest]) when is_list(data) do
case Enum.count(data) do
0 -> {:error, :path_error, "can't use index '-' with empty array"}
c -> value_at_path(data, [c-1 | rest])
end
end
defp value_at_path(data, [key | _rest]) when is_list(data) do
{:error, :path_error, "can't index into array with string #{key}"}
end
defp value_at_path(%{} = data, [key | rest]) do
case Map.get(data, to_string(key), :missing) do
:missing -> {:error, :path_error, "missing key #{key}"}
value -> value_at_path(value, rest)
end
end
defp value_at_path(data, _) do
{:error, :path_error, "can't index into value #{data}"}
end
@doc ~S"""
Attempts to remove the value at the given path. Returns the updated
`{:ok, data}`, otherwise `{:error, reason}.
Examples:
iex> %{"a" => %{"b" => 1, "c" => 2}} |> JSONPatch.Path.remove_value_at_path("/a/b")
{:ok, %{"a" => %{"c" => 2}}}
iex> %{"a" => [1, 2, 3, 4]} |> JSONPatch.Path.remove_value_at_path("/a/2")
{:ok, %{"a" => [1, 2, 4]}}
iex> %{"a" => [1, 2, %{"c" => 3}, 4]} |> JSONPatch.Path.remove_value_at_path("/a/2/c")
{:ok, %{"a" => [1, 2, %{}, 4]}}
"""
@spec remove_value_at_path(JSONPatch.json_document(), String.t()) :: JSONPatch.return_value()
def remove_value_at_path(data, path) do
remove_at_path(data, split_path(path))
end
@spec remove_at_path(JSONPatch.json_document(), [String.t()]) ::
JSONPatch.return_value() | {:ok, :removed}
defp remove_at_path(_data, []), do: {:ok, :removed}
defp remove_at_path(data, [key | rest]) when is_list(data) and is_number(key) do
if key >= Enum.count(data) do
{:error, :path_error, "out-of-bounds index #{key}"}
else
case remove_at_path(Enum.at(data, key), rest) do
{:ok, :removed} -> {:ok, List.delete_at(data, key)}
{:ok, value} -> {:ok, List.replace_at(data, key, value)}
err -> err
end
end
end
defp remove_at_path(data, ["-" | rest]) when is_list(data) do
case Enum.count(data) do
0 -> {:error, :path_error, "can't use index '-' with empty array"}
c -> remove_at_path(data, [c-1 | rest])
end
end
defp remove_at_path(data, [key | _rest]) when is_list(data) do
{:error, :path_error, "can't index into array with string #{key}"}
end
defp remove_at_path(%{} = data, [key | rest]) do
keystr = to_string(key)
if !Map.has_key?(data, keystr) do
{:error, :path_error, "missing key #{keystr}"}
else
case remove_at_path(data[keystr], rest) do
{:ok, :removed} -> {:ok, Map.delete(data, keystr)}
{:ok, value} -> {:ok, Map.put(data, keystr, value)}
err -> err
end
end
end
defp remove_at_path(data, _) do
{:error, :path_error, "can't index into value #{data}"}
end
@doc ~S"""
Attempts to add the value at the given path. Returns the updated
`{:ok, data}`, otherwise `{:error, reason}`.

Examples:

    iex> %{"a" => %{"c" => 2}} |> JSONPatch.Path.add_value_at_path("/a/b", 1)
    {:ok, %{"a" => %{"c" => 2, "b" => 1}}}

    iex> %{"a" => [1, 2, 3, 4]} |> JSONPatch.Path.add_value_at_path("/a/2", "woot")
    {:ok, %{"a" => [1, 2, "woot", 3, 4]}}
"""
@spec add_value_at_path(JSONPatch.json_document(), String.t(), JSONPatch.json_encodable()) ::
        JSONPatch.return_value()
def add_value_at_path(data, path, value) do
  add_at_path(data, split_path(path), value)
end
@spec add_at_path(JSONPatch.json_document(), [String.t()], JSONPatch.json_encodable()) ::
        JSONPatch.return_value()

# Exhausted path: the value replaces whatever is here.
defp add_at_path(_data, [], value), do: {:ok, value}

# Numeric index into a list. Inserting at exactly Enum.count(list) (i.e.
# appending) is allowed; anything beyond is out of bounds.
defp add_at_path(list, [index | rest], value) when is_list(list) and is_number(index) do
  cond do
    index > Enum.count(list) ->
      {:error, :path_error, "out-of-bounds index #{index}"}

    rest == [] ->
      {:ok, List.insert_at(list, index, value)}

    true ->
      case add_at_path(Enum.at(list, index), rest, value) do
        {:ok, updated} -> {:ok, List.replace_at(list, index, updated)}
        error -> error
      end
  end
end

# "-" as the final segment appends to the list.
defp add_at_path(list, ["-"], value) when is_list(list) do
  {:ok, list ++ [value]}
end

# Any other (non-numeric) key cannot index a list.
defp add_at_path(list, [key | _rest], _value) when is_list(list) do
  {:error, :path_error, "can't index into array with string #{key}"}
end

# String key into a map: set directly at the leaf, otherwise recurse.
defp add_at_path(%{} = map, [key | rest], value) do
  keystr = to_string(key)

  if rest == [] do
    {:ok, Map.put(map, keystr, value)}
  else
    case add_at_path(map[keystr], rest, value) do
      {:ok, updated} -> {:ok, Map.put(map, keystr, updated)}
      error -> error
    end
  end
end

# Scalars cannot be indexed into.
defp add_at_path(data, _, _) do
  {:error, :path_error, "can't index into value #{data}"}
end
end
|
lib/json_patch/path.ex
| 0.87444 | 0.533884 |
path.ex
|
starcoder
|
defmodule ExAlsa do
  @moduledoc """
  A NIF-backed library for interfacing with ALSA.
  """

  @typedoc "Sample rates allowed"
  @type rates() :: 44100 | 48000 | 96000 | 192_000

  # Options accepted by `set_params/2`; see that function's doc for the
  # meaning of each key.
  @type options :: %{
          channels: pos_integer(),
          rate: rates(),
          period_size: pos_integer(),
          buffer_size: pos_integer(),
          periods: pos_integer(),
          start_threshold: pos_integer()
        }

  # Opaque reference produced by the NIF and threaded through every call.
  @type handle :: any()

  @doc false
  # Loads the compiled NIF from this application's priv directory. Invoked
  # automatically when the module is loaded (@on_load); a failed match on
  # :ok aborts module loading.
  @on_load :load_nifs
  def load_nifs do
    :ok =
      :code.priv_dir(:ex_alsa)
      |> Path.join("ex_alsa")
      |> :erlang.load_nif(0)
  end

  @doc """
  Creates a handler which opens an audio interface to the sound card and device.
  """
  @spec open_handle(String.t()) :: handle()
  def open_handle(device) do
    # The NIF expects a charlist, not an Elixir binary.
    _open_handle(String.to_charlist(device))
  end

  @doc """
  Pass parameters and attempt to set them in ALSA. Will set both HW params and
  SW params. Often what you pass will not be what was set. This is dependent on
  limitations set by your soundcard.

  ## Options

  Depending on what you set here, it will change the structure of the payload
  you `write/2`. So pay attention.

  * `channels` - a representation of sound coming from or going to a single point
    e.g. A single microphone can produce one channel of audio and a single
    speaker can receive one channel of audio. Headphones would receive 2 channels of sound. [Wildlife Acoustics](https://www.wildlifeacoustics.com/resources/faqs/what-is-an-audio-channel)
  * `rate` - The number of [frames](TODO) that are inputted or outputted per second (Hz).
  * `period_size` - The number of [frames](TODO) inputted/outputted before the sound card
    checks for more. This is typically buffer_size / periods. It will often be overwritten by what your sound card declares.
  * `periods` - The number of periods in a buffer.
  * `buffer_size` - The number of [frames](TODO) buffered.
  * `start_threshold` - The number of initial frames played or captured in order to begin.
  """
  @spec set_params(handle(), options()) :: handle()
  def set_params(handle, options) do
    channels = Map.get(options, :channels, 1)
    # :rate is mandatory — raises KeyError if absent.
    rate = Map.fetch!(options, :rate)
    # NOTE(review): no default here — if :period_size is omitted AND
    # :buffer_size is omitted, `periods * period_size` below raises
    # ArithmeticError on nil. Confirm whether :period_size is required.
    period_size = Map.get(options, :period_size)
    periods = Map.get(options, :periods, 2)
    buffer_size = Map.get(options, :buffer_size, periods * period_size)
    start_threshold = Map.get(options, :start_threshold, buffer_size * 2)

    _set_params(
      handle,
      channels,
      rate,
      period_size,
      periods,
      buffer_size,
      start_threshold
    )
  end

  @doc """
  Writes to the soundcard. ExAlsa NIF uses the synchronous `snd_pcm_writei`.

  `write` will prevent overruns (when more frames are sent than what's available
  in the buffer), by dismissing them and returning the # of frames available in
  the buffer. It will not prevent underruns (sending too little frames). See the
  tests for an example that prevents both overruns and most underruns.
  """
  @spec write(handle(), charlist()) :: {:error, integer()} | {:ok, integer(), integer()}
  def write(_handle, _frames) do
    # NIF stub: replaced at load time; raises only if the NIF failed to load.
    :erlang.nif_error(:not_loaded)
  end

  # NIF stub: replaced at load time; raises only if the NIF failed to load.
  @spec _open_handle(charlist()) :: handle()
  def _open_handle(_device) do
    :erlang.nif_error(:not_loaded)
  end

  # NIF stub: replaced at load time; raises only if the NIF failed to load.
  @spec _set_params(
          handle(),
          pos_integer(),
          rates(),
          pos_integer(),
          pos_integer(),
          pos_integer(),
          pos_integer()
        ) :: handle()
  def _set_params(
        _handle,
        _channels,
        _rate,
        _period_size,
        _periods,
        _buffer_size,
        _start_threshold
      ) do
    :erlang.nif_error(:not_loaded)
  end
end
|
lib/ex_alsa.ex
| 0.804252 | 0.525064 |
ex_alsa.ex
|
starcoder
|
defmodule EarmarkParser.Enum.Ext do
  @moduledoc ~S"""
  Some extensions of Enum functions
  """

  @doc ~S"""
  `reduce_with_end` works like `Enum.reduce` for lists, except that the reducer is
  invoked with `{:element, element}` and the accumulator for each list element, and
  then exactly once more with `:end` and the final accumulator.

      iex(1)> reducer =
      ...(1)>  fn {:element, nil}, {partial, result} -> {[], [Enum.sum(partial)|result]}
      ...(1)>     {:element, val}, {partial, result} -> {[val|partial], result}
      ...(1)>     :end, {partial, result} -> [Enum.sum(partial)|result] |> Enum.reverse
      ...(1)>  end
      ...(1)> [1, 2, nil, 4, 1, 0, nil, 3, 2, 2]
      ...(1)> |> reduce_with_end({[], []}, reducer)
      [3, 5, 7]

  **N.B.** that in the treatment of `:end` we can change the shape of the accumulator w/o any
  penalty concerning the complexity of the reducer function
  """
  def reduce_with_end(collection, initial_acc, reducer_fn) do
    folded =
      Enum.reduce(collection, initial_acc, fn ele, acc ->
        reducer_fn.({:element, ele}, acc)
      end)

    # Final call lets the reducer reshape the accumulator into the result.
    reducer_fn.(:end, folded)
  end

  @doc ~S"""
  Like map_reduce but reversing the list

      iex(2)> replace_nil_and_count = fn ele, acc ->
      ...(2)>   if ele, do: {ele, acc}, else: {"", acc + 1}
      ...(2)> end
      ...(2)> ["y", nil, "u", nil, nil, "a", nil] |> reverse_map_reduce(0, replace_nil_and_count)
      { ["", "a", "", "", "u", "", "y"], 4 }
  """
  def reverse_map_reduce(list, initial, fun) do
    # Prepending while folding left-to-right yields the mapped list reversed.
    Enum.reduce(list, {[], initial}, fn ele, {mapped, acc} ->
      {new_ele, new_acc} = fun.(ele, acc)
      {[new_ele | mapped], new_acc}
    end)
  end
end
# SPDX-License-Identifier: Apache-2.0
|
lib/earmark_parser/enum/ext.ex
| 0.698227 | 0.607139 |
ext.ex
|
starcoder
|
defmodule Owl.Spinner do
  @moduledoc ~S"""
  A spinner widget.

  Simply run any long-running task using `run/2`:

      Owl.Spinner.run(
        fn -> Process.sleep(5_000) end,
        labels: [ok: "Done", error: "Failed", processing: "Please wait..."]
      )

  Multiple spinners can be run simultaneously:

      long_running_tasks =
        Enum.map([9000, 8000, 4000, 6000], fn delay ->
          fn -> Process.sleep(delay) end
        end)

      long_running_tasks
      |> Task.async_stream(&Owl.Spinner.run/1, timeout: :infinity)
      |> Stream.run()

  Multiline frames are supported as well:

      Owl.Spinner.run(fn -> Process.sleep(5_000) end,
        frames: [
          processing: [
            "ββββββ€β€β€β€βββββ\nβ βββ \\ β\nβ βββ O β\nβ OOO β",
            "ββββββ€β€β€β€βββββ\nβ ββββ β\nβ ββββ β\nβ OOOO β",
            "ββββββ€β€β€β€βββββ\nβ / βββ β\nβ O βββ β\nβ OOO β",
            "ββββββ€β€β€β€βββββ\nβ ββββ β\nβ ββββ β\nβ OOOO β"
          ]
        ]
      )

  ### Where can I get alternative frames?

  * https://github.com/blackode/elixir_cli_spinners/blob/master/lib/cli_spinners/spinners.ex
  * https://www.google.com/search?q=ascii+spinners
  """
  use GenServer, restart: :transient

  @type id :: any()
  @type label :: Owl.Data.t()
  @type frame :: Owl.Data.t()

  # Frames used for the :processing state when the caller supplies none.
  @default_processing_frames ["β ", "β ", "β Ή", "β Έ", "β Ό", "β ΄", "β ¦", "β §", "β ", "β "]

  @doc false
  # Registers the server under the caller-supplied id via Owl.WidgetsRegistry,
  # so update_label/1 and stop/1 can address it by id.
  def start_link(opts) do
    id = Keyword.fetch!(opts, :id)
    GenServer.start_link(__MODULE__, opts, name: {:via, Registry, {Owl.WidgetsRegistry, id}})
  end

  # we define child_spec just to disable doc
  @doc false
  def child_spec(init_arg) do
    super(init_arg)
  end

  @doc """
  Runs a spinner during execution of `process_function` and returns its result.

  The spinner is started, and automatically stopped after the function returns, regardless if there was an error when executing the function.

  It is a wrapper around `start/1` and `stop/1`. The only downside of `run/2` is that it is not possible to update
  a label while `process_function` is executing.

  If function returns `:ok` or `{:ok, value}` then spinner will be stopped with `:ok` resolution.
  If function returns `:error` or `{:error, reason}` then spinner will be stopped with `:error` resolution.

  ## Options

  * `:refresh_every` - period of changing frames. Defaults to `100`.
  * `:frames` - allows to set frames for different states of spinner:
    * `:processing` - list of frames which are rendered until spinner is stopped.
      Defaults to `#{inspect(@default_processing_frames)}`.
    * `:ok` - frame that is rendered when spinner is stopped with `:ok` resolution.
      Defaults to `Owl.Data.tag("β", :green)`.
    * `:error` - frame that is rendered when spinner is stopped with `:error` resolution.
      Defaults to `Owl.Data.tag("β", :red)`.
  * `:labels` - allows to set labels for different states of spinner:
    * `:processing` - label that is rendered during processing. Cannot be changed during execution of `process_function`.
      Defaults to `nil`.
    * `:ok` - label that is rendered when spinner is stopped with `:ok` resolution. A function with arity 1 can be
      passed in order to format a label based on result of `process_function`.
      Defaults to `nil`.
    * `:error` - label that is rendered when spinner is stopped with `:error` resolution. A function with arity 1
      can be passed in order to format a label based on result of `process_function`.
      Defaults to `nil`.

  ## Examples

      Owl.Spinner.run(fn -> Process.sleep(5_000) end)
      => :ok

      Owl.Spinner.run(fn -> Process.sleep(5_000) end,
        frames: [
          # an ASCII fish going back and forth
          processing: [
            ">))'>",
            " >))'>",
            "  >))'>",
            " <'((<",
            "<'((<"
          ]
        ]
      )
      => :ok

      Owl.Spinner.run(
        fn ->
          Process.sleep(5_000)
          {:error, :oops}
        end,
        labels: [
          error: fn reason -> "Failed: \#{inspect(reason)}" end,
          processing: "Processing..."
        ]
      )
      => {:error, :oops}
  """
  @spec run(process_function :: (() -> :ok | :error | {:ok, value} | {:error, reason}),
          refresh_every: non_neg_integer(),
          frames: [ok: frame(), error: frame(), processing: [frame()]],
          labels: [
            ok: label() | (nil | value -> label() | nil) | nil,
            error: label() | (nil | reason -> label()) | nil,
            processing: label() | nil
          ]
        ) :: :ok | :error | {:ok, value} | {:error, reason}
        when value: any, reason: any
  def run(process_function, opts \\ []) do
    # Anonymous id: run/2 never exposes the spinner to outside callers.
    id = make_ref()

    with {:ok, _server_pid} <-
           start(
             opts
             |> Keyword.take([:refresh_every, :live_screen_server, :frames, :labels])
             # Only the :processing label is needed while running; the final
             # :ok/:error labels are resolved below when stopping.
             |> Keyword.update(:labels, [], fn labels -> Keyword.take(labels, [:processing]) end)
             |> Keyword.put(:id, id)
           ) do
      try do
        result = process_function.()
        labels = Keyword.get(opts, :labels, [])

        case result do
          :ok ->
            label = maybe_get_lazy_label(labels, :ok, nil)
            stop(id: id, resolution: :ok, label: label)

          {:ok, value} ->
            label = maybe_get_lazy_label(labels, :ok, value)
            stop(id: id, resolution: :ok, label: label)

          :error ->
            label = maybe_get_lazy_label(labels, :error, nil)
            stop(id: id, resolution: :error, label: label)

          {:error, reason} ->
            label = maybe_get_lazy_label(labels, :error, reason)
            stop(id: id, resolution: :error, label: label)
        end

        result
      rescue
        e ->
          # Ensure the spinner is stopped before propagating the exception.
          stop(id: id, resolution: :error)
          reraise(e, __STACKTRACE__)
      end
    end
  end

  # Resolves a label entry that may be either a literal label or a 1-arity
  # formatter function applied to the process result/reason.
  defp maybe_get_lazy_label(labels, key, value) do
    case labels[key] do
      callback when is_function(callback, 1) -> callback.(value)
      label -> label
    end
  end

  @doc """
  Starts a new spinner.

  Must be stopped manually by calling `stop/1`.

  ## Options

  * `:id` - an id of the spinner. Required.
  * `:refresh_every` - period of changing frames. Defaults to `100`.
  * `:frames` - allows to set frames for different states of spinner:
    * `:processing` - list of frames which are rendered until spinner is stopped.
      Defaults to `#{inspect(@default_processing_frames)}`.
    * `:ok` - frame that is rendered when spinner is stopped with `:ok` resolution.
      Defaults to `Owl.Data.tag("β", :green)`.
    * `:error` - frame that is rendered when spinner is stopped with `:error` resolution.
      Defaults to `Owl.Data.tag("β", :red)`.
  * `:labels` - allows to set labels for different states of spinner:
    * `:processing` - label that is rendered during processing. Can be changed with `update_label/1`.
      Defaults to `nil`.
    * `:ok` - label that is rendered when spinner is stopped with `:ok` resolution.
      Defaults to `nil`.
    * `:error` - label that is rendered when spinner is stopped with `:error` resolution.
      Defaults to `nil`.

  ## Example

      Owl.Spinner.start(id: :my_spinner)
      Process.sleep(1000)
      Owl.Spinner.stop(id: :my_spinner, resolution: :ok)
  """
  @spec start(
          id: id(),
          frames: [ok: frame(), error: frame(), processing: [frame()]],
          labels: [ok: label() | nil, error: label() | nil, processing: label() | nil],
          refresh_every: non_neg_integer()
        ) :: DynamicSupervisor.on_start_child()
  def start(opts) do
    DynamicSupervisor.start_child(Owl.WidgetsSupervisor, {__MODULE__, opts})
  end

  @doc """
  Updates a label of the running spinner.

  Overrides a value that is set for `:processing` state on start.

  ## Options

  * `:id` - an id of the spinner. Required.
  * `:label` - a new value of the label. Required.

  ## Example

      Owl.Spinner.start(id: :my_spinner)
      Owl.Spinner.update_label(id: :my_spinner, label: "Downloading files...")
      Process.sleep(1000)
      Owl.Spinner.update_label(id: :my_spinner, label: "Checking signatures...")
      Process.sleep(1000)
      Owl.Spinner.stop(id: :my_spinner, resolution: :ok, label: "Done")
  """
  @spec update_label(id: id(), label: label()) :: :ok
  def update_label(opts) do
    id = Keyword.fetch!(opts, :id)
    label = Keyword.fetch!(opts, :label)
    GenServer.cast({:via, Registry, {Owl.WidgetsRegistry, id}}, {:update_label, label})
  end

  @doc """
  Stops the spinner.

  ## Options

  * `:id` - an id of the spinner. Required.
  * `:resolution` - an atom `:ok` or `:error`. Determines frame and label for final rendering. Required.
  * `:label` - a label for final rendering. If not set, then values that are set on spinner start will be used.

  ## Example

      Owl.Spinner.stop(id: :my_spinner, resolution: :ok)
  """
  @spec stop(id: id(), resolution: :ok | :error, label: label()) :: :ok
  def stop(opts) do
    id = Keyword.fetch!(opts, :id)
    GenServer.call({:via, Registry, {Owl.WidgetsRegistry, id}}, {:stop, opts})
  end

  @impl true
  def init(opts) do
    frames = Keyword.get(opts, :frames, [])
    processing_frames = Keyword.get(frames, :processing, @default_processing_frames)
    ok_frame = Keyword.get(frames, :ok, Owl.Data.tag("β", :green))
    error_frame = Keyword.get(frames, :error, Owl.Data.tag("β", :red))
    labels = Keyword.get(opts, :labels, [])
    ok_label = Keyword.get(labels, :ok)
    error_label = Keyword.get(labels, :error)
    processing_label = Keyword.get(labels, :processing)
    refresh_every = Keyword.get(opts, :refresh_every, 100)
    live_screen_server = opts[:live_screen_server] || Owl.LiveScreen
    live_screen_ref = make_ref()
    # Render the first frame immediately; the :tick message drives the rest.
    {current_frame, next_processing_frames} = rotate_frames(processing_frames)

    Owl.LiveScreen.add_block(live_screen_server, live_screen_ref,
      state: %{frame: current_frame, label: processing_label},
      render: &render/1
    )

    Process.send_after(self(), :tick, refresh_every)

    {:ok,
     %{
       refresh_every: refresh_every,
       live_screen_ref: live_screen_ref,
       live_screen_server: live_screen_server,
       processing_frames: next_processing_frames,
       ok_frame: ok_frame,
       error_frame: error_frame,
       ok_label: ok_label,
       error_label: error_label,
       processing_label: processing_label
     }}
  end

  @impl true
  def handle_cast({:update_label, new_value}, state) do
    {:noreply, %{state | processing_label: new_value}}
  end

  @impl true
  def handle_call({:stop, opts}, _from, state) do
    # Pick the final frame/label from the requested resolution, allowing the
    # caller's :label option to override what was configured on start.
    {frame, label} =
      case Keyword.fetch!(opts, :resolution) do
        :ok -> {state.ok_frame, Keyword.get(opts, :label, state.ok_label)}
        :error -> {state.error_frame, Keyword.get(opts, :label, state.error_label)}
      end

    Owl.LiveScreen.update(state.live_screen_server, state.live_screen_ref, %{
      frame: frame,
      label: label
    })

    # Block until the final frame is actually rendered before terminating.
    Owl.LiveScreen.await_render(state.live_screen_server)
    {:stop, :normal, :ok, state}
  end

  @impl true
  def handle_info(:tick, state) do
    {current_frame, next_processing_frames} = rotate_frames(state.processing_frames)

    Owl.LiveScreen.update(state.live_screen_server, state.live_screen_ref, %{
      frame: current_frame,
      label: state.processing_label
    })

    Process.send_after(self(), :tick, state.refresh_every)
    {:noreply, %{state | processing_frames: next_processing_frames}}
  end

  # Returns the next frame and the frame list cycled by one position.
  defp rotate_frames([head | rest]) do
    {head, rest ++ [head]}
  end

  defp render(%{frame: frame, label: nil}), do: frame
  defp render(%{frame: frame, label: label}), do: [frame, " ", label]
end
|
lib/owl/spinner.ex
| 0.840292 | 0.624837 |
spinner.ex
|
starcoder
|
defmodule Exop.ValidationChecks do
@moduledoc """
Provides low-level validation functions:
* check_type/3
* check_required/3
* check_numericality/3
* check_in/3
* check_not_in/3
* check_format/3
* check_length/3
* check_struct/3
* check_func/3
* check_equals/3
* check_exactly/3
* check_allow_nil/3
"""
@no_check_item :exop_no_check_item
@type check_error :: %{(atom() | String.t()) => String.t()}
@known_types ~w(boolean integer float string tuple map struct list atom module function keyword uuid)a
@doc """
Returns an check_item's value from either a Keyword or a Map by an atom-key.
## Examples
iex> Exop.ValidationChecks.get_check_item(%{a: 1, b: 2}, :a)
1
iex> Exop.ValidationChecks.get_check_item([a: 1, b: 2], :b)
2
iex> Exop.ValidationChecks.get_check_item(%{a: 1, b: 2}, :c)
nil
"""
@spec get_check_item(Keyword.t() | map(), atom() | String.t()) :: any() | nil
def get_check_item(check_items, item_name) when is_map(check_items) do
Map.get(check_items, item_name)
end
def get_check_item(check_items, item_name) when is_list(check_items) do
Keyword.get(check_items, item_name)
end
def get_check_item(_check_items, _item), do: nil
@doc """
Checks whether a check_item has been provided.
Returns a boolean.
## Examples
iex> Exop.ValidationChecks.check_item_present?(%{a: 1, b: 2}, :a)
true
iex> Exop.ValidationChecks.check_item_present?([a: 1, b: 2], :b)
true
iex> Exop.ValidationChecks.check_item_present?(%{a: 1, b: 2}, :c)
false
iex> Exop.ValidationChecks.check_item_present?(%{a: 1, b: nil}, :b)
true
"""
@spec check_item_present?(Keyword.t() | map(), atom() | String.t()) :: boolean()
def check_item_present?(check_items, item_name) when is_map(check_items) do
Map.get(check_items, item_name, @no_check_item) != @no_check_item
end
def check_item_present?(check_items, item_name) when is_list(check_items) do
Keyword.get(check_items, item_name, @no_check_item) != @no_check_item
end
def check_item_present?(_check_items, _item), do: false
@doc """
Checks if an item_name presents in params if its required (true).
## Examples
iex> Exop.ValidationChecks.check_required(%{}, :some_item, false)
true
iex> Exop.ValidationChecks.check_required([a: 1, b: 2], :a, true)
true
iex> Exop.ValidationChecks.check_required(%{a: 1, b: 2}, :b, true)
true
"""
@spec check_required(Keyword.t() | map(), atom() | String.t(), boolean) :: true | check_error
def check_required(_check_items, _item, false), do: true
def check_required(check_items, item_name, true) do
check_item_present?(check_items, item_name) || %{item_name => "is required"}
end
@doc """
Checks the type of an item_name.
## Examples
iex> Exop.ValidationChecks.check_type(%{a: 1}, :a, :integer)
true
iex> Exop.ValidationChecks.check_type(%{a: "1"}, :a, :string)
true
iex> Exop.ValidationChecks.check_type(%{a: nil}, :a, :string)
%{:a => "has wrong type"}
"""
@spec check_type(Keyword.t() | map(), atom() | String.t(), atom()) :: true | check_error
def check_type(check_items, item_name, check) do
if check_item_present?(check_items, item_name) do
check_item = get_check_item(check_items, item_name)
if Enum.member?(@known_types, check) do
do_check_type(check_item, check) || %{item_name => "has wrong type"}
else
true
end
else
true
end
end
defp do_check_type(check_item, :boolean) when is_boolean(check_item), do: true
defp do_check_type(check_item, :integer) when is_integer(check_item), do: true
defp do_check_type(check_item, :float) when is_float(check_item), do: true
defp do_check_type(check_item, :string) when is_binary(check_item), do: true
defp do_check_type(check_item, :tuple) when is_tuple(check_item), do: true
defp do_check_type(%_{} = _check_item, :struct) do
IO.warn("type check with :struct is deprecated, please use :map instead")
true
end
defp do_check_type(_check_item, :struct) do
IO.warn("type check with :struct is deprecated, please use :map instead")
false
end
defp do_check_type(check_item, :map) when is_map(check_item), do: true
defp do_check_type(check_item, :list) when is_list(check_item), do: true
defp do_check_type(check_item, :atom) when is_atom(check_item), do: true
defp do_check_type(check_item, :function) when is_function(check_item), do: true
defp do_check_type([] = _check_item, :keyword), do: true
defp do_check_type([{atom, _} | _] = _check_item, :keyword) when is_atom(atom), do: true
defp do_check_type(check_item, :module) when is_atom(check_item) do
Code.ensure_loaded?(check_item)
end
defp do_check_type(check_item, :uuid) when is_binary(check_item), do: validate_uuid(check_item)
defp do_check_type(_, _), do: false
@doc """
Checks an item_name over numericality constraints.
## Examples
iex> Exop.ValidationChecks.check_numericality(%{a: 3}, :a, %{ equal_to: 3 })
true
iex> Exop.ValidationChecks.check_numericality(%{a: 5}, :a, %{ greater_than_or_equal_to: 3 })
true
iex> Exop.ValidationChecks.check_numericality(%{a: 3}, :a, %{ less_than_or_equal_to: 3 })
true
"""
@spec check_numericality(Keyword.t() | map(), atom() | String.t(), map()) :: true | check_error
def check_numericality(check_items, item_name, checks) do
if check_item_present?(check_items, item_name) do
check_item = get_check_item(check_items, item_name)
cond do
is_number(check_item) ->
result = checks |> Enum.map(&check_number(check_item, item_name, &1))
if Enum.all?(result, &(&1 == true)), do: true, else: result
true ->
%{item_name => "not a number"}
end
else
true
end
end
@spec check_number(number, atom() | String.t(), {atom, number}) :: boolean
defp check_number(number, item_name, {:equal_to, check_value}) do
if number == check_value, do: true, else: %{item_name => "must be equal to #{check_value}"}
end
defp check_number(number, item_name, {:eq, check_value}) do
check_number(number, item_name, {:equal_to, check_value})
end
defp check_number(number, item_name, {:equals, check_value}) do
check_number(number, item_name, {:equal_to, check_value})
end
defp check_number(number, item_name, {:is, check_value}) do
check_number(number, item_name, {:equal_to, check_value})
end
defp check_number(number, item_name, {:greater_than, check_value}) do
if number > check_value, do: true, else: %{item_name => "must be greater than #{check_value}"}
end
defp check_number(number, item_name, {:gt, check_value}) do
check_number(number, item_name, {:greater_than, check_value})
end
defp check_number(number, item_name, {:greater_than_or_equal_to, check_value}) do
if number >= check_value,
do: true,
else: %{item_name => "must be greater than or equal to #{check_value}"}
end
defp check_number(number, item_name, {:min, check_value}) do
check_number(number, item_name, {:greater_than_or_equal_to, check_value})
end
defp check_number(number, item_name, {:gte, check_value}) do
check_number(number, item_name, {:greater_than_or_equal_to, check_value})
end
defp check_number(number, item_name, {:less_than, check_value}) do
if number < check_value, do: true, else: %{item_name => "must be less than #{check_value}"}
end
defp check_number(number, item_name, {:lt, check_value}) do
check_number(number, item_name, {:less_than, check_value})
end
defp check_number(number, item_name, {:less_than_or_equal_to, check_value}) do
if number <= check_value,
do: true,
else: %{item_name => "must be less than or equal to #{check_value}"}
end
defp check_number(number, item_name, {:lte, check_value}) do
check_number(number, item_name, {:less_than_or_equal_to, check_value})
end
defp check_number(number, item_name, {:max, check_value}) do
check_number(number, item_name, {:less_than_or_equal_to, check_value})
end
defp check_number(_number, _item_name, _), do: true
@doc """
Checks whether an item_name is a memeber of a list.
## Examples
iex> Exop.ValidationChecks.check_in(%{a: 1}, :a, [1, 2, 3])
true
"""
@spec check_in(Keyword.t() | map(), atom() | String.t(), list()) :: true | check_error
def check_in(check_items, item_name, check_list) when is_list(check_list) do
check_item = get_check_item(check_items, item_name)
if Enum.member?(check_list, check_item) do
true
else
%{item_name => "must be one of #{inspect(check_list)}"}
end
end
def check_in(_check_items, _item_name, _check_list), do: true
@doc """
Checks whether an item_name is not a memeber of a list.
## Examples
iex> Exop.ValidationChecks.check_not_in(%{a: 4}, :a, [1, 2, 3])
true
"""
@spec check_not_in(Keyword.t() | map(), atom() | String.t(), list()) :: true | check_error
def check_not_in(check_items, item_name, check_list) when is_list(check_list) do
check_item = get_check_item(check_items, item_name)
if Enum.member?(check_list, check_item) do
%{item_name => "must not be included in #{inspect(check_list)}"}
else
true
end
end
def check_not_in(_check_items, _item_name, _check_list), do: true
@doc """
Checks whether an item_name conforms the given format.
## Examples
iex> Exop.ValidationChecks.check_format(%{a: "bar"}, :a, ~r/bar/)
true
"""
@spec check_format(Keyword.t() | map(), atom() | String.t(), Regex.t()) :: true | check_error
def check_format(check_items, item_name, check) do
check_item = get_check_item(check_items, item_name)
if is_binary(check_item) do
if Regex.match?(check, check_item) do
true
else
%{item_name => "has invalid format"}
end
else
true
end
end
@doc """
The alias for `check_format/3`.
Checks whether an item_name conforms the given format.
## Examples
iex> Exop.ValidationChecks.check_regex(%{a: "bar"}, :a, ~r/bar/)
true
"""
@spec check_regex(Keyword.t() | map(), atom() | String.t(), Regex.t()) :: true | check_error
def check_regex(check_items, item_name, check) do
check_format(check_items, item_name, check)
end
@doc """
Checks an item_name over length constraints.
## Examples
iex> Exop.ValidationChecks.check_length(%{a: "123"}, :a, %{min: 0})
[true]
iex> Exop.ValidationChecks.check_length(%{a: ~w(1 2 3)}, :a, %{in: 2..4})
[true]
iex> Exop.ValidationChecks.check_length(%{a: ~w(1 2 3)}, :a, %{is: 3, max: 4})
[true, true]
"""
@spec check_length(Keyword.t() | map(), atom() | String.t(), map()) :: true | [check_error]
def check_length(check_items, item_name, checks) do
check_item = get_check_item(check_items, item_name)
actual_length = get_length(check_item)
for {check, check_value} <- checks, into: [] do
case check do
:min -> check_min_length(item_name, actual_length, check_value)
:max -> check_max_length(item_name, actual_length, check_value)
:is -> check_is_length(item_name, actual_length, check_value)
:in -> check_in_length(item_name, actual_length, check_value)
_ -> true
end
end
end
@spec get_length(any) :: pos_integer
defp get_length(param) when is_number(param), do: param
defp get_length(param) when is_list(param), do: length(param)
defp get_length(param) when is_binary(param), do: String.length(param)
defp get_length(param) when is_atom(param), do: param |> Atom.to_string() |> get_length
defp get_length(param) when is_map(param), do: param |> Map.to_list() |> get_length
defp get_length(param) when is_tuple(param), do: tuple_size(param)
defp get_length(_param), do: 0
@spec check_min_length(atom() | String.t(), pos_integer, number) :: true | check_error
defp check_min_length(item_name, actual_length, check_value) do
actual_length >= check_value ||
%{item_name => "length must be greater than or equal to #{check_value}"}
end
@spec check_max_length(atom() | String.t(), pos_integer, number) :: true | check_error
defp check_max_length(item_name, actual_length, check_value) do
actual_length <= check_value ||
%{item_name => "length must be less than or equal to #{check_value}"}
end
@spec check_is_length(atom() | String.t(), pos_integer, number) :: true | check_error
defp check_is_length(item_name, actual_length, check_value) do
actual_length == check_value || %{item_name => "length must be equal to #{check_value}"}
end
@spec check_in_length(atom() | String.t(), pos_integer, Range.t()) :: true | check_error
defp check_in_length(item_name, actual_length, check_value) do
Enum.member?(check_value, actual_length) ||
%{item_name => "length must be in range #{check_value}"}
end
@doc """
Checks whether an item is expected structure.
## Examples
defmodule SomeStruct1, do: defstruct [:a, :b]
defmodule SomeStruct2, do: defstruct [:b, :c]
Exop.ValidationChecks.check_struct(%{a: %SomeStruct1{}}, :a, %SomeStruct1{})
# true
Exop.ValidationChecks.check_struct(%{a: %SomeStruct1{}}, :a, %SomeStruct2{})
# false
"""
@spec check_struct(Keyword.t() | map(), atom() | String.t(), struct()) :: true | check_error
def check_struct(check_items, item_name, check) do
check_items
|> get_check_item(item_name)
|> validate_struct(check, item_name)
end
@doc """
Checks whether an item is valid over custom validation function.
## Examples
iex> Exop.ValidationChecks.check_func(%{a: 1}, :a, fn(_contract, param)-> param > 0 end)
true
iex> Exop.ValidationChecks.check_func(%{a: 1}, :a, fn(_contract, param)-> is_nil(param) end)
%{a: "isn't valid"}
iex> Exop.ValidationChecks.check_func(%{a: -1}, :a, fn(_contract, _param)-> {:error, :my_error} end)
%{a: :my_error}
iex> Exop.ValidationChecks.check_func(%{a: -1, b: 1}, :a, fn(_contract, param_name, param_value)-> {param_name, param_value} == {:a, -1} end)
true
iex> Exop.ValidationChecks.check_func(%{a: -1, b: 1}, :b, fn(_contract, param_name, param_value)-> {param_name, param_value} != {:a, -1} end)
true
"""
@spec check_func(
Keyword.t() | map(),
atom() | String.t(),
(Keyword.t() | map(), any -> true | false)
) :: true | check_error
def check_func(check_items, item_name, check) do
check_item = get_check_item(check_items, item_name)
check_result =
case :erlang.fun_info(check)[:arity] do
1 ->
check.(check_item)
2 ->
check.(check_items, check_item)
_ ->
check.(check_items, item_name, check_item)
end
case check_result do
{:error, msg} -> %{item_name => msg}
false -> %{item_name => "isn't valid"}
_ -> true
end
end
@doc """
Checks whether a parameter's value exactly equals given value (with type equality).
## Examples
iex> Exop.ValidationChecks.check_equals(%{a: 1}, :a, 1)
true
"""
@spec check_equals(Keyword.t() | map(), atom() | String.t(), any()) :: true | check_error
def check_equals(check_items, item_name, check_value) do
check_item = get_check_item(check_items, item_name)
if check_item === check_value do
true
else
%{item_name => "must be equal to #{inspect(check_value)}"}
end
end
@doc """
The alias for `check_equals/3`.
Checks whether a parameter's value exactly equals given value (with type equality).
## Examples
iex> Exop.ValidationChecks.check_exactly(%{a: 1}, :a, 1)
true
"""
@spec check_exactly(Keyword.t() | map(), atom() | String.t(), any()) :: true | check_error
def check_exactly(check_items, item_name, check_value) do
check_equals(check_items, item_name, check_value)
end
# Validates the `allow_nil` option: `true` always passes; with `false`
# the parameter's value must not be nil.
@spec check_allow_nil(Keyword.t() | map(), atom() | String.t(), boolean()) :: true | check_error
def check_allow_nil(_check_items, _item_name, true), do: true

def check_allow_nil(check_items, item_name, false) do
  case get_check_item(check_items, item_name) do
    nil -> %{item_name => "doesn't allow nil"}
    _present -> true
  end
end
# Checks that `item` is a struct of the expected type. The expected type is
# given either as a struct instance (first clause) or as the struct's module
# atom (second clause — the repeated `struct` variable forces equality
# between the item's struct module and the given atom).
#
# Returns `true` on success or a check-error map; the previous
# `:: boolean()` spec did not match the error-map clause.
@spec validate_struct(any(), any(), atom() | String.t()) :: true | check_error
defp validate_struct(%struct{}, %struct{}, _item_name), do: true
defp validate_struct(%struct{}, struct, _item_name) when is_atom(struct), do: true
defp validate_struct(_item, _check, item_name), do: %{item_name => "is not expected struct"}
# Validates the canonical 8-4-4-4-12 hex UUID layout. The binary pattern
# fixes the exact length and the dash positions; each remaining byte is
# passed through `c/1`, which throws `:error` for any non-hex character.
@spec validate_uuid(binary()) :: boolean()
defp validate_uuid(
       <<a1, a2, a3, a4, a5, a6, a7, a8, ?-, b1, b2, b3, b4, ?-, c1, c2, c3, c4, ?-, d1, d2, d3,
         d4, ?-, e1, e2, e3, e4, e5, e6, e7, e8, e9, e10, e11, e12>>
     ) do
  # The rebuilt binary itself is discarded; only whether `c/1` throws matters.
  <<c(a1), c(a2), c(a3), c(a4), c(a5), c(a6), c(a7), c(a8), ?-, c(b1), c(b2), c(b3), c(b4), ?-,
    c(c1), c(c2), c(c3), c(c4), ?-, c(d1), c(d2), c(d3), c(d4), ?-, c(e1), c(e2), c(e3), c(e4),
    c(e5), c(e6), c(e7), c(e8), c(e9), c(e10), c(e11), c(e12)>>
catch
  # Thrown by `c/1` when some character is not a hex digit.
  :error -> false
else
  # Every character normalized without throwing: a well-formed UUID.
  _ -> true
end

# Any binary that does not match the exact UUID shape is invalid.
defp validate_uuid(_), do: false
# Normalizes a single hex character to lowercase; uppercase A-F are shifted
# down by 32 (ASCII case distance). Throws `:error` for any character that
# is not a hex digit — caught by `validate_uuid/1`.
defp c(digit) when digit in ?0..?9, do: digit
defp c(lower) when lower in ?a..?f, do: lower
defp c(upper) when upper in ?A..?F, do: upper + 32
defp c(_), do: throw(:error)
end
|
lib/exop/validation_checks.ex
| 0.877896 | 0.55447 |
validation_checks.ex
|
starcoder
|
defmodule Geo.WKT.Decoder do
  @moduledoc false

  alias Geo.{
    Point,
    PointZ,
    PointM,
    PointZM,
    LineString,
    LineStringZ,
    Polygon,
    PolygonZ,
    MultiPoint,
    MultiPointZ,
    MultiLineString,
    MultiPolygon,
    MultiPolygonZ,
    GeometryCollection
  }

  @doc """
  Takes a WKT string and returns a Geo.geometry struct or list of Geo.geometry
  """
  @spec decode(binary) :: {:ok, Geo.geometry()} | {:error, Exception.t()}
  def decode(wkt) do
    {:ok, decode!(wkt)}
  rescue
    exception ->
      {:error, exception}
  end

  @doc """
  Takes a WKT string and returns a Geo.geometry struct or list of Geo.geometry
  """
  @spec decode!(binary) :: Geo.geometry() | no_return
  def decode!(wkt) do
    # An EWKT string carries an "SRID=nnnn;" prefix separated by a semicolon.
    wkt_split = String.split(wkt, ";")

    {srid, actual_wkt} =
      if length(wkt_split) == 2 do
        {hd(wkt_split) |> String.replace("SRID=", "") |> String.to_integer(),
         List.last(wkt_split)}
      else
        {nil, wkt}
      end

    do_decode(actual_wkt, srid)
  end

  # NOTE: clause order matters. The Z/M variants must be matched before their
  # plain counterparts — previously "LINESTRING", "POLYGON", "MULTIPOINT" and
  # "MULTIPOLYGON" appeared first and consumed their "...Z" inputs (e.g.
  # "LINESTRINGZ..." matched "LINESTRING" <> "Z...", leaving a stray "Z" in
  # the coordinates), which made the Z clauses unreachable.
  defp do_decode("POINT ZM" <> coordinates, srid) do
    %PointZM{coordinates: create_point(coordinates), srid: srid}
  end

  defp do_decode("POINT Z" <> coordinates, srid) do
    %PointZ{coordinates: create_point(coordinates), srid: srid}
  end

  defp do_decode("POINT M" <> coordinates, srid) do
    %PointM{coordinates: create_point(coordinates), srid: srid}
  end

  defp do_decode("POINT" <> coordinates, srid) do
    %Point{coordinates: create_point(coordinates), srid: srid}
  end

  defp do_decode("LINESTRINGZ" <> coordinates, srid) do
    %LineStringZ{coordinates: create_line_string(coordinates), srid: srid}
  end

  defp do_decode("LINESTRING" <> coordinates, srid) do
    %LineString{coordinates: create_line_string(coordinates), srid: srid}
  end

  defp do_decode("POLYGONZ" <> coordinates, srid) do
    %PolygonZ{coordinates: create_polygon(coordinates), srid: srid}
  end

  defp do_decode("POLYGON" <> coordinates, srid) do
    %Polygon{coordinates: create_polygon(coordinates), srid: srid}
  end

  defp do_decode("MULTIPOINTZ" <> coordinates, srid) do
    %MultiPointZ{coordinates: create_line_string(coordinates), srid: srid}
  end

  defp do_decode("MULTIPOINT" <> coordinates, srid) do
    %MultiPoint{coordinates: create_line_string(coordinates), srid: srid}
  end

  defp do_decode("MULTILINESTRING" <> coordinates, srid) do
    %MultiLineString{coordinates: create_polygon(coordinates), srid: srid}
  end

  defp do_decode("MULTIPOLYGONZ" <> coordinates, srid) do
    %MultiPolygonZ{coordinates: create_multi_polygon(coordinates), srid: srid}
  end

  defp do_decode("MULTIPOLYGON" <> coordinates, srid) do
    %MultiPolygon{coordinates: create_multi_polygon(coordinates), srid: srid}
  end

  defp do_decode("GEOMETRYCOLLECTION" <> coordinates, srid) do
    # Strip the collection's outer parentheses, then split on commas at
    # parenthesis depth zero only. The previous `String.split(",", parts: 2)`
    # broke collections with more than two member geometries.
    geometries =
      coordinates
      |> String.trim()
      |> remove_outer_parenthesis()
      |> split_top_level()
      |> Enum.map(fn member -> member |> String.trim() |> do_decode(srid) end)

    %GeometryCollection{geometries: geometries, srid: srid}
  end

  # Splits a string on commas that sit at parenthesis depth zero, so nested
  # coordinate lists inside member geometries are never broken apart.
  defp split_top_level(str) do
    {parts, last, _depth} =
      str
      |> String.graphemes()
      |> Enum.reduce({[], "", 0}, fn
        "(", {parts, current, depth} -> {parts, current <> "(", depth + 1}
        ")", {parts, current, depth} -> {parts, current <> ")", depth - 1}
        ",", {parts, current, 0} -> {[current | parts], "", 0}
        char, {parts, current, depth} -> {parts, current <> char, depth}
      end)

    Enum.reverse([last | parts])
  end

  # "(1 2)" -> {1, 2}; also handles Z/M/ZM points with 3 or 4 numbers.
  defp create_point(coordinates) do
    coordinates
    |> String.trim()
    |> remove_outer_parenthesis
    |> String.split()
    |> Enum.map(fn x -> binary_to_number(x) end)
    |> List.to_tuple()
  end

  # "(1 2, 3 4)" -> [{1, 2}, {3, 4}]
  defp create_line_string(coordinates) do
    coordinates
    |> String.trim()
    |> remove_outer_parenthesis
    |> String.split(",")
    |> Enum.map(&create_point(&1))
  end

  # "((...),(...))" -> list of rings, each a list of point tuples.
  defp create_polygon(coordinates) do
    coordinates
    |> String.trim()
    |> remove_outer_parenthesis
    |> String.split(~r{\), *\(})
    |> Enum.map(&repair_str(&1, "(", ")"))
    |> Enum.map(&create_line_string(&1))
  end

  defp create_multi_polygon(coordinates) do
    coordinates
    |> String.trim()
    |> remove_outer_parenthesis
    |> String.split(~r{\)\), *\(\(})
    |> Enum.map(&repair_str(&1, "((", "))"))
    |> Enum.map(&create_polygon(&1))
  end

  # WKT allows both integer and float coordinates.
  defp binary_to_number(binary) do
    if String.contains?(binary, "."), do: String.to_float(binary), else: String.to_integer(binary)
  end

  # Drops only the first "(" and the last ")" so inner parentheses survive.
  defp remove_outer_parenthesis(coordinates) do
    coordinates
    |> String.replace("(", "", global: false)
    |> String.reverse()
    |> String.replace(")", "", global: false)
    |> String.reverse()
  end

  # Re-adds delimiters that the regex splits above consumed.
  def repair_str(str, starts, ends) do
    str = if !String.starts_with?(str, starts), do: starts <> str, else: str
    str = if !String.ends_with?(str, ends), do: str <> ends, else: str
    str
  end
end
|
lib/geo/wkt/decoder.ex
| 0.847163 | 0.617369 |
decoder.ex
|
starcoder
|
defmodule Sigaws do
  @moduledoc """
  A library to sign and verify HTTP requests using AWS Signature V4.
  [](http://inch-ci.org/github/handnot2/sigaws)
  `Sigaws` does not dictate how you compose and send HTTP requests. You can use
  `HTTPoison` or any other HTTP client to do that. The signing functions in this
  library work with the HTTP request information provided and return an
  Elixir map containing signature related parameters/headers. Similarly,
  the verification works with the request information and a provider to
  perform verification.
  Take a look at `plug_sigaws`, a plug built using this library. This plug can be
  added to your API pipeline to protect your API endpoints. It can also be added
  to browser pipelines to protect access to web resources using "presigned"
  URLs with access expiration.
  ### Examples
  #### Signature to be passed as request headers
      url = "http://api.endpoint.host:5000/something?a=10&b=20"
      headers = %{"header1" => "value1", "header2" => "value2"}
      {:ok, %{} = sig_data, _} =
        Sigaws.sign_req(url,
          headers: headers,
          region: "delta-quad",
          service: "my-service",
          access_key: "some-access-key",
          secret: "some-secret")
      {:ok, resp} = HTTPoison.get(url, Map.merge(headers, sig_data))
  #### Signature to be passed in query string ("presigned" URL)
      url = "http://api.endpoint.host:5000/something?a=10&b=20"
      {:ok, %{} = sig_data, _} =
        Sigaws.sign_url(url,
          body: :unsigned,
          expires_in: 5 * 60, # 5 minutes
          region: "delta-quad",
          service: "my-service",
          access_key: "some-access-key",
          secret: "some-secret")
      presigned_url = Sigaws.Util.add_params_to_url(url, sig_data)
  #### Signature Verification
  The verification process relies on a provider module that implements
  `Sigaws.Provider` behavior. The provider is expected to supply the signing
  key based on the information present in the context (primarily the access key).
      {:ok, %Sigaws.Ctxt{} = ctxt} =
        Sigaws.verify(conn.request_path,
          method: conn.method,
          params: conn.query_params,
          headers: conn.req_headers,
          body: get_raw_body(conn),
          provider: SigawsQuickStartProvider)
  Check out this
  [Blog post](https://handnot2.github.io/blog/elixir/aws-signature-sigaws)
  that shows how to protect Phoenix built REST APIs using `plug_sigaws` and
  `sigaws_quickstart_provider` Hex packages.
  """

  alias Sigaws.Ctxt
  alias Sigaws.Signer
  alias Sigaws.Verifier
  alias Sigaws.Util

  @doc """
  Sign the given HTTP request and return the signature data to be treated as request headers.
  | Name | Description |
  |:------ |:----------- |
  | `:method` | A string value -- `GET`, `POST`, `PUT`, etc (defaults to `GET`) |
  | `:params`<br/> | A map of query parameters -- merged with the query string in the given url (defaults to an empty map) |
  | `:headers` | A map of request headers (defaults to an empty map) |
  | `:body`<br/> | A string value (use appropriate encoder) or `:unsigned` or `{:content_hash, hash}` (defaults to an empty string) |
  | `:signed_at`<br/> | `DateTime` in UTC or a string in the form `YYYYMMDDTHHmmSSZ` (defaults to current time in UTC) |
  | `:expires_in` | Optional expiration in seconds since the signing time |
  | `:region` | A string value |
  | `:service` | A string value |
  | `:access_key`<br/> | Access key ID used for signing (defaults to `AWS_ACCESS_KEY_ID` environment variable) |
  | `:signing_key` | A signing key can be provided instead of a secret key |
  | `:secret`<br/> | Used when signing key is not provided (defaults to `AWS_SECRET_ACCESS_KEY` environment variable) |
  | `:normalize_path` | Refer to Section 5.2 in RFC 3986 (default is `false`) |
  When there are no errors in signing, this function returns: `{:ok, sig_data, info}`
  The signature data returned in `sig_data` map include the following:
  * `X-Amz-Algorithm`
  * `X-Amz-Date`
  * `X-Amz-SignedHeaders`
  * `Authorization`
  The third item `info` is also a map. When the MIX environment is either `:dev`
  or `:test`, this info map contains the canonical request (`c_req`) and the
  string to sign (`sts`) computed during the signature generation. In all other
  MIX environments (including `:prod`) this info will be an empty map.
  | Error Returns |
  |:------------- |
  | `{:error, :invalid_input, _}` |
  | `{:error, :invalid_data, _}` |
  """
  @spec sign_req(binary, keyword) :: {:ok, map, map} | {:error, atom, binary}
  def sign_req(url, additional_named_input) when is_list(additional_named_input) do
    with {:ok, vinput} <- validate_signing_input(url, additional_named_input) do
      Signer.sign_req(vinput)
    end
  end

  @doc """
  Presign the given URL and return the signature data to be treated as query parameters.
  Refer to `sign_req/2` for the named input that can be passed along with the URL.
  The returned `sig_data` should be merged with any existing query parameters in
  the URL while sending the request to the server. (Checkout the examples
  at the top.)
  When there are no errors in signing, this function returns: `{:ok, sig_data, info}`
  The `sig_data` map returned includes the following query parameters:
  * `X-Amz-Algorithm`
  * `X-Amz-Content-Sha256`
  * `X-Amz-Credential`
  * `X-Amz-Date`
  * `X-Amz-SignedHeaders`
  * `X-Amz-Signature`
  """
  @spec sign_url(binary, keyword) :: {:ok, map, map} | {:error, atom, binary}
  def sign_url(url, additional_named_input) when is_list(additional_named_input) do
    with {:ok, vinput} <- validate_signing_input(url, additional_named_input) do
      Signer.sign_url(vinput)
    end
  end

  @doc """
  Verify the signature of the given HTTP request data.
  The request data passed should include the signature information either in
  query parameters (presigned request) or in the request headers. Presence of
  `X-Amz-Credential` or `X-Amz-Signature` in the query parameters leads to
  treatment of the request as a "presigned" request. If not, the signature
  data are expected to be in the `Authorization` and other headers.
  | Name | Description |
  |:------ |:----------- |
  | `:method` | Optional string value -- `GET`, `POST`, `PUT`, etc (defaults to `GET`) |
  | `:query_string` | Optional string value (defaults to empty string) |
  | `:params` | Optional query parameters (defaults to empty map) |
  | `:headers` | Optional request headers (defaults to empty map) |
  | `:body`<br/> | Optional raw body -- not decoded values such as JSON (defaults to empty string) |
  | `:provider` | Module that implements `Sigaws.Provider` behavior -- required |
  | `:normalize_path` | Refer to Section 5.2 in RFC 3986 (default is `false`) |
  Upon successful signature verification this function returns `{:ok, %Sigaws.Ctxt{} = ctxt}`. The returned context `Sigaws.Ctxt` can be used to make further policy
  decisions if desired.
  | Error Returns |
  |:------------- |
  | `{:error, :invalid_input, _}` |
  | `{:error, :invalid_data, _}` |
  | `{:error, :missing_data, _}` |
  | `{:error, :verification_failed, ""}` |
  | `{:error, :mismatched, "X-Amz-Date"}` |
  """
  @spec verify(binary, keyword) :: {:ok, Ctxt.t()} | {:error, atom, binary}
  def verify(req_path, opts) do
    opts_map = Map.new(opts)

    with {:ok, provider} <- provider_opt(opts_map),
         {:ok, method} <- method_opt(opts_map),
         {:ok, qs} <- qs_opt(opts_map),
         {:ok, params} <- qp_opt(opts_map),
         {:ok, headers} <- headers_opt(opts_map),
         {:ok, body} <- body_opt(opts_map) do
      # Query-string parameters and explicitly passed params are merged;
      # explicit params win on key collisions.
      params = qs |> URI.decode_query() |> Map.merge(params)
      headers = headers |> Util.downcase_keys()

      validated_opts = %{
        method: method,
        params: params,
        headers: headers,
        body: body,
        provider: provider
      }

      if Verifier.presigned?(params) do
        Verifier.verify_url(req_path, validated_opts)
      else
        Verifier.verify_req(req_path, validated_opts)
      end
    else
      _error ->
        {:error, :verification_failed, "Signature verification failed"}
    end
  end

  # Validates/normalizes every signing option and resolves the signing key,
  # deriving it from the secret when an explicit signing key is not given.
  @spec validate_signing_input(binary, keyword) :: {:ok, map} | {:error, atom, binary}
  defp validate_signing_input(url, opts) do
    with opts_map = Map.new(opts),
         {:ok, method} <- method_opt(opts_map),
         {:ok, params} <- qp_opt(opts_map),
         {:ok, keyword_params} <- kwp_opt(opts_map),
         {:ok, headers} <- headers_opt(opts_map),
         {:ok, body} <- body_opt(opts_map),
         {:ok, signed_at_amz_dt} <- signed_at_opt(opts_map),
         {:ok, dt} <- Util.parse_amz_dt(signed_at_amz_dt),
         {:ok, rg} <- region_opt(opts_map),
         {:ok, sv} <- service_opt(opts_map),
         {:ok, creds} <- creds_opts(opts_map),
         {:ok, normalize_path} <- normalize_path_opt(opts_map) do
      %URI{path: req_path, query: qs} = uri = URI.parse(url)
      req_path = if req_path, do: req_path, else: "/"
      params = get_params(qs, params, keyword_params)
      headers = headers |> Util.downcase_keys() |> Map.put_new("host", uri_host(uri))

      signing_key =
        case creds do
          %{secret: secret} ->
            {:ok, key} = dt |> DateTime.to_date() |> Util.signing_key(rg, sv, secret)
            key

          %{signing_key: key} ->
            key
        end

      {:ok,
       %{
         req_path: req_path,
         method: method,
         normalize_path: normalize_path,
         params: params,
         headers: headers,
         body: body,
         signed_at: signed_at_amz_dt,
         region: rg,
         service: sv,
         access_key: creds[:access_key],
         signing_key: signing_key
       }}
    end
  end

  # Explicit keyword params, when given, take precedence over the URL query
  # string. Matching `[_ | _]` replaces the O(n) `length(...) > 0` guard.
  defp get_params(_qs, _params, [_ | _] = keyword_params),
    do: keyword_params

  defp get_params(qs, params, _keyword_params),
    do: (qs || "") |> URI.decode_query() |> Map.merge(params)

  # Default ports are omitted from the host header, per HTTP convention.
  defp uri_host(%URI{scheme: "https", host: h, port: 443}), do: h
  defp uri_host(%URI{scheme: "http", host: h, port: 80}), do: h
  defp uri_host(%URI{host: nil}), do: ""
  defp uri_host(%URI{host: h, port: nil}), do: h
  defp uri_host(%URI{host: h, port: p}), do: "#{h}:#{p}"

  @http_methods ["GET", "PUT", "POST", "PATCH", "DELETE", "HEAD", "OPTIONS"]
  @method_error {:error, :invalid_input, "method"}
  defp method_opt(%{method: m}) when is_binary(m) do
    v = String.upcase(m)
    if v in @http_methods, do: {:ok, v}, else: @method_error
  end

  defp method_opt(%{method: _}), do: @method_error
  defp method_opt(_), do: {:ok, "GET"}

  @qs_error {:error, :invalid_input, "query_string"}
  defp qs_opt(%{query_string: nil}), do: {:ok, ""}
  defp qs_opt(%{query_string: q}) when is_binary(q), do: {:ok, q}
  defp qs_opt(%{query_string: _}), do: @qs_error
  defp qs_opt(_), do: {:ok, ""}

  @qp_error {:error, :invalid_input, "params"}
  defp qp_opt(%{params: %{} = p}), do: {:ok, p}
  defp qp_opt(%{params: p}) when is_list(p), do: {:ok, list_to_map(p)}
  defp qp_opt(%{params: _}), do: @qp_error
  defp qp_opt(_), do: {:ok, %{}}

  @kwp_error {:error, :invalid_input, "keyword_params"}
  defp kwp_opt(%{keyword_params: p}) when is_list(p), do: {:ok, p}
  defp kwp_opt(%{keyword_params: _}), do: @kwp_error
  defp kwp_opt(_), do: {:ok, []}

  @headers_error {:error, :invalid_input, "headers"}
  defp headers_opt(%{headers: %{} = h}), do: {:ok, h}
  defp headers_opt(%{headers: h}) when is_list(h), do: {:ok, list_to_map(h)}
  defp headers_opt(%{headers: _}), do: @headers_error
  defp headers_opt(_), do: {:ok, %{}}

  @body_error {:error, :invalid_input, "body"}
  defp body_opt(%{body: :unsigned}), do: {:ok, :unsigned}
  defp body_opt(%{body: {:content_hash, hash}}), do: {:ok, {:content_hash, hash}}
  defp body_opt(%{body: b}) when is_binary(b), do: {:ok, b}
  defp body_opt(%{body: _}), do: @body_error
  defp body_opt(_), do: {:ok, ""}

  @spec signed_at_opt(map) :: {:ok, binary} | {:error, atom, binary}
  @signed_at_error {:error, :invalid_input, "signed_at"}
  defp signed_at_opt(%{signed_at: %DateTime{time_zone: "Etc/UTC"} = dt}) do
    # Sub-second precision is dropped: AWS timestamps are whole seconds.
    {:ok, %DateTime{dt | microsecond: {0, 0}} |> Util.amz_dt_iso()}
  end

  defp signed_at_opt(%{signed_at: s}) when is_binary(s) do
    case Util.parse_amz_dt(s) do
      {:ok, _} -> {:ok, s}
      _ -> @signed_at_error
    end
  end

  defp signed_at_opt(%{signed_at: _}), do: @signed_at_error
  defp signed_at_opt(_), do: {:ok, Util.amz_dt_now() |> Util.amz_dt_iso()}

  @region_error {:error, :invalid_input, "region"}
  defp region_opt(%{region: r}) when is_binary(r), do: {:ok, r}
  defp region_opt(%{region: _}), do: @region_error
  defp region_opt(_), do: {:ok, "us-east-1"}

  @service_error {:error, :invalid_input, "service"}
  defp service_opt(%{service: s}) when is_binary(s), do: {:ok, s}
  defp service_opt(%{service: _}), do: @service_error
  defp service_opt(_), do: {:ok, "s3"}

  @access_key_error {:error, :invalid_input, "access_key"}
  @secret_error {:error, :invalid_input, "secret/signing_key"}
  @spec creds_opts(map) :: {:ok, map} | {:error, term}
  defp creds_opts(%{} = opts_map) do
    # The docs promise an AWS_ACCESS_KEY_ID fallback (mirroring the secret's
    # AWS_SECRET_ACCESS_KEY fallback); previously it was missing from the code.
    ak = opts_map[:access_key] || System.get_env("AWS_ACCESS_KEY_ID")
    sk = opts_map[:signing_key]
    se = opts_map[:secret] || System.get_env("AWS_SECRET_ACCESS_KEY")

    cond do
      is_binary(ak) && is_binary(sk) -> {:ok, %{access_key: ak, signing_key: sk}}
      is_binary(ak) && is_binary(se) -> {:ok, %{access_key: ak, secret: se}}
      !is_binary(ak) -> @access_key_error
      true -> @secret_error
    end
  end

  @provider_error {:error, :invalid_input, "provider"}
  defp provider_opt(%{provider: p}) when p != nil and is_atom(p), do: {:ok, p}
  defp provider_opt(_), do: @provider_error

  @normalize_path_error {:error, :invalid_input, "normalize_path"}
  defp normalize_path_opt(%{normalize_path: v}) when is_boolean(v), do: {:ok, v}
  # The attribute is already a full `{:error, _, _}` tuple; the previous
  # `{:error, @normalize_path_error}` double-wrapped it, breaking the
  # error shape every other *_opt helper returns.
  defp normalize_path_opt(%{normalize_path: _}), do: @normalize_path_error
  defp normalize_path_opt(_), do: {:ok, false}

  # Converts a `[{key, value}]` list to a map, collecting values of
  # duplicated keys into a list (needed for repeated query params/headers).
  @spec list_to_map([{binary, binary}]) :: map
  defp list_to_map(list) do
    collect_values = fn {k, v}, acc ->
      if Map.has_key?(acc, k) do
        Map.put(acc, k, List.wrap(Map.get(acc, k)) ++ List.wrap(v))
      else
        Map.put(acc, k, v)
      end
    end

    list |> Enum.reduce(%{}, collect_values)
  end
end
|
lib/sigaws.ex
| 0.91682 | 0.528473 |
sigaws.ex
|
starcoder
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.