code
stringlengths 114
1.05M
| path
stringlengths 3
312
| quality_prob
float64 0.5
0.99
| learning_prob
float64 0.2
1
| filename
stringlengths 3
168
| kind
stringclasses 1
value |
---|---|---|---|---|---|
defmodule DobbleGenerator.ImageProcessing.Algorithm do
  @moduledoc """
  Generates a valid deck of Dobble (Spot It!) cards from a list of symbols.

  The construction is based on a finite projective plane of prime order `n`:

    * each card carries `n + 1` symbols,
    * a collection of `n^2 + n + 1` symbols is required,
    * `n^2 + n + 1` unique cards can be generated.

  The generated deck satisfies the Dobble requirements:

    1. every card has exactly one symbol in common with every other card;
    2. each card has the same number of symbols;
    3. no symbol appears more than once on a given card;
    4. each card is unique;
    5. every symbol appears on at least one card;
    6. no single symbol is common to all cards when `n > 2`.
  """

  # Supported deck sizes: 13 symbols -> n = 3 (4 symbols per card),
  # 57 symbols -> n = 7 (8 symbols per card).
  @valid_number_of_symbols [13, 57]

  @doc """
  Builds the full deck of cards for the given `symbols`.

  Returns `{:ok, cards}` where `cards` is a list of cards (each a list of
  symbols drawn from `symbols`), or `{:error, :invalid_number_of_symbols}`
  when `length(symbols)` is not 13 or 57.
  """
  @spec execute(list()) :: {:ok, [list()]} | {:error, :invalid_number_of_symbols}
  def execute(symbols) when length(symbols) in @valid_number_of_symbols do
    # The number of symbols on a card has to be a prime number + 1.
    number_of_symbols_on_card =
      case length(symbols) do
        13 -> 4
        57 -> 8
      end

    cards = []

    # The prime order of the underlying projective plane.
    n = number_of_symbols_on_card - 1

    # Total number of cards that can be generated following the Dobble rules,
    # e.g. 7^2 + 7 + 1 = 57. Kept only as documentation of the invariant.
    _number_of_cards = n * n + n + 1

    # Add the first set of n + 1 cards (e.g. 8 cards). Each starts with
    # symbol 1 followed by its own distinct run of n symbols.
    cards =
      for i <- 0..n, reduce: cards do
        cards ->
          # Add a new card holding the first symbol.
          cards = cards ++ [[1]]
          # Add the remaining n symbols to the card (e.g. 7 symbols).
          n1 = n - 1

          for j <- 0..n1, reduce: cards do
            cards -> List.update_at(cards, i, &(&1 ++ [j + 1 + i * n + 1]))
          end
      end

    # Add n further sets of n cards.
    n1 = n + 1

    cards =
      for k <- 2..n1, reduce: cards do
        cards ->
          n1 = n - 1

          for i <- 0..n1, reduce: cards do
            cards ->
              # Append a new card with a single symbol.
              cards = cards ++ [[k]]

              # Add n symbols to the card (e.g. 7 symbols).
              for j <- 0..n1, reduce: cards do
                cards ->
                  val = n + 2 + i + (k + 1) * j
                  # Wrap the value back into the valid symbol range for column j.
                  val = while_value(val, n, j)
                  index = length(cards) - 1
                  List.update_at(cards, index, &(&1 ++ [val]))
              end
          end
      end

    {:ok, assign_symbols_to_cards(cards, symbols)}
  end

  def execute(_symbols), do: {:error, :invalid_number_of_symbols}

  # Repeatedly subtracts n until `val` falls below the upper bound of the
  # symbol range assigned to column `j` (emulates a `while` loop).
  defp while_value(val, n, j) do
    if val >= n + 2 + (j + 1) * n do
      while_value(val - n, n, j)
    else
      val
    end
  end

  # Maps each 1-based card number onto the caller-supplied symbol list.
  defp assign_symbols_to_cards(cards, symbols) do
    for card_numbers <- cards do
      for number <- card_numbers do
        Enum.at(symbols, number - 1)
      end
    end
  end
end
|
lib/dobble_generator/image_processing/algorithm.ex
| 0.77569 | 0.679764 |
algorithm.ex
|
starcoder
|
defmodule Sanbase.Price.Utils do
  @moduledoc """
  Helpers for fetching the last known USD/BTC prices of an asset and for
  converting prices between currencies, falling back to intermediate
  conversions through USD/BTC when direct data is not available.
  """

  import Sanbase.DateTimeUtils, only: [round_datetime: 1]

  # A price within +/- 1.0e-7 of zero is treated as zero so it is never
  # used as a divisor in conversions.
  defguard is_zero(price)
           when is_number(price) and price >= -1.0e-7 and price <= 1.0e-7

  @spec fetch_last_prices_before(String.t(), DateTime.t()) ::
          {number() | nil, number() | nil}
  def fetch_last_prices_before(slug, datetime) do
    # Cached by slug + rounded datetime, so repeated lookups within the same
    # rounding window hit the cache instead of the price store.
    last_record =
      Sanbase.Cache.get_or_store({:last_record_before, slug, round_datetime(datetime)}, fn ->
        Sanbase.Price.last_record_before(slug, datetime)
      end)

    case last_record do
      {:ok, %{price_usd: price_usd, price_btc: price_btc}} ->
        {price_usd, price_btc}

      _ ->
        {nil, nil}
    end
  end

  @doc """
  Converts prices between currencies. Tries intermediate conversions with USD/BTC
  if data for direct conversion is not available.

  The generic `slug` clauses are needed when calling from `convert_amount`.
  There we get a currency code (a ticker that has to be unique) and we get the
  project from that code so we can construct the `ticker_slug` slug name.

  Returns the converted price as a number, or `nil` when no usable price data
  could be found.
  """
  # Converting a slug to itself is the identity conversion.
  def fetch_last_price_before(slug, slug, _timestamp), do: 1.0

  def fetch_last_price_before(currency, "USD", timestamp)
      when currency in ["BTC", "bitcoin"] do
    {price_usd, _price_btc} = fetch_last_prices_before("bitcoin", timestamp)
    price_usd
  end

  def fetch_last_price_before(currency, "USD", timestamp)
      when currency in ["ETH", "ethereum"] do
    {price_usd, _price_btc} = fetch_last_prices_before("ethereum", timestamp)
    price_usd
  end

  def fetch_last_price_before(currency, "BTC", timestamp)
      when currency in ["ETH", "ethereum"] do
    {_price_usd, price_btc} = fetch_last_prices_before("ethereum", timestamp)
    price_btc
  end

  def fetch_last_price_before("USD", "BTC", timestamp) do
    {_price_usd, price_btc} = fetch_last_prices_before("bitcoin", timestamp)

    # Invert the BTC/USD price; guard against nil and (near-)zero divisors.
    case price_btc do
      x when is_nil(x) or is_zero(x) -> nil
      price -> 1 / price
    end
  end

  def fetch_last_price_before(slug, "USD", timestamp) do
    {price_usd, _price_btc} = fetch_last_prices_before(slug, timestamp)

    case price_usd do
      # No direct USD price -- try converting through BTC.
      nil -> fetch_last_price_usd_before_convert_via_btc(slug, timestamp)
      price -> price
    end
  end

  def fetch_last_price_before(slug, "BTC", timestamp) do
    {_price_usd, price_btc} = fetch_last_prices_before(slug, timestamp)

    case price_btc do
      # No direct BTC price -- try converting through USD.
      nil -> fetch_last_price_btc_before_convert_via_usd(slug, timestamp)
      price -> price
    end
  end

  def fetch_last_price_before(slug, "ETH", timestamp) do
    fetch_last_price_before_convert_via_intermediate(
      slug,
      "ethereum",
      "USD",
      timestamp
    )
  end

  # Private functions

  # Converts slug_from -> slug_to through a common intermediate currency:
  # price(from, interm) / price(to, interm). Returns nil when either leg is
  # missing or (near-)zero.
  defp fetch_last_price_before_convert_via_intermediate(
         slug_from,
         slug_to,
         slug_interm,
         timestamp
       ) do
    with price_from_interm <-
           fetch_last_price_before(slug_from, slug_interm, timestamp),
         false <- is_nil(price_from_interm) or is_zero(price_from_interm),
         price_to_interm <-
           fetch_last_price_before(slug_to, slug_interm, timestamp),
         false <- is_nil(price_to_interm) or is_zero(price_to_interm) do
      price_from_interm / price_to_interm
    else
      _ -> nil
    end
  end

  # USD price via BTC: price(slug, BTC) * price(BTC, USD).
  defp fetch_last_price_usd_before_convert_via_btc(slug, timestamp) do
    with {_price_usd, price_btc} <- fetch_last_prices_before(slug, timestamp),
         false <- is_nil(price_btc),
         {price_btc_usd, _price_btc_btc} <- fetch_last_prices_before("bitcoin", timestamp),
         false <- is_nil(price_btc_usd) do
      price_btc * price_btc_usd
    else
      _ -> nil
    end
  end

  # BTC price via USD: price(slug, USD) / price(BTC, USD).
  defp fetch_last_price_btc_before_convert_via_usd(slug, timestamp) do
    with {price_usd, _price_btc} <- fetch_last_prices_before(slug, timestamp),
         false <- is_nil(price_usd) or is_zero(price_usd),
         {price_btc_usd, _price_btc_btc} <-
           fetch_last_prices_before("bitcoin", timestamp),
         false <- is_nil(price_btc_usd) or is_zero(price_btc_usd) do
      price_usd / price_btc_usd
    else
      _ -> nil
    end
  end
end
|
lib/sanbase/prices/utils.ex
| 0.786049 | 0.610715 |
utils.ex
|
starcoder
|
defmodule Robot do
  @moduledoc """
  A simulated robot: a facing direction and an `{x, y}` grid position.
  """
  defstruct direction: :north, position: {0, 0}
end

defmodule RobotSimulator do
  @moduledoc """
  Creates robots and simulates their movement on an infinite grid.
  """

  # Clockwise ordering: rotating right steps forward through this list,
  # rotating left steps backward.
  @directions [:north, :east, :south, :west]

  # Unit movement vector for each direction.
  @deltas %{north: {0, 1}, east: {1, 0}, south: {0, -1}, west: {-1, 0}}

  @doc """
  Create a Robot Simulator given an initial direction and position.

  Valid directions are: `:north`, `:east`, `:south`, `:west`
  """
  @spec create(direction :: atom, position :: {integer, integer}) :: any
  def create(direction \\ nil, position \\ nil)

  def create(direction, {x, y} = position)
      when direction in @directions and is_integer(x) and is_integer(y),
      do: %Robot{direction: direction, position: position}

  def create(direction, _) when direction in @directions, do: {:error, "invalid position"}

  def create(_, {x, y}) when is_integer(x) and is_integer(y), do: {:error, "invalid direction"}

  def create(_, _), do: %Robot{}

  @doc """
  Simulate the robot's movement given a string of instructions.

  Valid instructions are: "R" (turn right), "L", (turn left), and "A" (advance)
  """
  @spec simulate(robot :: any, instructions :: String.t()) :: any
  def simulate(robot, instructions) do
    execute(String.to_charlist(instructions), robot)
  end

  @doc """
  Return the robot's direction.

  Valid directions are: `:north`, `:east`, `:south`, `:west`
  """
  @spec direction(robot :: any) :: atom
  def direction(%{direction: direction}), do: direction

  @doc """
  Return the robot's position.
  """
  @spec position(robot :: any) :: {integer, integer}
  def position(%{position: position}), do: position

  # Walk the instruction charlist, threading the robot through each step.
  def execute([], robot), do: robot
  def execute([?A | rest], robot), do: execute(rest, advance(robot))
  def execute([?L | rest], robot), do: execute(rest, rotate(-1, robot))
  def execute([?R | rest], robot), do: execute(rest, rotate(+1, robot))
  def execute(_, _), do: {:error, "invalid instruction"}

  # Rotate one step left (-1) or right (+1) around the direction ring.
  defp rotate(offset, %{direction: direction} = robot) do
    index = Enum.find_index(@directions, &(&1 == direction))
    new_direction = Enum.at(@directions, Integer.mod(index + offset, length(@directions)))
    %{robot | direction: new_direction}
  end

  # Move one cell forward in the current facing direction.
  defp advance(%Robot{direction: direction, position: {x, y}} = robot) do
    {dx, dy} = Map.fetch!(@deltas, direction)
    %{robot | position: {x + dx, y + dy}}
  end
end
|
lib/robot_simulator.ex
| 0.928668 | 0.895933 |
robot_simulator.ex
|
starcoder
|
defmodule Ppc.Plan do
  @moduledoc """
  Manage product plans.

  ## Reference

  - [Plan docs](https://developer.paypal.com/api/subscriptions/v1/#plans_list)
  """

  alias Ppc.{Client, Common}

  @path "/v1/billing/plans"

  @doc """
  Get list of plans.

  ## Accepted options

  - :params with accepted values
    - :product_id - filter returned list
    - :plan_id - filter by list of plan IDs; max 10 plan IDs.
    - :page_size
    - :page
    - :total_required
    - :mini
    - :full

  ## Returned value

      {:ok, %{
        links: [
          %{ encType: "", href: "", method: "GET", rel: "self" }
        ],
        plans: [ -- list of plans -- ]
        }
      }

  Each plan has a following structure

      %{
        create_time: "2021-08-27T19:29:54Z",
        description: "Basic",
        id: "P-3A536606RU4549013MEUT2MQ",
        links: [
          %{
            encType: "application/json",
            href: "https://api.sandbox.paypal.com/v1/billing/plans/P-3A536606RU4549013MEUT2MQ",
            method: "GET",
            rel: "self"
          }
        ],
        name: "UtilityManager - Basic",
        status: "INACTIVE",
        usage_type: "LICENSED"
      }
  """
  @spec list(keyword) :: any
  def list(opts \\ []) do
    # NOTE(review): only :product_id is encoded into the query string here;
    # the other documented params are presumably handled by Client.get via
    # `opts` -- confirm against Ppc.Client.
    url =
      if Keyword.has_key?(opts, :product_id),
        do: @path <> "?product_id=#{opts[:product_id]}",
        else: @path

    Client.get(url, opts)
  end

  @doc """
  Fetch the details of a single plan by its id.
  """
  @spec details(String.t(), keyword) :: any
  def details(id, opts \\ []) do
    url = @path <> "/#{id}"
    Client.get(url, opts)
  end

  @doc """
  Create a plan with pricing and billing cycles for subscriptions.

  ## Body data

  Fields marked with * are required.

  - \\* product_id
  - \\* name
  - \\* status allowed values `{CREATED|INACTIVE|ACTIVE}`. Subscriptions can be created only for ACTIVE plans.
  - \\* description
  - \\* billing_cycles
  - \\* payment_preferences
  - taxes
  - quantity_supported

  ## Some important rules

  - To create a plan `:product_id` must be valid.
  - For tiered or volume plans `:fixed_price` field must be `nil`.
  - For tiered or volume plans `:quantity_supported` must be always `true`.

  ## Error results

  ```
  {:error, reason}
  ```
  """
  @spec create(map) :: any
  @spec create(map, keyword) :: any
  def create(data, opts \\ []) do
    Client.post(
      @path,
      Ppc.Plan.PlanPrototype.prepare_for_transmission(data),
      headers: Common.construct_headers_for_create(opts)
    )
  end

  @doc """
  Update description, taxes or payment preferences of a plan.

  Only the fields accepted by PayPal's plan PATCH endpoint are considered;
  the current plan is fetched first so that only actual changes are sent as
  patch operations.

  ## Example

      updates = %{
        description: "updated description",
        payment_preferences: %{
          auto_bill_outstanding: true,
          payment_failure_threshold: 2,
          setup_fee: Money.new("EUR", "1.01"),
          setup_fee_failure_action: :cancel
        },
        taxes: %{percentage: "21"}
      }

      Plan.update(account, id, updates)
  """
  @spec update(String.t(), map) :: any
  @spec update(String.t(), map, keyword) :: any
  def update(id, data, opts \\ []) do
    # Only these flattened paths may be patched on an existing plan.
    accepted_fields = [
      "description",
      "payment_preferences.auto_bill_outstanding",
      "taxes.percentage",
      "payment_preferences.payment_failure_threshold",
      "payment_preferences.setup_fee",
      "payment_preferences.setup_fee_failure_action"
    ]

    {:ok, prev} = details(id, opts)

    # Do not flatten money struct/map
    {fee_new, data} = pop_in(data, ["payment_preferences", "setup_fee"])
    {fee_prev, prev} = pop_in(prev, ["payment_preferences", "setup_fee"])

    data =
      Common.flat_keys(data)
      |> Map.put("payment_preferences.setup_fee", Common.to_map_if_struct(fee_new))
      |> Common.normalize_atom_values()

    prev_data =
      Map.take(prev, ["description", "payment_preferences", "taxes"])
      |> Common.flat_keys()
      |> Map.put("payment_preferences.setup_fee", fee_prev)
      |> Map.take(accepted_fields)

    # Diff old vs. new and turn the changed fields into PATCH operations.
    changes =
      Common.extract_field_changes(prev_data, data, accepted_fields)
      |> Common.construct_update_operations()

    case Client.patch(@path <> "/#{id}", changes, opts) do
      {:ok, result} ->
        {:ok, result}

      {:error, reason} ->
        {:error, Map.put_new(reason, "field", "summary")}
    end
  end

  @doc """
  Activate a plan. Subscriptions can be created only for active plans.
  """
  @spec activate(String.t(), keyword) :: any
  def activate(id, opts \\ []) do
    url = @path <> "/#{id}/activate"
    Client.post(url, nil, opts)
  end

  @doc """
  Deactivate a plan.
  """
  @spec deactivate(String.t(), keyword) :: any
  def deactivate(id, opts \\ []) do
    url = @path <> "/#{id}/deactivate"
    Client.post(url, nil, opts)
  end

  @doc """
  Update pricing scheme for each existing billing cycle.

  The full plan structure has the following shape:

      id
      product_id
      status
      ...
      billing_cycles: []

  Where each billing cycle is:

      frequency
      tenure_type
      sequence
      total_cycles
      pricing_scheme
        fixed_price
          value
          currency_code

  But the update object for the pricing scheme has a different shape:

      pricing_schemes: [list of update_pricing_scheme]

  Definition of update_pricing_scheme:

  - \\* billing_cycle_sequence: integer
  - \\* pricing_scheme: object

  Definition of pricing_scheme:

      version: integer
      fixed_price: object
      pricing_model: enum {VOLUME|TIERED}
      tiers: [pricing_tier]
      - create_time: string / r-o
      - update_time: string / r-o

  Definition of pricing_tier:

  - \\* starting_quantity: string
  - ending_quantity: string
  - amount: Money

  ## Some rules

  - Tiers can be added or removed by redefining the complete tiers list, but the range of value
    that those tiers cover in total must be same as the previous one. For example if the old tiers
    defined a price for infinitely large amount and the new tiers defined tiers to max of 200 units,
    that would result in an error.
  - Fixed price is not supported for tiered pricing schemes.
  - It's impossible to add new schemes because frequency of each can't be changed either.
  """
  @spec update_pricing(String.t(), map, keyword) :: any
  def update_pricing(id, data, opts) do
    url = @path <> "/#{id}/update-pricing-schemes"

    case Client.post(url, data, opts) do
      {:ok, result} ->
        {:ok, result}

      {:error, reason} ->
        {:error, Map.put_new(reason, "field", "summary")}
    end
  end
end
|
lib/ppc/plan.ex
| 0.861727 | 0.570989 |
plan.ex
|
starcoder
|
defprotocol Plymio.Vekil do
  @moduledoc ~S"""
  The `Plymio.Vekil` protocol is implemented by a collection -- the *vekil* -- that
  associates *proxies* with *foroms*.

  The *vekil* may be thought of as a dictionary where a
  *proxy* is a `key` and its `value` is a *forom*.

  A *vekil's* *proxies* will usually be homogeneous (e.g. all atoms) but its *foroms* are
  heterogeneous: the *vekil* may use *foroms* of different
  implementations.

  The values returned by the protocol's accessor functions
  (`proxy_fetch/2`, `proxy_get/3`) implement the `Plymio.Vekil.Forom`
  protocol. (Whether the values *stored* by a *vekil* implement
  `Plymio.Vekil.Forom` is an implementation decision.)

  The dictionary may be a `Map`, but that is implementation-specific.

  ## Documentation Terms

  See `Plymio.Fontais` for an explanation of common documentation terms.

  ## Implementation Modules' State

  All implementations of both protocols have these fields in their
  `struct` which can e.g. be pattern matched.

  ### Module State Field: `:protocol_name`

  This field will be set to `Plymio.Vekil` or `Plymio.Vekil.Forom`.

  ### Module State Field: `:protocol_impl`

  This field will be set to the module's name e.g. `Plymio.Vekil.Form`,
  `Plymio.Vekil.Forom.Term`, etc.

  ## Implementation Modules Test Environment

  In the implementation modules' doctests, `VEKILPROT` is
  an alias for `Plymio.Vekil`, `VEKILFORM` for `Plymio.Vekil.Form`,
  `FOROMTERM` for `Plymio.Vekil.Forom.Term` and so on.
  """

  @type opts :: Plymio.Fontais.opts()
  @type error :: Plymio.Fontais.error()
  @type proxy :: any
  @type proxies :: nil | proxy | [proxy]
  @type forom :: any
  @type product :: any
  @type answer :: any

  @doc ~S"""
  `proxy_get/2` takes a *vekil* and *proxies*.

  For each *proxy* in the *proxies*, it checks if the *vekil* contains
  the *proxy* and, if so, appends the *proxy's* *forom* to the
  existing, found *forom*.

  It returns `{:ok, {forom, vekil}}` or `{:error, error}`.
  """
  @spec proxy_get(t, proxies) :: {:ok, {forom, t}} | {:error, error}
  def proxy_get(vekil, proxies)

  @doc ~S"""
  `proxy_get/3` takes a *vekil*, *proxies* and a default and gets the
  *proxies'* *forom* from the *vekil*, using the default for unknown
  *proxies*.

  For each *proxy* in the *proxies*, it checks if the *vekil* contains
  the *proxy* and, if so, appends the *proxy's* *forom* to the
  existing, found *forom*.

  If the *proxy* is not found, the "foromised"
  (`Plymio.Vekil.forom_normalise/2`) default is added to the
  existing, found *forom*.

  It returns `{:ok, {forom, vekil}}` or `{:error, error}`.
  """
  @spec proxy_get(t, proxies, any) :: {:ok, {forom, t}} | {:error, error}
  def proxy_get(vekil, proxies, default)

  @doc ~S"""
  `proxy_fetch/2` takes a *vekil* and *proxies* and fetches the *proxies'* *forom* from the *vekil*.

  For each *proxy* in the *proxies*, it checks if the *vekil* contains
  the *proxy* and, if so, appends the *proxy's* *forom* to the
  existing found *forom*, returning `{:ok, {forom, vekil}}`.

  If any *proxies* are not found, it returns `{:error, error}` where
  `error` will be a `KeyError` whose `key` field will be a list of the
  missing *proxies*.

  For any other error `{:error, error}` is returned.
  """
  @spec proxy_fetch(t, proxies) :: {:ok, {forom, t}} | {:error, error}
  def proxy_fetch(vekil, proxies)

  @doc ~S"""
  `proxy_put/2` takes a *vekil* and a list of `{proxy,forom}` tuples
  and stores the tuples into the *vekil* returning `{:ok, vekil}`.
  """
  @spec proxy_put(t, any) :: {:ok, t} | {:error, error}
  def proxy_put(vekil, tuples)

  @doc ~S"""
  `proxy_put/3` takes a *vekil*, *proxy* and *forom* and stores the
  *proxy* in the *vekil* with the *forom* as its value, returning `{:ok, vekil}`.
  """
  @spec proxy_put(t, proxy, forom) :: {:ok, t} | {:error, error}
  def proxy_put(vekil, proxy, forom)

  @doc ~S"""
  `proxy_delete/2` takes a *vekil* and one or more *proxies* and
  deletes the *proxies* from the *vekil* returning `{:ok, vekil}`.

  Unknown *proxies* are ignored.
  """
  @spec proxy_delete(t, proxies) :: {:ok, t} | {:error, error}
  def proxy_delete(vekil, proxies)

  @doc ~S"""
  `has_proxy?/2` takes a *vekil* and a *proxy* and returns `true` if
  the *vekil* contains the *proxy*, else `false`.
  """
  @spec has_proxy?(t, proxy) :: true | false
  def has_proxy?(vekil, proxy)

  @doc ~S"""
  `forom_normalise/2` takes a *vekil* and a value and "normalises" the
  value into a *forom*, returning `{:ok, {forom, vekil}}`.

  Normalising the value may change the *vekil*.
  """
  @spec forom_normalise(t, any) :: {:ok, {struct, t}} | {:error, error}
  def forom_normalise(vekil, value)

  @doc ~S"""
  `update/2` takes a *vekil* and optional *opts* and updates the
  fields in the *vekil* with the `{field,value}` tuples in the *opts*,
  returning `{:ok, vekil}`.
  """
  @spec update(t, opts) :: {:ok, t} | {:error, error}
  def update(vekil, opts)
end
|
lib/vekil/protocol/vekil.ex
| 0.884252 | 0.652006 |
vekil.ex
|
starcoder
|
defmodule Mix.Tasks.Cssex.Parser do
  use Mix.Task

  require Logger

  @shortdoc "Parses cssex files into css files"

  @moduledoc """
  Task to parse cssex files into css files.

  You can use two types of flags, `--e` (and additionally `--a`) or `--c`.

  For the `--e` flag you use any number of

  `--e path/to_cssex/file.cssex=path/to_css/output.css`

  And if you want those paths to be relative to some application you can pass it with `--a`

  `--a myapp_web`

  arguments to specify each entry and its output file, or a single path to the cssex
  where the output file will be in the same directory, with the same file name but the
  extension cssex replaced by css

  `--e path/to_cssex/file.cssex`

  The `--c` flag is used to indicate an entry in the config of the application, in order
  to read the entry points from there

  `--c myapp_web`

  So it would look something like:

  ```
  mix cssex.parser --e path/to_cssex/file.cssex=path/to_css/output.css
  ```
  """

  @doc """
  Run the parser with `mix cssex.parser`.

  Required arguments:
  ```
  --e /source/path.cssex=/final/path.css
  --e /source/path.cssex
  --e source/path.cssex=final/path.css --a yourapp_web
  --e source/path.cssex --a yourapp_web
  ```

  Or
  ```
  --c yourapp_web
  ```

  Where `yourapp_web` specifies a config under the key `CSSEx`, with a key of
  `:entry_points` composed of tuple pairs of source & destination files.
  """
  def run([]),
    do:
      error(
        "either specify the file paths with --e cssex/file.cssex=output/file.css, and/or the --a application flag or a module from where to load a CSSEx config, with --c myapp_web",
        64
      )

  # Load the entry points from the application's config under the CSSEx key.
  def run(["--c", app_string]) do
    app = String.to_atom(app_string)
    dir = Application.app_dir(app)
    env = Application.get_env(app, CSSEx)

    case not is_nil(env) && CSSEx.make_config(env, dir) do
      %CSSEx{entry_points: [_ | _]} = config ->
        do_run(config)

      _ ->
        # `inspect/1` is required here: `env` is a keyword list (or nil), and
        # interpolating a keyword list directly into a string raises
        # Protocol.UndefinedError, crashing the error path itself.
        error(
          "loading default entry points for app: #{app}. The retrieved config was: #{inspect(env)} - where instead it was expected a keyword list with an :entry_points entry specifying at least one file",
          1
        )
    end
  end

  # Parse the --e (and optional --a) flags into {source, destination} pairs.
  def run(args) do
    {options, _, _} = OptionParser.parse(args, strict: [e: :keep, a: :string])
    {module, opts} = Keyword.pop_first(options, :a, false)

    eps =
      Enum.reduce(opts, [], fn {_, paths}, acc ->
        case String.split(paths, "=", trim: true) do
          [from, to] ->
            [{from, to} | acc]

          [from] ->
            # No explicit destination: swap the .cssex extension for .css.
            [{from, String.replace(from, ".cssex", ".css")} | acc]

          _ ->
            error("invalid paths #{paths}", 64)
        end
      end)

    # When --a is given, resolve paths relative to that application's dir.
    dir =
      case module do
        false ->
          nil

        _ ->
          String.to_atom(module)
          |> Application.app_dir()
      end

    if eps == [] do
      error("no paths given", 64)
    else
      CSSEx.make_config([entry_points: eps], dir)
      |> do_run()
    end
  end

  # Parses every entry point concurrently, logs per-file results, and exits
  # non-zero when any file failed to parse.
  defp do_run(%CSSEx{entry_points: eps}) do
    cwd = File.cwd!()

    tasks =
      for {path, final} <- eps do
        expanded_base = CSSEx.assemble_path(path, cwd)
        expanded_final = CSSEx.assemble_path(final, cwd)

        Task.async(fn ->
          result =
            CSSEx.Parser.parse_file(
              nil,
              Path.dirname(expanded_base),
              Path.basename(expanded_base),
              expanded_final
            )

          {result, expanded_base, expanded_final}
        end)
      end

    # Wait up to 60s for all parser tasks to finish.
    processed = Task.yield_many(tasks, 60_000)

    Enum.each(processed, fn
      {_, {:ok, {{:ok, %{valid?: true}, _}, base, final}}} ->
        ok(base, final)

      {_, {:ok, {{:error, %{error: error}}, _, _}}} ->
        error(error)

      error ->
        error(error)
    end)

    case Enum.all?(processed, fn
           {_, {task_res, {{processed_res, _, _}, _, _}}} ->
             task_res == :ok and processed_res == :ok

           _ ->
             false
         end) do
      true -> :ok
      _ -> exit({:shutdown, 1})
    end
  end

  # Log an error and exit with the given status code.
  defp error(msg, code) do
    error(msg)
    exit({:shutdown, code})
  end

  defp error(msg), do: Logger.error("ERROR :: #{inspect(msg)}")

  defp ok(base, file), do: Logger.info("PROCESSED :: #{base} into #{inspect(file)}")
end
|
lib/mix/tasks/css.parser.ex
| 0.818918 | 0.77586 |
css.parser.ex
|
starcoder
|
defmodule Day01 do
  @moduledoc false

  # Directions are encoded as 0..3: 0 = north, 1 = east, 2 = south, 3 = west.

  @doc "Manhattan distance from the origin after following every instruction."
  def part1(input) do
    {location, _facing} =
      input
      |> parse()
      |> Enum.reduce({{0, 0}, 0}, &step/2)

    manhattan_distance(location)
  end

  @doc "Manhattan distance from the origin to the first location visited twice."
  def part2(input) do
    input
    |> parse()
    |> Enum.reduce_while({{0, 0}, 0, MapSet.new()}, fn {turn, amount}, {location, facing, seen} ->
      update_part2(location, turn(turn, facing), amount, seen)
    end)
    |> manhattan_distance()
  end

  # Apply one command for part 1: turn, then move the full amount.
  defp step({turn, amount}, {location, facing}) do
    facing = turn(turn, facing)
    {move(location, facing, amount), facing}
  end

  defp turn(:left, facing), do: turn_left(facing)
  defp turn(:right, facing), do: turn_right(facing)

  # Walks one block at a time, halting as soon as a location is revisited.
  # The accumulator is double-tagged (`{:cont, ...}` / `{:halt, ...}`) so the
  # value this inner reduce produces can be fed straight back to the outer
  # `Enum.reduce_while/3` in `part2/1`.
  defp update_part2(location, facing, amount, seen) do
    Enum.reduce_while(1..amount, {:cont, {location, facing, seen}}, fn
      _block, {:cont, {location, facing, seen}} ->
        location = move(location, facing, 1)

        if MapSet.member?(seen, location) do
          {:halt, {:halt, location}}
        else
          {:cont, {:cont, {location, facing, MapSet.put(seen, location)}}}
        end
    end)
  end

  defp manhattan_distance({x, y}), do: abs(x) + abs(y)

  defp turn_left(direction), do: Integer.mod(direction - 1, 4)
  defp turn_right(direction), do: Integer.mod(direction + 1, 4)

  # Advance `amount` blocks in the given direction.
  defp move({x, y}, direction, amount) do
    case direction do
      0 -> {x, y + amount}
      1 -> {x + amount, y}
      2 -> {x, y - amount}
      3 -> {x - amount, y}
    end
  end

  # Input is a single comma-separated line like "R2, L3".
  defp parse([line]) do
    line
    |> String.split(", ")
    |> Enum.map(&parse_command/1)
  end

  defp parse_command("L" <> amount), do: {:left, String.to_integer(amount)}
  defp parse_command("R" <> amount), do: {:right, String.to_integer(amount)}
end
|
day01/lib/day01.ex
| 0.662796 | 0.547101 |
day01.ex
|
starcoder
|
defmodule Satori.Stream do
  @moduledoc """
  The Stream context: CRUD operations for `Satori.Stream.Observation`.
  """

  import Ecto.Query, warn: false

  alias Satori.Repo
  alias Satori.Stream.Observation

  @doc """
  Returns the list of observation.

  ## Examples

      iex> list_observation()
      [%Observation{}, ...]

  """
  def list_observation do
    Observation
    |> Repo.all()
  end

  @doc """
  Gets a single observation.

  Raises `Ecto.NoResultsError` if the Observation does not exist.

  ## Examples

      iex> get_observation!(123)
      %Observation{}

      iex> get_observation!(456)
      ** (Ecto.NoResultsError)

  """
  def get_observation!(id) do
    Repo.get!(Observation, id)
  end

  @doc """
  Creates a observation.

  ## Examples

      iex> create_observation(%{field: value})
      {:ok, %Observation{}}

      iex> create_observation(%{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def create_observation(attrs \\ %{}) do
    changeset = Observation.changeset(%Observation{}, attrs)
    Repo.insert(changeset)
  end

  @doc """
  Updates a observation.

  ## Examples

      iex> update_observation(observation, %{field: new_value})
      {:ok, %Observation{}}

      iex> update_observation(observation, %{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def update_observation(%Observation{} = observation, attrs) do
    changeset = Observation.changeset(observation, attrs)
    Repo.update(changeset)
  end

  @doc """
  Deletes a observation.

  ## Examples

      iex> delete_observation(observation)
      {:ok, %Observation{}}

      iex> delete_observation(observation)
      {:error, %Ecto.Changeset{}}

  """
  def delete_observation(%Observation{} = observation) do
    observation
    |> Repo.delete()
  end

  @doc """
  Returns an `%Ecto.Changeset{}` for tracking observation changes.

  ## Examples

      iex> change_observation(observation)
      %Ecto.Changeset{data: %Observation{}}

  """
  def change_observation(%Observation{} = observation, attrs \\ %{}) do
    observation
    |> Observation.changeset(attrs)
  end
end
|
server/satori/lib/satori/stream.ex
| 0.923868 | 0.49347 |
stream.ex
|
starcoder
|
defmodule Grax.Validator do
  @moduledoc false

  alias Grax.{ValidationError, InvalidIdError}
  alias Grax.Schema.{TypeError, CardinalityError}
  alias RDF.{IRI, BlankNode, Literal, XSD}

  import ValidationError, only: [add_error: 3]

  # Validates a mapping struct: checks the subject id, all data properties
  # and all links, accumulating errors into a ValidationError struct.
  # Returns {:ok, mapping} when no errors were found, else {:error, validation}.
  def call(mapping, opts) do
    ValidationError.exception(context: Map.get(mapping, :__id__))
    |> check_subject_iri(mapping, opts)
    |> check_properties(mapping, opts)
    |> check_links(mapping, opts)
    |> case do
      %{errors: []} -> {:ok, mapping}
      validation -> {:error, validation}
    end
  end

  # The subject id must be an IRI or a blank node.
  defp check_subject_iri(validation, %{__id__: %IRI{}}, _), do: validation
  defp check_subject_iri(validation, %{__id__: %BlankNode{}}, _), do: validation

  defp check_subject_iri(validation, %{__id__: id}, _) do
    add_error(validation, :__id__, InvalidIdError.exception(id: id))
  end

  # Validates every data property declared on the mapping's schema.
  defp check_properties(validation, %schema{} = mapping, opts) do
    schema.__properties__(:data)
    |> Enum.reduce(validation, fn {property, property_schema}, validation ->
      value = Map.get(mapping, property)
      check_property(validation, property, value, property_schema, opts)
    end)
  end

  # Validates every link declared on the mapping's schema.
  defp check_links(validation, %schema{} = mapping, opts) do
    schema.__properties__(:link)
    |> Enum.reduce(validation, fn {link, link_schema}, validation ->
      value = Map.get(mapping, link)
      check_link(validation, link, value, link_schema, opts)
    end)
  end

  @doc false
  def check_property(validation, property, value, property_schema, opts) do
    type = property_schema.type

    validation
    |> check_cardinality(property, value, type, property_schema.cardinality)
    |> check_datatype(property, value, type, opts)
  end

  @doc false
  def check_link(validation, link, value, link_schema, opts) do
    type = link_schema.type

    validation
    |> check_cardinality(link, value, type, link_schema.cardinality)
    |> check_resource_type(link, value, type, opts)
  end

  # Set-valued property with a list value: check the element count against
  # the declared cardinality (nil, minimum, exact count, or range).
  defp check_cardinality(validation, property, values, {:list_set, _}, cardinality)
       when is_list(values) do
    count = length(values)

    case cardinality do
      nil ->
        validation

      {:min, cardinality} when count >= cardinality ->
        validation

      cardinality when is_integer(cardinality) and count == cardinality ->
        validation

      %Range{first: min, last: max} when count >= min and count <= max ->
        validation

      _ ->
        add_error(
          validation,
          property,
          CardinalityError.exception(cardinality: cardinality, value: values)
        )
    end
  end

  # A set-valued property whose value is not a list is a type error.
  defp check_cardinality(validation, property, value, {:list_set, _} = type, _) do
    add_error(validation, property, TypeError.exception(value: value, type: type))
  end

  # A scalar property with a list value is a type error.
  defp check_cardinality(validation, property, value, type, _) when is_list(value) do
    add_error(validation, property, TypeError.exception(value: value, type: type))
  end

  # A required scalar (cardinality 1) must not be nil.
  defp check_cardinality(validation, property, nil, _, 1) do
    add_error(validation, property, CardinalityError.exception(cardinality: 1, value: nil))
  end

  defp check_cardinality(validation, _, _, _, _), do: validation

  # Untyped properties, nil values and empty sets need no datatype check.
  defp check_datatype(validation, _, _, nil, _), do: validation
  defp check_datatype(validation, _, nil, _, _), do: validation
  defp check_datatype(validation, _, [], _, _), do: validation

  defp check_datatype(validation, property, values, {:list_set, type}, opts) do
    check_datatype(validation, property, values, type, opts)
  end

  defp check_datatype(validation, property, values, type, opts) when is_list(values) do
    Enum.reduce(values, validation, &check_datatype(&2, property, &1, type, opts))
  end

  defp check_datatype(validation, property, value, type, _opts) do
    if value |> in_value_space?(type) do
      validation
    else
      add_error(validation, property, TypeError.exception(value: value, type: type))
    end
  end

  # Predicate: is `value` a member of the value space of `type`?
  defp in_value_space?(value, nil), do: value |> Literal.new() |> Literal.valid?()
  defp in_value_space?(%BlankNode{}, _), do: false
  defp in_value_space?(%IRI{}, IRI), do: true
  defp in_value_space?(_, IRI), do: false
  defp in_value_space?(value, XSD.String), do: is_binary(value)
  defp in_value_space?(%URI{}, XSD.AnyURI), do: true
  defp in_value_space?(_, XSD.AnyURI), do: false
  defp in_value_space?(value, XSD.Boolean), do: is_boolean(value)
  defp in_value_space?(value, XSD.Integer), do: is_integer(value)
  defp in_value_space?(value, XSD.Float), do: is_float(value)
  defp in_value_space?(value, XSD.Double), do: is_float(value)
  defp in_value_space?(%Decimal{}, XSD.Decimal), do: true
  defp in_value_space?(_, XSD.Decimal), do: false
  defp in_value_space?(%Decimal{}, XSD.Numeric), do: true
  defp in_value_space?(value, XSD.Numeric), do: is_number(value)

  defp in_value_space?(value, type) do
    # For other numeric datatypes only numbers/Decimals can possibly be valid;
    # skip the literal round-trip when the Elixir value can't match.
    numeric_candidate? =
      if XSD.Numeric.datatype?(type) do
        is_number(value) or match?(%Decimal{}, value)
      else
        true
      end

    # Fix: the previous version piped into an `if` without `else`, so this
    # predicate returned `nil` (instead of `false`) when the numeric guard
    # failed. Return an explicit boolean.
    if numeric_candidate? do
      value |> type.new(as_value: true) |> Literal.valid?()
    else
      false
    end
  end

  # Plain resource references (IRI or blank node) are always acceptable.
  defp check_resource_type(validation, _, %IRI{}, {:resource, _}, _), do: validation
  defp check_resource_type(validation, _, %BlankNode{}, {:resource, _}, _), do: validation
  defp check_resource_type(validation, _, nil, _, _), do: validation
  defp check_resource_type(validation, _, [], _, _), do: validation

  defp check_resource_type(validation, link, values, {:list_set, type}, opts) do
    check_resource_type(validation, link, values, type, opts)
  end

  defp check_resource_type(validation, link, values, type, opts) when is_list(values) do
    Enum.reduce(values, validation, &check_resource_type(&2, link, &1, type, opts))
  end

  # Nested struct of the expected schema: validate it recursively and nest
  # any resulting errors under this link.
  defp check_resource_type(validation, link, %type{} = value, {:resource, type}, opts) do
    case call(value, opts) do
      {:ok, _} -> validation
      {:error, nested_validation} -> add_error(validation, link, nested_validation)
    end
  end

  # Polymorphic link (class mapping): any struct value is validated
  # recursively. NOTE(review): the struct's module is not checked against
  # `class_mapping` here -- confirm that membership is enforced elsewhere.
  defp check_resource_type(validation, link, %_type{} = value, {:resource, class_mapping}, opts)
       when is_map(class_mapping) do
    case call(value, opts) do
      {:ok, _} -> validation
      {:error, nested_validation} -> add_error(validation, link, nested_validation)
    end
  end

  defp check_resource_type(validation, link, value, type, _opts) do
    add_error(validation, link, TypeError.exception(value: value, type: type))
  end
end
|
lib/grax/validator.ex
| 0.737064 | 0.440048 |
validator.ex
|
starcoder
|
defmodule Blackout do
  @moduledoc """
  A very thin wrapper around Erlang's mnesia used to
  provide distributed rate limiting,
  with little to no configuration
  and a simple API for developer happiness.
  """

  @doc """
  Setup an mnesia schema and table while joining a cluster.

  This function must be called for each node
  registered in the cluster on application startup.

  The default mnesia table options assume concurrent reads/writes
  with **ram only** usage. All options may be overridden except for
  **:attributes**. Available options can be found at:
  [Mnesia Docs](http://erlang.org/doc/man/mnesia.html#create_table-2)

  ## Default Options
      [
        attributes: [:bucket_name, :rate_limit],
        ram_copies: nodes,
        disc_copies: [],
        disc_only_copies: [],
        storage_properties: [ets: [read_concurrency: true, write_concurrency: true]]
      ]

  ## Examples
      iex> nodes = [Node.self(), some_other_node]
      iex> Blackout.join_cluster(:my_schema, nodes)
      {:ok, :atomic}

      iex> Blackout.join_cluster(:my_schema, nodes)
      {:ok, :already_exists}
  """
  def join_cluster(schema_name, nodes \\ [], mnesia_options \\ []) do
    :mnesia.start()

    # De-duplicate the node list while always including the local node.
    nodes =
      (nodes ++ [Node.self()])
      |> MapSet.new()
      |> MapSet.to_list()

    # :attributes is fixed by Blackout and may not be overridden.
    mnesia_options = Keyword.delete(mnesia_options, :attributes)

    options =
      default_options(nodes)
      |> Keyword.merge(mnesia_options)

    schema_name
    |> :mnesia.create_table(options)
    |> case do
      {:atomic, :ok} ->
        {:ok, :atomic}

      {:aborted, {:already_exists, _}} ->
        {:ok, :already_exists}

      e ->
        {:error, e}
    end
  end

  @doc """
  Runs an mnesia transaction to check
  rate limits for a given bucket name.

  ## Examples
      iex> Blackout.check_bucket(:my_schema, "my_bucket_name", 1, 60_000)
      {:atomic, {:ok, 60_000}}

      iex> Blackout.check_bucket(:my_schema, "my_bucket_name", 1, 60_000)
      {:atomic, {:rate_limited, 59155}}
  """
  def check_bucket(schema_name, bucket_name, count_limit, time_limit) do
    :mnesia.transaction(fn ->
      matches = :mnesia.read(schema_name, bucket_name)

      case matches do
        # First sighting of this bucket: insert initial timestamp and count.
        [] ->
          now = timestamp()
          val = {now, 1}
          insert_bucket(schema_name, bucket_name, val)
          {:ok, time_limit}

        # Update existing bucket timestamp and count.
        [{^schema_name, ^bucket_name, {_expiration, _count} = val}] ->
          {allow_or_deny, {expiration, time_left, count}} =
            check_limited(bucket_name, val, count_limit, time_limit)

          insert_bucket(schema_name, bucket_name, {expiration, count})
          {allow_or_deny, time_left}

        # Bucket value would have to be malformed,
        # so delete the bucket and back off.
        _ ->
          mnesia_delete_bucket(schema_name, bucket_name)
          {:rate_limited, time_limit}
      end
    end)
  end

  @doc """
  Run an mnesia transaction
  to delete a bucket by name.

  ## Examples
      iex> Blackout.delete_bucket(:my_schema, "my_bucket_name")
      {:atomic, :ok}
  """
  def delete_bucket(schema_name, bucket_name) do
    :mnesia.transaction(fn ->
      mnesia_delete_bucket(schema_name, bucket_name)
    end)
  end

  # PRIVATE

  # Milliseconds from unix epoch.
  # Fix: `:milli_seconds` is a deprecated alias — use the canonical
  # `:millisecond` time unit.
  defp timestamp(), do: :erlang.system_time(:millisecond)

  # Decide whether the request is allowed and produce the bucket's next
  # {expiration, time_left, count} state.
  defp check_limited(_bucket_name, {expiration, current_count}, count_limit, time_limit) do
    time_now = timestamp()
    milliseconds_since_expiration = time_now - expiration
    expired? = milliseconds_since_expiration >= time_limit

    if expired? do
      # Window elapsed: restart the window at `time_now` with a count of 1.
      # (Previously rebound `expiration`/`time_left`/`count` locals; return
      # the tuple directly to avoid shadowing.)
      {:ok, {time_now, 0, 1}}
    else
      time_left = time_limit - milliseconds_since_expiration

      if current_count >= count_limit,
        do: {:rate_limited, {expiration, time_left, current_count}},
        else: {:ok, {expiration, time_left, current_count + 1}}
    end
  end

  # Used within an mnesia transaction to delete a bucket.
  defp mnesia_delete_bucket(schema_name, bucket_name) do
    :mnesia.delete({schema_name, bucket_name})
  end

  # Used within an mnesia transaction to insert a new bucket value.
  defp insert_bucket(schema_name, bucket_name, {_new_expiration, _new_count} = val) do
    :mnesia.write({schema_name, bucket_name, val})
  end

  # Default options for mnesia create_table.
  # These options assume in-memory usage only.
  defp default_options(nodes) do
    [
      attributes: [:bucket_name, :rate_limit],
      ram_copies: nodes,
      disc_copies: [],
      disc_only_copies: [],
      storage_properties: [ets: [read_concurrency: true, write_concurrency: true]]
    ]
  end
end
|
lib/blackout.ex
| 0.860735 | 0.534066 |
blackout.ex
|
starcoder
|
defmodule Grouper.Group do
  @moduledoc """
  Starting a group turns whatever is starting it into a "group". This is
  typically either run from a standalone process or under a supervisor as the
  first child. It is not usually necessary under an `Application`, as the
  application itself is used as a group identifier.
  In either case, it will "commandeer" the group-leader setting of that
  calling process, making all subsequent processes started use it as leader
  which is necessary for Grouper's functionality to work.
  ## Standalone
  Typically run in tests or scripts,
  In the rare event that two groups must run under the same supervisor, it is
  possible to specify a "group key" to distinguish the two groups.
  ## Application
  When running under an application, it's assumed to be your group. As a
  convenience, the OTP environment
  Attempting to start a group under an application will return a
  `:group_under_app` error by default. Forcing a group to run under a
  supervisor can break OTP shutdown behavior and is almost always a terrible
  idea.
  ## Restart Behavior
  When run under a supervisor, a group may be restarted automatically should
  it fail. A flag is set in the supervisor's process dictionary that is used
  to detect this.
  Since death of a group_leader is fairly catastrophic, this process will not
  try to restart it. Rather, all subsequent attempts will automatically fail
  with a `:group_leader_died` error.
  This should fail the supervisor fairly quickly which will, in turn, more
  properly clean up the whole tree. Given that group leaders very rarely
  fail, this shouldn't be a common occurence.
  ## Options
  * `:group_key` - when more than one group is running under a single
  supervisor, this key is used to disambiguate start and stop requests.
  * `:force_group` - when set to `true`, will start a group under an
  application. This is dangerous as it interferes with OTP's application
  shutdown behavior.
  Additional options are passed on to the `Grouper.GroupLeader.start_link/1`
  function.
  """
  require Logger
  alias Grouper.GroupLeader

  @doc """
  initialize this process and its descendants to be their own group
  """
  @spec start_link(keyword()) :: {:ok, pid()} | :ignore | {:error, any}
  def start_link(opts \\ []) do
    {group_key, opts} = Keyword.pop(opts, :group_key, :default_group_key)
    {force_group, opts} = Keyword.pop(opts, :force_group, false)

    opts =
      opts
      |> Keyword.put(:parent, self())
      |> Keyword.put_new(:commandeer, true)

    # `:application.get_application/0` returns `{:ok, app}` or `:undefined`.
    app = :application.get_application()

    cond do
      # Refuse to run under an application unless explicitly forced.
      app != :undefined and not force_group ->
        Logger.error(
          "groups under applications can break shutdown, requires `force_group` option to override"
        )

        {:error, :group_under_app}

      # The flag below was set on a previous successful start; if we are
      # being restarted, the old group leader died — fail fast (see
      # "Restart Behavior" in the moduledoc).
      Process.get({:group_active, group_key}) == true ->
        {:error, :group_leader_died}

      app == :undefined or force_group ->
        Process.put({:group_active, group_key}, true)
        GroupLeader.start_link(opts)
    end
  end

  @doc """
  deactivates group behavior for this process
  """
  @spec stop(keyword()) :: :ok
  def stop(opts \\ []) do
    {group_key, _opts} = Keyword.pop(opts, :group_key, :default_group_key)

    # ignores extra options to allow symmetry with `start_link/1`
    case Process.delete({:group_active, group_key}) do
      true ->
        Process.group_leader()
        |> GenServer.stop()

      nil ->
        raise Grouper.NoGroupError, reason: "no running group to stop"
    end
  end

  @doc """
  provides instructions for supervisors to run a group process
  """
  @spec child_spec(keyword()) :: {:ok, Supervisor.child_spec()}
  def child_spec(opts) do
    cspec =
      opts
      |> GroupLeader.child_spec()
      # Fix: the MFA's third element must be the *argument list*. Passing the
      # bare keyword list (`opts`) would make the supervisor apply each
      # `{key, value}` tuple as a separate argument, crashing with a
      # FunctionClauseError/UndefinedFunctionError for any opts length != 1.
      |> Map.put(:start, {__MODULE__, :start_link, [opts]})

    {:ok, cspec}
  end
end
|
lib/grouper/group.ex
| 0.81582 | 0.486941 |
group.ex
|
starcoder
|
defmodule Calex.Decoder do
  @moduledoc false
  # Decodes iCalendar-style text into a nested keyword structure.
  # https://rubular.com/r/sXPKG84KfgtfMV
  @utc_datetime_pattern ~r/^\d{8}T\d{6}Z$/
  @local_datetime_pattern ~r/^\d{8}T\d{6}$/
  @date_pattern ~r/^\d{8}$/
  # Matches Apple-style "GMT+HHMM"/"GMT-HHMM" pseudo time zones.
  @gmt_offset_pattern ~r/^GMT(\+|\-)(\d{2})(\d{2})$/

  # Entry point: split into logical (unfolded) lines, then parse blocks.
  def decode!(data) do
    data
    |> decode_lines
    |> decode_blocks
  end

  # Unfolds RFC 5545 "folded" lines: a physical line starting with a space is
  # a continuation of the previous line. The accumulator holds the current
  # unfinished logical line; each fresh line flushes the previous one.
  # NOTE(review): `elem(0)` drops the final accumulator, so the last logical
  # line is only emitted if the input ends with a newline (yielding a
  # trailing "" line) — confirm inputs are newline-terminated.
  defp decode_lines(bin) do
    bin
    |> String.splitter(["\r\n", "\n"])
    |> Enum.flat_map_reduce(nil, fn
      " " <> rest, acc ->
        {[], acc <> rest}

      line, prevline ->
        # Unescape literal "\n" sequences when flushing a completed line.
        {(prevline && [String.replace(prevline, "\\n", "\n")]) || [], line}
    end)
    |> elem(0)
  end

  defp decode_blocks([]), do: []

  # Decode each BEGIN:.../END:... section as a list under its key.
  defp decode_blocks(["BEGIN:" <> binkey | rest]) do
    # Take every line up to the matching END marker; skip the END line itself.
    {props, [_ | lines_rest]} = Enum.split_while(rest, &(!match?("END:" <> ^binkey, &1)))
    key = decode_key(binkey)

    # Accumulate consecutive blocks of the same key into one list.
    case decode_blocks(lines_rest) do
      [{^key, elems} | props_rest] -> [{key, [decode_blocks(props) | elems]} | props_rest]
      props_rest -> [{key, [decode_blocks(props)]} | props_rest]
    end
  end

  # Recursive decoding if no BEGIN/END block.
  defp decode_blocks([prop | rest]), do: [decode_prop(prop) | decode_blocks(rest)]

  # Decode key, params and value for each prop ("KEY;P1=V1;P2=V2:value").
  defp decode_prop(prop) do
    [keyprops, val] = String.split(prop, ":", parts: 2)

    case String.split(keyprops, ";") do
      # DURATION values get parsed eagerly and carry no params.
      ["DURATION"] ->
        {:duration, {Timex.Duration.parse!(val), []}}

      [key] ->
        {decode_key(key), {decode_value(val, []), []}}

      [key | props] ->
        props =
          props
          |> Enum.map(fn prop ->
            [k, v] =
              case String.split(prop, "=") do
                [k1, v1] ->
                  [k1, v1]

                [k1 | tl] ->
                  # This case handles malformed X-APPLE-STRUCTURED-LOCATION
                  # properties that fail to quote-escape `=` characters.
                  [k1, Enum.join(tl, "=")]
              end

            {decode_key(k), v}
          end)

        {decode_key(key), {decode_value(val, props), props}}
    end
  end

  # Interprets a raw value, using the TZID param (default UTC) for local
  # datetimes. Unrecognized values pass through as strings.
  defp decode_value(val, props) do
    time_zone = Keyword.get(props, :tzid, "Etc/UTC")

    cond do
      String.match?(val, @local_datetime_pattern) ->
        decode_local_datetime(val, time_zone)

      String.match?(val, @utc_datetime_pattern) ->
        decode_utc_datetime(val)

      String.match?(val, @date_pattern) && Keyword.get(props, :value) == "DATE" ->
        decode_date(val)

      true ->
        val
    end
  end

  # Parses a local datetime. For "GMT±HHMM" pseudo zones the offset is applied
  # manually to produce UTC (local + offset magnitude for GMT-X, local - offset
  # for GMT+X); otherwise the named zone is attached directly.
  defp decode_local_datetime(val, time_zone) do
    naive_datetime = Timex.parse!(val, "{YYYY}{0M}{0D}T{h24}{m}{s}")

    case Regex.run(@gmt_offset_pattern, time_zone) do
      [_, "-", hour, min] ->
        naive_datetime
        |> DateTime.from_naive!("Etc/UTC")
        |> Timex.add(String.to_integer(hour) |> Timex.Duration.from_hours())
        |> Timex.add(String.to_integer(min) |> Timex.Duration.from_minutes())

      [_, "+", hour, min] ->
        naive_datetime
        |> DateTime.from_naive!("Etc/UTC")
        |> Timex.subtract(String.to_integer(hour) |> Timex.Duration.from_hours())
        |> Timex.subtract(String.to_integer(min) |> Timex.Duration.from_minutes())

      _ ->
        DateTime.from_naive!(naive_datetime, time_zone)
    end
  end

  # Parses a "...Z"-suffixed datetime as UTC.
  defp decode_utc_datetime(val) do
    val
    |> Timex.parse!("{YYYY}{0M}{0D}T{h24}{m}{s}Z")
    |> DateTime.from_naive!("Etc/UTC")
  end

  # Parses a bare YYYYMMDD date.
  defp decode_date(val) do
    val
    |> Timex.parse!("{YYYY}{0M}{0D}")
    |> NaiveDateTime.to_date()
  end

  # Normalizes an iCalendar key ("X-FOO-BAR") to an atom (:x_foo_bar).
  # NOTE(review): `String.to_atom/1` on arbitrary input can exhaust the atom
  # table if fed untrusted data — confirm inputs are trusted.
  defp decode_key(bin) do
    bin
    |> String.replace("-", "_")
    |> String.downcase()
    |> String.to_atom()
  end
end
|
lib/calex/decoder.ex
| 0.779028 | 0.451024 |
decoder.ex
|
starcoder
|
defmodule Flawless.Types do
  @moduledoc """
  Provides a number of helper functions to deal with types.
  """

  @valid_types [
    :any,
    :string,
    :number,
    :integer,
    :float,
    :boolean,
    :atom,
    :pid,
    :ref,
    :function,
    :port,
    :list,
    :tuple,
    :map,
    :struct
  ]

  @type t() :: unquote(Enum.reduce(@valid_types, &{:|, [], [&1, &2]}))

  @doc "Returns the list of type atoms understood by this module."
  @spec valid_types() :: [t()]
  def valid_types(), do: @valid_types

  @doc """
  Checks whether `value` belongs to `expected_type`.

  `:map` excludes structs; use `:struct` for those. `:any` matches everything.
  Raises `CaseClauseError` for an unknown type atom.
  """
  @spec has_type?(any, t()) :: boolean
  def has_type?(value, expected_type) do
    checker =
      case expected_type do
        :any -> fn _ -> true end
        :string -> fn v -> is_binary(v) end
        :number -> fn v -> is_number(v) end
        :integer -> fn v -> is_integer(v) end
        :float -> fn v -> is_float(v) end
        :boolean -> fn v -> is_boolean(v) end
        :atom -> fn v -> is_atom(v) end
        :pid -> fn v -> is_pid(v) end
        :ref -> fn v -> is_reference(v) end
        :function -> fn v -> is_function(v) end
        :port -> fn v -> is_port(v) end
        :list -> fn v -> is_list(v) end
        :tuple -> fn v -> is_tuple(v) end
        :struct -> fn v -> is_struct(v) end
        :map -> fn v -> is_map(v) and not is_struct(v) end
      end

    checker.(value)
  end

  @doc """
  Returns the most specific type atom for `value`.

  Check order matters: specific before general (`:float`/`:integer` before
  `:number`, `:boolean` before `:atom`, `:struct` before `:map`).
  """
  @spec type_of(any) :: t()
  def type_of(value) do
    [
      string: fn v -> is_binary(v) end,
      float: fn v -> is_float(v) end,
      integer: fn v -> is_integer(v) end,
      number: fn v -> is_number(v) end,
      boolean: fn v -> is_boolean(v) end,
      atom: fn v -> is_atom(v) end,
      pid: fn v -> is_pid(v) end,
      ref: fn v -> is_reference(v) end,
      function: fn v -> is_function(v) end,
      port: fn v -> is_port(v) end,
      list: fn v -> is_list(v) end,
      tuple: fn v -> is_tuple(v) end,
      struct: fn v -> is_struct(v) end,
      map: fn v -> is_map(v) end
    ]
    |> Enum.find_value(:any, fn {type, check} -> if check.(value), do: type end)
  end

  @doc """
  Casts `value` from type `from` to type `to` using the built-in conversions.
  """
  @spec cast(any, t(), t()) :: {:ok, any} | {:error, String.t()}
  def cast(value, from, to) do
    cast_with(value, to, &do_cast(&1, from, to))
  end

  @doc """
  Casts `value` with a custom `converter`, normalizing failures into an
  `{:error, message}` tuple mentioning `output_type`.
  """
  @spec cast_with(any, t(), (any -> any)) :: {:error, String.t()} | {:ok, any}
  def cast_with(value, output_type, converter) do
    case converter.(value) do
      {:ok, casted} -> {:ok, casted}
      :error -> cast_failure(output_type)
      {:error, _reason} -> cast_failure(output_type)
    end
  end

  # Builds the uniform cast-failure tuple.
  defp cast_failure(output_type), do: {:error, "Cannot be cast to #{output_type}."}

  # Identity cast: matching `type` twice in the head requires equality.
  defp do_cast(v, type, type), do: {:ok, v}

  # String -> number: prefer an integer when the whole string parses as one,
  # otherwise fall back to a full float parse.
  defp do_cast(s, :string, :number) do
    case Integer.parse(s) do
      {int, ""} ->
        {:ok, int}

      _not_a_full_integer ->
        case Float.parse(s) do
          {float, ""} -> {:ok, float}
          _ -> :error
        end
    end
  end

  defp do_cast(s, :string, :integer) do
    with {int, ""} <- Integer.parse(s) do
      {:ok, int}
    else
      _ -> :error
    end
  end

  defp do_cast(s, :string, :float) do
    with {float, ""} <- Float.parse(s) do
      {:ok, float}
    else
      _ -> :error
    end
  end

  defp do_cast(s, :string, :boolean) do
    case String.downcase(s) do
      "true" -> {:ok, true}
      "false" -> {:ok, false}
      _ -> :error
    end
  end

  # NOTE: creates atoms dynamically; avoid feeding untrusted input.
  defp do_cast(s, :string, :atom), do: {:ok, String.to_atom(s)}
  defp do_cast(n, :number, :integer), do: {:ok, round(n)}
  defp do_cast(n, :number, :float), do: {:ok, n / 1}
  defp do_cast(n, :number, :string), do: {:ok, to_string(n)}
  defp do_cast(i, :integer, :float), do: {:ok, i / 1}
  defp do_cast(i, :integer, :number), do: {:ok, i}
  defp do_cast(i, :integer, :string), do: {:ok, Integer.to_string(i)}
  defp do_cast(f, :float, :integer), do: {:ok, round(f)}
  defp do_cast(f, :float, :number), do: {:ok, f}
  defp do_cast(f, :float, :string), do: {:ok, Float.to_string(f)}
  defp do_cast(b, :boolean, :string), do: {:ok, to_string(b)}
  defp do_cast(a, :atom, :string), do: {:ok, Atom.to_string(a)}
  defp do_cast(l, :list, :tuple), do: {:ok, List.to_tuple(l)}
  defp do_cast(t, :tuple, :list), do: {:ok, Tuple.to_list(t)}
  defp do_cast(s, :struct, :map), do: {:ok, Map.from_struct(s)}
  defp do_cast(_, _, _), do: :error
end
|
lib/flawless/types.ex
| 0.838845 | 0.70724 |
types.ex
|
starcoder
|
defmodule PayDayLoan.EtsBackend do
  @moduledoc """
  ETS-based backend capable of handling raw values, pids, or callbacks.
  This is the default backend used by PayDayLoan and is designed for storing
  process ids. However, it can be used with raw values or callback functions.
  With pids, special care is taken to keep the cache state consistent with
  the "alive" state of the processes. If a process is found to be dead, the
  key is removed from cache. The `PayDayLoan.ProcessMonitor` process monitors
  pids, and we check for alive-ness whenever we resolve a value.
  If a callback is stored, then the callback is executed whenever we attempt
  to resolve a value - e.g., on `get` or `reduce` or `values` calls. The
  callback must return a tuple with `{:ok, value}` on success or
  `{:error, :not_found}` on failure.
  The functions in this module are documented only to aid in understanding
  how the default backend works. They should not be called directly - only
  through the PDL API.
  """

  @behaviour PayDayLoan.Backend

  @doc """
  Setup callback, creates the underlying ETS table
  """
  @spec setup(PayDayLoan.t()) :: :ok
  def setup(%PayDayLoan{backend_payload: backend_payload}) do
    # The table is named after the backend payload atom and is public so any
    # process going through the PDL API can read/write it.
    _ =
      :ets.new(
        backend_payload,
        [:public, :named_table, {:read_concurrency, true}]
      )

    :ok
  end

  @doc """
  Perform Enum.reduce on the ETS table
  """
  @spec reduce(PayDayLoan.t(), term, ({PayDayLoan.key(), pid}, term -> term)) ::
          term
  def reduce(pdl = %PayDayLoan{backend_payload: backend_payload}, acc0, reducer)
      when is_function(reducer, 2) do
    # Entries that fail to resolve (dead pid, callback miss) are skipped
    # rather than passed to the reducer.
    :ets.foldl(
      fn {k, v}, acc ->
        case resolve_value(v, k, pdl) do
          {:ok, resolved_v} -> reducer.({k, resolved_v}, acc)
          {:error, :not_found} -> acc
        end
      end,
      acc0,
      backend_payload
    )
  end

  @doc """
  Returns the number of cached keys
  """
  @spec size(PayDayLoan.t()) :: non_neg_integer
  def size(%PayDayLoan{backend_payload: backend_payload}) do
    # O(1) table metadata lookup; counts raw entries without resolving them,
    # so dead pids still present in the table are included.
    :ets.info(backend_payload, :size)
  end

  @doc """
  Returns a list of all cached keys
  """
  @spec keys(PayDayLoan.t()) :: [PayDayLoan.key()]
  def keys(pdl = %PayDayLoan{}) do
    reduce(pdl, [], fn {k, _pid}, acc -> [k | acc] end)
  end

  @doc """
  Returns a list of all cached values
  """
  @spec values(PayDayLoan.t()) :: [term]
  def values(pdl = %PayDayLoan{}) do
    reduce(pdl, [], fn {_k, v}, acc -> [v | acc] end)
  end

  @doc """
  Get the value corresponding to the given key
  If the value is a process that is not alive, deletes the entry and returns
  `{:error, :not_found}`.
  """
  @spec get(PayDayLoan.t(), PayDayLoan.key()) :: {:ok, term} | {:error, :not_found}
  def get(pdl = %PayDayLoan{}, key) do
    # Two stages: raw ETS lookup, then resolution (alive-check / callback).
    case lookup(pdl, key) do
      {:ok, pre_resolve_value} -> resolve_value(pre_resolve_value, key, pdl)
      {:error, :not_found} -> {:error, :not_found}
    end
  end

  @doc """
  Add a value to the cache and monitor it if it is a pid.
  """
  @spec put(PayDayLoan.t(), PayDayLoan.key(), term) :: :ok
  def put(pdl = %PayDayLoan{backend_payload: backend_payload}, key, value) do
    :ets.insert(backend_payload, {key, value})

    # Monitoring is delegated (async cast) to the cache monitor process so it
    # can clean up the table when the pid dies.
    if is_pid(value) do
      GenServer.cast(pdl.cache_monitor, {:monitor, value})
    end

    :ok
  end

  @doc """
  Remove a value from cache
  """
  @spec delete_value(PayDayLoan.t(), term) :: :ok
  def delete_value(%PayDayLoan{backend_payload: backend_payload}, value) do
    # Deletes every entry holding this value, regardless of key.
    true = :ets.match_delete(backend_payload, {:_, value})
    :ok
  end

  @doc """
  Remove a key from cache
  """
  @spec delete(PayDayLoan.t(), PayDayLoan.key()) :: :ok
  def delete(%PayDayLoan{backend_payload: backend_payload}, key) do
    true = :ets.delete(backend_payload, key)
    :ok
  end

  # Raw ETS read; returns the stored value without resolving it.
  defp lookup(%PayDayLoan{backend_payload: backend_payload}, key) do
    case :ets.lookup(backend_payload, key) do
      [{_key, pid}] -> {:ok, pid}
      [] -> {:error, :not_found}
    end
  end

  # Stored 1-arity functions are treated as lazy resolvers: invoked with the
  # key on every access; they must return {:ok, value} | {:error, :not_found}.
  defp resolve_value(cb, key, _pdl) when is_function(cb, 1) do
    cb.(key)
  end

  # Stored pids are only valid while alive; a dead pid is lazily evicted here.
  defp resolve_value(pid, _key, pdl) when is_pid(pid) do
    if Process.alive?(pid) do
      {:ok, pid}
    else
      :ok = delete_value(pdl, pid)
      {:error, :not_found}
    end
  end

  # Any other term is a raw value and resolves to itself.
  defp resolve_value(value, _key, _pdl), do: {:ok, value}
end
|
lib/pay_day_loan/ets_backend.ex
| 0.849644 | 0.650322 |
ets_backend.ex
|
starcoder
|
defmodule NearApi.Transaction do
  @moduledoc """
  Create NEAR [transactions](https://docs.near.org/docs/tutorials/create-transactions) here
  """
  require Logger

  @type t :: %__MODULE__{
          signer_id: String.t(),
          receiver_id: String.t(),
          nonce: integer,
          public_key: NearApi.PublicKey.t(),
          block_hash: binary,
          actions: [NearApi.Action.t()]
        }

  # Borsh serialization schema; field order here defines the wire layout, so
  # it must not be reordered.
  use Borsh,
    schema: [
      signer_id: :string,
      public_key: :borsh,
      nonce: :u64,
      receiver_id: :string,
      block_hash: [32],
      actions: [:borsh]
    ]

  defstruct [
    :signer_id,
    :public_key,
    :nonce,
    :receiver_id,
    :block_hash,
    :actions
  ]

  @doc """
  Creates a NearApi.Transaction struct, not serialized
  Parameters:
  `from_account` - an account strut NearApi.Account where `account_id` and `key_pair`
  `receiver_id` - NEAR account ID who we are sending tokens, e.g. `helloworld.near`
  `actions` - a list of transaction actions, e.g. NearApi.Actions.FunctionCall or NearApi.Actions.Transfer
  """
  @spec create_transaction(from_account :: NearApi.Account.t(), receiver_id :: String.t(), actions :: list) ::
          {:ok, NearApi.Transaction.t()} | {:error, :error_retrieving_access_key}
  def create_transaction(from_account, receiver_id, actions) do
    public_key = from_account.key_pair.public_key
    # The RPC expects the public key in base58 form.
    public_key_encoded = B58.encode58(from_account.key_pair.public_key.data)
    account_id = from_account.account_id

    # The access-key query supplies both the current nonce and a recent block
    # hash, both of which NEAR requires on every transaction.
    with {:ok, key} <- NearApi.RPC.AccessKeys.view_access_key(account_id, nil, public_key_encoded) do
      block_hash_raw = key["result"]["block_hash"]
      # Nonce must be strictly greater than the access key's current nonce.
      nonce = key["result"]["nonce"] + 1
      block_hash = B58.decode58!(block_hash_raw)

      {:ok,
       %NearApi.Transaction{
         signer_id: account_id,
         receiver_id: receiver_id,
         nonce: nonce,
         public_key: public_key,
         block_hash: block_hash,
         actions: actions
       }}
    else
      error ->
        Logger.error("#{__MODULE__}: Cannot retrieve access key: #{inspect(error)}")
        {:error, :error_retrieving_access_key}
    end
  end

  # Borsh-serializes the transaction, signs the SHA-256 digest of the
  # serialized form, and returns the Borsh-encoded SignedTransaction binary.
  def sign_and_serialise(tx, key_pair) do
    serialised_tx = borsh_encode(tx)
    serialized_tx_hash = :crypto.hash(:sha256, serialised_tx)
    signature = NearApi.KeyPair.signature(serialized_tx_hash, key_pair)

    # key_type 0 denotes ed25519 — TODO confirm against NearApi.Signature docs.
    st = %NearApi.SignedTransaction{transaction: tx, signature: %NearApi.Signature{key_type: 0, data: signature}}
    NearApi.SignedTransaction.borsh_encode(st)
  end

  # Produces the base64 payload expected by the NEAR `broadcast_tx_*` RPC
  # endpoints (unpadded, as the node requires).
  def payload(tx, key_pair) do
    tx
    |> sign_and_serialise(key_pair)
    |> Base.encode64(padding: false)
  end
end
|
lib/transaction.ex
| 0.860589 | 0.442094 |
transaction.ex
|
starcoder
|
defmodule Membrane.RTP.JitterBuffer.BufferStore do
  @moduledoc false
  # Store for RTP packets. Packets are stored in `Heap` ordered by packet index. Packet index is
  # defined in RFC 3711 (SRTP) as: 2^16 * rollover count + sequence number.
  # ## Fields
  # - `rollover_count` - count of all performed rollovers (cycles of sequence number)
  # - `heap` - contains records containing buffers
  # - `prev_index` - index of the last packet that has been served
  # - `end_index` - the highest index in the buffer so far, mapping to the most recently produced
  #   RTP packet placed in JitterBuffer
  use Bunch
  use Bunch.Access

  alias Membrane.{Buffer, RTP}
  alias Membrane.RTP.JitterBuffer
  alias Membrane.RTP.JitterBuffer.Record

  # 2^16 — sequence numbers are 16-bit and wrap at this value.
  @seq_number_limit 65_536

  # `set` mirrors the indices present in `heap` for O(1) duplicate detection.
  defstruct prev_index: nil,
            end_index: nil,
            heap: Heap.new(&Record.rtp_comparator/2),
            set: MapSet.new(),
            rollover_count: 0

  @type t :: %__MODULE__{
          prev_index: JitterBuffer.packet_index() | nil,
          end_index: JitterBuffer.packet_index() | nil,
          heap: Heap.t(),
          set: MapSet.t(),
          rollover_count: non_neg_integer()
        }

  @typedoc """
  An atom describing an error that may happen during insertion.
  """
  @type insert_error :: :late_packet

  @typedoc """
  An atom describing an error that may happen when fetching a buffer
  from the Store.
  """
  @type get_buffer_error :: :not_present

  @doc """
  Inserts buffer into the Store.
  Every subsequent buffer must have sequence number Bigger than the previously returned
  one or be part of rollover.
  """
  @spec insert_buffer(t(), Buffer.t()) :: {:ok, t()} | {:error, insert_error()}
  def insert_buffer(store, %Buffer{metadata: %{rtp: %{sequence_number: seq_num}}} = buffer) do
    do_insert_buffer(store, buffer, seq_num)
  end

  @spec do_insert_buffer(t(), Buffer.t(), RTP.Header.sequence_number_t()) ::
          {:ok, t()} | {:error, insert_error()}
  # First-ever packet with seq_num 0: treat it as the start of a new cycle, so
  # its index becomes @seq_number_limit and prev_index points just below it.
  defp do_insert_buffer(%__MODULE__{prev_index: nil} = store, buffer, 0) do
    store = add_record(store, Record.new(buffer, @seq_number_limit))
    {:ok, %__MODULE__{store | prev_index: @seq_number_limit - 1}}
  end

  # First-ever packet with any other seq_num: index equals the sequence number.
  defp do_insert_buffer(%__MODULE__{prev_index: nil} = store, buffer, seq_num) do
    store = add_record(store, Record.new(buffer, seq_num))
    {:ok, %__MODULE__{store | prev_index: seq_num - 1}}
  end

  defp do_insert_buffer(
         %__MODULE__{prev_index: prev_index, rollover_count: roc} = store,
         buffer,
         seq_num
       ) do
    # Extend the 16-bit sequence number to a monotonic index by picking the
    # cycle (previous/current/next) that places it closest to prev_index.
    index =
      case from_which_cycle(prev_index, seq_num) do
        :current -> seq_num + roc * @seq_number_limit
        :prev -> seq_num + (roc - 1) * @seq_number_limit
        :next -> seq_num + (roc + 1) * @seq_number_limit
      end

    # TODO: Consider taking some action if the gap between indices is too big
    if is_fresh_packet?(prev_index, index) do
      record = Record.new(buffer, index)
      {:ok, add_record(store, record)}
    else
      {:error, :late_packet}
    end
  end

  @doc """
  Calculates size of the Store.
  Size is calculated by counting `slots` between youngest (buffer with
  smallest sequence number) and oldest buffer.
  If Store has buffers [1,2,10] its size would be 10.
  """
  @spec size(__MODULE__.t()) :: number()
  def size(store)

  def size(%__MODULE__{heap: %Heap{data: nil}}), do: 0

  def size(%__MODULE__{prev_index: nil, end_index: last, heap: heap}) do
    size = if Heap.size(heap) == 1, do: 1, else: last - Heap.root(heap).index + 1
    size
  end

  def size(%__MODULE__{prev_index: prev_index, end_index: end_index}) do
    end_index - prev_index
  end

  @doc """
  Shifts the store to the buffer with the next sequence number.
  If this buffer is present, it will be returned.
  Otherwise it will be treated as late and rejected on attempt to insert into the store.
  """
  @spec shift(t) :: {Record.t() | nil, t}
  def shift(store)

  def shift(%__MODULE__{prev_index: nil} = store) do
    {nil, store}
  end

  def shift(%__MODULE__{prev_index: prev_index, heap: heap, set: set} = store) do
    record = Heap.root(heap)
    expected_next_index = prev_index + 1

    {result, store} =
      if record != nil and record.index == expected_next_index do
        # The next-in-order packet is present: pop it and drop its index
        # from the duplicate-tracking set.
        updated_heap = Heap.pop(heap)
        updated_set = MapSet.delete(set, record.index)
        updated_store = %__MODULE__{store | heap: updated_heap, set: updated_set}
        {record, updated_store}
      else
        # TODO: instead of nil use expected_next_index to put in Discontinuity metadata
        # after https://github.com/membraneframework/membrane-core/issues/238 is done.
        {nil, store}
      end

    # prev_index advances regardless of whether a record was produced — a
    # missing packet becomes permanently late.
    {result, bump_prev_index(store)}
  end

  @doc """
  Shifts the store until the first gap in sequence numbers of records
  """
  @spec shift_ordered(t) :: {[Record.t() | nil], t}
  def shift_ordered(store) do
    shift_while(store, fn %__MODULE__{prev_index: prev_index}, %Record{index: index} ->
      index == prev_index + 1
    end)
  end

  @doc """
  Shifts the store as long as it contains a buffer with the timestamp older than provided duration
  """
  @spec shift_older_than(t, Membrane.Time.t()) :: {[Record.t() | nil], t}
  def shift_older_than(store, max_age) do
    max_age_timestamp = Membrane.Time.monotonic_time() - max_age

    shift_while(store, fn _store, %Record{timestamp: timestamp} ->
      timestamp <= max_age_timestamp
    end)
  end

  @doc """
  Returns all buffers that are stored in the `BufferStore`.
  """
  @spec dump(t()) :: [Record.t()]
  def dump(%__MODULE__{} = store) do
    {records, _store} = shift_while(store, fn _store, _record -> true end)
    records
  end

  @doc """
  Returns timestamp (time of insertion) of a buffer with lowest index
  """
  @spec first_record_timestamp(t()) :: Membrane.Time.t() | nil
  def first_record_timestamp(%__MODULE__{heap: heap}) do
    case Heap.root(heap) do
      %Record{timestamp: time} -> time
      nil -> nil
    end
  end

  # A packet is "fresh" when its extended index is newer than the last served.
  defp is_fresh_packet?(prev_index, index), do: index > prev_index

  @spec from_which_cycle(JitterBuffer.packet_index(), RTP.Header.sequence_number_t()) ::
          :current | :next | :prev
  # Picks the rollover cycle that minimizes the distance between the new
  # sequence number and the previously served one.
  def from_which_cycle(prev_index, seq_num) do
    prev_seq_num = rem(prev_index, @seq_number_limit)

    # calculate the distance between prev_seq_num and new seq_num assuming it comes from:
    # a) current cycle
    distance_if_current = abs(prev_seq_num - seq_num)
    # b) previous cycle
    distance_if_prev = abs(prev_seq_num - (seq_num - @seq_number_limit))
    # c) next cycle
    distance_if_next = abs(prev_seq_num - (seq_num + @seq_number_limit))

    [
      {:current, distance_if_current},
      {:next, distance_if_next},
      {:prev, distance_if_prev}
    ]
    |> Enum.min_by(fn {_atom, distance} -> distance end)
    ~> ({result, _value} -> result)
  end

  # Repeatedly shifts while `fun` approves the heap root; collected records
  # are accumulated in reverse and flipped once at the end.
  @spec shift_while(t, (t, Record.t() -> boolean), [Record.t() | nil]) ::
          {[Record.t() | nil], t}
  defp shift_while(%__MODULE__{heap: heap} = store, fun, acc \\ []) do
    heap
    |> Heap.root()
    |> case do
      nil ->
        {Enum.reverse(acc), store}

      record ->
        if fun.(store, record) do
          {record, store} = shift(store)
          shift_while(store, fun, [record | acc])
        else
          {Enum.reverse(acc), store}
        end
    end
  end

  # Inserts a record unless its index is already present (duplicate packet).
  defp add_record(%__MODULE__{heap: heap, set: set} = store, %Record{} = record) do
    if set |> MapSet.member?(record.index) do
      store
    else
      %__MODULE__{store | heap: Heap.push(heap, record), set: MapSet.put(set, record.index)}
      |> update_end_index(record.index)
    end
  end

  # Advancing past a multiple of 2^16 means a sequence-number rollover.
  defp bump_prev_index(%{prev_index: prev, rollover_count: roc} = store)
       when rem(prev + 1, @seq_number_limit) == 0,
       do: %__MODULE__{store | prev_index: prev + 1, rollover_count: roc + 1}

  defp bump_prev_index(store), do: %__MODULE__{store | prev_index: store.prev_index + 1}

  # Track the highest index seen. Note the guard order: `added_index > last`
  # with `last == nil` relies on Erlang term ordering (number < atom), so the
  # nil check in the second disjunct is what admits the first record.
  defp update_end_index(%__MODULE__{end_index: last} = store, added_index)
       when added_index > last or last == nil,
       do: %__MODULE__{store | end_index: added_index}

  defp update_end_index(%__MODULE__{end_index: last} = store, added_index)
       when last >= added_index,
       do: store
end
|
lib/membrane/rtp/jitter_buffer/buffer_store.ex
| 0.77675 | 0.520192 |
buffer_store.ex
|
starcoder
|
defmodule Membrane.Pipeline do
@moduledoc """
Module containing functions for constructing and supervising pipelines.
Pipelines are units that make it possible to instantiate, link and manage
elements and bins in convenient way (actually they should always be used inside
a pipeline). Linking pipeline children together enables them to pass data to one
another, and process it in different ways.
To create a pipeline, use the `__using__/1` macro and implement callbacks
of `Membrane.Pipeline` behaviour. For details on instantiating and linking
children, see `Membrane.ParentSpec`.
"""
use Bunch
alias __MODULE__.{Action, CallbackContext}
alias Membrane.{Child, Pad}
alias Membrane.Core.PlaybackHandler
alias Membrane.CrashGroup
require Membrane.Logger
@typedoc """
Defines options that can be passed to `start/3` / `start_link/3` and received
in `c:handle_init/1` callback.
"""
@type pipeline_options_t :: any
@type state_t :: map | struct
@typedoc """
Defines return values from Pipeline callback functions.
## Return values
* `{:ok, state}` - Save process state, with no actions to change the pipeline.
* `{{:ok, [action]}, state}` - Return a list of actions that will be performed within the
pipline. This can be used to start new children, or to send messages to specific children,
for example. Actions are a tuple of `{type, arguments}`, so may be written in the
form a keyword list. See `Membrane.Pipeline.Action` for more info.
* `{{:error, reason}, state}` - Terminates the pipeline with the given reason.
* `{:error, reason}` - raises a `Membrane.CallbackError` with the error tuple.
"""
@type callback_return_t ::
{:ok | {:ok, [Action.t()]} | {:error, any}, state_t}
| {:error, any}
@doc """
Enables to check whether module is membrane pipeline
"""
@callback membrane_pipeline? :: true
@doc """
Callback invoked on initialization of pipeline process. It should parse options
and initialize pipeline's internal state. Internally it is invoked inside
`c:GenServer.init/1` callback.
"""
@callback handle_init(options :: pipeline_options_t) :: callback_return_t()
@doc """
Callback invoked when pipeline is shutting down.
Internally called in `c:GenServer.terminate/2` callback.
Useful for any cleanup required.
"""
@callback handle_shutdown(reason, state :: state_t) :: :ok
when reason: :normal | :shutdown | {:shutdown, any} | term()
@doc """
Callback invoked when pipeline transition from `:stopped` to `:prepared` state has finished,
that is all of its children are prepared to enter `:playing` state.
"""
@callback handle_stopped_to_prepared(
context :: CallbackContext.PlaybackChange.t(),
state :: state_t
) ::
callback_return_t
@doc """
Callback invoked when pipeline transition from `:playing` to `:prepared` state has finished,
that is all of its children are prepared to be stopped.
"""
@callback handle_playing_to_prepared(
context :: CallbackContext.PlaybackChange.t(),
state :: state_t
) ::
callback_return_t
@doc """
Callback invoked when pipeline is in `:playing` state, i.e. all its children
are in this state.
"""
@callback handle_prepared_to_playing(
context :: CallbackContext.PlaybackChange.t(),
state :: state_t
) ::
callback_return_t
@doc """
Callback invoked when pipeline is in `:playing` state, i.e. all its children
are in this state.
"""
@callback handle_prepared_to_stopped(
context :: CallbackContext.PlaybackChange.t(),
state :: state_t
) ::
callback_return_t
@doc """
Callback invoked when pipeline is in `:terminating` state, i.e. all its children
are in this state.
"""
@callback handle_stopped_to_terminating(
context :: CallbackContext.PlaybackChange.t(),
state :: state_t
) :: callback_return_t
@doc """
Callback invoked when a notification comes in from an element.
"""
@callback handle_notification(
notification :: Membrane.Notification.t(),
element :: Child.name_t(),
context :: CallbackContext.Notification.t(),
state :: state_t
) :: callback_return_t
@doc """
Callback invoked when pipeline receives a message that is not recognized
as an internal membrane message.
Useful for receiving data sent from NIFs or other stuff.
"""
@callback handle_other(
message :: any,
context :: CallbackContext.Other.t(),
state :: state_t
) ::
callback_return_t
@doc """
Callback invoked when a child element starts processing stream via given pad.
"""
@callback handle_element_start_of_stream(
{Child.name_t(), Pad.ref_t()},
context :: CallbackContext.StreamManagement.t(),
state :: state_t
) :: callback_return_t
@doc """
Callback invoked when a child element finishes processing stream via given pad.
"""
@callback handle_element_end_of_stream(
{Child.name_t(), Pad.ref_t()},
context :: CallbackContext.StreamManagement.t(),
state :: state_t
) :: callback_return_t
@doc """
Callback invoked when `Membrane.ParentSpec` is linked and in the same playback
state as pipeline.
This callback can be started from `c:handle_init/1` callback or as
`t:Membrane.Pipeline.Action.spec_t/0` action.
"""
@callback handle_spec_started(
children :: [Child.name_t()],
context :: CallbackContext.SpecStarted.t(),
state :: state_t
) :: callback_return_t
@doc """
Callback invoked upon each timer tick. A timer can be started with `Membrane.Pipeline.Action.start_timer_t`
action.
"""
@callback handle_tick(
timer_id :: any,
context :: CallbackContext.Tick.t(),
state :: state_t
) :: callback_return_t
@doc """
Callback invoked when crash of the crash group happens.
"""
@callback handle_crash_group_down(
group_name :: CrashGroup.name_t(),
context :: CallbackContext.CrashGroupDown.t(),
state :: state_t
) :: callback_return_t
@optional_callbacks handle_init: 1,
handle_shutdown: 2,
handle_stopped_to_prepared: 2,
handle_playing_to_prepared: 2,
handle_prepared_to_playing: 2,
handle_prepared_to_stopped: 2,
handle_stopped_to_terminating: 2,
handle_other: 3,
handle_spec_started: 3,
handle_element_start_of_stream: 3,
handle_element_end_of_stream: 3,
handle_notification: 4,
handle_tick: 3,
handle_crash_group_down: 3
@doc """
Starts the Pipeline based on given module and links it to the current
process.
Pipeline options are passed to module's `c:handle_init/1` callback.
Process options are internally passed to `GenServer.start_link/3`.
Returns the same values as `GenServer.start_link/3`.
"""
@spec start_link(
        module,
        pipeline_options :: pipeline_options_t,
        process_options :: GenServer.options()
      ) :: GenServer.on_start()
def start_link(module, pipeline_options \\ nil, process_options \\ []) do
  # Shared starter, using the linking GenServer variant.
  do_start(:start_link, module, pipeline_options, process_options)
end
@doc """
Does the same as `start_link/3` but starts process outside of supervision tree.
"""
@spec start(
        module,
        pipeline_options :: pipeline_options_t,
        process_options :: GenServer.options()
      ) :: GenServer.on_start()
def start(module, pipeline_options \\ nil, process_options \\ []) do
  # Shared starter, using the non-linking GenServer variant.
  do_start(:start, module, pipeline_options, process_options)
end
# Validates that `module` really is a Membrane pipeline, then starts the
# generic Membrane.Core.Pipeline GenServer with `{module, pipeline_options}`
# as its init argument. `method` is either :start or :start_link.
defp do_start(method, module, pipeline_options, process_options) do
  if pipeline?(module) do
    Membrane.Logger.debug("""
    Pipeline start link: module: #{inspect(module)},
    pipeline options: #{inspect(pipeline_options)},
    process options: #{inspect(process_options)}
    """)

    apply(GenServer, method, [
      Membrane.Core.Pipeline,
      {module, pipeline_options},
      process_options
    ])
  else
    Membrane.Logger.error("""
    Cannot start pipeline, passed module #{inspect(module)} is not a Membrane Pipeline.
    Make sure that given module is the right one and it uses Membrane.Pipeline
    """)

    {:error, {:not_pipeline, module}}
  end
end
@doc """
Changes pipeline's playback state to `:stopped` and terminates its process.
It accpets two options:
* `blocking?` - tells whether to stop the pipeline synchronously
* `timeout` - if `blocking?` is set to true it tells how much
time (ms) to wait for pipeline to get terminated. Defaults to 5000.
"""
@spec stop_and_terminate(pipeline :: pid, Keyword.t()) ::
:ok | {:error, :timeout}
def stop_and_terminate(pipeline, opts \\ []) do
blocking? = Keyword.get(opts, :blocking?, false)
timeout = Keyword.get(opts, :timeout, 5000)
ref = if blocking?, do: Process.monitor(pipeline)
PlaybackHandler.request_playback_state_change(pipeline, :terminating)
if blocking?,
do: wait_for_down(ref, timeout),
else: :ok
end
# Blocks until a :DOWN message for the monitor `ref` arrives, or returns
# {:error, :timeout} after `timeout` milliseconds.
defp wait_for_down(ref, timeout) do
  receive do
    {:DOWN, ^ref, _process, _pid, _reason} -> :ok
  after
    timeout -> {:error, :timeout}
  end
end
@doc """
Changes playback state to `:playing`.
"""
@spec play(pid) :: :ok
def play(pid) do
  # Request the playback state machine to move to :playing.
  Membrane.Core.PlaybackHandler.request_playback_state_change(pid, :playing)
end
@doc """
Changes playback state to `:prepared`.
"""
@spec prepare(pid) :: :ok
def prepare(pid) do
  # Request the playback state machine to move to :prepared.
  Membrane.Core.PlaybackHandler.request_playback_state_change(pid, :prepared)
end
@doc """
Changes playback state to `:stopped`.
"""
@spec stop(pid) :: :ok
def stop(pid) do
  # Request the playback state machine to move to :stopped.
  Membrane.Core.PlaybackHandler.request_playback_state_change(pid, :stopped)
end
@doc """
Checks whether module is a pipeline.
"""
@spec pipeline?(module) :: boolean
def pipeline?(module) do
  # A pipeline advertises itself via the membrane_pipeline?/0 callback.
  Bunch.Module.check_behaviour(module, :membrane_pipeline?)
end
@doc false
# Injected into every `use`r module right before compilation. Adds the
# convenience wrappers (`start_link/2`, `start/2`, `play/1`, `prepare/1`,
# `stop/1`, `stop_and_terminate/2`) that proxy to this module — but only
# when the user has not already defined a function with the same name
# (checked via `Module.defines?/2` over the possible arities).
defmacro __before_compile__(_env) do
  quote do
    unless Enum.any?(0..2, &Module.defines?(__MODULE__, {:start_link, &1})) do
      @doc """
      Starts the pipeline `#{inspect(__MODULE__)}` and links it to the current process.
      A proxy for `#{inspect(unquote(__MODULE__))}.start_link/3`
      """
      @spec start_link(
              pipeline_options :: unquote(__MODULE__).pipeline_options_t(),
              process_options :: GenServer.options()
            ) :: GenServer.on_start()
      def start_link(pipeline_options \\ nil, process_options \\ []) do
        unquote(__MODULE__).start_link(__MODULE__, pipeline_options, process_options)
      end
    end

    unless Enum.any?(0..2, &Module.defines?(__MODULE__, {:start, &1})) do
      @doc """
      Starts the pipeline `#{inspect(__MODULE__)}` without linking it
      to the current process.
      A proxy for `#{inspect(unquote(__MODULE__))}.start/3`
      """
      @spec start(
              pipeline_options :: unquote(__MODULE__).pipeline_options_t(),
              process_options :: GenServer.options()
            ) :: GenServer.on_start()
      def start(pipeline_options \\ nil, process_options \\ []) do
        unquote(__MODULE__).start(__MODULE__, pipeline_options, process_options)
      end
    end

    unless Module.defines?(__MODULE__, {:play, 1}) do
      @doc """
      Changes playback state of pipeline to `:playing`.
      """
      @spec play(pid()) :: :ok
      defdelegate play(pipeline), to: unquote(__MODULE__)
    end

    unless Module.defines?(__MODULE__, {:prepare, 1}) do
      @doc """
      Changes playback state to `:prepared`.
      """
      @spec prepare(pid) :: :ok
      defdelegate prepare(pipeline), to: unquote(__MODULE__)
    end

    unless Module.defines?(__MODULE__, {:stop, 1}) do
      @doc """
      Changes playback state to `:stopped`.
      """
      @spec stop(pid) :: :ok
      defdelegate stop(pid), to: unquote(__MODULE__)
    end

    unless Enum.any?(1..2, &Module.defines?(__MODULE__, {:stop_and_terminate, &1})) do
      @doc """
      Changes pipeline's playback state to `:stopped` and terminates its process.
      """
      @spec stop_and_terminate(pid, Keyword.t()) :: :ok
      defdelegate stop_and_terminate(pipeline, opts \\ []), to: unquote(__MODULE__)
    end
  end
end
@doc """
Brings all the stuff necessary to implement a pipeline.
Options:
- `:bring_spec?` - if true (default) imports and aliases `Membrane.ParentSpec`
- `:bring_pad?` - if true (default) requires and aliases `Membrane.Pad`
"""
# Injects the pipeline behaviour, optional imports/aliases and default
# no-op implementations of every optional callback into the using module.
#
# Fix: the default callback bodies previously bound parameters they never
# used (`message`, `new_children`, `element`, `pad`, `notification`), which
# emitted "unused variable" compiler warnings in every module that `use`s
# Membrane.Pipeline. They are now underscored; generated code is otherwise
# unchanged.
defmacro __using__(options) do
  # Optionally bring `Membrane.ParentSpec` into scope (on by default).
  bring_spec =
    if options |> Keyword.get(:bring_spec?, true) do
      quote do
        import Membrane.ParentSpec
        alias Membrane.ParentSpec
      end
    end

  # Optionally bring `Membrane.Pad` into scope (on by default).
  bring_pad =
    if options |> Keyword.get(:bring_pad?, true) do
      quote do
        require Membrane.Pad
        alias Membrane.Pad
      end
    end

  # credo:disable-for-next-line Credo.Check.Refactor.LongQuoteBlocks
  quote do
    alias unquote(__MODULE__)
    @behaviour unquote(__MODULE__)
    @before_compile Pipeline

    unquote(bring_spec)
    unquote(bring_pad)

    @impl true
    def membrane_pipeline?, do: true
    @impl true
    def handle_init(_options), do: {:ok, %{}}
    @impl true
    def handle_shutdown(_reason, _state), do: :ok
    @impl true
    def handle_stopped_to_prepared(_ctx, state), do: {:ok, state}
    @impl true
    def handle_prepared_to_playing(_ctx, state), do: {:ok, state}
    @impl true
    def handle_playing_to_prepared(_ctx, state), do: {:ok, state}
    @impl true
    def handle_prepared_to_stopped(_ctx, state), do: {:ok, state}
    @impl true
    def handle_stopped_to_terminating(_ctx, state), do: {:ok, state}
    @impl true
    def handle_other(_message, _ctx, state), do: {:ok, state}
    @impl true
    def handle_spec_started(_new_children, _ctx, state), do: {:ok, state}
    @impl true
    def handle_element_start_of_stream({_element, _pad}, _ctx, state), do: {:ok, state}
    @impl true
    def handle_element_end_of_stream({_element, _pad}, _ctx, state), do: {:ok, state}
    @impl true
    def handle_notification(_notification, _element, _ctx, state), do: {:ok, state}
    @impl true
    def handle_crash_group_down(_group_name, _ctx, state), do: {:ok, state}

    defoverridable handle_init: 1,
                   handle_shutdown: 2,
                   handle_stopped_to_prepared: 2,
                   handle_playing_to_prepared: 2,
                   handle_prepared_to_playing: 2,
                   handle_prepared_to_stopped: 2,
                   handle_stopped_to_terminating: 2,
                   handle_other: 3,
                   handle_spec_started: 3,
                   handle_element_start_of_stream: 3,
                   handle_element_end_of_stream: 3,
                   handle_notification: 4,
                   handle_crash_group_down: 3
  end
end
end
|
lib/membrane/pipeline.ex
| 0.908906 | 0.667229 |
pipeline.ex
|
starcoder
|
defmodule Game do
  @moduledoc """
  A minesweeper-style game board.

  The board is a map from `{x, y}` coordinates to field maps of the shape
  `%{state: :hidden | :revealed | :flagged, mine: boolean, adjacent: non_neg_integer}`.
  """

  @doc """
  Builds a new `width` x `height` board with `mines` randomly placed mines.
  """
  def new(%{width: width, height: height, mines: mines}) do
    # NOTE: the previous `:random.seed(:erlang.now)` call was removed — the
    # `:random` module was deprecated in OTP 18 and deleted in OTP 24 (so the
    # call crashed on modern OTP), and `Enum.take_random/2` uses the
    # automatically seeded `:rand` module anyway.
    indices = get_indices(width, height)
    mine_list = Enum.take_random(indices, mines)

    for index <- indices, into: %{} do
      {index, generate_field(index, mine_list)}
    end
  end

  @doc """
  Returns all `{x, y}` coordinates of a `width` x `height` board,
  column-major (x varies slowest).
  """
  def get_indices(width, height) do
    for x <- 1..width, y <- 1..height, do: {x, y}
  end

  # Builds a single field: whether it holds a mine and how many neighbors do.
  defp generate_field({x, y}, mine_list) do
    adjacent =
      {x, y}
      |> get_neighbors()
      |> MapSet.new()
      |> MapSet.intersection(MapSet.new(mine_list))
      |> MapSet.size()

    %{
      state: :hidden,
      mine: {x, y} in mine_list,
      adjacent: adjacent
    }
  end

  # Shifts a coordinate by the given delta.
  defp generate_neighbor({x_delta, y_delta}, {x, y}) do
    {x + x_delta, y + y_delta}
  end

  # The up-to-8 coordinates surrounding `{x, y}`; may lie outside the board.
  defp get_neighbors({x, y}) do
    for x_delta <- -1..1, y_delta <- -1..1, {x_delta, y_delta} != {0, 0} do
      generate_neighbor({x_delta, y_delta}, {x, y})
    end
  end

  @doc """
  Reveals the field at `{x, y}`, flood-revealing connected empty regions.
  Unknown coordinates and non-hidden fields are left untouched.
  """
  def reveal(game, {x, y}) do
    cond do
      # O(1) membership check (was `{x, y} not in Map.keys(game)`, O(n)).
      not Map.has_key?(game, {x, y}) -> game
      game[{x, y}].state == :hidden -> do_reveal(game, {x, y})
      true -> game
    end
  end

  @doc """
  Marks `{x, y}` and, transitively, all connected zero-adjacency fields
  as `:revealed`.
  """
  def do_reveal(game, {x, y}) do
    indices =
      get_reveal_fields(MapSet.new(), game, {x, y})
      |> List.flatten()
      |> Enum.uniq()

    updated_fields =
      for index <- indices, into: %{} do
        {index, %{game[index] | state: :revealed}}
      end

    Map.merge(game, updated_fields)
  end

  @doc """
  Collects the coordinates a reveal at `{x, y}` should uncover, as a
  (possibly nested) list. Recursion stops at fields with adjacent mines,
  at mines themselves and at off-board coordinates; `ignore_neighbors`
  prevents revisiting already-queued fields.
  """
  def get_reveal_fields(ignore_neighbors, game, {x, y}) do
    cond do
      not Map.has_key?(game, {x, y}) ->
        []

      game[{x, y}].adjacent > 0 or game[{x, y}].mine ->
        [{x, y}]

      true ->
        neighbors = get_neighbors({x, y})
        to_visit = Enum.filter(neighbors, fn coord -> coord not in ignore_neighbors end)
        visited = MapSet.union(ignore_neighbors, MapSet.new(neighbors))

        [{x, y}] ++ Enum.map(to_visit, &get_reveal_fields(visited, game, &1))
    end
  end

  @doc """
  Toggles a flag on `{x, y}`. No-ops on unknown coordinates, on revealed
  fields, and when no flags are left to place.
  """
  def flag(game, {x, y}) do
    remaining_flags = get_remaining_flags(game)

    cond do
      not Map.has_key?(game, {x, y}) -> game
      remaining_flags <= 0 and game[{x, y}].state != :flagged -> game
      game[{x, y}].state == :revealed -> game
      game[{x, y}].state == :hidden -> %{game | {x, y} => %{game[{x, y}] | state: :flagged}}
      game[{x, y}].state == :flagged -> %{game | {x, y} => %{game[{x, y}] | state: :hidden}}
    end
  end

  @doc """
  Number of mines minus number of flagged fields.
  """
  def get_remaining_flags(game) do
    mines = Enum.count(game, fn {{_x, _y}, field} -> field.mine end)
    flagged = Enum.count(game, fn {{_x, _y}, field} -> field.state == :flagged end)
    mines - flagged
  end

  @doc """
  Returns `{:lose, ""}` when a mine is revealed, `{:win, ""}` when only
  mines remain hidden, and `{:continue, remaining_flags_string}` otherwise.
  """
  def game_check(game) do
    {revealed, hidden} =
      Enum.split_with(
        game,
        fn {_index, field} -> field.state == :revealed end
      )

    remaining_flags = get_remaining_flags(game)

    cond do
      Enum.any?(revealed, fn {_index, field} -> field.mine end) -> {:lose, ""}
      Enum.all?(hidden, fn {_index, field} -> field.mine end) -> {:win, ""}
      true -> {:continue, Integer.to_string(remaining_flags)}
    end
  end

  @doc """
  Pixel offset of the top-left corner of the field at `{x, y}`.
  """
  def get_field_translate({x, y}, field_size) do
    {(x - 1) * field_size, (y - 1) * field_size}
  end

  @doc """
  Maps a pixel coordinate (relative to the window) to a board index.
  """
  def coord_to_index({coord_x, coord_y}, grid_offset, field_size) do
    {coord_x, coord_y} = {coord_x - grid_offset, coord_y - grid_offset}
    {1 + floor(coord_x / field_size), 1 + floor(coord_y / field_size)}
  end
end
|
lib/game.ex
| 0.545528 | 0.628778 |
game.ex
|
starcoder
|
defmodule Ecto.Repo do
@moduledoc """
Defines a repository.
A repository maps to an underlying data store, controlled by the
adapter. For example, Ecto ships with a Postgres adapter that
stores data into a PostgreSQL database.
When used, the repository expects the `:otp_app` as option.
The `:otp_app` should point to an OTP application that has
the repository configuration. For example, the repository:
defmodule Repo do
use Ecto.Repo, otp_app: :my_app
end
Could be configured with:
config :my_app, Repo,
adapter: Ecto.Adapters.Postgres,
database: "ecto_simple",
username: "postgres",
password: "<PASSWORD>",
hostname: "localhost"
Most of the configuration that goes into the `config` is specific
to the adapter, so check `Ecto.Adapters.Postgres` documentation
for more information. However, some configuration is shared across
all adapters, they are:
* `:priv` - the directory where to keep repository data, like
migrations, schema and more. Defaults to "priv/YOUR_REPO"
* `:url` - an URL that specifies storage information. Read below
for more information
## URLs
Repositories by default support URLs. For example, the configuration
above could be rewritten to:
config :my_app, Repo,
url: "ecto://postgres:postgres@localhost/ecto_simple"
The schema can be of any value. The path represents the database name
while options are simply merged in.
URLs also support `{:system, "KEY"}` to be given, telling Ecto to load
the configuration from the system environment instead:
config :my_app, Repo,
url: {:system, "DATABASE_URL"}
"""
use Behaviour
@type t :: module
@doc false
# Injects the full repository API into the using module. All functions are
# thin wrappers that dispatch to the configured adapter or to the
# Ecto.Repo.* helper modules with `__MODULE__` and `@adapter` filled in.
defmacro __using__(opts) do
  quote bind_quoted: [opts: opts] do
    @behaviour Ecto.Repo

    # Resolve the OTP app and adapter at compile time from the `use` options.
    {otp_app, adapter} = Ecto.Repo.Config.parse(__MODULE__, opts)
    @otp_app otp_app
    @adapter adapter
    @before_compile adapter
    require Logger

    # Reads the repository configuration from the :otp_app environment.
    def config do
      Ecto.Repo.Config.config(@otp_app, __MODULE__)
    end

    def start_link do
      @adapter.start_link(__MODULE__, config())
    end

    def stop do
      @adapter.stop(__MODULE__)
    end

    def transaction(opts \\ [], fun) when is_list(opts) do
      @adapter.transaction(__MODULE__, opts, fun)
    end

    def rollback(value) do
      @adapter.rollback(__MODULE__, value)
    end

    def all(queryable, opts \\ []) do
      Ecto.Repo.Queryable.all(__MODULE__, @adapter, queryable, opts)
    end

    def get(queryable, id, opts \\ []) do
      Ecto.Repo.Queryable.get(__MODULE__, @adapter, queryable, id, opts)
    end

    def get!(queryable, id, opts \\ []) do
      Ecto.Repo.Queryable.get!(__MODULE__, @adapter, queryable, id, opts)
    end

    def one(queryable, opts \\ []) do
      Ecto.Repo.Queryable.one(__MODULE__, @adapter, queryable, opts)
    end

    def one!(queryable, opts \\ []) do
      Ecto.Repo.Queryable.one!(__MODULE__, @adapter, queryable, opts)
    end

    # A macro (not a function) so update expressions can be expanded at
    # compile time.
    defmacro update_all(queryable, values, opts \\ []) do
      Ecto.Repo.Queryable.update_all(__MODULE__, @adapter, queryable,
                                     values, opts)
    end

    def delete_all(queryable, opts \\ []) do
      Ecto.Repo.Queryable.delete_all(__MODULE__, @adapter, queryable, opts)
    end

    def insert(model, opts \\ []) do
      Ecto.Repo.Model.insert(__MODULE__, @adapter, model, opts)
    end

    def update(model, opts \\ []) do
      Ecto.Repo.Model.update(__MODULE__, @adapter, model, opts)
    end

    def delete(model, opts \\ []) do
      Ecto.Repo.Model.delete(__MODULE__, @adapter, model, opts)
    end

    def preload(model_or_models, preloads) do
      Ecto.Repo.Preloader.preload(model_or_models, __MODULE__, preloads)
    end

    def adapter do
      @adapter
    end

    def __repo__ do
      true
    end

    # Default query logger: times `fun` and logs command, params and
    # duration (in ms) at debug level. Overridable for custom
    # instrumentation (see the `log/2` callback docs below).
    def log({_, cmd, params}, fun) do
      prev = :os.timestamp()

      try do
        fun.()
      after
        Logger.debug fn ->
          next = :os.timestamp()
          diff = :timer.now_diff(next, prev)
          data = Enum.map params, fn
            %Ecto.Query.Tagged{value: value} -> value
            value -> value
          end
          [cmd, ?\s, inspect(data), ?\s, ?(, inspect(div(diff, 100) / 10), ?m, ?s, ?)]
        end
      end
    end

    defoverridable [log: 2]
  end
end
@doc """
Returns the adapter tied to the repository.
"""
defcallback adapter() :: Ecto.Adapter.t
@doc """
Simply returns true to mark this module as a repository.
"""
defcallback __repo__ :: true
@doc """
Returns the adapter configuration stored in the `:otp_app` environment.
"""
defcallback config() :: Keyword.t
@doc """
Starts any connection pooling or supervision and return `{:ok, pid}`
or just `:ok` if nothing needs to be done.
Returns `{:error, {:already_started, pid}}` if the repo already
started or `{:error, term}` in case anything else goes wrong.
"""
defcallback start_link() :: {:ok, pid} | :ok |
{:error, {:already_started, pid}} |
{:error, term}
@doc """
Stops any connection pooling or supervision started with `start_link/1`.
"""
defcallback stop() :: :ok
@doc """
Fetches a single model from the data store where the primary key matches the
given id.
Returns `nil` if no result was found. If the model in the queryable
has no primary key `Ecto.NoPrimaryKeyError` will be raised.
## Options
* `:timeout` - The time in milliseconds to wait for the call to finish,
`:infinity` will wait indefinitely (default: 5000)
* `:log` - When false, does not log the query
"""
defcallback get(Ecto.Queryable.t, term, Keyword.t) :: Ecto.Model.t | nil | no_return
@doc """
Similar to `get/3` but raises `Ecto.NotSingleResult` if no record was found.
## Options
* `:timeout` - The time in milliseconds to wait for the call to finish,
`:infinity` will wait indefinitely (default: 5000);
* `:log` - When false, does not log the query
"""
defcallback get!(Ecto.Queryable.t, term, Keyword.t) :: Ecto.Model.t | nil | no_return
@doc """
Fetches a single result from the query.
Returns `nil` if no result was found.
## Options
* `:timeout` - The time in milliseconds to wait for the call to finish,
`:infinity` will wait indefinitely (default: 5000);
* `:log` - When false, does not log the query
"""
defcallback one(Ecto.Queryable.t, Keyword.t) :: Ecto.Model.t | nil | no_return
@doc """
Similar to `one/3` but raises `Ecto.NotSingleResult` if no record was found.
## Options
* `:timeout` - The time in milliseconds to wait for the call to finish,
`:infinity` will wait indefinitely (default: 5000);
* `:log` - When false, does not log the query
"""
defcallback one!(Ecto.Queryable.t, Keyword.t) :: Ecto.Model.t | nil | no_return
@doc """
Preloads all associations on the given model or models.
`preloads` is a list of associations that can be nested in rose
tree structure:
node :: atom | {atom, node} | [node]
In case the associatin was already loaded, preload won't attempt
to reload it.
"""
defcallback preload([Ecto.Model.t] | Ecto.Model.t, preloads :: term) ::
[Ecto.Model.t] | Ecto.Model.t
@doc """
Fetches all entries from the data store matching the given query.
May raise `Ecto.QueryError` if query validation fails.
## Options
* `:timeout` - The time in milliseconds to wait for the call to finish,
`:infinity` will wait indefinitely (default: 5000);
* `:log` - When false, does not log the query
## Example
# Fetch all post titles
query = from p in Post,
select: p.title
MyRepo.all(query)
"""
defcallback all(Ecto.Query.t, Keyword.t) :: [Ecto.Model.t] | no_return
@doc """
Updates all entries matching the given query with the given values.
This operation does not run the model `before_update` and
`after_update` callbacks.
## Options
* `:timeout` - The time in milliseconds to wait for the call to finish,
`:infinity` will wait indefinitely (default: 5000);
* `:log` - When false, does not log the query
## Examples
MyRepo.update_all(Post, title: "New title")
MyRepo.update_all(p in Post, visits: fragment("? + 1", p.visits))
from(p in Post, where: p.id < 10)
|> MyRepo.update_all(title: "New title")
"""
defmacrocallback update_all(Macro.t, Keyword.t, Keyword.t) :: integer | no_return
@doc """
Deletes all entries matching the given query.
This operation does not run the model `before_delete` and
`after_delete` callbacks.
## Options
* `:timeout` - The time in milliseconds to wait for the call to finish,
`:infinity` will wait indefinitely (default: 5000);
* `:log` - When false, does not log the query
## Examples
MyRepo.delete_all(Post)
from(p in Post, where: p.id < 10) |> MyRepo.delete_all
"""
defcallback delete_all(Ecto.Queryable.t, Keyword.t) :: integer | no_return
@doc """
Inserts a model or a changeset.
In case a model is given, the model is converted into a changeset
with all model non-virtual fields as part of the changeset.
In case a changeset is given, the changes in the changeset are
merged with the model fields, and all of them are sent to the
database.
If any `before_insert` or `after_insert` callback is registered
in the given model, they will be invoked with the changeset.
## Options
* `:timeout` - The time in milliseconds to wait for the call to finish,
`:infinity` will wait indefinitely (default: 5000);
* `:log` - When false, does not log the query
## Example
post = MyRepo.insert %Post{title: "Ecto is great"}
"""
defcallback insert(Ecto.Model.t | Ecto.Changeset.t, Keyword.t) :: Ecto.Model.t | no_return
@doc """
Updates a model or changeset using its primary key.
In case a model is given, the model is converted into a changeset
with all model non-virtual fields as part of the changeset. For this
reason, it is preferred to use changesets as they perform dirty
tracking and avoid sending data that did not change to the database
over and over. In case there are no changes in the changeset, no
data is sent to the database at all.
In case a changeset is given, only the changes in the changeset
will be updated, leaving all the other model fields intact.
If any `before_update` or `after_update` callback are registered
in the given model, they will be invoked with the changeset.
If the model has no primary key, `Ecto.NoPrimaryKeyError` will be raised.
## Options
* `:timeout` - The time in milliseconds to wait for the call to finish,
`:infinity` will wait indefinitely (default: 5000);
* `:log` - When false, does not log the query
## Example
post = MyRepo.get!(Post, 42)
post = %{post | title: "New title"}
MyRepo.update(post)
"""
defcallback update(Ecto.Model.t | Ecto.Changeset.t, Keyword.t) :: Ecto.Model.t | no_return
@doc """
Deletes a model using its primary key.
If any `before_delete` or `after_delete` callback are registered
in the given model, they will be invoked with the changeset.
If the model has no primary key, `Ecto.NoPrimaryKeyError` will be raised.
## Options
* `:timeout` - The time in milliseconds to wait for the call to finish,
`:infinity` will wait indefinitely (default: 5000);
* `:log` - When false, does not log the query
## Example
[post] = MyRepo.all(from(p in Post, where: p.id == 42))
MyRepo.delete(post)
"""
defcallback delete(Ecto.Model.t, Keyword.t) :: Ecto.Model.t | no_return
@doc """
Runs the given function inside a transaction.
If an unhandled error occurs the transaction will be rolled back.
If no error occurred the transaction will be committed when the
function returns. A transaction can be explicitly rolled back
by calling `rollback/1`, this will immediately leave the function
and return the value given to `rollback` as `{:error, value}`.
A successful transaction returns the value returned by the function
wrapped in a tuple as `{:ok, value}`. Transactions can be nested.
## Options
* `:timeout` - The time in milliseconds to wait for the call to finish,
`:infinity` will wait indefinitely (default: 5000);
* `:log` - When false, does not log begin/commit/rollback queries
## Examples
MyRepo.transaction(fn ->
MyRepo.update(%{alice | balance: alice.balance - 10})
MyRepo.update(%{bob | balance: bob.balance + 10})
end)
# In the following example only the comment will be rolled back
MyRepo.transaction(fn ->
MyRepo.insert(%Post{})
MyRepo.transaction(fn ->
MyRepo.insert(%Comment{})
raise "error"
end)
end)
# Roll back a transaction explicitly
MyRepo.transaction(fn ->
p = MyRepo.insert(%Post{})
if not Editor.post_allowed?(p) do
MyRepo.rollback(:posting_not_allowed)
end
end)
"""
defcallback transaction(Keyword.t, fun) :: {:ok, any} | {:error, any}
@doc """
Rolls back the current transaction.
The transaction will return the value given as `{:error, value}`.
"""
defcallback rollback(any) :: no_return
@doc ~S"""
Enables logging of adapter actions such as sending queries to the database.
By default writes to Logger but can be overriden to customize behaviour.
You must always return the result of calling the given function.
## Examples
The default implementation of the `log/2` function is shown below:
def log({_, cmd, params}, fun) do
prev = :os.timestamp()
try do
fun.()
after
Logger.debug fn ->
next = :os.timestamp()
diff = :timer.now_diff(next, prev)
data = Enum.map params, fn
%Ecto.Query.Tagged{value: value} -> value
value -> value
end
[cmd, ?\s, inspect(data), ?\s, ?(, inspect(div(diff, 100) / 10), ?m, ?s, ?)]
end
end
end
"""
defcallback log({atom, iodata, [term]}, function :: (() -> any)) :: any
end
|
lib/ecto/repo.ex
| 0.876423 | 0.496399 |
repo.ex
|
starcoder
|
defmodule Odoo do
@moduledoc """
Library to access Odoo JSON-RPC API.
Provides the following methods for interacting with Odoo:
- login
- search
- search_read
- read
- read_group
- create
- write
- delete
"""
@doc """
- Login in Odoo and set session_id for future calls
### Params
- user: string Odoo user
- password: string <PASSWORD>
- database: string Odoo database
- url: string Odoo url, http or https
### Examples
```
iex> {:ok, odoo} = Odoo.login(
"admin", "admin", "mydatabasename",
"https://mydatabasename.odoo.com")
{:ok,
%Odoo.Session{
cookie: "session_id=c8e544d0b305920afgdgsfdfdsa7b0cfe; Expires=Fri, 06-May-2022 23:16:12 GMT; Max-Age=7776000; HttpOnly; Path=/",
database: "mydatabasename",
password: "<PASSWORD>",
url: "https://mydatabasename.odoo.com",
user: "admin",
user_context: %{"lang" => "en_US", "tz" => "Asia/Calcutta", "uid" => 2}
}}
```
"""
@spec login(String.t(), String.t(), String.t(), String.t()) ::
{:ok, Odoo.Session.t()} | {:error, String.t()}
def login(user, password, database, url) do
  # Normalize the base URL (strip one trailing "/") before delegating.
  normalized_url = parse_url(url)
  Odoo.Core.login(user, password, database, normalized_url)
end
# Strips a single trailing "/" from the Odoo base URL so request paths can
# be appended safely. `String.replace_suffix/3` is a no-op when the URL does
# not end with "/" — identical behavior to the old `String.slice(url, 0..-2)`
# branch, but without the negative-range form deprecated in newer Elixir.
defp parse_url(url) do
  String.replace_suffix(url, "/", "")
end
@doc """
### Examples
- Search and read with default options (limit, domain, fields, offset and order)
```{:ok, res} = Odoo.search_read(odoo, "res.partner")```
- Put options to tune the query:
```elixir
{:ok, result} = Odoo.search_read(
odoo,
"res.partner",
[
limit: 1,
domain: [["name", "ilike", "Antonio"]],
fields: ["name", "street"],
offset: 11,
order: "name asc"])
{:ok,
[
%{
"id" => 226,
"name" => "<NAME>",
"street" => "Calle principal 1"
}
]}
```
- Search and read active and archived records (by default odoo only return active records)
```elixir
{:ok, partners} = Odoo.search_read(
odoo,
"res.partner", [
fields: ["name"],
offset: 0,
limit: 10,
order: "id",
domain: [["active", "in", [true,false]]]
]
)
```
"""
# Thin wrapper delegating the search_read RPC to Odoo.Core.
def search_read(%Odoo.Session{} = session, model, opts \\ []),
  do: Odoo.Core.search_read(session, model, opts)
@doc """
- Search by domain. Return single id or id list.
### Params
- Arguments in keyword list format.
- Required opts:
- domain: list of list
- Optional arguments
- limit: int, max number of rows to return from odoo
- offset: int, offset over the default values to return
### Examples
```elixir
iex> {:ok, partner_ids} = Odoo.search(
odoo,
"res.partner",
[ domain: [
["name", "ilike", "Antonia%"],
["customer", "=", true],
["create_date",">=","2021-06-01"]
],
limit: 5,
offset: 10,
order: "name"
])
{:ok, [318519, 357088, 237581, 378802, 258340]}
```
- Return one value is a single integer for the id
- Return more than one value is a list of integers
- Can also return {:error, message} if the operation fails.
"""
# Thin wrapper delegating the search RPC to Odoo.Core.
def search(%Odoo.Session{} = session, model, opts \\ []),
  do: Odoo.Core.search(session, model, opts)
@doc """
- Create objects
- Return {:ok, new_object_id} or {:error, message}
### Examples
```elixir
iex> {:ok, odoo} = Odoo.login()
iex> {:ok, product_id} = Odoo.create(odoo, "product.product", [name: "mi mega producto3"])
{:ok, 63}
```
"""
# Thin wrapper delegating the create RPC to Odoo.Core.
def create(%Odoo.Session{} = session, model, opts \\ []),
  do: Odoo.Core.create(session, model, opts)
@doc """
- Read objects by id
- Return {:ok, objects_list} or {:error, message}
### Examples
```elixir
iex> {:ok, odoo} = Odoo.login()
iex> {:ok, product} = Odoo.read(
odoo,
"product.product",
[63],
[fields: ["name", "categ_id"]])
{:ok, [%{"categ_id" => [1, "All"], "id" => 63, "name" => "mi mega producto3"}]}
```
"""
# Thin wrapper delegating the read RPC to Odoo.Core.
def read(%Odoo.Session{} = session, model, object_id, opts \\ []),
  do: Odoo.Core.read(session, model, object_id, opts)
@doc """
- Read and group objects
### Params
- :fields
- :domain
- :groupby
- :lazy
- :orderby
- :offset
### Examples
```elixir
iex> {:ok, result} = Odoo.read_group(
odoo,
"account.invoice", [
domain: [["date_invoice", ">=", "2021-11-01"]],
groupby: ["date_invoice:month"],
fields: ["number", "partner_id"], limit: 2, lazy: true])
%{
"__domain" => [
"&",
"&",
["date_invoice", ">=", "2022-01-01"],
["date_invoice", "<", "2022-02-01"],
["date_invoice", ">=", "2021-11-01"]
],
"date_invoice:month" => "enero 2022",
"date_invoice_count" => 61
},
%{
"__domain" => [
"&",
"&",
["date_invoice", ">=", "2022-02-01"],
["date_invoice", "<", "2022-03-01"],
["date_invoice", ">=", "2021-11-01"]
],
"date_invoice:month" => "febrero 2022",
"date_invoice_count" => 32
}
]}
```
"""
# Thin wrapper delegating the read_group RPC to Odoo.Core.
def read_group(%Odoo.Session{} = session, model, opts \\ []),
  do: Odoo.Core.read_group(session, model, opts)
@doc """
- Update objects by id
### Examples
```elixir
iex> {:ok, odoo} = Odoo.login()
iex> {:ok, result} = Odoo.write(odoo, "product.product", [63], [name: "Mega Pro 3"])
{:ok, true}
```
"""
# Thin wrapper delegating the write RPC to Odoo.Core.
def write(%Odoo.Session{} = session, model, object_id, opts \\ []),
  do: Odoo.Core.write(session, model, object_id, opts)
@doc """
- Delete objects by id
### Examples
```elixir
iex> {:ok, result} = Odoo.delete(odoo, "product.product", [63])
{:ok, true}
```
"""
# Thin wrapper delegating the delete RPC to Odoo.Core.
def delete(%Odoo.Session{} = session, model, object_id),
  do: Odoo.Core.delete(session, model, object_id)
@doc """
Pagination over results in search_read (launch call to api odoo)
### Examples
```elixir
iex> {:ok, result} = Odoo.search_read(
odoo, "product.product", limit: 5, fields: ["name"], order: "id asc")
{:ok,
%Odoo.Result{
data: [
%{"id" => 1, "name" => "Restaurant Expenses"},
%{"id" => 2, "name" => "Hotel Accommodation"},
%{"id" => 3, "name" => "Virtual Interior Design"},
%{"id" => 4, "name" => "Virtual Home Staging"},
%{"id" => 5, "name" => "Office Chair"}
],
model: "product.product",
opts: [limit: 5, fields: ["name"], order: "id asc"]
}}
iex> {:ok, result2} = Odoo.next(odoo, result)
{:ok,
%Odoo.Result{
data: [
%{"id" => 6, "name" => "Office Lamp"},
%{"id" => 7, "name" => "Office Design Software"},
%{"id" => 8, "name" => "Desk Combination"},
%{"id" => 9, "name" => "Customizable Desk"},
%{"id" => 10, "name" => "Customizable Desk"}
],
model: "product.product",
opts: [offset: 5, limit: 5, fields: ["name"], order: "id asc"]
}}
```
"""
def next(%Odoo.Session{} = session, result) do
  # Advance the pagination options, then re-issue the original query.
  search_read(session, result.model, Odoo.Result.next(result.opts))
end
@doc """
Get previous page results (launch call to api odoo)
### Examples
```elixir
...
iex> {:ok, result2} = Odoo.next(odoo, result)
{:ok,
%Odoo.Result{
data: [
%{"id" => 6, "name" => "Office Lamp"},
%{"id" => 7, "name" => "Office Design Software"},
%{"id" => 8, "name" => "Desk Combination"},
%{"id" => 12, "name" => "Customizable Desk"},
%{"id" => 13, "name" => "Customizable Desk"}
],
model: "product.product",
opts: [offset: 5, limit: 5, fields: ["name"], order: "id asc"]
}}
iex> {:ok, result3} = Odoo.prev(odoo, result2)
{:ok,
%Odoo.Result{
data: [
%{"id" => 1, "name" => "Restaurant Expenses"},
%{"id" => 2, "name" => "Hotel Accommodation"},
%{"id" => 3, "name" => "Virtual Interior Design"},
%{"id" => 4, "name" => "Virtual Home Staging"},
%{"id" => 5, "name" => "Office Chair"}
],
model: "product.product",
opts: [offset: 0, limit: 5, fields: ["name"], order: "id asc"]
}}
```
"""
# Steps pagination backwards: Odoo.Result.prev/1 decrements the offset in
# the previous result's opts (floored at 0 — see the doctest above), then
# re-runs the same search_read.
def prev(odoo = %Odoo.Session{}, result) do
  new_opts = Odoo.Result.prev(result.opts)
  Odoo.search_read(odoo, result.model, new_opts)
end
end
|
lib/odoo.ex
| 0.756627 | 0.594728 |
odoo.ex
|
starcoder
|
defmodule ActionMap do
  @moduledoc """
  Maps action names to names of modules which implement the `Action` behaviour.
  """

  # Registry of supported actions. Values are module basenames (without the
  # "Elixir." prefix); they are resolved lazily in `get_module/1`, so a
  # missing action module only fails at call time, not at compile time.
  @actions %{
    "multiplot" => "MultiplotAction",
    "scatter" => "ScatterAction",
    "ellipse" => "EllipseAction",
    "timeseries" => "TimeSeriesAction"
  }

  @doc """
  Lists the available actions with their descriptions.

  `filter` is a list of action names to *exclude* from the listing.
  """
  @spec list_actions([String.t()]) :: [{iodata(), iodata()}]
  def list_actions(filter \\ []) do
    @actions
    |> Map.keys()
    |> Enum.reject(&(&1 in filter))
    |> Enum.map(&{&1, description(&1)})
  end

  @doc """
  Looks up the module basename registered for `action`.

  Returns `{:ok, module_name}`, or `{:error, message}` when the action is
  unknown.
  """
  def module_for_action(action) do
    case Map.fetch(@actions, action) do
      {:ok, _module_name} = ok -> ok
      :error -> {:error, "Unrecognised action \"#{action}\""}
    end
  end

  @doc """
  Converts a module basename (e.g. `"ScatterAction"`) into the module atom.

  Uses `String.to_existing_atom/1`, so the referenced module must already be
  compiled/loaded; this also avoids minting atoms from arbitrary input.
  """
  def get_module(module_name) do
    String.to_existing_atom("Elixir.#{module_name}")
  end

  @doc """
  Validates `args`/`options` for `action` and executes it.

  When `args` is a list but the action's module reports `expects_list?` as
  `false`, the action is executed once per element. Returns whatever the
  action's `execute/2` returns, or `{:error, message}` for unknown actions
  or failed validation.
  """
  @spec execute(iodata(), map() | [map()], map()) :: atom() | {atom(), iodata()}
  def execute(action, args, options) do
    with {:ok, module_name} <- module_for_action(action) do
      module = get_module(module_name)

      if is_list(args) and run_for_module(module, :expects_list?, []) == false do
        # Fan a list of argument maps out to one execution per element.
        Enum.map(args, &execute(action, &1, options))
      else
        with {:ok, _} <- Action.validate(module, args, options) do
          run_for_module(module, :execute, [args, options])
        end
      end
    end
  end

  # Accepts either a module basename (binary) or a module atom and applies
  # `function_name` with `arg_list` on it.
  defp run_for_module(module, function_name, arg_list) when is_binary(module) do
    module |> get_module() |> run_for_module(function_name, arg_list)
  end

  defp run_for_module(module, function_name, arg_list) do
    apply(module, function_name, arg_list)
  end

  @doc """
  Returns the requirements declared by the action's module.
  """
  @spec requirements(iodata()) :: map() | {atom(), iodata()}
  def requirements(action), do: call_action(action, :requirements)

  @doc """
  Returns the options the action's module expects.
  """
  @spec expected_options(iodata()) :: map() | {atom(), iodata()}
  def expected_options(action), do: call_action(action, :expected_options)

  @doc """
  Returns the action's human-readable description.
  """
  @spec description(iodata()) :: iodata() | {atom(), iodata()}
  def description(action), do: call_action(action, :description)

  @doc """
  Returns a map with the action's name, requirements, description and
  expected options, or `{:error, message}` for unknown actions.
  """
  @spec action_info(iodata()) :: map() | {atom(), iodata()}
  def action_info(action) do
    case module_for_action(action) do
      {:error, _} = error ->
        error

      {:ok, module_name} ->
        [:requirements, :description, :expected_options]
        |> Map.new(&{&1, run_for_module(module_name, &1, [])})
        |> Map.put(:action, action)
    end
  end

  # Shared "look up the module, then invoke a zero-arity callback" helper
  # used by requirements/1, expected_options/1 and description/1, which
  # previously triplicated this case expression.
  defp call_action(action, function_name) do
    case module_for_action(action) do
      {:ok, module_name} -> run_for_module(module_name, function_name, [])
      {:error, _} = error -> error
    end
  end
end
|
apps/imposc/lib/core/actions/action_map.ex
| 0.751283 | 0.505859 |
action_map.ex
|
starcoder
|
defmodule Aecore.Chain.Identifier do
  @moduledoc """
  Utility module for interacting with identifiers.
  Our binaries like account pubkey or hashes will already be represented as encoded (with already specified tag) binaries, using the following format:
  <<Tag:1/unsigned-integer-unit:8, Binary:32/binary-unit:8>>,
  Where Tag is a non-negative integer ranging from 1 to 6 (at the current state of this documentation, for more info - :aecore, :binary_ids list in config.exs)
  and Binary is a regular 32 byte binary
  """
  alias __MODULE__

  defstruct type: :undefined, value: ""

  @typedoc "Structure of the Identifier Transaction type"
  @type t() :: %Identifier{type: type(), value: value()}
  @type type() :: :account | :name | :commitment | :oracle | :contract | :channel
  @type value() :: binary()

  # Width (in bits) of the tag prefix in the encoded binary form.
  @tag_size 8

  @doc """
  Builds an `Identifier` struct from a raw binary value and a type atom.
  """
  @spec create_identity(value(), type()) :: Identifier.t()
  def create_identity(value, type)
      when is_atom(type) and is_binary(value) do
    %Identifier{type: type, value: value}
  end

  @doc """
  Checks that `id` (or every element of a list of ids) is a well-formed
  identifier of the given `type`. Anything else returns `false`.
  """
  @spec valid?(Identifier.t() | list(Identifier.t()), type()) :: boolean()
  def valid?(%Identifier{value: value} = id, type) do
    create_identity(value, type) == id
  end

  def valid?(ids_list, type) when is_list(ids_list) do
    Enum.all?(ids_list, fn id -> valid?(id, type) end)
  end

  def valid?(_, _) do
    false
  end

  @doc """
  Builds an identifier from `value`/`type` and immediately encodes it into
  its tagged binary form.
  """
  # FIX: the previous spec declared `(type(), value())`, the reverse of the
  # actual argument order `(value, type)`.
  @spec create_encoded_to_binary(value(), type()) :: binary()
  def create_encoded_to_binary(value, type) do
    value
    |> create_identity(type)
    |> encode_to_binary()
  end

  # API needed for RLP
  @doc """
  Encodes an identifier as `<<tag::8, value::binary>>`.
  """
  @spec encode_to_binary(Identifier.t()) :: binary()
  def encode_to_binary(%Identifier{value: value, type: type}) do
    tag = type_to_tag(type)
    <<tag::unsigned-integer-size(@tag_size), value::binary>>
  end

  @doc """
  Decodes a tagged binary back into an identifier struct.
  """
  # FIX: spec previously said bare `tuple()`; it is always an ok/error pair.
  @spec decode_from_binary(binary()) :: {:ok, Identifier.t()} | {:error, String.t()}
  def decode_from_binary(<<tag::unsigned-integer-size(@tag_size), data::binary>>)
      when is_binary(data) do
    case tag_to_type(tag) do
      {:error, msg} ->
        {:error, msg}

      {:ok, type} ->
        {:ok, %Identifier{type: type, value: data}}
    end
  end

  @doc """
  Decodes a tagged binary and returns just its raw value, enforcing that
  the decoded type matches the expected `type`.
  """
  # FIX: spec previously omitted the `{:ok, _}` wrapper on success.
  @spec decode_from_binary_to_value(binary(), type()) ::
          {:ok, value()} | {:error, String.t()}
  def decode_from_binary_to_value(data, type) do
    case decode_from_binary(data) do
      {:ok, %Identifier{type: ^type, value: value}} ->
        {:ok, value}

      {:ok, %Identifier{type: received_type}} ->
        {:error, "#{__MODULE__}: Unexpected type. Expected #{type}, but got #{received_type}"}

      {:error, _} = error ->
        error
    end
  end

  @doc """
  Encodes every identifier in a list into its tagged binary form.
  """
  @spec encode_list_to_binary(list(t())) :: list(binary())
  def encode_list_to_binary([]), do: []

  def encode_list_to_binary([head | rest]) do
    [encode_to_binary(head) | encode_list_to_binary(rest)]
  end

  @doc """
  Decodes a list of tagged binaries; stops at the first decoding error.
  """
  @spec decode_list_from_binary(list(binary())) ::
          {:ok, list(Identifier.t())} | {:error, String.t()}
  def decode_list_from_binary([]), do: {:ok, []}

  def decode_list_from_binary([head | rest]) do
    with {:ok, head_decoded} <- decode_from_binary(head),
         {:ok, rest_decoded} <- decode_list_from_binary(rest) do
      {:ok, [head_decoded | rest_decoded]}
    else
      {:error, _} = error -> error
    end
  end

  # Tag <-> type mapping; must stay in sync with the :aecore, :binary_ids
  # list in config.exs.
  defp type_to_tag(:account), do: 1
  defp type_to_tag(:name), do: 2
  defp type_to_tag(:commitment), do: 3
  defp type_to_tag(:oracle), do: 4
  defp type_to_tag(:contract), do: 5
  defp type_to_tag(:channel), do: 6

  defp tag_to_type(1), do: {:ok, :account}
  defp tag_to_type(2), do: {:ok, :name}
  defp tag_to_type(3), do: {:ok, :commitment}
  defp tag_to_type(4), do: {:ok, :oracle}
  defp tag_to_type(5), do: {:ok, :contract}
  defp tag_to_type(6), do: {:ok, :channel}
  defp tag_to_type(_), do: {:error, "#{__MODULE__}: Invalid tag"}
end
|
apps/aecore/lib/aecore/chain/identifier.ex
| 0.832747 | 0.438725 |
identifier.ex
|
starcoder
|
defmodule FarmbotCore.Firmware.RxBuffer do
  @moduledoc """
  Line-oriented buffering helper for serial input.

  Serial devices emit bytes in unpredictable chunks: a read may carry the
  tail of one line, a line split across two reads, garbage from a device
  that has not finished booting, or - with luck - one complete line.
  Because the GCode protocol is line based, this module acts as a safety
  layer so that consumers only ever see normalized, complete lines:

  * data observed before the first newline is treated as potentially
    incomplete garbage,
  * tokens are normalized (carriage returns, stray spaces, casing),
  * consumers receive whole lines, never a half finished one.
  """
  require Logger

  alias __MODULE__, as: State

  # `output`: lines ready for the consumer; `buffer`: partial-line residue;
  # `ready`: flips to true once any data has been fed in.
  defstruct output: [], buffer: "", ready: false

  @doc ~S"""
  Create a new line buffer object.

      iex> new("r88 Q00")
      %FarmbotCore.Firmware.RxBuffer{
        buffer: "",
        output: ["R88 Q00"],
        ready: true
      }
  """
  def new(string \\ ""), do: puts(%State{}, string)

  @doc ~S"""
  Create a new line buffer by appending to an existing buffer.

      iex> new("r88 Q00\n") |> puts("R99 ARDUINO STARTUP COMPLETE\n")
      %FarmbotCore.Firmware.RxBuffer{
        buffer: "",
        output: ["R99 ARDUINO STARTUP COMPLETE\n"],
        ready: true
      }
  """
  def puts(state, string) do
    normalized = String.upcase(string)
    %{state | output: [normalized], ready: true}
  end

  @doc ~S"""
  Harvest well-formed data from a line buffer. Returns a tuple holding the
  drained buffer at element 0 and the buffered lines at element 1.

      iex> new("r88 Q00\n")
      ...> |> puts("R99 ARDUINO STARTUP COMPLETE\n")
      ...> |> puts("r99 InCoMpLeTe DaTA")
      ...> |> gets()
      {
        %RxBuffer{
          buffer: "",
          output: [],
          ready: true
        },
        ["R99 INCOMPLETE DATA"]
      }
  """
  def gets(state) do
    drained = %{state | output: []}
    {drained, state.output}
  end
end
|
lib/firmware/rx_buffer.ex
| 0.673084 | 0.444565 |
rx_buffer.ex
|
starcoder
|
defmodule Gossip.Client do
  @moduledoc """
  Behaviour for integrating Gossip into your game
  """

  # Each nested module below groups the callbacks needed to support one
  # Gossip feature "flag". A game implements only the modules for the flags
  # it enables; `Core` is the only mandatory one.

  defmodule Core do
    @moduledoc """
    Callbacks for the "channels" flag
    This is the only _required_ module.
    """

    @doc """
    Get the game's User Agent.
    This should return the game name with a version number.
    """
    @callback user_agent() :: Gossip.user_agent()

    @doc """
    Get the channels you want to subscribe to on start
    """
    @callback channels() :: [Gossip.channel_name()]

    @doc """
    Get the current names of connected players
    Used in the heartbeat
    """
    @callback players() :: [Gossip.player_name()]

    @doc """
    A callback to know when the socket is authenticated
    """
    @callback authenticated() :: :ok

    @doc """
    A new message was received from Gossip on a channel
    """
    @callback message_broadcast(Gossip.message()) :: :ok
  end

  defmodule Players do
    @moduledoc """
    Callbacks for the "players" flag
    """

    @doc """
    A player has signed in
    """
    @callback player_sign_in(Gossip.game_name(), Gossip.player_name()) :: :ok

    @doc """
    A player has signed out
    """
    @callback player_sign_out(Gossip.game_name(), Gossip.player_name()) :: :ok

    @doc """
    Player status update
    You will receive this callback anytime a `players/status` event is sent. These are sent
    after calling `Gossip.fetch_players/0` and periodically updated from the local
    player cache, `Gossip.Players`.
    """
    @callback player_update(Gossip.game_name(), [Gossip.player_name()]) :: :ok
  end

  defmodule Tells do
    @moduledoc """
    Callbacks for the "tells" flag
    """

    @doc """
    New tell received
    """
    @callback tell_receive(Gossip.game_name(), from_player :: Gossip.player_name(), to_player :: Gossip.player_name(), Gossip.message()) :: :ok
  end

  defmodule Games do
    @moduledoc """
    Callbacks for the "games" flag
    """

    @doc """
    Game status update
    """
    @callback game_update(Gossip.game()) :: :ok

    @doc """
    A game connected
    """
    @callback game_connect(Gossip.game_name()) :: :ok

    @doc """
    A game disconnected
    """
    @callback game_disconnect(Gossip.game_name()) :: :ok
  end

  defmodule SystemCallback do
    @moduledoc """
    A behavior for system level callbacks
    """

    @type state :: map()
    @type event :: map()

    # Called once the socket authenticates; may transform the client state.
    @callback authenticated(state()) :: {:ok, state()}

    # Generic hook for processing raw Gossip events; returns updated state.
    @callback process(state(), event()) :: {:ok, state()}
  end
end
|
lib/gossip/client.ex
| 0.802865 | 0.411081 |
client.ex
|
starcoder
|
defmodule Slugy do
  @moduledoc ~S"""
  A Phoenix library to generate slug for your schema fields

  ## Examples

  Let's suppose we have a `Post` schema and we want to generate a slug from
  `title` field and save it to the `slug` field. To achieve that we need to
  call `slugify/2` following the changeset pipeline passing the desireable
  field. `slugify/2` generates the slug and put it to the changeset.

      defmodule Post do
        use Ecto.Schema
        import Ecto.Changeset
        import Slugy

        embedded_schema do
          field(:title, :string)
          field(:slug, :string)
        end

        def changeset(post, attrs) do
          post
          |> cast(attrs, [:title, :type])
          |> slugify(:title)
        end
      end

  Running this code on iex console you can see the slug generated as a new
  change to be persisted.

      iex> Post.changeset(%Post{}, %{title: "A new Post"}).changes
      %{title: "A new Post", slug: "a-new-post"}

  Slugy just generates a slug if the field's value passed to `slugify/2`
  comes with a new value to persist in `attrs` (in update cases) or if the
  struct is a new record to save.
  """
  import Ecto.Changeset

  @doc ~S"""
  Puts a `:slug` change derived from other changeset fields.

  Accepts:

    * an atom key - slugifies that field's new value;
    * `with: [keys]` - composes several fields into a single slug, e.g.
      `slugify(changeset, with: [:name, :type])` produces `"elixir-video"`;
    * a list of keys - follows the path into a nested map change, e.g.
      `slugify(changeset, [:data, :title])`.

  If the source field(s) carry no change, the changeset is returned
  untouched.
  """
  def slugify(changeset, with: fields) when is_list(fields) do
    with true <- any_change?(changeset, fields),
         str when not is_nil(str) <- compose_fields(changeset, fields) do
      put_change(changeset, :slug, slugify(str))
    else
      _ -> changeset
    end
  end

  def slugify(changeset, key) when is_atom(key) do
    if str = get_change(changeset, key) do
      put_change(changeset, :slug, slugify(str))
    else
      changeset
    end
  end

  def slugify(changeset, nested_field) when is_list(nested_field) do
    with str when not is_nil(str) <- get_in(changeset.changes, nested_field) do
      put_change(changeset, :slug, slugify(str))
    else
      _ -> changeset
    end
  end

  @doc """
  Returns a downcased dashed string.

  ## Examples

      iex> Slugy.slugify("Vamo que vamo")
      "vamo-que-vamo"
  """
  def slugify(str) when is_binary(str) do
    str
    |> String.trim()
    # NFD decomposition splits accented characters into base letter +
    # combining mark; the marks fall outside the class below and are removed.
    |> String.normalize(:nfd)
    |> String.replace(~r/\s\s+/, " ")
    # FIX: was `[^A-z\s\d-]`. The `A-z` range is a regex pitfall: the ASCII
    # codepoints between "Z" and "a" ("[", "\", "]", "^", "_", "`") also
    # matched, so those characters leaked into slugs.
    |> String.replace(~r/[^a-zA-Z\s\d-]/u, "")
    |> String.replace(~r/\s/, "-")
    |> String.replace(~r/--+/, "-")
    |> String.downcase()
  end

  # Joins the current values of `fields` (changes or existing data) into a
  # single space-separated string; trailing space is collapsed by slugify/1.
  defp compose_fields(_changeset, []), do: ""

  defp compose_fields(changeset, [head | tail]) do
    "#{get_field(changeset, head)} " <> compose_fields(changeset, tail)
  end

  # True when at least one of `fields` has a pending change.
  defp any_change?(changeset, fields) do
    Enum.any?(fields, fn field -> get_change(changeset, field) end)
  end
end
|
lib/slugy.ex
| 0.826991 | 0.570122 |
slugy.ex
|
starcoder
|
defmodule Janus.Mock.Transport do
  @moduledoc """
  A predictable, in-memory stand-in for a real transport module.

  `c:connect/1` receives a list of `{request, response}` tuples and keeps
  them in the transport state. Every `c:send/3` call matches its payload
  against the first pending request, removes that pair from the state and
  delivers the paired response back to the calling process.

  Because `Janus.Connection` decorates each request with a `:transaction`
  field, that field is stripped before matching and copied onto the
  matched response so replies can be correlated.

  ## Example

  ```elixir
  defmodule Test do
    alias Janus.{Connection, Session}

    defmodule Handler, do: use Janus.Handler

    @request_response_pairs [
      {
        %{
          janus: :create
        },
        %{
          "janus" => "success",
          "data" => %{"id" => "session id"}
        }
      }
    ]

    def test() do
      {:ok, conn} = Connection.start_link(
        Janus.Mock.Transport,
        @request_response_pairs,
        Handler,
        {}
      )

      # session module will send `create` request on start
      # then mock transport will match on this request and
      # respond with a success response containing session id
      {:ok, session} = Session.start_link(conn)
    end
  end
  ```

  ## Keep alive interval

  To mock `c:keepalive_interval/0` one has to set the proper config variable.

  ```elixir
  config :elixir_janus, Janus.Mock.Transport, keepalive_interval: 100
  ```
  """

  @behaviour Janus.Transport

  @impl true
  def connect(pairs) do
    # Fail fast on malformed request/response pairs.
    Janus.Mock.assert_pairs_shape(pairs)

    {:ok, %{pairs: pairs}}
  end

  @impl true
  def send(payload, _timeout, %{pairs: pairs} = state) do
    # `Janus.Connection` adds a :transaction key; strip it before matching.
    {transaction, request} = Map.pop(payload, :transaction)

    {response, remaining_pairs} = Janus.Mock.get_response(request, pairs)

    # Echo the transaction back on the response when one was present.
    response =
      case transaction do
        nil -> response
        transaction -> Map.put(response, "transaction", transaction)
      end

    send(self(), response)

    {:ok, %{state | pairs: remaining_pairs}}
  end

  @impl true
  def handle_info(message, state) do
    {:ok, message, state}
  end

  @impl true
  def keepalive_interval() do
    case Application.get_env(:elixir_janus, __MODULE__) do
      [keepalive_interval: interval] -> interval
      nil -> nil
    end
  end
end
|
lib/mock/transport.ex
| 0.899868 | 0.788094 |
transport.ex
|
starcoder
|
defmodule Raft do
  @moduledoc """
  A simple implementation of the raft protocol
  https://raft.github.io/
  """
  use GenServer
  alias Raft.PG
  alias Raft.Server
  alias Raft.State
  require Logger
  # Name of the protected ETS table used to publish leadership status to
  # other processes (see node_is_leader?/0).
  @key :raft
  # Base delay (ms) before kicking off the first election after boot.
  @election_initial_delay 500
  # Upper bound (ms) of random jitter added to the election delay so peers
  # do not all start campaigning at the same instant.
  @election_random_delay 500
  def start_link() do
    GenServer.start_link(__MODULE__, [], name: __MODULE__)
  end
  @doc """
  Check the state of the election, look for a current leader
  """
  def leader_check(pid) do
    GenServer.cast(pid, {:leader, :check, self()})
  end
  @doc """
  Let the new follower know about the current term and who the leader is
  """
  def notify_of_leader(pid, term) do
    GenServer.cast(pid, {:leader, :notice, self(), node(), term})
  end
  @doc """
  Announce a node as running for leader
  """
  def announce_candidate(pid, term) do
    GenServer.cast(pid, {:election, :running, self(), term})
  end
  @doc """
  Vote for a node as the leader
  """
  def vote_for(pid, term) do
    GenServer.cast(pid, {:election, :cast_vote, self(), term})
  end
  @doc """
  Set the node as the new leader for a term
  """
  def new_leader(pid, term) do
    GenServer.cast(pid, {:election, :winner, self(), node(), term})
  end
  @doc """
  Get debug information out of the raft server
  """
  def debug() do
    GenServer.call(Raft, :debug)
  end
  @doc """
  Check if the current node is the leader
  """
  @spec node_is_leader?() :: boolean()
  def node_is_leader?() do
    # Reads the ETS flag written by this server; safe to call from any
    # process without going through the GenServer.
    case :ets.lookup(@key, :is_leader?) do
      [{_, value}] when is_boolean(value) ->
        value
      _ ->
        false
    end
  end
  def init(_) do
    # Join the process group so peers can find this raft member.
    PG.join()
    # Protected table: readable by everyone, writable only by this server.
    :ets.new(@key, [:set, :protected, :named_table])
    :ets.insert(@key, {:is_leader?, false})
    # Look for an existing leader first, then schedule a (jittered)
    # election for term 1 in case none is found.
    send(self(), {:election, :check})
    start_election(1)
    # Subscribe to :nodeup/:nodedown messages (handled in handle_info/2).
    :ok = :net_kernel.monitor_nodes(true)
    state = %State{
      state: "candidate",
      term: 0,
      highest_seen_term: 0,
      votes: []
    }
    {:ok, state}
  end
  def handle_call(:state, _from, state) do
    {:reply, Map.put(state, :node, node()), state}
  end
  def handle_call(:debug, _from, state) do
    debug = Server.debug(state)
    {:reply, debug, state}
  end
  # All election/leadership transitions below are delegated to Raft.Server;
  # this module only routes the message protocol.
  def handle_cast({:leader, :check, pid}, state) do
    {:ok, state} = Server.leader_check(state, pid)
    {:noreply, state}
  end
  def handle_cast({:leader, :notice, leader_pid, leader_node, term}, state) do
    {:ok, state} = Server.set_leader(state, leader_pid, leader_node, term)
    {:noreply, state}
  end
  def handle_cast({:election, :running, pid, term}, state) do
    {:ok, state} = Server.vote_leader(state, pid, term)
    {:noreply, state}
  end
  def handle_cast({:election, :cast_vote, pid, term}, state) do
    {:ok, state} = Server.vote_received(state, pid, term)
    {:noreply, state}
  end
  def handle_cast({:election, :winner, leader_pid, leader_node, term}, state) do
    {:ok, state} = Server.set_leader(state, leader_pid, leader_node, term)
    {:noreply, state}
  end
  def handle_info({:election, :check}, state) do
    {:ok, state} = Server.look_for_leader(state)
    {:noreply, state}
  end
  def handle_info({:election, :start, term}, state) do
    {:ok, state} = Server.start_election(state, term)
    {:noreply, state}
  end
  def handle_info({:election, :check_election_status, term}, state) do
    {:ok, state} = Server.check_election_status(state, term)
    {:noreply, state}
  end
  def handle_info({:nodeup, _node}, state) do
    # Give the new node a moment to boot before re-asserting leadership.
    Process.send_after(self(), :assert_leader, 300)
    {:noreply, state}
  end
  def handle_info({:nodedown, node}, state) do
    {:ok, state} = Server.node_down(state, node)
    {:noreply, state}
  end
  def handle_info(:assert_leader, state) do
    {:ok, state} = Server.assert_leader(state)
    {:noreply, state}
  end
  # Schedules an election for `term` after a randomized delay; the jitter
  # reduces the chance of split votes between simultaneous candidates.
  def start_election(term) do
    Process.send_after(
      self(),
      {:election, :start, term},
      @election_initial_delay + :rand.uniform(@election_random_delay)
    )
  end
end
|
lib/raft.ex
| 0.682891 | 0.61986 |
raft.ex
|
starcoder
|
defmodule RethinkDB.Query.Macros do
  alias RethinkDB.Q
  alias RethinkDB.Query
  @moduledoc false
  # Each `operate_on_*` macro generates public query-builder functions for
  # one ReQL opcode: a base clause producing `%Q{query: [opcode, args]}` and,
  # when the `opts: true` option is given, an additional clause that accepts
  # an options map/keyword list (normalized via make_opts/1).
  defmacro operate_on_two_args(op, opcode, options \\ []) do
    opt_support = Keyword.get(options, :opts, false)
    quote do
      def unquote(op)(left, right) do
        %Q{query: [unquote(opcode), [wrap(left), wrap(right)]]}
      end
      if unquote(opt_support) do
        def unquote(op)(left, right, opts) when is_map(opts) or is_list(opts) do
          %Q{query: [unquote(opcode), [wrap(left), wrap(right)], make_opts(opts)]}
        end
      end
    end
  end
  defmacro operate_on_three_args(op, opcode, options \\ []) do
    opt_support = Keyword.get(options, :opts, false)
    quote do
      def unquote(op)(arg1, arg2, arg3) do
        %Q{query: [unquote(opcode), [wrap(arg1), wrap(arg2), wrap(arg3)]]}
      end
      if unquote(opt_support) do
        def unquote(op)(arg1, arg2, arg3, opts) when is_map(opts) or is_list(opts) do
          %Q{query: [unquote(opcode), [wrap(arg1), wrap(arg2), wrap(arg3)], make_opts(opts)]}
        end
      end
    end
  end
  # Generates `op(args)` where `args` is a plain list mapped through wrap/1.
  defmacro operate_on_list(op, opcode, options \\ []) do
    opt_support = Keyword.get(options, :opts, false)
    quote do
      def unquote(op)(args) when is_list(args) do
        %Q{query: [unquote(opcode), Enum.map(args, &wrap/1)]}
      end
      if unquote(opt_support) do
        def unquote(op)(args, opts) when is_list(args) and (is_map(opts) or is_list(opts)) do
          %Q{query: [unquote(opcode), Enum.map(args, &wrap/1), make_opts(opts)]}
        end
      end
    end
  end
  # Generates `op(seq, args)` - a sequence argument followed by a non-empty
  # list of extra arguments, all flattened into one opcode argument list.
  defmacro operate_on_seq_and_list(op, opcode, options \\ []) do
    opt_support = Keyword.get(options, :opts, false)
    quote do
      def unquote(op)(seq, args) when is_list(args) and args != [] do
        %Q{query: [unquote(opcode), [wrap(seq) | Enum.map(args, &wrap/1)]]}
      end
      if unquote(opt_support) do
        def unquote(op)(seq, args, opts)
            when is_list(args) and args != [] and (is_map(opts) or is_list(opts)) do
          %Q{query: [unquote(opcode), [wrap(seq) | Enum.map(args, &wrap/1)], make_opts(opts)]}
        end
      end
    end
  end
  defmacro operate_on_single_arg(op, opcode, options \\ []) do
    opt_support = Keyword.get(options, :opts, false)
    quote do
      def unquote(op)(arg) do
        %Q{query: [unquote(opcode), [wrap(arg)]]}
      end
      if unquote(opt_support) do
        def unquote(op)(arg, opts) when is_map(opts) or is_list(opts) do
          %Q{query: [unquote(opcode), [wrap(arg)], make_opts(opts)]}
        end
      end
    end
  end
  # Generates `op/1`, `op/2` and `op/3` clauses where the second argument is
  # optional and a trailing options map is always allowed. Note the `%Q{}`
  # clause must come before the generic 2-arity clause to match first.
  defmacro operate_on_optional_second_arg(op, opcode) do
    quote do
      def unquote(op)(arg) do
        %Q{query: [unquote(opcode), [wrap(arg)]]}
      end
      def unquote(op)(left, right = %Q{}) do
        %Q{query: [unquote(opcode), [wrap(left), wrap(right)]]}
      end
      def unquote(op)(arg, opts) when is_map(opts) do
        %Q{query: [unquote(opcode), [wrap(arg)], opts]}
      end
      def unquote(op)(left, right, opts) when is_map(opts) do
        %Q{query: [unquote(opcode), [wrap(left), wrap(right)], opts]}
      end
      def unquote(op)(left, right) do
        %Q{query: [unquote(opcode), [wrap(left), wrap(right)]]}
      end
    end
  end
  defmacro operate_on_zero_args(op, opcode, options \\ []) do
    opt_support = Keyword.get(options, :opts, false)
    quote do
      def unquote(op)(), do: %Q{query: [unquote(opcode)]}
      if unquote(opt_support) do
        def unquote(op)(opts) when is_map(opts) or is_list(opts) do
          %Q{query: [unquote(opcode), make_opts(opts)]}
        end
      end
    end
  end
  # wrap/1 recursively converts Elixir terms into ReQL-serializable form.
  def wrap(list) when is_list(list), do: Query.make_array(Enum.map(list, &wrap/1))
  def wrap(q = %Q{}), do: q
  def wrap(t = %RethinkDB.Pseudotypes.Time{}) do
    m = Map.from_struct(t) |> Map.put_new("$reql_type$", "TIME")
    wrap(m)
  end
  def wrap(t = %DateTime{utc_offset: utc_offset, std_offset: std_offset}) do
    # Build the "+HH:MM"/"-HH:MM" timezone string from the combined UTC and
    # standard-time offsets (both in seconds).
    offset = utc_offset + std_offset
    offset_negative = offset < 0
    offset_hour = div(abs(offset), 3600)
    offset_minute = rem(abs(offset), 3600)
    time_zone =
      if offset_negative do
        "-"
      else
        "+"
      end <>
        String.pad_leading(Integer.to_string(offset_hour), 2, "0") <>
        ":" <> String.pad_leading(Integer.to_string(offset_minute), 2, "0")
    wrap(%{
      "$reql_type$" => "TIME",
      "epoch_time" => DateTime.to_unix(t, :millisecond) / 1000,
      "timezone" => time_zone
    })
  end
  def wrap(map) when is_map(map) do
    Enum.map(map, fn {k, v} ->
      {k, wrap(v)}
    end)
    |> Enum.into(%{})
  end
  def wrap(f) when is_function(f), do: Query.func(f)
  def wrap(t) when is_tuple(t), do: wrap(Tuple.to_list(t))
  def wrap(data), do: data
  # Normalizes an options keyword list/map into a wrapped map for ReQL.
  def make_opts(opts) when is_map(opts), do: wrap(opts)
  def make_opts(opts) when is_list(opts), do: Enum.into(opts, %{})
end
|
lib/rethinkdb/query/macros.ex
| 0.532668 | 0.487002 |
macros.ex
|
starcoder
|
import Kernel, except: [apply: 2]
defmodule Ecto.Query.Builder.Dynamic do
  @moduledoc false
  alias Ecto.Query.Builder
  @doc """
  Builds a dynamic expression.
  """
  @spec build([Macro.t], Macro.t, Macro.Env.t) :: Macro.t
  def build(binding, expr, env) do
    # Escape the binding and expression at macro-expansion time, but defer
    # final resolution by wrapping them in a fun that receives the query.
    {query, vars} = Builder.escape_binding(quote(do: query), binding, env)
    {expr, {params, :acc}} = Builder.escape(expr, :any, {%{}, :acc}, vars, env)
    params = Builder.escape_params(params)
    quote do
      %Ecto.Query.DynamicExpr{fun: fn query ->
        _ = unquote(query)
        {unquote(expr), unquote(params)}
      end,
      binding: unquote(Macro.escape(binding)),
      file: unquote(env.file),
      line: unquote(env.line)}
    end
  end
  @doc """
  Expands a dynamic expression for insertion into the given query.
  """
  def fully_expand(query, %{file: file, line: line, binding: binding} = dynamic) do
    # Params accumulate in reverse (prepend) during expansion; restore
    # their original order here.
    {expr, {binding, params, _count}} = expand(query, dynamic, {binding, [], 0})
    {expr, binding, Enum.reverse(params), file, line}
  end
  @doc """
  Expands a dynamic expression as part of an existing expression.
  Any dynamic expression parameter is prepended and the parameters
  list is not reversed. This is useful when the dynamic expression
  is given in the middle of an expression.
  """
  def partially_expand(query, %{binding: binding} = dynamic, params, count) do
    {expr, {_binding, params, count}} = expand(query, dynamic, {binding, params, count})
    {expr, params, count}
  end
  # Walks the dynamic's AST, replacing each `^ix` parameter placeholder:
  # nested DynamicExprs are expanded recursively (adopting the longer
  # binding list), while plain params are renumbered with a running count.
  defp expand(query, %{fun: fun}, {binding, params, count}) do
    {dynamic_expr, dynamic_params} =
      fun.(query)
    Macro.postwalk(dynamic_expr, {binding, params, count}, fn
      {:^, meta, [ix]}, {binding, params, count} ->
        case Enum.fetch!(dynamic_params, ix) do
          {%Ecto.Query.DynamicExpr{binding: new_binding} = dynamic, _} ->
            binding = if length(new_binding) > length(binding), do: new_binding, else: binding
            expand(query, dynamic, {binding, params, count})
          param ->
            {{:^, meta, [count]}, {binding, [param | params], count + 1}}
        end
      expr, acc ->
        {expr, acc}
    end)
  end
end
|
lib/ecto/query/builder/dynamic.ex
| 0.754373 | 0.469277 |
dynamic.ex
|
starcoder
|
defmodule Integer do
  @moduledoc """
  Functions for working with integers.
  """

  import Bitwise

  @doc """
  Determines if an integer is odd.
  Returns `true` if `n` is an odd number, otherwise `false`.
  Allowed in guard clauses.
  """
  defmacro is_odd(n) do
    quote do
      (unquote(n) &&& 1) == 1
    end
  end

  @doc """
  Determines if an integer is even.
  Returns `true` if `n` is an even number, otherwise `false`.
  Allowed in guard clauses.
  """
  defmacro is_even(n) do
    quote do
      (unquote(n) &&& 1) == 0
    end
  end

  @doc """
  Returns the ordered digits for the given non-negative integer.
  An optional base value may be provided representing the radix for the returned
  digits.
  ## Examples
      iex> Integer.digits(101)
      [1, 0, 1]
      iex> Integer.digits(58127, 2)
      [1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1]
  """
  @spec digits(non_neg_integer, non_neg_integer) :: [non_neg_integer]
  def digits(value, base \\ 10)
      when is_integer(value) and value >= 0 and is_integer(base) and base >= 2 do
    collect_digits(value, base, [])
  end

  # Peel off the least significant digit each step; prepending keeps the
  # final list in most-significant-first order.
  defp collect_digits(0, _base, []), do: [0]
  defp collect_digits(0, _base, digits), do: digits

  defp collect_digits(value, base, digits) do
    collect_digits(div(value, base), base, [rem(value, base) | digits])
  end

  @doc """
  Returns the integer represented by the ordered digits.
  An optional base value may be provided representing the radix for the digits.
  ## Examples
      iex> Integer.undigits([1, 0, 1])
      101
      iex> Integer.undigits([1, 4], 16)
      20
  """
  @spec undigits([integer], integer) :: integer
  def undigits(digits, base \\ 10) when is_integer(base) do
    Enum.reduce(digits, 0, fn digit, acc -> acc * base + digit end)
  end

  @doc """
  Converts a binary to an integer.
  If successful, returns a tuple of the form `{integer, remainder_of_binary}`.
  Otherwise `:error`.
  ## Examples
      iex> Integer.parse("34")
      {34,""}
      iex> Integer.parse("34.5")
      {34,".5"}
      iex> Integer.parse("three")
      :error
  """
  @spec parse(binary) :: {integer, binary} | :error
  def parse(<<?-, rest::binary>>) do
    case parse_unsigned(rest) do
      {number, remainder} -> {-number, remainder}
      :error -> :error
    end
  end

  def parse(<<?+, rest::binary>>), do: parse_unsigned(rest)

  def parse(binary) when is_binary(binary), do: parse_unsigned(binary)

  # At least one leading decimal digit is required, otherwise :error.
  defp parse_unsigned(<<digit, rest::binary>>) when digit in ?0..?9,
    do: parse_unsigned(rest, digit - ?0)

  defp parse_unsigned(_), do: :error

  defp parse_unsigned(<<digit, rest::binary>>, acc) when digit in ?0..?9,
    do: parse_unsigned(rest, acc * 10 + (digit - ?0))

  defp parse_unsigned(remainder, acc), do: {acc, remainder}

  @doc """
  Returns a binary which corresponds to the text representation
  of `some_integer`.
  Inlined by the compiler.
  ## Examples
      iex> Integer.to_string(123)
      "123"
  """
  @spec to_string(integer) :: String.t
  def to_string(some_integer) do
    :erlang.integer_to_binary(some_integer)
  end

  @doc """
  Returns a binary which corresponds to the text representation
  of `some_integer` in base `base`.
  Inlined by the compiler.
  ## Examples
      iex> Integer.to_string(100, 16)
      "64"
  """
  @spec to_string(integer, 2..36) :: String.t
  def to_string(some_integer, base) do
    :erlang.integer_to_binary(some_integer, base)
  end

  @doc """
  Returns a char list which corresponds to the text representation of the given integer.
  Inlined by the compiler.
  ## Examples
      iex> Integer.to_char_list(7)
      '7'
  """
  @spec to_char_list(integer) :: charlist()
  def to_char_list(number) do
    :erlang.integer_to_list(number)
  end

  @doc """
  Returns a char list which corresponds to the text representation of the
  given integer in the given base.
  Inlined by the compiler.
  ## Examples
      iex> Integer.to_char_list(1023, 16)
      '3FF'
  """
  @spec to_char_list(integer, 2..36) :: charlist()
  def to_char_list(number, base) do
    :erlang.integer_to_list(number, base)
  end
end
|
lib/elixir/lib/integer.ex
| 0.948082 | 0.590573 |
integer.ex
|
starcoder
|
defmodule QueryBuilder do
require Ecto.Query
alias Ecto.Query
# Injects QueryBuilder.Schema into the calling schema module so it can
# declare the association metadata QueryBuilder relies on.
defmacro __using__(opts) do
  quote do
    require QueryBuilder.Schema
    QueryBuilder.Schema.__using__(unquote(opts))
  end
end
@doc ~S"""
Preloads the associations.
Bindings are automatically set if joins have been made, or if it is preferable to
join (i.e. one-to-one associations are preferable to include into the query result
rather than emitting separate DB queries).
Example:
```
QueryBuilder.preload(query, [role: :permissions, articles: [:stars, comments: :user]])
```
"""
def preload(query, assoc_fields) do
ensure_query_has_binding(query)
|> QueryBuilder.Query.Preload.preload(assoc_fields)
end
@doc ~S"""
An AND where query expression.
Example:
```
QueryBuilder.where(query, firstname: "John")
```
"""
def where(query, filters) do
where(query, [], filters)
end
@doc ~S"""
An AND where query expression.
Associations are passed in second argument; fields from these associations can then
be referenced by writing the field name, followed by the "@" character and the
association name, as an atom. For example: `:name@users`.
Example:
```
QueryBuilder.where(query, [role: :permissions], name@permissions: :write)
```
"""
def where(query, assoc_fields, filters) do
ensure_query_has_binding(query)
|> QueryBuilder.Query.Where.where(assoc_fields, filters)
end
@doc ~S"""
An order by query expression.
Example:
```
QueryBuilder.order_by(query, lastname: :asc, firstname: :asc)
```
"""
def order_by(query, value) do
order_by(query, [], value)
end
@doc ~S"""
An order by query expression.
For more about the second argument, see `where/3`.
Example:
```
QueryBuilder.order_by(query, :articles, title@articles: :asc)
```
"""
def order_by(query, assoc_fields, value) do
ensure_query_has_binding(query)
|> QueryBuilder.Query.OrderBy.order_by(assoc_fields, value)
end
@doc ~S"""
A join query expression.
Third argument `type` may be passed one of the possible values for
`Ecto.Query.join/5`'s qualifier argument.
Example:
```
QueryBuilder.join(query, :articles, :left)
```
"""
def join(query, assoc_fields, type) do
ensure_query_has_binding(query)
|> QueryBuilder.Query.Join.join(assoc_fields, type)
end
@doc ~S"""
Allows to pass a list of operations through a keyword list.
Example:
```
QueryBuilder.from_list(query, [
where: [name: "John", city: "Anytown"],
preload: [articles: :comments]
])
```
"""
def from_list(query, []), do: query
def from_list(query, [{operation, arguments} | tail]) do
arguments =
cond do
is_tuple(arguments) -> Tuple.to_list(arguments)
is_list(arguments) -> [arguments]
true -> List.wrap(arguments)
end
apply(__MODULE__, operation, [query | arguments])
|> from_list(tail)
end
defp ensure_query_has_binding(query) do
schema = QueryBuilder.Utils.root_schema(query)
unless Query.has_named_binding?(query, schema._binding()) do
schema._query()
else
query
end
end
end
|
lib/query_builder.ex
| 0.876463 | 0.850841 |
query_builder.ex
|
starcoder
|
defmodule ArtemisWeb.ViewHelper.Form do
  @moduledoc """
  View helpers for form controls: select option data, a standalone `<select>`
  tag, and hidden input fields.
  """
  use Phoenix.HTML
  import Phoenix.HTML.Tag

  @doc """
  Returns a blank option value
  """
  def blank_option(), do: [key: " ", value: ""]

  @doc """
  Returns option data for a select field
  Options
    :key_field -> atom. required if passing a list of maps or list of keyword lists
    :value_field -> atom. required if passing a list of maps or list of keyword lists
    :blank_option -> boolean. include a blank option
  Example:
    select_options(["one", "two"])
  Returns:
    [
      [key: "one", value: "one"],
      [key: "two", value: "two"]
    ]
  """
  def select_options(data, options \\ []) do
    # Build [key:, value:] pairs, dropping entries whose value resolved to nil,
    # then optionally prepend the blank option.
    results =
      data
      |> Enum.map(&select_option(&1, options))
      |> Enum.reject(&is_nil(Keyword.get(&1, :value)))

    case Keyword.get(options, :blank_option) do
      true -> [blank_option() | results]
      _ -> results
    end
  end

  # Map entries: `:field` acts as a shorthand for both :key_field and
  # :value_field; otherwise the specific option is required (Keyword.fetch!
  # raises when absent). The key falls back to the value when missing.
  defp select_option(entry, options) when is_map(entry) do
    key_field = Keyword.get(options, :field) || Keyword.fetch!(options, :key_field)
    value_field = Keyword.get(options, :field) || Keyword.fetch!(options, :value_field)
    value = Map.get(entry, value_field)
    key = Map.get(entry, key_field) || value

    [
      key: key,
      value: value
    ]
  end

  # Keyword-list entries. NOTE(review): unlike the map clause above, missing
  # :key_field/:value_field options yield nil here instead of raising, and the
  # :field shorthand is not honored — confirm whether this asymmetry is intended.
  defp select_option(entry, options) when is_list(entry) do
    key_field = Keyword.get(options, :key_field)
    value_field = Keyword.get(options, :value_field)
    value = Keyword.get(entry, value_field)
    key = Keyword.get(entry, key_field) || value

    [
      key: key,
      value: value
    ]
  end

  # Scalar entries are used as both key and value.
  defp select_option(entry, _options), do: [key: entry, value: entry]

  @doc """
  Returns the value of a changeset field
  """
  def get_changeset_value(changeset, field), do: Ecto.Changeset.get_field(changeset, field)

  @doc """
  Render a standalone select input form field. Note, if using `form_for`, use
  the Phoenix built-in function `select` instead.
  Expects `data` to be in the form of a list of keyword pairs:
    [
      [key: "Option One", value: "option-value-1"],
      [key: "Option Two", value: "option-value-2"]
    ]
  """
  def select_tag(data, options \\ []) do
    name = Keyword.get(options, :name)
    placeholder = Keyword.get(options, :placeholder)
    class = Keyword.get(options, :class, "enhanced")

    content_tag(:select, class: class, name: name, placeholder: placeholder) do
      Enum.map(data, fn [key: key, value: value] ->
        content_tag(:option, value: value) do
          key
        end
      end)
    end
  end

  @doc """
  From Phoenix.HTML.Form >= 2.14. Can be removed in the future once mix.exs
  version matches.
  """
  def deprecated_options_for_select(options, selected_values) do
    # selected_values are escaped up front so `option/4` can compare them
    # against each escaped option value.
    {:safe,
     escaped_options_for_select(
       options,
       selected_values |> List.wrap() |> Enum.map(&html_escape/1)
     )}
  end

  # Builds the <option> markup as iodata: `[acc | entry]` nesting avoids list
  # concatenation. Accepts {key, value} tuples, keyword lists carrying
  # :key/:value (remaining pairs become extra attributes), or bare values used
  # as both key and value.
  defp escaped_options_for_select(options, selected_values) do
    Enum.reduce(options, [], fn
      {option_key, option_value}, acc ->
        [acc | option(option_key, option_value, [], selected_values)]

      options, acc when is_list(options) ->
        {option_key, options} = Keyword.pop(options, :key)

        option_key ||
          raise ArgumentError,
                "expected :key key when building <option> from keyword list: #{inspect(options)}"

        {option_value, options} = Keyword.pop(options, :value)

        option_value ||
          raise ArgumentError,
                "expected :value key when building <option> from keyword list: #{inspect(options)}"

        [acc | option(option_key, option_value, options, selected_values)]

      option, acc ->
        [acc | option(option, option, [], selected_values)]
    end)
  end

  # A list/map of group values renders as an <optgroup> wrapping its own
  # recursively-built options.
  defp option(group_label, group_values, [], value)
       when is_list(group_values) or is_map(group_values) do
    section_options = escaped_options_for_select(group_values, value)
    {:safe, contents} = content_tag(:optgroup, {:safe, section_options}, label: group_label)
    contents
  end

  # `value` is the list of escaped selected values produced in
  # deprecated_options_for_select/2; membership marks the option selected.
  defp option(option_key, option_value, extra, value) do
    option_key = html_escape(option_key)
    option_value = html_escape(option_value)
    opts = [value: option_value, selected: option_value in value] ++ extra
    {:safe, contents} = content_tag(:option, option_key, opts)
    contents
  end

  @doc """
  Render hidden fields for each value
  """
  def hidden_fields(items) do
    Enum.map(items, fn item ->
      hidden_field(item)
    end)
  end

  @doc """
  Render a hidden field
  """
  # Nested maps expand to `key[sub_key]` inputs (recursively).
  def hidden_field(key, values) when is_map(values) do
    Enum.map(values, fn {next_key, value} ->
      hidden_field("#{key}[#{next_key}]", value)
    end)
  end

  # Lists expand to repeated `key[]` inputs.
  def hidden_field(key, values) when is_list(values) do
    Enum.map(values, fn value ->
      hidden_field("#{key}[]", value)
    end)
  end

  def hidden_field(key, value) do
    tag(:input, name: key, type: :hidden, value: value)
  end

  @doc """
  Render a hidden field
  """
  # Single-argument variants: a map of key/value pairs, or a {key, value} tuple.
  def hidden_field(values) when is_map(values) do
    Enum.map(values, fn {key, value} ->
      hidden_field(key, value)
    end)
  end

  def hidden_field({key, values}) when is_map(values) do
    Enum.map(values, fn {next_key, value} ->
      hidden_field("#{key}[#{next_key}]", value)
    end)
  end

  def hidden_field({key, values}) when is_list(values) do
    Enum.map(values, fn value ->
      hidden_field("#{key}[]", value)
    end)
  end

  def hidden_field({key, value}), do: hidden_field(key, value)
end
|
apps/artemis_web/lib/artemis_web/view_helpers/form.ex
| 0.848314 | 0.490236 |
form.ex
|
starcoder
|
defmodule Parse.TripUpdates do
  @moduledoc """
  Parser for the GTFS-RT TripUpdates protobuf output.
  """
  @behaviour Parse
  alias Model.Prediction
  use Timex
  import :binary, only: [copy: 1]

  # Commuter Rail departures arrive as JSON (the payload starts with "{");
  # everything else is treated as a binary-encoded protobuf FeedMessage.
  def parse("{" <> _ = blob) do
    Parse.CommuterRailDepartures.JSON.parse(blob)
  end

  def parse(blob) do
    # Bind the decoded message instead of piping into an anonymous function
    # (`|> (fn m -> m.entity end).()`), which is an Elixir anti-pattern.
    message = Parse.Realtime.FeedMessage.decode(blob)

    message.entity
    |> Stream.map(fn entity -> entity.trip_update end)
    |> Stream.flat_map(&parse_trip_update/1)
  end

  @doc """
  Converts a single protobuf TripUpdate into a list of `Model.Prediction`
  structs, one per stop-time update that carries a stop ID.
  """
  def parse_trip_update(update) do
    # Fields shared by every prediction produced from this trip update.
    # `copy/1` unshares the sub-binaries so the large decoded blob can be GC'd.
    base = %Prediction{
      trip_id: copy(update.trip.trip_id),
      route_id: copy(update.trip.route_id),
      direction_id: update.trip.direction_id,
      vehicle_id: vehicle_id(update.vehicle),
      schedule_relationship: trip_relationship(update.trip.schedule_relationship)
    }

    update.stop_time_update
    |> Stream.reject(&is_nil(&1.stop_id))
    |> Enum.map(&parse_stop_time_update(&1, base))
    |> remove_last_departure_time([])
  end

  defp parse_stop_time_update(update, %Prediction{} = base) do
    %{
      base
      | stop_id: copy(update.stop_id),
        stop_sequence: update.stop_sequence,
        arrival_time: parse_stop_time_event(update.arrival),
        departure_time: parse_stop_time_event(update.departure),
        schedule_relationship:
          stop_time_relationship(update.schedule_relationship, base.schedule_relationship)
    }
  end

  @doc """
  Converts a StopTimeEvent into a local datetime.

  Returns `nil` for missing events or non-positive timestamps.
  """
  def parse_stop_time_event(%{time: seconds}) when is_integer(seconds) and seconds > 0 do
    Parse.Timezone.unix_to_local(seconds)
  end

  def parse_stop_time_event(_) do
    nil
  end

  defp vehicle_id(%{id: id}), do: id
  defp vehicle_id(_), do: nil

  # Trip-level schedule relationship; :SCHEDULED (and nil) carry no extra
  # information, so they map to nil.
  defp trip_relationship(nil), do: nil
  defp trip_relationship(:SCHEDULED), do: nil
  defp trip_relationship(:ADDED), do: :added
  defp trip_relationship(:UNSCHEDULED), do: :unscheduled
  # Normalized to the double-L spelling used downstream.
  defp trip_relationship(:CANCELED), do: :cancelled

  # A stop-level relationship only applies when the trip itself did not set
  # one; otherwise the trip-level value wins.
  defp stop_time_relationship(:SCHEDULED, nil), do: nil
  defp stop_time_relationship(:SKIPPED, nil), do: :skipped
  defp stop_time_relationship(:NO_DATA, nil), do: :no_data
  defp stop_time_relationship(_relationship, existing), do: existing

  # The final stop of a trip has no departure: strip its departure_time while
  # reversing the accumulated list back into order.
  defp remove_last_departure_time([], _) do
    []
  end

  defp remove_last_departure_time([last], acc) do
    last = %{last | departure_time: nil}
    Enum.reverse([last | acc])
  end

  defp remove_last_departure_time([first | rest], acc) do
    remove_last_departure_time(rest, [first | acc])
  end
end
|
apps/parse/lib/parse/trip_updates.ex
| 0.680879 | 0.450903 |
trip_updates.ex
|
starcoder
|
defmodule AWS.CognitoIdentityProvider do
@moduledoc """
Using the Amazon Cognito User Pools API, you can create a user pool to manage
directories and users.
You can authenticate a user to obtain tokens related to user identity and access
policies.
This API reference provides information about user pools in Amazon Cognito User
Pools.
For more information, see the Amazon Cognito Documentation.
"""
@doc """
Adds additional user attributes to the user pool schema.
"""
def add_custom_attributes(client, input, options \\ []) do
request(client, "AddCustomAttributes", input, options)
end
@doc """
Adds the specified user to the specified group.
Calling this action requires developer credentials.
"""
def admin_add_user_to_group(client, input, options \\ []) do
request(client, "AdminAddUserToGroup", input, options)
end
@doc """
Confirms user registration as an admin without using a confirmation code.
Works on any user.
Calling this action requires developer credentials.
"""
def admin_confirm_sign_up(client, input, options \\ []) do
request(client, "AdminConfirmSignUp", input, options)
end
@doc """
Creates a new user in the specified user pool.
If `MessageAction` is not set, the default is to send a welcome message via
email or phone (SMS).
This message is based on a template that you configured in your call to create
or update a user pool. This template includes your custom sign-up instructions
and placeholders for user name and temporary password.
Alternatively, you can call `AdminCreateUser` with “SUPPRESS” for the
`MessageAction` parameter, and Amazon Cognito will not send any email.
In either case, the user will be in the `FORCE_CHANGE_PASSWORD` state until they
sign in and change their password.
`AdminCreateUser` requires developer credentials.
"""
def admin_create_user(client, input, options \\ []) do
request(client, "AdminCreateUser", input, options)
end
@doc """
Deletes a user as an administrator.
Works on any user.
Calling this action requires developer credentials.
"""
def admin_delete_user(client, input, options \\ []) do
request(client, "AdminDeleteUser", input, options)
end
@doc """
Deletes the user attributes in a user pool as an administrator.
Works on any user.
Calling this action requires developer credentials.
"""
def admin_delete_user_attributes(client, input, options \\ []) do
request(client, "AdminDeleteUserAttributes", input, options)
end
@doc """
Disables the user from signing in with the specified external (SAML or social)
identity provider.
If the user to disable is a Cognito User Pools native username + password user,
they are not permitted to use their password to sign-in. If the user to disable
is a linked external IdP user, any link between that user and an existing user
is removed. The next time the external user (no longer attached to the
previously linked `DestinationUser`) signs in, they must create a new user
account. See
[AdminLinkProviderForUser](https://docs.aws.amazon.com/cognito-user-identity-pools/latest/APIReference/API_AdminLinkProviderForUser.html).
This action is enabled only for admin access and requires developer credentials.
The `ProviderName` must match the value specified when creating an IdP for the
pool.
To disable a native username + password user, the `ProviderName` value must be
`Cognito` and the `ProviderAttributeName` must be `Cognito_Subject`, with the
`ProviderAttributeValue` being the name that is used in the user pool for the
user.
The `ProviderAttributeName` must always be `Cognito_Subject` for social identity
providers. The `ProviderAttributeValue` must always be the exact subject that
was used when the user was originally linked as a source user.
For de-linking a SAML identity, there are two scenarios. If the linked identity
has not yet been used to sign-in, the `ProviderAttributeName` and
`ProviderAttributeValue` must be the same values that were used for the
`SourceUser` when the identities were originally linked using `
AdminLinkProviderForUser` call. (If the linking was done with
`ProviderAttributeName` set to `Cognito_Subject`, the same applies here).
However, if the user has already signed in, the `ProviderAttributeName` must be
`Cognito_Subject` and `ProviderAttributeValue` must be the subject of the SAML
assertion.
"""
def admin_disable_provider_for_user(client, input, options \\ []) do
request(client, "AdminDisableProviderForUser", input, options)
end
@doc """
Disables the specified user.
Calling this action requires developer credentials.
"""
def admin_disable_user(client, input, options \\ []) do
request(client, "AdminDisableUser", input, options)
end
@doc """
Enables the specified user as an administrator.
Works on any user.
Calling this action requires developer credentials.
"""
def admin_enable_user(client, input, options \\ []) do
request(client, "AdminEnableUser", input, options)
end
@doc """
Forgets the device, as an administrator.
Calling this action requires developer credentials.
"""
def admin_forget_device(client, input, options \\ []) do
request(client, "AdminForgetDevice", input, options)
end
@doc """
Gets the device, as an administrator.
Calling this action requires developer credentials.
"""
def admin_get_device(client, input, options \\ []) do
request(client, "AdminGetDevice", input, options)
end
@doc """
Gets the specified user by user name in a user pool as an administrator.
Works on any user.
Calling this action requires developer credentials.
"""
def admin_get_user(client, input, options \\ []) do
request(client, "AdminGetUser", input, options)
end
@doc """
Initiates the authentication flow, as an administrator.
Calling this action requires developer credentials.
"""
def admin_initiate_auth(client, input, options \\ []) do
request(client, "AdminInitiateAuth", input, options)
end
@doc """
Links an existing user account in a user pool (`DestinationUser`) to an identity
from an external identity provider (`SourceUser`) based on a specified attribute
name and value from the external identity provider.
This allows you to create a link from the existing user account to an external
federated user identity that has not yet been used to sign in, so that the
federated user identity can be used to sign in as the existing user account.
For example, if there is an existing user with a username and password, this API
links that user to a federated user identity, so that when the federated user
identity is used, the user signs in as the existing user account.
The maximum number of federated identities linked to a user is 5.
Because this API allows a user with an external federated identity to sign in as
an existing user in the user pool, it is critical that it only be used with
external identity providers and provider attributes that have been trusted by
the application owner.
This action is enabled only for admin access and requires developer credentials.
"""
def admin_link_provider_for_user(client, input, options \\ []) do
request(client, "AdminLinkProviderForUser", input, options)
end
@doc """
Lists devices, as an administrator.
Calling this action requires developer credentials.
"""
def admin_list_devices(client, input, options \\ []) do
request(client, "AdminListDevices", input, options)
end
@doc """
Lists the groups that the user belongs to.
Calling this action requires developer credentials.
"""
def admin_list_groups_for_user(client, input, options \\ []) do
request(client, "AdminListGroupsForUser", input, options)
end
@doc """
Lists a history of user activity and any risks detected as part of Amazon
Cognito advanced security.
"""
def admin_list_user_auth_events(client, input, options \\ []) do
request(client, "AdminListUserAuthEvents", input, options)
end
@doc """
Removes the specified user from the specified group.
Calling this action requires developer credentials.
"""
def admin_remove_user_from_group(client, input, options \\ []) do
request(client, "AdminRemoveUserFromGroup", input, options)
end
@doc """
Resets the specified user's password in a user pool as an administrator.
Works on any user.
When a developer calls this API, the current password is invalidated, so it must
be changed. If a user tries to sign in after the API is called, the app will get
a PasswordResetRequiredException exception back and should direct the user down
the flow to reset the password, which is the same as the forgot password flow.
In addition, if the user pool has phone verification selected and a verified
phone number exists for the user, or if email verification is selected and a
verified email exists for the user, calling this API will also result in sending
a message to the end user with the code to change their password.
Calling this action requires developer credentials.
"""
def admin_reset_user_password(client, input, options \\ []) do
request(client, "AdminResetUserPassword", input, options)
end
@doc """
Responds to an authentication challenge, as an administrator.
Calling this action requires developer credentials.
"""
def admin_respond_to_auth_challenge(client, input, options \\ []) do
request(client, "AdminRespondToAuthChallenge", input, options)
end
@doc """
Sets the user's multi-factor authentication (MFA) preference, including which
MFA options are enabled and if any are preferred.
Only one factor can be set as preferred. The preferred MFA factor will be used
to authenticate a user if multiple factors are enabled. If multiple options are
enabled and no preference is set, a challenge to choose an MFA option will be
returned during sign in.
"""
def admin_set_user_m_f_a_preference(client, input, options \\ []) do
request(client, "AdminSetUserMFAPreference", input, options)
end
@doc """
Sets the specified user's password in a user pool as an administrator.
Works on any user.
The password can be temporary or permanent. If it is temporary, the user status
will be placed into the `FORCE_CHANGE_PASSWORD` state. When the user next tries
to sign in, the InitiateAuth/AdminInitiateAuth response will contain the
`NEW_PASSWORD_REQUIRED` challenge. If the user does not sign in before it
expires, the user will not be able to sign in and their password will need to be
reset by an administrator.
Once the user has set a new password, or the password is permanent, the user
status will be set to `Confirmed`.
"""
def admin_set_user_password(client, input, options \\ []) do
request(client, "AdminSetUserPassword", input, options)
end
@doc """
*This action is no longer supported.* You can use it to configure only SMS MFA.
You can't use it to configure TOTP software token MFA. To configure either type
of MFA, use
[AdminSetUserMFAPreference](https://docs.aws.amazon.com/cognito-user-identity-pools/latest/APIReference/API_AdminSetUserMFAPreference.html)
instead.
"""
def admin_set_user_settings(client, input, options \\ []) do
request(client, "AdminSetUserSettings", input, options)
end
@doc """
Provides feedback for an authentication event as to whether it was from a valid
user.
This feedback is used for improving the risk evaluation decision for the user
pool as part of Amazon Cognito advanced security.
"""
def admin_update_auth_event_feedback(client, input, options \\ []) do
request(client, "AdminUpdateAuthEventFeedback", input, options)
end
@doc """
Updates the device status as an administrator.
Calling this action requires developer credentials.
"""
def admin_update_device_status(client, input, options \\ []) do
request(client, "AdminUpdateDeviceStatus", input, options)
end
@doc """
Updates the specified user's attributes, including developer attributes, as an
administrator.
Works on any user.
For custom attributes, you must prepend the `custom:` prefix to the attribute
name.
In addition to updating user attributes, this API can also be used to mark phone
and email as verified.
Calling this action requires developer credentials.
"""
def admin_update_user_attributes(client, input, options \\ []) do
request(client, "AdminUpdateUserAttributes", input, options)
end
@doc """
Signs out users from all devices, as an administrator.
It also invalidates all refresh tokens issued to a user. The user's current
access and Id tokens remain valid until their expiry. Access and Id tokens
expire one hour after they are issued.
Calling this action requires developer credentials.
"""
def admin_user_global_sign_out(client, input, options \\ []) do
request(client, "AdminUserGlobalSignOut", input, options)
end
@doc """
Returns a unique generated shared secret key code for the user account.
The request takes an access token or a session string, but not both.
"""
def associate_software_token(client, input, options \\ []) do
request(client, "AssociateSoftwareToken", input, options)
end
@doc """
Changes the password for a specified user in a user pool.
"""
def change_password(client, input, options \\ []) do
request(client, "ChangePassword", input, options)
end
@doc """
Confirms tracking of the device.
This API call is the call that begins device tracking.
"""
def confirm_device(client, input, options \\ []) do
request(client, "ConfirmDevice", input, options)
end
@doc """
Allows a user to enter a confirmation code to reset a forgotten password.
"""
def confirm_forgot_password(client, input, options \\ []) do
request(client, "ConfirmForgotPassword", input, options)
end
@doc """
Confirms registration of a user and handles the existing alias from a previous
user.
"""
def confirm_sign_up(client, input, options \\ []) do
request(client, "ConfirmSignUp", input, options)
end
@doc """
Creates a new group in the specified user pool.
Calling this action requires developer credentials.
"""
def create_group(client, input, options \\ []) do
request(client, "CreateGroup", input, options)
end
@doc """
Creates an identity provider for a user pool.
"""
def create_identity_provider(client, input, options \\ []) do
request(client, "CreateIdentityProvider", input, options)
end
@doc """
Creates a new OAuth2.0 resource server and defines custom scopes in it.
"""
def create_resource_server(client, input, options \\ []) do
request(client, "CreateResourceServer", input, options)
end
@doc """
Creates the user import job.
"""
def create_user_import_job(client, input, options \\ []) do
request(client, "CreateUserImportJob", input, options)
end
@doc """
Creates a new Amazon Cognito user pool and sets the password policy for the
pool.
"""
def create_user_pool(client, input, options \\ []) do
request(client, "CreateUserPool", input, options)
end
@doc """
Creates the user pool client.
"""
def create_user_pool_client(client, input, options \\ []) do
request(client, "CreateUserPoolClient", input, options)
end
@doc """
Creates a new domain for a user pool.
"""
def create_user_pool_domain(client, input, options \\ []) do
request(client, "CreateUserPoolDomain", input, options)
end
@doc """
Deletes a group.
Currently only groups with no members can be deleted.
Calling this action requires developer credentials.
"""
def delete_group(client, input, options \\ []) do
request(client, "DeleteGroup", input, options)
end
@doc """
Deletes an identity provider for a user pool.
"""
def delete_identity_provider(client, input, options \\ []) do
request(client, "DeleteIdentityProvider", input, options)
end
@doc """
Deletes a resource server.
"""
def delete_resource_server(client, input, options \\ []) do
request(client, "DeleteResourceServer", input, options)
end
@doc """
Allows a user to delete himself or herself.
"""
def delete_user(client, input, options \\ []) do
request(client, "DeleteUser", input, options)
end
@doc """
Deletes the attributes for a user.
"""
def delete_user_attributes(client, input, options \\ []) do
request(client, "DeleteUserAttributes", input, options)
end
@doc """
Deletes the specified Amazon Cognito user pool.
"""
def delete_user_pool(client, input, options \\ []) do
request(client, "DeleteUserPool", input, options)
end
@doc """
Allows the developer to delete the user pool client.
"""
def delete_user_pool_client(client, input, options \\ []) do
request(client, "DeleteUserPoolClient", input, options)
end
@doc """
Deletes a domain for a user pool.
"""
def delete_user_pool_domain(client, input, options \\ []) do
request(client, "DeleteUserPoolDomain", input, options)
end
@doc """
Gets information about a specific identity provider.
"""
def describe_identity_provider(client, input, options \\ []) do
request(client, "DescribeIdentityProvider", input, options)
end
@doc """
Describes a resource server.
"""
def describe_resource_server(client, input, options \\ []) do
request(client, "DescribeResourceServer", input, options)
end
@doc """
Describes the risk configuration.
"""
def describe_risk_configuration(client, input, options \\ []) do
request(client, "DescribeRiskConfiguration", input, options)
end
@doc """
Describes the user import job.
"""
def describe_user_import_job(client, input, options \\ []) do
request(client, "DescribeUserImportJob", input, options)
end
@doc """
Returns the configuration information and metadata of the specified user pool.
"""
def describe_user_pool(client, input, options \\ []) do
request(client, "DescribeUserPool", input, options)
end
@doc """
Client method for returning the configuration information and metadata of the
specified user pool app client.
"""
def describe_user_pool_client(client, input, options \\ []) do
request(client, "DescribeUserPoolClient", input, options)
end
@doc """
Gets information about a domain.
"""
def describe_user_pool_domain(client, input, options \\ []) do
request(client, "DescribeUserPoolDomain", input, options)
end
@doc """
Forgets the specified device.
"""
def forget_device(client, input, options \\ []) do
request(client, "ForgetDevice", input, options)
end
@doc """
Calling this API causes a message to be sent to the end user with a confirmation
code that is required to change the user's password.
For the `Username` parameter, you can use the username or user alias. The method
used to send the confirmation code is sent according to the specified
AccountRecoverySetting. For more information, see [Recovering User Accounts](https://docs.aws.amazon.com/cognito/latest/developerguide/how-to-recover-a-user-account.html)
in the *Amazon Cognito Developer Guide*. If neither a verified phone number nor
a verified email exists, an `InvalidParameterException` is thrown. To use the
confirmation code for resetting the password, call
[ConfirmForgotPassword](https://docs.aws.amazon.com/cognito-user-identity-pools/latest/APIReference/API_ConfirmForgotPassword.html).
"""
def forgot_password(client, input, options \\ []) do
request(client, "ForgotPassword", input, options)
end
@doc """
Gets the header information for the .csv file to be used as input for the user
import job.
"""
def get_csv_header(client, input, options \\ []) do
request(client, "GetCSVHeader", input, options)
end
@doc """
Gets the device.
"""
def get_device(client, input, options \\ []) do
request(client, "GetDevice", input, options)
end
@doc """
Gets a group.
Calling this action requires developer credentials.
"""
def get_group(client, input, options \\ []) do
request(client, "GetGroup", input, options)
end
@doc """
Gets the specified identity provider.
"""
def get_identity_provider_by_identifier(client, input, options \\ []) do
request(client, "GetIdentityProviderByIdentifier", input, options)
end
@doc """
This method takes a user pool ID, and returns the signing certificate.
"""
def get_signing_certificate(client, input, options \\ []) do
request(client, "GetSigningCertificate", input, options)
end
@doc """
Gets the UI Customization information for a particular app client's app UI, if
there is something set.
If nothing is set for the particular client, but there is an existing pool level
customization (app `clientId` will be `ALL`), then that is returned. If nothing
is present, then an empty shape is returned.
"""
def get_u_i_customization(client, input, options \\ []) do
request(client, "GetUICustomization", input, options)
end
@doc """
Gets the user attributes and metadata for a user.
"""
def get_user(client, input, options \\ []) do
request(client, "GetUser", input, options)
end
@doc """
Gets the user attribute verification code for the specified attribute name.
"""
def get_user_attribute_verification_code(client, input, options \\ []) do
request(client, "GetUserAttributeVerificationCode", input, options)
end
@doc """
Gets the user pool multi-factor authentication (MFA) configuration.
"""
def get_user_pool_mfa_config(client, input, options \\ []) do
request(client, "GetUserPoolMfaConfig", input, options)
end
@doc """
Signs out users from all devices.
It also invalidates all refresh tokens issued to a user. The user's current
access and Id tokens remain valid until their expiry. Access and Id tokens
expire one hour after they are issued.
"""
def global_sign_out(client, input, options \\ []) do
request(client, "GlobalSignOut", input, options)
end
@doc """
Initiates the authentication flow.
"""
def initiate_auth(client, input, options \\ []) do
request(client, "InitiateAuth", input, options)
end
@doc """
Lists the devices.
"""
def list_devices(client, input, options \\ []) do
request(client, "ListDevices", input, options)
end
@doc """
Lists the groups associated with a user pool.
Calling this action requires developer credentials.
"""
def list_groups(client, input, options \\ []) do
request(client, "ListGroups", input, options)
end
@doc """
Lists information about all identity providers for a user pool.
"""
def list_identity_providers(client, input, options \\ []) do
request(client, "ListIdentityProviders", input, options)
end
@doc """
Lists the resource servers for a user pool.
"""
def list_resource_servers(client, input, options \\ []) do
request(client, "ListResourceServers", input, options)
end
@doc """
Lists the tags that are assigned to an Amazon Cognito user pool.
A tag is a label that you can apply to user pools to categorize and manage them
in different ways, such as by purpose, owner, environment, or other criteria.
You can use this action up to 10 times per second, per account.
"""
def list_tags_for_resource(client, input, options \\ []) do
request(client, "ListTagsForResource", input, options)
end
@doc """
Lists the user import jobs.
"""
def list_user_import_jobs(client, input, options \\ []) do
request(client, "ListUserImportJobs", input, options)
end
@doc """
Lists the clients that have been created for the specified user pool.
"""
def list_user_pool_clients(client, input, options \\ []) do
request(client, "ListUserPoolClients", input, options)
end
@doc """
Lists the user pools associated with an AWS account.
"""
def list_user_pools(client, input, options \\ []) do
request(client, "ListUserPools", input, options)
end
@doc """
Lists the users in the Amazon Cognito user pool.
"""
def list_users(client, input, options \\ []) do
request(client, "ListUsers", input, options)
end
@doc """
Lists the users in the specified group.
Calling this action requires developer credentials.
"""
def list_users_in_group(client, input, options \\ []) do
request(client, "ListUsersInGroup", input, options)
end
@doc """
Resends the confirmation (for confirmation of registration) to a specific user
in the user pool.
"""
def resend_confirmation_code(client, input, options \\ []) do
request(client, "ResendConfirmationCode", input, options)
end
@doc """
Responds to the authentication challenge.
"""
def respond_to_auth_challenge(client, input, options \\ []) do
request(client, "RespondToAuthChallenge", input, options)
end
@doc """
Configures actions on detected risks.
To delete the risk configuration for `UserPoolId` or `ClientId`, pass null
values for all four configuration types.
To enable Amazon Cognito advanced security features, update the user pool to
include the `UserPoolAddOns` key `AdvancedSecurityMode`.
"""
def set_risk_configuration(client, input, options \\ []) do
request(client, "SetRiskConfiguration", input, options)
end
@doc """
Sets the UI customization information for a user pool's built-in app UI.
You can specify app UI customization settings for a single client (with a
specific `clientId`) or for all clients (by setting the `clientId` to `ALL`). If
you specify `ALL`, the default configuration will be used for every client that
has no UI customization set previously. If you specify UI customization settings
for a particular client, it will no longer fall back to the `ALL` configuration.
To use this API, your user pool must have a domain associated with it.
Otherwise, there is no place to host the app's pages, and the service will throw
an error.
"""
def set_u_i_customization(client, input, options \\ []) do
request(client, "SetUICustomization", input, options)
end
@doc """
Set the user's multi-factor authentication (MFA) method preference, including
which MFA factors are enabled and if any are preferred.
Only one factor can be set as preferred. The preferred MFA factor will be used
to authenticate a user if multiple factors are enabled. If multiple options are
enabled and no preference is set, a challenge to choose an MFA option will be
returned during sign in.
"""
def set_user_m_f_a_preference(client, input, options \\ []) do
request(client, "SetUserMFAPreference", input, options)
end
@doc """
Set the user pool multi-factor authentication (MFA) configuration.
"""
def set_user_pool_mfa_config(client, input, options \\ []) do
request(client, "SetUserPoolMfaConfig", input, options)
end
@doc """
*This action is no longer supported.* You can use it to configure only SMS MFA.
You can't use it to configure TOTP software token MFA. To configure either type
of MFA, use
[SetUserMFAPreference](https://docs.aws.amazon.com/cognito-user-identity-pools/latest/APIReference/API_SetUserMFAPreference.html)
instead.
"""
def set_user_settings(client, input, options \\ []) do
request(client, "SetUserSettings", input, options)
end
@doc """
Registers the user in the specified user pool and creates a user name, password,
and user attributes.
"""
def sign_up(client, input, options \\ []) do
request(client, "SignUp", input, options)
end
@doc """
Starts the user import.
"""
def start_user_import_job(client, input, options \\ []) do
request(client, "StartUserImportJob", input, options)
end
@doc """
Stops the user import job.
"""
def stop_user_import_job(client, input, options \\ []) do
request(client, "StopUserImportJob", input, options)
end
@doc """
Assigns a set of tags to an Amazon Cognito user pool.
A tag is a label that you can use to categorize and manage user pools in
different ways, such as by purpose, owner, environment, or other criteria.
Each tag consists of a key and value, both of which you define. A key is a
general category for more specific values. For example, if you have two versions
of a user pool, one for testing and another for production, you might assign an
`Environment` tag key to both user pools. The value of this key might be `Test`
for one user pool and `Production` for the other.
Tags are useful for cost tracking and access control. You can activate your tags
so that they appear on the Billing and Cost Management console, where you can
track the costs associated with your user pools. In an IAM policy, you can
constrain permissions for user pools based on specific tags or tag values.
You can use this action up to 5 times per second, per account. A user pool can
have as many as 50 tags.
"""
def tag_resource(client, input, options \\ []) do
request(client, "TagResource", input, options)
end
@doc """
Removes the specified tags from an Amazon Cognito user pool.
You can use this action up to 5 times per second, per account
"""
def untag_resource(client, input, options \\ []) do
request(client, "UntagResource", input, options)
end
@doc """
Provides the feedback for an authentication event whether it was from a valid
user or not.
This feedback is used for improving the risk evaluation decision for the user
pool as part of Amazon Cognito advanced security.
"""
def update_auth_event_feedback(client, input, options \\ []) do
request(client, "UpdateAuthEventFeedback", input, options)
end
@doc """
Updates the device status.
"""
def update_device_status(client, input, options \\ []) do
request(client, "UpdateDeviceStatus", input, options)
end
@doc """
Updates the specified group with the specified attributes.
Calling this action requires developer credentials.
If you don't provide a value for an attribute, it will be set to the default
value.
"""
def update_group(client, input, options \\ []) do
request(client, "UpdateGroup", input, options)
end
@doc """
Updates identity provider information for a user pool.
"""
def update_identity_provider(client, input, options \\ []) do
request(client, "UpdateIdentityProvider", input, options)
end
@doc """
Updates the name and scopes of resource server.
All other fields are read-only.
If you don't provide a value for an attribute, it will be set to the default
value.
"""
def update_resource_server(client, input, options \\ []) do
request(client, "UpdateResourceServer", input, options)
end
@doc """
Allows a user to update a specific attribute (one at a time).
"""
def update_user_attributes(client, input, options \\ []) do
request(client, "UpdateUserAttributes", input, options)
end
@doc """
Updates the specified user pool with the specified attributes.
You can get a list of the current user pool settings using
[DescribeUserPool](https://docs.aws.amazon.com/cognito-user-identity-pools/latest/APIReference/API_DescribeUserPool.html).
If you don't provide a value for an attribute, it will be set to the default
value.
"""
def update_user_pool(client, input, options \\ []) do
request(client, "UpdateUserPool", input, options)
end
@doc """
Updates the specified user pool app client with the specified attributes.
You can get a list of the current user pool app client settings using
[DescribeUserPoolClient](https://docs.aws.amazon.com/cognito-user-identity-pools/latest/APIReference/API_DescribeUserPoolClient.html).
If you don't provide a value for an attribute, it will be set to the default
value.
"""
def update_user_pool_client(client, input, options \\ []) do
request(client, "UpdateUserPoolClient", input, options)
end
@doc """
Updates the Secure Sockets Layer (SSL) certificate for the custom domain for
your user pool.
You can use this operation to provide the Amazon Resource Name (ARN) of a new
certificate to Amazon Cognito. You cannot use it to change the domain for a user
pool.
A custom domain is used to host the Amazon Cognito hosted UI, which provides
sign-up and sign-in pages for your application. When you set up a custom domain,
you provide a certificate that you manage with AWS Certificate Manager (ACM).
When necessary, you can use this operation to change the certificate that you
applied to your custom domain.
Usually, this is unnecessary following routine certificate renewal with ACM.
When you renew your existing certificate in ACM, the ARN for your certificate
remains the same, and your custom domain uses the new certificate automatically.
However, if you replace your existing certificate with a new one, ACM gives the
new certificate a new ARN. To apply the new certificate to your custom domain,
you must provide this ARN to Amazon Cognito.
When you add your new certificate in ACM, you must choose US East (N. Virginia)
as the AWS Region.
After you submit your request, Amazon Cognito requires up to 1 hour to
distribute your new certificate to your custom domain.
For more information about adding a custom domain to your user pool, see [Using Your Own Domain for the Hosted
UI](https://docs.aws.amazon.com/cognito/latest/developerguide/cognito-user-pools-add-custom-domain.html).
"""
def update_user_pool_domain(client, input, options \\ []) do
request(client, "UpdateUserPoolDomain", input, options)
end
@doc """
Use this API to register a user's entered TOTP code and mark the user's software
token MFA status as "verified" if successful.
The request takes an access token or a session string, but not both.
"""
def verify_software_token(client, input, options \\ []) do
request(client, "VerifySoftwareToken", input, options)
end
@doc """
Verifies the specified user attributes in the user pool.
"""
def verify_user_attribute(client, input, options \\ []) do
request(client, "VerifyUserAttribute", input, options)
end
@spec request(AWS.Client.t(), binary(), map(), list()) ::
{:ok, map() | nil, map()}
| {:error, term()}
# Builds and signs an AWS JSON 1.1 request for the Cognito Identity Provider
# service and performs it. `action` is the service operation name (for example
# "SignUp"); `input` is the operation's request payload as a map.
defp request(client, action, input, options) do
client = %{client | service: "cognito-idp"}
host = build_host("cognito-idp", client)
url = build_url(host, client)
headers = [
{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
# Cognito selects the operation via the X-Amz-Target header, not the URL path.
{"X-Amz-Target", "AWSCognitoIdentityProviderService.#{action}"}
]
payload = encode!(client, input)
# Sign only after the payload is final: SigV4 covers the body hash as well as headers.
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
post(client, url, payload, headers, options)
end
# Executes the signed POST and normalizes the response:
# * HTTP 200 -> {:ok, decoded_body_or_nil, raw_response}; the body is nil when
#   the service returned an empty string (nothing to decode)
# * any other status -> {:error, {:unexpected_response, response}}
# * transport failure -> the {:error, reason} tuple from the HTTP client, as-is
defp post(client, url, payload, headers, options) do
case AWS.Client.request(client, :post, url, payload, headers, options) do
{:ok, %{status_code: 200, body: body} = response} ->
body = if body != "", do: decode!(client, body)
{:ok, body, response}
{:ok, response} ->
{:error, {:unexpected_response, response}}
error = {:error, _reason} -> error
end
end
# Resolves the hostname for a request. A "local" region targets either a
# custom endpoint (when configured) or plain "localhost"; otherwise the host
# is "<service-prefix>.<region>.<endpoint>".
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}), do: endpoint
defp build_host(_endpoint_prefix, %{region: "local"}), do: "localhost"
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}),
do: Enum.join([endpoint_prefix, region, endpoint], ".")
# Composes the request URL from the client's protocol, resolved host and port.
defp build_url(host, %{proto: proto, port: port}) do
"#{proto}://#{host}:#{port}/"
end
# Serializes `payload` to JSON using the client's configured encoder; raises on failure.
defp encode!(client, payload) do
AWS.Client.encode!(client, payload, :json)
end
# Parses a JSON response body using the client's configured decoder; raises on failure.
defp decode!(client, payload) do
AWS.Client.decode!(client, payload, :json)
end
end
|
lib/aws/generated/cognito_identity_provider.ex
| 0.866175 | 0.462655 |
cognito_identity_provider.ex
|
starcoder
|
defmodule EV do
@moduledoc """
EV is a library for implementing events-based architecture.
At a high level the goal is to split data processing and persistence (or API calls). An event is a standardised intermediary passed between these two steps.
Suppose you have a function to create a user:
```elixir
def create_user(params) do
params
|> User.changeset(params)
|> Repo.insert()
end
```
Let's refactor this function using EV:
```elixir
def create_user(params) do
params
|> User.new_changeset(params)
|> EV.ChangesetHelper.fetch_changes()
|> EV.maybe_publish(:user_created, nil)
|> EV.maybe_apply()
end
@impl EV.Handler
def handle(%{type: :user_created, payload: payload}, _opts) do
payload
|> User.changeset(params)
|> Repo.insert()
end
```
Now if you call the `create_user/1` function you'll see something like this `{:ok, {%EV.Event{...} = _event, %User{...} = _user}}`.
The event here is a struct which holds data such as type, payload, etc. Crucially, events hold all data necessary for the handler to execute its function.
Here, this means a user can be created just from the event.
While in this example the event was just returned alongside the user, the most basic usage would involve actually saving the event to a database.
To achieve this you can use the `EV.Publishers.DatabasePersisted` publisher, `EV.Applicators.DatabasePersisted` applicator,
or write your own using the `EV.Publisher` and `EV.Applicator` behaviours, respectively.
Let's now move on to tying it all together by discussing the event lifecycle.
1. An event changeset is created with supplied data.
```elixir
%Ecto.Changeset{..., changes: %{
payload: %{...},
issuer: %{...},
version: 1,
published_at: ~U[2022-01-06 18:09:39.218Z]
}}
```
2. The changeset is passed to the configured publisher for processing. This returns an event struct.
```elixir
%EV.Event{
payload: %{...},
issuer: %{...},
version: 1,
published_at: ~U[2022-01-06 18:09:39.218Z]
}
```
3. The event is turned into a changeset again.
```elixir
%Ecto.Changeset{..., data: %{
payload: %{...},
issuer: %{...},
version: 1,
published_at: ~U[2022-01-06 18:09:39.218Z]
},
changes: %{
applied_at: ~U[2022-01-06 18:11:44.225Z]
}}
```
4. The event is passed to the configured applicator for processing.
```elixir
%EV.Event{
payload: %{...},
issuer: %{...},
version: 1,
published_at: ~U[2022-01-06 18:09:39.218Z],
applied_at: ~U[2022-01-06 18:11:44.225Z]
}
```
5. The event is passed to the handler.
Steps 1-2. are triggered by `EV.publish/4` and `EV.maybe_publish/4`. Steps 3-4. are triggered by `EV.apply/2` and `EV.maybe_apply/2`.
Step 5. is triggered from the applicator, but it's technically optional to call the handler at all.
When using the `EV.Publishers.DatabasePersisted` publisher and `EV.Applicators.DatabasePersisted` applicator,
steps 2. and 4. would be when the event is first saved and later updated (to include the `applied_at` date).
## Okay, but why?
There are many benefits to this architecture, but here are some examples:
1. Auditing. You can go through the history of your application's state to see what happened and who triggered it.
2. Rollback/rollfarward. You can replicate the database state at any given point in time.
3. Testing. You can replicate a part of a staging/production database (or even the whole thing) locally to test and debug.
4. Asynchronous execution. You may opt to apply events asynchronously or in a background job.
## Configuration
Everything can be configured either globally or by passing options to the relevant function(s). Explicitly passed options override global config.
Some functions' options documentation includes a note "full path ...". This refers to the path when configuring this option globally.
When passing options directly to the function you may use the short path.
Take a look at the difference in `EV.ChangesetHelper.fetch_changes/2` configuration in the example below.
Explicitly passed options are meant to override the global config and/or be used for testing and debugging. Configuring common options globally is preferred,
especially for shared options.
Your custom publishers and applicators can have their own options. To learn more see `EV.ConfigHelper`.
### Example
With global config:
```elixir
config :ev,
publisher: EV.Publishers.DatabasePersisted,
applicator: EV.Applicators.DatabasePersisted,
events: [
user_created: [version: 1, handler: &User.handle/2],
],
persistence_opts: [
repo: MyApp.Repo,
table_name: "events",
migration_version: "0.1.0"
],
changeset_helper_opts: [
carry_fields: [:id]
]
def create_user(params) do
params
|> User.new_changeset(params)
|> EV.ChangesetHelper.fetch_changes()
|> EV.maybe_publish(:user_created, nil)
|> EV.maybe_apply()
end
```
With supplied options:
```elixir
def create_user(params) do
params
|> User.new_changeset(params)
|> EV.ChangesetHelper.fetch_changes(carry_fields: [:id])
|> EV.maybe_publish(
:user_created,
nil,
publisher: EV.Publishers.DatabasePersisted,
events: [
user_created: [version: 1]
],
persistence_opts: [repo: MyApp.Repo]
)
|> EV.maybe_apply(
applicator: EV.Applicators.DatabasePersisted,
events: [
user_created: [handler: &User.handle/2]
],
persistence_opts: [
repo: MyApp.Repo,
table_name: "events",
migration_version: "0.1.0"
]
)
end
```
## Versioning
Events must be versioned in order to avoid having invalid events.
When a handler's logic changes the corresponding event should be given a version bump. The handler itself should be split into two function headers, each matching on the version.
Alternatively, existing events can be migrated if possible.
"""
import Kernel, except: [apply: 2]
@doc """
Publishes an event. Calls supplied publisher.
## Options
* `publisher` - optional; module used for the publication; see `EV.Publisher` for more details;
defaults to `EV.Publishers.Default`; example values:
* `EV.Publishers.Default` - returns the event without persisting it anywhere
* `EV.Publishers.DatabasePersisted` - saves the event in a database, using `Ecto`;
requires `:repo` to be supplied in `:publisher_opts`
* `:events`
* `event_type`
* `:version` - required; positive integer used as the event version
## Examples
```elixir
iex> {:ok, event} = EV.publish(%{a: 1, b: 2}, :something_happened, %{type: :system}, events: [something_happened: [version: 1]])
iex> event.type
:something_happened
iex> event.version
1
iex> event.payload
%{"a" => 1, "b" => 2}
iex> event.issuer
%{"type" => "system"}
```
"""
@spec publish(payload :: term(), type :: atom(), issuer :: map() | nil, opts :: Keyword.t()) ::
{:ok, EV.Event.t()} | {:error, any()}
def publish(payload, type, issuer, opts \\ []) do
publisher = EV.ConfigHelper.get_config(opts, :publisher, EV.Publishers.Default)
version = EV.ConfigHelper.fetch_config!(opts, [:events, type, :version])
# Stamp the publication time here so the publisher only has to persist it.
changeset =
EV.Event.publish_changeset(%{
payload: payload,
type: type,
issuer: issuer,
version: version,
published_at: DateTime.utc_now()
})
publisher.call(changeset, opts)
end
@doc """
Publishes an event only if given a tuple of `{:ok, payload}`.
Any other value (typically an `{:error, _}` tuple) is passed through untouched.
For more details see `publish/4`.
"""
@spec maybe_publish(
maybe_payload :: {:ok, map()} | {:error, any()},
type :: atom(),
issuer :: map() | nil,
opts :: Keyword.t()
) :: {:ok, EV.Event.t()} | {:error, any()}
def maybe_publish(maybe_payload, type, issuer, opts \\ [])
def maybe_publish({:ok, payload}, type, issuer, opts), do: publish(payload, type, issuer, opts)
def maybe_publish(error, _type, _issuer, _opts), do: error
@doc """
Applies an event. Calls supplied applicator.
## Options
* `applicator` - optional; module used for the application; see `EV.Applicator` for more details;
defaults to `EV.Applicators.Default`; example values:
* `EV.Applicators.Default` - returns the event without persisting it anywhere
* `EV.Applicators.DatabasePersisted` - saves the event in a database, using `Ecto`;
requires `:repo` to be supplied in `:publisher_opts`
* `:events`
* `event_type`
* `:handler` - required; handler function (e.g. `&User.handle/2`) the applicator invokes to process the event
## Examples
```elixir
iex> {:ok, event} = EV.publish(%{a: 1, b: 2}, :something_happened, %{type: :system}, events: [something_happened: [version: 1]])
iex> event.type
:something_happened
iex> event.version
1
iex> event.payload
%{"a" => 1, "b" => 2}
iex> event.issuer
%{"type" => "system"}
```
"""
@spec apply(event :: EV.Event.t(), opts :: Keyword.t()) :: {:ok | :error, any()}
def apply(%{type: type} = event, opts \\ []) do
applicator = EV.ConfigHelper.get_config(opts, :applicator, EV.Applicators.Default)
handler = EV.ConfigHelper.fetch_config!(opts, [:events, type, :handler])
event
|> EV.Event.apply_changeset(%{applied_at: DateTime.utc_now()})
|> applicator.call(handler, opts)
end
@doc """
Applies an event only if given a tuple of `{:ok, event}`.
Any other value (typically an `{:error, _}` tuple) is passed through untouched.
For more details see `apply/2`.
"""
@spec maybe_apply(maybe_event :: {:ok, EV.Event.t()} | {:error, any()}, opts :: Keyword.t()) ::
{:ok | :error, any()}
def maybe_apply(maybe_event, opts \\ [])
def maybe_apply({:ok, event}, opts), do: apply(event, opts)
def maybe_apply(error, _opts), do: error
end
|
lib/ev.ex
| 0.942771 | 0.869659 |
ev.ex
|
starcoder
|
defmodule GGity.Scale.Shape.Manual do
@moduledoc false
alias GGity.{Draw, Labels}
alias GGity.Scale.Shape
@type t() :: %__MODULE__{}
defstruct levels: nil,
transform: nil,
labels: :waivers,
guide: :legend,
values: []
# Builds a manual shape scale; requires a :values option listing the marker
# values (strings) to cycle through.
@spec new(keyword()) :: Shape.Manual.t()
def new(options) do
values =
options
|> Keyword.get(:values)
|> set_values()
options = Keyword.put_new(options, :values, values)
struct(Shape.Manual, options)
end
defp set_values(nil),
do: raise(ArgumentError, "Manual scales must be passed a :values option with scale values.")
defp set_values([value | _other_values] = values) when is_binary(value) do
values
end
# Trains the scale on the observed levels, assigning each level a marker by
# cycling through the configured values.
@spec train(Shape.Manual.t(), list(binary())) :: Shape.Manual.t()
def train(scale, [level | _other_levels] = levels) when is_list(levels) and is_binary(level) do
number_of_levels = length(levels)
# Repeat the configured values as often as needed to cover every level;
# a tuple gives constant-time lookup by index below.
palette =
scale.values
|> Stream.cycle()
|> Enum.take(number_of_levels)
|> List.to_tuple()
values_map =
levels
|> Stream.with_index()
|> Stream.map(fn {level, index} ->
{level, elem(palette, index)}
end)
|> Enum.into(%{})
struct(scale, levels: levels, transform: fn value -> values_map[to_string(value)] end)
end
# Draws the legend as an iolist of SVG elements. Returns [] when the guide is
# disabled or there are fewer than two levels (a one-entry legend conveys nothing).
@spec draw_legend(Shape.Manual.t(), binary(), number()) :: iolist()
def draw_legend(%Shape.Manual{guide: :none}, _label, _key_height), do: []
def draw_legend(%Shape.Manual{levels: []}, _label, _key_height), do: []
def draw_legend(%Shape.Manual{levels: [_]}, _label, _key_height), do: []
def draw_legend(%Shape.Manual{levels: levels} = scale, label, key_height) do
[
Draw.text(
"#{label}",
x: "0",
y: "-5",
class: "gg-text gg-legend-title",
text_anchor: "left"
),
Stream.with_index(levels)
|> Enum.map(fn {level, index} -> draw_legend_item(scale, {level, index}, key_height) end)
]
end
defp draw_legend_item(scale, {level, index}, key_height) do
marker = scale.transform.(level)
# Character markers are rendered as text and need a larger size than point markers.
size =
case marker do
character when is_binary(character) -> 7 / 15 * key_height
_otherwise -> key_height / 3
end
[
Draw.rect(
x: "0",
y: "#{key_height * index}",
height: key_height,
width: key_height,
class: "gg-legend-key"
),
Draw.marker(
marker,
{key_height / 2, key_height / 2 + key_height * index},
size,
fill: "black",
fill_opacity: "1"
),
Draw.text(
"#{Labels.format(scale, level)}",
x: "#{5 + key_height}",
y: "#{10 + key_height * index}",
class: "gg-text gg-legend-text",
text_anchor: "left"
)
]
end
end
|
lib/ggity/scale/shape_manual.ex
| 0.855384 | 0.479321 |
shape_manual.ex
|
starcoder
|
defmodule Talib.BollingerBand do
alias Talib.SMA
alias Talib.Average
require OK
require Logger
@moduledoc ~S"""
Defines a Bollinger bands.
## History
Version: 1.0
https://stockcharts.com/school/doku.php?id=chart_school:technical_indicators:bollinger_bands
Audited by:
| Name | Title |
| :----------- | :---------------- |
| | |
"""
@typedoc """
Defines a Bollinger Band price volatility.
* :period - Period used to calculate SMA, typically 20
* :deviation - Multiplier to standard deviation from SMA typically 2
* :values - List of values resulting from the calculation {upper, middle, lower}
"""
@type t :: %Talib.BollingerBand{
period: integer,
deviation: integer,
values: [number]
}
defstruct period: 0,
deviation: 0,
values: []
@doc """
Gets the BBand of a list.
Each element of the result's `:values` is a tuple of {Upper Band, Middle Band, Lower Band}.
Returns `{:error, :no_data}` if the given list is an empty list.
## Examples
iex>Talib.BollingerBand.from_list([1, 2, 3, 4, 5, 6], 3, 2)
{:ok, %Talib.BollingerBand{
period: 3,
deviation: 2,
values: [
{nil, nil, nil},
{nil, nil, nil},
{3.0, 2.0, 1.0},
{4.6329931618554525, 3.0, 1.367006838144548},
{5.6329931618554525, 4.0, 2.367006838144548},
{6.6329931618554525, 5.0, 3.367006838144548}
]
}}
iex>Talib.BollingerBand.from_list([], 3, 2)
{:error, :no_data}
"""
@spec from_list([number], integer, integer) ::
{:ok, Talib.BollingerBand.t()}
| {:error, atom}
def from_list(data, period \\ 20, deviation \\ 2),
do: calculate(data, period, deviation)
@doc """
Gets the BBand of a list.
Each element of the result's `:values` is a tuple of {Upper Band, Middle Band, Lower Band}.
Raises `NoDataError` if the given list is an empty list.
## Examples
iex>Talib.BollingerBand.from_list!([1, 2, 3], 3, 2)
%Talib.BollingerBand{
deviation: 2,
period: 3,
values: [
{nil, nil, nil},
{nil, nil, nil},
{3.0, 2.0, 1.0}
]
}
iex>Talib.BollingerBand.from_list!([], 20, 2)
** (NoDataError) no data error
"""
@spec from_list!([number], integer, integer) ::
Talib.BollingerBand.t()
| no_return
def from_list!(data, period \\ 20, deviation \\ 2) do
case calculate(data, period, deviation) do
{:ok, result} -> result
{:error, :no_data} -> raise NoDataError
end
end
# A Bollinger point cannot be computed until a full period of data is
# available, so nil inputs propagate as an all-nil tuple.
defp calculate_bband_point(mid, stddev, _deviation) when is_nil(mid) or is_nil(stddev) do
{nil, nil, nil}
end
defp calculate_bband_point(mid, stddev, deviation) when is_float(stddev) and is_float(mid) do
offset = stddev * deviation
{mid + offset, mid, mid - offset}
end
# Given the raw series for the window, derive its standard deviation first,
# then delegate to the float clause above.
defp calculate_bband_point(mid, stddev_series, deviation) when is_list(stddev_series) do
calculate_bband_point(mid, Average.deviation!(stddev_series), deviation)
end
@doc false
# Computes the Bollinger Band series: the SMA provides the middle band, and
# each point's upper/lower bands are `deviation` standard deviations of the
# trailing window away from it.
@spec calculate([number], integer, integer) ::
{:ok, Talib.BollingerBand.t()}
| {:error, atom}
defp calculate(data, period, deviation) do
OK.try do
%SMA{values: middle_band} <- SMA.from_list(data, period)
# Build the trailing `period`-sized window of raw values that ends at each
# data point, by chunking the reversed series with a step of 1 and
# reversing everything back.
bband_ =
data
|> Enum.reverse()
|> Enum.chunk_every(period, 1, [nil])
|> Enum.reverse()
|> Enum.map(&Enum.reverse/1)
# Pad the front with all-nil windows so the window list lines up
# element-for-element with the SMA series.
deficit = length(data) - length(bband_)
empty =
Stream.cycle([nil])
|> Enum.take(period)
bband =
Stream.cycle([empty])
|> Enum.take(deficit)
|> Kernel.++(bband_)
|> Enum.zip(middle_band)
|> Enum.map(fn {series, m} -> calculate_bband_point(m, series, deviation) end)
after
{:ok,
%Talib.BollingerBand{
period: period,
deviation: deviation,
values: bband
}}
rescue
:no_data -> {:error, :no_data}
end
end
end
|
lib/talib/bollinger_band.ex
| 0.875048 | 0.656101 |
bollinger_band.ex
|
starcoder
|
defmodule Snap.Cluster do
@moduledoc """
Defines a cluster.
A cluster maps to an Elasticsearch endpoint.
When used, the cluster expects `:otp_app` as an option. The `:otp_app`
should point to an OTP application that has the cluster configuration. For
example, this cluster:
```
defmodule MyApp.Cluster do
use Snap.Cluster, otp_app: :my_app
end
```
Can be configured with:
```
config :my_app, MyApp.Cluster,
url: "http://localhost:9200",
username: "username",
password: "password",
pool_size: 10
```
"""
# Injects the cluster's client API into the using module: configuration
# helpers, the HTTP verb functions (get/post/put/delete) and the
# supervision plumbing (child_spec/start_link).
defmacro __using__(opts) do
quote do
alias Snap.Cluster.Supervisor
alias Snap.Request
# Default implementation of the `init/1` callback; override it to load
# config from somewhere other than the application environment.
def init(config) do
{:ok, config}
end
defoverridable init: 1
@doc """
Returns the config map that the Cluster was defined with.
"""
def config() do
Supervisor.config(__MODULE__)
end
@doc """
Returns the otp_app that the Cluster was defined with.
"""
def otp_app() do
unquote(opts[:otp_app])
end
@doc """
Sends a GET request. See `c:Snap.Cluster.get/4`.
"""
def get(path, params \\ [], headers \\ [], opts \\ []) do
Request.request(__MODULE__, "GET", path, nil, params, headers, opts)
end
@doc """
Sends a POST request. See `c:Snap.Cluster.post/5`.
"""
def post(path, body \\ nil, params \\ [], headers \\ [], opts \\ []) do
Request.request(__MODULE__, "POST", path, body, params, headers, opts)
end
@doc """
Sends a PUT request. See `c:Snap.Cluster.put/5`.
"""
def put(path, body \\ nil, params \\ [], headers \\ [], opts \\ []) do
Request.request(__MODULE__, "PUT", path, body, params, headers, opts)
end
@doc """
Sends a DELETE request. See `c:Snap.Cluster.delete/4`.
"""
def delete(path, params \\ [], headers \\ [], opts \\ []) do
Request.request(__MODULE__, "DELETE", path, nil, params, headers, opts)
end
def child_spec(opts) do
%{
id: __MODULE__,
start: {__MODULE__, :start_link, [opts]},
type: :supervisor
}
end
def start_link(config \\ []) do
otp_app = unquote(opts[:otp_app])
# Application config, when present, takes precedence over config passed
# directly to start_link.
config = Application.get_env(otp_app, __MODULE__, config)
{:ok, config} = init(config)
Supervisor.start_link(__MODULE__, otp_app, config)
end
end
end
@typedoc "The path of the HTTP endpoint"
@type path :: String.t()
@typedoc "The query params, which will be appended to the path"
@type params :: Keyword.t()
@typedoc "The body of the HTTP request"
@type body :: String.t() | nil | binary() | map()
@typedoc "Any additional HTTP headers sent with the request"
@type headers :: Mint.Types.headers()
@typedoc "Options passed through to the request"
@type opts :: Keyword.t()
@typedoc "The result from an HTTP operation"
@type result :: success() | error()
@typedoc "A successful results from an HTTP operation"
@type success :: {:ok, map()}
@typedoc "An error from an HTTP operation"
@type error :: {:error, Snap.ResponseError.t() | Mint.Types.error() | Jason.DecodeError.t()}
@doc """
Sends a GET request.
Returns either:
* `{:ok, response}` - where response is a map representing the parsed JSON response.
* `{:error, error}` - where the error can be a struct of either:
* `Snap.ResponseError`
* `Mint.TransportError`
* `Mint.HTTPError`
* `Jason.DecodeError`
"""
@callback get(path, params, headers, opts) :: result()
@doc """
Sends a POST request.
Returns either:
* `{:ok, response}` - where response is a map representing the parsed JSON response.
* `{:error, error}` - where the error can be a struct of either:
* `Snap.ResponseError`
* `Mint.TransportError`
* `Mint.HTTPError`
* `Jason.DecodeError`
"""
@callback post(path, body, params, headers, opts) :: result()
@doc """
Sends a PUT request.
Returns either:
* `{:ok, response}` - where response is a map representing the parsed JSON response.
* `{:error, error}` - where the error can be a struct of either:
* `Snap.ResponseError`
* `Mint.TransportError`
* `Mint.HTTPError`
* `Jason.DecodeError`
"""
@callback put(path, body, params, headers, opts) :: result()
@doc """
Sends a DELETE request.
Returns either:
* `{:ok, response}` - where response is a map representing the parsed JSON response.
* `{:error, error}` - where the error can be a struct of either:
* `Snap.ResponseError`
* `Mint.TransportError`
* `Mint.HTTPError`
* `Jason.DecodeError`
"""
@callback delete(path, params, headers, opts) :: result()
@doc """
Returns the config in use by this cluster.
"""
@callback config() :: Keyword.t()
@doc """
Sets up the config for the cluster.
Override this to dynamically load a config from somewhere other than your
application config.
"""
@callback init(Keyword.t() | nil) :: {:ok, Keyword.t()}
end
|
lib/snap/cluster.ex
| 0.916629 | 0.783699 |
cluster.ex
|
starcoder
|
defmodule Rajska.QueryAuthorization do
  @moduledoc """
  Absinthe middleware that enforces role-based query permissions.

  ## Usage

  [Create your Authorization module and add it and QueryAuthorization to your Absinthe.Schema](https://hexdocs.pm/rajska/Rajska.html#module-usage), then declare the role permitted to run each query or mutation:

  ```elixir
  mutation do
    field :create_user, :user do
      arg :params, non_null(:user_params)
      middleware Rajska.QueryAuthorization, permit: :all
      resolve &AccountsResolver.create_user/2
    end
    field :update_user, :user do
      arg :id, non_null(:integer)
      arg :params, non_null(:user_params)
      middleware Rajska.QueryAuthorization, [permit: :user, scope: User] # same as [permit: :user, scope: User, args: :id]
      resolve &AccountsResolver.update_user/2
    end
    field :delete_user, :user do
      arg :id, non_null(:integer)
      middleware Rajska.QueryAuthorization, permit: :admin
      resolve &AccountsResolver.delete_user/2
    end
  end
  ```

  Query authorization calls `c:Rajska.Authorization.role_authorized?/2` to decide whether the
  [user](https://hexdocs.pm/rajska/Rajska.Authorization.html#c:get_current_user/1)
  [role](https://hexdocs.pm/rajska/Rajska.Authorization.html#c:get_user_role/1) may perform the query.
  """

  alias Absinthe.Resolution
  alias Rajska.QueryScopeAuthorization

  @behaviour Absinthe.Middleware

  # Validates the configured permission against the known roles, checks whether
  # the current context's role is authorized, then hands off to scope checks.
  def call(%{context: context} = resolution, [{:permit, permission} | _scope] = config) do
    validate_permission!(context, permission)

    authorized? =
      Rajska.apply_auth_mod(context, :context_role_authorized?, [context, permission])

    authorized?
    |> update_result(resolution)
    |> QueryScopeAuthorization.call(config)
  end

  # Raises a descriptive error when the schema declares a permission that the
  # authorization module does not recognize (a configuration bug, not a
  # runtime authorization failure).
  defp validate_permission!(context, permitted_roles) do
    valid_roles = Rajska.apply_auth_mod(context, :valid_roles)

    if not permission_valid?(valid_roles, permitted_roles) do
      raise """
      Invalid permission passed to QueryAuthorization: #{inspect(permitted_roles)}.
      Allowed permission: #{inspect(valid_roles)}.
      """
    end
  end

  # A list of roles is valid only if every entry is individually valid.
  defp permission_valid?(valid_roles, permitted_roles) when is_list(permitted_roles) do
    Enum.all?(permitted_roles, fn role -> permission_valid?(valid_roles, role) end)
  end

  # A single role is valid when it belongs to the declared role set.
  defp permission_valid?(valid_roles, permitted_role) when is_atom(permitted_role) do
    permitted_role in valid_roles
  end

  # Authorized: let the resolution continue untouched.
  defp update_result(true, resolution), do: resolution

  # Unauthorized: short-circuit resolution with the configured error message.
  defp update_result(false, %{context: context} = resolution) do
    message = Rajska.apply_auth_mod(context, :unauthorized_message, [resolution])
    Resolution.put_result(resolution, {:error, message})
  end
end
|
lib/middlewares/query_authorization.ex
| 0.796451 | 0.776623 |
query_authorization.ex
|
starcoder
|
defmodule Solid.Tag do
@moduledoc """
Control flow tags can change the information Liquid shows using programming logic.
More info: https://shopify.github.io/liquid/tags/control-flow/
"""
alias Solid.{Expression, Argument, Context}
defmodule CustomTag do
@moduledoc """
This module define behaviour for custom tag.
To implement new custom tag you need to create new module that implement `CustomTag` behaviour:
defmodule MyCustomTag do
import NimbleParsec
@behaviour Solid.Tag.CustomTag
@impl true
def spec() do
space = Solid.Parser.Literal.whitespace(min: 0)
ignore(string("{%"))
|> ignore(space)
|> ignore(string("my_tag"))
|> ignore(space)
|> ignore(string("%}"))
end
@impl true
def render(_context, _binding, _options) do
[text: "my first tag"]
end
end
- `spec` define how to parse your tag
- `render` define how to render your tag
Then add custom tag to your parser
defmodule MyParser do
use Solid.Parser.Base, custom_tag: [{"my_tag", MyCustomTag}]
end
Then pass your tag to render function
"{% my_tag %}"
|> Solid.parse!(parser: MyParser)
|> Solid.render(tags: %{"my_tag" => MyCustomTag})
"""
@type rendered_data :: {:text, binary()} | {:object, keyword()} | {:tag, list()}
@doc """
Build and return NimbleParsec expression to parse your tag. There are some helper expressions that you can use in :
- `Solid.Parser.Literal`
- `Solid.Parser.Variable`
- `Solid.Parser.Argument`
"""
@callback spec() :: NimbleParsec.t()
@doc """
Define how to render your custom tag.
Third argument is options that you pass to `Solid.render/2` function
"""
@callback render(Solid.Context.t(), list(), keyword()) ::
list(rendered_data) | {list(rendered_data), Solid.Context.t()}
end
@doc """
Evaluate a tag and return the condition that succeeded or nil
"""
@spec eval(any, Context.t(), keyword()) :: {iolist | nil, Context.t()}
def eval(tag, context, options) do
case do_eval(tag, context, options) do
{text, context} -> {text, context}
text -> {text, context}
end
end
defp do_eval([], _context, _options), do: nil
defp do_eval([cycle_exp: cycle], context, _options) do
{context, result} = Context.run_cycle(context, cycle)
{[text: result], context}
end
# Evaluates a custom tag parsed as `[custom_tag: [tag_name | tag_data]]`.
#
# Fix: the `Solid.Tag.CustomTag` behaviour declares `render/3`
# (context, binding, options) — see the `@callback` above — and the sibling
# `do_eval([{custom_tag, tag_data}], ...)` clause already calls it with three
# arguments. This clause previously called `render/2`, which crashes any
# behaviour-compliant tag with an UndefinedFunctionError. It now forwards
# `options` and normalizes the result the same way as the sibling clause:
# a bare binary is wrapped as `[text: ...]`, anything else (already a
# rendered_data list, possibly with an updated context) is passed through.
defp do_eval([custom_tag: tag], context, options) do
  [tag_name | tag_data] = tag
  tags = Keyword.get(options, :tags, %{})

  result =
    if Map.has_key?(tags, tag_name) do
      case tags[tag_name].render(context, tag_data, options) do
        text when is_binary(text) -> [text: text]
        rendered -> rendered
      end
    else
      # Unknown tag: render nothing rather than raising.
      nil
    end

  {result, context}
end
defp do_eval([{:if_exp, exp} | _] = tag, context, _options) do
if eval_expression(exp[:expression], context), do: throw({:result, exp})
elsif_exps = tag[:elsif_exps]
if elsif_exps do
result = Enum.find(elsif_exps, &eval_elsif(&1, context))
if result, do: throw({:result, elem(result, 1)})
end
else_exp = tag[:else_exp]
if else_exp, do: throw({:result, else_exp})
catch
{:result, result} -> result[:result]
end
defp do_eval([{:unless_exp, exp} | _] = tag, context, _options) do
unless eval_expression(exp[:expression], context), do: throw({:result, exp})
elsif_exps = tag[:elsif_exps]
if elsif_exps do
result = Enum.find(elsif_exps, &eval_elsif(&1, context))
if result, do: throw({:result, elem(result, 1)})
end
else_exp = tag[:else_exp]
if else_exp, do: throw({:result, else_exp})
catch
{:result, result} -> result[:result]
end
defp do_eval([{:case_exp, field} | [{:whens, when_map} | _]] = tag, context, _options) do
result = when_map[Argument.get(field, context)]
if result do
result
else
tag[:else_exp][:result]
end
end
defp do_eval(
[assign_exp: [field: [field_name], argument: argument, filters: filters]],
context,
_options
) do
new_value = Argument.get(argument, context, filters: filters)
context = %{context | vars: Map.put(context.vars, field_name, new_value)}
{nil, context}
end
defp do_eval(
[capture_exp: [field: [field_name], result: result]],
context,
options
) do
{captured, context} = Solid.render(result, context, options)
context = %{
context
| vars: Map.put(context.vars, field_name, IO.iodata_to_binary(captured))
}
{nil, context}
end
defp do_eval([counter_exp: [{operation, default}, field]], context, _options) do
value = Argument.get([field], context, scopes: [:counter_vars]) || default
{:field, [field_name]} = field
context = %{
context
| counter_vars: Map.put(context.counter_vars, field_name, value + operation)
}
{[text: to_string(value)], context}
end
defp do_eval([break_exp: _], context, _options) do
throw({:break_exp, [], context})
end
defp do_eval([continue_exp: _], context, _options) do
throw({:continue_exp, [], context})
end
defp do_eval(
[
for_exp:
[
{:field, [enumerable_key]},
{:enumerable, enumerable},
{:parameters, parameters} | _
] = exp
],
context,
options
) do
enumerable =
enumerable
|> enumerable(context)
|> apply_parameters(parameters)
do_for(enumerable_key, enumerable, exp, context, options)
end
defp do_eval([raw_exp: raw], context, _options) do
{[text: raw], context}
end
# Evaluates a `render` tag: resolves the template name and named arguments
# from the current context, loads the template source through the configured
# file system, parses it, and renders it with only the named arguments as
# bindings (the parent context's variables are deliberately not passed down).
defp do_eval(
       [render_exp: [template: template_binding, arguments: argument_binding]],
       context,
       options
     ) do
  template = Argument.get(template_binding, context)
  # Flatten the `name: value` pairs into a plain map used as the rendered
  # template's variable scope. `argument_binding` may be nil when the tag
  # has no arguments.
  binding_vars =
    Keyword.get(argument_binding || [], :named_arguments, [])
    |> Argument.parse_named_arguments(context)
    |> Enum.concat()
    |> Map.new()
  # Default file system cannot load anything unless the caller configured
  # one via `options[:file_system]`.
  {file_system, instance} = options[:file_system] || {Solid.BlankFileSystem, nil}
  template_str = file_system.read_template_file(template, instance)
  template = Solid.parse!(template_str, options)
  # NOTE(review): the capture_exp clause destructures `Solid.render/3` as
  # `{captured, context}`, while here the whole return value is embedded as
  # text — confirm whether a `{rendered_text, _ctx}` destructure is missing.
  rendered_text = Solid.render(template, binding_vars, options)
  {[text: rendered_text], context}
end
defp do_eval([{custom_tag, tag_data}], context, options) do
tags = Keyword.get(options, :tags, %{})
if(Map.has_key?(tags, custom_tag)) do
case tags[custom_tag].render(context, tag_data, options) do
text when is_binary(text) -> [text: text]
result -> result
end
else
[text: nil]
end
end
defp do_for(_, [], exp, context, _options) do
exp = Keyword.get(exp, :else_exp)
{exp[:result], context}
end
defp do_for(enumerable_key, enumerable, exp, context, options) do
exp = Keyword.get(exp, :result)
length = Enum.count(enumerable)
{result, context} =
enumerable
|> Enum.with_index(0)
|> Enum.reduce({[], context}, fn {v, index}, {acc_result, acc_context_initial} ->
acc_context =
acc_context_initial
|> set_enumerable_value(enumerable_key, v)
|> maybe_put_forloop_map(enumerable_key, index, length)
try do
{result, acc_context} = Solid.render(exp, acc_context, options)
acc_context = restore_initial_forloop_value(acc_context, acc_context_initial)
{[result | acc_result], acc_context}
catch
{:break_exp, partial_result, context} ->
throw({:result, [partial_result | acc_result], context})
{:continue_exp, partial_result, context} ->
{[partial_result | acc_result], context}
end
end)
context = %{context | iteration_vars: Map.delete(context.iteration_vars, enumerable_key)}
{[text: Enum.reverse(result)], context}
catch
{:result, result, context} ->
context = %{context | iteration_vars: Map.delete(context.iteration_vars, enumerable_key)}
{[text: Enum.reverse(result)], context}
end
defp set_enumerable_value(acc_context, key, value) do
iteration_vars = Map.put(acc_context.iteration_vars, key, value)
%{acc_context | iteration_vars: iteration_vars}
end
defp maybe_put_forloop_map(acc_context, key, index, length) when key != "forloop" do
map = build_forloop_map(index, length)
iteration_vars = Map.put(acc_context.iteration_vars, "forloop", map)
%{acc_context | iteration_vars: iteration_vars}
end
defp maybe_put_forloop_map(acc_context, _key, _index, _length) do
acc_context
end
defp build_forloop_map(index, length) do
%{
"index" => index + 1,
"index0" => index,
"rindex" => length - index,
"rindex0" => length - index - 1,
"first" => index == 0,
"last" => length == index + 1,
"length" => length
}
end
defp restore_initial_forloop_value(acc_context, %{
iteration_vars: %{"forloop" => initial_forloop}
}) do
iteration_vars = Map.put(acc_context.iteration_vars, "forloop", initial_forloop)
%{acc_context | iteration_vars: iteration_vars}
end
defp restore_initial_forloop_value(acc_context, _) do
acc_context
end
defp enumerable([range: [first: first, last: last]], context) do
first = integer_or_field(first, context)
last = integer_or_field(last, context)
first..last
end
defp enumerable(field, context), do: Argument.get(field, context) || []
defp apply_parameters(enumerable, parameters) do
enumerable
|> offset(parameters)
|> limit(parameters)
|> reversed(parameters)
end
defp offset(enumerable, %{offset: offset}) do
Enum.slice(enumerable, offset..-1)
end
defp offset(enumerable, _), do: enumerable
defp limit(enumerable, %{limit: limit}) do
Enum.slice(enumerable, 0..(limit - 1))
end
defp limit(enumerable, _), do: enumerable
defp reversed(enumerable, %{reversed: _}) do
Enum.reverse(enumerable)
end
defp reversed(enumerable, _), do: enumerable
defp integer_or_field(value, _context) when is_integer(value), do: value
defp integer_or_field(field, context), do: Argument.get([field], context)
defp eval_elsif({:elsif_exp, elsif_exp}, context) do
eval_expression(elsif_exp[:expression], context)
end
defp eval_expression(exps, context), do: Expression.eval(exps, context)
end
|
lib/solid/tag.ex
| 0.88387 | 0.529203 |
tag.ex
|
starcoder
|
defmodule Bolt.Cogs.BanRange do
  @moduledoc false

  @behaviour Nosedrum.Command

  alias Bolt.Constants
  alias Bolt.Moderation
  alias Bolt.Paginator
  alias Nosedrum.Predicates
  alias Nostrum.Api
  alias Nostrum.Cache.GuildCache
  alias Nostrum.Struct.Embed
  alias Nostrum.Struct.Guild
  alias Nostrum.Struct.User

  @impl true
  def usage,
    do: [
      "banrange <lower:snowflake> [to] <upper:snowflake> [reason:str...]",
      "banrange from <lower:snowflake> [reason:str...]"
    ]

  @impl true
  def description,
    do: """
    Ban a range of users by user ID. Infractions will be stored in the database.
    Requires the `BAN_MEMBERS` permission.
    **This command bans all selected members without confirmation**.
    Use the `uidrange` command to see who would be affected.
    **Examples**:
    ```rs
    // Ban all users with ID >= 12345
    banrange from 12345
    // Ban all users with ID >= 12345 and <= 21479
    banrange 12345 to 21479
    // Same as above, but provide a reason for the infraction database
    banrange 12345 to 21479 raid bots
    ```
    """

  @impl true
  def predicates,
    do: [&Predicates.guild_only/1, Predicates.has_permission(:ban_members)]

  # Open-ended form: ban every cached member whose snowflake is >= `lower`.
  @impl true
  def command(msg, ["from", lower | reason_list]) do
    reason = Enum.join(reason_list, " ")

    case Integer.parse(lower) do
      {start, ""} ->
        msg.guild_id
        |> GuildCache.select!(& &1.members)
        |> Stream.filter(fn {flake, _member} -> flake >= start end)
        |> execute(msg.guild_id, msg.author, reason)
        |> display(msg)

      :error ->
        Api.create_message!(msg.channel_id, "🚫 invalid snowflake, sorry")
    end
  end

  # No infinite recursion is possible here: If `banrange a to to` is run,
  # we just head into this function twice, chopping out the "to" in each call.
  def command(msg, [lower, "to", upper | reason_list]) do
    command(msg, [lower, upper | reason_list])
  end

  # Closed range: ban every cached member with lower <= snowflake <= upper.
  def command(msg, [lower, upper | reason_list]) do
    reason = Enum.join(reason_list, " ")

    with {start, ""} <- Integer.parse(lower),
         {stop, ""} <- Integer.parse(upper) do
      msg.guild_id
      |> GuildCache.select!(& &1.members)
      |> Stream.filter(fn {flake, _member} -> flake >= start and flake <= stop end)
      |> execute(msg.guild_id, msg.author, reason)
      |> display(msg)
    else
      :error ->
        Api.create_message!(msg.channel_id, "🚫 invalid snowflakes, sorry")
    end
  end

  # Fallback: show usage for any unparseable invocation.
  def command(msg, _args) do
    response = "ℹ️ usage:\n```\n#{Enum.join(usage(), "\n")}\n```"
    Api.create_message!(msg.channel_id, response)
  end

  # Bans each selected `{snowflake, member}` entry and formats the outcomes
  # into paginator-ready embeds, 15 result lines per page.
  #
  # Fixed @spec: the previous spec claimed `([User.id()], ...) :: {:ok, Message.t()}`,
  # but the function receives an enumerable of `{User.id(), member}` tuples
  # (filtered from the guild member cache) and returns a list of embed pages.
  @spec execute(Enumerable.t(), Guild.id(), User.t(), String.t()) :: [Embed.t()]
  defp execute(targets, guild_id, actor, reason) do
    targets
    |> Stream.map(fn {snowflake, _member} -> snowflake end)
    |> Stream.map(&Moderation.ban("#{&1}", guild_id, actor, reason))
    |> Stream.map(&format_entry/1)
    |> Stream.chunk_every(15)
    |> Enum.map(&%Embed{description: Enum.join(&1, "\n")})
  end

  # One human-readable line per ban attempt.
  defp format_entry({:ok, infraction, user}) do
    "- successfully banned #{user} (##{infraction.id})"
  end

  defp format_entry({:error, reason, user}) do
    "- failed to ban #{user} (#{reason})"
  end

  # Sends the result pages through the paginator as a reply to `message`.
  def display(pages, message) do
    base_page = %Embed{
      title: "Ranged ban results",
      color: Constants.color_blue()
    }

    Paginator.paginate_over(message, base_page, pages)
  end
end
|
lib/bolt/cogs/banrange.ex
| 0.810254 | 0.606324 |
banrange.ex
|
starcoder
|
defmodule ModelFox do
@moduledoc """
This is the main module in the `modelfox` package.
"""
defmodule Model do
@moduledoc """
Use this struct to load a model, make predictions, and log events to the app.
"""
@type t :: %__MODULE__{
model: reference,
log_queue: [ModelFox.event()],
modelfox_url: String.t()
}
defstruct [
:model,
:log_queue,
:modelfox_url
]
end
defmodule LoadModelOptions do
@moduledoc """
These are the options passed when loading a model.
## `modelfox_url`
If you are running the app locally or on your own server, use this field to provide the url to it. If not specified, the default value is https://app.modelfox.dev.
"""
@type t :: %__MODULE__{
modelfox_url: String.t()
}
defstruct [
:modelfox_url
]
end
@typedoc """
This is the input type of `ModelFox.predict`. A predict input is a map from atoms or strings to strings or floats. The keys should match the columns in the CSV file you trained your model with.
"""
@type predict_input :: %{(atom | String.t()) => String.t() | float}
defmodule PredictOptions do
@moduledoc """
These are the options passed to `ModelFox.predict`.
## `threshold`
If your model is a binary classifier, use this field to make predictions using the threshold you chose on the tuning page of the app. The default value is `0.5`.
## `compute_feature_contributions`
Computing feature contributions is disabled by default. If you set this field to `true`, you will be able to access the feature contributions with the `feature_contributions` field of the predict output.
"""
@type t :: %__MODULE__{
threshold: float,
compute_feature_contributions: boolean
}
@derive Jason.Encoder
defstruct [
threshold: 0.5,
compute_feature_contributions: false
]
end
@typedoc """
This is the return type of `ModelFox.predict`.
"""
@type predict_output ::
{:regression, RegressionPredictOutput.t()}
| {:binary_classification, BinaryClassificationPredictOutput.t()}
| {:multiclass_classification, MulticlassClassificationPredictOutput.t()}
defmodule RegressionPredictOutput do
@moduledoc """
`ModelFox.predict` outputs `{:regression, RegressionPredictOutput.t()}` when the model's task is regression.
## `value`
This is the predicted value.
## `feature_contributions`
If computing feature contributions was enabled in the predict options, this value will explain the model's output, showing how much each feature contributed to the output.
"""
@type t :: %__MODULE__{
value: float,
feature_contributions: FeatureContributions.t() | nil
}
@derive {Jason.Encoder, except: [:feature_contributions]}
defstruct [
:value,
:feature_contributions
]
end
defmodule BinaryClassificationPredictOutput do
@moduledoc """
`ModelFox.predict` outputs `{:binary_classification, BinaryClassificationPredictOutput.t()}` when the model's task is binary classification.
## `class_name`
This is the name of the predicted class.
## `probability`
This is the probability the model assigned to the predicted class.
## `feature_contributions`
If computing feature contributions was enabled in the predict options, this value will explain the model's output, showing how much each feature contributed to the output.
"""
@type t :: %__MODULE__{
class_name: String.t(),
probability: float,
feature_contributions: FeatureContributions.t() | nil
}
@derive {Jason.Encoder, except: [:feature_contributions]}
defstruct [
:class_name,
:probability,
:feature_contributions
]
end
defmodule MulticlassClassificationPredictOutput do
@moduledoc """
`ModelFox.predict` outputs `{:multiclass_classification, MulticlassClassificationPredictOutput.t()}` when the model's task is multiclass classification.
## `class_name`
This is the name of the predicted class.
## `probability`
This is the probability the model assigned to the predicted class.
## `probabilities`
This value maps from class names to the probability the model assigned to each class.
## `feature_contributions`
If computing feature contributions was enabled in the predict options, this value will explain the model's output, showing how much each feature contributed to the output. This value maps from class names to `FeatureContributions` values for each class. The class with the `FeatureContributions` value with the highest `output_value` is the predicted class.
"""
@type t :: %__MODULE__{
class_name: String.t(),
probability: float,
probabilities: [float],
feature_contributions: FeatureContributions.t() | nil
}
@derive {Jason.Encoder, except: [:feature_contributions]}
defstruct [
:class_name,
:probability,
:probabilities,
:feature_contributions
]
end
defmodule FeatureContributions do
@moduledoc """
This is a description of the feature contributions for the prediction if the task is regression or binary classification, or for a single class if the task is multiclass classification.
## `baseline_value`
This is the value the model would output if all features had baseline values.
## `output_value`
This is the value the model output. Any difference from the `baseline_value` is because of the deviation of the features from their baseline values.
## `entries`
This list will contain one entry for each of the model's features. Note that features are computed from columns, so there will likely be more features than columns.
"""
@type t :: %__MODULE__{
baseline_value: float,
output_value: float,
entries: [ModelFox.feature_contribution_entry()]
}
defstruct [
:baseline_value,
:output_value,
:entries
]
end
@typedoc """
This identifies the type of a feature contribution.
"""
@type feature_contribution_entry ::
{:identity, IdentityFeatureContribution.t()}
| {:normalized, NormalizedFeatureContribution.t()}
| {:one_hot_encoded, OneHotEncodedFeatureContribution.t()}
| {:bag_of_words, BagOfWordsFeatureContribution.t()}
| {:bag_of_words_cosine_similarity, BagOfWordsCosineSimilarityFeatureContribution.t()}
| {:word_embedding, WordEmbeddingFeatureContribution.t()}
defmodule IdentityFeatureContribution do
@moduledoc """
This describes the contribution of a feature from an identity feature group.
## `column_name`
This is the name of the source column for the identity feature group.
## `feature_value`
This is the value of the feature.
## `feature_contribution_value`
This is the amount that the feature contributed to the output.
"""
@type t :: %__MODULE__{
column_name: String.t(),
feature_value: float,
feature_contribution_value: float
}
defstruct [
:column_name,
:feature_value,
:feature_contribution_value
]
end
defmodule NormalizedFeatureContribution do
@moduledoc """
This describes the contribution of a feature from a normalized feature group.
## `column_name`
This is the name of the source column for the normalized feature group.
## `feature_value`
This is the value of the feature.
## `feature_contribution_value`
This is the amount that the feature contributed to the output.
"""
@type t :: %__MODULE__{
column_name: String.t(),
feature_value: float,
feature_contribution_value: float
}
defstruct [
:column_name,
:feature_value,
:feature_contribution_value
]
end
defmodule OneHotEncodedFeatureContribution do
@moduledoc """
This describes the contribution of a feature from a one hot encoded feature group.
## `column_name`
This is the name of the source column for the one hot encoded feature group.
## `variant`
This is the enum variant the feature indicates the presence of.
## `feature_value`
This is the value of the feature.
## `feature_contribution_value`
This is the amount that the feature contributed to the output.
"""
@type t :: %__MODULE__{
column_name: String.t(),
variant: String.t(),
feature_value: float,
feature_contribution_value: float
}
defstruct [
:column_name,
:variant,
:feature_value,
:feature_contribution_value
]
end
defmodule BagOfWordsFeatureContribution do
@moduledoc """
This describes the contribution of a feature from a bag of words feature group.
## `column_name`
This is the name of the source column for the bag of words feature group.
## `ngram`
This is the ngram for the feature.
## `feature_value`
This is the value of the feature.
## `feature_contribution_value`
This is the amount that the feature contributed to the output.
"""
@type ngram :: String.t() | {String.t(), String.t()}
@type t :: %__MODULE__{
column_name: String.t(),
ngram: ngram,
feature_value: float,
feature_contribution_value: float
}
defstruct [
:column_name,
:ngram,
:feature_value,
:feature_contribution_value
]
end
defmodule BagOfWordsCosineSimilarityFeatureContribution do
@moduledoc """
This describes the contribution of a feature from a bag of words cosine similarity feature group.
## `column_name_a`
This is the name of the source column a for the bag of words cosine similarity feature group.
## `column_name_b`
This is the name of the source column b for the bag of words cosine similarity feature group.
## `feature_value`
This is the value of the feature.
## `feature_contribution_value`
This is the amount that the feature contributed to the output.
"""
@type ngram :: String.t() | {String.t(), String.t()}
@type t :: %__MODULE__{
column_name_a: String.t(),
column_name_b: String.t(),
feature_value: float,
feature_contribution_value: float
}
defstruct [
:column_name_a,
:column_name_b,
:feature_value,
:feature_contribution_value
]
end
defmodule WordEmbeddingFeatureContribution do
@moduledoc """
This describes the contribution of a feature from a word embedding feature group.
## `column_name`
This is the name of the source column for the word embedding feature group.
## `value_index`
This is the index of the feature in the word embedding.
## `feature_contribution_value`
This is the amount that the feature contributed to the output.
"""
@type t :: %__MODULE__{
column_name: String.t(),
value_index: integer,
feature_contribution_value: float
}
defstruct [
:column_name,
:value_index,
:feature_contribution_value
]
end
@type true_value :: String.t() | float
defmodule LogPredictionArgs do
@moduledoc """
This is the type of the argument to `ModelFox.log_prediction` and `ModelFox.enqueue_log_prediction` which specifies the details of the prediction to log.
## `identifier`
This is a unique identifier for the prediction, which will associate it with a true value event and allow you to look it up in the app.
## `input`
This is the same `ModelFox.predict_input` value that you passed to `ModelFox.predict`.
## `options`
This is the same `ModelFox.PredictOptions` value that you passed to `ModelFox.predict`.
## `output`
This is the output returned by `ModelFox.predict`.
"""
@type t :: %__MODULE__{
identifier: String.t(),
input: ModelFox.predict_input(),
options: PredictOptions.t() | nil,
output: ModelFox.predict_output()
}
defstruct [
:identifier,
:input,
:options,
:output
]
end
defmodule LogTrueValueArgs do
@moduledoc """
This is the type of the argument to `ModelFox.log_true_value` and `ModelFox.enqueue_log_true_value` which specifies the details of the true value to log.
## `identifier`
This is a unique identifier for the prediction, which will associate it with a true value event and allow you to look it up in the app.
## `true_value`
This is the true value for the prediction.
"""
@type t :: %__MODULE__{
identifier: String.t(),
true_value: ModelFox.true_value()
}
defstruct [
:identifier,
:true_value
]
end
@type event :: PredictionEvent.t() | TrueValueEvent.t()
defmodule PredictionEvent do
@moduledoc """
"""
@type t :: %__MODULE__{
type: :prediction,
model_id: String.t(),
date: String.t(),
identifier: String.t(),
input: ModelFox.predict_input(),
options: PredictOptions.t() | nil,
output: ModelFox.predict_output()
}
@derive Jason.Encoder
defstruct [
:type,
:model_id,
:date,
:identifier,
:input,
:options,
:output
]
end
defmodule TrueValueEvent do
@moduledoc """
"""
@type t :: %__MODULE__{
type: :true_value,
model_id: String.t(),
date: String.t(),
identifier: String.t(),
true_value: ModelFox.true_value()
}
@derive Jason.Encoder
defstruct [
:type,
:model_id,
:date,
:identifier,
:true_value
]
end
# `@on_load` makes the BEAM invoke `init/0` whenever this module is loaded;
# if it does not return :ok, loading the module fails.
@on_load {:init, 0}
# Selects and loads the platform-specific NIF shared library from the
# package's priv directory, matching on the Erlang system architecture
# string (e.g. "x86_64-unknown-linux-gnu"). Raises for unsupported
# CPU/OS combinations.
def init do
  sys_arch = to_string(:erlang.system_info(:system_architecture))
  nif_path =
    cond do
      String.match?(sys_arch, ~r/x86_64-(pc|unknown)-linux-gnu/) ->
        "x86_64-linux-gnu/libmodelfox_elixir"
      String.match?(sys_arch, ~r/(aarch64|arm)-(pc|unknown)-linux-gnu/) ->
        "aarch64-linux-gnu/libmodelfox_elixir"
      String.match?(sys_arch, ~r/x86_64-(alpine|pc)-linux-musl/) ->
        "x86_64-linux-musl/libmodelfox_elixir"
      String.match?(sys_arch, ~r/(aarch64|arm)-(alpine|pc)-linux-musl/) ->
        "aarch64-linux-musl/libmodelfox_elixir"
      String.match?(sys_arch, ~r/x86_64-apple-darwin[0-9]+\.[0-9]+\.[0-9]+/) ->
        "x86_64-macos/libmodelfox_elixir"
      String.match?(sys_arch, ~r/(aarch64|arm)-apple-darwin[0-9]+\.[0-9]+\.[0-9]+/) ->
        "aarch64-macos/libmodelfox_elixir"
      String.match?(sys_arch, ~r/win32/) ->
        # Windows shared libraries are not prefixed with "lib".
        "x86_64-windows-msvc/modelfox_elixir"
      true ->
        raise "ModelFox for Elixir does not yet support your combination of CPU architecture and operating system. Open an issue at https://github.com/modelfoxdotdev/modelfox/issues/new or email us at <EMAIL> to complain."
    end
  path = :filename.join(:code.priv_dir(:modelfox), nif_path)
  # The pattern match asserts the NIF loaded; any error crashes module load.
  :ok = :erlang.load_nif(path, nil)
end
@doc """
Load a model from a `.modelfox` file at `path`.
"""
@spec load_model_from_path(String.t(), LoadModelOptions | nil) :: Model.t()
def load_model_from_path(path, options \\ nil) do
model = _load_model_from_path(path)
modelfox_url = if options, do: options.modelfox_url, else: "https://app.modelfox.dev"
%Model{
model: model,
log_queue: [],
modelfox_url: modelfox_url
}
end
@doc """
Load a model from a binary instead of a file. You should use this only if you already have a `.modelfox` loaded into memory. Otherwise, use `ModelFox.load_model_from_path`, which is faster because it memory maps the file.
"""
@spec load_model_from_binary(String.t(), LoadModelOptions | nil) :: Model.t()
def load_model_from_binary(binary, options \\ nil) do
model = _load_model_from_binary(binary)
modelfox_url = if options, do: options.modelfox_url, else: "https://app.modelfox.dev"
%Model{
model: model,
log_queue: [],
modelfox_url: modelfox_url
}
end
@doc """
Retrieve the model's id.
"""
@spec model_id(Model.t()) :: String.t()
def model_id(model) do
_model_id(model.model)
end
@doc """
Make a prediction!
"""
@spec predict(Model.t(), ModelFox.predict_input(), PredictOptions.t() | nil) ::
ModelFox.predict_output()
def predict(model, input, options \\ nil) do
_predict(model.model, input, options)
end
@doc """
Send a prediction event to the app. If you want to batch events, you can use `ModelFox.enqueue_log_prediction` instead.
"""
@spec log_prediction(Model.t(), LogPredictionArgs.t()) :: {:ok, any} | {:error, any}
def log_prediction(model, args) do
event = prediction_event(model, args)
log_events(model.modelfox_url, [event])
end
@doc """
Add a prediction event to the queue. Remember to call `ModelFox.flush_log_queue` at a later point to send the event to the app.
"""
@spec enqueue_log_prediction(Model.t(), LogPredictionArgs.t()) :: Model.t()
def enqueue_log_prediction(model, args) do
event = prediction_event(model, args)
%{model | log_queue: model.log_queue ++ [event]}
end
@doc """
Send a true value event to the app. If you want to batch events, you can use `ModelFox.enqueue_log_true_value` instead.
"""
@spec log_true_value(Model.t(), LogTrueValueArgs.t()) :: {:ok, any} | {:error, any}
def log_true_value(model, args) do
event = true_value_event(model, args)
log_events(model.modelfox_url, [event])
end
@doc """
Add a true value event to the queue. Remember to call `ModelFox.flush_log_queue` at a later point to send the event to the app.
"""
@spec enqueue_log_true_value(Model.t(), LogTrueValueArgs.t()) :: Model.t()
def enqueue_log_true_value(model, args) do
event = true_value_event(model, args)
%{model | log_queue: model.log_queue ++ [event]}
end
@doc """
Send all events in the queue to the app.

The HTTP request is skipped entirely when the queue is empty. Note that the
delivery result is discarded: the queue is cleared whether or not the POST
succeeded.
"""
@spec flush_log_queue(Model.t()) :: Model.t()
def flush_log_queue(model) do
  # Robustness fix: previously an empty queue still POSTed "[]" to the app.
  if model.log_queue != [] do
    log_events(model.modelfox_url, model.log_queue)
  end

  %{model | log_queue: []}
end
# Serializes `events` to JSON and POSTs the batch to the app's /track
# endpoint. Returns HTTPoison's `{:ok, response}` / `{:error, reason}`
# untouched — callers decide how to react to delivery failures.
@spec log_events(String.t(), [ModelFox.event()]) :: {:ok, any} | {:error, any}
defp log_events(modelfox_url, events) do
  endpoint = modelfox_url <> "/track"
  payload = Jason.encode!(events)

  HTTPoison.post(endpoint, payload, %{"Content-Type": "application/json"})
end
@spec prediction_event(Model.t(), LogPredictionArgs.t()) :: PredictionEvent.t()
defp prediction_event(model, args) do
model_id = _model_id(model.model)
%PredictionEvent{
date: DateTime.utc_now() |> DateTime.to_iso8601(),
identifier: args.identifier,
input: args.input,
model_id: model_id,
options: args.options,
output: args.output,
type: :prediction
}
end
@spec true_value_event(Model.t(), LogTrueValueArgs.t()) :: TrueValueEvent.t()
defp true_value_event(model, args) do
model_id = _model_id(model.model)
%TrueValueEvent{
date: DateTime.utc_now() |> DateTime.to_iso8601(),
identifier: args.identifier,
model_id: model_id,
true_value: args.true_value,
type: :true_value
}
end
defp _load_model_from_path(_) do
:erlang.nif_error(:nif_not_loaded)
end
defp _load_model_from_binary(_) do
:erlang.nif_error(:nif_not_loaded)
end
defp _model_id(_) do
:erlang.nif_error(:nif_not_loaded)
end
defp _predict(_, _, _) do
:erlang.nif_error(:nif_not_loaded)
end
end
|
languages/elixir/lib/tangram.ex
| 0.944511 | 0.71773 |
tangram.ex
|
starcoder
|
# Generated protobuf (proto3) definitions for the Bigtable Admin v2 Table
# API, built on the `Protobuf` library's `field`/`oneof` macros. Field
# numbers and enum values mirror the upstream table.proto; treat this file
# as generated code and do not edit field tags by hand.
defmodule Google.Bigtable.Admin.V2.Table do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
name: String.t(),
cluster_states: %{String.t() => Google.Bigtable.Admin.V2.Table.ClusterState.t()},
column_families: %{String.t() => Google.Bigtable.Admin.V2.ColumnFamily.t()},
granularity: integer
}
defstruct [:name, :cluster_states, :column_families, :granularity]
field :name, 1, type: :string
field :cluster_states, 2,
repeated: true,
type: Google.Bigtable.Admin.V2.Table.ClusterStatesEntry,
map: true
field :column_families, 3,
repeated: true,
type: Google.Bigtable.Admin.V2.Table.ColumnFamiliesEntry,
map: true
field :granularity, 4, type: Google.Bigtable.Admin.V2.Table.TimestampGranularity, enum: true
end
defmodule Google.Bigtable.Admin.V2.Table.ClusterState do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
replication_state: integer
}
defstruct [:replication_state]
field :replication_state, 1,
type: Google.Bigtable.Admin.V2.Table.ClusterState.ReplicationState,
enum: true
end
defmodule Google.Bigtable.Admin.V2.Table.ClusterState.ReplicationState do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
field :STATE_NOT_KNOWN, 0
field :INITIALIZING, 1
field :PLANNED_MAINTENANCE, 2
field :UNPLANNED_MAINTENANCE, 3
field :READY, 4
end
# Synthetic map-entry message for the `cluster_states` map field.
defmodule Google.Bigtable.Admin.V2.Table.ClusterStatesEntry do
@moduledoc false
use Protobuf, map: true, syntax: :proto3
@type t :: %__MODULE__{
key: String.t(),
value: Google.Bigtable.Admin.V2.Table.ClusterState.t()
}
defstruct [:key, :value]
field :key, 1, type: :string
field :value, 2, type: Google.Bigtable.Admin.V2.Table.ClusterState
end
# Synthetic map-entry message for the `column_families` map field.
defmodule Google.Bigtable.Admin.V2.Table.ColumnFamiliesEntry do
@moduledoc false
use Protobuf, map: true, syntax: :proto3
@type t :: %__MODULE__{
key: String.t(),
value: Google.Bigtable.Admin.V2.ColumnFamily.t()
}
defstruct [:key, :value]
field :key, 1, type: :string
field :value, 2, type: Google.Bigtable.Admin.V2.ColumnFamily
end
defmodule Google.Bigtable.Admin.V2.Table.TimestampGranularity do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
field :TIMESTAMP_GRANULARITY_UNSPECIFIED, 0
field :MILLIS, 1
end
defmodule Google.Bigtable.Admin.V2.Table.View do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
field :VIEW_UNSPECIFIED, 0
field :NAME_ONLY, 1
field :SCHEMA_VIEW, 2
field :REPLICATION_VIEW, 3
field :FULL, 4
end
defmodule Google.Bigtable.Admin.V2.ColumnFamily do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
gc_rule: Google.Bigtable.Admin.V2.GcRule.t()
}
defstruct [:gc_rule]
field :gc_rule, 1, type: Google.Bigtable.Admin.V2.GcRule
end
# Garbage-collection rule: exactly one of the `rule` oneof variants is set.
defmodule Google.Bigtable.Admin.V2.GcRule do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
rule: {atom, any}
}
defstruct [:rule]
oneof :rule, 0
field :max_num_versions, 1, type: :int32, oneof: 0
field :max_age, 2, type: Google.Protobuf.Duration, oneof: 0
field :intersection, 3, type: Google.Bigtable.Admin.V2.GcRule.Intersection, oneof: 0
field :union, 4, type: Google.Bigtable.Admin.V2.GcRule.Union, oneof: 0
end
defmodule Google.Bigtable.Admin.V2.GcRule.Intersection do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
rules: [Google.Bigtable.Admin.V2.GcRule.t()]
}
defstruct [:rules]
field :rules, 1, repeated: true, type: Google.Bigtable.Admin.V2.GcRule
end
defmodule Google.Bigtable.Admin.V2.GcRule.Union do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
rules: [Google.Bigtable.Admin.V2.GcRule.t()]
}
defstruct [:rules]
field :rules, 1, repeated: true, type: Google.Bigtable.Admin.V2.GcRule
end
defmodule Google.Bigtable.Admin.V2.Snapshot do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
name: String.t(),
source_table: Google.Bigtable.Admin.V2.Table.t(),
data_size_bytes: integer,
create_time: Google.Protobuf.Timestamp.t(),
delete_time: Google.Protobuf.Timestamp.t(),
state: integer,
description: String.t()
}
defstruct [
:name,
:source_table,
:data_size_bytes,
:create_time,
:delete_time,
:state,
:description
]
field :name, 1, type: :string
field :source_table, 2, type: Google.Bigtable.Admin.V2.Table
field :data_size_bytes, 3, type: :int64
field :create_time, 4, type: Google.Protobuf.Timestamp
field :delete_time, 5, type: Google.Protobuf.Timestamp
field :state, 6, type: Google.Bigtable.Admin.V2.Snapshot.State, enum: true
field :description, 7, type: :string
end
defmodule Google.Bigtable.Admin.V2.Snapshot.State do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
field :STATE_NOT_KNOWN, 0
field :READY, 1
field :CREATING, 2
end
|
lib/grpc/admin/table.pb.ex
| 0.703244 | 0.46478 |
table.pb.ex
|
starcoder
|
defmodule Cluster.Strategy do
  @moduledoc """
  This module defines the behaviour for implementing clustering strategies.
  """

  # Strategies `use` this module to pick up a default worker `child_spec/1`;
  # they may override it thanks to `defoverridable`.
  defmacro __using__(_) do
    quote do
      @behaviour Cluster.Strategy

      @impl true
      def child_spec(args) do
        %{id: __MODULE__, type: :worker, start: {__MODULE__, :start_link, [args]}}
      end

      defoverridable child_spec: 1
    end
  end

  @type topology :: atom
  @type bad_nodes :: [{node, reason :: term}]
  @type mfa_tuple :: {module, atom, [term]}
  @type strategy_args :: [Cluster.Strategy.State.t()]

  # Required for supervision of the strategy
  @callback child_spec(strategy_args) :: Supervisor.child_spec()
  # Starts the strategy
  @callback start_link(strategy_args) :: {:ok, pid} | :ignore | {:error, reason :: term}

  @doc """
  Given a list of node names, attempts to connect to all of them.

  Returns `:ok` if all nodes connected, or `{:error, [{node, reason}, ..]}`
  if we failed to connect to some nodes.

  All failures are logged.
  """
  @spec connect_nodes(topology, mfa_tuple, mfa_tuple, [atom()]) :: :ok | {:error, bad_nodes}
  def connect_nodes(topology, {_, _, _} = connect, {_, _, _} = list_nodes, nodes)
      when is_list(nodes) do
    {connect_mod, connect_fun, connect_args} = connect
    {list_mod, list_fun, list_args} = list_nodes
    ensure_exported!(list_mod, list_fun, length(list_args))
    # The connect MFA is always applied with exactly one extra argument (the
    # node), so its arity is fixed; validate once instead of per iteration.
    ensure_exported!(connect_mod, connect_fun, length(connect_args) + 1)

    current_node = Node.self()

    # Only try to connect to nodes that are not already connected and are
    # not ourselves.
    need_connect =
      nodes
      |> difference(apply(list_mod, list_fun, list_args))
      |> Enum.reject(fn n -> current_node == n end)

    bad_nodes =
      Enum.reduce(need_connect, [], fn n, acc ->
        fargs = connect_args ++ [n]

        case apply(connect_mod, connect_fun, fargs) do
          true ->
            Cluster.Logger.info(topology, "connected to #{inspect(n)}")
            acc

          false ->
            Cluster.Logger.warn(topology, "unable to connect to #{inspect(n)}")
            [{n, false} | acc]

          :ignored ->
            Cluster.Logger.warn(
              topology,
              "unable to connect to #{inspect(n)}: not part of network"
            )

            [{n, :ignored} | acc]
        end
      end)

    case bad_nodes do
      [] -> :ok
      _ -> {:error, bad_nodes}
    end
  end

  @doc """
  Given a list of node names, attempts to disconnect from all of them.

  Returns `:ok` if all nodes disconnected, or `{:error, [{node, reason}, ..]}`
  if we failed to disconnect from some nodes.

  All failures are logged.
  """
  @spec disconnect_nodes(topology, mfa_tuple, mfa_tuple, [atom()]) :: :ok | {:error, bad_nodes}
  def disconnect_nodes(topology, {_, _, _} = disconnect, {_, _, _} = list_nodes, nodes)
      when is_list(nodes) do
    {disconnect_mod, disconnect_fun, disconnect_args} = disconnect
    {list_mod, list_fun, list_args} = list_nodes
    ensure_exported!(list_mod, list_fun, length(list_args))
    # Same reasoning as in connect_nodes/4: the arity is loop-invariant.
    ensure_exported!(disconnect_mod, disconnect_fun, length(disconnect_args) + 1)

    current_node = Node.self()

    # Only disconnect from nodes we are actually connected to.
    need_disconnect =
      nodes
      |> intersection(apply(list_mod, list_fun, list_args))
      |> Enum.reject(fn n -> current_node == n end)

    bad_nodes =
      Enum.reduce(need_disconnect, [], fn n, acc ->
        fargs = disconnect_args ++ [n]

        case apply(disconnect_mod, disconnect_fun, fargs) do
          true ->
            Cluster.Logger.info(topology, "disconnected from #{inspect(n)}")
            acc

          false ->
            Cluster.Logger.warn(
              topology,
              "disconnect from #{inspect(n)} failed because we're already disconnected"
            )

            acc

          :ignored ->
            Cluster.Logger.warn(
              topology,
              "disconnect from #{inspect(n)} failed because it is not part of the network"
            )

            acc

          reason ->
            Cluster.Logger.warn(
              topology,
              "disconnect from #{inspect(n)} failed with: #{inspect(reason)}"
            )

            [{n, reason} | acc]
        end
      end)

    case bad_nodes do
      [] -> :ok
      _ -> {:error, bad_nodes}
    end
  end

  @doc """
  Set intersection of two node lists. Always returns a list.

  The previous implementation returned a `MapSet` in the general clause but
  a list in the base clauses; the return type is now consistent. Element
  order of the result is unspecified.
  """
  def intersection(_a, []), do: []
  def intersection([], _b), do: []

  def intersection(a, b) when is_list(a) and is_list(b) do
    a |> MapSet.new() |> MapSet.intersection(MapSet.new(b)) |> MapSet.to_list()
  end

  @doc """
  Set difference of two node lists (`a - b`). Always returns a list.

  See `intersection/2` for the return-type note; order is unspecified in
  the general clause.
  """
  def difference(a, []), do: a
  def difference([], _b), do: []

  def difference(a, b) when is_list(a) and is_list(b) do
    a |> MapSet.new() |> MapSet.difference(MapSet.new(b)) |> MapSet.to_list()
  end

  # Raises if the given MFA is not exported; catches configuration mistakes
  # early, with a clear message instead of an UndefinedFunctionError later.
  defp ensure_exported!(mod, fun, arity) do
    if not function_exported?(mod, fun, arity) do
      raise "#{mod}.#{fun}/#{arity} is undefined!"
    end
  end
end
|
lib/strategy/strategy.ex
| 0.841939 | 0.402891 |
strategy.ex
|
starcoder
|
defmodule Grizzly.ZWave.Commands.MeterReport do
  @moduledoc """
  This module implements the command METER_REPORT of the COMMAND_CLASS_METER command class.

  This command is used to advertise the current meter reading at the sending node.

  Params:

    * `:meter_type` - the type of metering physical unit being reported (required)

    * `:scale` - the unit used (required)

    * `:value` - the value being reported (required)

  """

  @behaviour Grizzly.ZWave.Command

  alias Grizzly.ZWave.{Command, DecodeError}
  alias Grizzly.ZWave.CommandClasses.Meter

  @type meter_type :: :electric | :gas | :water | :heating | :cooling
  @type meter_scale :: atom

  @type param ::
          {:meter_type, meter_type} | {:scale, meter_scale} | {:value, number}

  @impl true
  @spec new([param()]) :: {:ok, Command.t()}
  def new(params) do
    command = %Command{
      name: :meter_report,
      command_byte: 0x02,
      command_class: Meter,
      params: params,
      impl: __MODULE__
    }

    {:ok, command}
  end

  # Encodes the report as <<meter_type, precision::3, scale::2, size::3,
  # value bytes>>. The value is scaled to an integer by 10^precision.
  @impl true
  def encode_params(command) do
    meter_type = Command.param!(command, :meter_type)
    meter_type_byte = encode_meter_type(meter_type)
    scale_byte = encode_meter_scale(Command.param!(command, :scale), meter_type)
    value = Command.param!(command, :value)
    precision = precision(value)
    int_value = round(value * :math.pow(10, precision))
    # Byte width of the encoded value. The previous computation,
    # `ceil(:math.log2(int_value) / 8)`, returned 0 for a value of 1
    # (encoding zero bytes), undersized exact powers of 256 (256 needs two
    # bytes but log2(256)/8 == 1), and crashed on 0. Counting the bytes of
    # the unsigned binary representation is exact.
    size_bytes = value_byte_size(int_value)

    # NOTE(review): scale codes 0x04..0x07 (:v, :a, :power_factor, :mst) do
    # not fit this 2-bit field and would be silently truncated here; later
    # Meter command class versions carry the extra scale bits elsewhere.
    # Confirm against the COMMAND_CLASS_METER version in use.
    <<meter_type_byte, precision::size(3), scale_byte::size(2), size_bytes::size(3),
      int_value::size(size_bytes)-unit(8)>>
  end

  @impl true
  @spec decode_params(binary()) :: {:ok, [param()]} | {:error, DecodeError.t()}
  def decode_params(
        <<_::size(3), meter_type_byte::size(5), precision::size(3), scale_byte::size(2),
          size::size(3), int_value::size(size)-unit(8), _::binary>>
      ) do
    with {:ok, meter_type} <- decode_meter_type(meter_type_byte),
         {:ok, scale} <- decode_meter_scale(scale_byte, meter_type) do
      # Undo the precision scaling applied by the sender.
      value = int_value / :math.pow(10, precision)
      {:ok, [meter_type: meter_type, scale: scale, value: value]}
    else
      {:error, %DecodeError{}} = error ->
        error
    end
  end

  # Number of decimal places in `value`. Relies on the default string
  # representation, so scientific notation (e.g. 1.0e-5) would be
  # miscounted — assumed not to occur for meter readings.
  defp precision(value) when is_number(value) do
    case String.split("#{value}", ".") do
      [_] -> 0
      [_, dec] -> String.length(dec)
    end
  end

  # Bytes required for the unsigned integer value; minimum 1 so a zero
  # reading still encodes one byte.
  defp value_byte_size(0), do: 1

  defp value_byte_size(int) when is_integer(int) and int > 0 do
    int |> :binary.encode_unsigned() |> byte_size()
  end

  defp encode_meter_type(:electric), do: 0x01
  defp encode_meter_type(:gas), do: 0x02
  defp encode_meter_type(:water), do: 0x03
  defp encode_meter_type(:heating), do: 0x04
  defp encode_meter_type(:cooling), do: 0x05

  defp encode_meter_scale(:kwh, :electric), do: 0x00
  defp encode_meter_scale(:kvah, :electric), do: 0x01
  defp encode_meter_scale(:w, :electric), do: 0x02
  defp encode_meter_scale(:pulse_count, :electric), do: 0x03
  defp encode_meter_scale(:v, :electric), do: 0x04
  defp encode_meter_scale(:a, :electric), do: 0x05
  defp encode_meter_scale(:power_factor, :electric), do: 0x06
  defp encode_meter_scale(:mst, :electric), do: 0x07
  defp encode_meter_scale(:cubic_meters, :gas), do: 0x00
  defp encode_meter_scale(:cubic_feet, :gas), do: 0x01
  defp encode_meter_scale(:pulse_count, :gas), do: 0x03
  defp encode_meter_scale(:mst, :gas), do: 0x07
  defp encode_meter_scale(:cubic_meters, :water), do: 0x00
  defp encode_meter_scale(:cubic_feet, :water), do: 0x00
  defp encode_meter_scale(:us_gallons, :water), do: 0x02
  defp encode_meter_scale(:pulse_count, :water), do: 0x03
  defp encode_meter_scale(:mst, :water), do: 0x07
  defp encode_meter_scale(:kwh, :heating), do: 0x00
  defp encode_meter_scale(:kwh, :cooling), do: 0x00

  defp decode_meter_type(0x01), do: {:ok, :electric}
  defp decode_meter_type(0x02), do: {:ok, :gas}
  defp decode_meter_type(0x03), do: {:ok, :water}
  defp decode_meter_type(0x04), do: {:ok, :heating}
  defp decode_meter_type(0x05), do: {:ok, :cooling}

  defp decode_meter_type(byte),
    do: {:error, %DecodeError{value: byte, param: :meter_type, command: :meter_report}}

  defp decode_meter_scale(0x00, :electric), do: {:ok, :kwh}
  defp decode_meter_scale(0x01, :electric), do: {:ok, :kvah}
  defp decode_meter_scale(0x02, :electric), do: {:ok, :w}
  defp decode_meter_scale(0x03, :electric), do: {:ok, :pulse_count}
  # Scale codes >= 0x04 cannot appear in the 2-bit field decoded above, so
  # these clauses remain disabled until multi-bit scale support is added.
  # defp decode_meter_scale(0x04, :electric), do: {:ok,:v }
  # defp decode_meter_scale(0x05, :electric), do: {:ok, :a}
  # defp decode_meter_scale(0x06, :electric), do: {:ok,:power_factor }
  # defp decode_meter_scale(0x07, :electric), do: {:ok, :mst}
  defp decode_meter_scale(0x00, :gas), do: {:ok, :cubic_meters}
  defp decode_meter_scale(0x01, :gas), do: {:ok, :cubic_feet}
  defp decode_meter_scale(0x03, :gas), do: {:ok, :pulse_count}
  # defp decode_meter_scale(0x07, :gas), do: {:ok, :mst}
  defp decode_meter_scale(0x00, :water), do: {:ok, :cubic_meters}
  defp decode_meter_scale(0x01, :water), do: {:ok, :cubic_feet}
  defp decode_meter_scale(0x02, :water), do: {:ok, :us_gallons}
  defp decode_meter_scale(0x03, :water), do: {:ok, :pulse_count}
  # defp decode_meter_scale(0x07, :water), do: {:ok, :mst}
  defp decode_meter_scale(0x00, :heating), do: {:ok, :kwh}
  defp decode_meter_scale(0x00, :cooling), do: {:ok, :kwh}

  defp decode_meter_scale(byte, _),
    do: {:error, %DecodeError{value: byte, param: :meter_type, command: :meter_scale}}
end
|
lib/grizzly/zwave/commands/meter_report.ex
| 0.892659 | 0.435361 |
meter_report.ex
|
starcoder
|
defmodule LayoutOMatic.Layouts.Grid do
  import Scenic.Primitives

  @moduledoc """
  Add a grid to a viewport.

  Grids allow you to segment a viewport much like a CSS grid. This allows for
  clear and symantic layouts. Creating a grid is as simple as passing a
  %GridBuilder{} with some values and your viewport will have a grid ready to
  be used.

  ## Data
  * `:viewport` - The viewport struct you want a grid drawn to.
  * `:grid_template` - The type and size of columns for the grid. *Required field*.
    * `{:equal, number_of_equal_columns}` - Indicates columns will be equally sized and how many of them to be drawn
    * `{:percentage, percentage_of_viewport}` - Indicates columns will be a percentage of the viewport and what percentage of the viewport. This option is a list of percentages which cannot exceed 100%.
    * `{:relative, percentage_relative_to_object}` - Indicates columns will be drawn relative to another object. This could be used to draw a grid relative to another primitive of component as well as another grid.
  * `:max_xy` - The maximum {x,y} the grid should fit into. This will likely be the viewport size in an inital graph. Default `{700, 600}`. This is the default viewport size for a new scenic app.
  * `:starting_xy` - The {x,y} the grid should start at. Default is {0, 0}.
  * `:grid_ids` - The ids used for each segment of the grid in order to recall the segment later in order to assign a list of objects to it for layouts. Symantically named ids is recommneded. *Required field*
  * `:opts` - A list of additional options
    * `:draw` - Boolean to determine if the grid should be drawn or not. Useful for making sure objects are falling where expected. Default is `false`.
  """

  defmodule Error do
    @moduledoc false
    defexception message: nil, data: nil
  end

  defmodule GridBuilder do
    @moduledoc false
    @enforce_keys [:grid_template, :grid_ids]
    defstruct viewport: %{},
              grid_template: [{:equal, 1}],
              # This should come off the viewport map passed in.
              max_xy: {700, 600},
              starting_xy: {0, 0},
              grid_ids: nil,
              column_sizes: nil,
              opts: [draw: false]
  end

  @doc """
  Computes column sizes from the grid template and returns the grid's group
  specs, ready to be added to a graph.
  """
  def add_grid(%{} = grid) do
    # NOTE(review): the original code called `struct(GridBuilder, grid)` here
    # and discarded the result, so `@enforce_keys` was never enforced. The
    # dead call has been removed; if validation was intended, bind the
    # result instead.
    {starting_x, _} = Map.get(grid, :starting_xy)
    {max_x, _} = Map.get(grid, :max_xy)

    column_sizes =
      Enum.map(Map.get(grid, :grid_template), fn t ->
        case elem(t, 0) do
          # NOTE(review): the moduledoc documents this template type as
          # `:percentage`, but the code matches `:percent` — confirm which
          # atom callers actually pass before renaming either side.
          :percent ->
            trunc(elem(t, 1) / 100 * max_x - starting_x)

          :equal ->
            Enum.map(1..elem(t, 1), fn _ ->
              div(max_x, elem(t, 1))
            end)

          :relative ->
            trunc(elem(t, 1) / 100 * max_x)
        end
      end)

    Map.put(
      grid,
      :column_sizes,
      List.flatten(column_sizes)
    )
    |> get_x_coordinates()
  end

  # Walks the id/size pairs left-to-right, threading the running x offset in
  # the accumulator so each column starts where the previous one ended.
  defp get_x_coordinates(grid) do
    ids_and_sizes = Enum.zip(Map.get(grid, :grid_ids), Map.get(grid, :column_sizes))
    opts = Map.get(grid, :opts)
    # These lookups are loop-invariant; hoisted out of the reducer.
    starting_xy = Map.get(grid, :starting_xy)
    {_, max_y} = Map.get(grid, :max_xy)

    Enum.map_reduce(ids_and_sizes, [], fn i, acc ->
      case acc do
        [] ->
          {build_grid(max_y, elem(i, 1), starting_xy, elem(i, 0), opts[:draw]),
           elem(starting_xy, 0) + elem(i, 1)}

        _ ->
          {build_grid(max_y, elem(i, 1), {acc, elem(starting_xy, 1)}, elem(i, 0), opts[:draw]),
           acc + elem(i, 1)}
      end
    end)
    |> elem(0)
  end

  # Builds one column: a stroked, scissored rect wrapped in a translated
  # group whose id is "<column id>_group".
  defp build_grid(max_y, size, starting_xy, id, draw) do
    group_spec(
      rect_spec({size, max_y},
        stroke: {1, :white},
        scissor: {size, max_y},
        hidden: !draw,
        id: id
      ),
      id: String.to_atom(Atom.to_string(id) <> "_group"),
      t: {elem(starting_xy, 0), elem(starting_xy, 1)}
    )
  end
end
|
lib/layouts/grid.ex
| 0.850375 | 0.931649 |
grid.ex
|
starcoder
|
defmodule Code.Identifier do
@moduledoc false
@doc """
Checks if the given identifier is an unary op.
## Examples
iex> Code.Identifier.unary_op(:+)
{:non_associative, 300}
"""
@spec unary_op(atom) :: {:non_associative, precedence :: pos_integer} | :error
def unary_op(op) do
cond do
op in [:&] -> {:non_associative, 90}
op in [:!, :^, :not, :+, :-, :"~~~"] -> {:non_associative, 300}
op in [:@] -> {:non_associative, 320}
true -> :error
end
end
@doc """
Checks if the given identifier is a binary op.
## Examples
iex> Code.Identifier.binary_op(:+)
{:left, 210}
"""
# Rows are ordered from lowest to highest binding precedence.
@spec binary_op(atom) :: {:left | :right, precedence :: pos_integer} | :error
def binary_op(op) do
cond do
op in [:<-, :\\] -> {:left, 40}
op in [:when] -> {:right, 50}
op in [:"::"] -> {:right, 60}
op in [:|] -> {:right, 70}
op in [:=] -> {:right, 100}
op in [:||, :|||, :or] -> {:left, 120}
op in [:&&, :&&&, :and] -> {:left, 130}
op in [:==, :!=, :=~, :===, :!==] -> {:left, 140}
op in [:<, :<=, :>=, :>] -> {:left, 150}
op in [:|>, :<<<, :>>>, :<~, :~>, :<<~, :~>>, :<~>, :"<|>"] -> {:left, 160}
op in [:in] -> {:left, 170}
op in [:"^^^"] -> {:left, 180}
op in [:"//"] -> {:right, 190}
op in [:++, :--, :.., :<>, :+++, :---] -> {:right, 200}
op in [:+, :-] -> {:left, 210}
op in [:*, :/] -> {:left, 220}
op in [:**] -> {:left, 230}
op in [:.] -> {:left, 310}
true -> :error
end
end
@doc """
Extracts the name and arity of the parent from the anonymous function identifier.
"""
# Example of this format: -NAME/ARITY-fun-COUNT-
# Splitting on "/" and rejoining allows NAME itself to contain "/".
# Returns :error when the atom does not follow the expected format.
def extract_anonymous_fun_parent(atom) when is_atom(atom) do
with "-" <> rest <- Atom.to_string(atom),
[trailing | reversed] = rest |> String.split("/") |> Enum.reverse(),
[arity, _inner, _count, ""] <- String.split(trailing, "-") do
{reversed |> Enum.reverse() |> Enum.join("/") |> String.to_atom(), arity}
else
_ -> :error
end
end
@doc """
Escapes the given identifier.
"""
@spec escape(binary(), char() | nil, :infinity | non_neg_integer, (char() -> iolist() | false)) ::
{escaped :: iolist(), remaining :: binary()}
def escape(binary, char, limit \\ :infinity, fun \\ &escape_map/1)
when ((char in 0..0x10FFFF or is_nil(char)) and limit == :infinity) or
(is_integer(limit) and limit >= 0) do
escape(binary, char, limit, [], fun)
end
# Clause order matters below: the exhausted-limit check, the quote
# character, and the "#{" interpolation opener must be tried before the
# generic UTF-8 clause, which in turn precedes the raw-byte fallback.
defp escape(<<_, _::binary>> = binary, _char, 0, acc, _fun) do
{acc, binary}
end
defp escape(<<char, t::binary>>, char, count, acc, fun) do
escape(t, char, decrement(count), [acc | [?\\, char]], fun)
end
defp escape(<<?#, ?{, t::binary>>, char, count, acc, fun) do
escape(t, char, decrement(count), [acc | '\\\#{'], fun)
end
defp escape(<<h::utf8, t::binary>>, char, count, acc, fun) do
escaped = if value = fun.(h), do: value, else: escape_char(h)
escape(t, char, decrement(count), [acc | escaped], fun)
end
# Fallback for a byte that is not valid UTF-8: emit a \xNN hex escape.
defp escape(<<a::4, b::4, t::binary>>, char, count, acc, fun) do
escape(t, char, decrement(count), [acc | ['\\x', to_hex(a), to_hex(b)]], fun)
end
defp escape(<<>>, _char, _count, acc, _fun) do
{acc, <<>>}
end
defp escape_char(0), do: '\\0'
# 65279 is the byte-order mark (U+FEFF); always escaped explicitly.
defp escape_char(65279), do: '\\uFEFF'
# Printable / well-defined ranges pass through unescaped.
defp escape_char(char)
when char in 0x20..0x7E
when char in 0xA0..0xD7FF
when char in 0xE000..0xFFFD
when char in 0x10000..0x10FFFF do
<<char::utf8>>
end
# Remaining codepoints are emitted as \xNN or \x{...} hex escapes,
# widening with the codepoint's byte width.
defp escape_char(char) when char < 0x100 do
<<a::4, b::4>> = <<char::8>>
['\\x', to_hex(a), to_hex(b)]
end
defp escape_char(char) when char < 0x10000 do
<<a::4, b::4, c::4, d::4>> = <<char::16>>
['\\x{', to_hex(a), to_hex(b), to_hex(c), to_hex(d), ?}]
end
defp escape_char(char) when char < 0x1000000 do
<<a::4, b::4, c::4, d::4, e::4, f::4>> = <<char::24>>
['\\x{', to_hex(a), to_hex(b), to_hex(c), to_hex(d), to_hex(e), to_hex(f), ?}]
end
# Default escape map for the common C-style escapes; returning false tells
# escape/5 to fall back to escape_char/1.
defp escape_map(?\a), do: '\\a'
defp escape_map(?\b), do: '\\b'
defp escape_map(?\d), do: '\\d'
defp escape_map(?\e), do: '\\e'
defp escape_map(?\f), do: '\\f'
defp escape_map(?\n), do: '\\n'
defp escape_map(?\r), do: '\\r'
defp escape_map(?\t), do: '\\t'
defp escape_map(?\v), do: '\\v'
defp escape_map(?\\), do: '\\\\'
defp escape_map(_), do: false
@compile {:inline, to_hex: 1, decrement: 1}
defp to_hex(c) when c in 0..9, do: ?0 + c
defp to_hex(c) when c in 10..15, do: ?A + c - 10
defp decrement(:infinity), do: :infinity
defp decrement(counter), do: counter - 1
end
|
lib/elixir/lib/code/identifier.ex
| 0.789964 | 0.596639 |
identifier.ex
|
starcoder
|
defmodule Radixir.Gateway.Request.BuildTransaction.Action.CreateToken do
  @moduledoc false
  # Builders for each map that makes up a `CreateToken` action.

  alias Radixir.StitchPlan

  @type stitch_plans :: list(keyword)
  @type params :: keyword

  # Keys of the token-properties map, in the order their stitch plans are
  # prepended to the on-going plan list.
  @token_property_keys [
    :name,
    :description,
    :icon_url,
    :url,
    :symbol,
    :is_supply_mutable,
    :granularity
  ]

  @doc """
  Adds the `type` stitch plan for a `CreateToken` action. The value is
  always `CreateTokenDefinition`.

  ## Parameters
    - `stitch_plans`: On-going stitch plans that will be stitched into a map.
  """
  @spec type(stitch_plans) :: stitch_plans
  def type(stitch_plans), do: StitchPlan.type(stitch_plans, type: "CreateTokenDefinition")

  @doc """
  Adds the `token_properties` stitch plans for a `CreateToken` action.

  ## Parameters
    - `stitch_plans`: On-going stitch plans that will be stitched into a map.
    - `params`: Keyword list that contains:
      - `name` (required, string): Token name.
      - `description` (required, string): Token description.
      - `icon_url` (required, string): Token icon url.
      - `url` (required, string): Token url.
      - `symbol` (required, string): Token symbol.
      - `is_supply_mutable` (required, boolean): Is token supply mutable?
      - `granularity` (required, string): Token granularity.
  """
  @spec token_properties(stitch_plans, params) :: stitch_plans
  def token_properties(stitch_plans, params) do
    schema = [
      name: [type: :string, required: true],
      description: [type: :string, required: true],
      icon_url: [type: :string, required: true],
      url: [type: :string, required: true],
      symbol: [type: :string, required: true],
      is_supply_mutable: [type: :boolean, required: true],
      granularity: [type: :string, required: true]
    ]

    validated = NimbleOptions.validate!(params, schema)

    # One stitch plan per property, in the fixed key order above.
    plans =
      Enum.map(@token_property_keys, fn key ->
        [keys: [:token_properties, key], value: Keyword.get(validated, key)]
      end)

    plans ++ stitch_plans
  end

  @doc """
  Adds the `owner` stitch plan for a `CreateToken` action.

  ## Parameters
    - `stitch_plans`: On-going stitch plans that will be stitched into a map.
    - `params`: Keyword list that contains:
      - `address` (required, string): Owner address.
  """
  @spec owner(stitch_plans, params) :: stitch_plans
  def owner(stitch_plans, params), do: StitchPlan.owner(stitch_plans, params, [:token_properties])

  @doc """
  Adds the `token_supply` stitch plan for a `CreateToken` action.

  ## Parameters
    - `stitch_plans`: On-going stitch plans that will be stitched into a map.
    - `params`: Keyword list that contains:
      - `value` (required, string): Token supply value.
  """
  @spec token_supply(stitch_plans, params) :: stitch_plans
  def token_supply(stitch_plans, params) do
    schema = [value: [type: :string, required: true]]

    value =
      params
      |> NimbleOptions.validate!(schema)
      |> Keyword.get(:value)

    [[keys: [:token_supply, :value], value: value] | stitch_plans]
  end

  @doc """
  Adds the `token_identifier` stitch plan for a `CreateToken` action.

  ## Parameters
    - `stitch_plans`: On-going stitch plans that will be stitched into a map.
    - `params`: Keyword list that contains:
      - `rri` (required, string): Radix Resource Identifier.
  """
  @spec token_identifier(stitch_plans, params) :: stitch_plans
  def token_identifier(stitch_plans, params),
    do: StitchPlan.token_identifier(stitch_plans, params, [:token_supply])

  @doc """
  Adds the `to_account` stitch plan for a `CreateToken` action.

  ## Parameters
    - `stitch_plans`: On-going stitch plans that will be stitched into a map.
    - `params`: Keyword list that contains:
      - `address` (required, string): Radix address.
  """
  @spec to_account(stitch_plans, params) :: stitch_plans
  defdelegate to_account(stitch_plans, params), to: StitchPlan
end
|
lib/radixir/gateway/request/build_transaction/action/create_token.ex
| 0.874413 | 0.432243 |
create_token.ex
|
starcoder
|
# Polars-backed implementation of the `Explorer.Backend.DataFrame`
# behaviour. Operations delegate to the Rust NIFs exposed via `Native`,
# usually through the `Shared` helpers.
defmodule Explorer.PolarsBackend.DataFrame do
@moduledoc false
alias Explorer.DataFrame, as: DataFrame
alias Explorer.PolarsBackend.Native
alias Explorer.PolarsBackend.Series, as: PolarsSeries
alias Explorer.PolarsBackend.Shared
alias Explorer.Series, as: Series
# Wraps the NIF resource handle of a native Polars dataframe.
@type t :: %__MODULE__{resource: binary(), reference: reference()}
defstruct resource: nil, reference: nil
@behaviour Explorer.Backend.DataFrame
# Rows scanned to infer CSV column types when the caller gives no limit.
@default_infer_schema_length 1000
# IO
@impl true
# Reads a CSV file through the native Polars reader. Explorer dtypes are
# translated to internal Polars dtype names before the NIF call, and the
# `columns` option is normalized into either names or index projection.
def from_csv(
filename,
dtypes,
delimiter,
null_character,
skip_rows,
header?,
encoding,
max_rows,
columns,
infer_schema_length,
parse_dates
) do
# nil means "not given": fall back to max_rows, then the module default.
infer_schema_length =
if infer_schema_length == nil,
do: max_rows || @default_infer_schema_length,
else: infer_schema_length
dtypes =
Enum.map(dtypes, fn {column_name, dtype} ->
{column_name, Shared.internal_from_dtype(dtype)}
end)
{columns, with_projection} = column_list_check(columns)
df =
Native.df_read_csv(
filename,
infer_schema_length,
header?,
max_rows,
skip_rows,
with_projection,
delimiter,
true,
columns,
dtypes,
encoding,
null_character,
parse_dates
)
case df do
{:ok, df} -> {:ok, Shared.create_dataframe(df)}
{:error, error} -> {:error, error}
end
end
# Normalizes the `columns` option into `{column_names, projection_indices}`,
# the two mutually exclusive selection forms understood by the Polars NIF
# readers. Exactly one side is non-nil, or both are nil when no selection
# was requested. Mixed-type lists raise `ArgumentError`.
defp column_list_check(nil), do: {nil, nil}

defp column_list_check(list) do
  cond do
    Enum.all?(list, &is_atom/1) ->
      {Enum.map(list, &Atom.to_string/1), nil}

    Enum.all?(list, &is_binary/1) ->
      {list, nil}

    Enum.all?(list, &is_integer/1) ->
      {nil, list}

    true ->
      raise ArgumentError,
            "expected :columns to be a list of only integers, only atoms, or only binaries, " <>
              "got: #{inspect(list)}"
  end
end
@impl true
# Writes the dataframe to a CSV file. The binary match asserts the
# delimiter is a single character and converts it to its codepoint.
def to_csv(%DataFrame{data: df}, filename, header?, delimiter) do
<<delimiter::utf8>> = delimiter
case Native.df_to_csv_file(df, filename, header?, delimiter) do
{:ok, _} -> {:ok, filename}
{:error, error} -> {:error, error}
end
end
@impl true
# Reads a newline-delimited JSON file into a dataframe.
def from_ndjson(filename, infer_schema_length, batch_size) do
with {:ok, df} <- Native.df_read_ndjson(filename, infer_schema_length, batch_size) do
{:ok, Shared.create_dataframe(df)}
end
end
@impl true
def to_ndjson(%DataFrame{data: df}, filename) do
with {:ok, _} <- Native.df_write_ndjson(df, filename) do
{:ok, filename}
end
end
@impl true
# Renders the dataframe as a CSV binary (no file involved).
def dump_csv(%DataFrame{} = df, header?, delimiter) do
<<delimiter::utf8>> = delimiter
Shared.apply_native(df, :df_to_csv, [header?, delimiter])
end
@impl true
def from_parquet(filename) do
case Native.df_read_parquet(filename) do
{:ok, df} -> {:ok, Shared.create_dataframe(df)}
{:error, error} -> {:error, error}
end
end
@impl true
def to_parquet(%DataFrame{data: df}, filename) do
case Native.df_write_parquet(df, filename) do
{:ok, _} -> {:ok, filename}
{:error, error} -> {:error, error}
end
end
@impl true
# Arrow IPC read supports the same column-name/index selection as CSV.
def from_ipc(filename, columns) do
{columns, projection} = column_list_check(columns)
case Native.df_read_ipc(filename, columns, projection) do
{:ok, df} -> {:ok, Shared.create_dataframe(df)}
{:error, error} -> {:error, error}
end
end
@impl true
def to_ipc(%DataFrame{data: df}, filename, compression) do
case Native.df_write_ipc(df, filename, compression) do
{:ok, _} -> {:ok, filename}
{:error, error} -> {:error, error}
end
end
# Conversion
@impl true
def lazy, do: Explorer.PolarsBackend.LazyDataFrame
@impl true
def to_lazy(df), do: Shared.apply_native(df, :df_to_lazy)
@impl true
# This backend is already eager, so collect is a no-op.
def collect(df), do: df
@impl true
# Builds a dataframe from anything implementing the `Table` protocol;
# each column becomes a series named after its (stringified) key.
def from_tabular(tabular) do
{columns, %{columns: keys}} = Table.to_columns_with_info(tabular)
keys
|> Enum.map(fn key ->
column_name = to_column_name!(key)
values = Enum.to_list(columns[key])
series_from_list!(column_name, values)
end)
|> from_series_list()
end
@impl true
# Builds a dataframe from `{name, series}` pairs, renaming each series to
# its (stringified) key.
def from_series(pairs) do
pairs
|> Enum.map(fn {key, series} ->
column_name = to_column_name!(key)
PolarsSeries.rename(series, column_name)
end)
|> from_series_list()
end
# Assembles a dataframe from the native handles of the given series.
defp from_series_list(list) do
list = Enum.map(list, & &1.data)
case Native.df_new(list) do
{:ok, df} -> Shared.create_dataframe(df)
{:error, error} -> raise ArgumentError, error
end
end
# Column names are strings internally; atoms are converted, anything else
# is rejected.
defp to_column_name!(column_name) when is_binary(column_name), do: column_name
defp to_column_name!(column_name) when is_atom(column_name), do: Atom.to_string(column_name)
defp to_column_name!(column_name) do
raise ArgumentError,
"expected column name to be either string or atom, got: #{inspect(column_name)}"
end
# Like `Explorer.Series.from_list/2`, but gives a better error message with the series name.
defp series_from_list!(name, list) do
type = Explorer.Shared.check_types!(list)
{list, type} = Explorer.Shared.cast_numerics(list, type)
PolarsSeries.from_list(list, type, name)
rescue
e ->
raise ArgumentError, "cannot create series #{inspect(name)}: " <> Exception.message(e)
end
@impl true
# Materializes the dataframe as a list of row maps. Each column is pulled
# to an Elixir list, then the columns are zipped into per-row maps keyed
# by (optionally atomized) column names.
def to_rows(%DataFrame{data: polars_df} = df, atom_keys?) do
names = if atom_keys?, do: df |> names() |> Enum.map(&String.to_atom/1), else: names(df)
polars_df
|> Enum.map(fn s -> s |> Shared.create_series() |> PolarsSeries.to_list() end)
|> Enum.zip_with(fn row -> names |> Enum.zip(row) |> Map.new() end)
end
# Introspection
@impl true
def names(df), do: Shared.apply_native(df, :df_columns)
@impl true
def dtypes(df), do: df |> Shared.apply_native(:df_dtypes) |> Enum.map(&Shared.normalise_dtype/1)
@impl true
def shape(df), do: Shared.apply_native(df, :df_shape)
@impl true
def n_rows(%DataFrame{groups: []} = df), do: Shared.apply_native(df, :df_height)
# Grouped variant: counts rows per group by taking each group's indices on
# the ungrouped frame, then returns a grouped frame with an `n` column.
def n_rows(%DataFrame{groups: groups} = df) do
groupby = Shared.apply_native(df, :df_groups, [groups])
n =
groupby
|> pull("groups")
|> Series.to_list()
|> Enum.map(fn indices -> df |> ungroup([]) |> take(indices) |> n_rows() end)
groupby |> select(["groups"], :drop) |> mutate(n: n) |> group_by(groups)
end
@impl true
def n_columns(df), do: Shared.apply_native(df, :df_width)
# Single table verbs
@impl true
def head(df, rows), do: Shared.apply_native(df, :df_head, [rows])
@impl true
def tail(df, rows), do: Shared.apply_native(df, :df_tail, [rows])
@impl true
# `:keep` selects the given columns; `:drop` removes them one by one.
def select(df, columns, :keep) when is_list(columns),
do: Shared.apply_native(df, :df_select, [columns])
def select(df, columns, :drop) when is_list(columns),
do: df.data |> drop(columns) |> Shared.update_dataframe(df)
defp drop(polars_df, column_names),
do:
Enum.reduce(column_names, polars_df, fn name, df ->
{:ok, df} = Native.df_drop(df, name)
df
end)
@impl true
def filter(df, %Series{} = mask),
do: Shared.apply_native(df, :df_filter, [mask.data])
@impl true
# Ungrouped mutate folds each new column definition into the frame.
def mutate(%DataFrame{groups: []} = df, columns) do
Enum.reduce(columns, df, &mutate_reducer/2)
end
# Grouped mutate: split by group indices, mutate each partition, then
# vstack the partitions back together and restore the grouping.
def mutate(%DataFrame{groups: groups} = df, columns) do
df
|> Shared.apply_native(:df_groups, [groups])
|> pull("groups")
|> Series.to_list()
|> Enum.map(fn indices -> df |> ungroup([]) |> take(indices) |> mutate(columns) end)
|> Enum.reduce(fn df, acc -> Shared.apply_native(acc, :df_vstack, [df.data]) end)
|> group_by(groups)
end
# A new column can be given as a series, a callback producing one, a list
# of values, or a single scalar (broadcast to every row). The non-series
# clauses all normalize and re-enter the series clause.
defp mutate_reducer({column_name, %Series{} = series}, %DataFrame{} = df)
when is_binary(column_name) do
check_series_size(df, series, column_name)
series = PolarsSeries.rename(series, column_name)
Shared.apply_native(df, :df_with_column, [series.data])
end
defp mutate_reducer({column_name, callback}, %DataFrame{} = df)
when is_function(callback),
do: mutate_reducer({column_name, callback.(df)}, df)
defp mutate_reducer({column_name, values}, df) when is_list(values),
do: mutate_reducer({column_name, series_from_list!(column_name, values)}, df)
defp mutate_reducer({column_name, value}, %DataFrame{} = df)
when is_binary(column_name),
do: mutate_reducer({column_name, value |> List.duplicate(n_rows(df))}, df)
# Guards against adding a column whose length differs from the frame's.
defp check_series_size(df, series, column_name) do
df_len = n_rows(df)
s_len = Series.size(series)
if s_len != df_len,
do:
raise(
ArgumentError,
"size of new column #{column_name} (#{s_len}) must match number of rows in the " <>
"dataframe (#{df_len})"
)
end
@impl true
# Sorts an ungrouped dataframe by each `{direction, column}` pair in turn;
# the native sort is applied once per pair, in order.
def arrange(%DataFrame{groups: []} = df, columns),
  do:
    Enum.reduce(columns, df, fn {direction, column}, df ->
      Shared.apply_native(df, :df_sort, [column, direction == :desc])
    end)

# Grouped arrange: extract each group by row indices, sort it on its own,
# stack the sorted pieces back together and restore the grouping.
def arrange(%DataFrame{groups: groups} = df, columns) do
  df
  |> Shared.apply_native(:df_groups, [groups])
  |> pull("groups")
  |> Series.to_list()
  |> Enum.map(fn indices -> df |> ungroup([]) |> take(indices) |> arrange(columns) end)
  |> Enum.reduce(fn df, acc -> Shared.apply_native(acc, :df_vstack, [df.data]) end)
  |> group_by(groups)
end

@impl true
# Deduplicates rows on `columns`; keep_all? = true retains every column.
def distinct(%DataFrame{groups: []} = df, columns, true),
  do: Shared.apply_native(df, :df_drop_duplicates, [true, columns])

# keep_all? = false: deduplicate, then keep only the distinct columns.
def distinct(%DataFrame{groups: []} = df, columns, false),
  do:
    df
    |> Shared.apply_native(:df_drop_duplicates, [true, columns])
    |> select(columns, :keep)

# Grouped distinct: applied per group; pieces are stacked and regrouped.
def distinct(%DataFrame{groups: groups} = df, columns, keep_all?) do
  df
  |> Shared.apply_native(:df_groups, [groups])
  |> pull("groups")
  |> Series.to_list()
  |> Enum.map(fn indices ->
    df |> ungroup([]) |> take(indices) |> distinct(columns, keep_all?)
  end)
  |> Enum.reduce(fn df, acc -> Shared.apply_native(acc, :df_vstack, [df.data]) end)
  |> group_by(groups)
end
@impl true
# Replaces the full list of column names; `names` must match the column
# count and order of the dataframe.
def rename(df, names) when is_list(names) do
  Shared.apply_native(df, :df_set_column_names, [names])
end

@impl true
# One-hot encodes the selected columns via the native dummies conversion.
def dummies(df, names) do
  selected = select(df, names, :keep)
  Shared.apply_native(selected, :df_to_dummies)
end
@impl true
# Samples `n` rows: generates seedable random row indices natively (with or
# without replacement) and takes those rows.
def sample(df, n, replacement, seed) when is_integer(n) do
  indices =
    df
    |> n_rows()
    |> Native.s_seedable_random_indices(n, replacement, seed)

  # NOTE(review): assumes s_seedable_random_indices returns the indices
  # directly (not an {:ok, _} tuple) — verify against the NIF.
  take(df, indices)
end

@impl true
# Extracts a single column as a series.
def pull(df, column), do: Shared.apply_native(df, :df_column, [column])

@impl true
# Returns `length` rows starting at `offset`.
def slice(df, offset, length), do: Shared.apply_native(df, :df_slice, [offset, length])

@impl true
# Returns the rows at the given indices.
def take(df, row_indices), do: Shared.apply_native(df, :df_take, [row_indices])

@impl true
# Removes rows containing nulls in any of the given columns.
def drop_nil(df, columns), do: Shared.apply_native(df, :df_drop_nulls, [columns])
@impl true
# Melts `value_columns` into long format, then renames polars' default
# "variable"/"value" output columns to the caller-supplied names.
def pivot_longer(df, id_columns, value_columns, names_to, values_to) do
  melted = Shared.apply_native(df, :df_melt, [id_columns, value_columns])
  replacements = %{"variable" => names_to, "value" => values_to}

  new_names =
    melted
    |> names()
    |> Enum.map(&Map.get(replacements, &1, &1))

  rename(melted, new_names)
end
@impl true
# Pivots to wide format, then prefixes every freshly created column with
# `names_prefix`, leaving the id columns untouched.
def pivot_wider(df, id_columns, names_from, values_from, names_prefix) do
  pivoted = Shared.apply_native(df, :df_pivot_wider, [id_columns, names_from, values_from])

  new_names =
    Enum.map(names(pivoted), fn name ->
      if name in id_columns, do: name, else: names_prefix <> name
    end)

  rename(pivoted, new_names)
end
# Two or more table verbs

@impl true
# A right join is expressed as a left join with the operands swapped.
def join(left, right, on, :right), do: join(right, left, on, :left)

# Splits the `on` spec into parallel left/right column-name lists and joins
# through the native binding.
def join(left, right, on, how) do
  how = Atom.to_string(how)
  {left_on, right_on} = Enum.reduce(on, {[], []}, &join_on_reducer/2)

  Shared.apply_native(left, :df_join, [right.data, left_on, right_on, how])
end

# A bare column name joins on that same name on both sides.
# NOTE(review): prepending reverses the order of `on` in the resulting
# lists — presumably the native join is order-insensitive; verify.
defp join_on_reducer(column_name, {left, right}) when is_binary(column_name),
  do: {[column_name | left], [column_name | right]}

# A `{left_name, right_name}` pair joins differently named columns.
defp join_on_reducer({new_left, new_right}, {left, right}),
  do: {[new_left | left], [new_right | right]}

@impl true
# Vertically concatenates all dataframes onto the first one.
def concat_rows(dfs) do
  Enum.reduce(dfs, fn x, acc ->
    # Polars requires the _order_ of columns to be the same
    x = DataFrame.select(x, DataFrame.names(acc))
    Shared.apply_native(acc, :df_vstack, [x.data])
  end)
end
# Groups

@impl true
# Appends the new group columns to whatever grouping is already in effect.
def group_by(%DataFrame{groups: groups} = df, new_groups) do
  %DataFrame{df | groups: groups ++ new_groups}
end

@impl true
# An empty list clears all grouping.
def ungroup(df, []), do: %DataFrame{df | groups: []}

# Otherwise only the named groups are dropped, keeping the rest.
def ungroup(df, groups) do
  remaining = Enum.reject(df.groups, &(&1 in groups))
  %DataFrame{df | groups: remaining}
end
@impl true
# Aggregates each group. `columns` maps a column name to a list of
# aggregation atoms, stringified here for the native call. The result is
# ungrouped and sorted by the original grouping columns.
def summarise(%DataFrame{groups: groups} = df, columns) do
  columns =
    Enum.map(columns, fn {key, values} -> {key, Enum.map(values, &Atom.to_string/1)} end)

  df
  |> Shared.apply_native(:df_groupby_agg, [groups, columns])
  |> ungroup([])
  |> DataFrame.arrange(groups)
end
# Inspect

@impl true
# Delegates rendering to the backend-agnostic inspect helper, tagging the
# output with the "Polars" backend name.
def inspect(df, opts) do
  {n_rows, _} = shape(df)
  Explorer.Backend.DataFrame.inspect(df, "Polars", n_rows, opts)
end
end
# Enumerating a polars dataframe walks its *columns* (as native series),
# not its rows.
defimpl Enumerable, for: Explorer.PolarsBackend.DataFrame do
  alias Explorer.PolarsBackend.Native
  alias Explorer.PolarsBackend.Series, as: PolarsSeries

  # Column count from the native binding; returns the {:ok, n} shape the
  # Enumerable protocol expects.
  def count(df), do: Native.df_width(df)

  def slice(df) do
    {:ok, size} = count(df)
    {:ok, size, &slicing_fun(df, &1, &2)}
  end

  # Returns the columns at positions start..start+length-1.
  defp slicing_fun(df, start, length) do
    for idx <- start..(start + length - 1) do
      {:ok, df} = Native.df_select_at_idx(df, idx)
      df
    end
  end

  def reduce(_df, {:halt, acc}, _fun), do: {:halted, acc}
  def reduce(df, {:suspend, acc}, fun), do: {:suspended, acc, &reduce(df, &1, fun)}

  # Peels the first column off the dataframe on each step and feeds it to
  # `fun`, until no columns remain.
  def reduce(df, {:cont, acc}, fun) do
    case Native.df_columns(df) do
      {:ok, []} ->
        {:done, acc}

      {:ok, [head | _tail]} ->
        {:ok, next_column} = Native.df_column(df, head)
        {:ok, df} = Native.df_drop(df, head)
        reduce(df, fun.(next_column, acc), fun)
    end
  end

  def member?(df, %PolarsSeries{} = series) do
    {:ok, columns} = Native.df_get_columns(df)
    # NOTE(review): if s_series_equal returns an {:ok, boolean} tuple, that
    # tuple is always truthy and `Enum.any?` would report membership for
    # any non-empty dataframe — verify the NIF returns a bare boolean here.
    {:ok, Enum.any?(columns, &Native.s_series_equal(&1, series, false))}
  end

  def member?(_, _), do: {:error, __MODULE__}
end
|
lib/explorer/polars_backend/data_frame.ex
| 0.846006 | 0.420481 |
data_frame.ex
|
starcoder
|
defmodule Dune.Allowlist do
  @moduledoc """
  Behaviour to customize the modules and functions that are allowed or restricted.

  ## Warning: security considerations

  The default implementation is `Dune.Allowlist.Default`, and should only allow safe
  functions: no atom leaks, no execution of arbitrary code, no access to the filesystem / network...
  Defining or extending a custom `Dune.Allowlist` module can introduce security risks or bugs.
  Please also note that using custom allowlists is still **experimental** and the API for it
  might change faster than the rest of the library.

  ## Defining a new allowlist

  In order to define a custom allowlist from scratch, `use Dune.Allowlist` can be used:

      defmodule CustomAllowlist do
        use Dune.Allowlist

        allow Kernel, only: [:+, :*, :-, :/, :div, :rem]
      end

      Dune.eval_string("4 + 9", allowlist: CustomAllowlist)

  ## Extending an existing allowlist

  Defining an allowlist from scratch can be both daunting and risky.
  It is possible to extend an exisiting allowlist instead using the `extend` option:

      defmodule ExtendedAllowlist do
        use Dune.Allowlist, extend: Dune.Allowlist.Default

        allow SomeModule, only: [:authorized]
      end

      Dune.eval_string("SomeModule.authorized(123)", allowlist: ExtendedAllowlist)

  Note: currently, it is not possible to add or restrict functions from modules
  that have already been specified.

  ## Documentation generation

  The list of modules and functions with their status can be generated in the `@moduledoc`.
  An example can be found in the `Dune.Allowlist.Default` documentation.
  If the `__DUNE_ALLOWLIST_FUNCTIONS__` string is found in the `@moduledoc` string,
  it will be replaced.

      defmodule CustomAllowlist do
        @moduledoc \"\"\"
        Only allows simple arithmetic

        ## Allowlist functions

        __DUNE_ALLOWLIST_FUNCTIONS__
        \"\"\"

        use Dune.Allowlist

        allow Kernel, only: [:+, :*, :-, :/, :div, :rem]
      end
  """

  # Trust status of a single function: usable as-is, forbidden, or replaced
  # by an alternative (shim) implementation.
  @type status :: :allowed | :restricted | {:shimmed, module, atom}

  @doc """
  Returns the trust status of a function or macro, specified as a `module`, `fun_name` and `arity` (`mfa`):
  - `:allowed` if can be safely use
  - `:restricted` if its usage should be forbidden
  - a `{:shimmed, module, function_name}` if the function call should be replaced with an alternative implementation
  """
  @callback fun_status(module, atom, non_neg_integer) :: Dune.Allowlist.status()

  @doc """
  Validates the fact that a module implements the `Dune.Allowlist` behaviour.

  Raises if not the case.

  ## Examples

      iex> Dune.Allowlist.ensure_implements_behaviour!(DoesNotExists)
      ** (ArgumentError) could not load module DoesNotExists due to reason :nofile

      iex> Dune.Allowlist.ensure_implements_behaviour!(List)
      ** (ArgumentError) List does not implement the Dune.Allowlist behaviour
  """
  @spec ensure_implements_behaviour!(module) :: module
  def ensure_implements_behaviour!(module) when is_atom(module) do
    Code.ensure_compiled!(module)

    # A module advertises implemented behaviours in its compile-time
    # :behaviour attribute.
    implemented? =
      module.module_info(:attributes)
      |> Keyword.get(:behaviour, [])
      |> Enum.member?(Dune.Allowlist)

    unless implemented? do
      raise ArgumentError,
        message: "#{inspect(module)} does not implement the Dune.Allowlist behaviour"
    end

    module
  end

  defmacro __using__(opts) do
    # Expand and validate the :extend option eagerly, in the caller's
    # compile context, so a bad module fails fast.
    extend = extract_extend_opt(opts, __CALLER__)

    quote do
      import Dune.Allowlist, only: [allow: 2]
      @behaviour Dune.Allowlist

      # :allowlist accumulates every `allow/2` declaration in the module body;
      # they are folded into a spec in __before_compile__.
      Module.register_attribute(__MODULE__, :allowlist, accumulate: true)
      Module.put_attribute(__MODULE__, :extend_allowlist, unquote(extend))
      @before_compile Dune.Allowlist
    end
  end

  @doc """
  Adds a new module to the allowlist and specifices which functions to use.

  The module must not be already specified in the allowlist.
  Must be called after `use Dune.Allowlist`.

  ## Examples

      # allow all functions in a module
      allow Time, :all

      # only allow specific functions
      allow Function, only: [:identity]

      # exclude specific functions
      allow Calendar, except: [:put_time_zone_database]

  Note: `only` and `except` will cover all arities if several functions
  share a name.
  """
  defmacro allow(module, status) do
    quote do
      Module.put_attribute(__MODULE__, :allowlist, {unquote(module), unquote(status)})
    end
  end

  defmacro __before_compile__(env) do
    Dune.Allowlist.__postprocess__(env.module)
  end

  # Fetches the :extend option (if any) and checks the referenced module
  # really implements Dune.Allowlist before accepting it.
  defp extract_extend_opt(opts, caller) do
    case Keyword.fetch(opts, :extend) do
      {:ok, module_ast} ->
        Macro.expand(module_ast, caller) |> ensure_implements_behaviour!()

      _ ->
        nil
    end
  end

  # Called from __before_compile__: builds the final spec, injects it into
  # the moduledoc, and generates the spec/0 and fun_status/3 definitions.
  @doc false
  def __postprocess__(module) do
    extend = Module.get_attribute(module, :extend_allowlist)
    spec = generate_spec(module, extend)
    update_module_doc(module, spec)

    quote do
      unquote(def_spec(spec))
      unquote(def_fun_status(spec))
    end
  end

  # Starts from the extended allowlist's spec (or an empty one) and adds
  # each `allow/2` declaration accumulated in the module body.
  defp generate_spec(module, extend) do
    base_spec =
      case extend do
        nil -> Dune.Allowlist.Spec.new()
        allowlist when is_atom(allowlist) -> allowlist.spec()
      end

    Module.get_attribute(module, :allowlist)
    |> Enum.reduce(base_spec, fn {module, status}, acc ->
      Dune.Allowlist.Spec.add_new_module(acc, module, status)
    end)
  end

  # Generates a hidden spec/0 returning the compile-time spec, so other
  # allowlists can extend this one.
  defp def_spec(spec) do
    quote do
      @doc false
      @spec spec :: Dune.Allowlist.Spec.t()
      def spec do
        unquote(Macro.escape(spec))
      end
    end
  end

  # Generates fun_status/3: one do_fun_status/2 clause per known {module,
  # function} pair, plus a :restricted catch-all. Real-module existence is
  # checked first so unknown functions report as undefined, not restricted.
  defp def_fun_status(spec) do
    defps =
      for {m, f, status} = _ <- Dune.Allowlist.Spec.list_fun_statuses(spec) do
        quote do
          defp do_fun_status(unquote(m), unquote(f)),
            do: unquote(Macro.escape(status))
        end
      end

    quote do
      @impl Dune.Allowlist
      @doc "Implements `c:Dune.Allowlist.fun_status/3`"
      def fun_status(module, fun_name, arity)
          when is_atom(module) and is_atom(fun_name) and is_integer(arity) and arity >= 0 do
        with :defined <- Dune.Parser.RealModule.fun_status(module, fun_name, arity) do
          do_fun_status(module, fun_name)
        end
      end

      unquote(defps)
      defp do_fun_status(_module, _fun_name), do: :restricted
    end
  end

  # Replaces the __DUNE_ALLOWLIST_FUNCTIONS__ placeholder (if present) in
  # the user's @moduledoc with the generated allowlist documentation.
  defp update_module_doc(module, spec) do
    case Module.get_attribute(module, :moduledoc) do
      {line, doc} when is_binary(doc) ->
        doc =
          String.replace(doc, "__DUNE_ALLOWLIST_FUNCTIONS__", fn _ ->
            Dune.Allowlist.Docs.document_allowlist(spec)
          end)

        Module.put_attribute(module, :moduledoc, {line, doc})

      _other ->
        :ok
    end
  end
end
|
lib/dune/allowlist.ex
| 0.798776 | 0.567997 |
allowlist.ex
|
starcoder
|
defmodule Bitcoinex.Secp256k1.Math do
  @moduledoc """
  Contains math utilities when dealing with secp256k1 curve points and scalars.

  All of the addition and multiplication uses the secp256k1 curve parameters.

  Several of the jacobian multiplication and addition functions are borrowed heavily from https://github.com/starkbank/ecdsa-elixir/.
  """
  alias Bitcoinex.Secp256k1.{Params, Point}
  import Bitcoinex.Secp256k1.Point
  use Bitwise, only_operators: true

  @doc """
  pow performs integer pow,
  where x is raised to the power of y.
  """
  # Integer.pow/2 was added since 1.12.0. This function_exported? can be removed when we decide
  # to only support >= 1.12.0 in the future
  if function_exported?(Integer, :pow, 2) do
    defdelegate pow(base, exponent), to: Integer
  else
    # copy from https://github.com/elixir-lang/elixir/blob/master/lib/elixir/lib/integer.ex#L104
    @spec pow(integer, non_neg_integer) :: integer
    def pow(base, exponent) when is_integer(base) and is_integer(exponent) and exponent >= 0 do
      guarded_pow(base, exponent)
    end

    # https://en.wikipedia.org/wiki/Exponentiation_by_squaring
    defp guarded_pow(_, 0), do: 1
    defp guarded_pow(b, 1), do: b
    defp guarded_pow(b, e) when (e &&& 1) == 0, do: guarded_pow(b * b, e >>> 1)
    defp guarded_pow(b, e), do: b * guarded_pow(b * b, e >>> 1)
  end

  @doc """
  Inv performs the Extended Euclidean Algorithm to find
  the inverse of a number x mod n.
  """
  @spec inv(integer, pos_integer) :: integer
  def inv(x, n) when is_integer(x) and is_integer(n) and n >= 1 do
    do_inv(x, n)
  end

  # 0 has no modular inverse; 0 is returned in that case.
  defp do_inv(x, _n) when x == 0, do: 0
  defp do_inv(x, n), do: do_inv(1, 0, modulo(x, n), n) |> modulo(n)

  # Extended-Euclid step: (lm, hm) track the Bézout coefficients while
  # (low, high) run the gcd recursion.
  defp do_inv(lm, hm, low, high) when low > 1 do
    r = div(high, low)

    do_inv(
      hm - lm * r,
      lm,
      high - low * r,
      low
    )
  end

  defp do_inv(lm, _hm, _low, _high) do
    lm
  end

  # Euclidean modulo: unlike rem/2, the result is non-negative for n > 0.
  @spec modulo(integer, integer) :: integer
  def modulo(x, n) when is_integer(x) and is_integer(n) do
    r = rem(x, n)
    if r < 0, do: r + n, else: r
  end

  @doc """
  multiply accepts a point P and scalar n and,
  does jacobian multiplication to return resulting point.
  """
  def multiply(p, n) when is_point(p) and is_integer(n) do
    p
    |> toJacobian()
    |> jacobianMultiply(n)
    |> fromJacobian()
  end

  @doc """
  add accepts points p and q and,
  does jacobian addition to return resulting point.
  """
  def add(p, q) when is_point(p) and is_point(q) do
    jacobianAdd(toJacobian(p), toJacobian(q))
    |> fromJacobian()
  end

  # Convert our point P to jacobian coordinates (affine point lifted with z = 1).
  defp toJacobian(p) do
    %Point{x: p.x, y: p.y, z: 1}
  end

  # Convert our jacobian coordinates to a point P on secp256k1 curve:
  # affine x = X / Z^2 and y = Y / Z^3 (mod p), via the modular inverse of z.
  defp fromJacobian(p) do
    z = inv(p.z, Params.curve().p)

    %Point{
      x:
        modulo(
          p.x * pow(z, 2),
          Params.curve().p
        ),
      y:
        modulo(
          p.y * pow(z, 3),
          Params.curve().p
        )
    }
  end

  # double Point P to get point P + P
  # We use the dbl-1998-cmo-2 doubling formula.
  # For reference, http://www.hyperelliptic.org/EFD/g1p/auto-shortw-jacobian.html.
  defp jacobianDouble(p) do
    if p.y == 0 do
      # y == 0 encodes the point at infinity here; doubling it stays at infinity.
      %Point{x: 0, y: 0, z: 0}
    else
      # XX = X1^2
      xsq =
        pow(p.x, 2)
        |> modulo(Params.curve().p)

      # YY = Y1^2
      ysq =
        pow(p.y, 2)
        |> modulo(Params.curve().p)

      # S = 4 * X1 * YY
      s =
        (4 * p.x * ysq)
        |> modulo(Params.curve().p)

      # M = 3 * XX + a * Z1^4
      m =
        (3 * xsq + Params.curve().a * pow(p.z, 4))
        |> modulo(Params.curve().p)

      # T = M^2 - 2 * S
      t =
        (pow(m, 2) - 2 * s)
        |> modulo(Params.curve().p)

      # X3 = T
      nx = t

      # Y3 = M * (S - T) - 8 * YY^2
      ny =
        (m * (s - t) - 8 * pow(ysq, 2))
        |> modulo(Params.curve().p)

      # Z3 = 2 * Y1 * Z1
      nz =
        (2 * p.y * p.z)
        |> modulo(Params.curve().p)

      %Point{x: nx, y: ny, z: nz}
    end
  end

  # add points P and Q to get P + Q
  # We use the add-1998-cmo-2 addition formula.
  # For reference, http://www.hyperelliptic.org/EFD/g1p/auto-shortw-jacobian.html.
  defp jacobianAdd(p, q) do
    if p.y == 0 do
      # P is the point at infinity: P + Q = Q.
      q
    else
      if q.y == 0 do
        # Q is the point at infinity: P + Q = P.
        p
      else
        # U1 = X1 * Z2^2
        u1 =
          (p.x * pow(q.z, 2))
          |> modulo(Params.curve().p)

        # U2 = X2 * Z1^2
        u2 =
          (q.x * pow(p.z, 2))
          |> modulo(Params.curve().p)

        # S1 = Y1 * Z2^3
        s1 =
          (p.y * pow(q.z, 3))
          |> modulo(Params.curve().p)

        # S2 = Y2 * Z1^3
        s2 =
          (q.y * pow(p.z, 3))
          |> modulo(Params.curve().p)

        if u1 == u2 do
          if s1 != s2 do
            # Same x, opposite y: P + (-P) is the point at infinity.
            %Point{x: 0, y: 0, z: 1}
          else
            # P == Q: fall back to doubling.
            jacobianDouble(p)
          end
        else
          # H = U2 - U1
          h = u2 - u1

          # r = S2 - S1
          r = s2 - s1

          # HH = H^2
          h2 =
            (h * h)
            |> modulo(Params.curve().p)

          # HHH = H * HH
          h3 =
            (h * h2)
            |> modulo(Params.curve().p)

          # V = U1 * HH
          v =
            (u1 * h2)
            |> modulo(Params.curve().p)

          # X3 = r^2 - HHH - 2 * V
          nx =
            (pow(r, 2) - h3 - 2 * v)
            |> modulo(Params.curve().p)

          # Y3 = r * (V - X3) - S1 * HHH
          ny =
            (r * (v - nx) - s1 * h3)
            |> modulo(Params.curve().p)

          # Z3 = Z1 * Z2 * H
          nz =
            (h * p.z * q.z)
            |> modulo(Params.curve().p)

          %Point{x: nx, y: ny, z: nz}
        end
      end
    end
  end

  # multiply point P with scalar n (double-and-add over the bits of n)
  defp jacobianMultiply(_p, n) when n == 0 do
    %Point{x: 0, y: 0, z: 1}
  end

  defp jacobianMultiply(p, n) when n == 1 do
    if p.y == 0 do
      %Point{x: 0, y: 0, z: 1}
    else
      p
    end
  end

  defp jacobianMultiply(p, n)
       # This integer is n, the integer order of G for secp256k1.
       # Unfortunately cannot call Params.curve.n to get the curve order integer,
       # so instead, it is pasted it here.
       # In the future we should move it back to Params.
       when n < 0 or
              n >
                115_792_089_237_316_195_423_570_985_008_687_907_852_837_564_279_074_904_382_605_163_141_518_161_494_337 do
    if p.y == 0 do
      %Point{x: 0, y: 0, z: 1}
    else
      # Reduce the scalar mod the group order before multiplying.
      jacobianMultiply(p, modulo(n, Params.curve().n))
    end
  end

  # Even scalar: 2 * ((n/2) * P).
  defp jacobianMultiply(p, n) when rem(n, 2) == 0 do
    if p.y == 0 do
      %Point{x: 0, y: 0, z: 1}
    else
      jacobianMultiply(p, div(n, 2))
      |> jacobianDouble()
    end
  end

  # Odd scalar: 2 * ((n/2) * P) + P.
  defp jacobianMultiply(p, n) do
    if p.y == 0 do
      %Point{x: 0, y: 0, z: 1}
    else
      jacobianMultiply(p, div(n, 2))
      |> jacobianDouble()
      |> jacobianAdd(p)
    end
  end
end
|
lib/secp256k1/math.ex
| 0.919638 | 0.663866 |
math.ex
|
starcoder
|
defmodule SiteEncrypt.Phoenix do
  @moduledoc """
  `SiteEncrypt` adapter for Phoenix endpoints.

  ## Usage

  1. Add `use SiteEncrypt.Phoenix` to your endpoint immediately after `use Phoenix.Endpoint`
  2. Configure https via `configure_https/2`.
  3. Add the implementation of `c:SiteEncrypt.certification/0` to the endpoint (the
     `@behaviour SiteEncrypt` is injected when this module is used).
  """

  use Parent.Supervisor
  alias SiteEncrypt.{Acme, Registry}

  @doc """
  Merges paths to key and certificates to the `:https` configuration of the endpoint config.

  Invoke this macro from `c:Phoenix.Endpoint.init/2` to complete the https configuration:

      defmodule MyEndpoint do
        # ...

        @impl Phoenix.Endpoint
        def init(_key, config) do
          # this will merge key, cert, and chain into `:https` configuration from config.exs
          {:ok, SiteEncrypt.Phoenix.configure_https(config)}

          # to completely configure https from `init/2`, invoke:
          #   SiteEncrypt.Phoenix.configure_https(config, port: 4001, ...)
        end

        # ...
      end

  The `options` are any valid adapter HTTPS options. For many great tips on configuring HTTPS for
  production refer to the [Plug HTTPS guide](https://hexdocs.pm/plug/https.html#content).
  """
  defmacro configure_https(config, https_opts \\ []) do
    quote bind_quoted: [config: config, https_opts: https_opts] do
      # Precedence (last wins): endpoint config < macro options <
      # SiteEncrypt-managed key/cert paths.
      https_config =
        (Keyword.get(config, :https) || [])
        |> Config.Reader.merge(https_opts)
        |> Config.Reader.merge(SiteEncrypt.https_keys(__MODULE__))

      Keyword.put(config, :https, https_config)
    end
  end

  @doc false
  defmacro __using__(_opts) do
    quote do
      # Enforce the documented ordering: `use Phoenix.Endpoint` must come
      # first, since it is what adds Phoenix.Endpoint to @behaviour.
      unless Enum.member?(@behaviour, Phoenix.Endpoint),
        do: raise("SiteEncrypt.Phoenix must be used after Phoenix.Endpoint")

      @behaviour SiteEncrypt
      require SiteEncrypt
      require SiteEncrypt.Phoenix

      # Serves ACME challenge requests directly from the endpoint.
      plug SiteEncrypt.AcmeChallenge, __MODULE__

      # Default no-op callback; endpoints may override it.
      @impl SiteEncrypt
      def handle_new_cert, do: :ok

      defoverridable handle_new_cert: 0
    end
  end

  @doc false
  def start_link(endpoint) do
    Parent.Supervisor.start_link(
      children(endpoint),
      name: {:via, Elixir.Registry, {Registry, endpoint}}
    )
  end

  # Stops all children of the site's supervisor, runs `fun` (e.g. to change
  # configuration), then starts the same child specs again.
  @doc false
  def restart_site(endpoint, fun) do
    root = Registry.root(endpoint)
    Parent.Client.shutdown_all(root)
    fun.()
    Enum.each(children(endpoint), fn spec -> {:ok, _} = Parent.Client.start_child(root, spec) end)
  end

  # Supervision children: the endpoint itself, the (optional) local ACME
  # server bound to the endpoint's lifetime, and the certification jobs.
  defp children(endpoint) do
    [
      Parent.child_spec(endpoint, id: :endpoint, start: fn -> start_endpoint(endpoint) end),
      Parent.child_spec(Acme.Server,
        start: fn -> start_acme_server(endpoint) end,
        binds_to: [:endpoint]
      )
    ] ++ SiteEncrypt.Certification.child_specs(endpoint)
  end

  # Stores the endpoint's certification config and ensures certificates
  # exist before the endpoint starts accepting connections.
  defp start_endpoint(endpoint) do
    config = endpoint.certification()
    Registry.store_config(endpoint, config)
    SiteEncrypt.initialize_certs(config)
    endpoint.start_link([])
  end

  # Starts the built-in ACME server only when both the endpoint's http port
  # and an internal ACME server port are configured; otherwise :ignore.
  defp start_acme_server(endpoint) do
    config = Registry.config(endpoint)

    with endpoint_port when not is_nil(endpoint_port) <- endpoint_port(config),
         port when not is_nil(port) <- acme_server_port(config) do
      dns = dns(config, endpoint_port)
      Acme.Server.start_link(config.id, port, dns, log_level: config.log_level)
    else
      _ -> :ignore
    end
  end

  # The endpoint's plain http port, or nil when the endpoint isn't serving.
  defp endpoint_port(%{id: endpoint}) do
    if server?(endpoint), do: endpoint.config(:http) |> Keyword.fetch!(:port)
  end

  # Mirrors Phoenix's own "should this endpoint run a server?" decision:
  # explicit :server config wins, otherwise the global :serve_endpoints flag.
  defp server?(endpoint) do
    with nil <- endpoint.config(:server),
         do: Application.get_env(:phoenix, :serve_endpoints, false)
  end

  # Maps every certified domain to a resolver pointing at the local endpoint.
  defp dns(config, endpoint_port),
    do: Enum.into(config.domains, %{}, &{&1, fn -> "localhost:#{endpoint_port}" end})

  # The internal ACME server port, or nil when an external directory is used.
  defp acme_server_port(%{directory_url: {:internal, acme_server_opts}}),
    do: Keyword.get(acme_server_opts, :port)

  defp acme_server_port(_), do: nil
end
|
lib/site_encrypt/phoenix.ex
| 0.808483 | 0.546557 |
phoenix.ex
|
starcoder
|
defmodule UrbitEx.API.Notifications do
  alias UrbitEx.{Airlock, Actions, API}
  alias UrbitEx.Timebox
  import UrbitEx.HarkStore

  @moduledoc """
  Client API to interact with `hark-store`, the notifications system of Urbit.
  """

  @doc """
  Fetches notifications stored on your ship.

  Takes an UrbitEx.Session struct, a type atom (or string), which can be `:archive` or `:inbox`
  (for old and current notifications), an offset integer and a count integer.
  Returns a list of UrbitEx.Timebox structs.
  """
  def fetch(session, type \\ :inbox, offset \\ 0, count \\ 10) do
    endpoint = "/~/scry/hark-store/recent/#{type}/#{offset}/#{count}.json"
    {:ok, res} = Airlock.get(session.url <> endpoint, session.cookie)
    {:ok, b} = Jason.decode(res.body)
    b["harkUpdate"]["more"] |> Enum.map(&Timebox.new(&1["timebox"]))
  end

  ## Specific

  @doc """
  Marks all nodes in a channel as read.

  Takes an UrbitEx.Session struct, the channel to send the poke over, and
  UrbitEx.Resource structs for the group and for the target channel.
  """
  def read_channel(session, channel, group, resource) do
    json = mark_channel_as_read(group, resource)
    body = Actions.poke(session.ship, "hark-store", "hark-action", json)
    API.wrap_put(session, channel, [body])
  end

  @doc """
  Marks a single node in a channel as read.

  Takes an UrbitEx.Session struct, the channel to send the poke over, and
  UrbitEx.Resource structs for the group and for the target channel.
  It also takes an index string of the target node and the type of channel it belongs, whether `:publish` or `:link`.
  """
  # TODO this one's tricky, it asks for "description" and "module" of the graph and the target index of the node
  def read_node(session, channel, group, resource, node_index, channel_type) do
    json = mark_node_as_read(group, resource, node_index, channel_type)
    body = Actions.poke(session.ship, "hark-store", "hark-action", json)
    API.wrap_put(session, channel, [body])
  end

  @doc """
  Marks a node *and its children* in a channel as read.

  This is used for notes and links, to set them and their children as read, together.
  Takes an UrbitEx.Session struct, the channel to send the poke over, and
  UrbitEx.Resource structs for the group and for the target channel.
  It also takes an index string of the target node and the type of channel it belongs, whether `:publish` or `:link`.
  """
  # TODO this one's tricky, it asks for "description" and "module" of the graph and the target index of the node
  def read_whole_node(session, channel, group, resource, node_index, channel_type) do
    json = mark_whole_node_as_read(group, resource, node_index, channel_type)
    body = Actions.poke(session.ship, "hark-store", "hark-action", json)
    API.wrap_put(session, channel, [body])
  end

  @doc """
  Ignores a channel. When ignored, your ship won't track unread nodes from that channel.

  Takes an UrbitEx.Session struct, the channel to send the poke over, and an
  UrbitEx.Resource struct for the target channel.
  """
  def mute_channel(session, channel, resource) do
    json = set_channel_notifications(:ignore, resource)
    body = Actions.poke(session.ship, "hark-store", "hark-action", json)
    API.wrap_put(session, channel, [body])
  end

  @doc """
  Unmutes a channel. When unmuted, your ship will track unread nodes from that channel.

  Takes an UrbitEx.Session struct, the channel to send the poke over, and an
  UrbitEx.Resource struct for the target channel.
  """
  def unmute_channel(session, channel, resource) do
    json = set_channel_notifications(:listen, resource)
    body = Actions.poke(session.ship, "hark-store", "hark-action", json)
    API.wrap_put(session, channel, [body])
  end

  ## Global settings

  @doc """
  Sets the Do Not Disturb option. When `true`, you won't receive notifications.

  Takes an UrbitEx.Session struct, the channel to send the poke over, and a boolean.
  """
  def do_not_disturb(session, channel, boolean) do
    json = set_dnd(boolean)
    body = Actions.poke(session.ship, "hark-store", "hark-action", json)
    API.wrap_put(session, channel, [body])
  end

  @doc """
  Sets whether you want notifications when a node you authored has received replies,
  e.g. a channel you host or a notebook post you wrote.
  When `false`, you won't receive notifications.

  Takes an UrbitEx.Session struct, the channel to send the poke over, and a boolean.
  """
  def watch_replies(session, channel, boolean) do
    json = set_watch_replies(boolean)
    body = Actions.poke(session.ship, "hark-graph-hook", "hark-graph-hook-action", json)
    API.wrap_put(session, channel, [body])
  end

  @doc """
  Sets whether you want notifications when you are mentioned in a channel you are subscribed to.
  When `false`, you won't receive notifications.

  Takes an UrbitEx.Session struct, the channel to send the poke over, and a boolean.
  """
  def watch_mentions(session, channel, boolean) do
    json = set_watch_mentions(boolean)
    body = Actions.poke(session.ship, "hark-graph-hook", "hark-graph-hook-action", json)
    API.wrap_put(session, channel, [body])
  end
end
|
lib/api/gall/hark.ex
| 0.659405 | 0.437884 |
hark.ex
|
starcoder
|
defmodule ScrollHat.Font.Big do
@moduledoc """
Large 5x7 Font
Transposed from https://github.com/pimoroni/scroll-phat-hd/blob/master/library/scrollphathd/fonts/font5x7.py
"""
@behaviour ScrollHat.Font
def char_matrix(char, brightness \\ 0xFF)
def char_matrix(0x0, _b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0]
]
end
def char_matrix(0x1, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, b, b, 0x0],
[0x0, b, 0x0, 0x0, b],
[0x0, b, 0x0, 0x0, b],
[b, 0x0, 0x0, b, 0x0],
[b, 0x0, 0x0, b, 0x0]
]
end
def char_matrix(0x2, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, b, b],
[0x0, b, b, 0x0, b]
]
end
def char_matrix(0x3, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, 0x0, b, 0x0],
[0x0, b, 0x0, b, 0x0],
[0x0, 0x0, b, 0x0, 0x0]
]
end
def char_matrix(0x4, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, b, 0x0, b],
[b, 0x0, b, 0x0, b],
[0x0, b, 0x0, b, 0x0]
]
end
def char_matrix(0x5, b) do
[
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[b, 0x0, 0x0],
[b, b, 0x0],
[b, b, b],
[b, b, 0x0],
[b, 0x0, 0x0]
]
end
def char_matrix(0x6, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, b, b, b, 0x0],
[b, b, b, b, b],
[0x0, b, b, b, 0x0],
[0x0, b, b, b, 0x0]
]
end
def char_matrix(0x7, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, b, b, b, 0x0],
[0x0, b, b, b, 0x0],
[b, b, b, b, b],
[0x0, b, b, b, 0x0],
[0x0, 0x0, b, 0x0, 0x0]
]
end
def char_matrix(0x8, b) do
[
[0x0, 0x0, 0x0, b, 0x0],
[0x0, 0x0, b, 0x0, b],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[b, 0x0, b, 0x0, 0x0],
[0x0, b, 0x0, 0x0, 0x0]
]
end
def char_matrix(0x9, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, 0x0, b, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, b, 0x0, b, 0x0],
[b, 0x0, 0x0, 0x0, b]
]
end
def char_matrix(0xA, b) do
[
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[b, b, b],
[b, 0x0, b],
[b, b, b]
]
end
def char_matrix(0xB, b) do
[
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[0x0, b, 0x0],
[b, b, b],
[0x0, b, 0x0]
]
end
def char_matrix(0xC, b) do
[[0x0], [0x0], [0x0], [0x0], [0x0], [0x0], [b]]
end
def char_matrix(0xD, b) do
[
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[b, b, b],
[0x0, b, 0x0],
[0x0, b, 0x0],
[0x0, b, 0x0]
]
end
def char_matrix(0xE, b) do
[
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[b, b, 0x0],
[0x0, 0x0, b],
[b, b, 0x0],
[0x0, 0x0, b],
[b, b, 0x0]
]
end
def char_matrix(0xF, b) do
[
[b, b, b, b, b],
[b, b, 0x0, 0x0, 0x0],
[b, b, 0x0, 0x0, 0x0],
[b, b, b, b, 0x0],
[b, b, 0x0, 0x0, 0x0],
[b, b, 0x0, 0x0, 0x0],
[b, b, 0x0, 0x0, 0x0]
]
end
def char_matrix(0x10, b) do
[
[0x0, 0x0, b, b, b],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[b, 0x0, b, 0x0, 0x0],
[0x0, b, b, 0x0, 0x0],
[0x0, 0x0, b, 0x0, 0x0]
]
end
def char_matrix(0x11, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, b, b],
[0x0, 0x0, 0x0, 0x0, b],
[b, b, 0x0, 0x0, b],
[0x0, 0x0, 0x0, 0x0, b],
[0x0, 0x0, 0x0, 0x0, b]
]
end
def char_matrix(0x12, b) do
[
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[b, b, 0x0],
[0x0, 0x0, b],
[0x0, b, 0x0],
[b, 0x0, 0x0],
[b, b, b]
]
end
def char_matrix(0x13, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, b],
[0x0, 0x0, 0x0, b, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, b, 0x0, 0x0, 0x0],
[b, b, b, b, b]
]
end
def char_matrix(0x14, b) do
[
[0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0],
[0x0, b, b, 0x0],
[b, 0x0, 0x0, b],
[b, 0x0, 0x0, b],
[0x0, b, b, 0x0]
]
end
def char_matrix(0x15, b) do
[
[0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0],
[b, 0x0, b, b],
[b, b, 0x0, 0x0],
[b, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0]
]
end
def char_matrix(0x16, b) do
[
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[b, b, b],
[0x0, b, 0x0],
[0x0, b, 0x0],
[0x0, b, 0x0],
[0x0, b, 0x0]
]
end
def char_matrix(0x17, b) do
[
[0x0, 0x0, 0x0, 0x0, b],
[0x0, 0x0, b, b, 0x0],
[b, b, 0x0, 0x0, 0x0],
[0x0, 0x0, b, b, 0x0],
[0x0, 0x0, 0x0, 0x0, b],
[0x0, 0x0, 0x0, 0x0, 0x0],
[b, b, b, b, b]
]
end
def char_matrix(0x18, b) do
[
[0x0, 0x0, 0x0, b, 0x0],
[0x0, 0x0, 0x0, b, 0x0],
[b, b, b, b, b],
[0x0, 0x0, b, 0x0, 0x0],
[b, b, b, b, b],
[0x0, b, 0x0, 0x0, 0x0],
[0x0, b, 0x0, 0x0, 0x0]
]
end
def char_matrix(0x19, b) do
[
[b, 0x0, 0x0, 0x0, 0x0],
[0x0, b, b, 0x0, 0x0],
[0x0, 0x0, 0x0, b, b],
[0x0, b, b, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[b, b, b, b, b]
]
end
def char_matrix(0x1A, b) do
[
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[b, b, b]
]
end
# Bitmap font table: each char_matrix/2 clause maps a character code to its
# glyph, expressed as a list of pixel rows — `b` marks a lit pixel and 0x0 an
# unlit one. The row grid below each head is a literal picture of the glyph.
# 0x1B — control code (ESC); custom symbol in this font
def char_matrix(0x1B, b) do
[
[0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0],
[b, b, b, b],
[b, 0x0, 0x0, 0x0],
[b, b, b, 0x0],
[b, 0x0, 0x0, 0x0],
[b, b, b, b]
]
end
# 0x1C — custom symbol (drawn as a right-pointing arrow)
def char_matrix(0x1C, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, 0x0, 0x0, b, 0x0],
[b, b, b, b, b],
[0x0, 0x0, 0x0, b, 0x0],
[0x0, 0x0, b, 0x0, 0x0]
]
end
# 0x1D — custom symbol (vertical bar beside a hollow box)
def char_matrix(0x1D, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, b, b, b],
[b, 0x0, b, 0x0, b],
[b, 0x0, b, 0x0, b],
[b, 0x0, b, 0x0, b],
[b, 0x0, b, b, b]
]
end
# 0x1E — custom symbol (drawn as an up-pointing arrow)
def char_matrix(0x1E, b) do
[
[0x0, 0x0, b, 0x0, 0x0],
[0x0, b, b, b, 0x0],
[b, 0x0, b, 0x0, b],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, 0x0, b, 0x0, 0x0]
]
end
# 0x1F — custom symbol (drawn as a down-pointing arrow)
def char_matrix(0x1F, b) do
[
[0x0, 0x0, b, 0x0, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[b, 0x0, b, 0x0, b],
[0x0, b, b, b, 0x0],
[0x0, 0x0, b, 0x0, 0x0]
]
end
# 0x20 ' ' (space) — all pixels unlit; `b` intentionally unused
def char_matrix(0x20, _b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0]
]
end
# 0x21 '!'
def char_matrix(0x21, b) do
[[b], [b], [b], [b], [0x0], [b], [b]]
end
# 0x22 '"'
def char_matrix(0x22, b) do
[
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[b, 0x0, b],
[b, 0x0, b],
[b, 0x0, b]
]
end
# 0x23 '#'
def char_matrix(0x23, b) do
[
[0x0, b, 0x0, b, 0x0],
[0x0, b, 0x0, b, 0x0],
[b, b, b, b, b],
[0x0, b, 0x0, b, 0x0],
[b, b, b, b, b],
[0x0, b, 0x0, b, 0x0],
[0x0, b, 0x0, b, 0x0]
]
end
# 0x24 '$'
def char_matrix(0x24, b) do
[
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[b, 0x0, 0x0],
[b, 0x0, b],
[b, b, b],
[0x0, 0x0, b],
[0x0, 0x0, b]
]
end
# 0x25 '%'
def char_matrix(0x25, b) do
[
[b, b, 0x0, 0x0, 0x0],
[b, b, 0x0, 0x0, b],
[0x0, 0x0, 0x0, b, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, b, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, b, b],
[0x0, 0x0, 0x0, b, b]
]
end
# 0x26 '&'
def char_matrix(0x26, b) do
[
[0x0, b, 0x0, 0x0, 0x0],
[b, 0x0, b, 0x0, 0x0],
[b, 0x0, b, 0x0, 0x0],
[0x0, b, 0x0, 0x0, 0x0],
[b, 0x0, b, 0x0, b],
[b, 0x0, 0x0, b, 0x0],
[0x0, b, b, 0x0, b]
]
end
# 0x27 '\'' (apostrophe)
def char_matrix(0x27, b) do
[[0x0], [0x0], [0x0], [0x0], [b], [b], [b]]
end
# 0x28 '('
def char_matrix(0x28, b) do
[
[0x0, 0x0, b],
[0x0, b, 0x0],
[b, 0x0, 0x0],
[b, 0x0, 0x0],
[b, 0x0, 0x0],
[0x0, b, 0x0],
[0x0, 0x0, b]
]
end
# 0x29 ')'
def char_matrix(0x29, b) do
[
[b, 0x0, 0x0],
[0x0, b, 0x0],
[0x0, 0x0, b],
[0x0, 0x0, b],
[0x0, 0x0, b],
[0x0, b, 0x0],
[b, 0x0, 0x0]
]
end
# 0x2A '*'
def char_matrix(0x2A, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[b, 0x0, b, 0x0, b],
[0x0, b, b, b, 0x0],
[b, 0x0, b, 0x0, b],
[0x0, 0x0, b, 0x0, 0x0]
]
end
# 0x2B '+'
def char_matrix(0x2B, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[b, b, b, b, b],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, 0x0, b, 0x0, 0x0]
]
end
# 0x2C ','
def char_matrix(0x2C, b) do
[[0x0, 0x0], [0x0, 0x0], [0x0, 0x0], [0x0, 0x0], [b, b], [0x0, b], [b, 0x0]]
end
# 0x2D '-'
def char_matrix(0x2D, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[b, b, b, b, b]
]
end
# 0x2E '.'
def char_matrix(0x2E, b) do
[[0x0, 0x0], [0x0, 0x0], [0x0, 0x0], [0x0, 0x0], [0x0, 0x0], [b, b], [b, b]]
end
# 0x2F '/'
def char_matrix(0x2F, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, b],
[0x0, 0x0, 0x0, b, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, b, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, 0x0]
]
end
# Glyph clauses for the digits and the symbols ':' through '@'. Each clause
# returns the character's bitmap as pixel rows (`b` = lit, 0x0 = unlit); the
# literal layout below each head is a picture of the glyph.
# 0x30 '0' (with a diagonal slash to distinguish it from 'O')
def char_matrix(0x30, b) do
[
[0x0, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, b, b],
[b, 0x0, b, 0x0, b],
[b, b, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, 0x0]
]
end
# 0x31 '1'
def char_matrix(0x31, b) do
[
[0x0, b, 0x0],
[b, b, 0x0],
[0x0, b, 0x0],
[0x0, b, 0x0],
[0x0, b, 0x0],
[0x0, b, 0x0],
[b, b, b]
]
end
# 0x32 '2'
def char_matrix(0x32, b) do
[
[0x0, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[0x0, 0x0, 0x0, 0x0, b],
[0x0, 0x0, 0x0, b, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, b, 0x0, 0x0, 0x0],
[b, b, b, b, b]
]
end
# 0x33 '3'
def char_matrix(0x33, b) do
[
[b, b, b, b, b],
[0x0, 0x0, 0x0, b, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, 0x0, 0x0, b, 0x0],
[0x0, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, 0x0]
]
end
# 0x34 '4'
def char_matrix(0x34, b) do
[
[0x0, 0x0, 0x0, b, 0x0],
[0x0, 0x0, b, b, 0x0],
[0x0, b, 0x0, b, 0x0],
[b, 0x0, 0x0, b, 0x0],
[b, b, b, b, b],
[0x0, 0x0, 0x0, b, 0x0],
[0x0, 0x0, 0x0, b, 0x0]
]
end
# 0x35 '5'
def char_matrix(0x35, b) do
[
[b, b, b, b, b],
[b, 0x0, 0x0, 0x0, 0x0],
[b, b, b, b, 0x0],
[0x0, 0x0, 0x0, 0x0, b],
[0x0, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, 0x0]
]
end
# 0x36 '6'
def char_matrix(0x36, b) do
[
[0x0, 0x0, b, b, 0x0],
[0x0, b, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, 0x0],
[b, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, 0x0]
]
end
# 0x37 '7'
def char_matrix(0x37, b) do
[
[b, b, b, b, b],
[0x0, 0x0, 0x0, 0x0, b],
[0x0, 0x0, 0x0, b, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, b, 0x0, 0x0, 0x0],
[0x0, b, 0x0, 0x0, 0x0],
[0x0, b, 0x0, 0x0, 0x0]
]
end
# 0x38 '8'
def char_matrix(0x38, b) do
[
[0x0, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, 0x0]
]
end
# 0x39 '9'
def char_matrix(0x39, b) do
[
[0x0, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, b],
[0x0, 0x0, 0x0, 0x0, b],
[0x0, 0x0, 0x0, b, 0x0],
[0x0, b, b, 0x0, 0x0]
]
end
# 0x3A ':'
def char_matrix(0x3A, b) do
[[0x0, 0x0], [0x0, 0x0], [b, b], [b, b], [0x0, 0x0], [b, b], [b, b]]
end
# 0x3B ';'
def char_matrix(0x3B, b) do
[[0x0, 0x0], [b, b], [b, b], [0x0, 0x0], [b, b], [0x0, b], [b, 0x0]]
end
# 0x3C '<'
def char_matrix(0x3C, b) do
[
[0x0, 0x0, 0x0, b],
[0x0, 0x0, b, 0x0],
[0x0, b, 0x0, 0x0],
[b, 0x0, 0x0, 0x0],
[0x0, b, 0x0, 0x0],
[0x0, 0x0, b, 0x0],
[0x0, 0x0, 0x0, b]
]
end
# 0x3D '='
def char_matrix(0x3D, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[b, b, b, b, b],
[0x0, 0x0, 0x0, 0x0, 0x0],
[b, b, b, b, b]
]
end
# 0x3E '>'
def char_matrix(0x3E, b) do
[
[b, 0x0, 0x0, 0x0],
[0x0, b, 0x0, 0x0],
[0x0, 0x0, b, 0x0],
[0x0, 0x0, 0x0, b],
[0x0, 0x0, b, 0x0],
[0x0, b, 0x0, 0x0],
[b, 0x0, 0x0, 0x0]
]
end
# 0x3F '?'
def char_matrix(0x3F, b) do
[
[0x0, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[0x0, 0x0, 0x0, 0x0, b],
[0x0, 0x0, 0x0, b, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, b, 0x0, 0x0]
]
end
# 0x40 '@'
def char_matrix(0x40, b) do
[
[0x0, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, b, 0x0, b],
[b, 0x0, b, b, b],
[b, 0x0, b, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, 0x0],
[0x0, b, b, b, b]
]
end
# Glyph clauses for the uppercase letters 'A'–'Z' and the symbols '['–'`'.
# Each clause returns the character's bitmap as pixel rows (`b` = lit,
# 0x0 = unlit); the literal layout below each head is a picture of the glyph.
# 0x41 'A'
def char_matrix(0x41, b) do
[
[0x0, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, b, b, b, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b]
]
end
# 0x42 'B'
def char_matrix(0x42, b) do
[
[b, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, b, b, b, 0x0]
]
end
# 0x43 'C'
def char_matrix(0x43, b) do
[
[0x0, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, 0x0]
]
end
# 0x44 'D'
def char_matrix(0x44, b) do
[
[b, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, b, b, b, 0x0]
]
end
# 0x45 'E'
def char_matrix(0x45, b) do
[
[b, b, b, b, b],
[b, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, 0x0],
[b, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, 0x0],
[b, b, b, b, b]
]
end
# 0x46 'F'
def char_matrix(0x46, b) do
[
[b, b, b, b, b],
[b, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, 0x0],
[b, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, 0x0]
]
end
# 0x47 'G'
def char_matrix(0x47, b) do
[
[0x0, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, b, b, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, b]
]
end
# 0x48 'H'
def char_matrix(0x48, b) do
[
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, b, b, b, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b]
]
end
# 0x49 'I'
def char_matrix(0x49, b) do
[
[b, b, b],
[0x0, b, 0x0],
[0x0, b, 0x0],
[0x0, b, 0x0],
[0x0, b, 0x0],
[0x0, b, 0x0],
[b, b, b]
]
end
# 0x4A 'J'
def char_matrix(0x4A, b) do
[
[0x0, 0x0, b, b, b],
[0x0, 0x0, 0x0, b, 0x0],
[0x0, 0x0, 0x0, b, 0x0],
[0x0, 0x0, 0x0, b, 0x0],
[0x0, 0x0, 0x0, b, 0x0],
[b, 0x0, 0x0, b, 0x0],
[0x0, b, b, 0x0, 0x0]
]
end
# 0x4B 'K'
def char_matrix(0x4B, b) do
[
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, b, 0x0],
[b, 0x0, b, 0x0, 0x0],
[b, b, 0x0, 0x0, 0x0],
[b, 0x0, b, 0x0, 0x0],
[b, 0x0, 0x0, b, 0x0],
[b, 0x0, 0x0, 0x0, b]
]
end
# 0x4C 'L'
def char_matrix(0x4C, b) do
[
[b, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, 0x0],
[b, b, b, b, b]
]
end
# 0x4D 'M'
def char_matrix(0x4D, b) do
[
[b, 0x0, 0x0, 0x0, b],
[b, b, 0x0, b, b],
[b, 0x0, b, 0x0, b],
[b, 0x0, b, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b]
]
end
# 0x4E 'N'
def char_matrix(0x4E, b) do
[
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, b, 0x0, 0x0, b],
[b, 0x0, b, 0x0, b],
[b, 0x0, 0x0, b, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b]
]
end
# 0x4F 'O'
def char_matrix(0x4F, b) do
[
[0x0, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, 0x0]
]
end
# 0x50 'P'
def char_matrix(0x50, b) do
[
[b, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, 0x0]
]
end
# 0x51 'Q'
def char_matrix(0x51, b) do
[
[0x0, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, b, 0x0, b],
[b, 0x0, 0x0, b, 0x0],
[0x0, b, b, 0x0, b]
]
end
# 0x52 'R'
def char_matrix(0x52, b) do
[
[b, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, b, b, b, 0x0],
[b, 0x0, b, 0x0, 0x0],
[b, 0x0, 0x0, b, 0x0],
[b, 0x0, 0x0, 0x0, b]
]
end
# 0x53 'S'
def char_matrix(0x53, b) do
[
[0x0, b, b, b, b],
[b, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, 0x0],
[0x0, b, b, b, 0x0],
[0x0, 0x0, 0x0, 0x0, b],
[0x0, 0x0, 0x0, 0x0, b],
[b, b, b, b, 0x0]
]
end
# 0x54 'T'
def char_matrix(0x54, b) do
[
[b, b, b, b, b],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, 0x0, b, 0x0, 0x0]
]
end
# 0x55 'U'
def char_matrix(0x55, b) do
[
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, 0x0]
]
end
# 0x56 'V'
def char_matrix(0x56, b) do
[
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, 0x0, b, 0x0],
[0x0, b, 0x0, b, 0x0],
[0x0, 0x0, b, 0x0, 0x0]
]
end
# 0x57 'W'
def char_matrix(0x57, b) do
[
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, b, 0x0, b],
[b, 0x0, b, 0x0, b],
[b, 0x0, b, 0x0, b],
[0x0, b, 0x0, b, 0x0]
]
end
# 0x58 'X'
def char_matrix(0x58, b) do
[
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, 0x0, b, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, b, 0x0, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b]
]
end
# 0x59 'Y'
def char_matrix(0x59, b) do
[
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, 0x0, b, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, 0x0, b, 0x0, 0x0]
]
end
# 0x5A 'Z'
def char_matrix(0x5A, b) do
[
[b, b, b, b, b],
[0x0, 0x0, 0x0, 0x0, b],
[0x0, 0x0, 0x0, b, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, b, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, 0x0],
[b, b, b, b, b]
]
end
# 0x5B '['
def char_matrix(0x5B, b) do
[[b, b], [b, 0x0], [b, 0x0], [b, 0x0], [b, 0x0], [b, 0x0], [b, b]]
end
# 0x5C '\\' (backslash)
def char_matrix(0x5C, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, 0x0],
[0x0, b, 0x0, 0x0, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, 0x0, 0x0, b, 0x0],
[0x0, 0x0, 0x0, 0x0, b]
]
end
# 0x5D ']'
def char_matrix(0x5D, b) do
[[b, b], [0x0, b], [0x0, b], [0x0, b], [0x0, b], [0x0, b], [b, b]]
end
# 0x5E '^'
def char_matrix(0x5E, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, b, 0x0, b, 0x0],
[b, 0x0, 0x0, 0x0, b]
]
end
# 0x5F '_'
def char_matrix(0x5F, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[b, b, b, b, b]
]
end
# 0x60 '`' (backtick)
def char_matrix(0x60, b) do
[[0x0, 0x0], [0x0, 0x0], [0x0, 0x0], [0x0, 0x0], [b, 0x0], [b, 0x0], [0x0, b]]
end
# Glyph clauses for the lowercase letters 'a'–'z', braces, and DEL (0x7F).
# Each clause returns the character's bitmap as pixel rows (`b` = lit,
# 0x0 = unlit); the literal layout below each head is a picture of the glyph.
# 0x61 'a'
def char_matrix(0x61, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, b, b, b, 0x0],
[0x0, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, b]
]
end
# 0x62 'b'
def char_matrix(0x62, b) do
[
[b, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, b, b, 0x0],
[b, b, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, b, b, b, 0x0]
]
end
# 0x63 'c'
def char_matrix(0x63, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, 0x0]
]
end
# 0x64 'd'
def char_matrix(0x64, b) do
[
[0x0, 0x0, 0x0, 0x0, b],
[0x0, 0x0, 0x0, 0x0, b],
[0x0, b, b, 0x0, b],
[b, 0x0, 0x0, b, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, b]
]
end
# 0x65 'e'
def char_matrix(0x65, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, b, b, b, b],
[b, 0x0, 0x0, 0x0, 0x0],
[0x0, b, b, b, 0x0]
]
end
# 0x66 'f'
def char_matrix(0x66, b) do
[
[0x0, 0x0, b, b, 0x0],
[0x0, b, 0x0, 0x0, b],
[0x0, b, 0x0, 0x0, 0x0],
[b, b, b, 0x0, 0x0],
[0x0, b, 0x0, 0x0, 0x0],
[0x0, b, 0x0, 0x0, 0x0],
[0x0, b, 0x0, 0x0, 0x0]
]
end
# 0x67 'g'
def char_matrix(0x67, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, b, b, b, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, b],
[0x0, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, 0x0]
]
end
# 0x68 'h'
def char_matrix(0x68, b) do
[
[b, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, b, b, 0x0],
[b, b, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b]
]
end
# 0x69 'i'
def char_matrix(0x69, b) do
[
[0x0, b, 0x0],
[0x0, 0x0, 0x0],
[b, b, 0x0],
[0x0, b, 0x0],
[0x0, b, 0x0],
[0x0, b, 0x0],
[b, b, b]
]
end
# 0x6A 'j'
def char_matrix(0x6A, b) do
[
[0x0, 0x0, 0x0, b],
[0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, b, b],
[0x0, 0x0, 0x0, b],
[0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, b],
[0x0, b, b, 0x0]
]
end
# 0x6B 'k'
def char_matrix(0x6B, b) do
[
[b, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, b],
[b, 0x0, b, 0x0],
[b, b, 0x0, 0x0],
[b, 0x0, b, 0x0],
[b, 0x0, 0x0, b]
]
end
# 0x6C 'l'
def char_matrix(0x6C, b) do
[
[b, b, 0x0],
[0x0, b, 0x0],
[0x0, b, 0x0],
[0x0, b, 0x0],
[0x0, b, 0x0],
[0x0, b, 0x0],
[b, b, b]
]
end
# 0x6D 'm'
def char_matrix(0x6D, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[b, b, 0x0, b, 0x0],
[b, 0x0, b, 0x0, b],
[b, 0x0, b, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b]
]
end
# 0x6E 'n'
def char_matrix(0x6E, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, b, b, 0x0],
[b, b, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b]
]
end
# 0x6F 'o'
def char_matrix(0x6F, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, 0x0]
]
end
# 0x70 'p'
def char_matrix(0x70, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[b, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, 0x0]
]
end
# 0x71 'q'
def char_matrix(0x71, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, b, b, 0x0, b],
[b, 0x0, 0x0, b, b],
[0x0, b, b, b, b],
[0x0, 0x0, 0x0, 0x0, b],
[0x0, 0x0, 0x0, 0x0, b]
]
end
# 0x72 'r'
def char_matrix(0x72, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, b, b, 0x0],
[b, b, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, 0x0]
]
end
# 0x73 's'
def char_matrix(0x73, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, 0x0],
[0x0, b, b, b, 0x0],
[0x0, 0x0, 0x0, 0x0, b],
[b, b, b, b, 0x0]
]
end
# 0x74 't'
def char_matrix(0x74, b) do
[
[0x0, b, 0x0, 0x0, 0x0],
[0x0, b, 0x0, 0x0, 0x0],
[b, b, b, 0x0, 0x0],
[0x0, b, 0x0, 0x0, 0x0],
[0x0, b, 0x0, 0x0, 0x0],
[0x0, b, 0x0, 0x0, b],
[0x0, 0x0, b, b, 0x0]
]
end
# 0x75 'u'
def char_matrix(0x75, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, b, b],
[0x0, b, b, 0x0, b]
]
end
# 0x76 'v'
def char_matrix(0x76, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, 0x0, b, 0x0],
[0x0, 0x0, b, 0x0, 0x0]
]
end
# 0x77 'w'
def char_matrix(0x77, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, b, 0x0, b],
[b, 0x0, b, 0x0, b],
[0x0, b, 0x0, b, 0x0]
]
end
# 0x78 'x'
def char_matrix(0x78, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, 0x0, b, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, b, 0x0, b, 0x0],
[b, 0x0, 0x0, 0x0, b]
]
end
# 0x79 'y'
def char_matrix(0x79, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, b],
[0x0, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, 0x0]
]
end
# 0x7A 'z'
def char_matrix(0x7A, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[b, b, b, b, b],
[0x0, 0x0, 0x0, b, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, b, 0x0, 0x0, 0x0],
[b, b, b, b, b]
]
end
# 0x7B '{'
def char_matrix(0x7B, b) do
[
[0x0, 0x0, b, b],
[0x0, b, 0x0, 0x0],
[0x0, b, 0x0, 0x0],
[b, 0x0, 0x0, 0x0],
[0x0, b, 0x0, 0x0],
[0x0, b, 0x0, 0x0],
[0x0, 0x0, b, b]
]
end
# 0x7C '|'
def char_matrix(0x7C, b) do
[[b], [b], [b], [b], [b], [b], [b]]
end
# 0x7D '}'
def char_matrix(0x7D, b) do
[
[b, b, 0x0, 0x0],
[0x0, 0x0, b, 0x0],
[0x0, 0x0, b, 0x0],
[0x0, 0x0, 0x0, b],
[0x0, 0x0, b, 0x0],
[0x0, 0x0, b, 0x0],
[b, b, 0x0, 0x0]
]
end
# 0x7E '~'
def char_matrix(0x7E, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, b, 0x0, 0x0, 0x0],
[b, 0x0, b, 0x0, b],
[0x0, 0x0, 0x0, b, 0x0]
]
end
# 0x7F (DEL) — rendered as a filled "house"/error box placeholder
def char_matrix(0x7F, b) do
[
[b, b, b, b, b],
[b, b, b, b, b],
[b, 0x0, 0x0, 0x0, b],
[b, b, b, b, b],
[b, 0x0, 0x0, 0x0, b],
[b, b, b, b, b],
[b, b, b, b, b]
]
end
# Glyph clauses for codes 0x80–0xA0. These are outside ASCII; the exact
# character mapping for this range is not visible in this file (it does not
# match Latin-1's C1 controls), so per-glyph identifications below are
# descriptive only — TODO confirm against the encoder/caller of this table.
# 0x80 — extended code; glyph resembles a three-bar 'E'/euro-like symbol
def char_matrix(0x80, b) do
[
[0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0],
[0x0, b, b, b],
[b, 0x0, 0x0, 0x0],
[b, b, b, b],
[b, 0x0, 0x0, 0x0],
[0x0, b, b, b]
]
end
# 0x81 — extended code; glyph is a right-pointing triangle
def char_matrix(0x81, b) do
[
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[b, 0x0, 0x0],
[b, b, 0x0],
[b, b, b],
[b, b, 0x0],
[b, 0x0, 0x0]
]
end
# 0x82 — extended code
def char_matrix(0x82, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, b, 0x0, 0x0, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, 0x0, 0x0, b, 0x0],
[0x0, 0x0, b, b, 0x0],
[0x0, b, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b]
]
end
# 0x83 — extended code
def char_matrix(0x83, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, b, 0x0],
[b, 0x0, 0x0, b, 0x0],
[b, 0x0, 0x0, b, 0x0],
[b, b, b, 0x0, b],
[b, 0x0, 0x0, 0x0, 0x0]
]
end
# 0x84 — extended code
def char_matrix(0x84, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[b, b, b, b, b],
[0x0, b, 0x0, b, 0x0],
[0x0, b, 0x0, b, 0x0],
[0x0, b, 0x0, b, 0x0],
[b, 0x0, 0x0, b, b]
]
end
# 0x85 — extended code
def char_matrix(0x85, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, b, b, 0x0],
[0x0, b, 0x0, 0x0, b],
[0x0, b, 0x0, 0x0, b],
[0x0, b, b, b, 0x0],
[0x0, b, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, 0x0]
]
end
# 0x86 — extended code; glyph is a bow-tie/hourglass shape
def char_matrix(0x86, b) do
[
[b, b, b, b, b],
[0x0, b, 0x0, 0x0, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, 0x0, 0x0, b, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, b, 0x0, 0x0, 0x0],
[b, b, b, b, b]
]
end
# 0x87 — extended code
def char_matrix(0x87, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, b, b, b, b],
[b, 0x0, 0x0, b, 0x0],
[b, 0x0, 0x0, b, 0x0],
[b, 0x0, 0x0, b, 0x0],
[0x0, b, b, 0x0, 0x0]
]
end
# 0x88 — extended code
def char_matrix(0x88, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, b, b, b, b],
[b, 0x0, b, 0x0, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, 0x0, b, 0x0, b],
[0x0, 0x0, 0x0, b, 0x0]
]
end
# 0x89 — extended code; glyph resembles a crossed circle/target on a stem
def char_matrix(0x89, b) do
[
[0x0, 0x0, b, 0x0, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, b, b, b, 0x0],
[b, 0x0, b, 0x0, b],
[0x0, b, b, b, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, 0x0, b, 0x0, 0x0]
]
end
# 0x8A — extended code
def char_matrix(0x8A, b) do
[
[0x0, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, 0x0, b, 0x0],
[0x0, b, 0x0, b, 0x0],
[b, b, 0x0, b, b]
]
end
# 0x8B — extended code
def char_matrix(0x8B, b) do
[
[b, b, b, b, b],
[0x0, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, 0x0, b, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, b, 0x0, b, 0x0],
[b, 0x0, 0x0, 0x0, b]
]
end
# 0x8C — extended code
def char_matrix(0x8C, b) do
[
[b, b, b, b, b],
[0x0, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, b],
[0x0, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, 0x0]
]
end
# 0x8D — extended code; glyph is a small checker pattern
def char_matrix(0x8D, b) do
[
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[b, 0x0, b],
[0x0, b, 0x0],
[b, 0x0, b]
]
end
# 0x8E — extended code; glyph is a dotted baseline
def char_matrix(0x8E, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, b, 0x0, b]
]
end
# 0x8F — extended code; glyph is a left-pointing triangle
def char_matrix(0x8F, b) do
[
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[0x0, 0x0, b],
[0x0, b, b],
[b, b, b],
[0x0, b, b],
[0x0, 0x0, b]
]
end
# 0x90 — extended code; glyph is a filled lower-half square
def char_matrix(0x90, b) do
[
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[b, b, b],
[b, b, b],
[b, b, b]
]
end
# 0x91 — extended code; glyph is a diagonal stroke
def char_matrix(0x91, b) do
[
[0x0, 0x0, b],
[0x0, 0x0, b],
[0x0, 0x0, b],
[0x0, b, 0x0],
[b, 0x0, 0x0],
[b, 0x0, 0x0],
[b, 0x0, 0x0]
]
end
# 0x92 — extended code; glyph is a bottom bar
def char_matrix(0x92, b) do
[
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[b, b, b]
]
end
# 0x93 — extended code
def char_matrix(0x93, b) do
[
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[b, b, b],
[0x0, 0x0, b],
[b, b, b],
[b, 0x0, 0x0],
[b, b, b]
]
end
# 0x94 — extended code; glyph is a small hollow box
def char_matrix(0x94, b) do
[
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[b, b, b],
[b, 0x0, b],
[b, b, b]
]
end
# 0x95 — extended code
def char_matrix(0x95, b) do
[
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[b, b, 0x0],
[0x0, 0x0, b],
[0x0, b, 0x0],
[0x0, 0x0, b],
[b, b, 0x0]
]
end
# 0x96 — extended code; fully lit block
def char_matrix(0x96, b) do
[
[b, b, b, b, b],
[b, b, b, b, b],
[b, b, b, b, b],
[b, b, b, b, b],
[b, b, b, b, b],
[b, b, b, b, b],
[b, b, b, b, b]
]
end
# 0x97 — extended code
def char_matrix(0x97, b) do
[
[0x0, b, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0],
[b, b, 0x0, 0x0],
[0x0, b, 0x0, 0x0],
[0x0, b, 0x0, 0x0],
[0x0, b, 0x0, b],
[0x0, 0x0, b, 0x0]
]
end
# 0x98 — extended code
def char_matrix(0x98, b) do
[
[0x0, 0x0, b, 0x0, 0x0],
[0x0, b, 0x0, b, 0x0],
[b, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, 0x0]
]
end
# 0x99 — extended code
def char_matrix(0x99, b) do
[
[b, 0x0, 0x0, 0x0, 0x0],
[0x0, b, 0x0, 0x0, b],
[0x0, b, 0x0, b, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, b, 0x0, b, 0x0],
[b, 0x0, 0x0, b, 0x0],
[0x0, 0x0, 0x0, 0x0, b]
]
end
# 0x9A — extended code
def char_matrix(0x9A, b) do
[
[b, b, b, b, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, b, 0x0, 0x0],
[b, b, b, 0x0, 0x0],
[b, 0x0, b, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, 0x0]
]
end
# 0x9B — extended code
def char_matrix(0x9B, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, 0x0],
[0x0, b, b, 0x0, 0x0]
]
end
# 0x9C — extended code; glyph is a small 'L' shape
def char_matrix(0x9C, b) do
[
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[b, 0x0, 0x0],
[b, 0x0, 0x0],
[b, 0x0, 0x0],
[b, 0x0, 0x0],
[b, b, b]
]
end
# 0x9D — extended code
def char_matrix(0x9D, b) do
[
[b, 0x0, 0x0, 0x0, b],
[b, b, 0x0, 0x0, b],
[b, b, b, 0x0, b],
[b, b, b, b, b],
[b, 0x0, b, b, b],
[b, 0x0, 0x0, b, b],
[b, 0x0, 0x0, 0x0, b]
]
end
# 0x9E — extended code
def char_matrix(0x9E, b) do
[
[b, 0x0, b, 0x0, 0x0],
[0x0, b, 0x0, b, 0x0],
[0x0, 0x0, b, 0x0, b],
[0x0, 0x0, b, 0x0, b],
[0x0, 0x0, b, 0x0, b],
[0x0, b, 0x0, b, 0x0],
[b, 0x0, b, 0x0, 0x0]
]
end
# 0x9F — extended code; glyph is a small diamond
def char_matrix(0x9F, b) do
[
[0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0],
[0x0, b, 0x0, 0x0],
[b, b, b, 0x0],
[b, b, b, b],
[b, b, b, 0x0],
[0x0, b, 0x0, 0x0]
]
end
# 0xA0 — rendered blank (consistent with a non-breaking space); `b` unused
def char_matrix(0xA0, _b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0]
]
end
# Glyph clauses for codes 0xA1–0xE4. Several glyphs here visually match the
# Latin-1 (ISO 8859-1) characters at these code points (e.g. 0xA1 '¡',
# 0xB0 '°', 0xC0–0xE4 accented letters), so per-glyph labels below assume a
# Latin-1 mapping — TODO confirm against the encoder/caller of this table.
# Where the glyph does not obviously match, only a description is given.
# 0xA1 — Latin-1 '¡' (inverted exclamation mark), assumed
def char_matrix(0xA1, b) do
[[b], [b], [0x0], [b], [b], [b], [b]]
end
# 0xA2 — Latin-1 '¢' (cent sign), assumed
def char_matrix(0xA2, b) do
[
[0x0, 0x0, 0x0, 0x0, b],
[0x0, 0x0, 0x0, b, 0x0],
[0x0, b, b, b, 0x0],
[b, 0x0, 0x0, b, 0x0],
[b, 0x0, b, 0x0, 0x0],
[b, 0x0, b, 0x0, b],
[0x0, b, b, b, 0x0]
]
end
# 0xA3 — Latin-1 '£' (pound sign), assumed
def char_matrix(0xA3, b) do
[
[0x0, b, b, 0x0],
[b, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0],
[b, b, b, 0x0],
[b, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0],
[b, b, b, b]
]
end
# 0xA4 — Latin-1 '¤' (currency sign), assumed
def char_matrix(0xA4, b) do
[
[b, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, 0x0],
[0x0, b, 0x0, b, 0x0],
[0x0, b, 0x0, b, 0x0],
[0x0, b, 0x0, b, 0x0],
[0x0, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b]
]
end
# 0xA5 — Latin-1 '¥' (yen sign), assumed
def char_matrix(0xA5, b) do
[
[b, 0x0, 0x0, 0x0, b],
[0x0, b, 0x0, b, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, b, b, b, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, b, b, b, 0x0],
[0x0, 0x0, b, 0x0, 0x0]
]
end
# 0xA6 — Latin-1 '¦' (broken bar), assumed
def char_matrix(0xA6, b) do
[[b], [b], [b], [0x0], [b], [b], [b]]
end
# 0xA7 — Latin-1 '§' (section sign), assumed
def char_matrix(0xA7, b) do
[
[0x0, 0x0, b, b, b],
[0x0, b, 0x0, 0x0, 0x0],
[0x0, 0x0, b, b, 0x0],
[0x0, b, 0x0, b, 0x0],
[0x0, b, b, 0x0, 0x0],
[0x0, 0x0, 0x0, b, 0x0],
[b, b, b, 0x0, 0x0]
]
end
# 0xA8 — two dots on the baseline (Latin-1 '¨' diaeresis? — mapping unclear)
def char_matrix(0xA8, b) do
[
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[b, 0x0, b]
]
end
# 0xA9 — Latin-1 '©' (copyright sign), assumed
def char_matrix(0xA9, b) do
[
[0x0, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, b, b, b],
[b, 0x0, b, b, b],
[b, 0x0, b, b, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, 0x0]
]
end
# 0xAA — Latin-1 'ª' (feminine ordinal), assumed
def char_matrix(0xAA, b) do
[
[b, b, b],
[0x0, 0x0, b],
[b, b, b],
[b, 0x0, b],
[b, b, b],
[0x0, 0x0, 0x0],
[b, b, b]
]
end
# 0xAB — Latin-1 '«' (left guillemet? — drawn as chevrons; mapping unclear)
def char_matrix(0xAB, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, b, 0x0, 0x0, b],
[b, 0x0, 0x0, b, 0x0],
[0x0, b, 0x0, 0x0, b]
]
end
# 0xAC — Latin-1 '¬' (not sign), assumed
def char_matrix(0xAC, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[b, b, b, b, b],
[0x0, 0x0, 0x0, 0x0, b],
[0x0, 0x0, 0x0, 0x0, b]
]
end
# 0xAD — circled glyph (soft hyphen in Latin-1 — mapping unclear)
def char_matrix(0xAD, b) do
[
[0x0, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, b, 0x0, b],
[b, 0x0, 0x0, b, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, b, 0x0, b],
[0x0, b, b, b, 0x0]
]
end
# 0xAE — bottom bar ('®' in Latin-1 — mapping unclear)
def char_matrix(0xAE, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[b, b, b, b, b]
]
end
# 0xAF — rendered blank ('¯' in Latin-1 — mapping unclear); `b` unused
def char_matrix(0xAF, _b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0]
]
end
# 0xB0 — Latin-1 '°' (degree sign), assumed
def char_matrix(0xB0, b) do
[
[0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0],
[0x0, b, b, 0x0],
[b, 0x0, 0x0, b],
[b, 0x0, 0x0, b],
[0x0, b, b, 0x0]
]
end
# 0xB1 — Latin-1 '±' (plus-minus sign), assumed
def char_matrix(0xB1, b) do
[
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[0x0, b, 0x0],
[b, b, b],
[0x0, b, 0x0],
[0x0, 0x0, 0x0],
[b, b, b]
]
end
# 0xB2 — Latin-1 '²' (superscript two), assumed
def char_matrix(0xB2, b) do
[
[0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0],
[0x0, b, b, 0x0],
[b, 0x0, 0x0, b],
[0x0, 0x0, b, 0x0],
[0x0, b, 0x0, 0x0],
[b, b, b, b]
]
end
# 0xB3 — Latin-1 '³' (superscript three), assumed
def char_matrix(0xB3, b) do
[
[0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0],
[b, b, b, 0x0],
[0x0, 0x0, 0x0, b],
[0x0, b, b, 0x0],
[0x0, 0x0, 0x0, b],
[b, b, b, 0x0]
]
end
# 0xB4 — Latin-1 '´' (acute accent, drawn at the baseline), assumed
def char_matrix(0xB4, b) do
[[0x0, 0x0], [0x0, 0x0], [0x0, 0x0], [0x0, 0x0], [0x0, 0x0], [0x0, b], [b, 0x0]]
end
# 0xB5 — Latin-1 'µ' (micro sign), assumed
def char_matrix(0xB5, b) do
[
[0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0],
[b, 0x0, b, 0x0],
[b, 0x0, b, 0x0],
[b, b, b, 0x0],
[b, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0]
]
end
# 0xB6 — Latin-1 '¶' (pilcrow), assumed
def char_matrix(0xB6, b) do
[
[0x0, b, b, b, b],
[b, b, 0x0, 0x0, b],
[b, b, 0x0, 0x0, b],
[0x0, b, 0x0, 0x0, b],
[0x0, 0x0, 0x0, 0x0, b],
[0x0, b, 0x0, 0x0, b],
[0x0, 0x0, b, b, 0x0]
]
end
# 0xB7 — Latin-1 '·' (middle dot, drawn at the baseline), assumed
def char_matrix(0xB7, b) do
[[0x0, 0x0], [0x0, 0x0], [0x0, 0x0], [0x0, 0x0], [0x0, 0x0], [b, b], [b, b]]
end
# 0xB8 — Latin-1 '¸' (cedilla), assumed
def char_matrix(0xB8, b) do
[[0x0], [0x0], [0x0], [0x0], [0x0], [b], [b]]
end
# 0xB9 — Latin-1 '¹' (superscript one), assumed
def char_matrix(0xB9, b) do
[
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[0x0, b, 0x0],
[b, b, 0x0],
[0x0, b, 0x0],
[0x0, b, 0x0],
[b, b, b]
]
end
# 0xBA — Latin-1 'º' (masculine ordinal), assumed
def char_matrix(0xBA, b) do
[
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[b, b, b],
[b, 0x0, b],
[b, b, b],
[0x0, 0x0, 0x0],
[b, b, b]
]
end
# 0xBB — Latin-1 '»' (right guillemet? — drawn as chevrons; mapping unclear)
def char_matrix(0xBB, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, b, 0x0],
[0x0, b, 0x0, 0x0, b],
[b, 0x0, 0x0, b, 0x0]
]
end
# 0xBC — dense inverse-video glyph ('¼' in Latin-1 — mapping unclear)
def char_matrix(0xBC, b) do
[
[b, b, b, b, b],
[b, b, b, 0x0, b],
[b, b, b, 0x0, b],
[b, b, b, b, b],
[b, 0x0, b, 0x0, b],
[b, 0x0, b, 0x0, b],
[b, b, b, b, b]
]
end
# 0xBD — dense inverse-video glyph ('½' in Latin-1 — mapping unclear)
def char_matrix(0xBD, b) do
[
[b, b, b, b, b],
[b, b, b, 0x0, b],
[b, b, b, 0x0, b],
[b, b, b, b, b],
[b, b, b, 0x0, b],
[b, b, b, 0x0, b],
[b, b, b, b, b]
]
end
# 0xBE — dense inverse-video glyph ('¾' in Latin-1 — mapping unclear)
def char_matrix(0xBE, b) do
[
[b, b, b, b, b],
[b, b, b, b, b],
[b, b, b, b, b],
[b, b, b, b, b],
[b, b, b, 0x0, b],
[b, b, b, 0x0, b],
[b, b, b, b, b]
]
end
# 0xBF — narrow glyph ('¿' in Latin-1 — mapping unclear)
def char_matrix(0xBF, b) do
[[0x0, 0x0], [0x0, b], [0x0, 0x0], [0x0, b], [b, 0x0], [b, 0x0], [0x0, b]]
end
# 0xC0 — Latin-1 'À' (A with grave), assumed
def char_matrix(0xC0, b) do
[
[0x0, 0x0, b, 0x0, 0x0],
[0x0, 0x0, 0x0, b, 0x0],
[0x0, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, b, b, b, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b]
]
end
# 0xC1 — Latin-1 'Á' (A with acute), assumed
def char_matrix(0xC1, b) do
[
[0x0, 0x0, b, 0x0, 0x0],
[0x0, b, 0x0, 0x0, 0x0],
[0x0, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, b, b, b, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b]
]
end
# 0xC2 — Latin-1 'Â' (A with circumflex), assumed
def char_matrix(0xC2, b) do
[
[0x0, 0x0, b, 0x0, 0x0],
[0x0, b, 0x0, b, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, b, b, b, b],
[b, 0x0, 0x0, 0x0, b]
]
end
# 0xC3 — Latin-1 'Ã' (A with tilde), assumed
def char_matrix(0xC3, b) do
[
[0x0, b, b, 0x0, b],
[b, 0x0, b, b, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, b, b, b, b],
[b, 0x0, 0x0, 0x0, b]
]
end
# 0xC4 — Latin-1 'Ä' (A with diaeresis), assumed
def char_matrix(0xC4, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, b, 0x0, b, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, b, b, b, b],
[b, 0x0, 0x0, 0x0, b]
]
end
# 0xC5 — Latin-1 'Å' (A with ring), assumed
def char_matrix(0xC5, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, b, b, b, b],
[b, 0x0, 0x0, 0x0, b]
]
end
# 0xC6 — Latin-1 'Æ' (AE ligature, lowercase-style), assumed
def char_matrix(0xC6, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, b, b, b],
[0x0, b, b, 0x0, 0x0],
[b, 0x0, b, b, 0x0],
[b, 0x0, b, 0x0, 0x0],
[b, 0x0, b, b, b]
]
end
# 0xC7 — Latin-1 'Ç' (C with cedilla), assumed
def char_matrix(0xC7, b) do
[
[0x0, 0x0, 0x0, 0x0],
[0x0, b, b, b],
[b, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0],
[0x0, b, b, b],
[0x0, 0x0, b, 0x0],
[0x0, b, 0x0, 0x0]
]
end
# 0xC8 — Latin-1 'È' (E with grave), assumed
def char_matrix(0xC8, b) do
[
[0x0, 0x0, b, 0x0, 0x0],
[0x0, 0x0, 0x0, b, 0x0],
[b, b, b, b, b],
[b, 0x0, 0x0, 0x0, 0x0],
[b, b, b, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, 0x0],
[b, b, b, b, b]
]
end
# 0xC9 — Latin-1 'É' (E with acute), assumed
def char_matrix(0xC9, b) do
[
[0x0, 0x0, b, 0x0, 0x0],
[0x0, b, 0x0, 0x0, 0x0],
[b, b, b, b, b],
[b, 0x0, 0x0, 0x0, 0x0],
[b, b, b, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, 0x0],
[b, b, b, b, b]
]
end
# 0xCA — Latin-1 'Ê' (E with circumflex), assumed
def char_matrix(0xCA, b) do
[
[0x0, 0x0, b, 0x0, 0x0],
[0x0, b, 0x0, b, 0x0],
[b, b, b, b, b],
[b, 0x0, 0x0, 0x0, 0x0],
[b, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, 0x0],
[b, b, b, b, b]
]
end
# 0xCB — Latin-1 'Ë' (E with diaeresis), assumed
def char_matrix(0xCB, b) do
[
[0x0, b, 0x0, b, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[b, b, b, b, b],
[b, 0x0, 0x0, 0x0, 0x0],
[b, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, 0x0],
[b, b, b, b, b]
]
end
# 0xCC — Latin-1 'Ì' (I with grave), assumed
def char_matrix(0xCC, b) do
[
[0x0, b, 0x0],
[0x0, 0x0, b],
[b, b, b],
[0x0, b, 0x0],
[0x0, b, 0x0],
[0x0, b, 0x0],
[b, b, b]
]
end
# 0xCD — Latin-1 'Í' (I with acute), assumed
def char_matrix(0xCD, b) do
[
[0x0, b, 0x0],
[b, 0x0, 0x0],
[b, b, b],
[0x0, b, 0x0],
[0x0, b, 0x0],
[0x0, b, 0x0],
[b, b, b]
]
end
# 0xCE — Latin-1 'Î' (I with circumflex), assumed
def char_matrix(0xCE, b) do
[
[0x0, b, 0x0],
[b, 0x0, b],
[0x0, 0x0, 0x0],
[b, b, b],
[0x0, b, 0x0],
[0x0, b, 0x0],
[b, b, b]
]
end
# 0xCF — Latin-1 'Ï' (I with diaeresis), assumed
def char_matrix(0xCF, b) do
[
[0x0, 0x0, 0x0],
[b, 0x0, b],
[0x0, 0x0, 0x0],
[b, b, b],
[0x0, b, 0x0],
[0x0, b, 0x0],
[b, b, b]
]
end
# 0xD0 — Latin-1 'Ð' (Eth), assumed
def char_matrix(0xD0, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, b, b, b, 0x0],
[0x0, b, 0x0, 0x0, b],
[b, b, b, 0x0, b],
[0x0, b, 0x0, 0x0, b],
[0x0, b, b, b, 0x0]
]
end
# 0xD1 — Latin-1 'Ñ' (N with tilde), assumed
def char_matrix(0xD1, b) do
[
[0x0, b, b, 0x0, b],
[b, 0x0, b, b, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, b, 0x0, 0x0, b],
[b, 0x0, b, 0x0, b],
[b, 0x0, 0x0, b, b]
]
end
# 0xD2 — Latin-1 'Ò' (O with grave), assumed
def char_matrix(0xD2, b) do
[
[0x0, 0x0, b, 0x0, 0x0],
[0x0, 0x0, 0x0, b, 0x0],
[0x0, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, 0x0]
]
end
# 0xD3 — Latin-1 'Ó' (O with acute), assumed
def char_matrix(0xD3, b) do
[
[0x0, 0x0, b, 0x0, 0x0],
[0x0, b, 0x0, 0x0, 0x0],
[0x0, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, 0x0]
]
end
# 0xD4 — Latin-1 'Ô' (O with circumflex), assumed
def char_matrix(0xD4, b) do
[
[0x0, 0x0, b, 0x0, 0x0],
[0x0, b, 0x0, b, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, 0x0]
]
end
# 0xD5 — Latin-1 'Õ' (O with tilde), assumed
def char_matrix(0xD5, b) do
[
[0x0, b, b, 0x0, b],
[b, 0x0, b, b, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, 0x0]
]
end
# 0xD6 — Latin-1 'Ö' (O with diaeresis), assumed
def char_matrix(0xD6, b) do
[
[0x0, b, 0x0, b, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, 0x0]
]
end
# 0xD7 — Latin-1 '×' (multiplication sign), assumed
def char_matrix(0xD7, b) do
[
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[0x0, 0x0, 0x0],
[b, 0x0, b],
[0x0, b, 0x0],
[b, 0x0, b]
]
end
# 0xD8 — Latin-1 'Ø' (O with stroke), assumed
def char_matrix(0xD8, b) do
[
[0x0, 0x0, 0x0, 0x0, b],
[0x0, 0x0, b, b, 0x0],
[0x0, b, 0x0, b, 0x0],
[0x0, b, b, b, 0x0],
[0x0, b, 0x0, b, 0x0],
[0x0, b, b, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, 0x0]
]
end
# 0xD9 — Latin-1 'Ù' (U with grave), assumed
def char_matrix(0xD9, b) do
[
[0x0, 0x0, b, 0x0, 0x0],
[0x0, 0x0, 0x0, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, 0x0]
]
end
# 0xDA — Latin-1 'Ú' (U with acute), assumed
def char_matrix(0xDA, b) do
[
[0x0, 0x0, b, 0x0, 0x0],
[0x0, b, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, 0x0]
]
end
# 0xDB — Latin-1 'Û' (U with circumflex), assumed
def char_matrix(0xDB, b) do
[
[0x0, 0x0, b, 0x0, 0x0],
[0x0, b, 0x0, b, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, 0x0]
]
end
# 0xDC — Latin-1 'Ü' (U with diaeresis), assumed
def char_matrix(0xDC, b) do
[
[0x0, b, 0x0, b, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, 0x0]
]
end
# 0xDD — Latin-1 'Ý' (Y with acute), assumed
def char_matrix(0xDD, b) do
[
[0x0, 0x0, 0x0, b, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, 0x0, b, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, 0x0, b, 0x0, 0x0]
]
end
# 0xDE — Latin-1 'Þ' (Thorn), assumed
def char_matrix(0xDE, b) do
[
[b, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0],
[b, b, b, 0x0],
[b, 0x0, 0x0, b],
[b, b, b, 0x0],
[b, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0]
]
end
# 0xDF — Latin-1 'ß' (sharp s), assumed
def char_matrix(0xDF, b) do
[
[0x0, b, b, 0x0, 0x0],
[b, 0x0, 0x0, b, 0x0],
[b, 0x0, 0x0, b, 0x0],
[b, 0x0, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, b, b, 0x0]
]
end
# 0xE0 — Latin-1 'à' (a with grave), assumed
def char_matrix(0xE0, b) do
[
[0x0, b, 0x0, 0x0, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, b, b, b, 0x0],
[0x0, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, b]
]
end
# 0xE1 — Latin-1 'á' (a with acute), assumed
def char_matrix(0xE1, b) do
[
[0x0, 0x0, 0x0, b, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, b, b, b, 0x0],
[0x0, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, b]
]
end
# 0xE2 — Latin-1 'â' (a with circumflex), assumed
def char_matrix(0xE2, b) do
[
[0x0, 0x0, b, 0x0, 0x0],
[0x0, b, 0x0, b, 0x0],
[0x0, b, b, b, 0x0],
[0x0, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, b]
]
end
# 0xE3 — Latin-1 'ã' (a with tilde), assumed
def char_matrix(0xE3, b) do
[
[0x0, b, b, 0x0, b],
[b, 0x0, b, b, 0x0],
[0x0, b, b, b, 0x0],
[0x0, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, b]
]
end
# 0xE4 — Latin-1 'ä' (a with diaeresis), assumed
def char_matrix(0xE4, b) do
[
[0x0, b, 0x0, b, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, b, b, b, 0x0],
[0x0, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, b]
]
end
def char_matrix(0xE5, b) do
[
[0x0, 0x0, b, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, b, b, b, 0x0],
[0x0, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, b]
]
end
def char_matrix(0xE6, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[b, b, 0x0, b, 0x0],
[0x0, 0x0, b, 0x0, b],
[b, b, b, b, b],
[b, 0x0, b, 0x0, 0x0],
[b, b, 0x0, b, b]
]
end
def char_matrix(0xE7, b) do
[
[0x0, 0x0, 0x0, 0x0],
[0x0, b, b, b],
[b, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0],
[0x0, b, b, b],
[0x0, 0x0, b, 0x0],
[0x0, 0x0, 0x0, b]
]
end
def char_matrix(0xE8, b) do
[
[0x0, 0x0, b, 0x0, 0x0],
[0x0, 0x0, 0x0, b, 0x0],
[0x0, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, b, b, b, b],
[b, 0x0, 0x0, 0x0, 0x0],
[0x0, b, b, b, 0x0]
]
end
def char_matrix(0xE9, b) do
[
[0x0, 0x0, b, 0x0, 0x0],
[0x0, b, 0x0, 0x0, 0x0],
[0x0, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, b, b, b, b],
[b, 0x0, 0x0, 0x0, 0x0],
[0x0, b, b, b, 0x0]
]
end
def char_matrix(0xEA, b) do
[
[0x0, 0x0, b, 0x0, 0x0],
[0x0, b, 0x0, b, 0x0],
[0x0, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, b, b, b, b],
[b, 0x0, 0x0, 0x0, 0x0],
[0x0, b, b, b, 0x0]
]
end
def char_matrix(0xEB, b) do
[
[0x0, b, 0x0, b, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, b, b, b, b],
[b, 0x0, 0x0, 0x0, 0x0],
[0x0, b, b, b, 0x0]
]
end
def char_matrix(0xEC, b) do
[
[b, 0x0, 0x0],
[0x0, b, 0x0],
[0x0, 0x0, 0x0],
[b, b, 0x0],
[0x0, b, 0x0],
[0x0, b, 0x0],
[b, b, b]
]
end
def char_matrix(0xED, b) do
[
[0x0, 0x0, b],
[0x0, b, 0x0],
[0x0, 0x0, 0x0],
[b, b, 0x0],
[0x0, b, 0x0],
[0x0, b, 0x0],
[b, b, b]
]
end
def char_matrix(0xEE, b) do
[
[0x0, b, 0x0],
[b, 0x0, b],
[0x0, 0x0, 0x0],
[b, b, 0x0],
[0x0, b, 0x0],
[0x0, b, 0x0],
[b, b, b]
]
end
def char_matrix(0xEF, b) do
[
[0x0, 0x0, 0x0],
[b, 0x0, b],
[0x0, 0x0, 0x0],
[b, b, 0x0],
[0x0, b, 0x0],
[0x0, b, 0x0],
[b, b, b]
]
end
def char_matrix(0xF0, b) do
[
[0x0, b, 0x0, b, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, b, 0x0, b, 0x0],
[0x0, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, 0x0]
]
end
def char_matrix(0xF1, b) do
[
[0x0, b, b, 0x0, b],
[b, 0x0, b, b, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, b, b, 0x0],
[b, b, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b]
]
end
def char_matrix(0xF2, b) do
[
[0x0, 0x0, b, 0x0, 0x0],
[0x0, 0x0, 0x0, b, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, 0x0]
]
end
def char_matrix(0xF3, b) do
[
[0x0, 0x0, b, 0x0, 0x0],
[0x0, b, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, 0x0]
]
end
def char_matrix(0xF4, b) do
[
[0x0, 0x0, b, 0x0, 0x0],
[0x0, b, 0x0, b, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, 0x0]
]
end
def char_matrix(0xF5, b) do
[
[0x0, b, b, 0x0, b],
[b, 0x0, b, b, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, 0x0]
]
end
def char_matrix(0xF6, b) do
[
[0x0, b, 0x0, b, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, 0x0]
]
end
def char_matrix(0xF7, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[b, b, b, b, b],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, b, 0x0, 0x0]
]
end
def char_matrix(0xF8, b) do
[
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, b, 0x0],
[0x0, b, b, b, 0x0],
[b, 0x0, b, 0x0, b],
[b, b, 0x0, 0x0, b],
[0x0, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, 0x0]
]
end
def char_matrix(0xF9, b) do
[
[0x0, 0x0, b, 0x0, 0x0],
[0x0, 0x0, 0x0, b, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, 0x0]
]
end
def char_matrix(0xFA, b) do
[
[0x0, 0x0, b, 0x0, 0x0],
[0x0, b, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, 0x0]
]
end
def char_matrix(0xFB, b) do
[
[0x0, 0x0, b, 0x0, 0x0],
[0x0, b, 0x0, b, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, 0x0]
]
end
def char_matrix(0xFC, b) do
[
[0x0, b, 0x0, b, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, b, b, 0x0]
]
end
def char_matrix(0xFD, b) do
[
[0x0, 0x0, 0x0, b, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[0x0, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, 0x0, 0x0, b],
[0x0, b, 0x0, b, 0x0],
[0x0, 0x0, b, 0x0, 0x0],
[b, b, 0x0, 0x0, 0x0]
]
end
def char_matrix(0xFE, b) do
[
[b, 0x0, 0x0, 0x0, 0x0],
[b, 0x0, b, b, 0x0],
[b, b, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, 0x0, 0x0, 0x0, b],
[b, b, b, b, 0x0],
[b, 0x0, 0x0, 0x0, 0x0]
]
end
# Fallback glyph for any codepoint that has no explicit clause above
# (a grave-accented y-like bitmap, matching the style of the preceding
# lowercase-y glyphs — presumably the 0xFF slot doubling as the default).
#
# NOTE(review): the original head was `def char_matrix(b, b)`. Because Elixir
# requires a variable repeated in a pattern to bind to equal values, that
# clause only matched when the codepoint happened to equal the "on" pixel
# value `b`, and every other unknown codepoint raised FunctionClauseError.
# A catch-all was almost certainly intended, so the first argument is now
# ignored. This is backward-compatible: every call that matched before still
# matches and returns the same matrix.
def char_matrix(_codepoint, b) do
  [
    [0x0, b, 0x0, 0x0, 0x0],
    [0x0, 0x0, b, 0x0, 0x0],
    [0x0, 0x0, 0x0, 0x0, 0x0],
    [b, 0x0, 0x0, 0x0, b],
    [0x0, b, 0x0, b, 0x0],
    [0x0, 0x0, b, 0x0, 0x0],
    [b, b, 0x0, 0x0, 0x0]
  ]
end
end
|
lib/scroll_hat/fonts/big.ex
| 0.577734 | 0.479626 |
big.ex
|
starcoder
|
defmodule SetOne.ChallengeSix do
  @moduledoc """
  Cryptopals Set 1, Challenge 6: breaking repeating-key XOR.

  http://cryptopals.com/sets/1/challenges/6
  """

  # `use Bitwise` and the `^^^` operator are deprecated/removed in modern
  # Elixir; `import Bitwise` + `bxor/2` work on all supported versions.
  import Bitwise

  # `as: ChallengeThree` was redundant — an alias defaults to its last segment.
  alias SetOne.ChallengeThree

  @doc """
  Attempts to find the key of a given ciphertext encrypted with repeating XOR.

  Tries the three most promising key sizes (see `guess_keysizes/1`) and solves
  each candidate independently.

  http://cryptopals.com/sets/1/challenges/6
  """
  @spec find_key_repeating_xor(binary) :: [binary]
  def find_key_repeating_xor(ciphertext) do
    ciphertext
    |> guess_keysizes()
    |> Helpers.pmap(&find_key_repeating_xor(ciphertext, &1))
  end

  # Solves a single key-size candidate: chunk the ciphertext into keysize
  # blocks, transpose so every list holds the bytes XORed with the same key
  # byte, then break each list as a single-byte XOR cipher (challenge 3).
  def find_key_repeating_xor(ciphertext, {keysize, _distance}) do
    ciphertext
    |> :binary.bin_to_list()
    |> Stream.chunk_every(keysize, keysize, :discard)
    |> Helpers.transpose()
    |> Helpers.pmap(fn column ->
      {key, _, _} = ChallengeThree.my_decoder(column)
      key
    end)
  end

  @doc """
  Attempts to guess the keysize by calculating the average hamming distance of blocks and picking the three smallest
  """
  @spec guess_keysizes(binary) :: [{pos_integer, pos_integer}]
  def guess_keysizes(ciphertext) do
    2..40
    |> Helpers.pmap(&{&1, calculate_block_distance(&1, ciphertext)})
    |> Enum.sort_by(fn {_keysize, distance} -> distance end)
    |> Enum.take(3)
  end

  @doc """
  Calculates the normalized averages of the hamming distances
  """
  @spec calculate_block_distance(pos_integer, binary) :: float
  def calculate_block_distance(block_size, ciphertext) do
    ciphertext
    |> :binary.bin_to_list()
    |> Enum.chunk_every(block_size)
    |> sum_hamming_and_average()
  end

  # Average per-byte hamming distance between consecutive blocks.
  # The [] clause avoids an ArithmeticError (division by zero) and a
  # FunctionClauseError for an empty ciphertext.
  defp sum_hamming_and_average([]), do: 0.0
  defp sum_hamming_and_average(blocks), do: sum_hamming(blocks) / length(blocks)

  defp sum_hamming([]), do: 0
  defp sum_hamming([_last]), do: 0

  defp sum_hamming([first | [next | _] = tail]) do
    # Normalize each pairwise distance by the block length before summing.
    hamming(first, next) / length(first) + sum_hamming(tail)
  end

  @doc """
  Calculates the Hamming Distance of two strings
  http://cryptopals.com/sets/1/challenges/6/
  ### Examples
  iex> SetOne.ChallengeSix.hamming("abc", "abc")
  0
  iex> SetOne.ChallengeSix.hamming("0", "1")
  1
  """
  # Spec fixed: identical inputs yield 0, so the result is non-negative,
  # not strictly positive.
  @spec hamming(binary, binary) :: non_neg_integer
  def hamming(first, second) when is_binary(first) and is_binary(second) do
    hamming(:binary.bin_to_list(first), :binary.bin_to_list(second))
  end

  @spec hamming([byte], [byte]) :: non_neg_integer
  def hamming(a, b) do
    # Count the differing bits of each byte pair in a single pass.
    Enum.zip(a, b)
    |> Enum.reduce(0, fn {x, y}, acc -> acc + Helpers.count_bits(bxor(x, y)) end)
  end
end
|
lib/set_1/challenge_6.ex
| 0.886988 | 0.427935 |
challenge_6.ex
|
starcoder
|
defmodule ExMpesa.Reversal do
  @moduledoc """
  Reversal API enables reversal of transactions done
  You will be able to reverse a transaction where you are the credit party. This means it will be done via the Web portal, and may require manual authorization from the Service Provider side. But if you are allowed to reverse a transaction via API, it may also need to be authorized.
  An initiator requires the Org Reversals Initiator role to be able to perform reversals via API
  """

  import ExMpesa.MpesaBase
  alias ExMpesa.Util

  @doc """
  Makes a request to the mpesa reversal endpoint with the given params.
  ## Params
  The function requires two keys to be present for a successful request, `:transation_id` and `:amount`
  The params can be any of the accepted params by the api endpoint with the keys converted to snake case. For example
  `QueueTimeOutURL` is expected to be in the format `queue_time_out_url`.
  Additionally, the keys `:security_credential, :initiator, :receiver_party, :result_url, :queue_time_out_url` are
  loaded from the respective config and used if they're not provided as part of the params in the arguments.
  ## Options
  Because reversal can be done for B2B, B2C or C2B, this function allows for an option to load the configs from.
  It defaults to `:reversal` which means it will use config under the `:mpesa` config.
  In order to reuse the configs for the other apis, use the parent key under the mpesa config.
  For example, in order to use the `b2b` configs, pass in `:b2b` as the option
  ## Configuration
  Add below config to dev.exs / prod.exs files
  `config.exs`
  ```elixir
  config :ex_mpesa,
    cert: "",
    reversal: [
      short_code: "",
      initiator_name: "",
      password: "",
      timeout_url: "",
      result_url: "",
      security_credential: ""
    ]
  ```
  To generate security_credential, head over to https://developer.safaricom.co.ke/test_credentials, then Initiator Security Password for your environment.
  Alternatively, generate security credential using certificate
  `cert` - This is the M-Pesa public key certificate used to encrypt your plain password.
  There are 2 types of certificates.
  - sandox - https://developer.safaricom.co.ke/sites/default/files/cert/cert_sandbox/cert.cer .
  - production - https://developer.safaricom.co.ke/sites/default/files/cert/cert_prod/cert.cer .
  `password` - <PASSWORD>.
  Environment
  - production - set password from the organization portal.
  - sandbox - use your own custom password
  ## Example
  iex> ExMpesa.Reversal.reverse(%{amount: 30, transaction_id: "LGR013H3J2"}, :reversal)
  {:ok,
    %{
      "ConversationID" => "AG_20201011_00006511c0024c170286",
      "OriginatorConversationID" => "8094-41340768-1",
      "ResponseCode" => "0",
      "ResponseDescription" => "Accept the service request successfully."
    }}
  """
  def reverse(params, option \\ :reversal)

  def reverse(%{transaction_id: _trans_id, amount: _amount} = params, api) do
    # Credentials (initiator/short code) come from the config keyed by `api`
    # (:reversal, :b2b, ...); the callback URLs are always read from the
    # :reversal config — NOTE(review): presumably intentional, confirm.
    config = Application.get_env(:ex_mpesa, api)
    credential = Util.get_security_credential_for(api)
    reversal_config = Application.get_env(:ex_mpesa, :reversal)

    %{
      security_credential: credential,
      initiator: config[:initiator_name],
      receiver_party: config[:short_code],
      result_url: reversal_config[:result_url],
      queue_time_out_url: reversal_config[:timeout_url]
    }
    |> Map.merge(params)
    |> reversal_payload()
    |> request_reversal()
  end

  def reverse(_, _) do
    {:error, "either transaction_id or amount is missing from the given params"}
  end

  # Builds the JSON payload expected by the Daraja reversal endpoint from the
  # normalized snake_case params.
  defp reversal_payload(params) do
    %{
      "Initiator" => params.initiator,
      "SecurityCredential" => params.security_credential,
      "CommandID" => "TransactionReversal",
      "TransactionID" => params.transaction_id,
      "Amount" => params.amount,
      "ReceiverParty" => params.receiver_party,
      # NOTE(review): "RecieverIdentifierType" (sic) appears to match the
      # upstream API's own field spelling — confirm before "fixing" it.
      "RecieverIdentifierType" => "11",
      "ResultURL" => params.result_url,
      "QueueTimeOutURL" => params.queue_time_out_url,
      "Remarks" => Map.get(params, :remarks, "Payment Reversal"),
      # Accept the correctly spelled :occasion key while still honouring the
      # historical misspelling :occassion for backward compatibility — the
      # original only read :occassion, silently ignoring :occasion.
      "Occasion" =>
        Map.get(params, :occasion, Map.get(params, :occassion, "Payment Reversal"))
    }
  end

  defp request_reversal(payload) do
    make_request("/mpesa/reversal/v1/request", payload)
  end
end
|
lib/ex_mpesa/reversal.ex
| 0.868674 | 0.85984 |
reversal.ex
|
starcoder
|
defmodule Dune.Session do
@moduledoc """
Sessions provide a way to evaluate code and keep state (bindings, modules...) between evaluations.
- Use `Dune.eval_string/2` to execute code as a one-off
- Use `Dune.Session.eval_string/3` to execute consecutive code blocks
`Dune.Session` could be used to implement something like a safe IEx shell, or to compile a module
once and call it several times without the overhead of parsing.
`Dune.Session` is also a struct that is used to store the state of an evaluation.
Only the following fields are public:
- `last_result`: contains the result of the last evaluation, or `nil` for empty sessions
Other fields are private and shouldn't be accessed directly.
"""
alias Dune.{Allowlist, Eval, Parser, Success, Failure, Opts}
# Opaque: the runtime evaluation environment and the parser's compile
# environment are internal to Dune and not part of the public struct API.
@opaque private_env :: Eval.Env.t()
@opaque private_compile_env :: Parser.CompileEnv.t()
@typedoc """
The type of a `Dune.Session`.
"""
@type t :: %__MODULE__{
last_result: nil | Success.t() | Failure.t(),
env: private_env,
compile_env: private_compile_env,
bindings: keyword
}
@enforce_keys [:env, :compile_env, :bindings, :last_result]
defstruct [:env, :compile_env, :bindings, :last_result]
# Fresh-session defaults, built once at compile time with the default
# allowlist and an empty atom mapping.
@default_env Eval.Env.new(Dune.AtomMapping.new(), Allowlist.Default)
@default_compile_env Parser.CompileEnv.new(Allowlist.Default)
@doc """
Returns a new empty session.
## Examples
iex> Dune.Session.new()
#Dune.Session<last_result: nil, ...>
"""
@spec new :: t
def new do
%__MODULE__{
env: @default_env,
compile_env: @default_compile_env,
bindings: [],
last_result: nil
}
end
@doc """
Evaluates the provided `string` in the context of the `session` and returns a new session.
The result will be available in the `last_result` key.
In case of a success, the variable bindings or created modules will be saved in the session.
In case of a failure, the rest of the session state won't be updated, so it is possible to
keep executing instructions after a failure
## Examples
iex> Dune.Session.new()
...> |> Dune.Session.eval_string("x = 1")
...> |> Dune.Session.eval_string("x + 2")
#Dune.Session<last_result: %Dune.Success{inspected: "3", stdio: "", value: 3}, ...>
iex> Dune.Session.new()
...> |> Dune.Session.eval_string("x = 1")
...> |> Dune.Session.eval_string("x = x / 0") # will fail, but the previous state is kept
...> |> Dune.Session.eval_string("x + 2")
#Dune.Session<last_result: %Dune.Success{inspected: "3", stdio: "", value: 3}, ...>
"""
@spec eval_string(t, String.t(), keyword) :: t
def eval_string(session = %__MODULE__{}, string, opts \\ []) do
opts = Opts.validate!(opts)
# Parse with the session's accumulated atom mapping / compile env so
# previously defined names stay resolvable across evaluations.
parse_state = %{atom_mapping: session.env.atom_mapping, compile_env: session.compile_env}
parsed = Parser.parse_string(string, opts, parse_state)
parsed
|> Eval.run(opts, session)
|> add_result_to_session(session, parsed)
end
# Success: unpack the {value, env, bindings} triple produced by Eval.run and
# persist the new env, compile env and bindings into the session.
defp add_result_to_session(result = %Success{value: {value, env, bindings}}, session, %{
compile_env: compile_env
}) do
result = %{result | value: value}
%{session | env: env, compile_env: compile_env, last_result: result, bindings: bindings}
end
# Failure: only record the result; all other session state is left untouched
# so evaluation can continue from the last good state.
defp add_result_to_session(result = %Failure{}, session, _) do
%{session | last_result: result}
end
# Custom Inspect implementation: only `last_result` is public, so hide the
# private fields behind "...".
defimpl Inspect do
import Inspect.Algebra
def inspect(session, opts) do
container_doc(
"#Dune.Session<",
[last_result: session.last_result],
", ...>",
opts,
&do_inspect/2,
break: :strict
)
end
defp do_inspect({key, value}, opts) do
key = color(Code.Identifier.inspect_as_key(key), :atom, opts)
concat(key, concat(" ", to_doc(value, opts)))
end
end
end
|
lib/dune/session.ex
| 0.86757 | 0.729158 |
session.ex
|
starcoder
|
defmodule Anansi.Sequence do
@moduledoc """
Tools for generating escape codes for multiple ANSI instructions in one go.
"""
# @doc """
# Macro for generating multiple ANSI escape instructions in one go.
# """
# defmacro compose(sequence, block \\ []) do
# sequence = List.wrap sequence
# {code, sequence} = Keyword.pop(sequence, :do, Keyword.get(block, :do, [""]))
# quote do: [
# [unquote(sequence) |> Anansi.Sequence.compose],
# [unquote(code)],
# # revert somehow
# ]
# end
@doc """
Composes multiple ANSI escape instructions from a `sequence` of instructions.
"""
def compose(sequence) when is_list(sequence) do
# Resolve the sequence to {module, fun, args} tuples, then invoke each one.
sequence |> build |> Enum.map(fn {m, f, a} -> apply(m, f, a) end)
end
@doc """
Converts an Anansi `sequence` of instructions into `{module, function, args}` tuples.
"""
def build(sequence) when is_list(sequence) do
sequence |> explode |> Enum.map(&instruction/1)
end
# Flattens the nested keyword tree into one path per leaf. Each path is
# accumulated leaf-first (reversed), e.g. [:cursor, [move: :up]] becomes
# [:up, :move, :cursor]; instruction/1 relies on that ordering.
defp explode(tree) do
:lists.reverse(do_explode(tree, [], []))
end
defp do_explode([], _, acc) do
acc
end
# {key, subtree}: descend with `key` pushed onto the current (reversed) path.
defp do_explode([{key, subtree} | trees], current, acc) do
do_explode(trees, current, do_explode(subtree, [key | current], acc))
end
# Plain element in a list: explode it against the same current path.
defp do_explode([key | rest], current, acc) do
do_explode(rest, current, do_explode(key, current, acc))
end
# Leaf: emit the completed (still reversed) path.
defp do_explode(leaf, current, acc) do
[[leaf | current] | acc]
end
# The clauses below pattern-match the reversed paths produced by explode/1:
# [arg, fun, namespace] — e.g. [:up, :move, :cursor] -> Anansi.Cursor.move(:up)
defp instruction([arg | [fun | [namespace | []]]]) do
do_instruction([namespace], fun, [arg])
end
# [fun, namespace] — a namespaced zero-arity instruction.
defp instruction([fun | [namespace | []]]) do
do_instruction([namespace], fun)
end
# [fun] — a bare zero-arity instruction on the Anansi root module.
defp instruction([fun | []]) do
do_instruction([], fun)
end
defp instruction(other) do
# Re-reverse the path so the error shows the user's original ordering.
raise "unrecognized Anansi instruction: `#{inspect(:lists.reverse(other))}`"
end
# Builds the target module by camelizing the namespace segments under the
# Anansi root, e.g. [:cursor] -> Anansi.Cursor.
defp do_instruction(namespace, fun, args \\ []) do
module = namespace
|> Enum.map(&(&1 |> to_string |> Macro.camelize))
|> :lists.reverse
|> Enum.reduce(Anansi, &(Module.concat &2, &1))
{module, fun, args}
end
end
|
lib/anansi/sequence.ex
| 0.573798 | 0.513059 |
sequence.ex
|
starcoder
|
defmodule Talib.EMA do
  @moduledoc ~S"""
  Defines an Exponential Moving Average.

  ## History

  Version: 1.0
  Source: http://www.itl.nist.gov/div898/handbook/pmc/section3/pmc324.htm
  Audited by:

  | Name         | Title             |
  | :----------- | :---------------- |
  |              |                   |
  """

  @typedoc """
  Defines an Exponential Moving Average.

  * :period - The period of the EMA
  * :values - List of values resulting from the calculation
  """
  @type t :: %Talib.EMA{period: integer, values: [float]}

  defstruct period: 0,
            values: []

  @doc """
  Gets the EMA of a list.

  The output is seeded with the simple moving average of the first `period`
  inputs, preceded by `period - 1` zero placeholders so the output has the
  same length as the input; every later value follows the standard EMA
  recurrence with weight `2 / (period + 1)`.

  Returns `{:ok, ema}`, otherwise `{:error, reason}`.

  ## Examples

      iex>Talib.EMA.from_list([1, 2, 3], 2)
      {:ok, %Talib.EMA{period: 2, values: [0.0, 1.5, 2.5]}}

      iex>Talib.EMA.from_list([], 2)
      {:error, :no_data}
  """
  @spec from_list([number], integer) :: {:ok, Talib.EMA.t()} | {:error, atom}
  def from_list(data, period), do: calculate(data, period)

  @doc """
  Gets the EMA of a list.

  Raises `NoDataError` if the given list is an empty list.

  ## Examples

      iex>Talib.EMA.from_list!([1, 2, 3], 2)
      %Talib.EMA{period: 2, values: [0.0, 1.5, 2.5]}

      iex>Talib.EMA.from_list!([], 2)
      ** (NoDataError) no data error
  """
  @spec from_list!([number], integer) :: Talib.EMA.t() | no_return
  def from_list!(data, period) do
    case calculate(data, period) do
      {:ok, result} -> result
      {:error, :no_data} -> raise NoDataError
    end
  end

  # The accumulator is kept in reverse order so every step is an O(1) prepend
  # instead of the original O(n) `results ++ [x]` append (O(n^2) overall);
  # it is reversed exactly once on completion. Numeric behavior is unchanged.
  #
  # NOTE(review): the previous doctests ([1.0, 1.666..., 2.555...]) described
  # an older first-value-seeded variant (still visible in the repo history as
  # commented-out code); they have been aligned with the implemented
  # SMA-seeded algorithm above.
  @doc false
  @spec calculate([number], integer, [float]) ::
          {:ok, Talib.EMA.t()}
          | {:error, atom}
  defp calculate(data, period, acc \\ [])

  defp calculate([], _period, []),
    do: {:error, :no_data}

  defp calculate([], period, acc),
    do: {:ok, %Talib.EMA{period: period, values: Enum.reverse(acc)}}

  # First step: seed with the SMA of the first `period` inputs, padded with
  # `period - 1` zeros so the output is as long as the input.
  # NOTE(review): when the input is shorter than `period`, the seed still
  # divides by `period` (averaging over fewer elements) — confirm intended.
  defp calculate(data, period, []) do
    {sma_data, rest} = Enum.split(data, period)
    first_average = Enum.sum(sma_data) / period
    calculate(rest, period, [first_average | List.duplicate(0.0, period - 1)])
  end

  # EMA = Close * weight + EMA(previous) * (1 - weight), weight = 2 / (period + 1)
  defp calculate([head | tail], period, [previous_average | _] = acc) do
    weight = 2 / (period + 1)
    new_average = head * weight + previous_average * (1 - weight)
    calculate(tail, period, [new_average | acc])
  end
end
|
lib/talib/ema.ex
| 0.905165 | 0.659629 |
ema.ex
|
starcoder
|
defmodule AWS.CodeCommit do
@moduledoc """
AWS CodeCommit
This is the *AWS CodeCommit API Reference*. This reference provides
descriptions of the operations and data types for AWS CodeCommit API along
with usage examples.
You can use the AWS CodeCommit API to work with the following objects:
Repositories, by calling the following:
<ul> <li> `BatchGetRepositories`, which returns information about one or
more repositories associated with your AWS account.
</li> <li> `CreateRepository`, which creates an AWS CodeCommit repository.
</li> <li> `DeleteRepository`, which deletes an AWS CodeCommit repository.
</li> <li> `GetRepository`, which returns information about a specified
repository.
</li> <li> `ListRepositories`, which lists all AWS CodeCommit repositories
associated with your AWS account.
</li> <li> `UpdateRepositoryDescription`, which sets or updates the
description of the repository.
</li> <li> `UpdateRepositoryName`, which changes the name of the
repository. If you change the name of a repository, no other users of that
repository will be able to access it until you send them the new HTTPS or
SSH URL to use.
</li> </ul> Branches, by calling the following:
<ul> <li> `CreateBranch`, which creates a new branch in a specified
repository.
</li> <li> `DeleteBranch`, which deletes the specified branch in a
repository unless it is the default branch.
</li> <li> `GetBranch`, which returns information about a specified branch.
</li> <li> `ListBranches`, which lists all branches for a specified
repository.
</li> <li> `UpdateDefaultBranch`, which changes the default branch for a
repository.
</li> </ul> Files, by calling the following:
<ul> <li> `DeleteFile`, which deletes the content of a specified file from
a specified branch.
</li> <li> `GetFile`, which returns the base-64 encoded content of a
specified file.
</li> <li> `GetFolder`, which returns the contents of a specified folder or
directory.
</li> <li> `PutFile`, which adds or modifies a file in a specified
repository and branch.
</li> </ul> Information about committed code in a repository, by calling
the following:
<ul> <li> `CreateCommit`, which creates a commit for changes to a
repository.
</li> <li> `GetBlob`, which returns the base-64 encoded content of an
individual Git blob object within a repository.
</li> <li> `GetCommit`, which returns information about a commit, including
commit messages and author and committer information.
</li> <li> `GetDifferences`, which returns information about the
differences in a valid commit specifier (such as a branch, tag, HEAD,
commit ID or other fully qualified reference).
</li> </ul> Pull requests, by calling the following:
<ul> <li> `CreatePullRequest`, which creates a pull request in a specified
repository.
</li> <li> `DescribePullRequestEvents`, which returns information about one
or more pull request events.
</li> <li> `GetCommentsForPullRequest`, which returns information about
comments on a specified pull request.
</li> <li> `GetMergeConflicts`, which returns information about merge
conflicts between the source and destination branch in a pull request.
</li> <li> `GetPullRequest`, which returns information about a specified
pull request.
</li> <li> `ListPullRequests`, which lists all pull requests for a
repository.
</li> <li> `MergePullRequestByFastForward`, which merges the source
destination branch of a pull request into the specified destination branch
for that pull request using the fast-forward merge option.
</li> <li> `PostCommentForPullRequest`, which posts a comment to a pull
request at the specified line, file, or request.
</li> <li> `UpdatePullRequestDescription`, which updates the description of
a pull request.
</li> <li> `UpdatePullRequestStatus`, which updates the status of a pull
request.
</li> <li> `UpdatePullRequestTitle`, which updates the title of a pull
request.
</li> </ul> Information about comments in a repository, by calling the
following:
<ul> <li> `DeleteCommentContent`, which deletes the content of a comment on
a commit in a repository.
</li> <li> `GetComment`, which returns information about a comment on a
commit.
</li> <li> `GetCommentsForComparedCommit`, which returns information about
comments on the comparison between two commit specifiers in a repository.
</li> <li> `PostCommentForComparedCommit`, which creates a comment on the
comparison between two commit specifiers in a repository.
</li> <li> `PostCommentReply`, which creates a reply to a comment.
</li> <li> `UpdateComment`, which updates the content of a comment on a
commit in a repository.
</li> </ul> Triggers, by calling the following:
<ul> <li> `GetRepositoryTriggers`, which returns information about triggers
configured for a repository.
</li> <li> `PutRepositoryTriggers`, which replaces all triggers for a
repository and can be used to create or delete triggers.
</li> <li> `TestRepositoryTriggers`, which tests the functionality of a
repository trigger by sending data to the trigger target.
</li> </ul> For information about how to use AWS CodeCommit, see the [AWS
CodeCommit User
Guide](http://docs.aws.amazon.com/codecommit/latest/userguide/welcome.html).
"""
@doc """
Returns information about one or more repositories.
<note> The description field for a repository accepts all HTML characters
and all valid Unicode characters. Applications that do not HTML-encode the
description and display it in a web page could expose users to potentially
malicious code. Make sure that you HTML-encode the description field in any
application that uses this API to display the repository description on a
web page.
</note>
"""
def batch_get_repositories(client, input, options \\ []) do
request(client, "BatchGetRepositories", input, options)
end
@doc """
Creates a new branch in a repository and points the branch to a commit.
<note> Calling the create branch operation does not set a repository's
default branch. To do this, call the update default branch operation.
</note>
"""
def create_branch(client, input, options \\ []) do
request(client, "CreateBranch", input, options)
end
@doc """
Creates a commit for a repository on the tip of a specified branch.
"""
def create_commit(client, input, options \\ []) do
request(client, "CreateCommit", input, options)
end
@doc """
Creates a pull request in the specified repository.
"""
def create_pull_request(client, input, options \\ []) do
request(client, "CreatePullRequest", input, options)
end
@doc """
Creates a new, empty repository.
"""
def create_repository(client, input, options \\ []) do
request(client, "CreateRepository", input, options)
end
@doc """
Deletes a branch from a repository, unless that branch is the default
branch for the repository.
"""
def delete_branch(client, input, options \\ []) do
request(client, "DeleteBranch", input, options)
end
@doc """
Deletes the content of a comment made on a change, file, or commit in a
repository.
"""
def delete_comment_content(client, input, options \\ []) do
request(client, "DeleteCommentContent", input, options)
end
@doc """
Deletes a specified file from a specified branch. A commit is created on
the branch that contains the revision. The file will still exist in the
commits prior to the commit that contains the deletion.
"""
def delete_file(client, input, options \\ []) do
request(client, "DeleteFile", input, options)
end
@doc """
Deletes a repository. If a specified repository was already deleted, a null
repository ID will be returned.
<important> Deleting a repository also deletes all associated objects and
metadata. After a repository is deleted, all future push calls to the
deleted repository will fail.
</important>
"""
def delete_repository(client, input, options \\ []) do
request(client, "DeleteRepository", input, options)
end
@doc """
Returns information about one or more pull request events.
"""
def describe_pull_request_events(client, input, options \\ []) do
request(client, "DescribePullRequestEvents", input, options)
end
@doc """
Returns the base-64 encoded content of an individual blob within a
repository.
"""
def get_blob(client, input, options \\ []) do
request(client, "GetBlob", input, options)
end
@doc """
Returns information about a repository branch, including its name and the
last commit ID.
"""
def get_branch(client, input, options \\ []) do
request(client, "GetBranch", input, options)
end
@doc """
Returns the content of a comment made on a change, file, or commit in a
repository.
"""
def get_comment(client, input, options \\ []) do
request(client, "GetComment", input, options)
end
@doc """
Returns information about comments made on the comparison between two
commits.
"""
def get_comments_for_compared_commit(client, input, options \\ []) do
request(client, "GetCommentsForComparedCommit", input, options)
end
@doc """
Returns comments made on a pull request.
"""
def get_comments_for_pull_request(client, input, options \\ []) do
request(client, "GetCommentsForPullRequest", input, options)
end
@doc """
Returns information about a commit, including commit message and committer
information.
"""
def get_commit(client, input, options \\ []) do
request(client, "GetCommit", input, options)
end
@doc """
Returns information about the differences in a valid commit specifier (such
as a branch, tag, HEAD, commit ID or other fully qualified reference).
Results can be limited to a specified path.
"""
def get_differences(client, input, options \\ []) do
request(client, "GetDifferences", input, options)
end
@doc """
Returns the base-64 encoded contents of a specified file and its metadata.
"""
def get_file(client, input, options \\ []) do
request(client, "GetFile", input, options)
end
@doc """
Returns the contents of a specified folder in a repository.
"""
def get_folder(client, input, options \\ []) do
request(client, "GetFolder", input, options)
end
@doc """
Returns information about merge conflicts between the before and after
commit IDs for a pull request in a repository.
"""
def get_merge_conflicts(client, input, options \\ []) do
request(client, "GetMergeConflicts", input, options)
end
@doc """
Gets information about a pull request in a specified repository.
"""
def get_pull_request(client, input, options \\ []) do
request(client, "GetPullRequest", input, options)
end
@doc """
Returns information about a repository.
<note> The description field for a repository accepts all HTML characters
and all valid Unicode characters. Applications that do not HTML-encode the
description and display it in a web page could expose users to potentially
malicious code. Make sure that you HTML-encode the description field in any
application that uses this API to display the repository description on a
web page.
</note>
"""
def get_repository(client, input, options \\ []) do
request(client, "GetRepository", input, options)
end
@doc """
Gets information about triggers configured for a repository.
"""
def get_repository_triggers(client, input, options \\ []),
  do: request(client, "GetRepositoryTriggers", input, options)
@doc """
Gets information about one or more branches in a repository.
"""
def list_branches(client, input, options \\ []),
  do: request(client, "ListBranches", input, options)
@doc """
Returns a list of pull requests for a specified repository. The return list
can be refined by pull request status or pull request author ARN.
"""
def list_pull_requests(client, input, options \\ []),
  do: request(client, "ListPullRequests", input, options)
@doc """
Gets information about one or more repositories.
"""
def list_repositories(client, input, options \\ []),
  do: request(client, "ListRepositories", input, options)
@doc """
Closes a pull request and attempts to merge the source commit of a pull
request into the specified destination branch for that pull request at the
specified commit using the fast-forward merge option.
"""
def merge_pull_request_by_fast_forward(client, input, options \\ []),
  do: request(client, "MergePullRequestByFastForward", input, options)
@doc """
Posts a comment on the comparison between two commits.
"""
def post_comment_for_compared_commit(client, input, options \\ []),
  do: request(client, "PostCommentForComparedCommit", input, options)
@doc """
Posts a comment on a pull request.
"""
def post_comment_for_pull_request(client, input, options \\ []),
  do: request(client, "PostCommentForPullRequest", input, options)
@doc """
Posts a comment in reply to an existing comment on a comparison between
commits or a pull request.
"""
def post_comment_reply(client, input, options \\ []),
  do: request(client, "PostCommentReply", input, options)
@doc """
Adds or updates a file in a branch in an AWS CodeCommit repository, and
generates a commit for the addition in the specified branch.
"""
def put_file(client, input, options \\ []),
  do: request(client, "PutFile", input, options)
@doc """
Replaces all triggers for a repository. This can be used to create or
delete triggers.
"""
def put_repository_triggers(client, input, options \\ []),
  do: request(client, "PutRepositoryTriggers", input, options)
@doc """
Tests the functionality of repository triggers by sending information to
the trigger target. If real data is available in the repository, the test
will send data from the last commit. If no data is available, sample data
will be generated.
"""
def test_repository_triggers(client, input, options \\ []),
  do: request(client, "TestRepositoryTriggers", input, options)
@doc """
Replaces the contents of a comment.
"""
def update_comment(client, input, options \\ []),
  do: request(client, "UpdateComment", input, options)
@doc """
Sets or changes the default branch name for the specified repository.
<note> If you use this operation to change the default branch name to the
current default branch name, a success message is returned even though the
default branch did not change.
</note>
"""
def update_default_branch(client, input, options \\ []),
  do: request(client, "UpdateDefaultBranch", input, options)
@doc """
Replaces the contents of the description of a pull request.
"""
def update_pull_request_description(client, input, options \\ []),
  do: request(client, "UpdatePullRequestDescription", input, options)
@doc """
Updates the status of a pull request.
"""
def update_pull_request_status(client, input, options \\ []),
  do: request(client, "UpdatePullRequestStatus", input, options)
@doc """
Replaces the title of a pull request.
"""
def update_pull_request_title(client, input, options \\ []),
  do: request(client, "UpdatePullRequestTitle", input, options)
@doc """
Sets or changes the comment or description for a repository.
<note> The description field for a repository accepts all HTML characters
and all valid Unicode characters. Applications that do not HTML-encode the
description and display it in a web page could expose users to potentially
malicious code. Make sure that you HTML-encode the description field in any
application that uses this API to display the repository description on a
web page.
</note>
"""
def update_repository_description(client, input, options \\ []),
  do: request(client, "UpdateRepositoryDescription", input, options)
@doc """
Renames a repository. The repository name must be unique across the calling
AWS account. In addition, repository names are limited to 100 alphanumeric,
dash, and underscore characters, and cannot include certain characters. The
suffix ".git" is prohibited. For a full description of the limits on
repository names, see
[Limits](http://docs.aws.amazon.com/codecommit/latest/userguide/limits.html)
in the AWS CodeCommit User Guide.
"""
def update_repository_name(client, input, options \\ []),
  do: request(client, "UpdateRepositoryName", input, options)
# All CodeCommit actions go through one JSON-over-POST endpoint; the action
# name travels in the `X-Amz-Target` header.
#
# Spec fixed: the previous spec promised `{:error, Poison.Parser.t}` and a
# non-existent `Poison.Response.t`; the function actually returns
# `{:error, {exception_type, message}}` for AWS error documents and
# `HTTPoison.Response.t` in success tuples.
@spec request(map(), binary(), map(), list()) ::
        {:ok, map() | nil, HTTPoison.Response.t} |
        {:error, {binary() | nil, binary() | nil}} |
        {:error, HTTPoison.Error.t}
defp request(client, action, input, options) do
  client = %{client | service: "codecommit"}
  host = get_host("codecommit", client)
  url = get_url(host, client)
  headers = [{"Host", host},
             {"Content-Type", "application/x-amz-json-1.1"},
             {"X-Amz-Target", "CodeCommit_20150413.#{action}"}]
  payload = Poison.Encoder.encode(input, [])
  # Sign with AWS Signature Version 4 before sending.
  headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
  case HTTPoison.post(url, payload, headers, options) do
    {:ok, response = %HTTPoison.Response{status_code: 200, body: ""}} ->
      # Some actions legitimately return an empty body on success.
      {:ok, nil, response}
    {:ok, response = %HTTPoison.Response{status_code: 200, body: body}} ->
      {:ok, Poison.Parser.parse!(body), response}
    {:ok, _response = %HTTPoison.Response{body: body}} ->
      # Non-200: AWS returns a JSON error document with "__type"/"message".
      error = Poison.Parser.parse!(body)
      exception = error["__type"]
      message = error["message"]
      {:error, {exception, message}}
    {:error, %HTTPoison.Error{reason: reason}} ->
      {:error, %HTTPoison.Error{reason: reason}}
  end
end
# Resolves the request host; the special "local" region targets localhost
# (used for testing against local service emulators).
defp get_host(_endpoint_prefix, %{region: "local"}), do: "localhost"

defp get_host(endpoint_prefix, client) do
  "#{endpoint_prefix}.#{client.region}.#{client.endpoint}"
end
# Builds the request URL from the client's protocol and port configuration.
defp get_url(host, %{proto: proto, port: port}), do: "#{proto}://#{host}:#{port}/"
end
|
lib/aws/code_commit.ex
| 0.881424 | 0.507568 |
code_commit.ex
|
starcoder
|
defmodule GuessWho.Contenders.TrevorBrown do
  @moduledoc """
  A strategy that finds the character by finding an attribute that splits the
  remaining characters into two groups of similar size in order to perform a
  binary search. Sometimes there is no attribute that splits the remaining
  characters 50/50, so this isn't a perfect strategy. Also lacks a few obvious
  optimizations.
  """

  alias GuessWho.Attributes

  @behaviour GuessWho.Contender

  # Based on a quick look in attributes.yml, big mouth seems to be a good first
  # question
  @my_pet_attribute "big mouth"

  @impl GuessWho.Contender
  def name(), do: "<NAME>"

  @impl GuessWho.Contender
  # First turn: no response or state yet, so open with the pet attribute and
  # seed the state with the full character list.
  def turn(nil, nil) do
    {@my_pet_attribute, %{
      remaining_characters: Attributes.characters(),
      last_guess: @my_pet_attribute
    }}
  end

  def turn(response, %{remaining_characters: characters, last_guess: last_guess} = state) do
    case remaining_characters(response, characters, last_guess) do
      # Exactly one candidate left: guess that character by name.
      # (With two candidates remaining we could also just guess one of them
      # directly instead of splitting on an attribute.)
      [name] ->
        {name, %{state | remaining_characters: [name]}}

      remaining ->
        guess = next_guess(remaining)
        {guess, %{state | remaining_characters: remaining, last_guess: guess}}
    end
  end

  # Narrows the candidate list based on the answer to the last guess.
  defp remaining_characters({:has_attribute?, true}, characters, last_guess) do
    Enum.filter(characters, &Attributes.character_has_attribute?(&1, last_guess))
  end

  defp remaining_characters({:has_attribute?, false}, characters, last_guess) do
    characters -- Attributes.characters_with_attribute(last_guess)
  end

  # Given a list of remaining characters by name, determine the best next
  # guess: the attribute whose holder count is closest to half the remaining
  # candidates (an approximate binary search step).
  #
  # Renamed from `next_guess?/1`: the trailing `?` is reserved for boolean
  # predicates and this returns an attribute name.
  defp next_guess(remaining_characters) do
    # Ideally we want to find an attribute that splits the list exactly 50/50.
    half = Enum.count(remaining_characters) / 2

    character_attrs =
      Enum.map(remaining_characters, fn character ->
        {Attributes.character_attributes(character), character}
      end)

    attrs_list =
      character_attrs
      |> Enum.flat_map(&elem(&1, 0))
      |> Enum.uniq()

    # Score every attribute by how far its holder count is from the midpoint.
    scored =
      Enum.map(attrs_list, fn attribute ->
        count = Enum.count(character_attrs, fn {attrs, _name} -> attribute in attrs end)
        {abs(half - count), attribute}
      end)

    # Ties break in favor of the attribute seen last, matching the original
    # fold-then-stable-sort behaviour (the fold prepended, reversing order).
    {_score, best_guess} =
      scored
      |> Enum.reverse()
      |> Enum.min_by(&elem(&1, 0))

    best_guess
  end
end
|
lib/guess_who/contenders/trevor_brown.ex
| 0.729905 | 0.482673 |
trevor_brown.ex
|
starcoder
|
defmodule AstraeaVirgoWeb.JudgementTypeView do
  use AstraeaVirgoWeb, :view

  @moduledoc """
  Response for Judgement Type API
  """

  # One clause per known judgement-type id; an unknown id raises
  # FunctionClauseError, exactly like the original dispatch.
  defp judgement_type("CE") do
    %{
      id: "CE",
      name: "Compile Error",
      penalty: false,
      solved: false
    }
  end

  defp judgement_type("AC") do
    %{
      id: "AC",
      name: "Accepted",
      penalty: false,
      solved: true
    }
  end

  defp judgement_type("TLE") do
    %{
      id: "TLE",
      name: "Time Limit Exceeded",
      penalty: false,
      solved: false
    }
  end

  defp judgement_type("RTE") do
    %{
      id: "RTE",
      name: "Run-Time Error",
      penalty: true,
      solved: false
    }
  end

  defp judgement_type("WA") do
    %{
      id: "WA",
      name: "Wrong Answer",
      penalty: true,
      solved: false
    }
  end

  @doc """
  Response

  ## index.json

  Response for index Judgement Type API: `GET /api/judgement-types`

  Response: list of Object

  | field   | type    | required | null | descript                                      |
  |---------|---------|----------|------|-----------------------------------------------|
  | id      | ID      | yes      | no   |                                               |
  | name    | string  | yes      | no   |                                               |
  | penalty | boolean | no       | yes  | whether this judgement incurs a time penalty  |
  | solved  | boolean | no       | yes  | whether this judgement solves the problem     |

  Example:

  ```json
  [
    {
      "id": "CE",
      "name": "Compile Error",
      "penalty": false,
      "solved": false
    }
  ]
  ```

  ## show.json

  Response for show Judgement Type API: `GET /api/judgement-types/<judgement_type_id>`

  Response: Object with the same fields as above.

  Example:

  ```json
  {
    "id": "CE",
    "name": "Compile Error",
    "penalty": false,
    "solved": false
  }
  ```
  """
  def render("index.json", _assigns) do
    # Order matters: keep the original CE, AC, TLE, RTE, WA listing.
    Enum.map(["CE", "AC", "TLE", "RTE", "WA"], &judgement_type/1)
  end

  def render("show.json", assigns) do
    judgement_type(assigns.type)
  end
end
|
lib/virgo_web/views/judgement_type_view.ex
| 0.743913 | 0.53959 |
judgement_type_view.ex
|
starcoder
|
defmodule Cronex.Job do
  @moduledoc """
  This module represents a job: a task function, the frequency at which it
  should run, and the pid of its most recent run (if any).
  """

  import Cronex.Parser

  defstruct frequency: nil,
            task: nil,
            pid: nil

  @doc """
  Creates a `%Job{}` with a given frequency and task.

  Check `Cronex.Every.every/3` documentation, to view the accepted `frequency` arguments.
  """
  def new(frequency, task)
      when is_atom(frequency) and is_function(task) do
    %Cronex.Job{}
    |> Map.put(:frequency, parse_regular_frequency(frequency))
    |> Map.put(:task, task)
  end

  @doc """
  Creates a `%Job{}` with the given arguments.

  Different argument data types combinations are accepted:

  - When `arg1` is an atom and `arg2` is a string, they represent the `frequency` and `time` respectively.
  - When `arg1` is an integer and `arg2` is an atom, they represent the `interval` and `frequency` respectively.

  Check `Cronex.Every.every/3` documentation, to view the accepted `frequency` and `time` arguments.
  """
  def new(arg1, arg2, task)
      when is_atom(arg1) and is_bitstring(arg2) and is_function(task) do
    %Cronex.Job{}
    |> Map.put(:frequency, parse_regular_frequency(arg1, arg2))
    |> Map.put(:task, task)
  end

  def new(arg1, arg2, task)
      when is_integer(arg1) and is_atom(arg2) and is_function(task) do
    %Cronex.Job{}
    |> Map.put(:frequency, parse_interval_frequency(arg1, arg2))
    |> Map.put(:task, task)
  end

  @doc """
  Creates a `%Job{}` with the given interval, frequency, time and task.

  Check `Cronex.Every.every/4` documentation, to view the accepted `interval`, `frequency` and `time` arguments.
  """
  def new(interval, frequency, time, task)
      when is_integer(interval) and is_atom(frequency) and is_function(task) do
    %Cronex.Job{}
    |> Map.put(:frequency, parse_interval_frequency(interval, frequency, time))
    |> Map.put(:task, task)
  end

  @doc """
  Validates a given `%Job{}`.

  Returns the given `%Job{}` if the job is valid, raises an `ArgumentError`
  if the parsed frequency is `:invalid`.
  """
  def validate!(%Cronex.Job{frequency: frequency} = job) do
    case frequency do
      :invalid -> raise_invalid_frequency_error()
      _ -> job
    end
  end

  @doc """
  Runs and updates the pid attribute of a given `%Job{}`.

  The task is started under the given `Task.Supervisor`.
  """
  def run(%Cronex.Job{task: task} = job, supervisor) do
    {:ok, pid} = Task.Supervisor.start_child(supervisor, task)
    job |> Map.put(:pid, pid)
  end

  @doc """
  Checks if a given `%Job{}` can run, based on it's frequency and pid.

  A job can run when its frequency matches the current date/time and its
  previous run (if any) is dead or never existed.
  """
  def can_run?(%Cronex.Job{} = job) do
    # TODO Process.alive? only works for local processes, improve this to support several nodes
    time_to_run?(job.frequency) and (job.pid == nil or !Process.alive?(job.pid))
  end

  defp raise_invalid_frequency_error do
    raise ArgumentError, """
    An invalid frequency was given when creating a job.
    Check the docs to see the accepted frequency arguments.
    """
  end

  # Frequency tuples are `{minute, hour, day, month, day_of_week}`, where each
  # element is an integer, an interval function, or the `:*` wildcard.
  #
  # NOTE: clause order matters — more specific wildcard patterns must stay
  # above the more general ones. Renamed from `is_time/1`: the `is_` prefix is
  # conventionally reserved for guard-safe macros; plain predicates end in `?`.

  # Every minute job
  defp time_to_run?({:*, :*, :*, :*, :*}), do: true

  # Every interval minute job, check interval minute
  defp time_to_run?({interval, :*, :*, :*, :*}) when is_function(interval) do
    interval.(current_date_time().minute) == 0
  end

  # Every hour job, check minute of job
  defp time_to_run?({minute, :*, :*, :*, :*})
       when is_integer(minute) do
    current_date_time().minute == minute
  end

  # Every interval hour job, check minute of job and interval hour
  defp time_to_run?({minute, interval, :*, :*, :*})
       when is_integer(minute) and is_function(interval) do
    current_date_time().minute == minute and interval.(current_date_time().hour) == 0
  end

  # Every day job, check time of job
  defp time_to_run?({minute, hour, :*, :*, :*})
       when is_integer(minute) and is_integer(hour) do
    current_date_time().minute == minute and current_date_time().hour == hour
  end

  # Every interval day job, check time of job and interval day
  defp time_to_run?({minute, hour, interval, :*, :*})
       when is_integer(minute) and is_integer(hour) and is_function(interval) do
    current_date_time().minute == minute and current_date_time().hour == hour and
      interval.(current_date_time().day - 1) == 0
  end

  # Every week job, check time and day of the week
  defp time_to_run?({minute, hour, :*, :*, day_of_week}) do
    current_date_time().minute == minute and current_date_time().hour == hour and
      Date.day_of_week(current_date_time()) == day_of_week
  end

  # Every month job, check time and day of job
  defp time_to_run?({minute, hour, day, :*, :*})
       when is_integer(minute) and is_integer(hour) and is_integer(day) do
    current_date_time().minute == minute and current_date_time().hour == hour and
      current_date_time().day == day
  end

  # Every interval month job, check time, day and interval month
  defp time_to_run?({minute, hour, day, interval, :*})
       when is_integer(minute) and is_integer(hour) and is_integer(day) and is_function(interval) do
    current_date_time().minute == minute and current_date_time().hour == hour and
      current_date_time().day == day and interval.(current_date_time().month - 1) == 0
  end

  # Every year job, check month, day and time of job
  defp time_to_run?({minute, hour, day, month, :*}) do
    current_date_time().minute == minute and current_date_time().hour == hour and
      current_date_time().day == day and current_date_time().month == month
  end

  defp time_to_run?(_frequency), do: false

  # The provider is configurable so tests can freeze/stub the clock.
  defp current_date_time do
    date_time_provider = Application.get_env(:cronex, :date_time_provider, DateTime)
    date_time_provider.utc_now
  end
end
|
lib/cronex/job.ex
| 0.826852 | 0.774839 |
job.ex
|
starcoder
|
defmodule Axon.Layers do
@moduledoc ~S"""
Functional implementations of common neural network layer
operations.
Layers are the building blocks of neural networks. These
functional implementations can be used to express higher-level
constructs using fundamental building blocks. Neural network
layers are stateful with respect to their parameters.
These implementations do not assume the responsibility of
managing state - instead opting to delegate this responsibility
to the caller.
Basic neural networks can be seen as a composition of functions:
input
|> dense(w1, b1)
|> relu()
|> dense(w2, b2)
|> softmax()
These kinds of models are often referred to as deep feedforward networks
or multilayer perceptrons (MLPs) because information flows forward
through the network with no feedback connections. Mathematically,
a feedforward network can be represented as:
$$f(x) = f^{(3)}(f^{(2)}(f^{(1)}(x)))$$
You can see a similar pattern emerge if we condense the call stack
in the previous example:
softmax(dense(relu(dense(input, w1, b1)), w2, b2))
The chain structure shown here is the most common structure used
in neural networks. You can consider each function $f^{(n)}$ as a
*layer* in the neural network - for example $f^{(2)} is the 2nd
layer in the network. The number of function calls in the
structure is the *depth* of the network. This is where the term
*deep learning* comes from.
Neural networks are often written as the mapping:
$$y = f(x; \theta)$$
Where $x$ is the input to the neural network and $\theta$ are the
set of learned parameters. In Elixir, you would write this:
y = model(input, params)
From the previous example, `params` would represent the collection:
{w1, b1, w2, b2}
where `w1` and `w2` are layer *weights*, and `b1` and `b2` are layer
*biases*.
"""
import Nx.Defn
import Axon.Shared
## Linear
@doc ~S"""
Functional implementation of a dense layer.
Linear transformation of the input such that:
$$y = xW^T + b$$
A dense layer or fully connected layer transforms
the input using the given weight matrix and bias
to compute:
Nx.dot(input, weight) + bias
Typically, both `weight` and `bias` are learnable
parameters trained using gradient-based optimzation.
## Parameter Shapes
* `input` - `{batch_size, ..., input_features}`
* `weight` - `{input_features, output_features}`
* `bias` - `{output_features}`
## Output Shape
`{batch_size, output_features}`
## Examples
iex> input = Nx.tensor([[1.0, 0.5, 1.0, 0.5], [0.0, 0.0, 0.0, 0.0]], type: {:f, 32})
iex> weight = Nx.tensor([[0.2], [0.3], [0.5], [0.8]], type: {:f, 32})
iex> bias = Nx.tensor([1.0], type: {:f, 32})
iex> Axon.Layers.dense(input, weight, bias)
#Nx.Tensor<
f32[2][1]
[
[2.25],
[1.0]
]
>
"""
@doc type: :linear
defn dense(input, weight, bias) do
  # Contract the input's last axis with the weight's first axis, then add bias.
  Nx.add(Nx.dot(input, [Nx.rank(input) - 1], weight, [0]), bias)
end
## Convolutional
@doc """
Functional implementation of a general dimensional convolutional
layer.
Convolutional layers can be described as applying a convolution
over an input signal composed of several input planes. Intuitively,
the input kernel slides `output_channels` number of filters over
the input tensor to extract features from the input tensor.
Convolutional layers are most commonly used in computer vision,
but can also be useful when working with sequences and other input signals.
## Parameter Shapes
* `input` - `{batch_size, input_channels, input_spatial0, ..., input_spatialN}`
* `weight` - `{output_channels, input_channels, kernel_spatial0, ..., kernel_spatialN}`
* `bias` - `{output_channels}` or `{}`
## Options
* `:strides` - kernel strides. Can be a scalar or a list
who's length matches the number of spatial dimensions in
the input tensor. Defaults to 1.
* `:padding` - zero padding on the input. Can be one of
`:valid`, `:same` or a general padding configuration
without interior padding for each spatial dimension
of the input.
* `:input_dilation` - input dilation factor. Equivalent
to applying interior padding on the input. The amount
of interior padding applied is given by `kernel_dilation - 1`.
Defaults to `1` or no dilation.
* `:kernel_dilation` - kernel dilation factor. Equivalent
to applying interior padding on the kernel. The amount
of interior padding applied is given by `kernel_dilation - 1`.
Defaults to `1` or no dilation.
## Examples
### One-dimensional convolution
iex> input = Nx.tensor([[[0.1294, -0.6638, 1.0251]], [[ 0.9182, 1.1512, -1.6149]]], type: {:f, 32})
iex> weight = Nx.tensor([[[-1.5475, 1.2425]], [[0.1871, 0.5458]], [[-0.4488, 0.8879]]], type: {:f, 32})
iex> bias = Nx.tensor([0.7791, 0.1676, 1.5971], type: {:f, 32})
iex> Axon.Layers.conv(input, weight, bias)
#Nx.Tensor<
f32[2][3][2]
[
[
[-0.24591797590255737, 3.08001708984375],
[-0.1704912781715393, 0.6029025316238403],
[0.9496372938156128, 2.80519962310791]
],
[
[0.7885514497756958, -3.0088953971862793],
[0.9677201509475708, -0.4984228312969208],
[2.207162380218506, -0.3534282445907593]
]
]
>
### Two-dimensional convolution
iex> input = Nx.tensor([[[[-1.0476, -0.5041], [-0.9336, 1.5907]]]], type: {:f, 32})
iex> weight = Nx.tensor([
...> [[[0.7514, 0.7356], [1.3909, 0.6800]]],
...> [[[-0.3450, 0.4551], [-0.6275, -0.9875]]],
...> [[[1.8587, 0.4722], [0.6058, -1.0301]]]
...> ], type: {:f, 32})
iex> bias = Nx.tensor([1.9564, 0.2822, -0.5385], type: {:f, 32})
iex> Axon.Layers.conv(input, weight, bias)
#Nx.Tensor<
f32[1][3][1][1]
[
[
[
[0.5815491676330566]
],
[
[-0.5707762241363525]
],
[
[-4.927865028381348]
]
]
]
>
### Three-dimensional convolution
iex> input = Nx.tensor([[[[[-0.6497], [1.0939]], [[-2.5465], [0.7801]]]]], type: {:f, 32})
iex> weight = Nx.tensor([
...> [[[[ 0.7390], [-0.0927]], [[-0.8675], [-0.9209]]]],
...> [[[[-0.6638], [0.4341]], [[0.6368], [1.1846]]]]
...> ], type: {:f, 32})
iex> bias = Nx.tensor([-0.4101, 0.1776], type: {:f, 32})
iex> Axon.Layers.conv(input, weight, bias)
#Nx.Tensor<
f32[1][2][1][1][1]
[
[
[
[
[0.49906185269355774]
]
],
[
[
[0.38622811436653137]
]
]
]
]
>
"""
@doc type: :convolutional
defn conv(input, weight, bias, opts \\ []) do
  # Validate options and fill in defaults; `keyword!` raises on unknown keys.
  opts =
    keyword!(opts,
      strides: 1,
      padding: :valid,
      input_dilation: 1,
      kernel_dilation: 1,
      feature_group_size: 1,
      batch_group_size: 1
    )
  # Compute (at compile time, via `transform`) the shape the bias must be
  # reshaped to so it broadcasts over the batch and the rank-2 spatial dims.
  bias_reshape =
    transform(
      {Nx.shape(bias), Nx.rank(input)},
      fn {bias_shape, rank} ->
        conv_bias_reshape(bias_shape, rank - 2)
      end
    )
  # Delegate the convolution itself to Nx, then add the broadcast bias.
  input
  |> Nx.conv(weight,
    strides: opts[:strides],
    padding: opts[:padding],
    input_dilation: opts[:input_dilation],
    kernel_dilation: opts[:kernel_dilation],
    feature_group_size: opts[:feature_group_size],
    batch_group_size: opts[:batch_group_size]
  )
  |> Nx.add(Nx.reshape(bias, bias_reshape))
end
@doc """
Functional implementation of a general dimensional transposed
convolutional layer.
*Note: This layer is currently implemented as a fractionally strided
convolution by padding the input tensor. Please open an issue if you'd
like this behavior changed.*
Transposed convolutions are sometimes (incorrectly) referred to as
deconvolutions because it "reverses" the spatial dimensions
of a normal convolution. Transposed convolutions are a form of upsampling -
they produce larger spatial dimensions than the input tensor. They
can be thought of as a convolution in reverse - and are sometimes
implemented as the backward pass of a normal convolution.
## Options
* `:strides` - kernel strides. Can be a scalar or a list
who's length matches the number of spatial dimensions in
the input tensor. Defaults to 1.
* `:padding` - zero padding on the input. Can be one of
`:valid`, `:same` or a general padding configuration
without interior padding for each spatial dimension
of the input.
* `:input_dilation` - input dilation factor. Equivalent
to applying interior padding on the input. The amount
of interior padding applied is given by `kernel_dilation - 1`.
Defaults to `1` or no dilation.
* `:kernel_dilation` - kernel dilation factor. Equivalent
to applying interior padding on the kernel. The amount
of interior padding applied is given by `kernel_dilation - 1`.
Defaults to `1` or no dilation.
## Examples
iex> input = Nx.iota({1, 3, 3}, type: {:f, 32})
iex> kernel = Nx.iota({6, 3, 2}, type: {:f, 32})
iex> bias = Nx.tensor(1.0, type: {:f, 32})
iex> Axon.Layers.conv_transpose(input, kernel, bias)
#Nx.Tensor<
f32[1][6][4]
[
[
[40.0, 79.0, 94.0, 43.0],
[94.0, 205.0, 256.0, 133.0],
[148.0, 331.0, 418.0, 223.0],
[202.0, 457.0, 580.0, 313.0],
[256.0, 583.0, 742.0, 403.0],
[310.0, 709.0, 904.0, 493.0]
]
]
>
## References
* [A guide to convolution arithmethic for deep learning](https://arxiv.org/abs/1603.07285v1)
* [Deconvolutional Networks](https://www.matthewzeiler.com/mattzeiler/deconvolutionalnetworks.pdf)
"""
@doc type: :convolutional
defn conv_transpose(input, weight, bias, opts \\ []) do
  # Input and kernel must have the same rank for the padding math below.
  assert_equal_rank!(input, weight)
  opts =
    keyword!(opts,
      strides: 1,
      padding: :valid,
      input_dilation: 1,
      kernel_dilation: 1
    )
  # Shape the bias so it broadcasts over batch and spatial dimensions.
  bias_reshape =
    transform(
      {Nx.shape(bias), Nx.rank(input)},
      fn {bias_shape, rank} ->
        conv_bias_reshape(bias_shape, rank - 2)
      end
    )
  # Normalize strides to one entry per spatial dimension; this list is only
  # used to derive the transpose padding below.
  strides =
    transform(
      {Nx.rank(input), opts[:strides]},
      fn
        {_, [_ | _] = strides} -> strides
        {rank, strides} -> List.duplicate(strides, rank - 2)
      end
    )
  # The "transpose" is implemented as a regular convolution with padding
  # chosen so the output is upsampled (fractionally strided convolution).
  padding =
    transform(
      {Nx.shape(weight), opts[:kernel_dilation], strides, opts[:padding]},
      &conv_transpose_padding/1
    )
  input
  |> Nx.conv(weight,
    strides: opts[:strides],
    padding: padding,
    input_dilation: opts[:input_dilation],
    kernel_dilation: opts[:kernel_dilation]
  )
  |> Nx.add(Nx.reshape(bias, bias_reshape))
end
@doc """
Functional implementation of a general dimensional depthwise
convolution.
Depthwise convolutions apply a single convolutional filter to
each input channel. This is done by setting `feature_group_size`
equal to the number of input channels. This will split the
output_channels into `input_channels` number of groups and
convolve the grouped kernel channels over the corresponding input
channel.
## Parameter Shapes
* `input` - `{batch_size, input_channels, input_spatial0, ..., input_spatialN}`
* `weight` - `{output_channels, 1, kernel_spatial0, ..., kernel_spatialN}`
* `bias` - `{output_channels}` or `{}`
`output_channels` must be a multiple of the input channels.
## Options
* `:strides` - kernel strides. Can be a scalar or a list
who's length matches the number of spatial dimensions in
the input tensor. Defaults to 1.
* `:padding` - zero padding on the input. Can be one of
`:valid`, `:same` or a general padding configuration
without interior padding for each spatial dimension
of the input.
* `:input_dilation` - input dilation factor. Equivalent
to applying interior padding on the input. The amount
of interior padding applied is given by `kernel_dilation - 1`.
Defaults to `1` or no dilation.
* `:kernel_dilation` - kernel dilation factor. Equivalent
to applying interior padding on the kernel. The amount
of interior padding applied is given by `kernel_dilation - 1`.
Defaults to `1` or no dilation.
"""
@doc type: :convolutional
defn depthwise_conv(input, weight, bias, opts \\ []) do
  # Input and kernel must have matching rank.
  assert_equal_rank!(input, weight)
  opts =
    keyword!(opts,
      strides: 1,
      padding: :valid,
      input_dilation: 1,
      kernel_dilation: 1
    )
  # Normalize strides to one entry per spatial dimension.
  strides =
    transform(
      {Nx.rank(input), opts[:strides]},
      fn
        {_, [_ | _] = strides} -> strides
        {rank, strides} -> List.duplicate(strides, rank - 2)
      end
    )
  # Setting feature_group_size to the number of input channels (axis 1)
  # is what makes the convolution depthwise: each input channel is
  # convolved with its own group of kernel channels.
  num_groups = transform(Nx.shape(input), &elem(&1, 1))
  bias_reshape = transform(Nx.shape(bias), &conv_bias_reshape(&1, 1))
  input
  |> Nx.conv(weight,
    strides: strides,
    padding: opts[:padding],
    input_dilation: opts[:input_dilation],
    kernel_dilation: opts[:kernel_dilation],
    feature_group_size: num_groups
  )
  |> Nx.add(Nx.reshape(bias, bias_reshape))
end
@doc """
Functional implementation of a 2-dimensional separable depthwise
convolution.
The 2-d depthwise separable convolution performs 2 depthwise convolutions
each over 1 spatial dimension of the input.
## Parameter Shapes
* `input` - `{batch_size, input_channels, input_spatial0, ..., input_spatialN}`
* `k1` - `{output_channels, 1, kernel_spatial0, 1}`
* `b1` - `{output_channels}` or `{}`
* `k2` - `{output_channels, 1, 1, kernel_spatial1}`
* `b2` - `{output_channels}` or `{}`
`output_channels` must be a multiple of the input channels.
## Options
* `:strides` - kernel strides. Can be a scalar or a list
who's length matches the number of spatial dimensions in
the input tensor. Defaults to 1.
* `:padding` - zero padding on the input. Can be one of
`:valid`, `:same` or a general padding configuration
without interior padding for each spatial dimension
of the input.
* `:input_dilation` - input dilation factor. Equivalent
to applying interior padding on the input. The amount
of interior padding applied is given by `kernel_dilation - 1`.
Defaults to `1` or no dilation.
* `:kernel_dilation` - kernel dilation factor. Equivalent
to applying interior padding on the kernel. The amount
of interior padding applied is given by `kernel_dilation - 1`.
Defaults to `1` or no dilation.
## References
* [Xception: Deep Learning with Depthwise Separable Convolutions](https://arxiv.org/abs/1610.02357)
"""
@doc type: :convolutional
defn separable_conv2d(input, k1, b1, k2, b2, opts \\ []) do
  # Two chained depthwise convolutions, one per spatial dimension.
  depthwise_conv(depthwise_conv(input, k1, b1, opts), k2, b2, opts)
end
@doc """
Functional implementation of a 3-dimensional separable depthwise
convolution.
The 3-d depthwise separable convolution performs 3 depthwise convolutions
each over 1 spatial dimension of the input.
## Parameter Shapes
* `input` - `{batch_size, input_channels, input_spatial0, ..., input_spatialN}`
* `k1` - `{output_channels, 1, kernel_spatial0, 1, 1}`
* `b1` - `{output_channels}` or `{}`
* `k2` - `{output_channels, 1, 1, kernel_spatial1, 1}`
* `b2` - `{output_channels}` or `{}`
* `k3` - `{output_channels, 1, 1, 1, 1, kernel_spatial2}`
* `b3` - `{output_channels}` or `{}`
`output_channels` must be a multiple of the input channels.
## Options
* `:strides` - kernel strides. Can be a scalar or a list
who's length matches the number of spatial dimensions in
the input tensor. Defaults to 1.
* `:padding` - zero padding on the input. Can be one of
`:valid`, `:same` or a general padding configuration
without interior padding for each spatial dimension
of the input.
* `:input_dilation` - input dilation factor. Equivalent
to applying interior padding on the input. The amount
of interior padding applied is given by `kernel_dilation - 1`.
Defaults to `1` or no dilation.
* `:kernel_dilation` - kernel dilation factor. Equivalent
to applying interior padding on the kernel. The amount
of interior padding applied is given by `kernel_dilation - 1`.
Defaults to `1` or no dilation.
## References
* [Xception: Deep Learning with Depthwise Separable Convolutions](https://arxiv.org/abs/1610.02357)
"""
@doc type: :convolutional
defn separable_conv3d(input, k1, b1, k2, b2, k3, b3, opts \\ []) do
  # Three chained depthwise convolutions, one per spatial dimension.
  depthwise_conv(depthwise_conv(depthwise_conv(input, k1, b1, opts), k2, b2, opts), k3, b3, opts)
end
@doc """
Functional implementation of a general dimensional max pooling layer.

Pooling is applied to the spatial dimension of the input tensor.
Max pooling returns the maximum element in each valid window of
the input tensor. It is often used after convolutional layers
to downsample the input even further.

## Options

  * `kernel_size` - window size. Rank must match spatial dimension
    of the input tensor. Required.

  * `:strides` - kernel strides. Can be a scalar or a list
    whose length matches the number of spatial dimensions in
    the input tensor. Defaults to 1.

  * `:padding` - zero padding on the input. Can be one of
    `:valid`, `:same` or a general padding configuration
    without interior padding for each spatial dimension
    of the input.

  * `:window_dilations` - kernel dilation factor. Equivalent
    to applying interior padding on the kernel. The amount
    of interior padding applied is given by `window_dilations - 1`.
    Can be scalar or list whose length matches the number of
    spatial dimensions in the input tensor. Defaults to `1` or no
    dilation.

## Examples

    iex> t = Nx.tensor([[
    ...> [0.051500000059604645, -0.7042999863624573, -0.32899999618530273],
    ...> [-0.37130001187324524, 1.6191999912261963, -0.11829999834299088],
    ...> [0.7099999785423279, 0.7282999753952026, -0.18639999628067017]]], type: {:f, 32})
    iex> Axon.Layers.max_pool(t, kernel_size: 2)
    #Nx.Tensor<
      f32[1][3][2]
      [
        [
          [0.051500000059604645, -0.32899999618530273],
          [1.6191999912261963, 1.6191999912261963],
          [0.7282999753952026, 0.7282999753952026]
        ]
      ]
    >
"""
@doc type: :pooling
defn max_pool(input, opts \\ []) do
  opts =
    keyword!(
      opts,
      [:kernel_size, strides: 1, padding: :valid, window_dilations: 1]
    )

  # Expand the kernel to full input rank: {1, 1, s1, ..., sn} — pooling
  # never crosses batch or channel dimensions.
  window_dimensions =
    transform(
      {Nx.rank(input), opts[:kernel_size]},
      fn {rank, kernel_size} ->
        pool_window_size(kernel_size, rank - 2)
      end
    )

  # A scalar stride is replicated once per spatial dimension.
  # BUG FIX: previously `List.duplicate(rank - 2, strides)`, which
  # duplicates the value `rank - 2` `strides` times (arguments swapped).
  strides =
    transform(
      {Nx.rank(input), opts[:strides]},
      fn
        {_, [_ | _] = strides} -> [1, 1 | strides]
        {rank, strides} -> [1, 1 | List.duplicate(strides, rank - 2)]
      end
    )

  opts = transform(opts, &Keyword.delete(&1, :kernel_size))

  input
  |> Nx.window_max(window_dimensions,
    strides: strides,
    padding: opts[:padding],
    window_dilations: opts[:window_dilations]
  )
end
@doc """
A general dimensional functional average pooling layer.

Pooling is applied to the spatial dimension of the input tensor.
Average pooling returns the average of all elements in valid
windows in the input tensor. It is often used after convolutional
layers to downsample the input even further.

## Options

  * `kernel_size` - window size. Rank must match spatial dimension
    of the input tensor. Required.

  * `:strides` - kernel strides. Can be a scalar or a list
    whose length matches the number of spatial dimensions in
    the input tensor. Defaults to 1.

  * `:padding` - zero padding on the input. Can be one of
    `:valid`, `:same` or a general padding configuration
    without interior padding for each spatial dimension
    of the input.

  * `:window_dilations` - kernel dilation factor. Equivalent
    to applying interior padding on the kernel. The amount
    of interior padding applied is given by `window_dilations - 1`.
    Can be scalar or list whose length matches the number of
    spatial dimensions in the input tensor. Defaults to `1` or no
    dilation.
"""
@doc type: :pooling
defn avg_pool(input, opts \\ []) do
  opts =
    keyword!(
      opts,
      [:kernel_size, strides: 1, padding: :valid, window_dilations: 1]
    )

  # Expand the kernel to full input rank: {1, 1, s1, ..., sn} — pooling
  # never crosses batch or channel dimensions.
  window_dimensions =
    transform(
      {Nx.rank(input), opts[:kernel_size]},
      fn {rank, kernel_size} ->
        pool_window_size(kernel_size, rank - 2)
      end
    )

  # A scalar stride is replicated once per spatial dimension.
  # BUG FIX: previously `List.duplicate(rank - 2, strides)`, which
  # duplicates the value `rank - 2` `strides` times (arguments swapped).
  strides =
    transform(
      {Nx.rank(input), opts[:strides]},
      fn
        {_, [_ | _] = strides} -> [1, 1 | strides]
        {rank, strides} -> [1, 1 | List.duplicate(strides, rank - 2)]
      end
    )

  opts = transform(opts, &Keyword.delete(&1, :kernel_size))

  input
  |> Nx.window_mean(window_dimensions,
    strides: strides,
    padding: opts[:padding],
    window_dilations: opts[:window_dilations]
  )
end
@doc ~S"""
Functional implementation of a general dimensional power average
pooling layer.

Pooling is applied to the spatial dimension of the input tensor.
Power average pooling computes the following function on each
valid window of the input tensor:

$$f(X) = \sqrt[p]{\sum_{x \in X} x^{p}}$$

Where $p$ is given by the keyword argument `:norm`. As $p$ approaches
infinity, it becomes equivalent to max pooling.

## Options

  * `kernel_size` - window size. Rank must match spatial dimension
    of the input tensor. Required.

  * `:strides` - kernel strides. Can be a scalar or a list
    whose length matches the number of spatial dimensions in
    the input tensor. Defaults to 1.

  * `:padding` - zero padding on the input. Can be one of
    `:valid`, `:same` or a general padding configuration
    without interior padding for each spatial dimension
    of the input.

  * `:window_dilations` - kernel dilation factor. Equivalent
    to applying interior padding on the kernel. The amount
    of interior padding applied is given by `window_dilations - 1`.
    Can be scalar or list whose length matches the number of
    spatial dimensions in the input tensor. Defaults to `1` or no
    dilation.

## Examples

    iex> t = Nx.tensor([[[0.9450, 0.4684, 1.8146], [1.2663, 0.4354, -0.0781], [-0.4759, 0.3251, 0.8742]]], type: {:f, 32})
    iex> Axon.Layers.lp_pool(t, kernel_size: 2, norm: 2)
    #Nx.Tensor<
      f32[1][3][2]
      [
        [
          [1.0547149181365967, 1.8740788698196411],
          [1.3390626907348633, 0.4423491656780243],
          [0.5763426423072815, 0.9326926469802856]
        ]
      ]
    >
"""
@doc type: :pooling
defn lp_pool(input, opts \\ []) do
  opts =
    keyword!(
      opts,
      [:kernel_size, strides: 1, padding: :valid, window_dilations: 1, norm: 2]
    )

  # Expand the kernel to full input rank: {1, 1, s1, ..., sn} — pooling
  # never crosses batch or channel dimensions.
  window_dimensions =
    transform(
      {Nx.rank(input), opts[:kernel_size]},
      fn {rank, kernel_size} ->
        pool_window_size(kernel_size, rank - 2)
      end
    )

  # A scalar stride is replicated once per spatial dimension.
  # BUG FIX: previously `List.duplicate(rank - 2, strides)`, which
  # duplicates the value `rank - 2` `strides` times (arguments swapped).
  strides =
    transform(
      {Nx.rank(input), opts[:strides]},
      fn
        {_, [_ | _] = strides} -> [1, 1 | strides]
        {rank, strides} -> [1, 1 | List.duplicate(strides, rank - 2)]
      end
    )

  norm = opts[:norm]

  opts =
    opts
    |> transform(&Keyword.delete(&1, :kernel_size))
    |> transform(&Keyword.delete(&1, :norm))

  # x -> sum(x^p) over each window -> p-th root of the sums.
  input
  |> Nx.power(norm)
  |> Nx.window_sum(window_dimensions,
    strides: strides,
    padding: opts[:padding],
    window_dilations: opts[:window_dilations]
  )
  |> Nx.power(Nx.divide(Nx.tensor(1, type: Nx.type(input)), norm))
end
@doc """
Functional implementation of general dimensional adaptive average
pooling.

Adaptive pooling allows you to specify the desired output size
of the transformed input. This will automatically adapt the
window size and strides to obtain the desired output size. It
will then perform average pooling using the calculated window
size and strides.

Adaptive pooling can be useful when working on multiple inputs with
different spatial input shapes. You can guarantee the output of
an adaptive pooling operation is always the same size regardless
of input shape.

## Options

  * `:output_size` - spatial output size. Must be a tuple with
    size equal to the spatial dimensions in the input tensor.
    Required.
"""
@doc type: :pooling
defn adaptive_avg_pool(input, opts \\ []) do
  opts = keyword!(opts, [:output_size])

  # stride = div(input, output) per spatial dimension.
  window_strides =
    transform(
      {Nx.shape(input), Nx.rank(input), opts[:output_size]},
      fn {shape, rank, output_size} ->
        adaptive_pool_window_strides({shape, output_size}, rank - 2)
      end
    )

  # window = input - (output - 1) * stride per spatial dimension.
  # BUG FIX: this previously called adaptive_pool_window_strides/2,
  # whose head only matches a 2-tuple — handing it
  # {shape, strides, output_size} raises FunctionClauseError. Use
  # adaptive_pool_window_size/2, as adaptive_max_pool/2 does.
  window_dimensions =
    transform(
      {Nx.shape(input), Nx.rank(input), window_strides, opts[:output_size]},
      fn {shape, rank, strides, output_size} ->
        adaptive_pool_window_size({shape, strides, output_size}, rank - 2)
      end
    )

  input
  |> Nx.window_mean(window_dimensions, padding: :valid, strides: window_strides)
end
@doc """
Functional implementation of general dimensional adaptive max
pooling.

Adaptive pooling allows you to specify the desired output size
of the transformed input. The window size and strides are derived
automatically from the input shape and the requested output shape,
and the input is then max-pooled with them.

Adaptive pooling can be useful when working on multiple inputs with
different spatial input shapes. You can guarantee the output of
an adaptive pooling operation is always the same size regardless
of input shape.

## Options

  * `:output_size` - spatial output size. Must be a tuple with
    size equal to the spatial dimensions in the input tensor.
    Required.
"""
@doc type: :pooling
defn adaptive_max_pool(input, opts \\ []) do
  opts = keyword!(opts, [:output_size])

  # stride = div(input, output) per spatial dimension.
  strides =
    transform(
      {Nx.shape(input), Nx.rank(input), opts[:output_size]},
      fn {shape, rank, out_size} ->
        adaptive_pool_window_strides({shape, out_size}, rank - 2)
      end
    )

  # window = input - (output - 1) * stride per spatial dimension.
  window =
    transform(
      {Nx.shape(input), Nx.rank(input), strides, opts[:output_size]},
      fn {shape, rank, strides, out_size} ->
        adaptive_pool_window_size({shape, strides, out_size}, rank - 2)
      end
    )

  Nx.window_max(input, window, padding: :valid, strides: strides)
end
## Normalization
@doc ~S"""
Functional implementation of batch normalization.

Normalizes the input by computing mean and variance over every
axis except the given `:channel_index`, then scaling according to:

$$y = \frac{x - E[x]}{\sqrt{Var[x] + \epsilon}} * \gamma + \beta$$

`gamma` and `beta` are often trainable parameters. This method does
not maintain an EMA of mean and variance.

## Options

  * `:epsilon` - numerical stability term. $epsilon$ in the above
    formulation. Defaults to `1.0e-5`.

  * `:channel_index` - channel index used to determine reduction
    axes for mean and variance calculation. Defaults to `1`.

## References

  * [Batch Normalization: Accelerating Deep Network Training by Reducing Internal Covariate Shift](https://arxiv.org/abs/1502.03167)
"""
@doc type: :normalization
defn batch_norm(input, gamma, bias, opts \\ []) do
  opts = keyword!(opts, epsilon: 1.0e-5, channel_index: 1)

  # Every axis except the channel axis participates in the statistics.
  reduction_axes = transform({Nx.axes(input), opts[:channel_index]}, &batch_norm_axes/1)

  {mean, var} = mean_and_variance(input, axes: reduction_axes)
  normalize(input, mean, var, gamma, bias, epsilon: opts[:epsilon])
end
@doc ~S"""
Functional implementation of layer normalization.

Normalizes the input by calculating mean and variance of the
input tensor along the feature dimension given by `:channel_index`:

$$y = \frac{x - E[x]}{\sqrt{Var[x] + \epsilon}} * \gamma + \beta$$

`gamma` and `beta` are often trainable parameters. This method does
not maintain an EMA of mean and variance.

## Options

  * `:epsilon` - numerical stability term. $epsilon$ in the above
    formulation. Defaults to `1.0e-6`.

  * `:channel_index` - channel index used to determine the reduction
    axis for mean and variance calculation. Defaults to `1`.
"""
@doc type: :normalization
defn layer_norm(input, gamma, bias, opts \\ []) do
  opts = keyword!(opts, epsilon: 1.0e-6, channel_index: 1)

  # Layer norm reduces over exactly one feature axis.
  channel_axis = opts[:channel_index]

  {mean, var} = mean_and_variance(input, axes: [channel_axis])
  normalize(input, mean, var, gamma, bias, epsilon: opts[:epsilon])
end
@doc ~S"""
Functional implementation of group normalization.

Normalizes the input by reshaping input into groups of given
`:group_size` and then calculating the mean and variance along
every dimension but the input batch dimension.

$$y = \frac{x - E[x]}{\sqrt{Var[x] + \epsilon}} * \gamma + \beta$$

`gamma` and `beta` are often trainable parameters. This method does
not maintain an EMA of mean and variance.

## Options

  * `:group_size` - channel group size. Size of each group to split
    input channels into.

  * `:epsilon` - numerical stability term. $epsilon$ in the above
    formulation. Defaults to `1.0e-6`.

  * `:channel_index` - channel index used to determine reduction
    axes and group shape for mean and variance calculation.
    Defaults to `1`.

## References

  * [Group Normalization](https://arxiv.org/abs/1803.08494v3)
"""
@doc type: :normalization
defn group_norm(input, gamma, bias, opts \\ []) do
  opts = keyword!(opts, [:group_size, epsilon: 1.0e-6, channel_index: 1])

  # Split the channel axis into {num_groups, group_size}.
  group_shape =
    transform({Nx.shape(input), opts[:group_size], opts[:channel_index]}, &group_norm_shape/1)

  x = Nx.reshape(input, group_shape)

  axes = transform(Nx.rank(x), &group_norm_axes/1)
  {mean, var} = mean_and_variance(x, axes: axes)

  # BUG FIX: standardize in the grouped shape, then apply the affine
  # transform exactly once in the original shape. The previous
  # implementation called normalize/5 with gamma/bias (applying them in
  # the grouped shape) and then multiplied by gamma and added bias
  # *again* after the reshape; it also silently dropped the :epsilon
  # option.
  x = (x - mean) * Nx.rsqrt(var + opts[:epsilon])

  Nx.reshape(x, Nx.shape(input)) * gamma + bias
end
@doc ~S"""
Functional implementation of instance normalization.

Normalizes the input by calculating mean and variance of the
input tensor along the spatial dimensions of the input.

$$y = \frac{x - E[x]}{\sqrt{Var[x] + \epsilon}} * \gamma + \beta$$

`gamma` and `beta` are often trainable parameters. This method does
not maintain an EMA of mean and variance.

## Options

  * `:epsilon` - numerical stability term. $epsilon$ in the above
    formulation. Defaults to `1.0e-6`.

  * `:channel_index` - channel index used to determine reduction
    axes for mean and variance calculation. Defaults to `1`.

## References

  * [Instance Normalization: The Missing Ingredient for Fast Stylization](https://arxiv.org/abs/1607.08022v3)
"""
@doc type: :normalization
defn instance_norm(input, gamma, bias, opts \\ []) do
  opts = keyword!(opts, epsilon: 1.0e-6, channel_index: 1)

  # Reduce over spatial axes only — batch and channel are excluded.
  spatial_axes = transform({Nx.axes(input), opts[:channel_index]}, &instance_norm_axes/1)

  {mean, var} = mean_and_variance(input, axes: spatial_axes)
  normalize(input, mean, var, gamma, bias, epsilon: opts[:epsilon])
end
## Stochastic
# TODO: Manage the state of these RNGs
@doc ~S"""
Functional implementation of a dropout layer.

Applies a mask to some elements of the input tensor with probability
`rate` and scales the input tensor by a factor of $\frac{1}{1 - rate}$.

Dropout is a form of regularization that helps prevent overfitting
by preventing models from becoming too reliant on certain connections.
Dropout can somewhat be thought of as learning an ensemble of models
with random connections masked.

## Options

  * `:rate` - dropout rate. Used to determine probability a connection
    will be dropped. Required.

  * `:noise_shape` - input noise shape. Shape of `mask` which can be useful
    for broadcasting `mask` across feature channels or other dimensions.
    Defaults to shape of input tensor.

## References

  * [Dropout: A Simple Way to Prevent Neural Networks from Overfitting](https://jmlr.org/papers/v15/srivastava14a.html)
"""
@doc type: :dropout
defn dropout(input, opts \\ []) do
  opts = keyword!(opts, [:rate, noise_shape: Nx.shape(input)])

  keep_prob = Nx.tensor(1, type: Nx.type(input)) - opts[:rate]

  # Keep an element iff its uniform draw falls below the keep probability.
  mask = Nx.less(Nx.random_uniform(opts[:noise_shape], type: Nx.type(input)), keep_prob)

  # When a smaller noise shape was supplied, broadcast the mask up to
  # the input shape so Nx.select sees matching shapes.
  mask =
    transform(
      {mask, Nx.shape(input)},
      fn {m, input_shape} ->
        if Nx.shape(m) == input_shape do
          m
        else
          Nx.broadcast(m, input_shape)
        end
      end
    )

  # Surviving elements are rescaled by 1 / keep_prob; dropped ones are 0.
  Nx.select(mask, input / keep_prob, Nx.tensor(0, type: Nx.type(input)))
end
@doc """
Functional implementation of an n-dimensional spatial
dropout layer.

Applies a mask to entire feature maps instead of individual
elements. This is done by calculating a mask shape equal to
the spatial dimensions of the input tensor with 1 channel,
and then broadcasting the mask across the feature dimension
of the input tensor.

## Options

  * `:rate` - dropout rate. Used to determine probability a connection
    will be dropped. Defaults to `0.5`.

## References

  * [Efficient Object Localization Using Convolutional Networks](https://arxiv.org/abs/1411.4280)
"""
@doc type: :dropout
defn spatial_dropout(input, opts \\ []) do
  opts = keyword!(opts, rate: 0.5)
  # Mask shape = input shape with the channel dimension forced to 1, so
  # the mask broadcasts across (and drops) whole feature maps.
  noise_shape = transform(Nx.shape(input), &spatial_dropout_noise_shape/1)
  dropout(input, rate: opts[:rate], noise_shape: noise_shape)
end
@doc """
Functional implementation of an alpha dropout layer.

Alpha dropout is a type of dropout that forces the input
to have zero mean and unit standard deviation. Randomly
masks some elements and scales to enforce self-normalization.

## Options

  * `:rate` - dropout rate. Used to determine probability a connection
    will be dropped. Defaults to `0.5`.

## References

  * [Self-Normalizing Neural Networks](https://arxiv.org/abs/1706.02515)
"""
@doc type: :dropout
defn alpha_dropout(input, opts \\ []) do
  opts = keyword!(opts, rate: 0.5)
  rate = opts[:rate]

  # SELU constants: alpha and the SELU scale lambda.
  alpha = Nx.tensor(1.6732632423543772848170429916717, type: Nx.type(input))
  scale = Nx.tensor(1.0507009873554804934193349852946, type: Nx.type(input))
  # alpha_p = -lambda * alpha: the saturation value dropped units take.
  alpha_p = -alpha * scale
  keep_prob = Nx.tensor(1, type: Nx.type(input)) - rate

  # Elements are kept where a uniform draw falls below keep_prob.
  mask = Nx.less(Nx.random_uniform(Nx.shape(input), type: Nx.type(input)), keep_prob)
  # Affine correction (a, b) restoring zero mean / unit variance after
  # masking.
  # NOTE(review): the SELU-dropout derivation in the referenced paper
  # gives a = (q + alpha_p^2 * q * (1 - q))^(-1/2); this computes
  # rsqrt(keep_prob * alpha_p^2) instead — confirm against the paper.
  a = Nx.rsqrt(keep_prob * Nx.power(Nx.tensor(1, type: Nx.type(input)) * alpha_p, 2))
  b = -a * alpha_p * rate

  # Dropped units are set to the saturation value alpha_p, then the
  # whole tensor is affinely corrected.
  x = Nx.select(mask, input, alpha_p)
  a * x + b
end
@doc """
Functional implementation of a feature alpha dropout layer.

Feature alpha dropout applies dropout in the same manner as
spatial dropout; however, it also enforces self-normalization
by masking inputs with the SELU activation function and scaling
unmasked inputs.

## Options

  * `:rate` - dropout rate. Used to determine probability a connection
    will be dropped. Defaults to `0.5`.
"""
@doc type: :dropout
defn feature_alpha_dropout(input, opts \\ []) do
  opts = keyword!(opts, rate: 0.5)
  # Mask whole feature maps: channel dimension of the noise shape is 1.
  noise_shape = transform(Nx.shape(input), &spatial_dropout_noise_shape/1)
  keep_prob = 1 - opts[:rate]
  mask = Nx.less(Nx.random_uniform(noise_shape, type: Nx.type(input)), keep_prob)

  # Broadcast the feature-map mask up to the full input shape.
  mask =
    transform(
      {mask, Nx.shape(input)},
      fn {mask, input_shape} ->
        if Nx.shape(mask) == input_shape,
          do: mask,
          else: Nx.broadcast(mask, input_shape)
      end
    )

  # Kept maps are rescaled by 1 / keep_prob; dropped maps are replaced
  # with -selu(input) to preserve self-normalization.
  Nx.select(mask, input / keep_prob, Nx.negate(Axon.Activations.selu(input)))
end
## Shape
@doc """
Flattens input to shape of `{batch, units}` by folding outer
dimensions.

## Examples

    iex> Axon.Layers.flatten(Nx.iota({1, 2, 2}, type: {:f, 32}))
    #Nx.Tensor<
      f32[1][4]
      [
        [0.0, 1.0, 2.0, 3.0]
      ]
    >
"""
defn flatten(x) do
  # Target shape: keep the batch dimension, collapse everything else
  # into a single trailing dimension.
  new_shape =
    transform(
      Nx.shape(x),
      fn shape ->
        batch = elem(shape, 0)
        units = Nx.size(Tuple.delete_at(shape, 0))
        {batch, units}
      end
    )

  Nx.reshape(x, new_shape)
end
## Helpers
# `window_x` functions expect a window whose rank matches the input
# rank. Basic pooling never crosses the batch or channel dimensions,
# so both get a window size of 1; a scalar window is replicated across
# every spatial dimension.
defp pool_window_size(w, spatial_rank) when is_integer(w) do
  List.to_tuple([1, 1 | List.duplicate(w, spatial_rank)])
end

defp pool_window_size(w, _spatial_rank) when is_tuple(w) do
  List.to_tuple([1, 1 | Tuple.to_list(w)])
end

defp pool_window_size(w, _spatial_rank) do
  raise ArgumentError,
        "expected pool window to be tuple or integer" <>
          " , got #{inspect(w)}"
end
# Adaptive pooling functions adapt the strides of the window
# according to:
#
#   stride = div(input_size, output_size)
#
# This preserves the size of the channel/batch dimension (stride 1 on
# both).
defp adaptive_pool_window_strides({input_shape, output_spatial}, spatial_rank) do
  # Drop batch and channel dims to get the spatial extents.
  input_spatial =
    input_shape
    |> Tuple.delete_at(0)
    |> Tuple.delete_at(0)
    |> Tuple.to_list()

  output_spatial =
    case output_spatial do
      x when is_integer(x) ->
        List.duplicate(x, spatial_rank)

      x when is_tuple(x) ->
        Tuple.to_list(x)

      x ->
        raise ArgumentError,
              "expected output spatial dimensions to be tuple" <>
                " or integer, got #{inspect(x)}"
    end

  # BUG FIX: this previously zipped {output, input} while naming the
  # fields {input, output}, computing div(output, input) — which is 0
  # whenever the output is smaller than the input (the normal case),
  # contradicting the stride = div(input, output) formula above.
  strides =
    input_spatial
    |> Enum.zip(output_spatial)
    |> Enum.map(fn {input, output} -> div(input, output) end)

  [1, 1 | strides]
end
# Adaptive pooling derives the window size from the already-computed
# strides via:
#
#   size = input_size - (output_size - 1) * stride
#
# Batch and channel dimensions keep a window of 1. The strides arrive
# as a full-rank list, so the two leading (batch/channel) entries are
# pattern-matched away.
defp adaptive_pool_window_size({input_shape, [_, _ | stride], output_spatial}, spatial_rank) do
  # Drop batch and channel dims to get the spatial extents.
  input_spatial =
    input_shape
    |> Tuple.delete_at(0)
    |> Tuple.delete_at(0)
    |> Tuple.to_list()

  output_spatial =
    case output_spatial do
      x when is_integer(x) ->
        List.duplicate(x, spatial_rank)

      x when is_tuple(x) ->
        Tuple.to_list(x)

      x ->
        raise ArgumentError,
              "expected output spatial dimensions to be tuple" <>
                " or integer, got #{inspect(x)}"
    end

  window =
    for {input, output, s} <- Enum.zip([input_spatial, output_spatial, stride]) do
      input - (output - 1) * s
    end

  List.to_tuple([1, 1 | window])
end
# In order to broadcast against a conv output, a bias given as a
# vector must be expanded to rank `spatial_rank + 2`
# ({1, channels, 1, ..., 1}); a scalar ({}) or an already-shaped bias
# passes through untouched.
defp conv_bias_reshape(input_shape, spatial_rank) do
  case input_shape do
    {} ->
      {}

    {channels} ->
      List.to_tuple([1, channels | List.duplicate(1, spatial_rank)])

    shape when is_tuple(shape) ->
      shape
  end
end
# Spatial dropout masks are broadcast across feature channels: force
# the channel dimension (index 1) of the noise shape to 1 and keep
# every spatial dimension intact.
defp spatial_dropout_noise_shape(input_shape) do
  put_elem(input_shape, 1, 1)
end
# Fractionally strided convolution (transposed convolution)
# by padding the input.
#
# For :valid / :same, computes per-spatial-dimension {pad_lo, pad_hi}
# pairs for the input such that the convolution behaves as a
# transposed convolution. Any other padding value passes through
# unchanged (second clause below).
defp conv_transpose_padding({kernel_shape, kernel_dilation, strides, padding})
     when padding in [:valid, :same] do
  # Drop the two leading (channel) dims of the kernel shape.
  kernel_spatial_dims =
    kernel_shape
    |> Tuple.delete_at(0)
    |> Tuple.delete_at(0)

  # Normalize a scalar dilation to a per-dimension list.
  kernel_dilation =
    if is_list(kernel_dilation),
      do: kernel_dilation,
      else: List.duplicate(kernel_dilation, tuple_size(kernel_spatial_dims))

  # Effective kernel extent after dilation: (k - 1) * r + 1.
  effective_kernel_size =
    kernel_spatial_dims
    |> Tuple.to_list()
    |> Enum.zip(kernel_dilation)
    |> Enum.map(fn {k, r} -> (k - 1) * r + 1 end)

  case padding do
    :valid ->
      effective_kernel_size
      |> Enum.zip(strides)
      |> Enum.map(fn {k, s} ->
        pad_len = k + s - 2 + max(k - s, 0)
        pad_a = k - 1
        {pad_a, pad_len - pad_a}
      end)

    :same ->
      effective_kernel_size
      |> Enum.zip(strides)
      |> Enum.map(fn {k, s} ->
        pad_len = k + s - 2

        # When the stride exceeds the kernel extent, all padding goes
        # in front; otherwise split it (front gets the ceiling half).
        pad_a =
          if s > k - 1 do
            k - 1
          else
            ceil(pad_len / 2)
          end

        {pad_a, pad_len - pad_a}
      end)
  end
end

# Explicit padding configurations pass through unchanged.
defp conv_transpose_padding({_, _, _, padding}), do: padding
# Batch norm reduces over every axis except the channel axis.
defp batch_norm_axes({axes, channel_index}) do
  Enum.reject(axes, &(&1 == channel_index))
end
# Instance norm reduces over the spatial axes only — batch (0) and the
# channel axis are excluded. With no spatial axes left (rank < 3)
# there is nothing to normalize over, which is an error.
defp instance_norm_axes({axes, channel_index}) do
  case axes -- [0, channel_index] do
    [] -> raise ArgumentError, "rank of input shape must be at least 3"
    reduction_axes -> reduction_axes
  end
end
# Group norm (on the grouped reshape) reduces over axes 1..rank-2 plus
# the trailing axis — i.e. everything except batch (0) and axis
# rank - 2.
defp group_norm_axes(rank) do
  Enum.to_list(1..(rank - 2)) ++ [rank - 1]
end
# Splits the channel axis into {num_groups, group_size}, increasing
# the rank of the shape by one.
defp group_norm_shape({shape, group_size, channel_index}) do
  channels = elem(shape, channel_index)
  num_groups = div(channels, group_size)

  shape
  |> put_elem(channel_index, num_groups)
  |> Tuple.insert_at(channel_index + 1, group_size)
end
end
|
lib/axon/layers.ex
| 0.959193 | 0.891999 |
layers.ex
|
starcoder
|
defmodule Chunky.Sequence.OEIS.Repr do
@moduledoc """
Sequences from the [Online Encyclopedia of Integer Sequences](https://oeis.org) dealing with number
representations, like alternate bases, digit contents, and patterns of digits.
## Available Sequences
### Omissions and Deletions of digits
- `create_sequence_a004176/1` - A004176 - Omit 1's from n.
- `create_sequence_a004177/1` - A004177 - Omit 2's from n.
- `create_sequence_a004178/1` - A004178 - Omit 3's from n.
- `create_sequence_a004179/1` - A004179 - Omit 4's from n.
- `create_sequence_a004180/1` - A004180 - Omit 5's from n.
- `create_sequence_a004181/1` - A004181 - Omit 6's from n.
- `create_sequence_a004182/1` - A004182 - Omit 7's from n.
- `create_sequence_a004183/1` - A004183 - Omit 8's from n.
- `create_sequence_a004184/1` - A004184 - Omit 9's from n.
- `create_sequence_a004719/1` - A004719 - Delete all 0's from n.
- `create_sequence_a004720/1` - A004720 - Delete all digits '1' from the sequence of nonnegative integers.
- `create_sequence_a004721/1` - A004721 - Delete all 2's from the sequence of nonnegative integers.
- `create_sequence_a004722/1` - A004722 - Delete all digits 3 from the terms of the sequence of nonnegative integers.
- `create_sequence_a004723/1` - A004723 - Delete all 4's from the sequence of nonnegative integers.
- `create_sequence_a004724/1` - A004724 - Delete all 5's from the sequence of nonnegative integers.
- `create_sequence_a004725/1` - A004725 - Delete all 6's from the sequence of nonnegative integers.
- `create_sequence_a004726/1` - A004726 - Delete all 7's from the sequence of nonnegative integers.
- `create_sequence_a004727/1` - A004727 - Delete all 8's from the sequence of nonnegative integers.
- `create_sequence_a004728/1` - A004728 - Delete all 9's from the sequence of nonnegative integers.
### Base 10 Representations
These representations are in Base 10. See below for representations in other bases.
Numbers with or without specific decimal digits:
- `create_sequence_a011531/1` - A011531 - Numbers that contain a digit 1 in their decimal representation.
- `create_sequence_a011532/1` - A011532 - Numbers that contain a 2.
- `create_sequence_a011533/1` - A011533 - Numbers that contain a 3.
- `create_sequence_a011534/1` - A011534 - Numbers that contain a 4.
- `create_sequence_a011535/1` - A011535 - Numbers that contain a 5.
- `create_sequence_a011536/1` - A011536 - Numbers that contain a 6.
- `create_sequence_a011537/1` - A011537 - Numbers that contain at least one 7.
- `create_sequence_a011538/1` - A011538 - Numbers that contain an 8.
- `create_sequence_a011539/1` - A011539 - "9ish numbers": decimal representation contains at least one nine.
- `create_sequence_a011540/1` - A011540 - Numbers that contain a digit 0.
- `create_sequence_a014263/1` - A014263 - Numbers that contain even digits only.
- `create_sequence_a052382/1` - A052382 - Numbers without 0 as a digit, a.k.a. zeroless numbers.
- `create_sequence_a052383/1` - A052383 - Numbers without 1 as a digit.
- `create_sequence_a052404/1` - A052404 - Numbers without 2 as a digit.
- `create_sequence_a052405/1` - A052405 - Numbers without 3 as a digit.
- `create_sequence_a052406/1` - A052406 - Numbers without 4 as a digit.
- `create_sequence_a052413/1` - A052413 - Numbers without 5 as a digit.
- `create_sequence_a052414/1` - A052414 - Numbers without 6 as a digit.
- `create_sequence_a052419/1` - A052419 - Numbers without 7 as a digit.
- `create_sequence_a052421/1` - A052421 - Numbers without 8 as a digit.
- `create_sequence_a051003/1` - A051003 - Beastly (or hateful) numbers: numbers containing the string 666 in their decimal expansion.
- `create_sequence_a052040/1` - A052040 - Numbers n such that n^2 lacks the digit zero in its decimal expansion.
- `create_sequence_a067251/1` - A067251 - Numbers with no trailing zeros in decimal representation.
- `create_sequence_a097256/1` - A097256 - Numbers whose set of base 10 digits is {0,9}.
- `create_sequence_a121022/1` - A121022 - Even numbers containing a 2 in their decimal representation.
Digit counts, summations, or products in base 10:
- `create_sequence_a001101/1` - A001101 - Moran numbers: n such that (n / sum of digits of n) is prime.
- `create_sequence_a005349/1` - A005349 - Niven (or Harshad) numbers: numbers that are divisible by the sum of their digits.
- `create_sequence_a007602/1` - A007602 - Numbers that are divisible by the product of their digits.
- `create_sequence_a007953/1` - A007953 - Digital sum (i.e., sum of digits) of n; also called digsum(n).
- `create_sequence_a007954/1` - A007954 - Product of decimal digits of n.
- `create_sequence_a055640/1` - A055640 - Number of nonzero digits in decimal expansion of n.
- `create_sequence_a055641/1` - A055641 - Number of zero digits in n.
- `create_sequence_a055642/1` - A055642 - Number of digits in decimal expansion of n.
- `create_sequence_a102669/1` - A102669 - Number of digits >= 2 in decimal representation of n.
- `create_sequence_a102670/1` - A102670 - Number of digits >= 2 in the decimal representations of all integers from 0 to n.
- `create_sequence_a102671/1` - A102671 - Number of digits >= 3 in decimal representation of n.
- `create_sequence_a102672/1` - A102672 - Number of digits >= 3 in the decimal representations of all integers from 0 to n.
- `create_sequence_a102673/1` - A102673 - Number of digits >= 4 in decimal representation of n.
- `create_sequence_a102674/1` - A102674 - Number of digits >= 4 in the decimal representations of all integers from 0 to n.
- `create_sequence_a102675/1` - A102675 - Number of digits >= 5 in decimal representation of n.
- `create_sequence_a102676/1` - A102676 - Number of digits >= 5 in the decimal representations of all integers from 0 to n.
- `create_sequence_a102677/1` - A102677 - Number of digits >= 6 in decimal representation of n.
- `create_sequence_a102678/1` - A102678 - Number of digits >= 6 in the decimal representations of all integers from 0 to n.
- `create_sequence_a102679/1` - A102679 - Number of digits >= 7 in decimal representation of n.
- `create_sequence_a102680/1` - A102680 - Number of digits >= 7 in the decimal representations of all integers from 0 to n.
- `create_sequence_a102681/1` - A102681 - Number of digits >= 8 in decimal representation of n.
- `create_sequence_a102682/1` - A102682 - Number of digits >= 8 in the decimal representations of all integers from 0 to n.
- `create_sequence_a102683/1` - A102683 - Number of digits 9 in decimal representation of n.
- `create_sequence_a102684/1` - A102684 - Number of times the digit 9 appears in the decimal representations of all integers from 0 to n.
- `create_sequence_a115983/1` - A115983 - Apocalypse primes: 10^665+a(n) has 666 decimal digits and is prime.
- `create_sequence_a160093/1` - A160093 - Number of digits in n, excluding any trailing zeros.
- `create_sequence_a193238/1` - A193238 - Number of prime digits in decimal representation of n.
- `create_sequence_a196563/1` - A196563 - Number of even digits in decimal representation of n.
- `create_sequence_a122840/1` - A122840 - a(n) is the number of 0s at the end of n when n is written in base 10.
- `create_sequence_a114904/1` - A114904 - Sorted numbers of digits of any base-10 narcissistic number.
- `create_sequence_a046253/1` - A046253 - Equal to the sum of its nonzero digits raised to its own power.
- `create_sequence_a010872/1` - A010872 - a(n) = n mod 3.
Rhonda numbers in base 10:
- `create_sequence_a099542/1` - A099542 - Rhonda numbers to base 10.
- `create_sequence_a100987/1` - A100987 - Integers that are Rhonda numbers to some base.
- `create_sequence_a100988/1` - A100988 - Integers that are Rhonda numbers to more than one base.
Kaprekar numbers:
- `create_sequence_a006886/1` - A006886 - Kaprekar numbers: positive numbers n such that n = q+r and n^2 = q*10^m+r, for some m >= 1, q >= 0 and 0 <= r < 10^m, with n != 10^a, a >= 1.
- `create_sequence_a053816/1` - A053816 - Another version of the Kaprekar numbers (A006886): n such that n=q+r and n^2=q*10^m+r, for some m >= 1, q>=0 and 0<=r<10^m, with n != 10^a, a>=1 and n an m-digit number.
Armstrong numbers:
- `create_sequence_a005188/1` - A005188 - Armstrong (or pluperfect, or Plus Perfect, or narcissistic) numbers: m-digit positive numbers equal to sum of the m-th powers of their digits.
- `create_sequence_a014576/1` - A014576 - Smallest n-digit narcissistic (or Armstrong) number: smallest n-digit number equal to sum of n-th powers of its digits (or 0 if no such number exists).
### Base Specific Representations (excluding Base 10)
Base 2:
- `create_sequence_a007088/1` - A007088 - The binary numbers (or binary words, or binary vectors): numbers written in base 2.
- `create_sequence_a023416/1` - A023416 - Number of 0's in binary expansion of n.
- `create_sequence_a059015/1` - A059015 - Total number of 0's in binary expansions of 0, ..., n.
- `create_sequence_a071858/1` - A071858 - (Number of 1's in binary expansion of n) mod 3.
- `create_sequence_a179868/1` - A179868 - (Number of 1's in binary expansion of n) mod 4.
- `create_sequence_a000788/1` - A000788 - Total number of 1's in binary expansions of 0, ..., n.
Base 3:
- `create_sequence_a007089/1` - A007089 - Numbers in base 3.
- `create_sequence_a005823/1` - A005823 - Numbers whose ternary expansion contains no 1's.
- `create_sequence_a005836/1` - A005836 - Numbers n whose base 3 representation contains no 2.
- `create_sequence_a023692/1` - A023692 - Numbers with a single 1 in their ternary expansion.
- `create_sequence_a043321/1` - A043321 - Numbers n such that number of 0's in base 3 is 1.
- `create_sequence_a023693/1` - A023693 - Numbers with exactly 2 1's in ternary expansion.
- `create_sequence_a023694/1` - A023694 - Numbers with exactly 3 1's in ternary expansion.
- `create_sequence_a023695/1` - A023695 - Numbers with exactly 4 1's in ternary expansion.
- `create_sequence_a023696/1` - A023696 - Numbers with exactly 5 1's in ternary expansion.
- `create_sequence_a023697/1` - A023697 - Numbers with exactly 6 1's in ternary expansion.
- `create_sequence_a023698/1` - A023698 - Numbers with exactly 7 1's in ternary expansion.
- `create_sequence_a023699/1` - A023699 - Numbers with a single 2 in their ternary expansion.
- `create_sequence_a023700/1` - A023700 - Numbers with exactly 2 2's in ternary expansion.
- `create_sequence_a023701/1` - A023701 - Numbers with exactly 3 2's in their ternary expansion.
- `create_sequence_a023702/1` - A023702 - Numbers with exactly 4 2's in ternary expansion of n.
- `create_sequence_a023703/1` - A023703 - Numbers with exactly 5 2's in ternary expansion.
- `create_sequence_a023704/1` - A023704 - Numbers with exactly 6 2's in ternary expansion.
- `create_sequence_a032924/1` - A032924 - Numbers whose ternary expansion contains no 0.
- `create_sequence_a062756/1` - A062756 - Number of 1's in ternary (base 3) expansion of n.
- `create_sequence_a074940/1` - A074940 - Numbers having at least one 2 in their ternary representation.
- `create_sequence_a077267/1` - A077267 - Number of zeros in base 3 expansion of n.
- `create_sequence_a081603/1` - A081603 - Number of 2's in ternary representation of n.
- `create_sequence_a081605/1` - A081605 - Numbers having at least one 0 in their ternary representation.
- `create_sequence_a081606/1` - A081606 - Numbers having at least one 1 in their ternary representation.
- `create_sequence_a023745/1` - A023745 - Plaindromes: numbers whose digits in base 3 are in nondecreasing order.
Base 4:
- `create_sequence_a007090/1` - A007090 - Numbers in base 4.
- `create_sequence_a010344/1` - A010344 - Base-4 Armstrong or narcissistic numbers (written in base 10).
- `create_sequence_a023706/1` - A023706 - Numbers with a single 0 in their base 4 expansion.
- `create_sequence_a023707/1` - A023707 - Numbers with exactly 2 0's in base 4 expansion.
- `create_sequence_a023708/1` - A023708 - Numbers with exactly 3 0's in base 4 expansion.
- `create_sequence_a023709/1` - A023709 - Numbers with no 1's in base 4 expansion.
- `create_sequence_a023710/1` - A023710 - Numbers with a single 1 in their base 4 expansion.
- `create_sequence_a023711/1` - A023711 - Numbers with exactly 2 1's in base 4 expansion.
- `create_sequence_a023712/1` - A023712 - Numbers with exactly 3 1's in base 4 expansion.
- `create_sequence_a023713/1` - A023713 - Numbers with no 2's in base 4 expansion.
- `create_sequence_a023714/1` - A023714 - Numbers with a single 2 in their base 4 expansion.
- `create_sequence_a023715/1` - A023715 - Numbers with exactly 2 2's in base 4 expansion.
- `create_sequence_a023716/1` - A023716 - Numbers with exactly 3 2's in base 4 expansion.
- `create_sequence_a023717/1` - A023717 - Numbers with no 3's in base 4 expansion.
- `create_sequence_a023718/1` - A023718 - Numbers with a single 3 in their base 4 expansion.
- `create_sequence_a023719/1` - A023719 - Numbers with exactly two 3's in base 4 expansion.
- `create_sequence_a023720/1` - A023720 - Numbers with exactly 3 3's in base 4 expansion.
- `create_sequence_a023705/1` - A023705 - Numbers with no 0's in base 4 expansion.
- `create_sequence_a100968/1` - A100968 - Integers n that are Rhonda numbers to base 4.
- `create_sequence_a023746/1` - A023746 - Plaindromes: numbers whose digits in base 4 are in nondecreasing order.
Base 5:
- `create_sequence_a007091/1` - A007091 - Numbers in base 5.
- `create_sequence_a010346/1` - A010346 - Base-5 Armstrong or narcissistic numbers (written in base 10).
- `create_sequence_a023721/1` - A023721 - Numbers with no 0's in their base-5 expansion.
- `create_sequence_a023722/1` - A023722 - Numbers with a single 0 in their base 5 expansion.
- `create_sequence_a023723/1` - A023723 - Numbers with exactly 2 0's in base 5 expansion.
- `create_sequence_a023724/1` - A023724 - Numbers with exactly 3 0's in base 5 expansion.
- `create_sequence_a023725/1` - A023725 - Numbers with no 1's in their base-5 expansion.
- `create_sequence_a023726/1` - A023726 - Numbers with a single 1 in their base 5 expansion.
- `create_sequence_a023727/1` - A023727 - Numbers with exactly 2 1's in their base 5 expansion.
- `create_sequence_a023728/1` - A023728 - Numbers with exactly 3 1's in base 5 expansion.
- `create_sequence_a023729/1` - A023729 - Numbers with no 2's in their base-5 expansion.
- `create_sequence_a023730/1` - A023730 - Numbers with a single 2 in their base 5 expansion.
- `create_sequence_a023731/1` - A023731 - Numbers with exactly two 2's in base 5 expansion.
- `create_sequence_a023732/1` - A023732 - Numbers with exactly 3 2's in base 5 expansion.
- `create_sequence_a023733/1` - A023733 - Numbers with no 3's in base-5 expansion.
- `create_sequence_a023734/1` - A023734 - Numbers with a single 3 in their base-5 expansion.
- `create_sequence_a023735/1` - A023735 - Numbers with exactly 2 3's in their base-5 expansion.
- `create_sequence_a023736/1` - A023736 - Numbers with exactly 3 3's in their base-5 expansion.
- `create_sequence_a023738/1` - A023738 - Numbers with a single 4 in their base 5 expansion.
- `create_sequence_a023739/1` - A023739 - Numbers with exactly 2 4's in base 5 expansion.
- `create_sequence_a023740/1` - A023740 - Numbers with exactly 3 4's in base 5 expansion.
- `create_sequence_a097251/1` - A097251 - Numbers whose set of base 5 digits is {0,4}.
- `create_sequence_a023747/1` - A023747 - Plaindromes: numbers whose digits in base 5 are in nondecreasing order.
Base 6:
- `create_sequence_a007092/1` - A007092 - Numbers in base 6.
- `create_sequence_a010348/1` - A010348 - Base-6 Armstrong or narcissistic numbers (written in base 10).
- `create_sequence_a097252/1` - A097252 - Numbers whose set of base 6 digits is {0,5}.
- `create_sequence_a100969/1` - A100969 - Integers n that are Rhonda numbers to base 6.
- `create_sequence_a248910/1` - A248910 - Numbers with no zeros in base-6 representation.
- `create_sequence_a023748/1` - A023748 - Plaindromes: numbers whose digits in base 6 are in nondecreasing order.
Base 7:
- `create_sequence_a007093/1` - A007093 - Numbers in base 7.
- `create_sequence_a010350/1` - A010350 - Base-7 Armstrong or narcissistic numbers (written in base 10).
- `create_sequence_a097253/1` - A097253 - Numbers whose set of base 7 digits is {0,6}.
- `create_sequence_a023749/1` - A023749 - Plaindromes: numbers whose digits in base 7 are in nondecreasing order.
Base 8:
- `create_sequence_a007094/1` - A007094 - Numbers in base 8.
- `create_sequence_a010354/1` - A010354 - Base-8 Armstrong or narcissistic numbers (written in base 10).
- `create_sequence_a097254/1` - A097254 - Numbers whose set of base 8 digits is {0,7}.
- `create_sequence_a100970/1` - A100970 - Integers n that are Rhonda numbers to base 8.
- `create_sequence_a255805/1` - A255805 - Numbers with no zeros in base-8 representation.
- `create_sequence_a023750/1` - A023750 - Plaindromes: numbers whose digits in base 8 are in nondecreasing order.
Base 9:
- `create_sequence_a007095/1` - A007095 - Numbers in base 9.
- `create_sequence_a010353/1` - A010353 - Base-9 Armstrong or narcissistic numbers (written in base 10).
- `create_sequence_a097255/1` - A097255 - Numbers whose set of base 9 digits is {0,8}.
- `create_sequence_a100973/1` - A100973 - Integers that are Rhonda numbers to base 9.
- `create_sequence_a255808/1` - A255808 - Numbers with no zeros in base-9 representation.
- `create_sequence_a023751/1` - A023751 - Plaindromes: numbers whose digits in base 9 are in nondecreasing order.
Base 11
- `create_sequence_a097257/1` - A097257 - Numbers whose set of base 11 digits is {0,A}, where A base 11 = 10 base 10.
- `create_sequence_a161948/1` - A161948 - Base-11 Armstrong or narcissistic numbers (written in base 10).
- `create_sequence_a023752/1` - A023752 - Plaindromes: numbers whose digits in base 11 are in nondecreasing order.
Base 12
- `create_sequence_a097258/1` - A097258 - Numbers whose set of base 12 digits is {0,B}, where B base 12 = 11 base 10.
- `create_sequence_a100971/1` - A100971 - Integers n that are Rhonda numbers to base 12.
- `create_sequence_a161949/1` - A161949 - Base-12 Armstrong or narcissistic numbers (written in base 10).
- `create_sequence_a023753/1` - A023753 - Plaindromes: numbers whose digits in base 12 are in nondecreasing order.
Base 13
- `create_sequence_a097259/1` - A097259 - Numbers whose set of base 13 digits is {0,C}, where C base 13 = 12 base 10.
- `create_sequence_a161950/1` - A161950 - Base-13 Armstrong or narcissistic numbers (written in base 10).
- `create_sequence_a023754/1` - A023754 - Plaindromes: numbers whose digits in base 13 are in nondecreasing order.
Base 14
- `create_sequence_a097260/1` - A097260 - Numbers whose set of base 14 digits is {0,D}, where D base 14 = 13 base 10.
- `create_sequence_a100972/1` - A100972 - Integers that are Rhonda numbers to base 14.
- `create_sequence_a161951/1` - A161951 - Base-14 Armstrong or narcissistic numbers (written in base 10).
- `create_sequence_a023755/1` - A023755 - Plaindromes: numbers whose digits in base 14 are in nondecreasing order.
Base 15
- `create_sequence_a097261/1` - A097261 - Numbers whose set of base 15 digits is {0,E}, where E base 15 = 14 base 10.
- `create_sequence_a100974/1` - A100974 - Integers that are Rhonda numbers to base 15.
- `create_sequence_a161952/1` - A161952 - Base-15 Armstrong or narcissistic numbers (written in base 10).
- `create_sequence_a023756/1` - A023756 - Plaindromes: numbers whose digits in base 15 are in nondecreasing order.
Base 16
- `create_sequence_a097262/1` - A097262 - Numbers whose set of base 16 digits is {0,F}, where F base 16 = 15 base 10.
- `create_sequence_a100975/1` - A100975 - Integers that are Rhonda numbers to base 16.
- `create_sequence_a161953/1` - A161953 - Base-16 Armstrong or narcissistic numbers (written in base 10).
- `create_sequence_a023757/1` - A023757 - Plaindromes: numbers whose digits in base 16 are in nondecreasing order.
Base 18
- `create_sequence_a255735/1` - A255735 - Integers that are Rhonda numbers to base 18.
Base 20
- `create_sequence_a255732/1` - A255732 - Rhonda numbers in vigesimal number system.
Base 30
- `create_sequence_a255736/1` - A255736 - Integers that are Rhonda numbers to base 30.
Base 60
- `create_sequence_a255731/1` - A255731 - Rhonda numbers in sexagesimal number system.
"""
import Chunky.Sequence, only: [sequence_for_function: 1, sequence_for_list: 1]
alias Chunky.Math
alias Chunky.Math.Predicates
# raw data for A014576 - Smallest n-digit narcissistic (or Armstrong) number: smallest n-digit number equal to sum of n-th powers of its digits (or 0 if no such number exists).
# Entry k (1-based) is the smallest k-digit narcissistic number; a 0 entry marks a
# digit count for which no narcissistic number exists (per the OEIS definition above).
@data_a014576 [
1,
0,
153,
1634,
54748,
548_834,
1_741_725,
24_678_050,
146_511_208,
4_679_307_774,
32_164_049_650,
0,
0,
28_116_440_335_967,
0,
4_338_281_769_391_370,
21_897_142_587_612_075,
0,
1_517_841_543_307_505_039,
63_105_425_988_599_693_916,
128_468_643_043_731_391_252,
0
]
# raw data for A046253 - Equal to the sum of its nonzero digits raised to its own power.
# Finite sequence: these are the only known terms.
@data_a046253 [0, 1, 3435, 438_579_088]
# raw data for A114904 - Sorted numbers of digits of any base-10 narcissistic number.
# Finite sequence of the digit lengths at which base-10 narcissistic numbers occur.
@data_a114904 [
1,
3,
4,
5,
6,
7,
8,
9,
10,
11,
14,
16,
17,
19,
20,
21,
23,
24,
25,
27,
29,
31,
32,
33,
34,
35,
37,
38,
39
]
@doc """
OEIS Sequence `A004176` - Omit 1's from n.

From [OEIS A004176](https://oeis.org/A004176):

> Omit 1's from n.
> (Formerly )

**Sequence IDs**: `:a004176`

**Finite**: False

**Offset**: 0

## Example

    iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a004176) |> Sequence.take!(75)
    [0,0,2,3,4,5,6,7,8,9,0,0,2,3,4,5,6,7,8,9,20,2,22,23,24,25,26,27,28,29,30,3,32,33,34,35,36,37,38,39,40,4,42,43,44,45,46,47,48,49,50,5,52,53,54,55,56,57,58,59,60,6,62,63,64,65,66,67,68,69,70,7,72,73,74]

"""
@doc offset: 0,
     sequence: "Omit 1's from n.",
     references: [{:oeis, :a004176, "https://oeis.org/A004176"}]
def create_sequence_a004176(_opts) do
  # Use __MODULE__ instead of restating the fully-qualified module name
  # (this file defines Chunky.Sequence.OEIS.Repr, per the doctest above).
  sequence_for_function(&__MODULE__.seq_a004176/1)
end

@doc false
@doc offset: 0
# a(n): n with every decimal digit 1 deleted.
def seq_a004176(idx) do
  Math.remove_digits!(idx, [1])
end
@doc """
OEIS Sequence `A004177` - Omit 2's from n.

From [OEIS A004177](https://oeis.org/A004177):

> Omit 2's from n.
> (Formerly )

**Sequence IDs**: `:a004177`

**Finite**: False

**Offset**: 0

## Example

    iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a004177) |> Sequence.take!(75)
    [0,1,0,3,4,5,6,7,8,9,10,11,1,13,14,15,16,17,18,19,0,1,0,3,4,5,6,7,8,9,30,31,3,33,34,35,36,37,38,39,40,41,4,43,44,45,46,47,48,49,50,51,5,53,54,55,56,57,58,59,60,61,6,63,64,65,66,67,68,69,70,71,7,73,74]

"""
@doc offset: 0,
     sequence: "Omit 2's from n.",
     references: [{:oeis, :a004177, "https://oeis.org/A004177"}]
def create_sequence_a004177(_opts) do
  # Use __MODULE__ instead of restating the fully-qualified module name.
  sequence_for_function(&__MODULE__.seq_a004177/1)
end

@doc false
@doc offset: 0
# a(n): n with every decimal digit 2 deleted.
def seq_a004177(idx) do
  Math.remove_digits!(idx, [2])
end
@doc """
OEIS Sequence `A004178` - Omit 3's from n.

From [OEIS A004178](https://oeis.org/A004178):

> Omit 3's from n.
> (Formerly )

**Sequence IDs**: `:a004178`

**Finite**: False

**Offset**: 0

## Example

    iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a004178) |> Sequence.take!(75)
    [0,1,2,0,4,5,6,7,8,9,10,11,12,1,14,15,16,17,18,19,20,21,22,2,24,25,26,27,28,29,0,1,2,0,4,5,6,7,8,9,40,41,42,4,44,45,46,47,48,49,50,51,52,5,54,55,56,57,58,59,60,61,62,6,64,65,66,67,68,69,70,71,72,7,74]

"""
@doc offset: 0,
     sequence: "Omit 3's from n.",
     references: [{:oeis, :a004178, "https://oeis.org/A004178"}]
def create_sequence_a004178(_opts) do
  # Use __MODULE__ instead of restating the fully-qualified module name.
  sequence_for_function(&__MODULE__.seq_a004178/1)
end

@doc false
@doc offset: 0
# a(n): n with every decimal digit 3 deleted.
def seq_a004178(idx) do
  Math.remove_digits!(idx, [3])
end
@doc """
OEIS Sequence `A004179` - Omit 4's from n.

From [OEIS A004179](https://oeis.org/A004179):

> Omit 4's from n.
> (Formerly )

**Sequence IDs**: `:a004179`

**Finite**: False

**Offset**: 0

## Example

    iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a004179) |> Sequence.take!(75)
    [0,1,2,3,0,5,6,7,8,9,10,11,12,13,1,15,16,17,18,19,20,21,22,23,2,25,26,27,28,29,30,31,32,33,3,35,36,37,38,39,0,1,2,3,0,5,6,7,8,9,50,51,52,53,5,55,56,57,58,59,60,61,62,63,6,65,66,67,68,69,70,71,72,73,7]

"""
@doc offset: 0,
     sequence: "Omit 4's from n.",
     references: [{:oeis, :a004179, "https://oeis.org/A004179"}]
def create_sequence_a004179(_opts) do
  # Use __MODULE__ instead of restating the fully-qualified module name.
  sequence_for_function(&__MODULE__.seq_a004179/1)
end

@doc false
@doc offset: 0
# a(n): n with every decimal digit 4 deleted.
def seq_a004179(idx) do
  Math.remove_digits!(idx, [4])
end
@doc """
OEIS Sequence `A004180` - Omit 5's from n.

From [OEIS A004180](https://oeis.org/A004180):

> Omit 5's from n.
> (Formerly )

**Sequence IDs**: `:a004180`

**Finite**: False

**Offset**: 0

## Example

    iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a004180) |> Sequence.take!(75)
    [0,1,2,3,4,0,6,7,8,9,10,11,12,13,14,1,16,17,18,19,20,21,22,23,24,2,26,27,28,29,30,31,32,33,34,3,36,37,38,39,40,41,42,43,44,4,46,47,48,49,0,1,2,3,4,0,6,7,8,9,60,61,62,63,64,6,66,67,68,69,70,71,72,73,74]

"""
@doc offset: 0,
     sequence: "Omit 5's from n.",
     references: [{:oeis, :a004180, "https://oeis.org/A004180"}]
def create_sequence_a004180(_opts) do
  # Use __MODULE__ instead of restating the fully-qualified module name.
  sequence_for_function(&__MODULE__.seq_a004180/1)
end

@doc false
@doc offset: 0
# a(n): n with every decimal digit 5 deleted.
def seq_a004180(idx) do
  Math.remove_digits!(idx, [5])
end
@doc """
OEIS Sequence `A004181` - Omit 6's from n.

From [OEIS A004181](https://oeis.org/A004181):

> Omit 6's from n.
> (Formerly )

**Sequence IDs**: `:a004181`

**Finite**: False

**Offset**: 0

## Example

    iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a004181) |> Sequence.take!(75)
    [0,1,2,3,4,5,0,7,8,9,10,11,12,13,14,15,1,17,18,19,20,21,22,23,24,25,2,27,28,29,30,31,32,33,34,35,3,37,38,39,40,41,42,43,44,45,4,47,48,49,50,51,52,53,54,55,5,57,58,59,0,1,2,3,4,5,0,7,8,9,70,71,72,73,74]

"""
@doc offset: 0,
     sequence: "Omit 6's from n.",
     references: [{:oeis, :a004181, "https://oeis.org/A004181"}]
def create_sequence_a004181(_opts) do
  # Use __MODULE__ instead of restating the fully-qualified module name.
  sequence_for_function(&__MODULE__.seq_a004181/1)
end

@doc false
@doc offset: 0
# a(n): n with every decimal digit 6 deleted.
def seq_a004181(idx) do
  Math.remove_digits!(idx, [6])
end
@doc """
OEIS Sequence `A004182` - Omit 7's from n.

From [OEIS A004182](https://oeis.org/A004182):

> Omit 7's from n.
> (Formerly )

**Sequence IDs**: `:a004182`

**Finite**: False

**Offset**: 0

## Example

    iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a004182) |> Sequence.take!(73)
    [0,1,2,3,4,5,6,0,8,9,10,11,12,13,14,15,16,1,18,19,20,21,22,23,24,25,26,2,28,29,30,31,32,33,34,35,36,3,38,39,40,41,42,43,44,45,46,4,48,49,50,51,52,53,54,55,56,5,58,59,60,61,62,63,64,65,66,6,68,69,0,1,2]

"""
@doc offset: 0,
     sequence: "Omit 7's from n.",
     references: [{:oeis, :a004182, "https://oeis.org/A004182"}]
def create_sequence_a004182(_opts) do
  # Use __MODULE__ instead of restating the fully-qualified module name.
  sequence_for_function(&__MODULE__.seq_a004182/1)
end

@doc false
@doc offset: 0
# a(n): n with every decimal digit 7 deleted.
def seq_a004182(idx) do
  Math.remove_digits!(idx, [7])
end
@doc """
OEIS Sequence `A004183` - Omit 8's from n.

From [OEIS A004183](https://oeis.org/A004183):

> Omit 8's from n.
> (Formerly )

**Sequence IDs**: `:a004183`

**Finite**: False

**Offset**: 0

## Example

    iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a004183) |> Sequence.take!(72)
    [0,1,2,3,4,5,6,7,0,9,10,11,12,13,14,15,16,17,1,19,20,21,22,23,24,25,26,27,2,29,30,31,32,33,34,35,36,37,3,39,40,41,42,43,44,45,46,47,4,49,50,51,52,53,54,55,56,57,5,59,60,61,62,63,64,65,66,67,6,69,70,71]

"""
@doc offset: 0,
     sequence: "Omit 8's from n.",
     references: [{:oeis, :a004183, "https://oeis.org/A004183"}]
def create_sequence_a004183(_opts) do
  # Use __MODULE__ instead of restating the fully-qualified module name.
  sequence_for_function(&__MODULE__.seq_a004183/1)
end

@doc false
@doc offset: 0
# a(n): n with every decimal digit 8 deleted.
def seq_a004183(idx) do
  Math.remove_digits!(idx, [8])
end
@doc """
OEIS Sequence `A004184` - Omit 9's from n.

From [OEIS A004184](https://oeis.org/A004184):

> Omit 9's from n.
> (Formerly )

**Sequence IDs**: `:a004184`

**Finite**: False

**Offset**: 0

## Example

    iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a004184) |> Sequence.take!(72)
    [0,1,2,3,4,5,6,7,8,0,10,11,12,13,14,15,16,17,18,1,20,21,22,23,24,25,26,27,28,2,30,31,32,33,34,35,36,37,38,3,40,41,42,43,44,45,46,47,48,4,50,51,52,53,54,55,56,57,58,5,60,61,62,63,64,65,66,67,68,6,70,71]

"""
@doc offset: 0,
     sequence: "Omit 9's from n.",
     references: [{:oeis, :a004184, "https://oeis.org/A004184"}]
def create_sequence_a004184(_opts) do
  # Use __MODULE__ instead of restating the fully-qualified module name.
  sequence_for_function(&__MODULE__.seq_a004184/1)
end

@doc false
@doc offset: 0
# a(n): n with every decimal digit 9 deleted.
def seq_a004184(idx) do
  Math.remove_digits!(idx, [9])
end
@doc """
OEIS Sequence `A004719` - Delete all 0's from n.

From [OEIS A004719](https://oeis.org/A004719):

> Delete all 0's from n.
> (Formerly )

**Sequence IDs**: `:a004719`

**Finite**: False

**Offset**: 1

## Example

    iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a004719) |> Sequence.take!(120)
    [1,2,3,4,5,6,7,8,9,1,11,12,13,14,15,16,17,18,19,2,21,22,23,24,25,26,27,28,29,3,31,32,33,34,35,36,37,38,39,4,41,42,43,44,45,46,47,48,49,5,51,52,53,54,55,56,57,58,59,6,61,62,63,64,65,66,67,68,69,7,71,72,73,74,75,76,77,78,79,8,81,82,83,84,85,86,87,88,89,9,91,92,93,94,95,96,97,98,99,1,11,12,13,14,15,16,17,18,19,11,111,112,113,114,115,116,117,118,119,12]

"""
@doc offset: 1,
     sequence: "Delete all 0's from n.",
     references: [{:oeis, :a004719, "https://oeis.org/A004719"}]
def create_sequence_a004719(_opts) do
  # Stateful sequence: `last_number` remembers the last source integer consumed,
  # so each :next step can resume the scan from there.
  %{
    next_fn: &seq_a004719/3,
    data: %{last_number: -1}
  }
end

@doc false
def seq_a004719(:init, data, _v), do: %{data: data, value: 0}

@doc false
def seq_a004719(:next, data, _v) do
  # The decimal digit this sequence deletes.
  omit = 0

  # Advance to the next integer that does not vanish entirely once the digit is
  # stripped (remove_digits!/3 with empty: false rejects all-omitted numbers).
  next_source =
    Math.next_number(
      fn candidate -> Math.remove_digits!(candidate, [omit], empty: false) end,
      data.last_number
    )

  # Record where the scan stopped and emit the digit-stripped value.
  {:continue,
   %{
     data: %{data | last_number: next_source},
     value: Math.remove_digits!(next_source, [omit])
   }}
end
@doc """
OEIS Sequence `A004720` - Delete all digits '1' from the sequence of nonnegative integers.

From [OEIS A004720](https://oeis.org/A004720):

> Delete all digits '1' from the sequence of nonnegative integers.
> (Formerly )

**Sequence IDs**: `:a004720`

**Finite**: False

**Offset**: 1

## Example

    iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a004720) |> Sequence.take!(74)
    [0,2,3,4,5,6,7,8,9,0,2,3,4,5,6,7,8,9,20,2,22,23,24,25,26,27,28,29,30,3,32,33,34,35,36,37,38,39,40,4,42,43,44,45,46,47,48,49,50,5,52,53,54,55,56,57,58,59,60,6,62,63,64,65,66,67,68,69,70,7,72,73,74,75]

"""
@doc offset: 1,
     sequence: "Delete all digits '1' from the sequence of nonnegative integers.",
     references: [{:oeis, :a004720, "https://oeis.org/A004720"}]
def create_sequence_a004720(_opts) do
  # Stateful sequence: `last_number` remembers the last source integer consumed.
  %{
    next_fn: &seq_a004720/3,
    data: %{last_number: -1}
  }
end

@doc false
def seq_a004720(:init, data, _v), do: %{data: data, value: 0}

@doc false
def seq_a004720(:next, data, _v) do
  # The decimal digit this sequence deletes.
  omit = 1

  # Advance to the next integer that does not vanish once the digit is stripped.
  next_source =
    Math.next_number(
      fn candidate -> Math.remove_digits!(candidate, [omit], empty: false) end,
      data.last_number
    )

  # Record where the scan stopped and emit the digit-stripped value.
  {:continue,
   %{
     data: %{data | last_number: next_source},
     value: Math.remove_digits!(next_source, [omit])
   }}
end
@doc """
OEIS Sequence `A004721` - Delete all 2's from the sequence of nonnegative integers.

From [OEIS A004721](https://oeis.org/A004721):

> Delete all 2's from the sequence of nonnegative integers.
> (Formerly )

**Sequence IDs**: `:a004721`

**Finite**: False

**Offset**: 0

## Example

    iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a004721) |> Sequence.take!(74)
    [0,1,3,4,5,6,7,8,9,10,11,1,13,14,15,16,17,18,19,0,1,3,4,5,6,7,8,9,30,31,3,33,34,35,36,37,38,39,40,41,4,43,44,45,46,47,48,49,50,51,5,53,54,55,56,57,58,59,60,61,6,63,64,65,66,67,68,69,70,71,7,73,74,75]

"""
@doc offset: 0,
     sequence: "Delete all 2's from the sequence of nonnegative integers.",
     references: [{:oeis, :a004721, "https://oeis.org/A004721"}]
def create_sequence_a004721(_opts) do
  # Stateful sequence: `last_number` remembers the last source integer consumed.
  %{
    next_fn: &seq_a004721/3,
    data: %{last_number: -1}
  }
end

@doc false
def seq_a004721(:init, data, _v), do: %{data: data, value: 0}

@doc false
def seq_a004721(:next, data, _v) do
  # The decimal digit this sequence deletes.
  omit = 2

  # Advance to the next integer that does not vanish once the digit is stripped.
  next_source =
    Math.next_number(
      fn candidate -> Math.remove_digits!(candidate, [omit], empty: false) end,
      data.last_number
    )

  # Record where the scan stopped and emit the digit-stripped value.
  {:continue,
   %{
     data: %{data | last_number: next_source},
     value: Math.remove_digits!(next_source, [omit])
   }}
end
@doc """
OEIS Sequence `A004722` - Delete all digits 3 from the terms of the sequence of nonnegative integers.

From [OEIS A004722](https://oeis.org/A004722):

> Delete all digits 3 from the terms of the sequence of nonnegative integers.
> (Formerly )

**Sequence IDs**: `:a004722`

**Finite**: False

**Offset**: 0

## Example

    iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a004722) |> Sequence.take!(75)
    [0,1,2,4,5,6,7,8,9,10,11,12,1,14,15,16,17,18,19,20,21,22,2,24,25,26,27,28,29,0,1,2,4,5,6,7,8,9,40,41,42,4,44,45,46,47,48,49,50,51,52,5,54,55,56,57,58,59,60,61,62,6,64,65,66,67,68,69,70,71,72,7,74,75,76]

"""
@doc offset: 0,
     sequence: "Delete all digits 3 from the terms of the sequence of nonnegative integers.",
     references: [{:oeis, :a004722, "https://oeis.org/A004722"}]
def create_sequence_a004722(_opts) do
  # Stateful sequence: `last_number` remembers the last source integer consumed.
  %{
    next_fn: &seq_a004722/3,
    data: %{last_number: -1}
  }
end

@doc false
def seq_a004722(:init, data, _v), do: %{data: data, value: 0}

@doc false
def seq_a004722(:next, data, _v) do
  # The decimal digit this sequence deletes.
  omit = 3

  # Advance to the next integer that does not vanish once the digit is stripped.
  next_source =
    Math.next_number(
      fn candidate -> Math.remove_digits!(candidate, [omit], empty: false) end,
      data.last_number
    )

  # Record where the scan stopped and emit the digit-stripped value.
  {:continue,
   %{
     data: %{data | last_number: next_source},
     value: Math.remove_digits!(next_source, [omit])
   }}
end
@doc """
OEIS Sequence `A004723` - Delete all 4's from the sequence of nonnegative integers.

From [OEIS A004723](https://oeis.org/A004723):

> Delete all 4's from the sequence of nonnegative integers.
> (Formerly )

**Sequence IDs**: `:a004723`

**Finite**: False

**Offset**: 0

## Example

    iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a004723) |> Sequence.take!(74)
    [0,1,2,3,5,6,7,8,9,10,11,12,13,1,15,16,17,18,19,20,21,22,23,2,25,26,27,28,29,30,31,32,33,3,35,36,37,38,39,0,1,2,3,5,6,7,8,9,50,51,52,53,5,55,56,57,58,59,60,61,62,63,6,65,66,67,68,69,70,71,72,73,7,75]

"""
@doc offset: 0,
     sequence: "Delete all 4's from the sequence of nonnegative integers.",
     references: [{:oeis, :a004723, "https://oeis.org/A004723"}]
def create_sequence_a004723(_opts) do
  # Stateful sequence: `last_number` remembers the last source integer consumed.
  %{
    next_fn: &seq_a004723/3,
    data: %{last_number: -1}
  }
end

@doc false
def seq_a004723(:init, data, _v), do: %{data: data, value: 0}

@doc false
def seq_a004723(:next, data, _v) do
  # The decimal digit this sequence deletes.
  omit = 4

  # Advance to the next integer that does not vanish once the digit is stripped.
  next_source =
    Math.next_number(
      fn candidate -> Math.remove_digits!(candidate, [omit], empty: false) end,
      data.last_number
    )

  # Record where the scan stopped and emit the digit-stripped value.
  {:continue,
   %{
     data: %{data | last_number: next_source},
     value: Math.remove_digits!(next_source, [omit])
   }}
end
@doc """
OEIS Sequence `A004724` - Delete all 5's from the sequence of nonnegative integers.

From [OEIS A004724](https://oeis.org/A004724):

> Delete all 5's from the sequence of nonnegative integers.
> (Formerly )

**Sequence IDs**: `:a004724`

**Finite**: False

**Offset**: 0

## Example

    iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a004724) |> Sequence.take!(75)
    [0,1,2,3,4,6,7,8,9,10,11,12,13,14,1,16,17,18,19,20,21,22,23,24,2,26,27,28,29,30,31,32,33,34,3,36,37,38,39,40,41,42,43,44,4,46,47,48,49,0,1,2,3,4,6,7,8,9,60,61,62,63,64,6,66,67,68,69,70,71,72,73,74,7,76]

"""
@doc offset: 0,
     sequence: "Delete all 5's from the sequence of nonnegative integers.",
     references: [{:oeis, :a004724, "https://oeis.org/A004724"}]
def create_sequence_a004724(_opts) do
  # Stateful sequence: `last_number` remembers the last source integer consumed.
  %{
    next_fn: &seq_a004724/3,
    data: %{last_number: -1}
  }
end

@doc false
def seq_a004724(:init, data, _v), do: %{data: data, value: 0}

@doc false
def seq_a004724(:next, data, _v) do
  # The decimal digit this sequence deletes.
  omit = 5

  # Advance to the next integer that does not vanish once the digit is stripped.
  next_source =
    Math.next_number(
      fn candidate -> Math.remove_digits!(candidate, [omit], empty: false) end,
      data.last_number
    )

  # Record where the scan stopped and emit the digit-stripped value.
  {:continue,
   %{
     data: %{data | last_number: next_source},
     value: Math.remove_digits!(next_source, [omit])
   }}
end
@doc """
OEIS Sequence `A004725` - Delete all 6's from the sequence of nonnegative integers.

From [OEIS A004725](https://oeis.org/A004725):

> Delete all 6's from the sequence of nonnegative integers.
> (Formerly )

**Sequence IDs**: `:a004725`

**Finite**: False

**Offset**: 0

## Example

    iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a004725) |> Sequence.take!(75)
    [0,1,2,3,4,5,7,8,9,10,11,12,13,14,15,1,17,18,19,20,21,22,23,24,25,2,27,28,29,30,31,32,33,34,35,3,37,38,39,40,41,42,43,44,45,4,47,48,49,50,51,52,53,54,55,5,57,58,59,0,1,2,3,4,5,7,8,9,70,71,72,73,74,75,7]

"""
@doc offset: 0,
     sequence: "Delete all 6's from the sequence of nonnegative integers.",
     references: [{:oeis, :a004725, "https://oeis.org/A004725"}]
def create_sequence_a004725(_opts) do
  # Stateful sequence: `last_number` remembers the last source integer consumed.
  %{
    next_fn: &seq_a004725/3,
    data: %{last_number: -1}
  }
end

@doc false
def seq_a004725(:init, data, _v), do: %{data: data, value: 0}

@doc false
def seq_a004725(:next, data, _v) do
  # The decimal digit this sequence deletes.
  omit = 6

  # Advance to the next integer that does not vanish once the digit is stripped.
  next_source =
    Math.next_number(
      fn candidate -> Math.remove_digits!(candidate, [omit], empty: false) end,
      data.last_number
    )

  # Record where the scan stopped and emit the digit-stripped value.
  {:continue,
   %{
     data: %{data | last_number: next_source},
     value: Math.remove_digits!(next_source, [omit])
   }}
end
@doc """
OEIS Sequence `A004726` - Delete all 7's from the sequence of nonnegative integers.

From [OEIS A004726](https://oeis.org/A004726):

> Delete all 7's from the sequence of nonnegative integers.
> (Formerly )

**Sequence IDs**: `:a004726`

**Finite**: False

**Offset**: 0

## Example

    iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a004726) |> Sequence.take!(72)
    [0,1,2,3,4,5,6,8,9,10,11,12,13,14,15,16,1,18,19,20,21,22,23,24,25,26,2,28,29,30,31,32,33,34,35,36,3,38,39,40,41,42,43,44,45,46,4,48,49,50,51,52,53,54,55,56,5,58,59,60,61,62,63,64,65,66,6,68,69,0,1,2]

"""
@doc offset: 0,
     sequence: "Delete all 7's from the sequence of nonnegative integers.",
     references: [{:oeis, :a004726, "https://oeis.org/A004726"}]
def create_sequence_a004726(_opts) do
  # Stateful sequence: `last_number` remembers the last source integer consumed.
  %{
    next_fn: &seq_a004726/3,
    data: %{last_number: -1}
  }
end

@doc false
def seq_a004726(:init, data, _v), do: %{data: data, value: 0}

@doc false
def seq_a004726(:next, data, _v) do
  # The decimal digit this sequence deletes.
  omit = 7

  # Advance to the next integer that does not vanish once the digit is stripped.
  next_source =
    Math.next_number(
      fn candidate -> Math.remove_digits!(candidate, [omit], empty: false) end,
      data.last_number
    )

  # Record where the scan stopped and emit the digit-stripped value.
  {:continue,
   %{
     data: %{data | last_number: next_source},
     value: Math.remove_digits!(next_source, [omit])
   }}
end
@doc """
OEIS Sequence `A004727` - Delete all 8's from the sequence of nonnegative integers.

From [OEIS A004727](https://oeis.org/A004727):

> Delete all 8's from the sequence of nonnegative integers.
> (Formerly )

**Sequence IDs**: `:a004727`

**Finite**: False

**Offset**: 0

## Example

    iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a004727) |> Sequence.take!(71)
    [0,1,2,3,4,5,6,7,9,10,11,12,13,14,15,16,17,1,19,20,21,22,23,24,25,26,27,2,29,30,31,32,33,34,35,36,37,3,39,40,41,42,43,44,45,46,47,4,49,50,51,52,53,54,55,56,57,5,59,60,61,62,63,64,65,66,67,6,69,70,71]

"""
@doc offset: 0,
     sequence: "Delete all 8's from the sequence of nonnegative integers.",
     references: [{:oeis, :a004727, "https://oeis.org/A004727"}]
def create_sequence_a004727(_opts) do
  # Stateful sequence: `last_number` remembers the last source integer consumed.
  %{
    next_fn: &seq_a004727/3,
    data: %{last_number: -1}
  }
end

@doc false
def seq_a004727(:init, data, _v), do: %{data: data, value: 0}

@doc false
def seq_a004727(:next, data, _v) do
  # The decimal digit this sequence deletes.
  omit = 8

  # Advance to the next integer that does not vanish once the digit is stripped.
  next_source =
    Math.next_number(
      fn candidate -> Math.remove_digits!(candidate, [omit], empty: false) end,
      data.last_number
    )

  # Record where the scan stopped and emit the digit-stripped value.
  {:continue,
   %{
     data: %{data | last_number: next_source},
     value: Math.remove_digits!(next_source, [omit])
   }}
end
@doc """
OEIS Sequence `A004728` - Delete all 9's from the sequence of nonnegative integers.

From [OEIS A004728](https://oeis.org/A004728):

> Delete all 9's from the sequence of nonnegative integers.
> (Formerly )

**Sequence IDs**: `:a004728`

**Finite**: False

**Offset**: 0

## Example

    iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a004728) |> Sequence.take!(71)
    [0,1,2,3,4,5,6,7,8,10,11,12,13,14,15,16,17,18,1,20,21,22,23,24,25,26,27,28,2,30,31,32,33,34,35,36,37,38,3,40,41,42,43,44,45,46,47,48,4,50,51,52,53,54,55,56,57,58,5,60,61,62,63,64,65,66,67,68,6,70,71]

"""
@doc offset: 0,
     sequence: "Delete all 9's from the sequence of nonnegative integers.",
     references: [{:oeis, :a004728, "https://oeis.org/A004728"}]
def create_sequence_a004728(_opts) do
  # Stateful sequence: `last_number` remembers the last source integer consumed.
  %{
    next_fn: &seq_a004728/3,
    data: %{last_number: -1}
  }
end

@doc false
def seq_a004728(:init, data, _v), do: %{data: data, value: 0}

@doc false
def seq_a004728(:next, data, _v) do
  # The decimal digit this sequence deletes.
  omit = 9

  # Advance to the next integer that does not vanish once the digit is stripped.
  next_source =
    Math.next_number(
      fn candidate -> Math.remove_digits!(candidate, [omit], empty: false) end,
      data.last_number
    )

  # Record where the scan stopped and emit the digit-stripped value.
  {:continue,
   %{
     data: %{data | last_number: next_source},
     value: Math.remove_digits!(next_source, [omit])
   }}
end
@doc """
OEIS Sequence `A007088` - The binary numbers (or binary words, or binary vectors): numbers written in base 2.

From [OEIS A007088](https://oeis.org/A007088):

> The binary numbers (or binary words, or binary vectors): numbers written in base 2.
> (Formerly M4679)

**Sequence IDs**: `:a007088`

**Finite**: False

**Offset**: 0

## Example

    iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a007088) |> Sequence.take!(40)
    [0,1,10,11,100,101,110,111,1000,1001,1010,1011,1100,1101,1110,1111,10000,10001,10010,10011,10100,10101,10110,10111,11000,11001,11010,11011,11100,11101,11110,11111,100000,100001,100010,100011,100100,100101,100110,100111]

"""
@doc offset: 0,
     sequence: "The binary numbers (or binary words, or binary vectors): numbers written in base 2.",
     references: [{:oeis, :a007088, "https://oeis.org/A007088"}]
def create_sequence_a007088(_opts) do
  # Use __MODULE__ instead of restating the fully-qualified module name.
  sequence_for_function(&__MODULE__.seq_a007088/1)
end
@doc false
@doc offset: 0
def seq_a007088(idx) do
  # Read off the base-2 digits of idx and glue them back together as a
  # base-10 integer, e.g. 5 -> [1, 0, 1] -> 101.
  Integer.undigits(Integer.digits(idx, 2))
end
@doc """
OEIS Sequence `A007089` - Numbers in base 3.

From [OEIS A007089](https://oeis.org/A007089):

> Numbers in base 3.
> (Formerly M1960)

**Sequence IDs**: `:a007089`

**Finite**: False

**Offset**: 0

## Example

    iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a007089) |> Sequence.take!(50)
    [0,1,2,10,11,12,20,21,22,100,101,102,110,111,112,120,121,122,200,201,202,210,211,212,220,221,222,1000,1001,1002,1010,1011,1012,1020,1021,1022,1100,1101,1102,1110,1111,1112,1120,1121,1122,1200,1201,1202,1210,1211]

"""
@doc offset: 0,
     sequence: "Numbers in base 3.",
     references: [{:oeis, :a007089, "https://oeis.org/A007089"}]
def create_sequence_a007089(_opts) do
  # Use __MODULE__ instead of restating the fully-qualified module name.
  sequence_for_function(&__MODULE__.seq_a007089/1)
end
@doc false
@doc offset: 0
def seq_a007089(idx) do
  # Read off the base-3 digits of idx and glue them back together as a
  # base-10 integer, e.g. 10 -> [1, 0, 1] -> 101.
  Integer.undigits(Integer.digits(idx, 3))
end
@doc """
OEIS Sequence `A007090` - Numbers in base 4.

From [OEIS A007090](https://oeis.org/A007090):

> Numbers in base 4.
> (Formerly M0900)

**Sequence IDs**: `:a007090`

**Finite**: False

**Offset**: 0

## Example

    iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a007090) |> Sequence.take!(64)
    [0,1,2,3,10,11,12,13,20,21,22,23,30,31,32,33,100,101,102,103,110,111,112,113,120,121,122,123,130,131,132,133,200,201,202,203,210,211,212,213,220,221,222,223,230,231,232,233,300,301,302,303,310,311,312,313,320,321,322,323,330,331,332,333]

"""
@doc offset: 0,
     sequence: "Numbers in base 4.",
     references: [{:oeis, :a007090, "https://oeis.org/A007090"}]
def create_sequence_a007090(_opts) do
  # Use __MODULE__ instead of restating the fully-qualified module name.
  sequence_for_function(&__MODULE__.seq_a007090/1)
end
@doc false
@doc offset: 0
# a(n): write n in base 4, then read those digits back as a decimal numeral.
def seq_a007090(idx) do
  Integer.undigits(Integer.digits(idx, 4))
end
@doc """
OEIS Sequence `A007091` - Numbers in base 5.
From [OEIS A007091](https://oeis.org/A007091):
> Numbers in base 5.
> (Formerly M0595)
**Sequence IDs**: `:a007091`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a007091) |> Sequence.take!(66)
[0,1,2,3,4,10,11,12,13,14,20,21,22,23,24,30,31,32,33,34,40,41,42,43,44,100,101,102,103,104,110,111,112,113,114,120,121,122,123,124,130,131,132,133,134,140,141,142,143,144,200,201,202,203,204,210,211,212,213,214,220,221,222,223,224,230]
"""
@doc offset: 0,
     sequence: "Numbers in base 5.",
     references: [{:oeis, :a007091, "https://oeis.org/A007091"}]
# Factory: wrap the per-index generator in a function-backed sequence.
def create_sequence_a007091(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a007091/1)
@doc false
@doc offset: 0
# a(n): write n in base 5, then read those digits back as a decimal numeral.
def seq_a007091(idx) do
  Integer.undigits(Integer.digits(idx, 5))
end
@doc """
OEIS Sequence `A007092` - Numbers in base 6.
From [OEIS A007092](https://oeis.org/A007092):
> Numbers in base 6.
> (Formerly M0532)
**Sequence IDs**: `:a007092`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a007092) |> Sequence.take!(66)
[0,1,2,3,4,5,10,11,12,13,14,15,20,21,22,23,24,25,30,31,32,33,34,35,40,41,42,43,44,45,50,51,52,53,54,55,100,101,102,103,104,105,110,111,112,113,114,115,120,121,122,123,124,125,130,131,132,133,134,135,140,141,142,143,144,145]
"""
@doc offset: 0,
     sequence: "Numbers in base 6.",
     references: [{:oeis, :a007092, "https://oeis.org/A007092"}]
# Factory: wrap the per-index generator in a function-backed sequence.
def create_sequence_a007092(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a007092/1)
@doc false
@doc offset: 0
# a(n): write n in base 6, then read those digits back as a decimal numeral.
def seq_a007092(idx) do
  Integer.undigits(Integer.digits(idx, 6))
end
@doc """
OEIS Sequence `A007093` - Numbers in base 7.
From [OEIS A007093](https://oeis.org/A007093):
> Numbers in base 7.
> (Formerly M0511)
**Sequence IDs**: `:a007093`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a007093) |> Sequence.take!(64)
[0,1,2,3,4,5,6,10,11,12,13,14,15,16,20,21,22,23,24,25,26,30,31,32,33,34,35,36,40,41,42,43,44,45,46,50,51,52,53,54,55,56,60,61,62,63,64,65,66,100,101,102,103,104,105,106,110,111,112,113,114,115,116,120]
"""
@doc offset: 0,
     sequence: "Numbers in base 7.",
     references: [{:oeis, :a007093, "https://oeis.org/A007093"}]
# Factory: wrap the per-index generator in a function-backed sequence.
def create_sequence_a007093(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a007093/1)
@doc false
@doc offset: 0
# a(n): write n in base 7, then read those digits back as a decimal numeral.
def seq_a007093(idx) do
  Integer.undigits(Integer.digits(idx, 7))
end
@doc """
OEIS Sequence `A007094` - Numbers in base 8.
From [OEIS A007094](https://oeis.org/A007094):
> Numbers in base 8.
> (Formerly M0498)
**Sequence IDs**: `:a007094`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a007094) |> Sequence.take!(74)
[0,1,2,3,4,5,6,7,10,11,12,13,14,15,16,17,20,21,22,23,24,25,26,27,30,31,32,33,34,35,36,37,40,41,42,43,44,45,46,47,50,51,52,53,54,55,56,57,60,61,62,63,64,65,66,67,70,71,72,73,74,75,76,77,100,101,102,103,104,105,106,107,110,111]
"""
@doc offset: 0,
     sequence: "Numbers in base 8.",
     references: [{:oeis, :a007094, "https://oeis.org/A007094"}]
# Factory: wrap the per-index generator in a function-backed sequence.
def create_sequence_a007094(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a007094/1)
@doc false
@doc offset: 0
# a(n): write n in base 8, then read those digits back as a decimal numeral.
def seq_a007094(idx) do
  Integer.undigits(Integer.digits(idx, 8))
end
@doc """
OEIS Sequence `A007095` - Numbers in base 9.
From [OEIS A007095](https://oeis.org/A007095):
> Numbers in base 9.
> (Formerly M0490)
**Sequence IDs**: `:a007095`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a007095) |> Sequence.take!(77)
[0,1,2,3,4,5,6,7,8,10,11,12,13,14,15,16,17,18,20,21,22,23,24,25,26,27,28,30,31,32,33,34,35,36,37,38,40,41,42,43,44,45,46,47,48,50,51,52,53,54,55,56,57,58,60,61,62,63,64,65,66,67,68,70,71,72,73,74,75,76,77,78,80,81,82,83,84]
"""
@doc offset: 0,
     sequence: "Numbers in base 9.",
     references: [{:oeis, :a007095, "https://oeis.org/A007095"}]
# Factory: wrap the per-index generator in a function-backed sequence.
def create_sequence_a007095(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a007095/1)
@doc false
@doc offset: 0
# a(n): write n in base 9, then read those digits back as a decimal numeral.
def seq_a007095(idx) do
  Integer.undigits(Integer.digits(idx, 9))
end
@doc """
OEIS Sequence `A011531` - Numbers that contain a digit 1 in their decimal representation.
From [OEIS A011531](https://oeis.org/A011531):
> Numbers that contain a digit 1 in their decimal representation.
> (Formerly )
**Sequence IDs**: `:a011531`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a011531) |> Sequence.take!(53)
[1,10,11,12,13,14,15,16,17,18,19,21,31,41,51,61,71,81,91,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133]
"""
@doc offset: 1,
     sequence: "Numbers that contain a digit 1 in their decimal representation.",
     references: [{:oeis, :a011531, "https://oeis.org/A011531"}]
# Factory: wrap the iterative generator in a function-backed sequence.
def create_sequence_a011531(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a011531/2)
@doc false
@doc offset: 1
# Step from the previous term to the next integer whose decimal digits include a 1.
def seq_a011531(_idx, last) do
  Math.next_number(&Math.contains_number?(&1, 1), last)
end
@doc """
OEIS Sequence `A011532` - Numbers that contain a 2.
From [OEIS A011532](https://oeis.org/A011532):
> Numbers that contain a 2.
> (Formerly )
**Sequence IDs**: `:a011532`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a011532) |> Sequence.take!(53)
[2,12,20,21,22,23,24,25,26,27,28,29,32,42,52,62,72,82,92,102,112,120,121,122,123,124,125,126,127,128,129,132,142,152,162,172,182,192,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214]
"""
@doc offset: 1,
     sequence: "Numbers that contain a 2.",
     references: [{:oeis, :a011532, "https://oeis.org/A011532"}]
# Factory: wrap the iterative generator in a function-backed sequence.
def create_sequence_a011532(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a011532/2)
@doc false
@doc offset: 1
# Step from the previous term to the next integer whose decimal digits include a 2.
def seq_a011532(_idx, last) do
  Math.next_number(&Math.contains_number?(&1, 2), last)
end
@doc """
OEIS Sequence `A011533` - Numbers that contain a 3.
From [OEIS A011533](https://oeis.org/A011533):
> Numbers that contain a 3.
> (Formerly )
**Sequence IDs**: `:a011533`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a011533) |> Sequence.take!(53)
[3,13,23,30,31,32,33,34,35,36,37,38,39,43,53,63,73,83,93,103,113,123,130,131,132,133,134,135,136,137,138,139,143,153,163,173,183,193,203,213,223,230,231,232,233,234,235,236,237,238,239,243,253]
"""
@doc offset: 1,
     sequence: "Numbers that contain a 3.",
     references: [{:oeis, :a011533, "https://oeis.org/A011533"}]
# Factory: wrap the iterative generator in a function-backed sequence.
def create_sequence_a011533(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a011533/2)
@doc false
@doc offset: 1
# Step from the previous term to the next integer whose decimal digits include a 3.
def seq_a011533(_idx, last) do
  Math.next_number(&Math.contains_number?(&1, 3), last)
end
@doc """
OEIS Sequence `A011534` - Numbers that contain a 4.
From [OEIS A011534](https://oeis.org/A011534):
> Numbers that contain a 4.
> (Formerly )
**Sequence IDs**: `:a011534`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a011534) |> Sequence.take!(53)
[4,14,24,34,40,41,42,43,44,45,46,47,48,49,54,64,74,84,94,104,114,124,134,140,141,142,143,144,145,146,147,148,149,154,164,174,184,194,204,214,224,234,240,241,242,243,244,245,246,247,248,249,254]
"""
@doc offset: 1,
     sequence: "Numbers that contain a 4.",
     references: [{:oeis, :a011534, "https://oeis.org/A011534"}]
# Factory: wrap the iterative generator in a function-backed sequence.
def create_sequence_a011534(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a011534/2)
@doc false
@doc offset: 1
# Step from the previous term to the next integer whose decimal digits include a 4.
def seq_a011534(_idx, last) do
  Math.next_number(&Math.contains_number?(&1, 4), last)
end
@doc """
OEIS Sequence `A011535` - Numbers that contain a 5.
From [OEIS A011535](https://oeis.org/A011535):
> Numbers that contain a 5.
> (Formerly )
**Sequence IDs**: `:a011535`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a011535) |> Sequence.take!(53)
[5,15,25,35,45,50,51,52,53,54,55,56,57,58,59,65,75,85,95,105,115,125,135,145,150,151,152,153,154,155,156,157,158,159,165,175,185,195,205,215,225,235,245,250,251,252,253,254,255,256,257,258,259]
"""
@doc offset: 1,
     sequence: "Numbers that contain a 5.",
     references: [{:oeis, :a011535, "https://oeis.org/A011535"}]
# Factory: wrap the iterative generator in a function-backed sequence.
def create_sequence_a011535(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a011535/2)
@doc false
@doc offset: 1
# Step from the previous term to the next integer whose decimal digits include a 5.
def seq_a011535(_idx, last) do
  Math.next_number(&Math.contains_number?(&1, 5), last)
end
@doc """
OEIS Sequence `A011536` - Numbers that contain a 6.
From [OEIS A011536](https://oeis.org/A011536):
> Numbers that contain a 6.
> (Formerly )
**Sequence IDs**: `:a011536`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a011536) |> Sequence.take!(53)
[6,16,26,36,46,56,60,61,62,63,64,65,66,67,68,69,76,86,96,106,116,126,136,146,156,160,161,162,163,164,165,166,167,168,169,176,186,196,206,216,226,236,246,256,260,261,262,263,264,265,266,267,268]
"""
@doc offset: 1,
     sequence: "Numbers that contain a 6.",
     references: [{:oeis, :a011536, "https://oeis.org/A011536"}]
# Factory: wrap the iterative generator in a function-backed sequence.
def create_sequence_a011536(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a011536/2)
@doc false
@doc offset: 1
# Step from the previous term to the next integer whose decimal digits include a 6.
def seq_a011536(_idx, last) do
  Math.next_number(&Math.contains_number?(&1, 6), last)
end
@doc """
OEIS Sequence `A011537` - Numbers that contain at least one 7.
From [OEIS A011537](https://oeis.org/A011537):
> Numbers that contain at least one 7.
> (Formerly )
**Sequence IDs**: `:a011537`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a011537) |> Sequence.take!(53)
[7,17,27,37,47,57,67,70,71,72,73,74,75,76,77,78,79,87,97,107,117,127,137,147,157,167,170,171,172,173,174,175,176,177,178,179,187,197,207,217,227,237,247,257,267,270,271,272,273,274,275,276,277]
"""
@doc offset: 1,
     sequence: "Numbers that contain at least one 7.",
     references: [{:oeis, :a011537, "https://oeis.org/A011537"}]
# Factory: wrap the iterative generator in a function-backed sequence.
def create_sequence_a011537(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a011537/2)
@doc false
@doc offset: 1
# Step from the previous term to the next integer whose decimal digits include a 7.
def seq_a011537(_idx, last) do
  Math.next_number(&Math.contains_number?(&1, 7), last)
end
@doc """
OEIS Sequence `A011538` - Numbers that contain an 8.
From [OEIS A011538](https://oeis.org/A011538):
> Numbers that contain an 8.
> (Formerly )
**Sequence IDs**: `:a011538`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a011538) |> Sequence.take!(53)
[8,18,28,38,48,58,68,78,80,81,82,83,84,85,86,87,88,89,98,108,118,128,138,148,158,168,178,180,181,182,183,184,185,186,187,188,189,198,208,218,228,238,248,258,268,278,280,281,282,283,284,285,286]
"""
@doc offset: 1,
     sequence: "Numbers that contain an 8.",
     references: [{:oeis, :a011538, "https://oeis.org/A011538"}]
# Factory: wrap the iterative generator in a function-backed sequence.
def create_sequence_a011538(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a011538/2)
@doc false
@doc offset: 1
# Step from the previous term to the next integer whose decimal digits include an 8.
def seq_a011538(_idx, last) do
  Math.next_number(&Math.contains_number?(&1, 8), last)
end
@doc """
OEIS Sequence `A011539` - "9ish numbers": decimal representation contains at least one nine.
From [OEIS A011539](https://oeis.org/A011539):
> "9ish numbers": decimal representation contains at least one nine.
> (Formerly )
**Sequence IDs**: `:a011539`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a011539) |> Sequence.take!(53)
[9,19,29,39,49,59,69,79,89,90,91,92,93,94,95,96,97,98,99,109,119,129,139,149,159,169,179,189,190,191,192,193,194,195,196,197,198,199,209,219,229,239,249,259,269,279,289,290,291,292,293,294,295]
"""
@doc offset: 1,
     sequence: "9ish numbers: decimal representation contains at least one nine.",
     references: [{:oeis, :a011539, "https://oeis.org/A011539"}]
# Factory: wrap the iterative generator in a function-backed sequence.
def create_sequence_a011539(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a011539/2)
@doc false
@doc offset: 1
# Step from the previous term to the next integer whose decimal digits include a 9.
def seq_a011539(_idx, last) do
  Math.next_number(&Math.contains_number?(&1, 9), last)
end
@doc """
OEIS Sequence `A011540` - Numbers that contain a digit 0.
From [OEIS A011540](https://oeis.org/A011540):
> Numbers that contain a digit 0.
> (Formerly )
**Sequence IDs**: `:a011540`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a011540) |> Sequence.take!(51)
[0,10,20,30,40,50,60,70,80,90,100,101,102,103,104,105,106,107,108,109,110,120,130,140,150,160,170,180,190,200,201,202,203,204,205,206,207,208,209,210,220,230,240,250,260,270,280,290,300,301,302]
"""
@doc offset: 1,
     sequence: "Numbers that contain a digit 0.",
     references: [{:oeis, :a011540, "https://oeis.org/A011540"}]
# Factory: wrap the iterative generator in a function-backed sequence.
def create_sequence_a011540(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a011540/2)
@doc false
# fill_value -1 seeds the scan so the first term found is 0 itself.
@doc offset: 1, fill_value: -1
# Step from the previous term to the next integer whose decimal digits include a 0.
def seq_a011540(_idx, last) do
  Math.next_number(&Math.contains_number?(&1, 0), last)
end
@doc """
OEIS Sequence `A052382` - Numbers without 0 as a digit, a.k.a. zeroless numbers.
From [OEIS A052382](https://oeis.org/A052382):
> Numbers without 0 as a digit, a.k.a. zeroless numbers.
> (Formerly )
**Sequence IDs**: `:a052382`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a052382) |> Sequence.take!(94)
[1,2,3,4,5,6,7,8,9,11,12,13,14,15,16,17,18,19,21,22,23,24,25,26,27,28,29,31,32,33,34,35,36,37,38,39,41,42,43,44,45,46,47,48,49,51,52,53,54,55,56,57,58,59,61,62,63,64,65,66,67,68,69,71,72,73,74,75,76,77,78,79,81,82,83,84,85,86,87,88,89,91,92,93,94,95,96,97,98,99,111,112,113,114]
"""
@doc offset: 1,
     sequence: "Numbers without 0 as a digit, a.k.a. zeroless numbers.",
     references: [{:oeis, :a052382, "https://oeis.org/A052382"}]
# Factory: wrap the iterative generator in a function-backed sequence.
def create_sequence_a052382(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a052382/2)
@doc false
@doc offset: 1
# Step from the previous term to the next integer with no 0 digit.
def seq_a052382(_idx, last) do
  lacks_digit? = fn v -> Math.contains_number?(v, 0) == false end
  Math.next_number(lacks_digit?, last)
end
@doc """
OEIS Sequence `A052383` - Numbers without 1 as a digit.
From [OEIS A052383](https://oeis.org/A052383):
> Numbers without 1 as a digit.
> (Formerly )
**Sequence IDs**: `:a052383`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a052383) |> Sequence.take!(72)
[0,2,3,4,5,6,7,8,9,20,22,23,24,25,26,27,28,29,30,32,33,34,35,36,37,38,39,40,42,43,44,45,46,47,48,49,50,52,53,54,55,56,57,58,59,60,62,63,64,65,66,67,68,69,70,72,73,74,75,76,77,78,79,80,82,83,84,85,86,87,88,89]
"""
@doc offset: 1,
     sequence: "Numbers without 1 as a digit.",
     references: [{:oeis, :a052383, "https://oeis.org/A052383"}]
# Factory: wrap the iterative generator in a function-backed sequence.
def create_sequence_a052383(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a052383/2)
@doc false
# fill_value -1 seeds the scan so the first term found is 0 itself.
@doc offset: 1, fill_value: -1
# Step from the previous term to the next integer with no 1 digit.
def seq_a052383(_idx, last) do
  lacks_digit? = fn v -> Math.contains_number?(v, 1) == false end
  Math.next_number(lacks_digit?, last)
end
@doc """
OEIS Sequence `A052404` - Numbers without 2 as a digit.
From [OEIS A052404](https://oeis.org/A052404):
> Numbers without 2 as a digit.
> (Formerly )
**Sequence IDs**: `:a052404`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a052404) |> Sequence.take!(72)
[0,1,3,4,5,6,7,8,9,10,11,13,14,15,16,17,18,19,30,31,33,34,35,36,37,38,39,40,41,43,44,45,46,47,48,49,50,51,53,54,55,56,57,58,59,60,61,63,64,65,66,67,68,69,70,71,73,74,75,76,77,78,79,80,81,83,84,85,86,87,88,89]
"""
@doc offset: 1,
     sequence: "Numbers without 2 as a digit.",
     references: [{:oeis, :a052404, "https://oeis.org/A052404"}]
# Factory: wrap the iterative generator in a function-backed sequence.
def create_sequence_a052404(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a052404/2)
@doc false
# fill_value -1 seeds the scan so the first term found is 0 itself.
@doc offset: 1, fill_value: -1
# Step from the previous term to the next integer with no 2 digit.
def seq_a052404(_idx, last) do
  lacks_digit? = fn v -> Math.contains_number?(v, 2) == false end
  Math.next_number(lacks_digit?, last)
end
@doc """
OEIS Sequence `A052405` - Numbers without 3 as a digit.
From [OEIS A052405](https://oeis.org/A052405):
> Numbers without 3 as a digit.
> (Formerly )
**Sequence IDs**: `:a052405`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a052405) |> Sequence.take!(72)
[0,1,2,4,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,22,24,25,26,27,28,29,40,41,42,44,45,46,47,48,49,50,51,52,54,55,56,57,58,59,60,61,62,64,65,66,67,68,69,70,71,72,74,75,76,77,78,79,80,81,82,84,85,86,87,88,89]
"""
@doc offset: 1,
     sequence: "Numbers without 3 as a digit.",
     references: [{:oeis, :a052405, "https://oeis.org/A052405"}]
# Factory: wrap the iterative generator in a function-backed sequence.
def create_sequence_a052405(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a052405/2)
@doc false
# fill_value -1 seeds the scan so the first term found is 0 itself.
@doc offset: 1, fill_value: -1
# Step from the previous term to the next integer with no 3 digit.
def seq_a052405(_idx, last) do
  lacks_digit? = fn v -> Math.contains_number?(v, 3) == false end
  Math.next_number(lacks_digit?, last)
end
@doc """
OEIS Sequence `A052406` - Numbers without 4 as a digit.
From [OEIS A052406](https://oeis.org/A052406):
> Numbers without 4 as a digit.
> (Formerly )
**Sequence IDs**: `:a052406`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a052406) |> Sequence.take!(72)
[0,1,2,3,5,6,7,8,9,10,11,12,13,15,16,17,18,19,20,21,22,23,25,26,27,28,29,30,31,32,33,35,36,37,38,39,50,51,52,53,55,56,57,58,59,60,61,62,63,65,66,67,68,69,70,71,72,73,75,76,77,78,79,80,81,82,83,85,86,87,88,89]
"""
@doc offset: 1,
     sequence: "Numbers without 4 as a digit.",
     references: [{:oeis, :a052406, "https://oeis.org/A052406"}]
# Factory: wrap the iterative generator in a function-backed sequence.
def create_sequence_a052406(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a052406/2)
@doc false
# fill_value -1 seeds the scan so the first term found is 0 itself.
@doc offset: 1, fill_value: -1
# Step from the previous term to the next integer with no 4 digit.
def seq_a052406(_idx, last) do
  lacks_digit? = fn v -> Math.contains_number?(v, 4) == false end
  Math.next_number(lacks_digit?, last)
end
@doc """
OEIS Sequence `A052413` - Numbers without 5 as a digit.
From [OEIS A052413](https://oeis.org/A052413):
> Numbers without 5 as a digit.
> (Formerly )
**Sequence IDs**: `:a052413`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a052413) |> Sequence.take!(72)
[0,1,2,3,4,6,7,8,9,10,11,12,13,14,16,17,18,19,20,21,22,23,24,26,27,28,29,30,31,32,33,34,36,37,38,39,40,41,42,43,44,46,47,48,49,60,61,62,63,64,66,67,68,69,70,71,72,73,74,76,77,78,79,80,81,82,83,84,86,87,88,89]
"""
@doc offset: 1,
     sequence: "Numbers without 5 as a digit.",
     references: [{:oeis, :a052413, "https://oeis.org/A052413"}]
# Factory: wrap the iterative generator in a function-backed sequence.
def create_sequence_a052413(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a052413/2)
@doc false
# fill_value -1 seeds the scan so the first term found is 0 itself.
@doc offset: 1, fill_value: -1
# Step from the previous term to the next integer with no 5 digit.
def seq_a052413(_idx, last) do
  lacks_digit? = fn v -> Math.contains_number?(v, 5) == false end
  Math.next_number(lacks_digit?, last)
end
@doc """
OEIS Sequence `A052414` - Numbers without 6 as a digit.
From [OEIS A052414](https://oeis.org/A052414):
> Numbers without 6 as a digit.
> (Formerly )
**Sequence IDs**: `:a052414`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a052414) |> Sequence.take!(72)
[0,1,2,3,4,5,7,8,9,10,11,12,13,14,15,17,18,19,20,21,22,23,24,25,27,28,29,30,31,32,33,34,35,37,38,39,40,41,42,43,44,45,47,48,49,50,51,52,53,54,55,57,58,59,70,71,72,73,74,75,77,78,79,80,81,82,83,84,85,87,88,89]
"""
@doc offset: 1,
     sequence: "Numbers without 6 as a digit.",
     references: [{:oeis, :a052414, "https://oeis.org/A052414"}]
# Factory: wrap the iterative generator in a function-backed sequence.
def create_sequence_a052414(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a052414/2)
@doc false
# fill_value -1 seeds the scan so the first term found is 0 itself.
@doc offset: 1, fill_value: -1
# Step from the previous term to the next integer with no 6 digit.
def seq_a052414(_idx, last) do
  lacks_digit? = fn v -> Math.contains_number?(v, 6) == false end
  Math.next_number(lacks_digit?, last)
end
@doc """
OEIS Sequence `A052419` - Numbers without 7 as a digit.
From [OEIS A052419](https://oeis.org/A052419):
> Numbers without 7 as a digit.
> (Formerly )
**Sequence IDs**: `:a052419`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a052419) |> Sequence.take!(72)
[0,1,2,3,4,5,6,8,9,10,11,12,13,14,15,16,18,19,20,21,22,23,24,25,26,28,29,30,31,32,33,34,35,36,38,39,40,41,42,43,44,45,46,48,49,50,51,52,53,54,55,56,58,59,60,61,62,63,64,65,66,68,69,80,81,82,83,84,85,86,88,89]
"""
@doc offset: 1,
     sequence: "Numbers without 7 as a digit.",
     references: [{:oeis, :a052419, "https://oeis.org/A052419"}]
# Factory: wrap the iterative generator in a function-backed sequence.
def create_sequence_a052419(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a052419/2)
@doc false
# fill_value -1 seeds the scan so the first term found is 0 itself.
@doc offset: 1, fill_value: -1
# Step from the previous term to the next integer with no 7 digit.
def seq_a052419(_idx, last) do
  lacks_digit? = fn v -> Math.contains_number?(v, 7) == false end
  Math.next_number(lacks_digit?, last)
end
@doc """
OEIS Sequence `A052421` - Numbers without 8 as a digit.
From [OEIS A052421](https://oeis.org/A052421):
> Numbers without 8 as a digit.
> (Formerly )
**Sequence IDs**: `:a052421`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a052421) |> Sequence.take!(72)
[0,1,2,3,4,5,6,7,9,10,11,12,13,14,15,16,17,19,20,21,22,23,24,25,26,27,29,30,31,32,33,34,35,36,37,39,40,41,42,43,44,45,46,47,49,50,51,52,53,54,55,56,57,59,60,61,62,63,64,65,66,67,69,70,71,72,73,74,75,76,77,79]
"""
@doc offset: 1,
     sequence: "Numbers without 8 as a digit.",
     references: [{:oeis, :a052421, "https://oeis.org/A052421"}]
# Factory: wrap the iterative generator in a function-backed sequence.
def create_sequence_a052421(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a052421/2)
@doc false
# fill_value -1 seeds the scan so the first term found is 0 itself.
@doc offset: 1, fill_value: -1
# Step from the previous term to the next integer with no 8 digit.
def seq_a052421(_idx, last) do
  lacks_digit? = fn v -> Math.contains_number?(v, 8) == false end
  Math.next_number(lacks_digit?, last)
end
@doc """
OEIS Sequence `A121022` - Even numbers containing a 2 in their decimal representation.
From [OEIS A121022](https://oeis.org/A121022):
> Even numbers containing a 2 in their decimal representation.
> (Formerly )
**Sequence IDs**: `:a121022`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a121022) |> Sequence.take!(55)
[2,12,20,22,24,26,28,32,42,52,62,72,82,92,102,112,120,122,124,126,128,132,142,152,162,172,182,192,200,202,204,206,208,210,212,214,216,218,220,222,224,226,228,230,232,234,236,238,240,242,244,246,248,250,252]
"""
@doc offset: 1,
     sequence: "Even numbers containing a 2 in their decimal representation.",
     references: [{:oeis, :a121022, "https://oeis.org/A121022"}]
# Factory: wrap the iterative generator in a function-backed sequence.
def create_sequence_a121022(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a121022/2)
@doc false
@doc offset: 1, fill_value: 0
# Scan in steps of 2 (even numbers only) for the next value containing digit 2.
def seq_a121022(_idx, last) do
  Math.next_number(&Math.contains_number?(&1, 2), last, 2)
end
@doc """
OEIS Sequence `A100968` - Integers n that are Rhonda numbers to base 4.
From [OEIS A100968](https://oeis.org/A100968):
> Integers n that are Rhonda numbers to base 4.
> (Formerly )
**Sequence IDs**: `:a100968`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a100968) |> Sequence.take!(7)
[10206,11935,12150,16031,45030,94185,113022]
"""
@doc offset: 1,
     sequence: "Integers n that are Rhonda numbers to base 4.",
     references: [{:oeis, :a100968, "https://oeis.org/A100968"}]
# Factory: wrap the iterative generator in a function-backed sequence.
def create_sequence_a100968(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a100968/2)
@doc false
# fill_value starts the scan just below the first known term (10206).
@doc offset: 1, fill_value: 10000
# Step from the previous term to the next base-4 Rhonda number.
def seq_a100968(_idx, last) do
  Math.next_number(fn v -> Predicates.is_rhonda_to_base_4?(v) end, last)
end
@doc """
OEIS Sequence `A100969` - Integers n that are Rhonda numbers to base 6.
From [OEIS A100969](https://oeis.org/A100969):
> Integers n that are Rhonda numbers to base 6.
> (Formerly )
**Sequence IDs**: `:a100969`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a100969) |> Sequence.take!(13)
[855,1029,3813,5577,7040,7304,15104,19136,35350,36992,41031,42009,60368]
"""
@doc offset: 1,
     sequence: "Integers n that are Rhonda numbers to base 6.",
     references: [{:oeis, :a100969, "https://oeis.org/A100969"}]
# Factory: wrap the iterative generator in a function-backed sequence.
def create_sequence_a100969(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a100969/2)
@doc false
# fill_value starts the scan just below the first known term (855).
@doc offset: 1, fill_value: 800
# Step from the previous term to the next base-6 Rhonda number.
def seq_a100969(_idx, last) do
  Math.next_number(fn v -> Predicates.is_rhonda_to_base_6?(v) end, last)
end
@doc """
OEIS Sequence `A100970` - Integers n that are Rhonda numbers to base 8.
From [OEIS A100970](https://oeis.org/A100970):
> Integers n that are Rhonda numbers to base 8.
> (Formerly )
**Sequence IDs**: `:a100970`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a100970) |> Sequence.take!(20)
[1836,6318,6622,10530,14500,14739,17655,18550,25398,25956,30562,39215,39325,50875,51429,52887,55611,56420,58548,59731]
"""
@doc offset: 1,
     sequence: "Integers n that are Rhonda numbers to base 8.",
     references: [{:oeis, :a100970, "https://oeis.org/A100970"}]
# Factory: wrap the iterative generator in a function-backed sequence.
def create_sequence_a100970(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a100970/2)
@doc false
# fill_value starts the scan just below the first known term (1836).
@doc offset: 1, fill_value: 1800
# Step from the previous term to the next base-8 Rhonda number.
def seq_a100970(_idx, last) do
  Math.next_number(fn v -> Predicates.is_rhonda_to_base_8?(v) end, last)
end
@doc """
OEIS Sequence `A100973` - Integers that are Rhonda numbers to base 9.
From [OEIS A100973](https://oeis.org/A100973):
> Integers that are Rhonda numbers to base 9.
> (Formerly )
**Sequence IDs**: `:a100973`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a100973) |> Sequence.take!(12)
[15540,21054,25331,44360,44660,44733,47652,50560,54944,76857,77142,83334]
"""
@doc offset: 1,
     sequence: "Integers that are Rhonda numbers to base 9.",
     references: [{:oeis, :a100973, "https://oeis.org/A100973"}]
# Factory: wrap the iterative generator in a function-backed sequence.
def create_sequence_a100973(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a100973/2)
@doc false
# fill_value starts the scan just below the first known term (15540).
@doc offset: 1, fill_value: 15500
# Step from the previous term to the next base-9 Rhonda number.
def seq_a100973(_idx, last) do
  Math.next_number(fn v -> Predicates.is_rhonda_to_base_9?(v) end, last)
end
@doc """
OEIS Sequence `A099542` - Rhonda numbers to base 10.
From [OEIS A099542](https://oeis.org/A099542):
> Rhonda numbers to base 10.
> (Formerly )
**Sequence IDs**: `:a099542`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a099542) |> Sequence.take!(28)
[1568,2835,4752,5265,5439,5664,5824,5832,8526,12985,15625,15698,19435,25284,25662,33475,34935,35581,45951,47265,47594,52374,53176,53742,54479,55272,56356,56718]
"""
@doc offset: 1,
     sequence: "Rhonda numbers to base 10.",
     references: [{:oeis, :a099542, "https://oeis.org/A099542"}]
# Factory: wrap the iterative generator in a function-backed sequence.
def create_sequence_a099542(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a099542/2)
@doc false
# fill_value starts the scan just below the first known term (1568).
@doc offset: 1, fill_value: 1500
# Step from the previous term to the next base-10 Rhonda number.
def seq_a099542(_idx, last) do
  Math.next_number(fn v -> Predicates.is_rhonda_to_base_10?(v) end, last)
end
@doc """
OEIS Sequence `A100971` - Integers n that are Rhonda numbers to base 12.
From [OEIS A100971](https://oeis.org/A100971):
> Integers n that are Rhonda numbers to base 12.
> (Formerly )
**Sequence IDs**: `:a100971`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a100971) |> Sequence.take!(35)
[560,800,3993,4425,4602,4888,7315,8296,9315,11849,12028,13034,14828,15052,16264,18511,18906,25619,25875,27176,32742,37264,37523,46035,50765,52338,58261,58504,59166,62002,66176,66752,66928,67195,68502]
"""
@doc offset: 1,
     sequence: "Integers n that are Rhonda numbers to base 12.",
     references: [{:oeis, :a100971, "https://oeis.org/A100971"}]
# Factory: wrap the iterative generator in a function-backed sequence.
def create_sequence_a100971(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a100971/2)
@doc false
# fill_value starts the scan just below the first known term (560).
@doc offset: 1, fill_value: 500
# Step from the previous term to the next base-12 Rhonda number.
def seq_a100971(_idx, last) do
  Math.next_number(fn v -> Predicates.is_rhonda_to_base_12?(v) end, last)
end
@doc """
OEIS Sequence `A100972` - Integers that are Rhonda numbers to base 14.
From [OEIS A100972](https://oeis.org/A100972):
> Integers that are Rhonda numbers to base 14.
> (Formerly )
**Sequence IDs**: `:a100972`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a100972) |> Sequence.take!(14)
[11475,18655,20565,29631,31725,45387,58404,58667,59950,63945,67525,68904,91245,99603]
"""
@doc offset: 1,
     sequence: "Integers that are Rhonda numbers to base 14.",
     references: [{:oeis, :a100972, "https://oeis.org/A100972"}]
# Factory: wrap the iterative generator in a function-backed sequence.
def create_sequence_a100972(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a100972/2)
@doc false
# fill_value starts the scan just below the first known term (11475).
@doc offset: 1, fill_value: 11400
# Step from the previous term to the next base-14 Rhonda number.
def seq_a100972(_idx, last) do
  Math.next_number(fn v -> Predicates.is_rhonda_to_base_14?(v) end, last)
end
@doc """
OEIS Sequence `A100974` - Integers that are Rhonda numbers to base 15.
From [OEIS A100974](https://oeis.org/A100974):
> Integers that are Rhonda numbers to base 15.
> (Formerly )
**Sequence IDs**: `:a100974`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a100974) |> Sequence.take!(22)
[2392,2472,11468,15873,17424,18126,19152,20079,24388,30758,31150,33004,33550,37925,39483,42550,44714,58870,59605,66950,70182,71485]
"""
@doc offset: 1,
     sequence: "Integers that are Rhonda numbers to base 15.",
     references: [{:oeis, :a100974, "https://oeis.org/A100974"}]
# Factory: wrap the iterative generator in a function-backed sequence.
def create_sequence_a100974(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a100974/2)
@doc false
# fill_value starts the scan just below the first known term (2392).
@doc offset: 1, fill_value: 2300
# Step from the previous term to the next base-15 Rhonda number.
def seq_a100974(_idx, last) do
  Math.next_number(fn v -> Predicates.is_rhonda_to_base_15?(v) end, last)
end
@doc """
OEIS Sequence `A100975` - Integers that are Rhonda numbers to base 16.
From [OEIS A100975](https://oeis.org/A100975):
> Integers that are Rhonda numbers to base 16.
> (Formerly )
**Sequence IDs**: `:a100975`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a100975) |> Sequence.take!(22)
[1000,1134,6776,15912,19624,20043,20355,23946,26296,29070,31906,32292,34236,34521,36465,39066,50055,50986,52341,54340,58088,59541]
"""
@doc offset: 1,
     sequence: "Integers that are Rhonda numbers to base 16.",
     references: [{:oeis, :a100975, "https://oeis.org/A100975"}]
# Factory: wrap the iterative generator in a function-backed sequence.
def create_sequence_a100975(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a100975/2)
@doc false
# fill_value starts the scan just below the first known term (1000).
@doc offset: 1, fill_value: 900
# Step from the previous term to the next base-16 Rhonda number.
def seq_a100975(_idx, last) do
  Math.next_number(fn v -> Predicates.is_rhonda_to_base_16?(v) end, last)
end
@doc """
OEIS Sequence `A255732` - Rhonda numbers in vigesimal number system.
From [OEIS A255732](https://oeis.org/A255732):
> Rhonda numbers in vigesimal number system.
> (Formerly )
**Sequence IDs**: `:a255732`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a255732) |> Sequence.take!(14)
[1815,11050,15295,21165,22165,30702,34510,34645,42292,44165,52059,53416,65945,78430]
"""
@doc offset: 1,
     sequence: "Rhonda numbers in vigesimal number system.",
     references: [{:oeis, :a255732, "https://oeis.org/A255732"}]
# Factory: wrap the iterative generator in a function-backed sequence.
def create_sequence_a255732(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a255732/2)
@doc false
# fill_value starts the scan just below the first known term (1815).
@doc offset: 1, fill_value: 1800
# Step from the previous term to the next base-20 (vigesimal) Rhonda number.
def seq_a255732(_idx, last) do
  Math.next_number(fn v -> Predicates.is_rhonda_to_base_20?(v) end, last)
end
@doc """
OEIS Sequence `A255736` - Integers that are Rhonda numbers to base 30.
From [OEIS A255736](https://oeis.org/A255736):
> Integers that are Rhonda numbers to base 30.
> (Formerly )
**Sequence IDs**: `:a255736`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a255736) |> Sequence.take!(27)
[3024,3168,5115,5346,5950,6762,7750,7956,8470,9476,9576,9849,10360,11495,13035,13356,16335,22610,22784,23864,37515,38025,40704,40986,49887,52925,59800]
"""
@doc offset: 1,
     sequence: "Integers that are Rhonda numbers to base 30.",
     references: [{:oeis, :a255736, "https://oeis.org/A255736"}]
# Factory: wrap the iterative generator in a function-backed sequence.
def create_sequence_a255736(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a255736/2)
@doc false
# fill_value starts the scan just below the first known term (3024).
@doc offset: 1, fill_value: 3000
# Step from the previous term to the next base-30 Rhonda number.
def seq_a255736(_idx, last) do
  Math.next_number(fn v -> Predicates.is_rhonda_to_base_30?(v) end, last)
end
@doc """
OEIS Sequence `A255731` - Rhonda numbers in sexagesimal number system.
From [OEIS A255731](https://oeis.org/A255731):
> Rhonda numbers in sexagesimal number system.
> (Formerly )
**Sequence IDs**: `:a255731`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a255731) |> Sequence.take!(17)
[3348,3510,6750,17430,18750,18876,18944,19475,20564,21312,26550,28280,37230,38396,43940,48042,77770]
"""
@doc offset: 1,
     sequence: "Rhonda numbers in sexagesimal number system.",
     references: [{:oeis, :a255731, "https://oeis.org/A255731"}]
def create_sequence_a255731(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a255731/2)
end

@doc false
@doc offset: 1, fill_value: 3300
# Scans upward from `last` for the next integer that is Rhonda to base 60.
def seq_a255731(_idx, last) do
  Math.next_number(fn candidate -> Predicates.is_rhonda_to_base_60?(candidate) end, last)
end
@doc """
OEIS Sequence `A100988` - Integers that are Rhonda numbers to more than one base.
From [OEIS A100988](https://oeis.org/A100988):
> Integers that are Rhonda numbers to more than one base.
> (Formerly )
**Sequence IDs**: `:a100988`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a100988) |> Sequence.take!(39)
[1000,2940,4200,4212,4725,5670,5824,5832,6776,6864,7040,7140,8296,9476,9633,10200,11016,11050,11160,11495,11935,12393,12474,13068,13260,13671,14014,14322,14406,15680,15750,15912,16240,16821,17056,17820,18270,18655,18700]
"""
@doc offset: 1,
     sequence: "Integers that are Rhonda numbers to more than one base.",
     references: [{:oeis, :a100988, "https://oeis.org/A100988"}]
def create_sequence_a100988(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a100988/2)
end

@doc false
@doc offset: 1, fill_value: 900
# Scans upward from `last` for the next integer that is Rhonda to 2+ bases.
def seq_a100988(_idx, last) do
  Math.next_number(fn candidate -> Predicates.is_multiple_rhonda?(candidate) end, last)
end
@doc """
OEIS Sequence `A100987` - Integers that are Rhonda numbers to some base.
From [OEIS A100987](https://oeis.org/A100987):
> Integers that are Rhonda numbers to some base.
> (Formerly )
**Sequence IDs**: `:a100987`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a100987) |> Sequence.take!(44)
[560,756,800,855,1000,1029,1134,1470,1568,1632,1750,1815,1836,1944,1995,2080,2100,2392,2472,2662,2695,2709,2835,2940,3000,3024,3060,3087,3094,3168,3240,3264,3348,3456,3510,3600,3672,3675,3744,3750,3813,3888,3952,3976]
"""
@doc offset: 1,
     sequence: "Integers that are Rhonda numbers to some base.",
     references: [{:oeis, :a100987, "https://oeis.org/A100987"}]
def create_sequence_a100987(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a100987/2)
end

@doc false
@doc offset: 1, fill_value: 500
# Next integer above `last` that is Rhonda to at least one base; an integer
# qualifies when Math.get_rhonda_to/1 finds a non-empty list of such bases.
def seq_a100987(_idx, last) do
  Math.next_number(fn v -> length(Math.get_rhonda_to(v)) > 0 end, last)
end
@doc """
OEIS Sequence `A255735` - Integers that are Rhonda numbers to base 18.
From [OEIS A255735](https://oeis.org/A255735):
> Integers that are Rhonda numbers to base 18.
> (Formerly )
**Sequence IDs**: `:a255735`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a255735) |> Sequence.take!(19)
[1470,3000,8918,17025,19402,20650,21120,22156,26522,36549,38354,43281,46035,48768,54229,54528,56584,58216,58224]
"""
@doc offset: 1,
     sequence: "Integers that are Rhonda numbers to base 18.",
     references: [{:oeis, :a255735, "https://oeis.org/A255735"}]
def create_sequence_a255735(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a255735/2)
end

@doc false
@doc offset: 1, fill_value: 1400
# Scans upward from `last` for the next integer that is Rhonda to base 18.
# NOTE(review): siblings use Predicates.is_rhonda_to_base_N?/1 helpers; this one
# calls Math.is_rhonda_to_base?/2 directly — confirm whether a base-18 predicate exists.
def seq_a255735(_idx, last) do
  Math.next_number(&Math.is_rhonda_to_base?(&1, 18), last)
end
@doc """
OEIS Sequence `A000788` - Total number of 1's in binary expansions of 0, ..., n.
From [OEIS A000788](https://oeis.org/A000788):
> Total number of 1's in binary expansions of 0, ..., n.
> (Formerly M0964 N0360)
**Sequence IDs**: `:a000788`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a000788) |> Sequence.take!(63)
[0,1,2,4,5,7,9,12,13,15,17,20,22,25,28,32,33,35,37,40,42,45,48,52,54,57,60,64,67,71,75,80,81,83,85,88,90,93,96,100,102,105,108,112,115,119,123,128,130,133,136,140,143,147,151,156,159,163,167,172,176,181,186]
"""
@doc offset: 0,
     sequence: "Total number of 1's in binary expansions of 0, ..., n.",
     references: [{:oeis, :a000788, "https://oeis.org/A000788"}]
def create_sequence_a000788(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a000788/1)
end

@doc false
@doc offset: 0
# Running total of binary 1-bits over 0..idx, accumulated in a single reduce.
def seq_a000788(idx) do
  Enum.reduce(0..idx, 0, fn n, acc -> acc + Math.digit_count(n, [1], base: 2) end)
end
@doc """
OEIS Sequence `A005823` - Numbers whose ternary expansion contains no 1's.
From [OEIS A005823](https://oeis.org/A005823):
> Numbers whose ternary expansion contains no 1's.
> (Formerly M1567)
**Sequence IDs**: `:a005823`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a005823) |> Sequence.take!(56)
[0,2,6,8,18,20,24,26,54,56,60,62,72,74,78,80,162,164,168,170,180,182,186,188,216,218,222,224,234,236,240,242,486,488,492,494,504,506,510,512,540,542,546,548,558,560,564,566,648,650,654,656,666,668,672,674]
"""
@doc offset: 1,
     sequence: "Numbers whose ternary expansion contains no 1's.",
     references: [{:oeis, :a005823, "https://oeis.org/A005823"}]
def create_sequence_a005823(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a005823/2)
end

@doc false
@doc offset: 1, fill_value: -1
# Next integer above `last` with zero 1-digits in base 3 (fill -1 so 0 is reachable).
def seq_a005823(_idx, last) do
  Math.next_number(&(Math.digit_count(&1, [1], base: 3) == 0), last)
end
@doc """
OEIS Sequence `A005836` - Numbers n whose base 3 representation contains no 2.
From [OEIS A005836](https://oeis.org/A005836):
> Numbers n whose base 3 representation contains no 2.
> (Formerly M2353)
**Sequence IDs**: `:a005836`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a005836) |> Sequence.take!(58)
[0,1,3,4,9,10,12,13,27,28,30,31,36,37,39,40,81,82,84,85,90,91,93,94,108,109,111,112,117,118,120,121,243,244,246,247,252,253,255,256,270,271,273,274,279,280,282,283,324,325,327,328,333,334,336,337,351,352]
"""
@doc offset: 1,
     sequence: "Numbers n whose base 3 representation contains no 2.",
     references: [{:oeis, :a005836, "https://oeis.org/A005836"}]
def create_sequence_a005836(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a005836/2)
end

@doc false
@doc offset: 1, fill_value: -1
# Next integer above `last` with zero 2-digits in base 3 (fill -1 so 0 is reachable).
def seq_a005836(_idx, last) do
  Math.next_number(&(Math.digit_count(&1, [2], base: 3) == 0), last)
end
@doc """
OEIS Sequence `A007954` - Product of decimal digits of n.
From [OEIS A007954](https://oeis.org/A007954):
> Product of decimal digits of n.
> (Formerly )
**Sequence IDs**: `:a007954`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a007954) |> Sequence.take!(108)
[0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5,6,7,8,9,0,2,4,6,8,10,12,14,16,18,0,3,6,9,12,15,18,21,24,27,0,4,8,12,16,20,24,28,32,36,0,5,10,15,20,25,30,35,40,45,0,6,12,18,24,30,36,42,48,54,0,7,14,21,28,35,42,49,56,63,0,8,16,24,32,40,48,56,64,72,0,9,18,27,36,45,54,63,72,81,0,0,0,0,0,0,0,0]
"""
@doc offset: 0,
     sequence: "Product of decimal digits of n.",
     references: [{:oeis, :a007954, "https://oeis.org/A007954"}]
# Builds the sequence from the 1-arity generator below.
# NOTE(review): the offset metadata in the @doc attribute above appears to be
# consumed by sequence_for_function/1 — confirm before reformatting.
def create_sequence_a007954(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a007954/1)
end
@doc false
@doc offset: 0
# Multiplies together the base-10 digits of `idx`; any zero digit zeroes the product.
def seq_a007954(idx) do
  Enum.reduce(Integer.digits(idx), 1, fn digit, product -> product * digit end)
end
@doc """
OEIS Sequence `A010872` - a(n) = n mod 3.
From [OEIS A010872](https://oeis.org/A010872):
> a(n) = n mod 3.
> (Formerly )
**Sequence IDs**: `:a010872`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a010872) |> Sequence.take!(105)
[0,1,2,0,1,2,0,1,2,0,1,2,0,1,2,0,1,2,0,1,2,0,1,2,0,1,2,0,1,2,0,1,2,0,1,2,0,1,2,0,1,2,0,1,2,0,1,2,0,1,2,0,1,2,0,1,2,0,1,2,0,1,2,0,1,2,0,1,2,0,1,2,0,1,2,0,1,2,0,1,2,0,1,2,0,1,2,0,1,2,0,1,2,0,1,2,0,1,2,0,1,2,0,1,2]
"""
@doc offset: 0,
     sequence: "a(n) = n mod 3.",
     references: [{:oeis, :a010872, "https://oeis.org/A010872"}]
# Builds the sequence from the 1-arity generator below; the @doc attribute
# above carries framework metadata (offset) — presumably read by
# sequence_for_function/1, confirm before changing.
def create_sequence_a010872(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a010872/1)
end
@doc false
@doc offset: 0
# a(n) = n mod 3 — cycles 0, 1, 2 as idx counts up from the offset.
def seq_a010872(idx) do
  rem(idx, 3)
end
@doc """
OEIS Sequence `A023416` - Number of 0's in binary expansion of n.
From [OEIS A023416](https://oeis.org/A023416):
> Number of 0's in binary expansion of n.
> (Formerly )
**Sequence IDs**: `:a023416`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023416) |> Sequence.take!(105)
[1,0,1,0,2,1,1,0,3,2,2,1,2,1,1,0,4,3,3,2,3,2,2,1,3,2,2,1,2,1,1,0,5,4,4,3,4,3,3,2,4,3,3,2,3,2,2,1,4,3,3,2,3,2,2,1,3,2,2,1,2,1,1,0,6,5,5,4,5,4,4,3,5,4,4,3,4,3,3,2,5,4,4,3,4,3,3,2,4,3,3,2,3,2,2,1,5,4,4,3,4,3,3,2,4]
"""
@doc offset: 0,
     sequence: "Number of 0's in binary expansion of n.",
     references: [{:oeis, :a023416, "https://oeis.org/A023416"}]
def create_sequence_a023416(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023416/1)
end
@doc false
@doc offset: 0
# Counts the zero digits in the base-2 representation of idx.
def seq_a023416(idx) do
  Math.digit_count(idx, [0], base: 2)
end
@doc """
OEIS Sequence `A023705` - Numbers with no 0's in base 4 expansion.
From [OEIS A023705](https://oeis.org/A023705):
> Numbers with no 0's in base 4 expansion.
> (Formerly )
**Sequence IDs**: `:a023705`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023705) |> Sequence.take!(63)
[1,2,3,5,6,7,9,10,11,13,14,15,21,22,23,25,26,27,29,30,31,37,38,39,41,42,43,45,46,47,53,54,55,57,58,59,61,62,63,85,86,87,89,90,91,93,94,95,101,102,103,105,106,107,109,110,111,117,118,119,121,122,123]
"""
@doc offset: 1,
     sequence: "Numbers with no 0's in base 4 expansion.",
     references: [{:oeis, :a023705, "https://oeis.org/A023705"}]
def create_sequence_a023705(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023705/2)
end

@doc false
@doc offset: 1
# Next integer above `last` whose base-4 expansion contains no zero digit.
def seq_a023705(_idx, last) do
  Math.next_number(&(Math.digit_count(&1, [0], base: 4) == 0), last)
end
@doc """
OEIS Sequence `A032924` - Numbers whose ternary expansion contains no 0.
From [OEIS A032924](https://oeis.org/A032924):
> Numbers whose ternary expansion contains no 0.
> (Formerly )
**Sequence IDs**: `:a032924`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a032924) |> Sequence.take!(60)
[1,2,4,5,7,8,13,14,16,17,22,23,25,26,40,41,43,44,49,50,52,53,67,68,70,71,76,77,79,80,121,122,124,125,130,131,133,134,148,149,151,152,157,158,160,161,202,203,205,206,211,212,214,215,229,230,232,233,238,239]
"""
@doc offset: 1,
     sequence: "Numbers whose ternary expansion contains no 0.",
     references: [{:oeis, :a032924, "https://oeis.org/A032924"}]
def create_sequence_a032924(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a032924/2)
end

@doc false
@doc offset: 1
# Next integer above `last` whose base-3 expansion contains no zero digit.
def seq_a032924(_idx, last) do
  Math.next_number(&(Math.digit_count(&1, [0], base: 3) == 0), last)
end
@doc """
OEIS Sequence `A052040` - Numbers n such that n^2 lacks the digit zero in its decimal expansion.
From [OEIS A052040](https://oeis.org/A052040):
> Numbers n such that n^2 lacks the digit zero in its decimal expansion.
> (Formerly )
**Sequence IDs**: `:a052040`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a052040) |> Sequence.take!(72)
[1,2,3,4,5,6,7,8,9,11,12,13,14,15,16,17,18,19,21,22,23,24,25,26,27,28,29,31,34,35,36,37,38,39,41,42,43,44,46,54,56,57,58,59,61,62,63,65,66,67,68,69,72,73,74,75,76,77,79,81,82,83,85,86,87,88,89,91,92,93,94,96]
"""
@doc offset: 1,
     sequence: "Numbers n such that n^2 lacks the digit zero in its decimal expansion.",
     references: [{:oeis, :a052040, "https://oeis.org/A052040"}]
def create_sequence_a052040(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a052040/2)
end

@doc false
@doc offset: 1
# Next integer above `last` whose square has no zero among its decimal digits.
def seq_a052040(_idx, last) do
  Math.next_number(&(Math.digit_count(&1 * &1, [0]) == 0), last)
end
@doc """
OEIS Sequence `A055640` - Number of nonzero digits in decimal expansion of n.
From [OEIS A055640](https://oeis.org/A055640):
> Number of nonzero digits in decimal expansion of n.
> (Formerly )
**Sequence IDs**: `:a055640`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a055640) |> Sequence.take!(105)
[0,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,1,2,2,2,2]
"""
@doc offset: 0,
     sequence: "Number of nonzero digits in decimal expansion of n.",
     references: [{:oeis, :a055640, "https://oeis.org/A055640"}]
def create_sequence_a055640(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a055640/1)
end
@doc false
@doc offset: 0
# Counts decimal digits of idx that are in 1..9 (i.e. nonzero digits).
def seq_a055640(idx) do
  Math.digit_count(idx, [1, 2, 3, 4, 5, 6, 7, 8, 9])
end
@doc """
OEIS Sequence `A055641` - Number of zero digits in n.
From [OEIS A055641](https://oeis.org/A055641):
> Number of zero digits in n.
> (Formerly )
**Sequence IDs**: `:a055641`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a055641) |> Sequence.take!(106)
[1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,1,1,1,1,1]
"""
@doc offset: 0,
     sequence: "Number of zero digits in n.",
     references: [{:oeis, :a055641, "https://oeis.org/A055641"}]
def create_sequence_a055641(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a055641/1)
end
@doc false
@doc offset: 0
# Counts the zero digits in the decimal representation of idx.
def seq_a055641(idx) do
  Math.digit_count(idx, [0])
end
@doc """
OEIS Sequence `A055642` - Number of digits in decimal expansion of n.
From [OEIS A055642](https://oeis.org/A055642):
> Number of digits in decimal expansion of n.
> (Formerly )
**Sequence IDs**: `:a055642`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a055642) |> Sequence.take!(106)
[1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,3,3,3,3,3,3]
"""
@doc offset: 0,
     sequence: "Number of digits in decimal expansion of n.",
     references: [{:oeis, :a055642, "https://oeis.org/A055642"}]
# Builds the sequence from the 1-arity generator below; the @doc offset above
# is framework metadata — presumably read by sequence_for_function/1.
def create_sequence_a055642(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a055642/1)
end
@doc false
@doc offset: 0
# Number of decimal digits of idx (Integer.digits(0) is [0], so a(0) = 1).
def seq_a055642(idx) do
  idx |> Integer.digits() |> length()
end
@doc """
OEIS Sequence `A067251` - Numbers with no trailing zeros in decimal representation.
From [OEIS A067251](https://oeis.org/A067251):
> Numbers with no trailing zeros in decimal representation.
> (Formerly )
**Sequence IDs**: `:a067251`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a067251) |> Sequence.take!(94)
[1,2,3,4,5,6,7,8,9,11,12,13,14,15,16,17,18,19,21,22,23,24,25,26,27,28,29,31,32,33,34,35,36,37,38,39,41,42,43,44,45,46,47,48,49,51,52,53,54,55,56,57,58,59,61,62,63,64,65,66,67,68,69,71,72,73,74,75,76,77,78,79,81,82,83,84,85,86,87,88,89,91,92,93,94,95,96,97,98,99,101,102,103,104]
"""
@doc offset: 1,
     sequence: "Numbers with no trailing zeros in decimal representation.",
     references: [{:oeis, :a067251, "https://oeis.org/A067251"}]
def create_sequence_a067251(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a067251/2)
end

@doc false
@doc offset: 1
# Next integer above `last` whose final decimal digit is nonzero.
def seq_a067251(_idx, last) do
  Math.next_number(fn v -> List.last(Integer.digits(v)) != 0 end, last)
end
@doc """
OEIS Sequence `A071858` - (Number of 1's in binary expansion of n) mod 3.
From [OEIS A071858](https://oeis.org/A071858):
> (Number of 1's in binary expansion of n) mod 3.
> (Formerly )
**Sequence IDs**: `:a071858`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a071858) |> Sequence.take!(105)
[0,1,1,2,1,2,2,0,1,2,2,0,2,0,0,1,1,2,2,0,2,0,0,1,2,0,0,1,0,1,1,2,1,2,2,0,2,0,0,1,2,0,0,1,0,1,1,2,2,0,0,1,0,1,1,2,0,1,1,2,1,2,2,0,1,2,2,0,2,0,0,1,2,0,0,1,0,1,1,2,2,0,0,1,0,1,1,2,0,1,1,2,1,2,2,0,2,0,0,1,0,1,1,2,0]
"""
@doc offset: 0,
     sequence: "(Number of 1's in binary expansion of n) mod 3.",
     references: [{:oeis, :a071858, "https://oeis.org/A071858"}]
def create_sequence_a071858(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a071858/1)
end

@doc false
@doc offset: 0
# Binary 1-bit count of idx, reduced modulo 3.
def seq_a071858(idx) do
  rem(Math.digit_count(idx, [1], base: 2), 3)
end
@doc """
OEIS Sequence `A122840` - a(n) is the number of 0s at the end of n when n is written in base 10.
From [OEIS A122840](https://oeis.org/A122840):
> a(n) is the number of 0s at the end of n when n is written in base 10.
> (Formerly )
**Sequence IDs**: `:a122840`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a122840) |> Sequence.take!(105)
[0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0]
"""
@doc offset: 1,
     sequence: "a(n) is the number of 0s at the end of n when n is written in base 10.",
     references: [{:oeis, :a122840, "https://oeis.org/A122840"}]
# Builds the sequence from the 1-arity generator below; the @doc offset above
# is framework metadata — presumably read by sequence_for_function/1.
def create_sequence_a122840(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a122840/1)
end
@doc false
@doc offset: 1
# Counts trailing zeros of idx in base 10: reverse the digit list and count
# the leading run of zeros.
def seq_a122840(idx) do
  idx
  |> Integer.digits()
  |> Enum.reverse()
  |> Enum.take_while(&(&1 == 0))
  |> Enum.count()
end
@doc """
OEIS Sequence `A160093` - Number of digits in n, excluding any trailing zeros.
From [OEIS A160093](https://oeis.org/A160093):
> Number of digits in n, excluding any trailing zeros.
> (Formerly )
**Sequence IDs**: `:a160093`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a160093) |> Sequence.take!(105)
[1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,1,3,3,3,3,3]
"""
@doc offset: 1,
     sequence: "Number of digits in n, excluding any trailing zeros.",
     references: [{:oeis, :a160093, "https://oeis.org/A160093"}]
# Builds the sequence from the 1-arity generator below; the @doc offset above
# is framework metadata — presumably read by sequence_for_function/1.
def create_sequence_a160093(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a160093/1)
end
@doc false
@doc offset: 1
# Digit count of idx after stripping trailing zeros: reverse the digit list,
# drop the leading run of zeros, count what remains.
def seq_a160093(idx) do
  idx
  |> Integer.digits()
  |> Enum.reverse()
  |> Enum.drop_while(&(&1 == 0))
  |> Enum.count()
end
@doc """
OEIS Sequence `A179868` - (Number of 1's in binary expansion of n) mod 4.
From [OEIS A179868](https://oeis.org/A179868):
> (Number of 1's in binary expansion of n) mod 4.
> (Formerly )
**Sequence IDs**: `:a179868`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a179868) |> Sequence.take!(101)
[0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,0,1,2,2,3,2,3,3,0,2,3,3,0,3,0,0,1,1,2,2,3,2,3,3,0,2,3,3,0,3,0,0,1,2,3,3,0,3,0,0,1,3,0,0,1,0,1,1,2,1,2,2,3,2,3,3,0,2,3,3,0,3,0,0,1,2,3,3,0,3,0,0,1,3,0,0,1,0,1,1,2,2,3,3,0,3]
"""
@doc offset: 0,
     sequence: "(Number of 1's in binary expansion of n) mod 4.",
     references: [{:oeis, :a179868, "https://oeis.org/A179868"}]
def create_sequence_a179868(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a179868/1)
end

@doc false
@doc offset: 0
# Binary 1-bit count of idx, reduced modulo 4.
def seq_a179868(idx) do
  rem(Math.digit_count(idx, [1], base: 2), 4)
end
@doc """
OEIS Sequence `A193238` - Number of prime digits in decimal representation of n.
From [OEIS A193238](https://oeis.org/A193238):
> Number of prime digits in decimal representation of n.
> (Formerly )
**Sequence IDs**: `:a193238`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a193238) |> Sequence.take!(86)
[0,0,1,1,0,1,0,1,0,0,0,0,1,1,0,1,0,1,0,0,1,1,2,2,1,2,1,2,1,1,1,1,2,2,1,2,1,2,1,1,0,0,1,1,0,1,0,1,0,0,1,1,2,2,1,2,1,2,1,1,0,0,1,1,0,1,0,1,0,0,1,1,2,2,1,2,1,2,1,1,0,0,1,1,0,1]
"""
@doc offset: 0,
     sequence: "Number of prime digits in decimal representation of n.",
     references: [{:oeis, :a193238, "https://oeis.org/A193238"}]
def create_sequence_a193238(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a193238/1)
end
@doc false
@doc offset: 0
# Counts decimal digits of idx that are prime (2, 3, 5, or 7).
def seq_a193238(idx) do
  Math.digit_count(idx, [2, 3, 5, 7])
end
@doc """
OEIS Sequence `A196563` - Number of even digits in decimal representation of n.
From [OEIS A196563](https://oeis.org/A196563):
> Number of even digits in decimal representation of n.
> (Formerly )
**Sequence IDs**: `:a196563`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a196563) |> Sequence.take!(86)
[1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,2,1,2,1,2,1,2,1,2,1,1,0,1,0,1,0,1,0,1,0,2,1,2,1,2,1,2,1,2,1,1,0,1,0,1,0,1,0,1,0,2,1,2,1,2,1,2,1,2,1,1,0,1,0,1,0,1,0,1,0,2,1,2,1,2,1]
"""
@doc offset: 0,
     sequence: "Number of even digits in decimal representation of n.",
     references: [{:oeis, :a196563, "https://oeis.org/A196563"}]
def create_sequence_a196563(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a196563/1)
end
@doc false
@doc offset: 0
# Counts decimal digits of idx that are even (0, 2, 4, 6, or 8).
def seq_a196563(idx) do
  Math.digit_count(idx, [0, 2, 4, 6, 8])
end
@doc """
OEIS Sequence `A248910` - Numbers with no zeros in base-6 representation.
From [OEIS A248910](https://oeis.org/A248910):
> Numbers with no zeros in base-6 representation.
> (Formerly )
**Sequence IDs**: `:a248910`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a248910) |> Sequence.take!(67)
[1,2,3,4,5,7,8,9,10,11,13,14,15,16,17,19,20,21,22,23,25,26,27,28,29,31,32,33,34,35,43,44,45,46,47,49,50,51,52,53,55,56,57,58,59,61,62,63,64,65,67,68,69,70,71,79,80,81,82,83,85,86,87,88,89,91,92]
"""
@doc offset: 1,
     sequence: "Numbers with no zeros in base-6 representation.",
     references: [{:oeis, :a248910, "https://oeis.org/A248910"}]
def create_sequence_a248910(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a248910/2)
end

@doc false
@doc offset: 1
# Next integer above `last` whose base-6 expansion contains no zero digit.
def seq_a248910(_idx, last) do
  Math.next_number(&(Math.digit_count(&1, [0], base: 6) == 0), last)
end
@doc """
OEIS Sequence `A255805` - Numbers with no zeros in base-8 representation.
From [OEIS A255805](https://oeis.org/A255805):
> Numbers with no zeros in base-8 representation.
> (Formerly )
**Sequence IDs**: `:a255805`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a255805) |> Sequence.take!(67)
[1,2,3,4,5,6,7,9,10,11,12,13,14,15,17,18,19,20,21,22,23,25,26,27,28,29,30,31,33,34,35,36,37,38,39,41,42,43,44,45,46,47,49,50,51,52,53,54,55,57,58,59,60,61,62,63,73,74,75,76,77,78,79,81,82,83,84]
"""
@doc offset: 1,
     sequence: "Numbers with no zeros in base-8 representation.",
     references: [{:oeis, :a255805, "https://oeis.org/A255805"}]
def create_sequence_a255805(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a255805/2)
end

@doc false
@doc offset: 1
# Next integer above `last` whose base-8 expansion contains no zero digit.
def seq_a255805(_idx, last) do
  Math.next_number(&(Math.digit_count(&1, [0], base: 8) == 0), last)
end
@doc """
OEIS Sequence `A255808` - Numbers with no zeros in base-9 representation.
From [OEIS A255808](https://oeis.org/A255808):
> Numbers with no zeros in base-9 representation.
> (Formerly )
**Sequence IDs**: `:a255808`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a255808) |> Sequence.take!(67)
[1,2,3,4,5,6,7,8,10,11,12,13,14,15,16,17,19,20,21,22,23,24,25,26,28,29,30,31,32,33,34,35,37,38,39,40,41,42,43,44,46,47,48,49,50,51,52,53,55,56,57,58,59,60,61,62,64,65,66,67,68,69,70,71,73,74,75]
"""
@doc offset: 1,
     sequence: "Numbers with no zeros in base-9 representation.",
     references: [{:oeis, :a255808, "https://oeis.org/A255808"}]
def create_sequence_a255808(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a255808/2)
end

@doc false
@doc offset: 1
# Next integer above `last` whose base-9 expansion contains no zero digit.
def seq_a255808(_idx, last) do
  Math.next_number(&(Math.digit_count(&1, [0], base: 9) == 0), last)
end
@doc """
OEIS Sequence `A007953` - Digital sum (i.e., sum of digits) of n; also called digsum(n).
From [OEIS A007953](https://oeis.org/A007953):
> Digital sum (i.e., sum of digits) of n; also called digsum(n).
> (Formerly )
**Sequence IDs**: `:a007953`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a007953) |> Sequence.take!(88)
[0,1,2,3,4,5,6,7,8,9,1,2,3,4,5,6,7,8,9,10,2,3,4,5,6,7,8,9,10,11,3,4,5,6,7,8,9,10,11,12,4,5,6,7,8,9,10,11,12,13,5,6,7,8,9,10,11,12,13,14,6,7,8,9,10,11,12,13,14,15,7,8,9,10,11,12,13,14,15,16,8,9,10,11,12,13,14,15]
"""
@doc offset: 0,
     sequence: "Digital sum (i.e., sum of digits) of n; also called digsum(n).",
     references: [{:oeis, :a007953, "https://oeis.org/A007953"}]
# Builds the sequence from the 1-arity generator below; the @doc offset above
# is framework metadata — presumably read by sequence_for_function/1.
def create_sequence_a007953(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a007953/1)
end
@doc false
@doc offset: 0
# Sum of the base-10 digits of idx.
def seq_a007953(idx) do
  Enum.sum(Integer.digits(idx))
end
@doc """
OEIS Sequence `A014263` - Numbers that contain even digits only.
From [OEIS A014263](https://oeis.org/A014263):
> Numbers that contain even digits only.
> (Formerly )
**Sequence IDs**: `:a014263`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a014263) |> Sequence.take!(58)
[0,2,4,6,8,20,22,24,26,28,40,42,44,46,48,60,62,64,66,68,80,82,84,86,88,200,202,204,206,208,220,222,224,226,228,240,242,244,246,248,260,262,264,266,268,280,282,284,286,288,400,402,404,406,408,420,422,424]
"""
@doc offset: 1,
     sequence: "Numbers that contain even digits only.",
     references: [{:oeis, :a014263, "https://oeis.org/A014263"}]
def create_sequence_a014263(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a014263/2)
end

@doc false
@doc offset: 1, fill_value: -1
# Next integer above `last` with no odd decimal digit (fill -1 so 0 is reachable).
def seq_a014263(_idx, last) do
  Math.next_number(&(Math.digit_count(&1, [1, 3, 5, 7, 9]) == 0), last)
end
@doc """
OEIS Sequence `A023692` - Numbers with a single 1 in their ternary expansion.
From [OEIS A023692](https://oeis.org/A023692):
> Numbers with a single 1 in their ternary expansion.
> (Formerly )
**Sequence IDs**: `:a023692`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023692) |> Sequence.take!(56)
[1,3,5,7,9,11,15,17,19,21,23,25,27,29,33,35,45,47,51,53,55,57,59,61,63,65,69,71,73,75,77,79,81,83,87,89,99,101,105,107,135,137,141,143,153,155,159,161,163,165,167,169,171,173,177,179]
"""
@doc offset: 1,
     sequence: "Numbers with a single 1 in their ternary expansion.",
     references: [{:oeis, :a023692, "https://oeis.org/A023692"}]
def create_sequence_a023692(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023692/2)
end

@doc false
@doc offset: 1
# Next integer above `last` with exactly one 1-digit in base 3.
def seq_a023692(_idx, last) do
  Math.next_number(&(Math.digit_count(&1, [1], base: 3) == 1), last)
end
@doc """
OEIS Sequence `A023693` - Numbers with exactly 2 1's in ternary expansion.
From [OEIS A023693](https://oeis.org/A023693):
> Numbers with exactly 2 1's in ternary expansion.
> (Formerly )
**Sequence IDs**: `:a023693`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023693) |> Sequence.take!(53)
[4,10,12,14,16,22,28,30,32,34,36,38,42,44,46,48,50,52,58,64,66,68,70,76,82,84,86,88,90,92,96,98,100,102,104,106,108,110,114,116,126,128,132,134,136,138,140,142,144,146,150,152,154]
"""
@doc offset: 1,
     sequence: "Numbers with exactly 2 1's in ternary expansion.",
     references: [{:oeis, :a023693, "https://oeis.org/A023693"}]
def create_sequence_a023693(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023693/2)
end

@doc false
@doc offset: 1
# Next integer above `last` with exactly two 1-digits in base 3.
def seq_a023693(_idx, last) do
  Math.next_number(&(Math.digit_count(&1, [1], base: 3) == 2), last)
end
@doc """
OEIS Sequence `A023694` - Numbers with exactly 3 1's in ternary expansion.
From [OEIS A023694](https://oeis.org/A023694):
> Numbers with exactly 3 1's in ternary expansion.
> (Formerly )
**Sequence IDs**: `:a023694`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023694) |> Sequence.take!(49)
[13,31,37,39,41,43,49,67,85,91,93,95,97,103,109,111,113,115,117,119,123,125,127,129,131,133,139,145,147,149,151,157,175,193,199,201,203,205,211,229,247,253,255,257,259,265,271,273,275]
"""
@doc offset: 1,
     sequence: "Numbers with exactly 3 1's in ternary expansion.",
     references: [{:oeis, :a023694, "https://oeis.org/A023694"}]
def create_sequence_a023694(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023694/2)
end

@doc false
@doc offset: 1
# Next integer above `last` with exactly three 1-digits in base 3.
def seq_a023694(_idx, last) do
  Math.next_number(&(Math.digit_count(&1, [1], base: 3) == 3), last)
end
@doc """
OEIS Sequence `A023695` - Numbers with exactly 4 1's in ternary expansion.
From [OEIS A023695](https://oeis.org/A023695):
> Numbers with exactly 4 1's in ternary expansion.
> (Formerly )
**Sequence IDs**: `:a023695`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023695) |> Sequence.take!(45)
[40,94,112,118,120,122,124,130,148,202,256,274,280,282,284,286,292,310,328,334,336,338,340,346,352,354,356,358,360,362,366,368,370,372,374,376,382,388,390,392,394,400,418,436,442]
"""
@doc offset: 1,
     sequence: "Numbers with exactly 4 1's in ternary expansion.",
     references: [{:oeis, :a023695, "https://oeis.org/A023695"}]
def create_sequence_a023695(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023695/2)
end

@doc false
@doc offset: 1
# Next integer above `last` with exactly four 1-digits in base 3.
def seq_a023695(_idx, last) do
  Math.next_number(&(Math.digit_count(&1, [1], base: 3) == 4), last)
end
@doc """
OEIS Sequence `A023696` - Numbers with exactly 5 1's in ternary expansion.
From [OEIS A023696](https://oeis.org/A023696):
> Numbers with exactly 5 1's in ternary expansion.
> (Formerly )
**Sequence IDs**: `:a023696`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023696) |> Sequence.take!(40)
[121,283,337,355,361,363,365,367,373,391,445,607,769,823,841,847,849,851,853,859,877,931,985,1003,1009,1011,1013,1015,1021,1039,1057,1063,1065,1067,1069,1075,1081,1083,1085,1087]
"""
@doc offset: 1,
     sequence: "Numbers with exactly 5 1's in ternary expansion.",
     references: [{:oeis, :a023696, "https://oeis.org/A023696"}]
def create_sequence_a023696(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023696/2)
end

@doc false
@doc offset: 1
# Next integer above `last` with exactly five 1-digits in base 3.
def seq_a023696(_idx, last) do
  Math.next_number(&(Math.digit_count(&1, [1], base: 3) == 5), last)
end
@doc """
OEIS Sequence `A023698` - Numbers with exactly 7 1's in ternary expansion.
From [OEIS A023698](https://oeis.org/A023698):
> Numbers with exactly 7 1's in ternary expansion.
> (Formerly )
**Sequence IDs**: `:a023698`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023698) |> Sequence.take!(36)
[1093,2551,3037,3199,3253,3271,3277,3279,3281,3283,3289,3307,3361,3523,4009,5467,6925,7411,7573,7627,7645,7651,7653,7655,7657,7663,7681,7735,7897,8383,8869,9031,9085,9103,9109,9111]
"""
@doc offset: 1,
     sequence: "Numbers with exactly 7 1's in ternary expansion.",
     references: [{:oeis, :a023698, "https://oeis.org/A023698"}]
def create_sequence_a023698(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023698/2)
end

@doc false
@doc offset: 1
# Next integer above `last` with exactly seven 1-digits in base 3.
def seq_a023698(_idx, last) do
  Math.next_number(&(Math.digit_count(&1, [1], base: 3) == 7), last)
end
@doc """
OEIS Sequence `A023699` - Numbers with a single 2 in their ternary expansion.
From [OEIS A023699](https://oeis.org/A023699):
> Numbers with a single 2 in their ternary expansion.
> (Formerly )
**Sequence IDs**: `:a023699`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023699) |> Sequence.take!(56)
[2,5,6,7,11,14,15,16,18,19,21,22,29,32,33,34,38,41,42,43,45,46,48,49,54,55,57,58,63,64,66,67,83,86,87,88,92,95,96,97,99,100,102,103,110,113,114,115,119,122,123,124,126,127,129,130]
"""
@doc offset: 1,
     sequence: "Numbers with a single 2 in their ternary expansion.",
     references: [{:oeis, :a023699, "https://oeis.org/A023699"}]
def create_sequence_a023699(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023699/2)

@doc false
@doc offset: 1
# Next integer after `last` with exactly one 2 in its base-3 representation.
def seq_a023699(_idx, last),
  do: Math.next_number(&(Math.digit_count(&1, [2], base: 3) == 1), last)
@doc """
OEIS Sequence `A023700` - Numbers with exactly 2 2's in ternary expansion.
From [OEIS A023700](https://oeis.org/A023700):
> Numbers with exactly 2 2's in ternary expansion.
> (Formerly )
**Sequence IDs**: `:a023700`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023700) |> Sequence.take!(51)
[8,17,20,23,24,25,35,44,47,50,51,52,56,59,60,61,65,68,69,70,72,73,75,76,89,98,101,104,105,106,116,125,128,131,132,133,137,140,141,142,146,149,150,151,153,154,156,157,164,167,168]
"""
@doc offset: 1,
     sequence: "Numbers with exactly 2 2's in ternary expansion.",
     references: [{:oeis, :a023700, "https://oeis.org/A023700"}]
def create_sequence_a023700(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023700/2)

@doc false
@doc offset: 1
# Next integer after `last` with exactly two 2s in its base-3 representation.
def seq_a023700(_idx, last),
  do: Math.next_number(&(Math.digit_count(&1, [2], base: 3) == 2), last)
@doc """
OEIS Sequence `A023701` - Numbers with exactly 3 2's in their ternary expansion.
From [OEIS A023701](https://oeis.org/A023701):
> Numbers with exactly 3 2's in their ternary expansion.
> (Formerly )
**Sequence IDs**: `:a023701`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023701) |> Sequence.take!(47)
[26,53,62,71,74,77,78,79,107,134,143,152,155,158,159,160,170,179,182,185,186,187,197,206,209,212,213,214,218,221,222,223,227,230,231,232,234,235,237,238,269,296,305,314,317,320,321]
"""
@doc offset: 1,
     sequence: "Numbers with exactly 3 2's in their ternary expansion.",
     references: [{:oeis, :a023701, "https://oeis.org/A023701"}]
def create_sequence_a023701(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023701/2)

@doc false
@doc offset: 1
# Next integer after `last` with exactly three 2s in its base-3 representation.
def seq_a023701(_idx, last),
  do: Math.next_number(&(Math.digit_count(&1, [2], base: 3) == 3), last)
@doc """
OEIS Sequence `A023702` - Numbers with exactly 4 2's in ternary expansion of n.
From [OEIS A023702](https://oeis.org/A023702):
> Numbers with exactly 4 2's in ternary expansion of n.
> (Formerly )
**Sequence IDs**: `:a023702`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023702) |> Sequence.take!(45)
[80,161,188,215,224,233,236,239,240,241,323,404,431,458,467,476,479,482,483,484,512,539,548,557,560,563,564,565,593,620,629,638,641,644,645,646,656,665,668,671,672,673,683,692,695]
"""
@doc offset: 1,
     sequence: "Numbers with exactly 4 2's in ternary expansion of n.",
     references: [{:oeis, :a023702, "https://oeis.org/A023702"}]
def create_sequence_a023702(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023702/2)

@doc false
@doc offset: 1
# Next integer after `last` with exactly four 2s in its base-3 representation.
def seq_a023702(_idx, last),
  do: Math.next_number(&(Math.digit_count(&1, [2], base: 3) == 4), last)
@doc """
OEIS Sequence `A023703` - Numbers with exactly 5 2's in ternary expansion.
From [OEIS A023703](https://oeis.org/A023703):
> Numbers with exactly 5 2's in ternary expansion.
> (Formerly )
**Sequence IDs**: `:a023703`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023703) |> Sequence.take!(38)
[242,485,566,647,674,701,710,719,722,725,726,727,971,1214,1295,1376,1403,1430,1439,1448,1451,1454,1455,1456,1538,1619,1646,1673,1682,1691,1694,1697,1698,1699,1781,1862,1889,1916]
"""
@doc offset: 1,
     sequence: "Numbers with exactly 5 2's in ternary expansion.",
     references: [{:oeis, :a023703, "https://oeis.org/A023703"}]
def create_sequence_a023703(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023703/2)

@doc false
@doc offset: 1
# Next integer after `last` with exactly five 2s in its base-3 representation.
def seq_a023703(_idx, last),
  do: Math.next_number(&(Math.digit_count(&1, [2], base: 3) == 5), last)
@doc """
OEIS Sequence `A023704` - Numbers with exactly 6 2's in ternary expansion.
From [OEIS A023704](https://oeis.org/A023704):
> Numbers with exactly 6 2's in ternary expansion.
> (Formerly )
**Sequence IDs**: `:a023704`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023704) |> Sequence.take!(36)
[728,1457,1700,1943,2024,2105,2132,2159,2168,2177,2180,2183,2184,2185,2915,3644,3887,4130,4211,4292,4319,4346,4355,4364,4367,4370,4371,4372,4616,4859,4940,5021,5048,5075,5084,5093]
"""
@doc offset: 1,
     sequence: "Numbers with exactly 6 2's in ternary expansion.",
     references: [{:oeis, :a023704, "https://oeis.org/A023704"}]
def create_sequence_a023704(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023704/2)

@doc false
@doc offset: 1
# Next integer after `last` with exactly six 2s in its base-3 representation.
def seq_a023704(_idx, last),
  do: Math.next_number(&(Math.digit_count(&1, [2], base: 3) == 6), last)
@doc """
OEIS Sequence `A023706` - Numbers with a single 0 in their base 4 expansion.
From [OEIS A023706](https://oeis.org/A023706):
> Numbers with a single 0 in their base 4 expansion.
> (Formerly )
**Sequence IDs**: `:a023706`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023706) |> Sequence.take!(56)
[0,4,8,12,17,18,19,20,24,28,33,34,35,36,40,44,49,50,51,52,56,60,69,70,71,73,74,75,77,78,79,81,82,83,84,88,92,97,98,99,100,104,108,113,114,115,116,120,124,133,134,135,137,138,139,141]
"""
@doc offset: 1,
     sequence: "Numbers with a single 0 in their base 4 expansion.",
     references: [{:oeis, :a023706, "https://oeis.org/A023706"}]
def create_sequence_a023706(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023706/2)

@doc false
# fill_value -1 lets the iteration start below 0, since 0 itself is a member.
@doc offset: 1, fill_value: -1
# Next integer after `last` with exactly one 0 in its base-4 representation.
def seq_a023706(_idx, last),
  do: Math.next_number(&(Math.digit_count(&1, [0], base: 4) == 1), last)
@doc """
OEIS Sequence `A023707` - Numbers with exactly 2 0's in base 4 expansion.
From [OEIS A023707](https://oeis.org/A023707):
> Numbers with exactly 2 0's in base 4 expansion.
> (Formerly )
**Sequence IDs**: `:a023707`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023707) |> Sequence.take!(48)
[16,32,48,65,66,67,68,72,76,80,96,112,129,130,131,132,136,140,144,160,176,193,194,195,196,200,204,208,224,240,261,262,263,265,266,267,269,270,271,273,274,275,276,280,284,289,290,291]
"""
@doc offset: 1,
     sequence: "Numbers with exactly 2 0's in base 4 expansion.",
     references: [{:oeis, :a023707, "https://oeis.org/A023707"}]
def create_sequence_a023707(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023707/2)

@doc false
@doc offset: 1
# Next integer after `last` with exactly two 0s in its base-4 representation.
def seq_a023707(_idx, last),
  do: Math.next_number(&(Math.digit_count(&1, [0], base: 4) == 2), last)
@doc """
OEIS Sequence `A023708` - Numbers with exactly 3 0's in base 4 expansion.
From [OEIS A023708](https://oeis.org/A023708):
> Numbers with exactly 3 0's in base 4 expansion.
> (Formerly )
**Sequence IDs**: `:a023708`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023708) |> Sequence.take!(44)
[64,128,192,257,258,259,260,264,268,272,288,304,320,384,448,513,514,515,516,520,524,528,544,560,576,640,704,769,770,771,772,776,780,784,800,816,832,896,960,1029,1030,1031,1033,1034]
"""
@doc offset: 1,
     sequence: "Numbers with exactly 3 0's in base 4 expansion.",
     references: [{:oeis, :a023708, "https://oeis.org/A023708"}]
def create_sequence_a023708(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023708/2)

@doc false
@doc offset: 1
# Next integer after `last` with exactly three 0s in its base-4 representation.
def seq_a023708(_idx, last),
  do: Math.next_number(&(Math.digit_count(&1, [0], base: 4) == 3), last)
@doc """
OEIS Sequence `A023709` - Numbers with no 1's in base 4 expansion.
From [OEIS A023709](https://oeis.org/A023709):
> Numbers with no 1's in base 4 expansion.
> (Formerly )
**Sequence IDs**: `:a023709`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023709) |> Sequence.take!(52)
[0,2,3,8,10,11,12,14,15,32,34,35,40,42,43,44,46,47,48,50,51,56,58,59,60,62,63,128,130,131,136,138,139,140,142,143,160,162,163,168,170,171,172,174,175,176,178,179,184,186,187,188]
"""
@doc offset: 1,
     sequence: "Numbers with no 1's in base 4 expansion.",
     references: [{:oeis, :a023709, "https://oeis.org/A023709"}]
def create_sequence_a023709(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023709/2)

@doc false
# fill_value -1 lets the iteration start below 0, since 0 itself is a member.
@doc offset: 1, fill_value: -1
# Next integer after `last` with no 1s in its base-4 representation.
def seq_a023709(_idx, last),
  do: Math.next_number(&(Math.digit_count(&1, [1], base: 4) == 0), last)
@doc """
OEIS Sequence `A023710` - Numbers with a single 1 in their base 4 expansion.
From [OEIS A023710](https://oeis.org/A023710):
> Numbers with a single 1 in their base 4 expansion.
> (Formerly )
**Sequence IDs**: `:a023710`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023710) |> Sequence.take!(56)
[1,4,6,7,9,13,16,18,19,24,26,27,28,30,31,33,36,38,39,41,45,49,52,54,55,57,61,64,66,67,72,74,75,76,78,79,96,98,99,104,106,107,108,110,111,112,114,115,120,122,123,124,126,127,129,132]
"""
@doc offset: 1,
     sequence: "Numbers with a single 1 in their base 4 expansion.",
     references: [{:oeis, :a023710, "https://oeis.org/A023710"}]
def create_sequence_a023710(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023710/2)

@doc false
@doc offset: 1
# Next integer after `last` with exactly one 1 in its base-4 representation.
def seq_a023710(_idx, last),
  do: Math.next_number(&(Math.digit_count(&1, [1], base: 4) == 1), last)
@doc """
OEIS Sequence `A023711` - Numbers with exactly 2 1's in base 4 expansion.
From [OEIS A023711](https://oeis.org/A023711):
> Numbers with exactly 2 1's in base 4 expansion.
> (Formerly )
**Sequence IDs**: `:a023711`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023711) |> Sequence.take!(51)
[5,17,20,22,23,25,29,37,53,65,68,70,71,73,77,80,82,83,88,90,91,92,94,95,97,100,102,103,105,109,113,116,118,119,121,125,133,145,148,150,151,153,157,165,181,197,209,212,214,215,217]
"""
@doc offset: 1,
     sequence: "Numbers with exactly 2 1's in base 4 expansion.",
     references: [{:oeis, :a023711, "https://oeis.org/A023711"}]
def create_sequence_a023711(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023711/2)

@doc false
@doc offset: 1
# Next integer after `last` with exactly two 1s in its base-4 representation.
def seq_a023711(_idx, last),
  do: Math.next_number(&(Math.digit_count(&1, [1], base: 4) == 2), last)
@doc """
OEIS Sequence `A023712` - Numbers with exactly 3 1's in base 4 expansion.
From [OEIS A023712](https://oeis.org/A023712):
> Numbers with exactly 3 1's in base 4 expansion.
> (Formerly )
**Sequence IDs**: `:a023712`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023712) |> Sequence.take!(47)
[21,69,81,84,86,87,89,93,101,117,149,213,261,273,276,278,279,281,285,293,309,321,324,326,327,329,333,336,338,339,344,346,347,348,350,351,353,356,358,359,361,365,369,372,374,375,377]
"""
@doc offset: 1,
     sequence: "Numbers with exactly 3 1's in base 4 expansion.",
     references: [{:oeis, :a023712, "https://oeis.org/A023712"}]
def create_sequence_a023712(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023712/2)

@doc false
@doc offset: 1
# Next integer after `last` with exactly three 1s in its base-4 representation.
def seq_a023712(_idx, last),
  do: Math.next_number(&(Math.digit_count(&1, [1], base: 4) == 3), last)
@doc """
OEIS Sequence `A023713` - Numbers with no 2's in base 4 expansion.
From [OEIS A023713](https://oeis.org/A023713):
> Numbers with no 2's in base 4 expansion.
> (Formerly )
**Sequence IDs**: `:a023713`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023713) |> Sequence.take!(58)
[0,1,3,4,5,7,12,13,15,16,17,19,20,21,23,28,29,31,48,49,51,52,53,55,60,61,63,64,65,67,68,69,71,76,77,79,80,81,83,84,85,87,92,93,95,112,113,115,116,117,119,124,125,127,192,193,195,196]
"""
@doc offset: 1,
     sequence: "Numbers with no 2's in base 4 expansion.",
     references: [{:oeis, :a023713, "https://oeis.org/A023713"}]
def create_sequence_a023713(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023713/2)

@doc false
# fill_value -1 lets the iteration start below 0, since 0 itself is a member.
@doc offset: 1, fill_value: -1
# Next integer after `last` with no 2s in its base-4 representation.
def seq_a023713(_idx, last),
  do: Math.next_number(&(Math.digit_count(&1, [2], base: 4) == 0), last)
@doc """
OEIS Sequence `A023714` - Numbers with a single 2 in their base 4 expansion.
From [OEIS A023714](https://oeis.org/A023714):
> Numbers with a single 2 in their base 4 expansion.
> (Formerly )
**Sequence IDs**: `:a023714`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023714) |> Sequence.take!(56)
[2,6,8,9,11,14,18,22,24,25,27,30,32,33,35,36,37,39,44,45,47,50,54,56,57,59,62,66,70,72,73,75,78,82,86,88,89,91,94,96,97,99,100,101,103,108,109,111,114,118,120,121,123,126,128,129]
"""
@doc offset: 1,
     sequence: "Numbers with a single 2 in their base 4 expansion.",
     references: [{:oeis, :a023714, "https://oeis.org/A023714"}]
def create_sequence_a023714(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023714/2)

@doc false
@doc offset: 1
# Next integer after `last` with exactly one 2 in its base-4 representation.
def seq_a023714(_idx, last),
  do: Math.next_number(&(Math.digit_count(&1, [2], base: 4) == 1), last)
@doc """
OEIS Sequence `A023715` - Numbers with exactly 2 2's in base 4 expansion.
From [OEIS A023715](https://oeis.org/A023715):
> Numbers with exactly 2 2's in base 4 expansion.
> (Formerly )
**Sequence IDs**: `:a023715`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023715) |> Sequence.take!(48)
[10,26,34,38,40,41,43,46,58,74,90,98,102,104,105,107,110,122,130,134,136,137,139,142,146,150,152,153,155,158,160,161,163,164,165,167,172,173,175,178,182,184,185,187,190,202,218,226]
"""
@doc offset: 1,
     sequence: "Numbers with exactly 2 2's in base 4 expansion.",
     references: [{:oeis, :a023715, "https://oeis.org/A023715"}]
def create_sequence_a023715(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023715/2)

@doc false
@doc offset: 1
# Next integer after `last` with exactly two 2s in its base-4 representation.
def seq_a023715(_idx, last),
  do: Math.next_number(&(Math.digit_count(&1, [2], base: 4) == 2), last)
@doc """
OEIS Sequence `A023716` - Numbers with exactly 3 2's in base 4 expansion.
From [OEIS A023716](https://oeis.org/A023716):
> Numbers with exactly 3 2's in base 4 expansion.
> (Formerly )
**Sequence IDs**: `:a023716`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023716) |> Sequence.take!(45)
[42,106,138,154,162,166,168,169,171,174,186,234,298,362,394,410,418,422,424,425,427,430,442,490,522,538,546,550,552,553,555,558,570,586,602,610,614,616,617,619,622,634,642,646,648]
"""
@doc offset: 1,
     sequence: "Numbers with exactly 3 2's in base 4 expansion.",
     references: [{:oeis, :a023716, "https://oeis.org/A023716"}]
def create_sequence_a023716(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023716/2)

@doc false
@doc offset: 1
# Next integer after `last` with exactly three 2s in its base-4 representation.
def seq_a023716(_idx, last),
  do: Math.next_number(&(Math.digit_count(&1, [2], base: 4) == 3), last)
@doc """
OEIS Sequence `A023717` - Numbers with no 3's in base 4 expansion.
From [OEIS A023717](https://oeis.org/A023717):
> Numbers with no 3's in base 4 expansion.
> (Formerly )
**Sequence IDs**: `:a023717`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023717) |> Sequence.take!(59)
[0,1,2,4,5,6,8,9,10,16,17,18,20,21,22,24,25,26,32,33,34,36,37,38,40,41,42,64,65,66,68,69,70,72,73,74,80,81,82,84,85,86,88,89,90,96,97,98,100,101,102,104,105,106,128,129,130,132,133]
"""
@doc offset: 0,
     sequence: "Numbers with no 3's in base 4 expansion.",
     references: [{:oeis, :a023717, "https://oeis.org/A023717"}]
def create_sequence_a023717(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023717/2)

@doc false
# fill_value -1 lets the iteration start below 0, since 0 itself is a member.
@doc offset: 0, fill_value: -1
# Next integer after `last` with no 3s in its base-4 representation.
def seq_a023717(_idx, last),
  do: Math.next_number(&(Math.digit_count(&1, [3], base: 4) == 0), last)
@doc """
OEIS Sequence `A023718` - Numbers with a single 3 in their base 4 expansion.
From [OEIS A023718](https://oeis.org/A023718):
> Numbers with a single 3 in their base 4 expansion.
> (Formerly )
**Sequence IDs**: `:a023718`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023718) |> Sequence.take!(56)
[3,7,11,12,13,14,19,23,27,28,29,30,35,39,43,44,45,46,48,49,50,52,53,54,56,57,58,67,71,75,76,77,78,83,87,91,92,93,94,99,103,107,108,109,110,112,113,114,116,117,118,120,121,122,131,135]
"""
@doc offset: 1,
     sequence: "Numbers with a single 3 in their base 4 expansion.",
     references: [{:oeis, :a023718, "https://oeis.org/A023718"}]
def create_sequence_a023718(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023718/2)

@doc false
@doc offset: 1
# Next integer after `last` with exactly one 3 in its base-4 representation.
def seq_a023718(_idx, last),
  do: Math.next_number(&(Math.digit_count(&1, [3], base: 4) == 1), last)
@doc """
OEIS Sequence `A023719` - Numbers with exactly two 3's in base 4 expansion.
From [OEIS A023719](https://oeis.org/A023719):
> Numbers with exactly two 3's in base 4 expansion.
> (Formerly )
**Sequence IDs**: `:a023719`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023719) |> Sequence.take!(54)
[15,31,47,51,55,59,60,61,62,79,95,111,115,119,123,124,125,126,143,159,175,179,183,187,188,189,190,195,199,203,204,205,206,211,215,219,220,221,222,227,231,235,236,237,238,240,241,242,244,245,246,248,249,250]
"""
@doc offset: 1,
     sequence: "Numbers with exactly two 3's in base 4 expansion.",
     references: [{:oeis, :a023719, "https://oeis.org/A023719"}]
def create_sequence_a023719(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023719/2)

@doc false
@doc offset: 1
# Next integer after `last` with exactly two 3s in its base-4 representation.
def seq_a023719(_idx, last),
  do: Math.next_number(&(Math.digit_count(&1, [3], base: 4) == 2), last)
@doc """
OEIS Sequence `A023720` - Numbers with exactly 3 3's in base 4 expansion.
From [OEIS A023720](https://oeis.org/A023720):
> Numbers with exactly 3 3's in base 4 expansion.
> (Formerly )
**Sequence IDs**: `:a023720`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023720) |> Sequence.take!(45)
[63,127,191,207,223,239,243,247,251,252,253,254,319,383,447,463,479,495,499,503,507,508,509,510,575,639,703,719,735,751,755,759,763,764,765,766,783,799,815,819,823,827,828,829,830]
"""
@doc offset: 1,
     sequence: "Numbers with exactly 3 3's in base 4 expansion.",
     references: [{:oeis, :a023720, "https://oeis.org/A023720"}]
def create_sequence_a023720(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023720/2)

@doc false
@doc offset: 1
# Next integer after `last` with exactly three 3s in its base-4 representation.
def seq_a023720(_idx, last),
  do: Math.next_number(&(Math.digit_count(&1, [3], base: 4) == 3), last)
@doc """
OEIS Sequence `A023721` - Numbers with no 0's in their base-5 expansion.
From [OEIS A023721](https://oeis.org/A023721):
> Numbers with no 0's in their base-5 expansion.
> (Formerly )
**Sequence IDs**: `:a023721`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023721) |> Sequence.take!(63)
[1,2,3,4,6,7,8,9,11,12,13,14,16,17,18,19,21,22,23,24,31,32,33,34,36,37,38,39,41,42,43,44,46,47,48,49,56,57,58,59,61,62,63,64,66,67,68,69,71,72,73,74,81,82,83,84,86,87,88,89,91,92,93]
"""
@doc offset: 1,
     sequence: "Numbers with no 0's in their base-5 expansion.",
     references: [{:oeis, :a023721, "https://oeis.org/A023721"}]
def create_sequence_a023721(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023721/2)

@doc false
@doc offset: 1
# Next integer after `last` with no 0s in its base-5 representation.
def seq_a023721(_idx, last),
  do: Math.next_number(&(Math.digit_count(&1, [0], base: 5) == 0), last)
@doc """
OEIS Sequence `A023722` - Numbers with a single 0 in their base 5 expansion.
From [OEIS A023722](https://oeis.org/A023722):
> Numbers with a single 0 in their base 5 expansion.
> (Formerly )
**Sequence IDs**: `:a023722`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023722) |> Sequence.take!(53)
[0,5,10,15,20,26,27,28,29,30,35,40,45,51,52,53,54,55,60,65,70,76,77,78,79,80,85,90,95,101,102,103,104,105,110,115,120,131,132,133,134,136,137,138,139,141,142,143,144,146,147,148,149]
"""
@doc offset: 1,
     sequence: "Numbers with a single 0 in their base 5 expansion.",
     references: [{:oeis, :a023722, "https://oeis.org/A023722"}]
def create_sequence_a023722(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023722/2)

@doc false
# fill_value -1 lets the iteration start below 0, since 0 itself is a member.
@doc offset: 1, fill_value: -1
# Next integer after `last` with exactly one 0 in its base-5 representation.
def seq_a023722(_idx, last),
  do: Math.next_number(&(Math.digit_count(&1, [0], base: 5) == 1), last)
@doc """
OEIS Sequence `A023723` - Numbers with exactly 2 0's in base 5 expansion.
From [OEIS A023723](https://oeis.org/A023723):
> Numbers with exactly 2 0's in base 5 expansion.
> (Formerly )
**Sequence IDs**: `:a023723`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023723) |> Sequence.take!(46)
[25,50,75,100,126,127,128,129,130,135,140,145,150,175,200,225,251,252,253,254,255,260,265,270,275,300,325,350,376,377,378,379,380,385,390,395,400,425,450,475,501,502,503,504,505,510]
"""
@doc offset: 1,
     sequence: "Numbers with exactly 2 0's in base 5 expansion.",
     references: [{:oeis, :a023723, "https://oeis.org/A023723"}]
def create_sequence_a023723(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023723/2)

@doc false
@doc offset: 1
# Next integer after `last` with exactly two 0s in its base-5 representation.
def seq_a023723(_idx, last),
  do: Math.next_number(&(Math.digit_count(&1, [0], base: 5) == 2), last)
@doc """
OEIS Sequence `A023724` - Numbers with exactly 3 0's in base 5 expansion.
From [OEIS A023724](https://oeis.org/A023724):
> Numbers with exactly 3 0's in base 5 expansion.
> (Formerly )
**Sequence IDs**: `:a023724`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023724) |> Sequence.take!(39)
[125,250,375,500,626,627,628,629,630,635,640,645,650,675,700,725,750,875,1000,1125,1251,1252,1253,1254,1255,1260,1265,1270,1275,1300,1325,1350,1375,1500,1625,1750,1876,1877,1878]
"""
@doc offset: 1,
     sequence: "Numbers with exactly 3 0's in base 5 expansion.",
     references: [{:oeis, :a023724, "https://oeis.org/A023724"}]
def create_sequence_a023724(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023724/2)

@doc false
@doc offset: 1
# Next integer after `last` with exactly three 0s in its base-5 representation.
def seq_a023724(_idx, last),
  do: Math.next_number(&(Math.digit_count(&1, [0], base: 5) == 3), last)
@doc """
OEIS Sequence `A023725` - Numbers with no 1's in their base-5 expansion.
From [OEIS A023725](https://oeis.org/A023725):
> Numbers with no 1's in their base-5 expansion.
> (Formerly )
**Sequence IDs**: `:a023725`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023725) |> Sequence.take!(61)
[0,2,3,4,10,12,13,14,15,17,18,19,20,22,23,24,50,52,53,54,60,62,63,64,65,67,68,69,70,72,73,74,75,77,78,79,85,87,88,89,90,92,93,94,95,97,98,99,100,102,103,104,110,112,113,114,115,117,118,119,120]
"""
@doc offset: 1,
     sequence: "Numbers with no 1's in their base-5 expansion.",
     references: [{:oeis, :a023725, "https://oeis.org/A023725"}]
def create_sequence_a023725(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023725/2)

@doc false
# fill_value -1 lets the iteration start below 0, since 0 itself is a member.
@doc offset: 1, fill_value: -1
# Next integer after `last` with no 1s in its base-5 representation.
def seq_a023725(_idx, last),
  do: Math.next_number(&(Math.digit_count(&1, [1], base: 5) == 0), last)
@doc """
OEIS Sequence `A023726` - Numbers with a single 1 in their base 5 expansion.
From [OEIS A023726](https://oeis.org/A023726):
> Numbers with a single 1 in their base 5 expansion.
> (Formerly )
**Sequence IDs**: `:a023726`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023726) |> Sequence.take!(56)
[1,5,7,8,9,11,16,21,25,27,28,29,35,37,38,39,40,42,43,44,45,47,48,49,51,55,57,58,59,61,66,71,76,80,82,83,84,86,91,96,101,105,107,108,109,111,116,121,125,127,128,129,135,137,138,139]
"""
@doc offset: 1,
     sequence: "Numbers with a single 1 in their base 5 expansion.",
     references: [{:oeis, :a023726, "https://oeis.org/A023726"}]
def create_sequence_a023726(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023726/2)

@doc false
@doc offset: 1
# Next integer after `last` with exactly one 1 in its base-5 representation.
def seq_a023726(_idx, last),
  do: Math.next_number(&(Math.digit_count(&1, [1], base: 5) == 1), last)
@doc """
OEIS Sequence `A023727` - Numbers with exactly 2 1's in their base 5 expansion.
From [OEIS A023727](https://oeis.org/A023727):
> Numbers with exactly 2 1's in their base 5 expansion.
> (Formerly )
**Sequence IDs**: `:a023727`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023727) |> Sequence.take!(48)
[6,26,30,32,33,34,36,41,46,56,81,106,126,130,132,133,134,136,141,146,150,152,153,154,160,162,163,164,165,167,168,169,170,172,173,174,176,180,182,183,184,186,191,196,201,205,207,208]
"""
@doc offset: 1,
     sequence: "Numbers with exactly 2 1's in their base 5 expansion.",
     references: [{:oeis, :a023727, "https://oeis.org/A023727"}]
def create_sequence_a023727(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023727/2)

@doc false
@doc offset: 1
# Next integer after `last` with exactly two 1s in its base-5 representation.
def seq_a023727(_idx, last),
  do: Math.next_number(&(Math.digit_count(&1, [1], base: 5) == 2), last)
@doc """
OEIS Sequence `A023728` - Numbers with exactly 3 1's in base 5 expansion.
From [OEIS A023728](https://oeis.org/A023728):
> Numbers with exactly 3 1's in base 5 expansion.
> (Formerly )
**Sequence IDs**: `:a023728`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023728) |> Sequence.take!(45)
[31,131,151,155,157,158,159,161,166,171,181,206,231,281,406,531,631,651,655,657,658,659,661,666,671,681,706,731,751,755,757,758,759,761,766,771,775,777,778,779,785,787,788,789,790]
"""
@doc offset: 1,
     sequence: "Numbers with exactly 3 1's in base 5 expansion.",
     references: [{:oeis, :a023728, "https://oeis.org/A023728"}]
def create_sequence_a023728(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023728/2)

@doc false
@doc offset: 1
# Next integer after `last` with exactly three 1s in its base-5 representation.
def seq_a023728(_idx, last),
  do: Math.next_number(&(Math.digit_count(&1, [1], base: 5) == 3), last)
@doc """
OEIS Sequence `A023729` - Numbers with no 2's in their base-5 expansion.
From [OEIS A023729](https://oeis.org/A023729):
> Numbers with no 2's in their base-5 expansion.
> (Formerly )
**Sequence IDs**: `:a023729`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023729) |> Sequence.take!(62)
[0,1,3,4,5,6,8,9,15,16,18,19,20,21,23,24,25,26,28,29,30,31,33,34,40,41,43,44,45,46,48,49,75,76,78,79,80,81,83,84,90,91,93,94,95,96,98,99,100,101,103,104,105,106,108,109,115,116,118,119,120,121]
"""
@doc offset: 1,
     sequence: "Numbers with no 2's in their base-5 expansion.",
     references: [{:oeis, :a023729, "https://oeis.org/A023729"}]
def create_sequence_a023729(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023729/2)

@doc false
# fill_value -1 lets the iteration start below 0, since 0 itself is a member.
@doc offset: 1, fill_value: -1
# Next integer after `last` with no 2s in its base-5 representation.
def seq_a023729(_idx, last),
  do: Math.next_number(&(Math.digit_count(&1, [2], base: 5) == 0), last)
@doc """
OEIS Sequence `A023730` - Numbers with a single 2 in their base 5 expansion.
From [OEIS A023730](https://oeis.org/A023730):
> Numbers with a single 2 in their base 5 expansion.
> (Formerly )
**Sequence IDs**: `:a023730`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023730) |> Sequence.take!(56)
[2,7,10,11,13,14,17,22,27,32,35,36,38,39,42,47,50,51,53,54,55,56,58,59,65,66,68,69,70,71,73,74,77,82,85,86,88,89,92,97,102,107,110,111,113,114,117,122,127,132,135,136,138,139,142,147]
"""
@doc offset: 1,
     sequence: "Numbers with a single 2 in their base 5 expansion.",
     references: [{:oeis, :a023730, "https://oeis.org/A023730"}]
def create_sequence_a023730(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023730/2)

@doc false
@doc offset: 1
# Next integer after `last` with exactly one 2 in its base-5 representation.
def seq_a023730(_idx, last),
  do: Math.next_number(&(Math.digit_count(&1, [2], base: 5) == 1), last)
@doc """
OEIS Sequence `A023731` - Numbers with exactly two 2's in base 5 expansion.
From [OEIS A023731](https://oeis.org/A023731):
> Numbers with exactly two 2's in base 5 expansion.
> (Formerly )
**Sequence IDs**: `:a023731`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023731) |> Sequence.take!(48)
[12,37,52,57,60,61,63,64,67,72,87,112,137,162,177,182,185,186,188,189,192,197,212,237,252,257,260,261,263,264,267,272,277,282,285,286,288,289,292,297,300,301,303,304,305,306,308,309]
"""
@doc offset: 1,
     sequence: "Numbers with exactly two 2's in base 5 expansion.",
     references: [{:oeis, :a023731, "https://oeis.org/A023731"}]
def create_sequence_a023731(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023731/2)

@doc false
@doc offset: 1
# Next integer after `last` with exactly two 2s in its base-5 representation.
def seq_a023731(_idx, last),
  do: Math.next_number(&(Math.digit_count(&1, [2], base: 5) == 2), last)
@doc """
OEIS Sequence `A023732` - Numbers with exactly 3 2's in base 5 expansion.
From [OEIS A023732](https://oeis.org/A023732):
> Numbers with exactly 3 2's in base 5 expansion.
> (Formerly )
**Sequence IDs**: `:a023732`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023732) |> Sequence.take!(42)
[62,187,262,287,302,307,310,311,313,314,317,322,337,362,437,562,687,812,887,912,927,932,935,936,938,939,942,947,962,987,1062,1187,1262,1287,1302,1307,1310,1311,1313,1314,1317,1322]
"""
@doc offset: 1,
     sequence: "Numbers with exactly 3 2's in base 5 expansion.",
     references: [{:oeis, :a023732, "https://oeis.org/A023732"}]
def create_sequence_a023732(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023732/2)

@doc false
@doc offset: 1
# Next integer after `last` with exactly three 2s in its base-5 representation.
def seq_a023732(_idx, last),
  do: Math.next_number(&(Math.digit_count(&1, [2], base: 5) == 3), last)
@doc """
OEIS Sequence `A023733` - Numbers with no 3's in base-5 expansion.
From [OEIS A023733](https://oeis.org/A023733):
> Numbers with no 3's in base-5 expansion.
> (Formerly )
**Sequence IDs**: `:a023733`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023733) |> Sequence.take!(60)
[0,1,2,4,5,6,7,9,10,11,12,14,20,21,22,24,25,26,27,29,30,31,32,34,35,36,37,39,45,46,47,49,50,51,52,54,55,56,57,59,60,61,62,64,70,71,72,74,100,101,102,104,105,106,107,109,110,111,112,114]
"""
@doc offset: 1,
     sequence: "Numbers with no 3's in base-5 expansion.",
     references: [{:oeis, :a023733, "https://oeis.org/A023733"}]
def create_sequence_a023733(_opts),
  do: sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023733/2)

@doc false
# fill_value -1 lets the iteration start below 0, since 0 itself is a member.
@doc offset: 1, fill_value: -1
# Next integer after `last` with no 3s in its base-5 representation.
def seq_a023733(_idx, last),
  do: Math.next_number(&(Math.digit_count(&1, [3], base: 5) == 0), last)
@doc """
OEIS Sequence `A023734` - Numbers with a single 3 in their base-5 expansion.
From [OEIS A023734](https://oeis.org/A023734):
> Numbers with a single 3 in their base-5 expansion.
> (Formerly )
**Sequence IDs**: `:a023734`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023734) |> Sequence.take!(58)
[3,8,13,15,16,17,19,23,28,33,38,40,41,42,44,48,53,58,63,65,66,67,69,73,75,76,77,79,80,81,82,84,85,86,87,89,95,96,97,99,103,108,113,115,116,117,119,123,128,133,138,140,141,142,144,148,153,158]
"""
@doc offset: 1,
    sequence: "Numbers with a single 3 in their base-5 expansion.",
    references: [{:oeis, :a023734, "https://oeis.org/A023734"}]
def create_sequence_a023734(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023734/2)
end

@doc false
@doc offset: 1
def seq_a023734(_idx, last) do
  # next value above `last` with exactly one 3 among its base-5 digits
  Math.next_number(&(Math.digit_count(&1, [3], base: 5) == 1), last)
end
@doc """
OEIS Sequence `A023735` - Numbers with exactly 2 3's in their base-5 expansion.
From [OEIS A023735](https://oeis.org/A023735):
> Numbers with exactly 2 3's in their base-5 expansion.
> (Formerly )
**Sequence IDs**: `:a023735`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023735) |> Sequence.take!(52)
[18,43,68,78,83,88,90,91,92,94,98,118,143,168,193,203,208,213,215,216,217,219,223,243,268,293,318,328,333,338,340,341,342,344,348,368,378,383,388,390,391,392,394,398,403,408,413,415,416,417,419,423]
"""
@doc offset: 1,
    sequence: "Numbers with exactly 2 3's in their base-5 expansion.",
    references: [{:oeis, :a023735, "https://oeis.org/A023735"}]
def create_sequence_a023735(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023735/2)
end

@doc false
@doc offset: 1
def seq_a023735(_idx, last) do
  # next value above `last` with exactly two 3's among its base-5 digits
  Math.next_number(&(Math.digit_count(&1, [3], base: 5) == 2), last)
end
@doc """
OEIS Sequence `A023736` - Numbers with exactly 3 3's in their base-5 expansion.
From [OEIS A023736](https://oeis.org/A023736):
> Numbers with exactly 3 3's in their base-5 expansion.
> (Formerly )
**Sequence IDs**: `:a023736`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023736) |> Sequence.take!(40)
[93,218,343,393,418,443,453,458,463,465,466,467,469,473,493,593,718,843,968,1018,1043,1068,1078,1083,1088,1090,1091,1092,1094,1098,1118,1218,1343,1468,1593,1643,1668,1693,1703,1708]
"""
@doc offset: 1,
    sequence: "Numbers with exactly 3 3's in their base-5 expansion.",
    references: [{:oeis, :a023736, "https://oeis.org/A023736"}]
def create_sequence_a023736(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023736/2)
end

@doc false
@doc offset: 1
def seq_a023736(_idx, last) do
  # next value above `last` with exactly three 3's among its base-5 digits
  Math.next_number(&(Math.digit_count(&1, [3], base: 5) == 3), last)
end
@doc """
OEIS Sequence `A023738` - Numbers with a single 4 in their base 5 expansion.
From [OEIS A023738](https://oeis.org/A023738):
> Numbers with a single 4 in their base 5 expansion.
> (Formerly )
**Sequence IDs**: `:a023738`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023738) |> Sequence.take!(54)
[4,9,14,19,20,21,22,23,29,34,39,44,45,46,47,48,54,59,64,69,70,71,72,73,79,84,89,94,95,96,97,98,100,101,102,103,105,106,107,108,110,111,112,113,115,116,117,118,129,134,139,144,145,146]
"""
@doc offset: 1,
    sequence: "Numbers with a single 4 in their base 5 expansion.",
    references: [{:oeis, :a023738, "https://oeis.org/A023738"}]
def create_sequence_a023738(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023738/2)
end

@doc false
@doc offset: 1
def seq_a023738(_idx, last) do
  # next value above `last` with exactly one 4 among its base-5 digits
  Math.next_number(&(Math.digit_count(&1, [4], base: 5) == 1), last)
end
@doc """
OEIS Sequence `A023739` - Numbers with exactly 2 4's in base 5 expansion.
From [OEIS A023739](https://oeis.org/A023739):
> Numbers with exactly 2 4's in base 5 expansion.
> (Formerly )
**Sequence IDs**: `:a023739`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023739) |> Sequence.take!(46)
[24,49,74,99,104,109,114,119,120,121,122,123,149,174,199,224,229,234,239,244,245,246,247,248,274,299,324,349,354,359,364,369,370,371,372,373,399,424,449,474,479,484,489,494,495,496]
"""
@doc offset: 1,
    sequence: "Numbers with exactly 2 4's in base 5 expansion.",
    references: [{:oeis, :a023739, "https://oeis.org/A023739"}]
def create_sequence_a023739(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023739/2)
end

@doc false
@doc offset: 1
def seq_a023739(_idx, last) do
  # next value above `last` with exactly two 4's among its base-5 digits
  Math.next_number(&(Math.digit_count(&1, [4], base: 5) == 2), last)
end
@doc """
OEIS Sequence `A023740` - Numbers with exactly 3 4's in base 5 expansion.
From [OEIS A023740](https://oeis.org/A023740):
> Numbers with exactly 3 4's in base 5 expansion.
> (Formerly )
**Sequence IDs**: `:a023740`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023740) |> Sequence.take!(40)
[124,249,374,499,524,549,574,599,604,609,614,619,620,621,622,623,749,874,999,1124,1149,1174,1199,1224,1229,1234,1239,1244,1245,1246,1247,1248,1374,1499,1624,1749,1774,1799,1824,1849]
"""
@doc offset: 1,
    sequence: "Numbers with exactly 3 4's in base 5 expansion.",
    references: [{:oeis, :a023740, "https://oeis.org/A023740"}]
def create_sequence_a023740(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023740/2)
end

@doc false
@doc offset: 1
def seq_a023740(_idx, last) do
  # next value above `last` with exactly three 4's among its base-5 digits
  Math.next_number(&(Math.digit_count(&1, [4], base: 5) == 3), last)
end
@doc """
OEIS Sequence `A023745` - Plaindromes: numbers whose digits in base 3 are in nondecreasing order.
From [OEIS A023745](https://oeis.org/A023745):
> Plaindromes: numbers whose digits in base 3 are in nondecreasing order.
> (Formerly )
**Sequence IDs**: `:a023745`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023745) |> Sequence.take!(49)
[0,1,2,4,5,8,13,14,17,26,40,41,44,53,80,121,122,125,134,161,242,364,365,368,377,404,485,728,1093,1094,1097,1106,1133,1214,1457,2186,3280,3281,3284,3293,3320,3401,3644,4373,6560,9841,9842,9845,9854]
"""
@doc offset: 1,
    sequence: "Plaindromes: numbers whose digits in base 3 are in nondecreasing order.",
    references: [{:oeis, :a023745, "https://oeis.org/A023745"}]
def create_sequence_a023745(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023745/2)
end

@doc false
@doc offset: 1, fill_value: -1
def seq_a023745(_idx, last) do
  # fill_value -1 admits 0; scan for the next base-3 plaindrome
  Math.next_number(&Math.is_plaindrome_in_base?(&1, 3), last)
end
@doc """
OEIS Sequence `A023746` - Plaindromes: numbers whose digits in base 4 are in nondecreasing order.
From [OEIS A023746](https://oeis.org/A023746):
> Plaindromes: numbers whose digits in base 4 are in nondecreasing order.
> (Formerly )
**Sequence IDs**: `:a023746`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023746) |> Sequence.take!(58)
[0,1,2,3,5,6,7,10,11,15,21,22,23,26,27,31,42,43,47,63,85,86,87,90,91,95,106,107,111,127,170,171,175,191,255,341,342,343,346,347,351,362,363,367,383,426,427,431,447,511,682,683,687,703,767,1023,1365,1366]
"""
@doc offset: 1,
    sequence: "Plaindromes: numbers whose digits in base 4 are in nondecreasing order.",
    references: [{:oeis, :a023746, "https://oeis.org/A023746"}]
def create_sequence_a023746(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023746/2)
end

@doc false
@doc offset: 1, fill_value: -1
def seq_a023746(_idx, last) do
  # fill_value -1 admits 0; scan for the next base-4 plaindrome
  Math.next_number(&Math.is_plaindrome_in_base?(&1, 4), last)
end
@doc """
OEIS Sequence `A023747` - Plaindromes: numbers whose digits in base 5 are in nondecreasing order.
From [OEIS A023747](https://oeis.org/A023747):
> Plaindromes: numbers whose digits in base 5 are in nondecreasing order.
> (Formerly )
**Sequence IDs**: `:a023747`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023747) |> Sequence.take!(60)
[0,1,2,3,4,6,7,8,9,12,13,14,18,19,24,31,32,33,34,37,38,39,43,44,49,62,63,64,68,69,74,93,94,99,124,156,157,158,159,162,163,164,168,169,174,187,188,189,193,194,199,218,219,224,249,312,313,314,318,319]
"""
@doc offset: 1,
    sequence: "Plaindromes: numbers whose digits in base 5 are in nondecreasing order.",
    references: [{:oeis, :a023747, "https://oeis.org/A023747"}]
def create_sequence_a023747(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023747/2)
end

@doc false
@doc offset: 1, fill_value: -1
def seq_a023747(_idx, last) do
  # fill_value -1 admits 0; scan for the next base-5 plaindrome
  Math.next_number(&Math.is_plaindrome_in_base?(&1, 5), last)
end
@doc """
OEIS Sequence `A023748` - Plaindromes: numbers whose digits in base 6 are in nondecreasing order.
From [OEIS A023748](https://oeis.org/A023748):
> Plaindromes: numbers whose digits in base 6 are in nondecreasing order.
> (Formerly )
**Sequence IDs**: `:a023748`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023748) |> Sequence.take!(63)
[0,1,2,3,4,5,7,8,9,10,11,14,15,16,17,21,22,23,28,29,35,43,44,45,46,47,50,51,52,53,57,58,59,64,65,71,86,87,88,89,93,94,95,100,101,107,129,130,131,136,137,143,172,173,179,215,259,260,261,262,263,266,267]
"""
@doc offset: 1,
    sequence: "Plaindromes: numbers whose digits in base 6 are in nondecreasing order.",
    references: [{:oeis, :a023748, "https://oeis.org/A023748"}]
def create_sequence_a023748(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023748/2)
end

@doc false
@doc offset: 1, fill_value: -1
def seq_a023748(_idx, last) do
  # fill_value -1 admits 0; scan for the next base-6 plaindrome
  Math.next_number(&Math.is_plaindrome_in_base?(&1, 6), last)
end
@doc """
OEIS Sequence `A023749` - Plaindromes: numbers whose digits in base 7 are in nondecreasing order.
From [OEIS A023749](https://oeis.org/A023749):
> Plaindromes: numbers whose digits in base 7 are in nondecreasing order.
> (Formerly )
**Sequence IDs**: `:a023749`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023749) |> Sequence.take!(65)
[0,1,2,3,4,5,6,8,9,10,11,12,13,16,17,18,19,20,24,25,26,27,32,33,34,40,41,48,57,58,59,60,61,62,65,66,67,68,69,73,74,75,76,81,82,83,89,90,97,114,115,116,117,118,122,123,124,125,130,131,132,138,139,146,171]
"""
@doc offset: 1,
    sequence: "Plaindromes: numbers whose digits in base 7 are in nondecreasing order.",
    references: [{:oeis, :a023749, "https://oeis.org/A023749"}]
def create_sequence_a023749(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023749/2)
end

@doc false
@doc offset: 1, fill_value: -1
def seq_a023749(_idx, last) do
  # fill_value -1 admits 0; scan for the next base-7 plaindrome
  Math.next_number(&Math.is_plaindrome_in_base?(&1, 7), last)
end
@doc """
OEIS Sequence `A023750` - Plaindromes: numbers whose digits in base 8 are in nondecreasing order.
From [OEIS A023750](https://oeis.org/A023750):
> Plaindromes: numbers whose digits in base 8 are in nondecreasing order.
> (Formerly )
**Sequence IDs**: `:a023750`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023750) |> Sequence.take!(66)
[0,1,2,3,4,5,6,7,9,10,11,12,13,14,15,18,19,20,21,22,23,27,28,29,30,31,36,37,38,39,45,46,47,54,55,63,73,74,75,76,77,78,79,82,83,84,85,86,87,91,92,93,94,95,100,101,102,103,109,110,111,118,119,127,146,147]
"""
@doc offset: 1,
    sequence: "Plaindromes: numbers whose digits in base 8 are in nondecreasing order.",
    references: [{:oeis, :a023750, "https://oeis.org/A023750"}]
def create_sequence_a023750(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023750/2)
end

@doc false
@doc offset: 1, fill_value: -1
def seq_a023750(_idx, last) do
  # fill_value -1 admits 0; scan for the next base-8 plaindrome
  Math.next_number(&Math.is_plaindrome_in_base?(&1, 8), last)
end
@doc """
OEIS Sequence `A023751` - Plaindromes: numbers whose digits in base 9 are in nondecreasing order.
From [OEIS A023751](https://oeis.org/A023751):
> Plaindromes: numbers whose digits in base 9 are in nondecreasing order.
> (Formerly )
**Sequence IDs**: `:a023751`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023751) |> Sequence.take!(66)
[0,1,2,3,4,5,6,7,8,10,11,12,13,14,15,16,17,20,21,22,23,24,25,26,30,31,32,33,34,35,40,41,42,43,44,50,51,52,53,60,61,62,70,71,80,91,92,93,94,95,96,97,98,101,102,103,104,105,106,107,111,112,113,114,115,116]
"""
@doc offset: 1,
    sequence: "Plaindromes: numbers whose digits in base 9 are in nondecreasing order.",
    references: [{:oeis, :a023751, "https://oeis.org/A023751"}]
def create_sequence_a023751(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023751/2)
end

@doc false
@doc offset: 1, fill_value: -1
def seq_a023751(_idx, last) do
  # fill_value -1 admits 0; scan for the next base-9 plaindrome
  Math.next_number(&Math.is_plaindrome_in_base?(&1, 9), last)
end
@doc """
OEIS Sequence `A023752` - Plaindromes: numbers whose digits in base 11 are in nondecreasing order.
From [OEIS A023752](https://oeis.org/A023752):
> Plaindromes: numbers whose digits in base 11 are in nondecreasing order.
> (Formerly )
**Sequence IDs**: `:a023752`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023752) |> Sequence.take!(66)
[0,1,2,3,4,5,6,7,8,9,10,12,13,14,15,16,17,18,19,20,21,24,25,26,27,28,29,30,31,32,36,37,38,39,40,41,42,43,48,49,50,51,52,53,54,60,61,62,63,64,65,72,73,74,75,76,84,85,86,87,96,97,98,108,109,120]
"""
@doc offset: 1,
    sequence: "Plaindromes: numbers whose digits in base 11 are in nondecreasing order.",
    references: [{:oeis, :a023752, "https://oeis.org/A023752"}]
def create_sequence_a023752(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023752/2)
end

@doc false
@doc offset: 1, fill_value: -1
def seq_a023752(_idx, last) do
  # fill_value -1 admits 0; scan for the next base-11 plaindrome
  Math.next_number(&Math.is_plaindrome_in_base?(&1, 11), last)
end
@doc """
OEIS Sequence `A023753` - Plaindromes: numbers whose digits in base 12 are in nondecreasing order.
From [OEIS A023753](https://oeis.org/A023753):
> Plaindromes: numbers whose digits in base 12 are in nondecreasing order.
> (Formerly )
**Sequence IDs**: `:a023753`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023753) |> Sequence.take!(69)
[0,1,2,3,4,5,6,7,8,9,10,11,13,14,15,16,17,18,19,20,21,22,23,26,27,28,29,30,31,32,33,34,35,39,40,41,42,43,44,45,46,47,52,53,54,55,56,57,58,59,65,66,67,68,69,70,71,78,79,80,81,82,83,91,92,93,94,95,104]
"""
@doc offset: 1,
    sequence: "Plaindromes: numbers whose digits in base 12 are in nondecreasing order.",
    references: [{:oeis, :a023753, "https://oeis.org/A023753"}]
def create_sequence_a023753(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023753/2)
end

@doc false
@doc offset: 1, fill_value: -1
def seq_a023753(_idx, last) do
  # fill_value -1 admits 0; scan for the next base-12 plaindrome
  Math.next_number(&Math.is_plaindrome_in_base?(&1, 12), last)
end
@doc """
OEIS Sequence `A023754` - Plaindromes: numbers whose digits in base 13 are in nondecreasing order.
From [OEIS A023754](https://oeis.org/A023754):
> Plaindromes: numbers whose digits in base 13 are in nondecreasing order.
> (Formerly )
**Sequence IDs**: `:a023754`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023754) |> Sequence.take!(70)
[0,1,2,3,4,5,6,7,8,9,10,11,12,14,15,16,17,18,19,20,21,22,23,24,25,28,29,30,31,32,33,34,35,36,37,38,42,43,44,45,46,47,48,49,50,51,56,57,58,59,60,61,62,63,64,70,71,72,73,74,75,76,77,84,85,86,87,88,89,90]
"""
@doc offset: 1,
    sequence: "Plaindromes: numbers whose digits in base 13 are in nondecreasing order.",
    references: [{:oeis, :a023754, "https://oeis.org/A023754"}]
def create_sequence_a023754(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023754/2)
end

@doc false
@doc offset: 1, fill_value: -1
def seq_a023754(_idx, last) do
  # fill_value -1 admits 0; scan for the next base-13 plaindrome
  Math.next_number(&Math.is_plaindrome_in_base?(&1, 13), last)
end
@doc """
OEIS Sequence `A023755` - Plaindromes: numbers whose digits in base 14 are in nondecreasing order.
From [OEIS A023755](https://oeis.org/A023755):
> Plaindromes: numbers whose digits in base 14 are in nondecreasing order.
> (Formerly )
**Sequence IDs**: `:a023755`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023755) |> Sequence.take!(70)
[0,1,2,3,4,5,6,7,8,9,10,11,12,13,15,16,17,18,19,20,21,22,23,24,25,26,27,30,31,32,33,34,35,36,37,38,39,40,41,45,46,47,48,49,50,51,52,53,54,55,60,61,62,63,64,65,66,67,68,69,75,76,77,78,79,80,81,82,83,90]
"""
@doc offset: 1,
    sequence: "Plaindromes: numbers whose digits in base 14 are in nondecreasing order.",
    references: [{:oeis, :a023755, "https://oeis.org/A023755"}]
def create_sequence_a023755(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023755/2)
end

@doc false
@doc offset: 1, fill_value: -1
def seq_a023755(_idx, last) do
  # fill_value -1 admits 0; scan for the next base-14 plaindrome
  Math.next_number(&Math.is_plaindrome_in_base?(&1, 14), last)
end
@doc """
OEIS Sequence `A023756` - Plaindromes: numbers whose digits in base 15 are in nondecreasing order.
From [OEIS A023756](https://oeis.org/A023756):
> Plaindromes: numbers whose digits in base 15 are in nondecreasing order.
> (Formerly )
**Sequence IDs**: `:a023756`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023756) |> Sequence.take!(70)
[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,16,17,18,19,20,21,22,23,24,25,26,27,28,29,32,33,34,35,36,37,38,39,40,41,42,43,44,48,49,50,51,52,53,54,55,56,57,58,59,64,65,66,67,68,69,70,71,72,73,74,80,81,82,83,84]
"""
@doc offset: 1,
    sequence: "Plaindromes: numbers whose digits in base 15 are in nondecreasing order.",
    references: [{:oeis, :a023756, "https://oeis.org/A023756"}]
def create_sequence_a023756(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023756/2)
end

@doc false
@doc offset: 1, fill_value: -1
def seq_a023756(_idx, last) do
  # fill_value -1 admits 0; scan for the next base-15 plaindrome
  Math.next_number(&Math.is_plaindrome_in_base?(&1, 15), last)
end
@doc """
OEIS Sequence `A023757` - Plaindromes: numbers whose digits in base 16 are in nondecreasing order.
From [OEIS A023757](https://oeis.org/A023757):
> Plaindromes: numbers whose digits in base 16 are in nondecreasing order.
> (Formerly )
**Sequence IDs**: `:a023757`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023757) |> Sequence.take!(70)
[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,34,35,36,37,38,39,40,41,42,43,44,45,46,47,51,52,53,54,55,56,57,58,59,60,61,62,63,68,69,70,71,72,73,74,75,76,77,78,79]
"""
@doc offset: 1,
    sequence: "Plaindromes: numbers whose digits in base 16 are in nondecreasing order.",
    references: [{:oeis, :a023757, "https://oeis.org/A023757"}]
def create_sequence_a023757(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023757/2)
end

@doc false
@doc offset: 1, fill_value: -1
def seq_a023757(_idx, last) do
  # fill_value -1 admits 0; scan for the next base-16 plaindrome
  Math.next_number(&Math.is_plaindrome_in_base?(&1, 16), last)
end
@doc """
OEIS Sequence `A023697` - Numbers with exactly 6 1's in ternary expansion.
From [OEIS A023697](https://oeis.org/A023697):
> Numbers with exactly 6 1's in ternary expansion.
> (Formerly )
**Sequence IDs**: `:a023697`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a023697) |> Sequence.take!(36)
[364,850,1012,1066,1084,1090,1092,1094,1096,1102,1120,1174,1336,1822,2308,2470,2524,2542,2548,2550,2552,2554,2560,2578,2632,2794,2956,3010,3028,3034,3036,3038,3040,3046,3064,3118]
"""
@doc offset: 1,
    sequence: "Numbers with exactly 6 1's in ternary expansion.",
    references: [{:oeis, :a023697, "https://oeis.org/A023697"}]
def create_sequence_a023697(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a023697/2)
end

@doc false
@doc offset: 1
def seq_a023697(_idx, last) do
  # next value above `last` with exactly six 1's among its ternary digits
  Math.next_number(&(Math.digit_count(&1, [1], base: 3) == 6), last)
end
@doc """
OEIS Sequence `A043321` - Numbers n such that number of 0's in base 3 is 1.
From [OEIS A043321](https://oeis.org/A043321):
> Numbers n such that number of 0's in base 3 is 1.
> (Formerly )
**Sequence IDs**: `:a043321`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a043321) |> Sequence.take!(56)
[3,6,10,11,12,15,19,20,21,24,31,32,34,35,37,38,39,42,46,47,48,51,58,59,61,62,64,65,66,69,73,74,75,78,94,95,97,98,103,104,106,107,112,113,115,116,118,119,120,123,127,128,129,132,139,140]
"""
@doc offset: 1,
    sequence: "Numbers n such that number of 0's in base 3 is 1.",
    references: [{:oeis, :a043321, "https://oeis.org/A043321"}]
def create_sequence_a043321(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a043321/2)
end

@doc false
@doc offset: 1
def seq_a043321(_idx, last) do
  # next value above `last` with exactly one 0 among its ternary digits
  Math.next_number(&(Math.digit_count(&1, [0], base: 3) == 1), last)
end
@doc """
OEIS Sequence `A059015` - Total number of 0's in binary expansions of 0, ..., n.
From [OEIS A059015](https://oeis.org/A059015):
> Total number of 0's in binary expansions of 0, ..., n.
> (Formerly )
**Sequence IDs**: `:a059015`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a059015) |> Sequence.take!(66)
[1,1,2,2,4,5,6,6,9,11,13,14,16,17,18,18,22,25,28,30,33,35,37,38,41,43,45,46,48,49,50,50,55,59,63,66,70,73,76,78,82,85,88,90,93,95,97,98,102,105,108,110,113,115,117,118,121,123,125,126,128,129,130,130,136,141]
"""
@doc offset: 0,
    sequence: "Total number of 0's in binary expansions of 0, ..., n.",
    references: [{:oeis, :a059015, "https://oeis.org/A059015"}]
def create_sequence_a059015(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a059015/1)
end

@doc false
@doc offset: 0
def seq_a059015(idx) do
  # accumulate the zero-digit counts of the binary expansions of 0..idx in one pass
  Enum.reduce(0..idx, 0, fn n, total -> total + Math.digit_count(n, [0], base: 2) end)
end
@doc """
OEIS Sequence `A062756` - Number of 1's in ternary (base 3) expansion of n.
From [OEIS A062756](https://oeis.org/A062756):
> Number of 1's in ternary (base 3) expansion of n.
> (Formerly )
**Sequence IDs**: `:a062756`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a062756) |> Sequence.take!(105)
[0,1,0,1,2,1,0,1,0,1,2,1,2,3,2,1,2,1,0,1,0,1,2,1,0,1,0,1,2,1,2,3,2,1,2,1,2,3,2,3,4,3,2,3,2,1,2,1,2,3,2,1,2,1,0,1,0,1,2,1,0,1,0,1,2,1,2,3,2,1,2,1,0,1,0,1,2,1,0,1,0,1,2,1,2,3,2,1,2,1,2,3,2,3,4,3,2,3,2,1,2,1,2,3,2]
"""
@doc offset: 0,
    sequence: "Number of 1's in ternary (base 3) expansion of n.",
    references: [{:oeis, :a062756, "https://oeis.org/A062756"}]
def create_sequence_a062756(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a062756/1)
end

@doc false
@doc offset: 0
def seq_a062756(idx) do
  # count occurrences of digit 1 in the ternary expansion of idx
  idx |> Math.digit_count([1], base: 3)
end
@doc """
OEIS Sequence `A074940` - Numbers having at least one 2 in their ternary representation.
From [OEIS A074940](https://oeis.org/A074940):
> Numbers having at least one 2 in their ternary representation.
> (Formerly )
**Sequence IDs**: `:a074940`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a074940) |> Sequence.take!(71)
[2,5,6,7,8,11,14,15,16,17,18,19,20,21,22,23,24,25,26,29,32,33,34,35,38,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,83,86,87,88,89,92]
"""
@doc offset: 1,
    sequence: "Numbers having at least one 2 in their ternary representation.",
    references: [{:oeis, :a074940, "https://oeis.org/A074940"}]
def create_sequence_a074940(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a074940/2)
end

@doc false
@doc offset: 1
def seq_a074940(_idx, last) do
  # next value above `last` containing at least one 2 among its ternary digits
  Math.next_number(&(Math.digit_count(&1, [2], base: 3) >= 1), last)
end
@doc """
OEIS Sequence `A077267` - Number of zeros in base 3 expansion of n.
From [OEIS A077267](https://oeis.org/A077267):
> Number of zeros in base 3 expansion of n.
> (Formerly )
**Sequence IDs**: `:a077267`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a077267) |> Sequence.take!(106)
[1,0,0,1,0,0,1,0,0,2,1,1,1,0,0,1,0,0,2,1,1,1,0,0,1,0,0,3,2,2,2,1,1,2,1,1,2,1,1,1,0,0,1,0,0,2,1,1,1,0,0,1,0,0,3,2,2,2,1,1,2,1,1,2,1,1,1,0,0,1,0,0,2,1,1,1,0,0,1,0,0,4,3,3,3,2,2,3,2,2,3,2,2,2,1,1,2,1,1,3,2,2,2,1,1,2]
"""
@doc offset: 0,
    sequence: "Number of zeros in base 3 expansion of n.",
    references: [{:oeis, :a077267, "https://oeis.org/A077267"}]
def create_sequence_a077267(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a077267/1)
end

@doc false
@doc offset: 0
def seq_a077267(idx) do
  # count occurrences of digit 0 in the ternary expansion of idx
  idx |> Math.digit_count([0], base: 3)
end
@doc """
OEIS Sequence `A081603` - Number of 2's in ternary representation of n.
From [OEIS A081603](https://oeis.org/A081603):
> Number of 2's in ternary representation of n.
> (Formerly )
**Sequence IDs**: `:a081603`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a081603) |> Sequence.take!(105)
[0,0,1,0,0,1,1,1,2,0,0,1,0,0,1,1,1,2,1,1,2,1,1,2,2,2,3,0,0,1,0,0,1,1,1,2,0,0,1,0,0,1,1,1,2,1,1,2,1,1,2,2,2,3,1,1,2,1,1,2,2,2,3,1,1,2,1,1,2,2,2,3,2,2,3,2,2,3,3,3,4,0,0,1,0,0,1,1,1,2,0,0,1,0,0,1,1,1,2,1,1,2,1,1,2]
"""
@doc offset: 0,
    sequence: "Number of 2's in ternary representation of n.",
    references: [{:oeis, :a081603, "https://oeis.org/A081603"}]
def create_sequence_a081603(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a081603/1)
end

@doc false
@doc offset: 0
def seq_a081603(idx) do
  # count occurrences of digit 2 in the ternary expansion of idx
  idx |> Math.digit_count([2], base: 3)
end
@doc """
OEIS Sequence `A081605` - Numbers having at least one 0 in their ternary representation.
From [OEIS A081605](https://oeis.org/A081605):
> Numbers having at least one 0 in their ternary representation.
> (Formerly )
**Sequence IDs**: `:a081605`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a081605) |> Sequence.take!(71)
[0,3,6,9,10,11,12,15,18,19,20,21,24,27,28,29,30,31,32,33,34,35,36,37,38,39,42,45,46,47,48,51,54,55,56,57,58,59,60,61,62,63,64,65,66,69,72,73,74,75,78,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100]
"""
@doc offset: 1,
    sequence: "Numbers having at least one 0 in their ternary representation.",
    references: [{:oeis, :a081605, "https://oeis.org/A081605"}]
def create_sequence_a081605(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a081605/2)
end

@doc false
@doc offset: 1, fill_value: -1
def seq_a081605(_idx, last) do
  # fill_value -1 admits 0; match values with at least one ternary 0 digit
  Math.next_number(&(Math.digit_count(&1, [0], base: 3) >= 1), last)
end
@doc """
OEIS Sequence `A081606` - Numbers having at least one 1 in their ternary representation.
From [OEIS A081606](https://oeis.org/A081606):
> Numbers having at least one 1 in their ternary representation.
> (Formerly )
**Sequence IDs**: `:a081606`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a081606) |> Sequence.take!(71)
[1,3,4,5,7,9,10,11,12,13,14,15,16,17,19,21,22,23,25,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,55,57,58,59,61,63,64,65,66,67,68,69,70,71,73,75,76,77,79,81,82,83,84,85,86]
"""
@doc offset: 1,
    sequence: "Numbers having at least one 1 in their ternary representation.",
    references: [{:oeis, :a081606, "https://oeis.org/A081606"}]
def create_sequence_a081606(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a081606/2)
end

@doc false
@doc offset: 1
def seq_a081606(_idx, last) do
  # next value above `last` containing at least one 1 among its ternary digits
  Math.next_number(&(Math.digit_count(&1, [1], base: 3) >= 1), last)
end
@doc """
OEIS Sequence `A097251` - Numbers whose set of base 5 digits is {0,4}.
From [OEIS A097251](https://oeis.org/A097251):
> Numbers whose set of base 5 digits is {0,4}.
> (Formerly )
**Sequence IDs**: `:a097251`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a097251) |> Sequence.take!(43)
[0,4,20,24,100,104,120,124,500,504,520,524,600,604,620,624,2500,2504,2520,2524,2600,2604,2620,2624,3000,3004,3020,3024,3100,3104,3120,3124,12500,12504,12520,12524,12600,12604,12620,12624,13000,13004,13020]
"""
@doc offset: 0,
    sequence: "Numbers whose set of base 5 digits is {0,4}.",
    references: [{:oeis, :a097251, "https://oeis.org/A097251"}]
def create_sequence_a097251(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a097251/2)
end

@doc false
@doc offset: 0, fill_value: -1
def seq_a097251(_idx, last) do
  # digits {0,4} only, i.e. no 1, 2, or 3 anywhere in the base-5 expansion
  Math.next_number(&(Math.digit_count(&1, [1, 2, 3], base: 5) == 0), last)
end
@doc """
OEIS Sequence `A097252` - Numbers whose set of base 6 digits is {0,5}.
From [OEIS A097252](https://oeis.org/A097252):
> Numbers whose set of base 6 digits is {0,5}.
> (Formerly )
**Sequence IDs**: `:a097252`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a097252) |> Sequence.take!(42)
[0,5,30,35,180,185,210,215,1080,1085,1110,1115,1260,1265,1290,1295,6480,6485,6510,6515,6660,6665,6690,6695,7560,7565,7590,7595,7740,7745,7770,7775,38880,38885,38910,38915,39060,39065,39090,39095,39960,39965]
"""
@doc offset: 0,
    sequence: "Numbers whose set of base 6 digits is {0,5}.",
    references: [{:oeis, :a097252, "https://oeis.org/A097252"}]
def create_sequence_a097252(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a097252/2)
end

@doc false
@doc offset: 0, fill_value: -1
def seq_a097252(_idx, last) do
  # digits {0,5} only, i.e. no 1-4 anywhere in the base-6 expansion
  Math.next_number(&(Math.digit_count(&1, [1, 2, 3, 4], base: 6) == 0), last)
end
@doc """
OEIS Sequence `A097253` - Numbers whose set of base 7 digits is {0,6}.
From [OEIS A097253](https://oeis.org/A097253):
> Numbers whose set of base 7 digits is {0,6}.
> (Formerly )
**Sequence IDs**: `:a097253`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a097253) |> Sequence.take!(37)
[0,6,42,48,294,300,336,342,2058,2064,2100,2106,2352,2358,2394,2400,14406,14412,14448,14454,14700,14706,14742,14748,16464,16470,16506,16512,16758,16764,16800,16806,100842,100848,100884,100890,101136]
"""
@doc offset: 1,
    sequence: "Numbers whose set of base 7 digits is {0,6}.",
    references: [{:oeis, :a097253, "https://oeis.org/A097253"}]
def create_sequence_a097253(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a097253/2)
end

@doc false
@doc offset: 1, fill_value: -1
def seq_a097253(_idx, last) do
  # digits {0,6} only, i.e. no 1-5 anywhere in the base-7 expansion
  Math.next_number(&(Math.digit_count(&1, [1, 2, 3, 4, 5], base: 7) == 0), last)
end
@doc """
OEIS Sequence `A097254` - Numbers whose set of base 8 digits is {0,7}.
From [OEIS A097254](https://oeis.org/A097254):
> Numbers whose set of base 8 digits is {0,7}.
> (Formerly )
**Sequence IDs**: `:a097254`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a097254) |> Sequence.take!(37)
[0,7,56,63,448,455,504,511,3584,3591,3640,3647,4032,4039,4088,4095,28672,28679,28728,28735,29120,29127,29176,29183,32256,32263,32312,32319,32704,32711,32760,32767,229376,229383,229432,229439,229824]
"""
@doc offset: 1,
    sequence: "Numbers whose set of base 8 digits is {0,7}.",
    references: [{:oeis, :a097254, "https://oeis.org/A097254"}]
def create_sequence_a097254(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a097254/2)
end

@doc false
@doc offset: 1, fill_value: -1
def seq_a097254(_idx, last) do
  # digits {0,7} only, i.e. no 1-6 anywhere in the base-8 expansion
  Math.next_number(&(Math.digit_count(&1, [1, 2, 3, 4, 5, 6], base: 8) == 0), last)
end
@doc """
OEIS Sequence `A097255` - Numbers whose set of base 9 digits is {0,8}.
From [OEIS A097255](https://oeis.org/A097255):
> Numbers whose set of base 9 digits is {0,8}.
> (Formerly )
**Sequence IDs**: `:a097255`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a097255) |> Sequence.take!(37)
[0,8,72,80,648,656,720,728,5832,5840,5904,5912,6480,6488,6552,6560,52488,52496,52560,52568,53136,53144,53208,53216,58320,58328,58392,58400,58968,58976,59040,59048,472392,472400,472464,472472,473040]
"""
@doc offset: 0,
    sequence: "Numbers whose set of base 9 digits is {0,8}.",
    references: [{:oeis, :a097255, "https://oeis.org/A097255"}]
def create_sequence_a097255(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a097255/2)
end

@doc false
@doc offset: 0, fill_value: -1
def seq_a097255(_idx, last) do
  # digits {0,8} only, i.e. no 1-7 anywhere in the base-9 expansion
  Math.next_number(&(Math.digit_count(&1, [1, 2, 3, 4, 5, 6, 7], base: 9) == 0), last)
end
@doc """
OEIS Sequence `A097256` - Numbers whose set of base 10 digits is {0,9}.
From [OEIS A097256](https://oeis.org/A097256):
> Numbers whose set of base 10 digits is {0,9}.
> (Formerly )
**Sequence IDs**: `:a097256`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a097256) |> Sequence.take!(37)
[0,9,90,99,900,909,990,999,9000,9009,9090,9099,9900,9909,9990,9999,90000,90009,90090,90099,90900,90909,90990,90999,99000,99009,99090,99099,99900,99909,99990,99999,900000,900009,900090,900099,900900]
"""
@doc offset: 0,
     sequence: "Numbers whose set of base 10 digits is {0,9}.",
     references: [{:oeis, :a097256, "https://oeis.org/A097256"}]
def create_sequence_a097256(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a097256/2)
end

@doc false
@doc offset: 0, fill_value: -1
def seq_a097256(_idx, last) do
  # Advance past `last` to the next number written with only the decimal
  # digits 0 and 9, i.e. containing none of the digits 1..8.
  Math.next_number(&(Math.digit_count(&1, Enum.to_list(1..8), base: 10) == 0), last)
end
@doc """
OEIS Sequence `A097257` - Numbers whose set of base 11 digits is {0,A}, where A base 11 = 10 base 10.
From [OEIS A097257](https://oeis.org/A097257):
> Numbers whose set of base 11 digits is {0,A}, where A base 11 = 10 base 10.
> (Formerly )
**Sequence IDs**: `:a097257`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a097257) |> Sequence.take!(33)
[0,10,110,120,1210,1220,1320,1330,13310,13320,13420,13430,14520,14530,14630,14640,146410,146420,146520,146530,147620,147630,147730,147740,159720,159730,159830,159840,160930,160940,161040,161050,1610510]
"""
@doc offset: 0,
     sequence: "Numbers whose set of base 11 digits is {0,A}, where A base 11 = 10 base 10.",
     references: [{:oeis, :a097257, "https://oeis.org/A097257"}]
def create_sequence_a097257(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a097257/2)
end

@doc false
@doc offset: 0, fill_value: -1
def seq_a097257(_idx, last) do
  # Advance past `last` to the next number whose base-11 expansion uses only
  # the digits 0 and A (10), i.e. contains none of the base-11 digits 1..9.
  Math.next_number(&(Math.digit_count(&1, Enum.to_list(1..9), base: 11) == 0), last)
end
@doc """
OEIS Sequence `A097258` - Numbers whose set of base 12 digits is {0,B}, where B base 12 = 11 base 10.
From [OEIS A097258](https://oeis.org/A097258):
> Numbers whose set of base 12 digits is {0,B}, where B base 12 = 11 base 10.
> (Formerly )
**Sequence IDs**: `:a097258`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a097258) |> Sequence.take!(33)
[0,11,132,143,1584,1595,1716,1727,19008,19019,19140,19151,20592,20603,20724,20735,228096,228107,228228,228239,229680,229691,229812,229823,247104,247115,247236,247247,248688,248699,248820,248831,2737152]
"""
@doc offset: 0,
     sequence: "Numbers whose set of base 12 digits is {0,B}, where B base 12 = 11 base 10.",
     references: [{:oeis, :a097258, "https://oeis.org/A097258"}]
def create_sequence_a097258(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a097258/2)
end

@doc false
@doc offset: 0, fill_value: -1
def seq_a097258(_idx, last) do
  # Advance past `last` to the next number whose base-12 expansion uses only
  # the digits 0 and B (11), i.e. contains none of the base-12 digits 1..10.
  Math.next_number(&(Math.digit_count(&1, Enum.to_list(1..10), base: 12) == 0), last)
end
@doc """
OEIS Sequence `A097259` - Numbers whose set of base 13 digits is {0,C}, where C base 13 = 12 base 10.
From [OEIS A097259](https://oeis.org/A097259):
> Numbers whose set of base 13 digits is {0,C}, where C base 13 = 12 base 10.
> (Formerly )
**Sequence IDs**: `:a097259`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a097259) |> Sequence.take!(33)
[0,12,156,168,2028,2040,2184,2196,26364,26376,26520,26532,28392,28404,28548,28560,342732,342744,342888,342900,344760,344772,344916,344928,369096,369108,369252,369264,371124,371136,371280,371292,4455516]
"""
@doc offset: 0,
     sequence: "Numbers whose set of base 13 digits is {0,C}, where C base 13 = 12 base 10.",
     references: [{:oeis, :a097259, "https://oeis.org/A097259"}]
def create_sequence_a097259(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a097259/2)
end

@doc false
@doc offset: 0, fill_value: -1
def seq_a097259(_idx, last) do
  # Advance past `last` to the next number whose base-13 expansion uses only
  # the digits 0 and C (12), i.e. contains none of the base-13 digits 1..11.
  Math.next_number(&(Math.digit_count(&1, Enum.to_list(1..11), base: 13) == 0), last)
end
@doc """
OEIS Sequence `A097260` - Numbers whose set of base 14 digits is {0,D}, where D base 14 = 13 base 10.
From [OEIS A097260](https://oeis.org/A097260):
> Numbers whose set of base 14 digits is {0,D}, where D base 14 = 13 base 10.
> (Formerly )
**Sequence IDs**: `:a097260`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a097260) |> Sequence.take!(33)
[0,13,182,195,2548,2561,2730,2743,35672,35685,35854,35867,38220,38233,38402,38415,499408,499421,499590,499603,501956,501969,502138,502151,535080,535093,535262,535275,537628,537641,537810,537823,6991712]
"""
@doc offset: 1,
     sequence: "Numbers whose set of base 14 digits is {0,D}, where D base 14 = 13 base 10.",
     references: [{:oeis, :a097260, "https://oeis.org/A097260"}]
def create_sequence_a097260(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a097260/2)
end

@doc false
@doc offset: 1, fill_value: -1
def seq_a097260(_idx, last) do
  # Advance past `last` to the next number whose base-14 expansion uses only
  # the digits 0 and D (13), i.e. contains none of the base-14 digits 1..12.
  Math.next_number(&(Math.digit_count(&1, Enum.to_list(1..12), base: 14) == 0), last)
end
@doc """
OEIS Sequence `A097261` - Numbers whose set of base 15 digits is {0,E}, where E base 15 = 14 base 10.
From [OEIS A097261](https://oeis.org/A097261):
> Numbers whose set of base 15 digits is {0,E}, where E base 15 = 14 base 10.
> (Formerly )
**Sequence IDs**: `:a097261`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a097261) |> Sequence.take!(33)
[0,14,210,224,3150,3164,3360,3374,47250,47264,47460,47474,50400,50414,50610,50624,708750,708764,708960,708974,711900,711914,712110,712124,756000,756014,756210,756224,759150,759164,759360,759374,10631250]
"""
@doc offset: 0,
     sequence: "Numbers whose set of base 15 digits is {0,E}, where E base 15 = 14 base 10.",
     references: [{:oeis, :a097261, "https://oeis.org/A097261"}]
def create_sequence_a097261(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a097261/2)
end

@doc false
@doc offset: 0, fill_value: -1
def seq_a097261(_idx, last) do
  # Advance past `last` to the next number whose base-15 expansion uses only
  # the digits 0 and E (14), i.e. contains none of the base-15 digits 1..13.
  Math.next_number(&(Math.digit_count(&1, Enum.to_list(1..13), base: 15) == 0), last)
end
@doc """
OEIS Sequence `A097262` - Numbers whose set of base 16 digits is {0,F}, where F base 16 = 15 base 10.
From [OEIS A097262](https://oeis.org/A097262):
> Numbers whose set of base 16 digits is {0,F}, where F base 16 = 15 base 10.
> (Formerly )
**Sequence IDs**: `:a097262`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a097262) |> Sequence.take!(32)
[0,15,240,255,3840,3855,4080,4095,61440,61455,61680,61695,65280,65295,65520,65535,983040,983055,983280,983295,986880,986895,987120,987135,1044480,1044495,1044720,1044735,1048320,1048335,1048560,1048575]
"""
@doc offset: 0,
     sequence: "Numbers whose set of base 16 digits is {0,F}, where F base 16 = 15 base 10.",
     references: [{:oeis, :a097262, "https://oeis.org/A097262"}]
def create_sequence_a097262(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a097262/2)
end

@doc false
@doc offset: 0, fill_value: -1
def seq_a097262(_idx, last) do
  # Advance past `last` to the next number whose base-16 expansion uses only
  # the digits 0 and F (15), i.e. contains none of the base-16 digits 1..14.
  Math.next_number(&(Math.digit_count(&1, Enum.to_list(1..14), base: 16) == 0), last)
end
@doc """
OEIS Sequence `A102669` - Number of digits >= 2 in decimal representation of n.
From [OEIS A102669](https://oeis.org/A102669):
> Number of digits >= 2 in decimal representation of n.
> (Formerly )
**Sequence IDs**: `:a102669`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a102669) |> Sequence.take!(105)
[0,0,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,2,2,2,1,1,2,2,2,2,2,2,2,2,1,1,2,2,2,2,2,2,2,2,1,1,2,2,2,2,2,2,2,2,1,1,2,2,2,2,2,2,2,2,1,1,2,2,2,2,2,2,2,2,1,1,2,2,2,2,2,2,2,2,1,1,2,2,2,2,2,2,2,2,0,0,1,1,1]
"""
@doc offset: 0,
     sequence: "Number of digits >= 2 in decimal representation of n.",
     references: [{:oeis, :a102669, "https://oeis.org/A102669"}]
def create_sequence_a102669(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a102669/1)
end

@doc false
@doc offset: 0
def seq_a102669(idx) do
  # Count the decimal digits of idx that are >= 2 (any of 2..9).
  Math.digit_count(idx, Enum.to_list(2..9))
end
@doc """
OEIS Sequence `A102670` - Number of digits >= 2 in the decimal representations of all integers from 0 to n.
From [OEIS A102670](https://oeis.org/A102670):
> Number of digits >= 2 in the decimal representations of all integers from 0 to n.
> (Formerly )
**Sequence IDs**: `:a102670`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a102670) |> Sequence.take!(72)
[0,0,1,2,3,4,5,6,7,8,8,8,9,10,11,12,13,14,15,16,17,18,20,22,24,26,28,30,32,34,35,36,38,40,42,44,46,48,50,52,53,54,56,58,60,62,64,66,68,70,71,72,74,76,78,80,82,84,86,88,89,90,92,94,96,98,100,102,104,106,107,108]
"""
@doc offset: 0,
     sequence:
       "Number of digits >= 2 in the decimal representations of all integers from 0 to n.",
     references: [{:oeis, :a102670, "https://oeis.org/A102670"}]
def create_sequence_a102670(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a102670/1)
end

@doc false
@doc offset: 0
def seq_a102670(idx) do
  # Running total of digits >= 2 over the decimal expansions of 0..idx.
  # NOTE(review): recomputes the full prefix sum on every call — O(n) per term.
  Enum.reduce(0..idx, 0, fn d, acc -> acc + Math.digit_count(d, Enum.to_list(2..9)) end)
end
@doc """
OEIS Sequence `A102671` - Number of digits >= 3 in decimal representation of n.
From [OEIS A102671](https://oeis.org/A102671):
> Number of digits >= 3 in decimal representation of n.
> (Formerly )
**Sequence IDs**: `:a102671`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a102671) |> Sequence.take!(105)
[0,0,0,1,1,1,1,1,1,1,0,0,0,1,1,1,1,1,1,1,0,0,0,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,2,2,1,1,1,2,2,2,2,2,2,2,1,1,1,2,2,2,2,2,2,2,1,1,1,2,2,2,2,2,2,2,1,1,1,2,2,2,2,2,2,2,1,1,1,2,2,2,2,2,2,2,1,1,1,2,2,2,2,2,2,2,0,0,0,1,1]
"""
@doc offset: 0,
     sequence: "Number of digits >= 3 in decimal representation of n.",
     references: [{:oeis, :a102671, "https://oeis.org/A102671"}]
def create_sequence_a102671(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a102671/1)
end

@doc false
@doc offset: 0
def seq_a102671(idx) do
  # Count the decimal digits of idx that are >= 3 (any of 3..9).
  Math.digit_count(idx, Enum.to_list(3..9))
end
@doc """
OEIS Sequence `A102672` - Number of digits >= 3 in the decimal representations of all integers from 0 to n.
From [OEIS A102672](https://oeis.org/A102672):
> Number of digits >= 3 in the decimal representations of all integers from 0 to n.
> (Formerly )
**Sequence IDs**: `:a102672`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a102672) |> Sequence.take!(74)
[0,0,0,1,2,3,4,5,6,7,7,7,7,8,9,10,11,12,13,14,14,14,14,15,16,17,18,19,20,21,22,23,24,26,28,30,32,34,36,38,39,40,41,43,45,47,49,51,53,55,56,57,58,60,62,64,66,68,70,72,73,74,75,77,79,81,83,85,87,89,90,91,92,94]
"""
@doc offset: 0,
     sequence:
       "Number of digits >= 3 in the decimal representations of all integers from 0 to n.",
     references: [{:oeis, :a102672, "https://oeis.org/A102672"}]
def create_sequence_a102672(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a102672/1)
end

@doc false
@doc offset: 0
def seq_a102672(idx) do
  # Running total of digits >= 3 over the decimal expansions of 0..idx.
  # NOTE(review): recomputes the full prefix sum on every call — O(n) per term.
  Enum.reduce(0..idx, 0, fn d, acc -> acc + Math.digit_count(d, Enum.to_list(3..9)) end)
end
@doc """
OEIS Sequence `A102673` - Number of digits >= 4 in decimal representation of n.
From [OEIS A102673](https://oeis.org/A102673):
> Number of digits >= 4 in decimal representation of n.
> (Formerly )
**Sequence IDs**: `:a102673`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a102673) |> Sequence.take!(105)
[0,0,0,0,1,1,1,1,1,1,0,0,0,0,1,1,1,1,1,1,0,0,0,0,1,1,1,1,1,1,0,0,0,0,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,2,1,1,1,1,2,2,2,2,2,2,1,1,1,1,2,2,2,2,2,2,1,1,1,1,2,2,2,2,2,2,1,1,1,1,2,2,2,2,2,2,1,1,1,1,2,2,2,2,2,2,0,0,0,0,1]
"""
@doc offset: 0,
     sequence: "Number of digits >= 4 in decimal representation of n.",
     references: [{:oeis, :a102673, "https://oeis.org/A102673"}]
def create_sequence_a102673(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a102673/1)
end

@doc false
@doc offset: 0
def seq_a102673(idx) do
  # Count the decimal digits of idx that are >= 4 (any of 4..9).
  Math.digit_count(idx, Enum.to_list(4..9))
end
@doc """
OEIS Sequence `A102674` - Number of digits >= 4 in the decimal representations of all integers from 0 to n.
From [OEIS A102674](https://oeis.org/A102674):
> Number of digits >= 4 in the decimal representations of all integers from 0 to n.
> (Formerly )
**Sequence IDs**: `:a102674`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a102674) |> Sequence.take!(75)
[0,0,0,0,1,2,3,4,5,6,6,6,6,6,7,8,9,10,11,12,12,12,12,12,13,14,15,16,17,18,18,18,18,18,19,20,21,22,23,24,25,26,27,28,30,32,34,36,38,40,41,42,43,44,46,48,50,52,54,56,57,58,59,60,62,64,66,68,70,72,73,74,75,76,78]
"""
@doc offset: 0,
     sequence:
       "Number of digits >= 4 in the decimal representations of all integers from 0 to n.",
     references: [{:oeis, :a102674, "https://oeis.org/A102674"}]
def create_sequence_a102674(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a102674/1)
end

@doc false
@doc offset: 0
def seq_a102674(idx) do
  # Running total of digits >= 4 over the decimal expansions of 0..idx.
  # NOTE(review): recomputes the full prefix sum on every call — O(n) per term.
  Enum.reduce(0..idx, 0, fn d, acc -> acc + Math.digit_count(d, Enum.to_list(4..9)) end)
end
@doc """
OEIS Sequence `A102675` - Number of digits >= 5 in decimal representation of n.
From [OEIS A102675](https://oeis.org/A102675):
> Number of digits >= 5 in decimal representation of n.
> (Formerly )
**Sequence IDs**: `:a102675`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a102675) |> Sequence.take!(105)
[0,0,0,0,0,1,1,1,1,1,0,0,0,0,0,1,1,1,1,1,0,0,0,0,0,1,1,1,1,1,0,0,0,0,0,1,1,1,1,1,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,1,1,1,1,1,2,2,2,2,2,1,1,1,1,1,2,2,2,2,2,1,1,1,1,1,2,2,2,2,2,1,1,1,1,1,2,2,2,2,2,0,0,0,0,0]
"""
@doc offset: 0,
     sequence: "Number of digits >= 5 in decimal representation of n.",
     references: [{:oeis, :a102675, "https://oeis.org/A102675"}]
def create_sequence_a102675(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a102675/1)
end

@doc false
@doc offset: 0
def seq_a102675(idx) do
  # Count the decimal digits of idx that are >= 5 (any of 5..9).
  Math.digit_count(idx, Enum.to_list(5..9))
end
@doc """
OEIS Sequence `A102676` - Number of digits >= 5 in the decimal representations of all integers from 0 to n.
From [OEIS A102676](https://oeis.org/A102676):
> Number of digits >= 5 in the decimal representations of all integers from 0 to n.
> (Formerly )
**Sequence IDs**: `:a102676`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a102676) |> Sequence.take!(76)
[0,0,0,0,0,1,2,3,4,5,5,5,5,5,5,6,7,8,9,10,10,10,10,10,10,11,12,13,14,15,15,15,15,15,15,16,17,18,19,20,20,20,20,20,20,21,22,23,24,25,26,27,28,29,30,32,34,36,38,40,41,42,43,44,45,47,49,51,53,55,56,57,58,59,60,62]
"""
@doc offset: 0,
     sequence:
       "Number of digits >= 5 in the decimal representations of all integers from 0 to n.",
     references: [{:oeis, :a102676, "https://oeis.org/A102676"}]
def create_sequence_a102676(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a102676/1)
end

@doc false
@doc offset: 0
def seq_a102676(idx) do
  # Running total of digits >= 5 over the decimal expansions of 0..idx.
  # NOTE(review): recomputes the full prefix sum on every call — O(n) per term.
  Enum.reduce(0..idx, 0, fn d, acc -> acc + Math.digit_count(d, Enum.to_list(5..9)) end)
end
@doc """
OEIS Sequence `A102677` - Number of digits >= 6 in decimal representation of n.
From [OEIS A102677](https://oeis.org/A102677):
> Number of digits >= 6 in decimal representation of n.
> (Formerly )
**Sequence IDs**: `:a102677`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a102677) |> Sequence.take!(105)
[0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,2,2,2,2,1,1,1,1,1,1,2,2,2,2,1,1,1,1,1,1,2,2,2,2,1,1,1,1,1,1,2,2,2,2,0,0,0,0,0]
"""
@doc offset: 0,
     sequence: "Number of digits >= 6 in decimal representation of n.",
     references: [{:oeis, :a102677, "https://oeis.org/A102677"}]
def create_sequence_a102677(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a102677/1)
end

@doc false
@doc offset: 0
def seq_a102677(idx) do
  # Count the decimal digits of idx that are >= 6 (any of 6..9).
  Math.digit_count(idx, Enum.to_list(6..9))
end
@doc """
OEIS Sequence `A102678` - Number of digits >= 6 in the decimal representations of all integers from 0 to n.
From [OEIS A102678](https://oeis.org/A102678):
> Number of digits >= 6 in the decimal representations of all integers from 0 to n.
> (Formerly )
**Sequence IDs**: `:a102678`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a102678) |> Sequence.take!(78)
[0,0,0,0,0,0,1,2,3,4,4,4,4,4,4,4,5,6,7,8,8,8,8,8,8,8,9,10,11,12,12,12,12,12,12,12,13,14,15,16,16,16,16,16,16,16,17,18,19,20,20,20,20,20,20,20,21,22,23,24,25,26,27,28,29,30,32,34,36,38,39,40,41,42,43,44,46,48]
"""
@doc offset: 0,
     sequence:
       "Number of digits >= 6 in the decimal representations of all integers from 0 to n.",
     references: [{:oeis, :a102678, "https://oeis.org/A102678"}]
def create_sequence_a102678(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a102678/1)
end

@doc false
@doc offset: 0
def seq_a102678(idx) do
  # Running total of digits >= 6 over the decimal expansions of 0..idx.
  # NOTE(review): recomputes the full prefix sum on every call — O(n) per term.
  Enum.reduce(0..idx, 0, fn d, acc -> acc + Math.digit_count(d, Enum.to_list(6..9)) end)
end
@doc """
OEIS Sequence `A102679` - Number of digits >= 7 in decimal representation of n.
From [OEIS A102679](https://oeis.org/A102679):
> Number of digits >= 7 in decimal representation of n.
> (Formerly )
**Sequence IDs**: `:a102679`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a102679) |> Sequence.take!(105)
[0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1,1,2,2,2,0,0,0,0,0]
"""
@doc offset: 0,
     sequence: "Number of digits >= 7 in decimal representation of n.",
     references: [{:oeis, :a102679, "https://oeis.org/A102679"}]
def create_sequence_a102679(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a102679/1)
end

@doc false
@doc offset: 0
def seq_a102679(idx) do
  # Count the decimal digits of idx that are >= 7 (any of 7..9).
  Math.digit_count(idx, Enum.to_list(7..9))
end
@doc """
OEIS Sequence `A102680` - Number of digits >= 7 in the decimal representations of all integers from 0 to n.
From [OEIS A102680](https://oeis.org/A102680):
> Number of digits >= 7 in the decimal representations of all integers from 0 to n.
> (Formerly )
**Sequence IDs**: `:a102680`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a102680) |> Sequence.take!(82)
[0,0,0,0,0,0,0,1,2,3,3,3,3,3,3,3,3,4,5,6,6,6,6,6,6,6,6,7,8,9,9,9,9,9,9,9,9,10,11,12,12,12,12,12,12,12,12,13,14,15,15,15,15,15,15,15,15,16,17,18,18,18,18,18,18,18,18,19,20,21,22,23,24,25,26,27,28,30,32,34,35,36]
"""
@doc offset: 0,
     sequence:
       "Number of digits >= 7 in the decimal representations of all integers from 0 to n.",
     references: [{:oeis, :a102680, "https://oeis.org/A102680"}]
def create_sequence_a102680(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a102680/1)
end

@doc false
@doc offset: 0
def seq_a102680(idx) do
  # Running total of digits >= 7 over the decimal expansions of 0..idx.
  # NOTE(review): recomputes the full prefix sum on every call — O(n) per term.
  Enum.reduce(0..idx, 0, fn d, acc -> acc + Math.digit_count(d, Enum.to_list(7..9)) end)
end
@doc """
OEIS Sequence `A102681` - Number of digits >= 8 in decimal representation of n.
From [OEIS A102681](https://oeis.org/A102681):
> Number of digits >= 8 in decimal representation of n.
> (Formerly )
**Sequence IDs**: `:a102681`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a102681) |> Sequence.take!(105)
[0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,2,2,1,1,1,1,1,1,1,1,2,2,0,0,0,0,0]
"""
@doc offset: 0,
     sequence: "Number of digits >= 8 in decimal representation of n.",
     references: [{:oeis, :a102681, "https://oeis.org/A102681"}]
def create_sequence_a102681(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a102681/1)
end

@doc false
@doc offset: 0
def seq_a102681(idx) do
  # Count the decimal digits of idx that are >= 8 (either 8 or 9).
  Math.digit_count(idx, Enum.to_list(8..9))
end
@doc """
OEIS Sequence `A102682` - Number of digits >= 8 in the decimal representations of all integers from 0 to n.
From [OEIS A102682](https://oeis.org/A102682):
> Number of digits >= 8 in the decimal representations of all integers from 0 to n.
> (Formerly )
**Sequence IDs**: `:a102682`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a102682) |> Sequence.take!(86)
[0,0,0,0,0,0,0,0,1,2,2,2,2,2,2,2,2,2,3,4,4,4,4,4,4,4,4,4,5,6,6,6,6,6,6,6,6,6,7,8,8,8,8,8,8,8,8,8,9,10,10,10,10,10,10,10,10,10,11,12,12,12,12,12,12,12,12,12,13,14,14,14,14,14,14,14,14,14,15,16,17,18,19,20,21,22]
"""
@doc offset: 0,
     sequence:
       "Number of digits >= 8 in the decimal representations of all integers from 0 to n.",
     references: [{:oeis, :a102682, "https://oeis.org/A102682"}]
def create_sequence_a102682(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a102682/1)
end

@doc false
@doc offset: 0
def seq_a102682(idx) do
  # Running total of digits >= 8 over the decimal expansions of 0..idx.
  # NOTE(review): recomputes the full prefix sum on every call — O(n) per term.
  Enum.reduce(0..idx, 0, fn d, acc -> acc + Math.digit_count(d, Enum.to_list(8..9)) end)
end
@doc """
OEIS Sequence `A102683` - Number of digits 9 in decimal representation of n.
From [OEIS A102683](https://oeis.org/A102683):
> Number of digits 9 in decimal representation of n.
> (Formerly )
**Sequence IDs**: `:a102683`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a102683) |> Sequence.take!(105)
[0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,2,0,0,0,0,0]
"""
@doc offset: 0,
     sequence: "Number of digits 9 in decimal representation of n.",
     references: [{:oeis, :a102683, "https://oeis.org/A102683"}]
def create_sequence_a102683(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a102683/1)
end

@doc false
@doc offset: 0
def seq_a102683(idx) do
  # Count how many 9s appear in the decimal expansion of idx.
  Math.digit_count(idx, Enum.to_list(9..9))
end
@doc """
OEIS Sequence `A102684` - Number of times the digit 9 appears in the decimal representations of all integers from 0 to n.
From [OEIS A102684](https://oeis.org/A102684):
> Number of times the digit 9 appears in the decimal representations of all integers from 0 to n.
> (Formerly )
**Sequence IDs**: `:a102684`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a102684) |> Sequence.take!(100)
[0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,2,2,2,2,2,3,3,3,3,3,3,3,3,3,3,4,4,4,4,4,4,4,4,4,4,5,5,5,5,5,5,5,5,5,5,6,6,6,6,6,6,6,6,6,6,7,7,7,7,7,7,7,7,7,7,8,8,8,8,8,8,8,8,8,8,9,10,11,12,13,14,15,16,17,18,20]
"""
@doc offset: 0,
     sequence:
       "Number of times the digit 9 appears in the decimal representations of all integers from 0 to n.",
     references: [{:oeis, :a102684, "https://oeis.org/A102684"}]
def create_sequence_a102684(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a102684/1)
end

@doc false
@doc offset: 0
def seq_a102684(idx) do
  # Running total of 9-digit occurrences over the decimal expansions of 0..idx.
  # NOTE(review): recomputes the full prefix sum on every call — O(n) per term.
  Enum.reduce(0..idx, 0, fn d, acc -> acc + Math.digit_count(d, [9]) end)
end
@doc """
OEIS Sequence `A006886` - Kaprekar numbers: positive numbers n such that n = q+r and n^2 = q*10^m+r, for some m >= 1, q >= 0 and 0 <= r < 10^m, with n != 10^a, a >= 1.
From [OEIS A006886](https://oeis.org/A006886):
> Kaprekar numbers: positive numbers n such that n = q+r and n^2 = q*10^m+r, for some m >= 1, q >= 0 and 0 <= r < 10^m, with n != 10^a, a >= 1.
> (Formerly M4625)
**Sequence IDs**: `:a006886`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a006886) |> Sequence.take!(17)
[1,9,45,55,99,297,703,999,2223,2728,4879,4950,5050,5292,7272,7777,9999]
"""
@doc offset: 1,
     sequence:
       "Kaprekar numbers: positive numbers n such that n = q+r and n^2 = q*10^m+r, for some m >= 1, q >= 0 and 0 <= r < 10^m, with n != 10^a, a >= 1.",
     references: [{:oeis, :a006886, "https://oeis.org/A006886"}]
def create_sequence_a006886(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a006886/2)
end

@doc false
@doc offset: 1, fill_value: 0
def seq_a006886(_idx, last) do
  # Scan upward from `last` for the next Kaprekar number.
  Math.next_number(fn n -> Predicates.is_kaprekar_number?(n) end, last)
end
@doc """
OEIS Sequence `A053816` - Another version of the Kaprekar numbers (A006886): n such that n=q+r and n^2=q*10^m+r, for some m >= 1, q>=0 and 0<=r<10^m, with n != 10^a, a>=1 and n an m-digit number.
From [OEIS A053816](https://oeis.org/A053816):
> Another version of the Kaprekar numbers (A006886): n such that n=q+r and n^2=q*10^m+r, for some m >= 1, q>=0 and 0<=r<10^m, with n != 10^a, a>=1 and n an m-digit number.
> (Formerly )
**Sequence IDs**: `:a053816`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a053816) |> Sequence.take!(15)
[1,9,45,55,99,297,703,999,2223,2728,4950,5050,7272,7777,9999]
"""
@doc offset: 1,
     sequence:
       "Another version of the Kaprekar numbers (A006886): n such that n=q+r and n^2=q*10^m+r, for some m >= 1, q>=0 and 0<=r<10^m, with n != 10^a, a>=1 and n an m-digit number.",
     references: [{:oeis, :a053816, "https://oeis.org/A053816"}]
def create_sequence_a053816(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a053816/2)
end

@doc false
@doc offset: 1, fill_value: 0
def seq_a053816(_idx, last) do
  # Scan upward from `last` for the next strict-definition Kaprekar number.
  Math.next_number(fn n -> Predicates.is_kaprekar_strict_number?(n) end, last)
end
@doc """
OEIS Sequence `A005188` - Armstrong (or pluperfect, or Plus Perfect, or narcissistic) numbers: m-digit positive numbers equal to sum of the m-th powers of their digits.
From [OEIS A005188](https://oeis.org/A005188):
> Armstrong (or pluperfect, or Plus Perfect, or narcissistic) numbers: m-digit positive numbers equal to sum of the m-th powers of their digits.
> (Formerly M0488)
**Sequence IDs**: `:a005188`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a005188) |> Sequence.take!(16)
[1,2,3,4,5,6,7,8,9,153,370,371,407,1634,8208,9474]
"""
@doc offset: 1,
     sequence:
       "Armstrong (or pluperfect, or Plus Perfect, or narcissistic) numbers: m-digit positive numbers equal to sum of the m-th powers of their digits.",
     references: [{:oeis, :a005188, "https://oeis.org/A005188"}]
def create_sequence_a005188(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a005188/2)
end

@doc false
@doc offset: 1, fill_value: 0
def seq_a005188(_idx, last) do
  # Scan upward from `last` for the next narcissistic (Armstrong) number.
  Math.next_number(fn n -> Predicates.is_narcissistic_number?(n) end, last)
end
@doc """
OEIS Sequence `A010353` - Base-9 Armstrong or narcissistic numbers (written in base 10).
From [OEIS A010353](https://oeis.org/A010353):
> Base-9 Armstrong or narcissistic numbers (written in base 10).
> (Formerly )
**Sequence IDs**: `:a010353`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a010353) |> Sequence.take!(18)
[1,2,3,4,5,6,7,8,41,50,126,127,468,469,1824,8052,8295,9857]
"""
@doc offset: 1,
     sequence: "Base-9 Armstrong or narcissistic numbers (written in base 10).",
     references: [{:oeis, :a010353, "https://oeis.org/A010353"}]
def create_sequence_a010353(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a010353/2)
end

@doc false
@doc offset: 1, fill_value: 0
def seq_a010353(_idx, last) do
  # Scan upward from `last` for the next number that is narcissistic in base 9.
  Math.next_number(&Math.is_narcissistic_in_base?(&1, 9), last)
end
@doc """
OEIS Sequence `A010354` - Base-8 Armstrong or narcissistic numbers (written in base 10).
From [OEIS A010354](https://oeis.org/A010354):
> Base-8 Armstrong or narcissistic numbers (written in base 10).
> (Formerly )
**Sequence IDs**: `:a010354`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a010354) |> Sequence.take!(14)
[1,2,3,4,5,6,7,20,52,92,133,307,432,433]
"""
@doc offset: 1,
     sequence: "Base-8 Armstrong or narcissistic numbers (written in base 10).",
     references: [{:oeis, :a010354, "https://oeis.org/A010354"}]
def create_sequence_a010354(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a010354/2)
end

@doc false
@doc offset: 1, fill_value: 0
def seq_a010354(_idx, last) do
  # Scan upward from `last` for the next number that is narcissistic in base 8.
  Math.next_number(&Math.is_narcissistic_in_base?(&1, 8), last)
end
@doc """
OEIS Sequence `A010350` - Base-7 Armstrong or narcissistic numbers (written in base 10).
From [OEIS A010350](https://oeis.org/A010350):
> Base-7 Armstrong or narcissistic numbers (written in base 10).
> (Formerly )
**Sequence IDs**: `:a010350`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a010350) |> Sequence.take!(20)
[1,2,3,4,5,6,10,25,32,45,133,134,152,250,3190,3222,3612,3613,4183,9286]
"""
@doc offset: 1,
     sequence: "Base-7 Armstrong or narcissistic numbers (written in base 10).",
     references: [{:oeis, :a010350, "https://oeis.org/A010350"}]
def create_sequence_a010350(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a010350/2)
end

@doc false
@doc offset: 1, fill_value: 0
def seq_a010350(_idx, last) do
  # Scan upward from `last` for the next number that is narcissistic in base 7.
  Math.next_number(&Math.is_narcissistic_in_base?(&1, 7), last)
end
@doc """
OEIS Sequence `A010348` - Base-6 Armstrong or narcissistic numbers (written in base 10).
From [OEIS A010348](https://oeis.org/A010348):
> Base-6 Armstrong or narcissistic numbers (written in base 10).
> (Formerly )
**Sequence IDs**: `:a010348`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a010348) |> Sequence.take!(13)
[1,2,3,4,5,99,190,2292,2293,2324,3432,3433,6197]
"""
@doc offset: 1,
     sequence: "Base-6 Armstrong or narcissistic numbers (written in base 10).",
     references: [{:oeis, :a010348, "https://oeis.org/A010348"}]
def create_sequence_a010348(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a010348/2)
end

@doc false
@doc offset: 1, fill_value: 0
def seq_a010348(_idx, last) do
  # Scan upward from `last` for the next number that is narcissistic in base 6.
  Math.next_number(&Math.is_narcissistic_in_base?(&1, 6), last)
end
@doc """
OEIS Sequence `A010346` - Base-5 Armstrong or narcissistic numbers (written in base 10).
From [OEIS A010346](https://oeis.org/A010346):
> Base-5 Armstrong or narcissistic numbers (written in base 10).
> (Formerly )
**Sequence IDs**: `:a010346`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a010346) |> Sequence.take!(14)
[1,2,3,4,13,18,28,118,289,353,419,4890,4891,9113]
"""
@doc offset: 1,
     sequence: "Base-5 Armstrong or narcissistic numbers (written in base 10).",
     references: [{:oeis, :a010346, "https://oeis.org/A010346"}]
def create_sequence_a010346(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a010346/2)
end

@doc false
@doc offset: 1, fill_value: 0
def seq_a010346(_idx, last) do
  # Scan upward from `last` for the next number that is narcissistic in base 5.
  Math.next_number(&Math.is_narcissistic_in_base?(&1, 5), last)
end
@doc """
OEIS Sequence `A010344` - Base-4 Armstrong or narcissistic numbers (written in base 10).
From [OEIS A010344](https://oeis.org/A010344):
> Base-4 Armstrong or narcissistic numbers (written in base 10).
> (Formerly )
**Sequence IDs**: `:a010344`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a010344) |> Sequence.take!(11)
[1,2,3,28,29,35,43,55,62,83,243]
"""
@doc offset: 1,
     sequence: "Base-4 Armstrong or narcissistic numbers (written in base 10).",
     references: [{:oeis, :a010344, "https://oeis.org/A010344"}]
def create_sequence_a010344(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a010344/2)
end

@doc false
@doc offset: 1, fill_value: 0
def seq_a010344(_idx, last) do
  # Scan upward from `last` for the next number that is narcissistic in base 4.
  Math.next_number(&Math.is_narcissistic_in_base?(&1, 4), last)
end
@doc """
OEIS Sequence `A161948` - Base-11 Armstrong or narcissistic numbers (written in base 10).

From [OEIS A161948](https://oeis.org/A161948):

> Base-11 Armstrong or narcissistic numbers (written in base 10).
> (Formerly )

**Sequence IDs**: `:a161948`

**Finite**: False

**Offset**: 1

## Example

    iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a161948) |> Sequence.take!(18)
    [1,2,3,4,5,6,7,8,9,10,61,72,126,370,855,1161,1216,1280]

"""
@doc offset: 1,
     sequence: "Base-11 Armstrong or narcissistic numbers (written in base 10).",
     references: [{:oeis, :a161948, "https://oeis.org/A161948"}]
def create_sequence_a161948(_opts) do
  sequence_for_function(&__MODULE__.seq_a161948/2)
end

@doc false
@doc offset: 1, fill_value: 0
def seq_a161948(_idx, last) do
  # Advance from `last` to the next base-11 narcissistic number.
  Math.next_number(&Math.is_narcissistic_in_base?(&1, 11), last)
end

@doc """
OEIS Sequence `A161949` - Base-12 Armstrong or narcissistic numbers (written in base 10).

From [OEIS A161949](https://oeis.org/A161949):

> Base-12 Armstrong or narcissistic numbers (written in base 10).
> (Formerly )

**Sequence IDs**: `:a161949`

**Finite**: False

**Offset**: 1

## Example

    iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a161949) |> Sequence.take!(16)
    [1,2,3,4,5,6,7,8,9,10,11,29,125,811,944,1539]

"""
@doc offset: 1,
     sequence: "Base-12 Armstrong or narcissistic numbers (written in base 10).",
     references: [{:oeis, :a161949, "https://oeis.org/A161949"}]
def create_sequence_a161949(_opts) do
  sequence_for_function(&__MODULE__.seq_a161949/2)
end

@doc false
@doc offset: 1, fill_value: 0
def seq_a161949(_idx, last) do
  # Advance from `last` to the next base-12 narcissistic number.
  Math.next_number(&Math.is_narcissistic_in_base?(&1, 12), last)
end

@doc """
OEIS Sequence `A161950` - Base-13 Armstrong or narcissistic numbers (written in base 10).

From [OEIS A161950](https://oeis.org/A161950):

> Base-13 Armstrong or narcissistic numbers (written in base 10).
> (Formerly )

**Sequence IDs**: `:a161950`

**Finite**: False

**Offset**: 1

## Example

    iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a161950) |> Sequence.take!(23)
    [1,2,3,4,5,6,7,8,9,10,11,12,17,45,85,98,136,160,793,794,854,1968,8194]

"""
@doc offset: 1,
     sequence: "Base-13 Armstrong or narcissistic numbers (written in base 10).",
     references: [{:oeis, :a161950, "https://oeis.org/A161950"}]
def create_sequence_a161950(_opts) do
  sequence_for_function(&__MODULE__.seq_a161950/2)
end

@doc false
@doc offset: 1, fill_value: 0
def seq_a161950(_idx, last) do
  # Advance from `last` to the next base-13 narcissistic number.
  Math.next_number(&Math.is_narcissistic_in_base?(&1, 13), last)
end

@doc """
OEIS Sequence `A161951` - Base-14 Armstrong or narcissistic numbers (written in base 10).

From [OEIS A161951](https://oeis.org/A161951):

> Base-14 Armstrong or narcissistic numbers (written in base 10).
> (Formerly )

**Sequence IDs**: `:a161951`

**Finite**: False

**Offset**: 1

## Example

    iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a161951) |> Sequence.take!(15)
    [1,2,3,4,5,6,7,8,9,10,11,12,13,244,793]

"""
@doc offset: 1,
     sequence: "Base-14 Armstrong or narcissistic numbers (written in base 10).",
     references: [{:oeis, :a161951, "https://oeis.org/A161951"}]
def create_sequence_a161951(_opts) do
  sequence_for_function(&__MODULE__.seq_a161951/2)
end

@doc false
@doc offset: 1, fill_value: 0
def seq_a161951(_idx, last) do
  # Advance from `last` to the next base-14 narcissistic number.
  Math.next_number(&Math.is_narcissistic_in_base?(&1, 14), last)
end

@doc """
OEIS Sequence `A161952` - Base-15 Armstrong or narcissistic numbers (written in base 10).

From [OEIS A161952](https://oeis.org/A161952):

> Base-15 Armstrong or narcissistic numbers (written in base 10).
> (Formerly )

**Sequence IDs**: `:a161952`

**Finite**: False

**Offset**: 1

## Example

    iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a161952) |> Sequence.take!(19)
    [1,2,3,4,5,6,7,8,9,10,11,12,13,14,113,128,2755,3052,5059]

"""
@doc offset: 1,
     sequence: "Base-15 Armstrong or narcissistic numbers (written in base 10).",
     references: [{:oeis, :a161952, "https://oeis.org/A161952"}]
def create_sequence_a161952(_opts) do
  sequence_for_function(&__MODULE__.seq_a161952/2)
end

@doc false
@doc offset: 1, fill_value: 0
def seq_a161952(_idx, last) do
  # Advance from `last` to the next base-15 narcissistic number.
  Math.next_number(&Math.is_narcissistic_in_base?(&1, 15), last)
end

@doc """
OEIS Sequence `A161953` - Base-16 Armstrong or narcissistic numbers (written in base 10).

From [OEIS A161953](https://oeis.org/A161953):

> Base-16 Armstrong or narcissistic numbers (written in base 10).
> (Formerly )

**Sequence IDs**: `:a161953`

**Finite**: False

**Offset**: 1

## Example

    iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a161953) |> Sequence.take!(34)
    [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,342,371,520,584,645,1189,1456,1457,1547,1611,2240,2241,2458,2729,2755,3240,3689,3744,3745]

"""
@doc offset: 1,
     sequence: "Base-16 Armstrong or narcissistic numbers (written in base 10).",
     references: [{:oeis, :a161953, "https://oeis.org/A161953"}]
def create_sequence_a161953(_opts) do
  sequence_for_function(&__MODULE__.seq_a161953/2)
end

@doc false
@doc offset: 1, fill_value: 0
def seq_a161953(_idx, last) do
  # Advance from `last` to the next base-16 narcissistic number.
  Math.next_number(&Math.is_narcissistic_in_base?(&1, 16), last)
end
@doc """
OEIS Sequence `A114904` - Sorted numbers of digits of any base-10 narcissistic number.

From [OEIS A114904](https://oeis.org/A114904):

> Sorted numbers of digits of any base-10 narcissistic number.
> (Formerly )

**Sequence IDs**: `:a114904`

**Finite**: False

**Offset**: 1

## Example

    iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a114904) |> Sequence.take!(29)
    [1,3,4,5,6,7,8,9,10,11,14,16,17,19,20,21,23,24,25,27,29,31,32,33,34,35,37,38,39]

"""
@doc offset: 1,
     sequence: "Sorted numbers of digits of any base-10 narcissistic number.",
     references: [{:oeis, :a114904, "https://oeis.org/A114904"}]
def create_sequence_a114904(_opts) do
  # Served from the precomputed @data_a114904 list (defined elsewhere in
  # this module) rather than computed on demand.
  sequence_for_list(@data_a114904)
end

@doc """
OEIS Sequence `A014576` - Smallest n-digit narcissistic (or Armstrong) number: smallest n-digit number equal to sum of n-th powers of its digits (or 0 if no such number exists).

From [OEIS A014576](https://oeis.org/A014576):

> Smallest n-digit narcissistic (or Armstrong) number: smallest n-digit number equal to sum of n-th powers of its digits (or 0 if no such number exists).
> (Formerly )

**Sequence IDs**: `:a014576`

**Finite**: False

**Offset**: 1

## Example

    iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a014576) |> Sequence.take!(22)
    [1,0,153,1634,54748,548834,1741725,24678050,146511208,4679307774,32164049650,0,0,28116440335967,0,4338281769391370,21897142587612075,0,1517841543307505039,63105425988599693916,128468643043731391252,0]

"""
@doc offset: 1,
     sequence:
       "Smallest n-digit narcissistic (or Armstrong) number: smallest n-digit number equal to sum of n-th powers of its digits (or 0 if no such number exists).",
     references: [{:oeis, :a014576, "https://oeis.org/A014576"}]
def create_sequence_a014576(_opts) do
  # Served from the precomputed @data_a014576 list (defined elsewhere in
  # this module).
  sequence_for_list(@data_a014576)
end

@doc """
OEIS Sequence `A046253` - Equal to the sum of its nonzero digits raised to its own power.

From [OEIS A046253](https://oeis.org/A046253):

> Equal to the sum of its nonzero digits raised to its own power.
> (Formerly )

**Sequence IDs**: `:a046253`

**Finite**: False

**Offset**: 1

## Example

    iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a046253) |> Sequence.take!(4)
    [0,1,3435,438579088]

"""
@doc offset: 1,
     sequence: "Equal to the sum of its nonzero digits raised to its own power.",
     references: [{:oeis, :a046253, "https://oeis.org/A046253"}]
def create_sequence_a046253(_opts) do
  # Served from the precomputed @data_a046253 list (defined elsewhere in
  # this module).
  sequence_for_list(@data_a046253)
end
@doc """
OEIS Sequence `A001101` - Moran numbers: n such that (n / sum of digits of n) is prime.

From [OEIS A001101](https://oeis.org/A001101):

> Moran numbers: n such that (n / sum of digits of n) is prime.
> (Formerly )

**Sequence IDs**: `:a001101`

**Finite**: False

**Offset**: 1

## Example

    iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a001101) |> Sequence.take!(45)
    [18,21,27,42,45,63,84,111,114,117,133,152,153,156,171,190,195,198,201,207,209,222,228,247,261,266,285,333,370,372,399,402,407,423,444,465,481,511,516,518,531,555,558,592,603]

"""
@doc offset: 1,
     sequence: "Moran numbers: n such that (n / sum of digits of n) is prime.",
     references: [{:oeis, :a001101, "https://oeis.org/A001101"}]
def create_sequence_a001101(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a001101/2)
end

@doc false
@doc offset: 1
def seq_a001101(_idx, last) do
  # Scan upward from `last` for the next Moran number.
  Math.next_number(&Predicates.is_moran_number?/1, last)
end

@doc """
OEIS Sequence `A005349` - Niven (or Harshad) numbers: numbers that are divisible by the sum of their digits.

From [OEIS A005349](https://oeis.org/A005349):

> Niven (or Harshad) numbers: numbers that are divisible by the sum of their digits.
> (Formerly M0481)

**Sequence IDs**: `:a005349`

**Finite**: False

**Offset**: 1

## Example

    iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a005349) |> Sequence.take!(61)
    [1,2,3,4,5,6,7,8,9,10,12,18,20,21,24,27,30,36,40,42,45,48,50,54,60,63,70,72,80,81,84,90,100,102,108,110,111,112,114,117,120,126,132,133,135,140,144,150,152,153,156,162,171,180,190,192,195,198,200,201,204]

"""
@doc offset: 1,
     sequence:
       "Niven (or Harshad) numbers: numbers that are divisible by the sum of their digits.",
     references: [{:oeis, :a005349, "https://oeis.org/A005349"}]
def create_sequence_a005349(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a005349/2)
end

@doc false
@doc offset: 1, fill_value: 0
def seq_a005349(_idx, last) do
  # Scan upward from `last` for the next Harshad (Niven) number.
  Math.next_number(&Predicates.is_harshad_number?/1, last)
end

@doc """
OEIS Sequence `A007602` - Numbers that are divisible by the product of their digits.

From [OEIS A007602](https://oeis.org/A007602):

> Numbers that are divisible by the product of their digits.
> (Formerly M0482)

**Sequence IDs**: `:a007602`

**Finite**: False

**Offset**: 1

## Example

    iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a007602) |> Sequence.take!(53)
    [1,2,3,4,5,6,7,8,9,11,12,15,24,36,111,112,115,128,132,135,144,175,212,216,224,312,315,384,432,612,624,672,735,816,1111,1112,1113,1115,1116,1131,1176,1184,1197,1212,1296,1311,1332,1344,1416,1575,1715,2112,2144]

"""
@doc offset: 1,
     sequence: "Numbers that are divisible by the product of their digits.",
     references: [{:oeis, :a007602, "https://oeis.org/A007602"}]
def create_sequence_a007602(_opts) do
  sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Repr.seq_a007602/2)
end

@doc false
@doc offset: 1, fill_value: 0
def seq_a007602(_idx, last) do
  # Zuckerman numbers are exactly those divisible by their digit product.
  Math.next_number(&Predicates.is_zuckerman_number?/1, last)
end
@doc """
OEIS Sequence `A115983` - Apocalypse primes: 10^665+a(n) has 666 decimal digits and is prime.

From [OEIS A115983](https://oeis.org/A115983):

> Apocalypse primes: 10^665+a(n) has 666 decimal digits and is prime.
> (Formerly )

**Sequence IDs**: `:a115983`

**Finite**: False

**Offset**: 1

## Example

    iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a115983) |> Sequence.take!(7)
    [123,1837,6409,7329,8569,8967,9663]

"""
@doc offset: 1,
     sequence: "Apocalypse primes: 10^665+a(n) has 666 decimal digits and is prime.",
     references: [{:oeis, :a115983, "https://oeis.org/A115983"}]
def create_sequence_a115983(_opts) do
  sequence_for_function(&__MODULE__.seq_a115983/2)
end

@doc false
@doc offset: 1
def seq_a115983(_idx, last) do
  # Scan upward from `last` for the next offset v such that 10^665 + v is
  # an apocalypse prime (a 666-digit prime).
  Math.next_number(&Predicates.is_apocalypse_prime?(Math.pow(10, 665) + &1), last)
end

@doc """
OEIS Sequence `A051003` - Beastly (or hateful) numbers: numbers containing the string 666 in their decimal expansion.

From [OEIS A051003](https://oeis.org/A051003):

> Beastly (or hateful) numbers: numbers containing the string 666 in their decimal expansion.
> (Formerly )

**Sequence IDs**: `:a051003`

**Finite**: False

**Offset**: 1

## Example

    iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Repr, :a051003) |> Sequence.take!(19)
    [666,1666,2666,3666,4666,5666,6660,6661,6662,6663,6664,6665,6666,6667,6668,6669,7666,8666,9666]

"""
@doc offset: 1,
     sequence:
       "Beastly (or hateful) numbers: numbers containing the string 666 in their decimal expansion.",
     references: [{:oeis, :a051003, "https://oeis.org/A051003"}]
def create_sequence_a051003(_opts) do
  sequence_for_function(&__MODULE__.seq_a051003/2)
end

@doc false
@doc offset: 1
def seq_a051003(_idx, last) do
  # Scan upward from `last` for the next number whose decimal digits
  # contain "666".
  Math.next_number(&Predicates.is_beast_number?/1, last)
end
end
|
lib/sequence/oeis/repr.ex
| 0.859782 | 0.749798 |
repr.ex
|
starcoder
|
defmodule AMQP.Basic do
  @moduledoc """
  Functions to publish, consume and acknowledge messages.
  """

  require Record
  import AMQP.Core

  alias AMQP.Channel

  # Wire-level message record used by the Erlang AMQP client: the AMQP
  # basic properties record plus the binary payload.
  Record.defrecordp :amqp_msg, [props: p_basic(), payload: ""]

  @doc """
  Publishes a message to an Exchange.

  This method publishes a message to a specific exchange. The message will be routed to queues as defined by
  the exchange configuration and distributed to any subscribers.

  The parameter `exchange` specifies the name of the exchange to publish to. If set to empty string, it publishes to the
  default exchange.
  The `routing_key` parameter specifies the routing key for the message.
  The `payload` parameter specifies the message content as a binary.

  In addition to the previous parameters, the following options can be used:

  * `mandatory`: If set, returns an error if the broker can't route the message to a queue
  * `immediate`: If set, returns an error if the broker can't deliver the message to a consumer immediately.

  Additional Basic properties can be set using the following options:

  * `content_type`: MIME Content type
  * `content_encoding`: MIME Content encoding
  * `headers`: Message headers. Can be used with headers Exchanges
  * `persistent`: If set, uses persistent delivery mode. Messages marked as `persistent` that are delivered to `durable` queues will be logged to disk.
  * `correlation_id`: application correlation identifier
  * `priority`: message priority, ranging from 0 to 9
  * `reply_to`: name of the reply queue
  * `expiration`: how long the message is valid (in milliseconds)
  * `message_id`: message identifier
  * `timestamp`: timestamp associated with this message (epoch time)
  * `type`: message type as a string
  * `user_id`: creating user ID. RabbitMQ will validate this against the active connection user
  * `app_id`: publishing application ID
  """
  def publish(%Channel{pid: pid}, exchange, routing_key, payload, options \\ []) do
    basic_publish =
      basic_publish(exchange: exchange,
                    routing_key: routing_key,
                    mandatory: Keyword.get(options, :mandatory, false),
                    immediate: Keyword.get(options, :immediate, false))
    # :undefined (not nil) is the Erlang client's marker for "property not set".
    p_basic =
      p_basic(content_type: Keyword.get(options, :content_type, :undefined),
              content_encoding: Keyword.get(options, :content_encoding, :undefined),
              headers: Keyword.get(options, :headers, :undefined),
              # AMQP delivery modes: 2 = persistent, 1 = transient.
              delivery_mode: if(options[:persistent], do: 2, else: 1),
              priority: Keyword.get(options, :priority, :undefined),
              correlation_id: Keyword.get(options, :correlation_id, :undefined),
              reply_to: Keyword.get(options, :reply_to, :undefined),
              expiration: Keyword.get(options, :expiration, :undefined),
              message_id: Keyword.get(options, :message_id, :undefined),
              timestamp: Keyword.get(options, :timestamp, :undefined),
              type: Keyword.get(options, :type, :undefined),
              user_id: Keyword.get(options, :user_id, :undefined),
              app_id: Keyword.get(options, :app_id, :undefined),
              cluster_id: Keyword.get(options, :cluster_id, :undefined))
    :amqp_channel.cast pid, basic_publish, amqp_msg(props: p_basic, payload: payload)
  end

  @doc """
  Sets the message prefetch count or prefetch size (in bytes). If `global` is set to `true` this
  applies to the entire Connection, otherwise it applies only to the specified Channel.
  """
  def qos(%Channel{pid: pid}, options \\ []) do
    # Assertive match on basic_qos_ok(): crash if the broker rejects the QoS.
    basic_qos_ok() =
      :amqp_channel.call pid,
                         basic_qos(prefetch_size: Keyword.get(options, :prefetch_size, 0),
                                   prefetch_count: Keyword.get(options, :prefetch_count, 0),
                                   global: Keyword.get(options, :global, false))
    :ok
  end

  @doc """
  Acknowledges one or more messages. If multiple is set to `true`, all messages up to the one
  specified by `delivery_tag` are considered acknowledged by the server.
  """
  def ack(%Channel{pid: pid}, delivery_tag, options \\ []) do
    :amqp_channel.call pid, basic_ack(delivery_tag: delivery_tag, multiple: Keyword.get(options, :multiple, false))
  end

  @doc """
  Rejects (and, optionally, requeues) a message.
  """
  def reject(%Channel{pid: pid}, delivery_tag, options \\ []) do
    :amqp_channel.call pid, basic_reject(delivery_tag: delivery_tag,
                                         requeue: Keyword.get(options, :requeue, true))
  end

  @doc """
  Negative acknowledge of one or more messages. If multiple is set to `true`, all messages up to the one
  specified by `delivery_tag` are considered as not acknowledged by the server. If `requeue` is set to
  `true`, the message will be returned to the queue and redelivered to the next available consumer.

  This is a RabbitMQ specific extension to AMQP 0.9.1. It is equivalent to reject, but allows rejecting
  multiple messages using the `multiple` option.
  """
  def nack(%Channel{pid: pid}, delivery_tag, options \\ []) do
    :amqp_channel.call pid, basic_nack(delivery_tag: delivery_tag,
                                       multiple: Keyword.get(options, :multiple, false),
                                       requeue: Keyword.get(options, :requeue, true))
  end

  @doc """
  Polls a queue for an existing message.

  Returns the tuple `{:empty, meta}` if the queue is empty or the tuple `{:ok, payload, meta}` if at least
  one message exists in the queue. The returned meta map includes the entry `message_count` with the
  current number of messages in the queue.

  Receiving messages by polling a queue is not as efficient as subscribing a consumer to a queue,
  so consideration should be taken when receiving large volumes of messages.

  Setting the `no_ack` option to true will tell the broker that the receiver will not send an acknowledgement of
  the message. Once it believes it has delivered a message, then it is free to assume that the consuming application
  has taken responsibility for it. In general, a lot of applications will not want these semantics, rather, they
  will want to explicitly acknowledge the receipt of a message and have `no_ack` with the default value of false.
  """
  def get(%Channel{pid: pid}, queue, options \\ []) do
    case :amqp_channel.call pid, basic_get(queue: queue, no_ack: Keyword.get(options, :no_ack, false)) do
      # Unpack the get-ok record and message record into a plain map.
      {basic_get_ok(delivery_tag: delivery_tag,
                    redelivered: redelivered,
                    exchange: exchange,
                    routing_key: routing_key,
                    message_count: message_count),
       amqp_msg(props: p_basic(content_type: content_type,
                               content_encoding: content_encoding,
                               headers: headers,
                               delivery_mode: delivery_mode,
                               priority: priority,
                               correlation_id: correlation_id,
                               reply_to: reply_to,
                               expiration: expiration,
                               message_id: message_id,
                               timestamp: timestamp,
                               type: type,
                               user_id: user_id,
                               app_id: app_id,
                               cluster_id: cluster_id), payload: payload)} ->
        {:ok, payload, %{delivery_tag: delivery_tag,
                         redelivered: redelivered,
                         exchange: exchange,
                         routing_key: routing_key,
                         message_count: message_count,
                         content_type: content_type,
                         content_encoding: content_encoding,
                         headers: headers,
                         # delivery_mode 2 means the message was published as persistent
                         persistent: delivery_mode == 2,
                         priority: priority,
                         correlation_id: correlation_id,
                         reply_to: reply_to,
                         expiration: expiration,
                         message_id: message_id,
                         timestamp: timestamp,
                         type: type,
                         user_id: user_id,
                         app_id: app_id,
                         cluster_id: cluster_id}}
      basic_get_empty(cluster_id: cluster_id) ->
        {:empty, %{cluster_id: cluster_id}}
    end
  end

  @doc """
  Asks the server to redeliver all unacknowledged messages on a specified channel.

  If `requeue` is set to `true` the server will attempt to requeue the message,
  potentially delivering it to another subscriber. Otherwise it will be redelivered
  to the original recipient.
  """
  def recover(%Channel{pid: pid}, options \\ []) do
    :amqp_channel.call pid, basic_recover(requeue: Keyword.get(options, :requeue, false))
  end

  @doc """
  Registers a queue consumer process. The `pid` of the process can be set using
  the `handler` option and defaults to the calling process.

  The handler process will receive `{payload, meta}` tuples, where `meta` is a
  map of delivery metadata, and should process these messages.
  """
  def consume(%Channel{pid: pid}, queue, options \\ []) do
    basic_consume =
      basic_consume(queue: queue,
                    consumer_tag: Keyword.get(options, :consumer_tag, ""),
                    no_local: Keyword.get(options, :no_local, false),
                    no_ack: Keyword.get(options, :no_ack, false),
                    exclusive: Keyword.get(options, :exclusive, false),
                    nowait: Keyword.get(options, :no_wait, false),
                    arguments: Keyword.get(options, :arguments, []))
    handler = options[:handler] || self()
    # Intermediate process that translates the Erlang client's record-based
    # deliveries into plain {payload, meta} messages for the handler.
    # Linked to the handler so that both exit together.
    response_mapper = spawn fn ->
      Process.link(handler)
      do_consume(handler)
    end
    basic_consume_ok(consumer_tag: consumer_tag) =
      :amqp_channel.subscribe(pid, basic_consume, response_mapper)
    {:ok, consumer_tag}
  end

  # Receive loop for the response mapper: unwraps each basic_deliver record
  # into a {payload, meta} tuple and forwards it to `handler`.
  # NOTE(review): only basic_deliver messages are matched; any other channel
  # message (e.g. a consumer cancellation) stays in the mailbox forever —
  # confirm this is intended.
  defp do_consume(handler) do
    receive do
      {basic_deliver(consumer_tag: consumer_tag,
                     delivery_tag: delivery_tag,
                     redelivered: redelivered,
                     exchange: exchange,
                     routing_key: routing_key),
       amqp_msg(props: p_basic(content_type: content_type,
                               content_encoding: content_encoding,
                               headers: headers,
                               delivery_mode: delivery_mode,
                               priority: priority,
                               correlation_id: correlation_id,
                               reply_to: reply_to,
                               expiration: expiration,
                               message_id: message_id,
                               timestamp: timestamp,
                               type: type,
                               user_id: user_id,
                               app_id: app_id,
                               cluster_id: cluster_id), payload: payload)} ->
        send handler, {payload, %{consumer_tag: consumer_tag,
                                  delivery_tag: delivery_tag,
                                  redelivered: redelivered,
                                  exchange: exchange,
                                  routing_key: routing_key,
                                  content_type: content_type,
                                  content_encoding: content_encoding,
                                  headers: headers,
                                  # delivery_mode 2 means published as persistent
                                  persistent: delivery_mode == 2,
                                  priority: priority,
                                  correlation_id: correlation_id,
                                  reply_to: reply_to,
                                  expiration: expiration,
                                  message_id: message_id,
                                  timestamp: timestamp,
                                  type: type,
                                  user_id: user_id,
                                  app_id: app_id,
                                  cluster_id: cluster_id}}
    end
    do_consume(handler)
  end
end
|
lib/amqp/basic.ex
| 0.883939 | 0.489137 |
basic.ex
|
starcoder
|
defmodule HPack.Table do
  @moduledoc """
  Functions to maintain the (de)compression context.

  Contains the static table as well as all management of the dynamic table.
  Indexing follows RFC 7541: indices 1..61 address the static table; higher
  indices address the dynamic table, most recently added entry first.
  """

  # Static header table from RFC 7541, Appendix A. Entries without a
  # predefined value are stored with `nil`.
  #
  # BUGFIX: entry 16 was previously `{"accept-encoding gzip, deflate", nil}`
  # (value fused into the name), which broke both `lookup/2` for index 16
  # and `find/3` for the common `accept-encoding: gzip, deflate` header.
  @static [
    {":authority", nil},
    {":method", "GET"},
    {":method", "POST"},
    {":path", "/"},
    {":path", "/index.html"},
    {":scheme", "http"},
    {":scheme", "https"},
    {":status", "200"},
    {":status", "204"},
    {":status", "206"},
    {":status", "304"},
    {":status", "400"},
    {":status", "404"},
    {":status", "500"},
    {"accept-charset", nil},
    {"accept-encoding", "gzip, deflate"},
    {"accept-language", nil},
    {"accept-ranges", nil},
    {"accept", nil},
    {"access-control-allow-origin", nil},
    {"age", nil},
    {"allow", nil},
    {"authorization", nil},
    {"cache-control", nil},
    {"content-disposition", nil},
    {"content-encoding", nil},
    {"content-language", nil},
    {"content-length", nil},
    {"content-location", nil},
    {"content-range", nil},
    {"content-type", nil},
    {"cookie", nil},
    {"date", nil},
    {"etag", nil},
    {"expect", nil},
    {"expires", nil},
    {"from", nil},
    {"host", nil},
    {"if-match", nil},
    {"if-modified-since", nil},
    {"if-none-match", nil},
    {"if-range", nil},
    {"if-unmodified-since", nil},
    {"last-modified", nil},
    {"link", nil},
    {"location", nil},
    {"max-forwards", nil},
    {"proxy-authenticate", nil},
    {"proxy-authorization", nil},
    {"range", nil},
    {"referer", nil},
    {"refresh", nil},
    {"retry-after", nil},
    {"server", nil},
    {"set-cookie", nil},
    {"strict-transport-security", nil},
    {"transfer-encoding", nil},
    {"user-agent", nil},
    {"vary", nil},
    {"via", nil},
    {"www-authenticate", nil}
  ]

  @type size() :: non_neg_integer()
  @type table() :: list()
  @type index() :: non_neg_integer()

  @opaque t :: %__MODULE__{
            size: size(),
            table: table()
          }

  defstruct size: nil, table: []

  @doc """
  Creates a new (de)compression context with the given maximum dynamic
  table size in octets (RFC 7541, section 4.2).
  """
  @spec new(size()) :: t()
  def new(max_table_size) do
    %__MODULE__{size: max_table_size}
  end

  @doc """
  Resolves a 1-based header index against the static table followed by the
  dynamic table. Returns `{:error, :not_found}` for out-of-range indices.
  """
  @spec lookup(index(), t()) :: {:ok, HPack.header()} | {:error, :not_found}
  def lookup(index, %{table: table}) do
    table
    |> full_table()
    |> Enum.at(index - 1)
    |> case do
      header when not is_nil(header) ->
        {:ok, header}

      _ ->
        {:error, :not_found}
    end
  end

  @doc """
  Searches for a header in the combined static + dynamic table.

  Returns `{:fullindex, i}` when both name and value match, `{:keyindex, i}`
  when only the name matches, and `{:error, :not_found}` otherwise. A full
  match is preferred over a name-only match.
  """
  @spec find(HPack.name(), HPack.value(), t()) ::
          {:error, :not_found} | {:keyindex, integer} | {:fullindex, integer}
  def find(name, value, %{table: table}) do
    match_on_key_and_value =
      table
      |> full_table()
      |> Enum.find_index(fn {ck, cv} -> ck == name && cv == value end)

    match_on_key =
      table
      |> full_table()
      |> Enum.find_index(fn {ck, _} -> ck == name end)

    cond do
      # HPACK indices are 1-based, Enum.find_index/2 is 0-based.
      match_on_key_and_value != nil -> {:fullindex, match_on_key_and_value + 1}
      match_on_key != nil -> {:keyindex, match_on_key + 1}
      true -> {:error, :not_found}
    end
  end

  @doc """
  Prepends a header to the dynamic table, evicting the oldest entries when
  the table would exceed its configured maximum size.
  """
  @spec add(HPack.header(), t()) :: {:ok, t()}
  def add({key, value}, %{table: table} = context) do
    {:ok, check_size(%{context | table: [{key, value} | table]})}
  end

  @doc """
  Updates the maximum dynamic table size, evicting entries as needed.

  When `max_size` is given (the limit negotiated via SETTINGS), a requested
  `size` above it is a protocol violation and yields `{:error, :decode_error}`.
  """
  @spec resize(size(), t(), size() | nil) :: {:ok, t()} | {:error, :decode_error}
  def resize(size, context, max_size \\ nil)

  def resize(size, context, max_size)
      when not is_integer(max_size) or size <= max_size do
    {:ok, check_size(%{context | size: size})}
  end

  def resize(_size, _context, _max_size), do: {:error, :decode_error}

  @doc """
  Returns the current size of the dynamic table in octets, where each entry
  costs `byte_size(name) + byte_size(value) + 32` (RFC 7541, section 4.1).
  """
  @spec size(t()) :: size()
  def size(%{table: table}), do: calculate_size(table)

  # Check dynamic table size and evict entries when necessary.
  defp check_size(%{size: size, table: table} = context) do
    %{context | table: evict(calculate_size(table) > size, table, size)}
  end

  # Entry cost per RFC 7541 section 4.1: name + value + 32 octets overhead.
  defp calculate_size(table) do
    Enum.reduce(table, 0, fn {key, value}, acc -> acc + byte_size(key) + byte_size(value) + 32 end)
  end

  # Drop the oldest (last) entry until the table fits within `size`.
  defp evict(true, table, size) do
    new_table = List.delete_at(table, length(table) - 1)
    evict(calculate_size(new_table) > size, new_table, size)
  end

  defp evict(false, table, _), do: table

  # Dynamic entries are addressed after the 61 static entries.
  defp full_table(table), do: @static ++ table
end
|
lib/hpack/table.ex
| 0.766556 | 0.590514 |
table.ex
|
starcoder
|
defmodule Mathmatical.Questions do
  @moduledoc """
  The Questions context.
  """

  import Ecto.Query, warn: false
  alias Mathmatical.Repo

  alias Mathmatical.Questions.Subject

  @doc """
  Returns the list of subjects.

  ## Examples

      iex> list_subjects()
      [%Subject{}, ...]

  """
  def list_subjects do
    Repo.all(Subject)
  end

  @doc """
  Gets a single subject.

  Raises `Ecto.NoResultsError` if the Subject does not exist.

  ## Examples

      iex> get_subject!(123)
      %Subject{}

      iex> get_subject!(456)
      ** (Ecto.NoResultsError)

  """
  def get_subject!(id), do: Repo.get!(Subject, id)

  @doc """
  Creates a subject.

  ## Examples

      iex> create_subject(%{field: value})
      {:ok, %Subject{}}

      iex> create_subject(%{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def create_subject(attrs \\ %{}) do
    %Subject{}
    |> Subject.changeset(attrs)
    |> Repo.insert()
  end

  @doc """
  Updates a subject.

  ## Examples

      iex> update_subject(subject, %{field: new_value})
      {:ok, %Subject{}}

      iex> update_subject(subject, %{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def update_subject(%Subject{} = subject, attrs) do
    subject
    |> Subject.changeset(attrs)
    |> Repo.update()
  end

  @doc """
  Deletes a Subject.

  ## Examples

      iex> delete_subject(subject)
      {:ok, %Subject{}}

      iex> delete_subject(subject)
      {:error, %Ecto.Changeset{}}

  """
  def delete_subject(%Subject{} = subject) do
    Repo.delete(subject)
  end

  @doc """
  Returns an `%Ecto.Changeset{}` for tracking subject changes.

  ## Examples

      iex> change_subject(subject)
      %Ecto.Changeset{source: %Subject{}}

  """
  def change_subject(%Subject{} = subject) do
    Subject.changeset(subject, %{})
  end

  alias Mathmatical.Questions.Question

  @doc """
  Returns the list of questions.

  ## Examples

      iex> list_questions()
      [%Question{}, ...]

  """
  def list_questions do
    Repo.all(Question)
  end

  @doc """
  Gets a single question.

  Raises `Ecto.NoResultsError` if the Question does not exist.

  ## Examples

      iex> get_question!(123)
      %Question{}

      iex> get_question!(456)
      ** (Ecto.NoResultsError)

  """
  def get_question!(id), do: Repo.get!(Question, id)

  @doc """
  Gets the question that follows the question with the given `id`, ordered
  by id.

  Returns `nil` when there is no later question.
  """
  def get_next_question(id) do
    # BUGFIX: the previous implementation queried `id + 1`, which assumed
    # ids are contiguous and returned nil at any gap left by a deleted
    # question. Select the smallest id strictly greater than `id` instead.
    Repo.one(
      from q in Question,
        where: q.id > ^id,
        order_by: [asc: q.id],
        limit: 1
    )
  end

  @doc """
  Creates a question.

  ## Examples

      iex> create_question(%{field: value})
      {:ok, %Question{}}

      iex> create_question(%{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def create_question(attrs \\ %{}) do
    %Question{}
    |> Question.changeset(attrs)
    |> Repo.insert()
  end

  @doc """
  Updates a question.

  ## Examples

      iex> update_question(question, %{field: new_value})
      {:ok, %Question{}}

      iex> update_question(question, %{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def update_question(%Question{} = question, attrs) do
    question
    |> Question.changeset(attrs)
    |> Repo.update()
  end

  @doc """
  Deletes a Question.

  ## Examples

      iex> delete_question(question)
      {:ok, %Question{}}

      iex> delete_question(question)
      {:error, %Ecto.Changeset{}}

  """
  def delete_question(%Question{} = question) do
    Repo.delete(question)
  end

  @doc """
  Returns an `%Ecto.Changeset{}` for tracking question changes.

  ## Examples

      iex> change_question(question)
      %Ecto.Changeset{source: %Question{}}

  """
  def change_question(%Question{} = question) do
    Question.changeset(question, %{})
  end
end
|
lib/mathmatical/questions.ex
| 0.86548 | 0.456168 |
questions.ex
|
starcoder
|
defmodule AWS.Health do
@moduledoc """
Health
The Health API provides programmatic access to the Health information that
appears in the [Personal Health Dashboard](https://phd.aws.amazon.com/phd/home#/).
You can use the API operations to get information about events that might affect
your Amazon Web Services services and resources.
You must have a Business or Enterprise Support plan from [Amazon Web Services Support](http://aws.amazon.com/premiumsupport/) to use the Health
API. If you call the Health API from an Amazon Web Services account that doesn't
have a Business or Enterprise Support plan, you receive a
`SubscriptionRequiredException` error.
You can use the Health endpoint health.us-east-1.amazonaws.com
(HTTPS) to call the Health API operations. Health supports a multi-Region
application architecture and has two regional endpoints in an active-passive
configuration. You can use the high availability endpoint example to determine
which Amazon Web Services Region is active, so that you can get the latest
information from the API. For more information, see [Accessing the Health API](https://docs.aws.amazon.com/health/latest/ug/health-api.html) in the
*Health User Guide*.
For authentication of requests, Health uses the [Signature Version 4 Signing Process](https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html).
If your Amazon Web Services account is part of Organizations, you can use the
Health organizational view feature. This feature provides a centralized view of
Health events across all accounts in your organization. You can aggregate Health
events in real time to identify accounts in your organization that are affected
by an operational event or get notified of security vulnerabilities. Use the
organizational view API operations to enable this feature and return event
information. For more information, see [Aggregating Health events](https://docs.aws.amazon.com/health/latest/ug/aggregate-events.html) in
the *Health User Guide*.
When you use the Health API operations to return Health events, see the
following recommendations:
Use the
[eventScopeCode](https://docs.aws.amazon.com/health/latest/APIReference/API_Event.html#AWSHealth-Type-Event-eventScopeCode)
parameter to specify whether to return Health events that are public or
account-specific.
Use pagination to view all events from the response. For example,
if you call the `DescribeEventsForOrganization` operation to get all events in
your organization, you might receive several page results. Specify the
`nextToken` in the next request to return more results.
"""
alias AWS.Client
alias AWS.Request
# Static service descriptor (endpoint prefix, protocol, signing details)
# consumed by `AWS.Request` when building and signing Health API calls.
def metadata do
  %AWS.ServiceMetadata{
    abbreviation: "AWSHealth",
    api_version: "2016-08-04",
    content_type: "application/x-amz-json-1.1",
    credential_scope: nil,
    endpoint_prefix: "health",
    global?: false,
    protocol: "json",
    service_id: "Health",
    signature_version: "v4",
    signing_name: "health",
    target_prefix: "AWSHealth_20160804"
  }
end
@doc """
Returns a list of accounts in the organization from Organizations that are
affected by the provided event.

For more information about the different types of Health events, see
[Event](https://docs.aws.amazon.com/health/latest/APIReference/API_Event.html).

Before you can call this operation, you must first enable Health to work with
Organizations. To do this, call the
[EnableHealthServiceAccessForOrganization](https://docs.aws.amazon.com/health/latest/APIReference/API_EnableHealthServiceAccessForOrganization.html)
operation from your organization's management account.

This API operation uses pagination. Specify the `nextToken` parameter in the
next request to return more results.
"""
def describe_affected_accounts_for_organization(%Client{} = client, input, options \\ []) do
  client
  |> Request.request_post(metadata(), "DescribeAffectedAccountsForOrganization", input, options)
end
@doc """
Returns a list of entities that have been affected by the specified events,
based on the specified filter criteria.

Entities can refer to individual customer resources, groups of customer
resources, or any other construct, depending on the Amazon Web Services service.
Events that have impact beyond that of the affected entities, or where the
extent of impact is unknown, include at least one entity indicating this.

At least one event ARN is required.

This API operation uses pagination. Specify the `nextToken`
parameter in the next request to return more results.

This operation supports resource-level permissions. You can use
this operation to allow or deny access to specific Health events. For more
information, see [Resource- and action-based conditions](https://docs.aws.amazon.com/health/latest/ug/security_iam_id-based-policy-examples.html#resource-action-based-conditions)
in the *Health User Guide*.
"""
def describe_affected_entities(%Client{} = client, input, options \\ []) do
  action = "DescribeAffectedEntities"
  Request.request_post(client, metadata(), action, input, options)
end
@doc """
Returns a list of entities that have been affected by one or more events for one
or more accounts in your organization in Organizations, based on the filter
criteria.

Entities can refer to individual customer resources, groups of customer
resources, or any other construct, depending on the Amazon Web Services service.

At least one event Amazon Resource Name (ARN) and account ID are required.

Before you can call this operation, you must first enable Health to work with
Organizations. To do this, call the
[EnableHealthServiceAccessForOrganization](https://docs.aws.amazon.com/health/latest/APIReference/API_EnableHealthServiceAccessForOrganization.html)
operation from your organization's management account.

This API operation uses pagination. Specify the `nextToken`
parameter in the next request to return more results.

This operation doesn't support resource-level permissions. You
can't use this operation to allow or deny access to specific Health events. For
more information, see [Resource- and action-based
conditions](https://docs.aws.amazon.com/health/latest/ug/security_iam_id-based-policy-examples.html#resource-action-based-conditions)
in the *Health User Guide*.
"""
def describe_affected_entities_for_organization(%Client{} = client, input, options \\ []) do
  client
  |> Request.request_post(metadata(), "DescribeAffectedEntitiesForOrganization", input, options)
end
@doc """
Returns the number of entities that are affected by each of the specified
events.
"""
def describe_entity_aggregates(%Client{} = client, input, options \\ []) do
  action = "DescribeEntityAggregates"
  Request.request_post(client, metadata(), action, input, options)
end
@doc """
Returns the number of events of each event type (issue, scheduled change, and
account notification).

If no filter is specified, the counts of all events in each category are
returned.

This API operation uses pagination. Specify the `nextToken` parameter in the
next request to return more results.
"""
def describe_event_aggregates(%Client{} = client, input, options \\ []) do
  client
  |> Request.request_post(metadata(), "DescribeEventAggregates", input, options)
end
@doc """
Returns detailed information about one or more specified events.

Information includes standard event data (Amazon Web Services Region, service,
and so on, as returned by
[DescribeEvents](https://docs.aws.amazon.com/health/latest/APIReference/API_DescribeEvents.html)),
a detailed event description, and possible additional metadata that depends upon
the nature of the event. Affected entities are not included. To retrieve the
entities, use the
[DescribeAffectedEntities](https://docs.aws.amazon.com/health/latest/APIReference/API_DescribeAffectedEntities.html)
operation.

If a specified event can't be retrieved, an error message is returned for that
event.

This operation supports resource-level permissions. You can use this operation
to allow or deny access to specific Health events. For more information, see
[Resource- and action-based conditions](https://docs.aws.amazon.com/health/latest/ug/security_iam_id-based-policy-examples.html#resource-action-based-conditions)
in the *Health User Guide*.
"""
def describe_event_details(%Client{} = client, input, options \\ []) do
  action = "DescribeEventDetails"
  Request.request_post(client, metadata(), action, input, options)
end
@doc """
Returns detailed information about one or more specified events for one or more
Amazon Web Services accounts in your organization.

This information includes standard event data (such as the Amazon Web Services
Region and service), an event description, and (depending on the event) possible
metadata. This operation doesn't return affected entities, such as the resources
related to the event. To return affected entities, use the
[DescribeAffectedEntitiesForOrganization](https://docs.aws.amazon.com/health/latest/APIReference/API_DescribeAffectedEntitiesForOrganization.html)
operation.

Before you can call this operation, you must first enable Health to work with
Organizations. To do this, call the
[EnableHealthServiceAccessForOrganization](https://docs.aws.amazon.com/health/latest/APIReference/API_EnableHealthServiceAccessForOrganization.html)
operation from your organization's management account.

When you call the `DescribeEventDetailsForOrganization` operation, specify the
`organizationEventDetailFilters` object in the request. Depending on the Health
event type, note the following differences:

  * To return event details for a public event, you must specify a
    null value for the `awsAccountId` parameter. If you specify an account ID for a
    public event, Health returns an error message because public events aren't
    specific to an account.

  * To return event details for an event that is specific to an
    account in your organization, you must specify the `awsAccountId` parameter in
    the request. If you don't specify an account ID, Health returns an error message
    because the event is specific to an account in your organization.

For more information, see
[Event](https://docs.aws.amazon.com/health/latest/APIReference/API_Event.html).

This operation doesn't support resource-level permissions. You can't use this
operation to allow or deny access to specific Health events. For more
information, see [Resource- and action-based
conditions](https://docs.aws.amazon.com/health/latest/ug/security_iam_id-based-policy-examples.html#resource-action-based-conditions)
in the *Health User Guide*.
"""
def describe_event_details_for_organization(%Client{} = client, input, options \\ []) do
  client
  |> Request.request_post(metadata(), "DescribeEventDetailsForOrganization", input, options)
end
@doc """
Returns the event types that meet the specified filter criteria.

You can use this API operation to find information about the Health event, such
as the category, Amazon Web Services service, and event code. The metadata for
each event appears in the
[EventType](https://docs.aws.amazon.com/health/latest/APIReference/API_EventType.html)
object.

If you don't specify a filter criteria, the API operation returns all event
types, in no particular order.

This API operation uses pagination. Specify the `nextToken` parameter in the
next request to return more results.
"""
def describe_event_types(%Client{} = client, input, options \\ []) do
  action = "DescribeEventTypes"
  Request.request_post(client, metadata(), action, input, options)
end
@doc """
Returns information about events that meet the specified filter criteria.

Events are returned in a summary form and do not include the detailed
description, any additional metadata that depends on the event type, or any
affected resources. To retrieve that information, use the
[DescribeEventDetails](https://docs.aws.amazon.com/health/latest/APIReference/API_DescribeEventDetails.html)
and
[DescribeAffectedEntities](https://docs.aws.amazon.com/health/latest/APIReference/API_DescribeAffectedEntities.html)
operations.

If no filter criteria are specified, all events are returned. Results are sorted
by `lastModifiedTime`, starting with the most recent event.

When you call the `DescribeEvents` operation and specify an entity
for the `entityValues` parameter, Health might return public events that aren't
specific to that resource. For example, if you call `DescribeEvents` and specify
an ID for an Amazon Elastic Compute Cloud (Amazon EC2) instance, Health might
return events that aren't specific to that resource or service. To get events
that are specific to a service, use the `services` parameter in the `filter`
object. For more information, see
[Event](https://docs.aws.amazon.com/health/latest/APIReference/API_Event.html).

This API operation uses pagination. Specify the `nextToken`
parameter in the next request to return more results.
"""
def describe_events(%Client{} = client, input, options \\ []) do
  client
  |> Request.request_post(metadata(), "DescribeEvents", input, options)
end
@doc """
Returns information about events across your organization in Organizations.

You can use the `filters` parameter to specify the events that you want to
return. Events are returned in a summary form and don't include the affected
accounts, detailed description, any additional metadata that depends on the
event type, or any affected resources. To retrieve that information, use the
following operations:

  * [DescribeAffectedAccountsForOrganization](https://docs.aws.amazon.com/health/latest/APIReference/API_DescribeAffectedAccountsForOrganization.html)

  * [DescribeEventDetailsForOrganization](https://docs.aws.amazon.com/health/latest/APIReference/API_DescribeEventDetailsForOrganization.html)

  * [DescribeAffectedEntitiesForOrganization](https://docs.aws.amazon.com/health/latest/APIReference/API_DescribeAffectedEntitiesForOrganization.html)

If you don't specify a `filter`, the `DescribeEventsForOrganizations` returns
all events across your organization. Results are sorted by `lastModifiedTime`,
starting with the most recent event.

For more information about the different types of Health events, see
[Event](https://docs.aws.amazon.com/health/latest/APIReference/API_Event.html).

Before you can call this operation, you must first enable Health to work with
Organizations. To do this, call the
[EnableHealthServiceAccessForOrganization](https://docs.aws.amazon.com/health/latest/APIReference/API_EnableHealthServiceAccessForOrganization.html)
operation from your organization's management account.

This API operation uses pagination. Specify the `nextToken` parameter in the
next request to return more results.
"""
def describe_events_for_organization(%Client{} = client, input, options \\ []) do
  action = "DescribeEventsForOrganization"
  Request.request_post(client, metadata(), action, input, options)
end
@doc """
This operation provides status information on enabling or disabling Health to
work with your organization.

To call this operation, you must sign in as an IAM user, assume an IAM role, or
sign in as the root user (not recommended) in the organization's management
account.
"""
def describe_health_service_status_for_organization(%Client{} = client, input, options \\ []) do
  client
  |> Request.request_post(
    metadata(),
    "DescribeHealthServiceStatusForOrganization",
    input,
    options
  )
end
@doc """
Disables Health from working with Organizations.

To call this operation, you must sign in as an Identity and Access Management
(IAM) user, assume an IAM role, or sign in as the root user (not recommended) in
the organization's management account. For more information, see [Aggregating Health
events](https://docs.aws.amazon.com/health/latest/ug/aggregate-events.html) in
the *Health User Guide*.

This operation doesn't remove the service-linked role from the management
account in your organization. You must use the IAM console, API, or Command Line
Interface (CLI) to remove the service-linked role. For more information, see
[Deleting a Service-Linked Role](https://docs.aws.amazon.com/IAM/latest/UserGuide/using-service-linked-roles.html#delete-service-linked-role)
in the *IAM User Guide*.

You can also disable the organizational feature by using the Organizations
[DisableAWSServiceAccess](https://docs.aws.amazon.com/organizations/latest/APIReference/API_DisableAWSServiceAccess.html)
API operation. After you call this operation, Health stops aggregating events
for all other Amazon Web Services accounts in your organization. If you call the
Health API operations for organizational view, Health returns an error. Health
continues to aggregate health events for your Amazon Web Services account.
"""
def disable_health_service_access_for_organization(%Client{} = client, input, options \\ []) do
  action = "DisableHealthServiceAccessForOrganization"
  Request.request_post(client, metadata(), action, input, options)
end
@doc """
Enables Health to work with Organizations.

You can use the organizational view feature to aggregate events from all Amazon
Web Services accounts in your organization in a centralized location.

This operation also creates a service-linked role for the management account in
the organization.

To call this operation, you must meet the following requirements:

  * You must have a Business or Enterprise Support plan from [Amazon Web Services Support](http://aws.amazon.com/premiumsupport/) to use the Health
    API. If you call the Health API from an Amazon Web Services account that doesn't
    have a Business or Enterprise Support plan, you receive a
    `SubscriptionRequiredException` error.

  * You must have permission to call this operation from the
    organization's management account. For example IAM policies, see [Health identity-based policy
    examples](https://docs.aws.amazon.com/health/latest/ug/security_iam_id-based-policy-examples.html).

If you don't have the required support plan, you can instead use the Health
console to enable the organizational view feature. For more information, see
[Aggregating Health events](https://docs.aws.amazon.com/health/latest/ug/aggregate-events.html) in
the *Health User Guide*.
"""
def enable_health_service_access_for_organization(%Client{} = client, input, options \\ []) do
  client
  |> Request.request_post(metadata(), "EnableHealthServiceAccessForOrganization", input, options)
end
end
|
lib/aws/generated/health.ex
| 0.893356 | 0.635067 |
health.ex
|
starcoder
|
defmodule NervesTime.RTC.MCP7940 do
  @moduledoc """
  Microchip MCP7940N RTC implementation for NervesTime

  To configure NervesTime to use this module, update the `:nerves_time` application
  environment like this:

  ```elixir
  config :nerves_time, rtc: NervesTime.RTC.MCP7940
  ```

  If not using `"i2c-1"` or the default I2C bus address, specify them like this:

  ```elixir
  config :nerves_time, rtc: {NervesTime.RTC.MCP7940, [bus_name: "i2c-2", address: 0x69]}
  ```

  Check the logs for error messages if the RTC doesn't appear to work.

  See https://ww1.microchip.com/downloads/aemDocuments/documents/MPD/ProductDocuments/DataSheets/MCP7940N-Battery-Backed-I2C-RTCC-with-SRAM-20005010J.pdf
  for implementation details.
  """
  @behaviour NervesTime.RealTimeClock

  require Logger

  alias Circuits.I2C
  alias NervesTime.RTC.MCP7940.Registers
  alias NervesTime.RTC.MCP7940.BcdDate
  alias NervesTime.RTC.MCP7940.Control

  @default_bus_name "i2c-1"
  @default_address 0x6F

  # Internal state threaded through the RealTimeClock callbacks.
  # FIX: the previous typespec declared a `retries: I2C.bus()` key, but the
  # state maps built in `open_rtc/3` never contain `:retries` (and the type
  # was wrong even if they had). The spec now matches the actual state.
  @typedoc false
  @type state :: %{
          i2c: I2C.bus(),
          bus_name: String.t(),
          address: I2C.address()
        }

  @impl NervesTime.RealTimeClock
  def init(args) do
    bus_name = Keyword.get(args, :bus_name, @default_bus_name)
    address = Keyword.get(args, :address, @default_address)
    retries = Keyword.get(args, :retries, 100)
    open_rtc(bus_name, address, retries)
  end

  # Final attempt: no retry wrapper, so whatever the `with` produces (the
  # success tuple or the failing step's error value) is returned as-is.
  def open_rtc(bus_name, address, 0) do
    with {:ok, i2c} <- I2C.open(bus_name),
         :ok <- Control.deviceStart(i2c, address) do
      {:ok, %{i2c: i2c, bus_name: bus_name, address: address}}
    end
  end

  # Open the I2C bus and start the oscillator, retrying with a 200 ms pause
  # on any failure until the retry budget is exhausted.
  def open_rtc(bus_name, address, retries) do
    with {:ok, i2c} <- I2C.open(bus_name),
         :ok <- Control.deviceStart(i2c, address) do
      Logger.info(
        "initializing MCP7940 RTC #{inspect(bus_name: bus_name, address: address)} => #{inspect(i2c)}"
      )

      {:ok, %{i2c: i2c, bus_name: bus_name, address: address}}
    else
      err ->
        Logger.error(
          "Error initializing MCP7940 RTC #{inspect(bus_name: bus_name, address: address)} => #{inspect(err)}"
        )

        Process.sleep(200)
        open_rtc(bus_name, address, retries - 1)
    end
  end

  @impl NervesTime.RealTimeClock
  def terminate(_state), do: :ok

  # Write `now` to the RTC. The oscillator is stopped before the timekeeping
  # registers are written and restarted afterwards, per the datasheet's
  # recommended update sequence. Errors are logged and the state is returned
  # unchanged so NervesTime keeps running.
  @impl NervesTime.RealTimeClock
  def set_time(%{address: address, i2c: i2c} = state, now) do
    with {:ok, registers} <- BcdDate.encode(now),
         :ok <- Control.deviceStop(i2c, address),
         :ok <- I2C.write(i2c, address, [<<Registers.name(:RTCSEC)>>, registers]),
         :ok <- Control.deviceStart(i2c, address) do
      state
    else
      error ->
        _ = Logger.error("Error setting MCP7940 RTC to #{inspect(now)}: #{inspect(error)}")
        state
    end
  end

  # Read the 7 timekeeping registers starting at RTCSEC and decode them to a
  # NaiveDateTime. Returns `{:unset, state}` when the RTC is not set or a
  # bus/decode error occurs.
  @impl NervesTime.RealTimeClock
  def get_time(state) do
    with {:ok, registers} <-
           I2C.write_read(state.i2c, state.address, <<Registers.name(:RTCSEC)>>, 7),
         {:ok, time} <- BcdDate.decode(registers),
         %NaiveDateTime{} <- time do
      {:ok, time, state}
    else
      any_error ->
        Logger.error("MCP7940 RTC not set or has an error: #{inspect(any_error)}")
        {:unset, state}
    end
  end
end
|
lib/nerves_time/rtc/mcp7940.ex
| 0.732974 | 0.702326 |
mcp7940.ex
|
starcoder
|
defmodule LastCrusader.Micropub.PostTypeDiscovery do
  @moduledoc """
  Indieweb Post Type discovery implementation

  see https://indieweb.org/post-type-discovery

  Post Type Discovery specifies an algorithm for consuming code to determine the type of a post by its content properties and their values rather than an explicit "post type" property, thus better matched to modern post creation UIs that allow combining text, media, etc in a variety of ways without burdening users with any notion of what kind of post they are creating.

  The Post Type Discovery algorithm ("the algorithm") discovers the type of a post given a data structure representing a post with a flat set of properties (e.g. Activity Streams (1.0 or 2.0) JSON, or JSON output from parsing [microformats2]), each with one or more values, by following these steps until reaching the first "it is a(n) ... post" statement at which point the "..." is the discovered post type.

  1. If the post has an "rsvp" property with a valid value,
     Then it is an RSVP post.
  2. If the post has an "in-reply-to" property with a valid URL,
     Then it is a reply post.
  3. If the post has a "repost-of" property with a valid URL,
     Then it is a repost (AKA "share") post.
  4. If the post has a "like-of" property with a valid URL,
     Then it is a like (AKA "favorite") post.
  5. If the post has a "video" property with a valid URL,
     Then it is a video post.
  6. If the post has a "photo" property with a valid URL,
     Then it is a photo post.
  7. If the post has a "content" property with a non-empty value,
     Then use its first non-empty value as the content
  8. Else if the post has a "summary" property with a non-empty value,
     Then use its first non-empty value as the content
  9. Else it is a note post.
  10. If the post has no "name" property
      or has a "name" property with an empty string value (or no value)
      Then it is a note post.
  11. Take the first non-empty value of the "name" property
  12. Trim all leading/trailing whitespace
  13. Collapse all sequences of internal whitespace to a single space (0x20) character each
  14. Do the same with the content
  15. If this processed "name" property value is NOT a prefix of the processed content,
      Then it is an article post.
  16. It is a note post.

  Quoted property names in the algorithm are defined in h-entry.
  """

  # NOTE(review): the code consistently spells "rsvp" as "rvsp" (atom `:rvsp`,
  # map key `rvsp:`). Kept as-is because callers pattern-match on `:rvsp`.

  # FIX: `:repost_of` is returned by `discover/1` for a valid "repost-of"
  # property but was missing from this union (and from the @doc list below).
  @type post_type() ::
          :note
          | :article
          | :bookmark
          | :rvsp
          | :in_reply_to
          | :repost_of
          | :like_of
          | :video
          | :photo

  @doc """
  Discover the post type according to the official algorithm. Can be:

  - `:note`
  - `:article`
  - `:bookmark`
  - `:rvsp`
  - `:in_reply_to`
  - `:repost_of`
  - `:like_of`
  - `:video`
  - `:photo`
  """
  @spec discover(any()) :: post_type()
  def discover(post)

  def discover(m = %{rvsp: value}) do
    case valid_rvsp_value(value) do
      true -> :rvsp
      _ -> pop_and_continue(:rvsp, m)
    end
  end

  def discover(m = %{"in-reply-to": url}) do
    case uri_valid?(url) do
      true -> :in_reply_to
      _ -> pop_and_continue("in-reply-to", m)
    end
  end

  def discover(m = %{"repost-of": url}) do
    case uri_valid?(url) do
      true -> :repost_of
      _ -> pop_and_continue("repost-of", m)
    end
  end

  def discover(m = %{"bookmark-of": url}) do
    case uri_valid?(url) do
      true -> :bookmark
      _ -> pop_and_continue("bookmark-of", m)
    end
  end

  def discover(m = %{"like-of": url}) do
    case uri_valid?(url) do
      true -> :like_of
      _ -> pop_and_continue("like-of", m)
    end
  end

  def discover(m = %{video: url}) do
    case uri_valid?(url) do
      true -> :video
      _ -> pop_and_continue(:video, m)
    end
  end

  def discover(m = %{photo: url}) do
    case uri_valid?(url) do
      true -> :photo
      _ -> pop_and_continue(:photo, m)
    end
  end

  def discover(%{name: name, content: content}) when content != "" do
    case is_name_a_title?(name, content) do
      true -> :article
      _ -> :note
    end
  end

  def discover(%{name: name, summary: content}) do
    case is_name_a_title?(name, content) do
      true -> :article
      _ -> :note
    end
  end

  def discover(_) do
    :note
  end

  @doc """
  Determine whether the name property represents an explicit title.

  see [one python implementation](https://github.com/kylewm/mf2util/blob/6e4c0dc904475b05381f618af9046e044a6c5f99/mf2util.py#L395)
  because the official algo explanation is not clear at all (to me). I took documentation (and unit tests) from it:

  Typically when parsing an h-entry, we check whether `p-name == e-content (value)`.
  If they are non-equal, then p-name likely represents a title.

  However, occasionally we come across an h-entry that does not
  provide an explicit p-name. In this case, the name is
  automatically generated by converting the entire h-entry content
  to plain text. This definitely does not represent a title, and
  looks very bad when displayed as such.

  To handle this case, we broaden the equality check to see if
  content is a subset of name. We also strip out non-alphanumeric
  characters just to make the check a little more forgiving.
  """
  @spec is_name_a_title?(String.t(), String.t()) :: boolean()
  def is_name_a_title?(name, content)

  def is_name_a_title?(name, nil) when name != nil do
    true
  end

  def is_name_a_title?(nil, _) do
    false
  end

  def is_name_a_title?(name, content) do
    not String.contains?(
      strip_spaces_and_punctuation(name),
      strip_spaces_and_punctuation(content)
    )
  end

  # Case-insensitive membership test against the RSVP values allowed by the
  # h-entry spec. Crashes (FunctionClauseError in String.downcase) on
  # non-binary input, which is assertive and intended.
  defp valid_rvsp_value(value) do
    String.downcase(value) in ["yes", "no", "maybe", "interested"]
  end

  # String keys ("in-reply-to", ...) are converted with to_existing_atom —
  # safe here because these atoms already exist from the pattern matches above.
  defp pop_and_continue(key, map) when is_bitstring(key) do
    String.to_existing_atom(key)
    |> pop_and_continue(map)
  end

  # Remove the property that failed validation and re-run discovery on the rest.
  defp pop_and_continue(key, map) do
    {_, new_map} = Map.pop(map, key)
    discover(new_map)
  end

  # Normalize for the title-vs-content comparison: lowercase, then drop all
  # punctuation and whitespace.
  defp strip_spaces_and_punctuation(value) do
    String.downcase(value)
    |> String.replace(~r"[[:punct:]]", "")
    |> String.replace(~r"[[:space:]]", "")
  end

  # A URL is considered valid when it has a scheme and a dotted host.
  defp uri_valid?(url) do
    %URI{scheme: scheme, host: host} = URI.parse(url)
    scheme != nil && host != nil && host =~ "."
  end
end
|
lib/last_crusader/micropub/post_type_discovery.ex
| 0.824744 | 0.571826 |
post_type_discovery.ex
|
starcoder
|
defmodule Structex do
  @doc """
  Calculates expected distortion response by equivalent linearization.

      iex> model =
      ...>   fn %Tensorex{shape: [2]} = distortion ->
      ...>     mass = Tensorex.from_list([[10.2, 0], [0, 20.4]])
      ...>     k0 = 88.1 - distortion[[0]] * 10
      ...>     k1 = 165.2 - distortion[[1]] * 15
      ...>     stiffness = Tensorex.from_list([[k0, -k0], [-k0, k0 + k1]])
      ...>     {eigens, _} = Structex.Modal.normal_modes(mass, stiffness)
      ...>     damping_ratio = 0.05 + distortion[[0]] * 0.05
      ...>     damping = Structex.Modal.stiffness_propotional_damping(stiffness, min(eigens[[0, 0]], eigens[[1, 1]]), damping_ratio)
      ...>     {mass, damping, stiffness}
      ...>   end
      ...>
      ...> response_spectrum =
      ...>   fn natural_period, damping_ratio ->
      ...>     fh = 1.5 / (1 + 10 * damping_ratio)
      ...>     Structex.Load.Seismic.normalized_acceleration_response_spectrum(3.2 * 1.5, 0.16, 0.64).(natural_period) * fh
      ...>   end
      ...>
      ...> Structex.limit_strength_response(model, Tensorex.from_list([0, 0]), response_spectrum, :cqc)
      %Tensorex{data: %{[0] => 0.2894516267339246, [1] => 0.15860082111575682}, shape: [2]}
  """
  @spec limit_strength_response(
          (distortion :: Tensorex.t() ->
             {mass :: Tensorex.t(), damping :: Tensorex.t(), stiffness :: Tensorex.t()}),
          Tensorex.t(),
          (natural_period :: number, damping_ratio :: number -> number),
          :srss | :cqc,
          number
        ) :: Tensorex.t()
  def limit_strength_response(
        model,
        %Tensorex{shape: [_]} = initial_distortion,
        acceleration_spectrum,
        superimpose_method,
        tolerance \\ 1.0e-15
      )
      when is_function(model, 1) and is_function(acceleration_spectrum, 2) and
             superimpose_method in [:srss, :cqc] do
    # Evaluate the (possibly nonlinear) model at the current distortion
    # estimate; all three system matrices must share the same square shape.
    {
      %Tensorex{shape: [degrees, degrees]} = mass,
      %Tensorex{shape: [degrees, degrees]} = damping,
      %Tensorex{shape: [degrees, degrees]} = stiffness
    } = model.(initial_distortion)

    response =
      Structex.Modal.linear_modal_response(
        mass,
        damping,
        stiffness,
        mass |> Tensorex.Operator.multiply(Tensorex.fill([degrees], 1), [{1, 0}]),
        acceleration_spectrum,
        superimpose_method
      )

    # Iterate until the distortion estimate converges within `tolerance`.
    if Tensorex.in_tolerance?(initial_distortion, response, tolerance) do
      response
    else
      # BUG FIX: propagate the caller's `tolerance` into the recursive call.
      # Previously the recursion omitted the last argument, so any custom
      # tolerance was silently replaced by the 1.0e-15 default after the
      # first iteration.
      limit_strength_response(
        model,
        response,
        acceleration_spectrum,
        superimpose_method,
        tolerance
      )
    end
  end

  @doc """
  Composes each matrix of the element into a system matrix.

  The first argument must be a list of two-element tuple where the first element is a 2-dimension
  list of matrices (2-rank `t:Tensorex.t/0`) and the second one is a list of node identifiers. The
  matrices at the same node identifier will be sumed up.

  If the second argument is passed, the element order respects what range to be used for each node
  identifier.

      iex> Structex.compose([
      ...>   {[[Tensorex.from_list([[10, 0], [0, 10]])]], [0]},
      ...>   {[[Tensorex.from_list([[ 5, 0], [0, 5]])]], [1]}
      ...> ])
      {
        %Tensorex{data: %{[0, 0] => 10,
                          [1, 1] => 10,
                          [2, 2] => 5,
                          [3, 3] => 5}, shape: [4, 4]},
        %{0 => 0..1, 1 => 2..3}
      }

      iex> Structex.compose([
      ...>   {
      ...>     [[Tensorex.from_list([[32.1, 0], [0, 42.1]]), Tensorex.from_list([[-32.1, 0], [0, -42.1]])], [Tensorex.from_list([[-32.1, 0], [0, -42.1]]), Tensorex.from_list([[32.1, 0], [0, 42.1]])]],
      ...>     [0, 1]
      ...>   },
      ...>   {
      ...>     [[Tensorex.from_list([[24 , 0], [0, 14 ]]), Tensorex.from_list([[-24 , 0], [0, -14 ]])], [Tensorex.from_list([[-24 , 0], [0, -14 ]]), Tensorex.from_list([[24 , 0], [0, 14 ]])]],
      ...>     [1, 2]
      ...>   },
      ...>   {
      ...>     [[Tensorex.from_list([[63.1, 0], [0, 55.3]])]],
      ...>     [0]
      ...>   }
      ...> ])
      {
        %Tensorex{data: %{[0, 0] => 95.2, [0, 2] => -32.1,
                          [1, 1] => 97.4, [1, 3] => -42.1,
                          [2, 0] => -32.1, [2, 2] => 56.1, [2, 4] => -24,
                          [3, 1] => -42.1, [3, 3] => 56.1, [3, 5] => -14,
                          [4, 2] => -24 , [4, 4] => 24,
                          [5, 3] => -14 , [5, 5] => 14}, shape: [6, 6]},
        %{0 => 0..1, 1 => 2..3, 2 => 4..5}
      }

      iex> Structex.compose([
      ...>   {
      ...>     [[Tensorex.from_list([[32.1, 0], [0, 42.1]]), Tensorex.from_list([[-32.1, 0], [0, -42.1]])], [Tensorex.from_list([[-32.1, 0], [0, -42.1]]), Tensorex.from_list([[32.1, 0], [0, 42.1]])]],
      ...>     [0, 1]
      ...>   },
      ...>   {
      ...>     [[Tensorex.from_list([[24 , 0], [0, 14 ]]), Tensorex.from_list([[-24 , 0], [0, -14 ]])], [Tensorex.from_list([[-24 , 0], [0, -14 ]]), Tensorex.from_list([[24 , 0], [0, 14 ]])]],
      ...>     [1, 2]
      ...>   },
      ...>   {
      ...>     [[Tensorex.from_list([[ 5 , 0], [0, 8 ]]), Tensorex.from_list([[ -5 , 0], [0, -8 ]])], [Tensorex.from_list([[ -5 , 0], [0, -8 ]]), Tensorex.from_list([[ 5 , 0], [0, 8 ]])]],
      ...>     [3, 2]
      ...>   },
      ...>   {
      ...>     [[Tensorex.from_list([[ 3.9, 0], [0, 6.5]]), Tensorex.from_list([[ -3.9, 0], [0, -6.5]])], [Tensorex.from_list([[ -3.9, 0], [0, -6.5]]), Tensorex.from_list([[ 3.9, 0], [0, 6.5]])]],
      ...>     [4, 5]
      ...>   },
      ...>   {
      ...>     [[Tensorex.from_list([[63.1, 0], [0, 55.3]])]],
      ...>     [0]
      ...>   }
      ...> ], %{1 => 0..1, 2 => 2..3})
      {
        %Tensorex{data: %{[0, 0] => 56.1, [0, 2] => -24, [0, 4] => -32.1,
                          [1, 1] => 56.1, [1, 3] => -14, [1, 5] => -42.1,
                          [2, 0] => -24 , [2, 2] => 29, [2, 6] => -5,
                          [3, 1] => -14 , [3, 3] => 22, [3, 7] => -8,
                          [4, 0] => -32.1, [4, 4] => 95.2,
                          [5, 1] => -42.1, [5, 5] => 97.4,
                          [6, 2] => -5, [6, 6] => 5,
                          [7, 3] => -8, [7, 7] => 8,
                          [ 8, 8] => 3.9, [ 8, 10] => -3.9,
                          [ 9, 9] => 6.5, [ 9, 11] => -6.5,
                          [10, 8] => -3.9, [10, 10] => 3.9,
                          [11, 9] => -6.5, [11, 11] => 6.5}, shape: [12, 12]},
        %{0 => 4..5, 1 => 0..1, 2 => 2..3, 3 => 6..7, 4 => 8..9, 5 => 10..11}
      }
  """
  @spec compose(Enum.t(), %{term => Range.t()}) :: {Tensorex.t(), %{term => Range.t()}}
  def compose(matrix_and_node_ids, range_indices \\ %{}) do
    # First pass: expand every per-element sub-matrix into a full-size sparse
    # matrix positioned by its node pair, assigning index ranges to nodes as
    # they are first encountered.
    {elements, new_range_indices} =
      Enum.map_reduce(matrix_and_node_ids, range_indices, fn {matrices, nodes}, acc ->
        Stream.zip(matrices, nodes)
        |> Stream.map(fn {row, node} ->
          Stream.zip(row, nodes) |> Stream.map(&Tuple.insert_at(&1, 1, node))
        end)
        |> Stream.concat()
        |> Enum.map_reduce(acc, fn
          # Both nodes already have index ranges assigned.
          {%Tensorex{} = matrix, node1, node2}, ranges
          when is_map_key(ranges, node1) and is_map_key(ranges, node2) ->
            size = max(Enum.max(ranges[node1]), Enum.max(ranges[node2])) + 1

            {put_in(Tensorex.zero([size, size])[[ranges[node1], ranges[node2]]], matrix),
             ranges}

          # Only the row node is known; allocate a fresh range for node2.
          # NOTE(review): there is no clause for "node2 known, node1 unknown,
          # node1 != node2" — such input raises FunctionClauseError. Confirm
          # whether element ordering is guaranteed to avoid that case.
          {%Tensorex{shape: [degree | _]} = matrix, node1, node2}, ranges
          when is_map_key(ranges, node1) ->
            max_index = Map.values(ranges) |> Stream.map(&Enum.max/1) |> Enum.max(fn -> -1 end)
            new_max_index = max_index + degree
            range = (max_index + 1)..new_max_index
            size = new_max_index + 1

            {
              put_in(Tensorex.zero([size, size])[[ranges[node1], range]], matrix),
              Map.put(ranges, node2, range)
            }

          # Diagonal block of a node seen for the first time.
          {%Tensorex{shape: [degree | _]} = matrix, node, node}, ranges ->
            max_index = Map.values(ranges) |> Stream.map(&Enum.max/1) |> Enum.max(fn -> -1 end)
            new_max_index = max_index + degree
            range = (max_index + 1)..new_max_index
            size = new_max_index + 1

            {
              put_in(Tensorex.zero([size, size])[[range, range]], matrix),
              Map.put(ranges, node, range)
            }
        end)
      end)

    # Second pass: sum the expanded matrices, reshaping the smaller operand up
    # to the larger one so partially-built matrices of different sizes add.
    composed =
      Stream.concat(elements)
      |> Enum.reduce(fn
        %Tensorex{shape: [deg1 | _]} = element1, %Tensorex{shape: [deg2 | _] = shape} = element2
        when deg1 < deg2 ->
          element1 |> Tensorex.reshape(shape) |> Tensorex.Operator.add(element2)

        %Tensorex{shape: [degree | _]} = element1, %Tensorex{shape: [degree | _]} = element2 ->
          element2 |> Tensorex.Operator.add(element1)

        %Tensorex{shape: shape} = element1, element2 ->
          element2 |> Tensorex.reshape(shape) |> Tensorex.Operator.add(element1)
      end)

    {composed, new_range_indices}
  end

  @doc """
  The standard acceleration due to gravity on the surface of the earth.
  """
  @spec standard_gravity_acceleration() :: float
  def standard_gravity_acceleration(), do: 9.80665
end
|
lib/structex.ex
| 0.704465 | 0.541712 |
structex.ex
|
starcoder
|
# Protobuf map-entry message for the `devices` field of
# `Tensorflow.Profiler.Trace` (proto3 `map<uint32, Device>`). Appears to be
# generated code (file path ends in .pb.ex) — edit the .proto source instead.
defmodule Tensorflow.Profiler.Trace.DevicesEntry do
  @moduledoc false
  use Protobuf, map: true, syntax: :proto3

  @type t :: %__MODULE__{
          key: non_neg_integer,
          value: Tensorflow.Profiler.Device.t() | nil
        }

  defstruct [:key, :value]

  # Field numbers mirror the .proto definition; do not renumber.
  field(:key, 1, type: :uint32)
  field(:value, 2, type: Tensorflow.Profiler.Device)
end
# Top-level trace container: a map of device_id => Device plus the list of
# trace events. Appears to be generated protobuf code (.pb.ex).
defmodule Tensorflow.Profiler.Trace do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          devices: %{non_neg_integer => Tensorflow.Profiler.Device.t() | nil},
          trace_events: [Tensorflow.Profiler.TraceEvent.t()]
        }

  defstruct [:devices, :trace_events]

  # NOTE: field number jumps from 1 to 4 — numbers 2-3 are presumably
  # reserved/removed in the .proto source; keep the gap.
  field(:devices, 1,
    repeated: true,
    type: Tensorflow.Profiler.Trace.DevicesEntry,
    map: true
  )

  field(:trace_events, 4, repeated: true, type: Tensorflow.Profiler.TraceEvent)
end
# Protobuf map-entry message for the `resources` field of
# `Tensorflow.Profiler.Device` (proto3 `map<uint32, Resource>`). Appears to be
# generated code (.pb.ex).
defmodule Tensorflow.Profiler.Device.ResourcesEntry do
  @moduledoc false
  use Protobuf, map: true, syntax: :proto3

  @type t :: %__MODULE__{
          key: non_neg_integer,
          value: Tensorflow.Profiler.Resource.t() | nil
        }

  defstruct [:key, :value]

  field(:key, 1, type: :uint32)
  field(:value, 2, type: Tensorflow.Profiler.Resource)
end
defmodule Tensorflow.Profiler.Device do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
name: String.t(),
device_id: non_neg_integer,
resources: %{
non_neg_integer => Tensorflow.Profiler.Resource.t() | nil
}
}
defstruct [:name, :device_id, :resources]
field(:name, 1, type: :string)
field(:device_id, 2, type: :uint32)
field(:resources, 3,
repeated: true,
type: Tensorflow.Profiler.Device.ResourcesEntry,
map: true
)
end
defmodule Tensorflow.Profiler.Resource do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
name: String.t(),
resource_id: non_neg_integer
}
defstruct [:name, :resource_id]
field(:name, 1, type: :string)
field(:resource_id, 2, type: :uint32)
end
defmodule Tensorflow.Profiler.TraceEvent.ArgsEntry do
@moduledoc false
use Protobuf, map: true, syntax: :proto3
@type t :: %__MODULE__{
key: String.t(),
value: String.t()
}
defstruct [:key, :value]
field(:key, 1, type: :string)
field(:value, 2, type: :string)
end
defmodule Tensorflow.Profiler.TraceEvent do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
device_id: non_neg_integer,
resource_id: non_neg_integer,
name: String.t(),
timestamp_ps: non_neg_integer,
duration_ps: non_neg_integer,
args: %{String.t() => String.t()}
}
defstruct [
:device_id,
:resource_id,
:name,
:timestamp_ps,
:duration_ps,
:args
]
field(:device_id, 1, type: :uint32)
field(:resource_id, 2, type: :uint32)
field(:name, 3, type: :string)
field(:timestamp_ps, 9, type: :uint64)
field(:duration_ps, 10, type: :uint64)
field(:args, 11,
repeated: true,
type: Tensorflow.Profiler.TraceEvent.ArgsEntry,
map: true
)
end
|
lib/tensorflow/core/protobuf/trace_events.pb.ex
| 0.776072 | 0.519156 |
trace_events.pb.ex
|
starcoder
|
defmodule Squabble.Server do
  @moduledoc """
  Implementation for the squabble server

  Raft-style leader election: nodes announce candidacy for a term, vote at
  most once per term, and a candidate with a majority broadcasts itself as
  the new leader. Leadership status is cached in an ETS table.
  """

  alias Squabble.PG
  alias Squabble.State

  require Logger

  @behaviour Squabble.Leader

  @type debug() :: map()

  # ETS table used to cache whether the local node is currently the leader.
  @key :squabble
  # How long (ms) to wait before checking whether an election has stalled.
  @check_election_timeout 1500

  @doc """
  Get the winner subscription notices
  """
  def winner_subscriptions(state) do
    state.subscriptions ++ [Squabble.Server]
  end

  @doc """
  Send back debug information from the squabble cluster
  """
  @spec debug(State.t()) :: [debug()]
  def debug(state) do
    members = PG.members(others: true)

    debug_info =
      Enum.map(members, fn member ->
        GenServer.call(member, :state)
      end)

    # Tag the local state with its node name and prepend it to the remote states.
    [Map.put(state, :node, node()) | debug_info]
  end

  @impl true
  def leader_selected(_term) do
    :ets.insert(@key, {:is_leader?, true})
  end

  @impl true
  def node_down(), do: :ok

  @doc """
  Check for a leader already in the cluster
  """
  @spec look_for_leader(State.t()) :: {:ok, State.t()}
  def look_for_leader(state) do
    Logger.debug("Checking for a current leader.", type: :squabble)

    PG.broadcast([others: true], fn pid ->
      Squabble.leader_check(pid)
    end)

    {:ok, state}
  end

  @doc """
  Reply to the leader check if the node is a leader
  """
  @spec leader_check(State.t(), pid()) :: {:ok, State.t()}
  def leader_check(state, pid) do
    case state.state do
      "leader" ->
        Squabble.notify_of_leader(pid, state.term)
        {:ok, state}

      _ ->
        {:ok, state}
    end
  end

  @doc """
  Try to elect yourself as the leader
  """
  @spec start_election(State.t(), integer()) :: {:ok, State.t()}
  def start_election(state, term) do
    Logger.debug(fn ->
      "Starting an election for term #{term}, announcing candidacy"
    end, type: :squabble)

    case check_term_newer(state, term) do
      {:ok, :newer} ->
        if state.size == 1 do
          # NOTE(review): the single-node path wins with term 1 rather than
          # `term` — confirm this is intended before changing.
          voted_leader(state, 1)
        else
          PG.broadcast(fn pid ->
            Squabble.announce_candidate(pid, term)
          end)

          # Schedule a check so a stalled election can be restarted.
          Process.send_after(
            self(),
            {:election, :check_election_status, term},
            @check_election_timeout
          )

          {:ok, %{state | highest_seen_term: term}}
        end

      {:error, :same} ->
        Logger.debug(fn ->
          "Someone already won this round, not starting"
        end, type: :squabble)

        {:ok, state}

      {:error, :older} ->
        Logger.debug(fn ->
          "This term has already completed, not starting"
        end, type: :squabble)

        {:ok, state}
    end
  end

  @doc """
  Vote for the leader
  """
  @spec vote_leader(State.t(), pid(), integer()) :: {:ok, State.t()}
  def vote_leader(state, pid, term) do
    Logger.debug(fn ->
      "Received ballot for term #{term}, from #{inspect(pid)}, voting"
    end, type: :squabble)

    # Only vote when the term is new to us and we have not voted this term.
    with {:ok, :newer} <- check_term_newer(state, term),
         {:ok, :not_voted} <- check_voted(state) do
      Squabble.vote_for(pid, term)

      {:ok, %{state | voted_for: pid, highest_seen_term: term}}
    else
      {:error, :same} ->
        Logger.debug(fn ->
          "Received a vote for the same term"
        end, type: :squabble)

        {:ok, state}

      _ ->
        {:ok, state}
    end
  end

  @doc """
  A vote came in from the cluster
  """
  @spec vote_received(State.t(), pid(), integer()) :: {:ok, State.t()}
  def vote_received(state, pid, term) do
    Logger.debug(fn ->
      "Received a vote for leader for term #{term}, from #{inspect(pid)}"
    end, type: :squabble)

    with {:ok, :newer} <- check_term_newer(state, term),
         {:ok, state} <- append_vote(state, pid),
         {:ok, :majority} <- check_majority_votes(state) do
      PG.broadcast([others: true], fn pid ->
        Squabble.new_leader(pid, term)
      end)

      voted_leader(state, term)
    else
      {:error, :same} ->
        Logger.debug("An old vote received - ignoring", type: :squabble)

        {:ok, state}

      {:error, :older} ->
        Logger.debug("An old vote received - ignoring", type: :squabble)

        {:ok, state}

      {:error, :not_enough} ->
        Logger.debug("Not enough votes to be a winner", type: :squabble)

        # Record the vote anyway; a later vote may reach majority.
        append_vote(state, pid)
    end
  end

  @doc """
  Set the winner as leader
  """
  @spec set_leader(State.t(), pid(), atom(), integer()) :: {:ok, State.t()}
  def set_leader(state, leader_pid, leader_node, term) do
    with :ok <- check_leader_different(state, leader_pid, leader_node, term),
         {:ok, :newer} <- check_term_newer(state, term) do
      Logger.debug(fn ->
        "Setting leader for term #{term} as #{inspect(leader_pid)}"
      end, type: :squabble)

      :ets.insert(@key, {:is_leader?, false})

      state =
        state
        |> Map.put(:term, term)
        |> Map.put(:highest_seen_term, term)
        |> Map.put(:leader_pid, leader_pid)
        |> Map.put(:leader_node, leader_node)
        |> Map.put(:state, "follower")
        |> Map.put(:votes, [])
        |> Map.put(:voted_for, nil)

      {:ok, state}
    else
      {:error, :same} ->
        # Same term but a different leader: resolve the conflict with a new term.
        Logger.debug(fn ->
          "Another node has the same term and is a leader, starting a new term"
        end, type: :squabble)

        Squabble.start_election(state.term + 1)

        {:ok, state}

      _ ->
        {:ok, state}
    end
  end

  # Returns :ok when the announced leader differs from the one we already
  # know about; a tagged error otherwise (callers treat non-:ok as "ignore").
  defp check_leader_different(state, leader_pid, leader_node, term) do
    case state.term == term && state.leader_pid == leader_pid && state.leader_node == leader_node do
      true ->
        {:error, :same_leader}

      false ->
        :ok
    end
  end

  @doc """
  A new node joined the cluster, assert leadership
  """
  @spec assert_leader(State.t()) :: {:ok, State.t()}
  def assert_leader(state) do
    case state.state do
      "leader" ->
        Logger.debug(fn ->
          "A new node came online, asserting leadership"
        end, type: :squabble)

        PG.broadcast([others: true], fn pid ->
          Squabble.new_leader(pid, state.term)
        end)

        Enum.each(winner_subscriptions(state), fn module ->
          module.leader_selected(state.term)
        end)

        {:ok, state}

      _ ->
        {:ok, state}
    end
  end

  @doc """
  A node went down, check if it was the leader
  """
  @spec node_down(State.t(), atom()) :: {:ok, State.t()}
  def node_down(state, node) do
    send_node_down_notice(state)

    case state.leader_node do
      ^node ->
        # The leader went away; trigger a fresh election.
        Squabble.start_election(state.term + 1)

        {:ok, state}

      _ ->
        {:ok, state}
    end
  end

  # Notify subscribers of the node loss when this node is the leader.
  defp send_node_down_notice(state) do
    case Squabble.node_is_leader?() do
      true ->
        Enum.each(winner_subscriptions(state), fn module ->
          module.node_down()
        end)

      false ->
        :ok
    end
  end

  @doc """
  Check if a term is newer than the local state
  """
  @spec check_term_newer(State.t(), integer()) ::
          {:ok, :newer} | {:error, :same} | {:error, :older}
  def check_term_newer(state, term) do
    cond do
      term > state.term ->
        {:ok, :newer}

      term == state.term ->
        {:error, :same}

      true ->
        {:error, :older}
    end
  end

  @doc """
  Record a vote from `pid` in the local state
  """
  @spec append_vote(State.t(), pid()) :: {:ok, State.t()}
  def append_vote(state, pid) do
    {:ok, %{state | votes: [pid | state.votes]}}
  end

  @doc """
  Check if the node has a majority of the votes
  """
  @spec check_majority_votes(State.t()) :: {:ok, :majority} | {:error, :not_enough}
  def check_majority_votes(state) do
    case length(state.votes) >= state.size / 2 do
      true ->
        {:ok, :majority}

      false ->
        {:error, :not_enough}
    end
  end

  @doc """
  Check if the node has voted in this term
  """
  @spec check_voted(State.t()) :: {:ok, :not_voted} | {:error, :voted}
  def check_voted(state) do
    case state.voted_for do
      nil ->
        {:ok, :not_voted}

      _ ->
        {:error, :voted}
    end
  end

  @doc """
  Mark the current node as the new leader for the term
  """
  @spec voted_leader(State.t(), integer()) :: {:ok, State.t()}
  def voted_leader(state, term) do
    Logger.debug(fn ->
      "Won the election for term #{term}"
    end, type: :squabble)

    {:ok, state} = set_leader(state, self(), node(), term)

    Enum.each(winner_subscriptions(state), fn module ->
      module.leader_selected(term)
    end)

    {:ok, %{state | state: "leader"}}
  end

  @doc """
  Check on the current term, and if it's stuck
  """
  @spec check_election_status(State.t(), integer()) :: {:ok, State.t()}
  def check_election_status(state, term) do
    Logger.debug(
      fn ->
        "Checking election status for term #{term}"
      end,
      type: :squabble
    )

    case state.term < term do
      true ->
        Logger.debug("Restarting the election, it seems frozen", type: :squabble)

        _check_election_status(state, term)

      false ->
        {:ok, state}
    end
  end

  # Restart the election only if this node is still a candidate.
  defp _check_election_status(state, term) do
    case state.state do
      "candidate" ->
        Squabble.start_election(term + 1)

        {:ok, state}

      _ ->
        {:ok, state}
    end
  end
end
|
lib/squabble/server.ex
| 0.797911 | 0.430806 |
server.ex
|
starcoder
|
defmodule Grizzly.ZWave.SmartStart.MetaExtension.BootstrappingMode do
  @moduledoc """
  This extension is used to advertise the bootstrapping mode to use when
  including the node advertised in the provisioning list
  """

  @typedoc """
  The modes are:

  - `:security_2` - the node must be manually set to learn mode and follow the
    S2 bootstrapping instructions
  - `:smart_start` - the node will use S2 bootstrapping automatically using the
    SmartStart functionality
  """
  @behaviour Grizzly.ZWave.SmartStart.MetaExtension

  @type mode :: :security_2 | :smart_start

  @type t :: %__MODULE__{
          mode: mode()
        }

  @enforce_keys [:mode]
  defstruct mode: nil

  @doc """
  Create a `BootstrappingMode.t()`

  Returns `{:error, :invalid_mode}` for anything other than `:security_2` or
  `:smart_start`.
  """
  @spec new(mode()) :: {:ok, t()} | {:error, :invalid_mode}
  def new(mode) when mode in [:security_2, :smart_start] do
    {:ok, %__MODULE__{mode: mode}}
  end

  def new(_), do: {:error, :invalid_mode}

  @doc """
  Make a binary from a `BootstrappingMode.t()`
  """
  @impl true
  @spec to_binary(t()) :: {:ok, binary()}
  def to_binary(%__MODULE__{mode: mode}) do
    # Type 0x36 with the critical bit set, length 0x01, then the mode byte.
    {:ok, <<0x36::size(7), 1::size(1), 0x01, mode_to_byte(mode)>>}
  end

  @doc """
  Make a `BootstrappingMode.t()` from a binary

  The binary string for this extension requires the critical bit to be set and
  if it is not this function will return `{:error, :critical_bit_not_set}`.
  A well-formed header with an unknown mode byte returns
  `{:error, :invalid_mode}`.
  """
  @impl true
  # Fixed spec: the previous spec omitted the reachable :invalid_mode error
  # (valid header with an unrecognized mode byte).
  @spec from_binary(binary()) ::
          {:ok, t()} | {:error, :critical_bit_not_set | :invalid_binary | :invalid_mode}
  def from_binary(<<0x36::size(7), 1::size(1), 0x01, mode_byte>>) do
    case mode_from_byte(mode_byte) do
      {:ok, mode} ->
        new(mode)

      error ->
        error
    end
  end

  def from_binary(<<0x36::size(7), 0::size(1), _rest::binary>>) do
    {:error, :critical_bit_not_set}
  end

  def from_binary(_), do: {:error, :invalid_binary}

  defp mode_to_byte(:security_2), do: 0x00
  defp mode_to_byte(:smart_start), do: 0x01

  defp mode_from_byte(0x00), do: {:ok, :security_2}
  defp mode_from_byte(0x01), do: {:ok, :smart_start}
  defp mode_from_byte(_mode), do: {:error, :invalid_mode}
end
|
lib/grizzly/zwave/smart_start/meta_extension/bootstrapping_mode.ex
| 0.886911 | 0.422415 |
bootstrapping_mode.ex
|
starcoder
|
defmodule AWS.Directory do
@moduledoc """
Directory Service
Directory Service is a web service that makes it easy for you to setup and run
directories in the Amazon Web Services cloud, or connect your Amazon Web
Services resources with an existing self-managed Microsoft Active Directory.
This guide provides detailed information about Directory Service operations,
data types, parameters, and errors. For information about Directory Services
features, see [Directory Service](https://aws.amazon.com/directoryservice/) and the [Directory Service Administration
Guide](http://docs.aws.amazon.com/directoryservice/latest/admin-guide/what_is.html).
Amazon Web Services provides SDKs that consist of libraries and sample code for
various programming languages and platforms (Java, Ruby, .Net, iOS, Android,
etc.). The SDKs provide a convenient way to create programmatic access to
Directory Service and other Amazon Web Services services. For more information
about the Amazon Web Services SDKs, including how to download and install them,
see [Tools for Amazon Web Services](http://aws.amazon.com/tools/).
"""
alias AWS.Client
alias AWS.Request
# Static service descriptor consumed by AWS.Request for signing/routing;
# values mirror the Directory Service ("ds") 2015-04-16 JSON protocol.
def metadata do
  %AWS.ServiceMetadata{
    abbreviation: "Directory Service",
    api_version: "2015-04-16",
    content_type: "application/x-amz-json-1.1",
    credential_scope: nil,
    endpoint_prefix: "ds",
    global?: false,
    protocol: "json",
    service_id: "Directory Service",
    signature_version: "v4",
    signing_name: "ds",
    target_prefix: "DirectoryService_20150416"
  }
end
# NOTE(review): every operation below is a generated thin wrapper that posts
# the named JSON-RPC action via Request.request_post/5 — edit via the
# generator, not by hand.
@doc """
Accepts a directory sharing request that was sent from the directory owner
account.
"""
def accept_shared_directory(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "AcceptSharedDirectory", input, options)
end

@doc """
If the DNS server for your self-managed domain uses a publicly addressable IP
address, you must add a CIDR address block to correctly route traffic to and
from your Microsoft AD on Amazon Web Services.
*AddIpRoutes* adds this address block. You can also use *AddIpRoutes* to
facilitate routing traffic that uses public IP ranges from your Microsoft AD on
Amazon Web Services to a peer VPC.
Before you call *AddIpRoutes*, ensure that all of the required permissions have
been explicitly granted through a policy. For details about what permissions are
required to run the *AddIpRoutes* operation, see [Directory Service API Permissions: Actions, Resources, and Conditions
Reference](http://docs.aws.amazon.com/directoryservice/latest/admin-guide/UsingWithDS_IAM_ResourcePermissions.html).
"""
def add_ip_routes(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "AddIpRoutes", input, options)
end

@doc """
Adds two domain controllers in the specified Region for the specified directory.
"""
def add_region(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "AddRegion", input, options)
end

@doc """
Adds or overwrites one or more tags for the specified directory.
Each directory can have a maximum of 50 tags. Each tag consists of a key and
optional value. Tag keys must be unique to each resource.
"""
def add_tags_to_resource(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "AddTagsToResource", input, options)
end

@doc """
Cancels an in-progress schema extension to a Microsoft AD directory.
Once a schema extension has started replicating to all domain controllers, the
task can no longer be canceled. A schema extension can be canceled during any of
the following states; `Initializing`, `CreatingSnapshot`, and `UpdatingSchema`.
"""
def cancel_schema_extension(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "CancelSchemaExtension", input, options)
end

@doc """
Creates an AD Connector to connect to a self-managed directory.
Before you call `ConnectDirectory`, ensure that all of the required permissions
have been explicitly granted through a policy. For details about what
permissions are required to run the `ConnectDirectory` operation, see [Directory Service API Permissions: Actions, Resources, and Conditions
Reference](http://docs.aws.amazon.com/directoryservice/latest/admin-guide/UsingWithDS_IAM_ResourcePermissions.html).
"""
def connect_directory(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "ConnectDirectory", input, options)
end

@doc """
Creates an alias for a directory and assigns the alias to the directory.
The alias is used to construct the access URL for the directory, such as
`http://<alias>.awsapps.com`.
After an alias has been created, it cannot be deleted or reused, so this
operation should only be used when absolutely necessary.
"""
def create_alias(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "CreateAlias", input, options)
end

@doc """
Creates an Active Directory computer object in the specified directory.
"""
def create_computer(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "CreateComputer", input, options)
end

@doc """
Creates a conditional forwarder associated with your Amazon Web Services
directory.
Conditional forwarders are required in order to set up a trust relationship with
another domain. The conditional forwarder points to the trusted domain.
"""
def create_conditional_forwarder(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "CreateConditionalForwarder", input, options)
end

@doc """
Creates a Simple AD directory.
For more information, see [Simple Active Directory](https://docs.aws.amazon.com/directoryservice/latest/admin-guide/directory_simple_ad.html)
in the *Directory Service Admin Guide*.
Before you call `CreateDirectory`, ensure that all of the required permissions
have been explicitly granted through a policy. For details about what
permissions are required to run the `CreateDirectory` operation, see [Directory Service API Permissions: Actions, Resources, and Conditions
Reference](http://docs.aws.amazon.com/directoryservice/latest/admin-guide/UsingWithDS_IAM_ResourcePermissions.html).
"""
def create_directory(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "CreateDirectory", input, options)
end

@doc """
Creates a subscription to forward real-time Directory Service domain controller
security logs to the specified Amazon CloudWatch log group in your Amazon Web
Services account.
"""
def create_log_subscription(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "CreateLogSubscription", input, options)
end

@doc """
Creates a Microsoft AD directory in the Amazon Web Services Cloud.
For more information, see [Managed Microsoft AD](https://docs.aws.amazon.com/directoryservice/latest/admin-guide/directory_microsoft_ad.html)
in the *Directory Service Admin Guide*.
Before you call *CreateMicrosoftAD*, ensure that all of the required permissions
have been explicitly granted through a policy. For details about what
permissions are required to run the *CreateMicrosoftAD* operation, see
[Directory Service API Permissions: Actions, Resources, and Conditions Reference](http://docs.aws.amazon.com/directoryservice/latest/admin-guide/UsingWithDS_IAM_ResourcePermissions.html).
"""
def create_microsoft_ad(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "CreateMicrosoftAD", input, options)
end

@doc """
Creates a snapshot of a Simple AD or Microsoft AD directory in the Amazon Web
Services cloud.
You cannot take snapshots of AD Connector directories.
"""
def create_snapshot(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "CreateSnapshot", input, options)
end
# NOTE(review): generated JSON-RPC wrappers (create/delete/deregister/describe
# operations) — each simply forwards to Request.request_post/5.
@doc """
Directory Service for Microsoft Active Directory allows you to configure trust
relationships.
For example, you can establish a trust between your Managed Microsoft AD
directory, and your existing self-managed Microsoft Active Directory. This would
allow you to provide users and groups access to resources in either domain, with
a single set of credentials.
This action initiates the creation of the Amazon Web Services side of a trust
relationship between an Managed Microsoft AD directory and an external domain.
You can create either a forest trust or an external trust.
"""
def create_trust(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "CreateTrust", input, options)
end

@doc """
Deletes a conditional forwarder that has been set up for your Amazon Web
Services directory.
"""
def delete_conditional_forwarder(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "DeleteConditionalForwarder", input, options)
end

@doc """
Deletes an Directory Service directory.
Before you call `DeleteDirectory`, ensure that all of the required permissions
have been explicitly granted through a policy. For details about what
permissions are required to run the `DeleteDirectory` operation, see [Directory Service API Permissions: Actions, Resources, and Conditions
Reference](http://docs.aws.amazon.com/directoryservice/latest/admin-guide/UsingWithDS_IAM_ResourcePermissions.html).
"""
def delete_directory(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "DeleteDirectory", input, options)
end

@doc """
Deletes the specified log subscription.
"""
def delete_log_subscription(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "DeleteLogSubscription", input, options)
end

@doc """
Deletes a directory snapshot.
"""
def delete_snapshot(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "DeleteSnapshot", input, options)
end

@doc """
Deletes an existing trust relationship between your Managed Microsoft AD
directory and an external domain.
"""
def delete_trust(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "DeleteTrust", input, options)
end

@doc """
Deletes from the system the certificate that was registered for secure LDAP or
client certificate authentication.
"""
def deregister_certificate(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "DeregisterCertificate", input, options)
end

@doc """
Removes the specified directory as a publisher to the specified Amazon SNS
topic.
"""
def deregister_event_topic(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "DeregisterEventTopic", input, options)
end

@doc """
Displays information about the certificate registered for secure LDAP or client
certificate authentication.
"""
def describe_certificate(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "DescribeCertificate", input, options)
end

@doc """
Retrieves information about the type of client authentication for the specified
directory, if the type is specified.
If no type is specified, information about all client authentication types that
are supported for the specified directory is retrieved. Currently, only
`SmartCard` is supported.
"""
def describe_client_authentication_settings(%Client{} = client, input, options \\ []) do
  Request.request_post(
    client,
    metadata(),
    "DescribeClientAuthenticationSettings",
    input,
    options
  )
end

@doc """
Obtains information about the conditional forwarders for this account.
If no input parameters are provided for RemoteDomainNames, this request
describes all conditional forwarders for the specified directory ID.
"""
def describe_conditional_forwarders(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "DescribeConditionalForwarders", input, options)
end

@doc """
Obtains information about the directories that belong to this account.
You can retrieve information about specific directories by passing the directory
identifiers in the `DirectoryIds` parameter. Otherwise, all directories that
belong to the current account are returned.
This operation supports pagination with the use of the `NextToken` request and
response parameters. If more results are available, the
`DescribeDirectoriesResult.NextToken` member contains a token that you pass in
the next call to `DescribeDirectories` to retrieve the next set of items.
You can also specify a maximum number of return results with the `Limit`
parameter.
"""
def describe_directories(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "DescribeDirectories", input, options)
end

@doc """
Provides information about any domain controllers in your directory.
"""
def describe_domain_controllers(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "DescribeDomainControllers", input, options)
end

@doc """
Obtains information about which Amazon SNS topics receive status messages from
the specified directory.
If no input parameters are provided, such as DirectoryId or TopicName, this
request describes all of the associations in the account.
"""
def describe_event_topics(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "DescribeEventTopics", input, options)
end

@doc """
Describes the status of LDAP security for the specified directory.
"""
def describe_ldaps_settings(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "DescribeLDAPSSettings", input, options)
end

@doc """
Provides information about the Regions that are configured for multi-Region
replication.
"""
def describe_regions(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "DescribeRegions", input, options)
end

@doc """
Returns the shared directories in your account.
"""
def describe_shared_directories(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "DescribeSharedDirectories", input, options)
end

@doc """
Obtains information about the directory snapshots that belong to this account.
This operation supports pagination with the use of the *NextToken* request and
response parameters. If more results are available, the
*DescribeSnapshots.NextToken* member contains a token that you pass in the next
call to `DescribeSnapshots` to retrieve the next set of items.
You can also specify a maximum number of return results with the *Limit*
parameter.
"""
def describe_snapshots(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "DescribeSnapshots", input, options)
end

@doc """
Obtains information about the trust relationships for this account.
If no input parameters are provided, such as DirectoryId or TrustIds, this
request describes all the trust relationships belonging to the account.
"""
def describe_trusts(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "DescribeTrusts", input, options)
end
@doc """
Disables alternative client authentication methods for the specified directory.
"""
def disable_client_authentication(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DisableClientAuthentication", input, options)
end
@doc """
Deactivates LDAP secure calls for the specified directory.
"""
def disable_ldaps(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DisableLDAPS", input, options)
end
@doc """
Disables multi-factor authentication (MFA) with the Remote Authentication Dial
In User Service (RADIUS) server for an AD Connector or Microsoft AD directory.
"""
def disable_radius(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DisableRadius", input, options)
end
@doc """
Disables single-sign on for a directory.
"""
def disable_sso(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DisableSso", input, options)
end
@doc """
Enables alternative client authentication methods for the specified directory.
"""
def enable_client_authentication(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "EnableClientAuthentication", input, options)
end
@doc """
Activates the switch for the specific directory to always use LDAP secure calls.
"""
def enable_ldaps(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "EnableLDAPS", input, options)
end
@doc """
Enables multi-factor authentication (MFA) with the Remote Authentication Dial In
User Service (RADIUS) server for an AD Connector or Microsoft AD directory.
"""
def enable_radius(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "EnableRadius", input, options)
end
@doc """
Enables single sign-on for a directory.
Single sign-on allows users in your directory to access certain Amazon Web
Services services from a computer joined to the directory without having to
enter their credentials separately.
"""
def enable_sso(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "EnableSso", input, options)
end
@doc """
Obtains directory limit information for the current Region.
"""
def get_directory_limits(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetDirectoryLimits", input, options)
end
@doc """
Obtains the manual snapshot limits for a directory.
"""
def get_snapshot_limits(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetSnapshotLimits", input, options)
end
@doc """
For the specified directory, lists all the certificates registered for a secure
LDAP or client certificate authentication.
"""
def list_certificates(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListCertificates", input, options)
end
@doc """
Lists the address blocks that you have added to a directory.
"""
def list_ip_routes(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListIpRoutes", input, options)
end
@doc """
Lists the active log subscriptions for the Amazon Web Services account.
"""
def list_log_subscriptions(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListLogSubscriptions", input, options)
end
@doc """
Lists all schema extensions applied to a Microsoft AD Directory.
"""
def list_schema_extensions(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListSchemaExtensions", input, options)
end
@doc """
Lists all tags on a directory.
"""
def list_tags_for_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListTagsForResource", input, options)
end
@doc """
Registers a certificate for a secure LDAP or client certificate authentication.
"""
def register_certificate(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "RegisterCertificate", input, options)
end
@doc """
Associates a directory with an Amazon SNS topic.
This establishes the directory as a publisher to the specified Amazon SNS topic.
You can then receive email or text (SMS) messages when the status of your
directory changes. You get notified if your directory goes from an Active status
to an Impaired or Inoperable status. You also receive a notification when the
directory returns to an Active status.
"""
def register_event_topic(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "RegisterEventTopic", input, options)
end
@doc """
Rejects a directory sharing request that was sent from the directory owner
account.
"""
def reject_shared_directory(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "RejectSharedDirectory", input, options)
end
@doc """
Removes IP address blocks from a directory.
"""
def remove_ip_routes(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "RemoveIpRoutes", input, options)
end
@doc """
Stops all replication and removes the domain controllers from the specified
Region.
You cannot remove the primary Region with this operation. Instead, use the
`DeleteDirectory` API.
"""
def remove_region(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "RemoveRegion", input, options)
end
@doc """
Removes tags from a directory.
"""
def remove_tags_from_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "RemoveTagsFromResource", input, options)
end
@doc """
Resets the password for any user in your Managed Microsoft AD or Simple AD
directory.

You can reset the password for any user in your directory with the
following exceptions:

  * For Simple AD, you cannot reset the password for any user that is a
    member of either the **Domain Admins** or **Enterprise Admins** group
    except for the administrator user.

  * For Managed Microsoft AD, you can only reset the password for a user
    that is in an OU based off of the NetBIOS name that you typed when you
    created your directory. For example, you cannot reset the password for
    a user in the **Amazon Web Services Reserved** OU. For more information
    about the OU structure for a Managed Microsoft AD directory, see [What
    Gets Created](https://docs.aws.amazon.com/directoryservice/latest/admin-guide/ms_ad_getting_started_what_gets_created.html)
    in the *Directory Service Administration Guide*.
"""
def reset_user_password(%Client{} = client, input, options \\ []) do
  meta = metadata()
  Request.request_post(client, meta, "ResetUserPassword", input, options)
end
@doc """
Restores a directory using an existing directory snapshot.

When you restore a directory from a snapshot, any changes made to the
directory after the snapshot date are overwritten.

This action returns as soon as the restore operation is initiated. You can
monitor the progress of the restore by calling the `DescribeDirectories`
operation with the directory identifier; the restore is complete when the
**DirectoryDescription.Stage** value changes to `Active`.
"""
def restore_from_snapshot(%Client{} = client, input, options \\ []) do
  meta = metadata()
  Request.request_post(client, meta, "RestoreFromSnapshot", input, options)
end
@doc """
Shares a specified directory (`DirectoryId`) in your Amazon Web Services
account (directory owner) with another Amazon Web Services account
(directory consumer).

With this operation you can use your directory from any Amazon Web Services
account and from any Amazon VPC within an Amazon Web Services Region. When
you share your Managed Microsoft AD directory, Directory Service creates a
shared directory in the directory consumer account containing the metadata
to provide access to the directory within the directory owner account; the
shared directory is visible in all VPCs in the directory consumer account.

The `ShareMethod` parameter determines whether the specified directory can
be shared between accounts inside the same organization (`ORGANIZATIONS`)
or with any other account either inside or outside of the organization
(`HANDSHAKE`). The `ShareNotes` parameter is only used when `HANDSHAKE` is
called, which sends a directory sharing request to the directory consumer.
"""
def share_directory(%Client{} = client, input, options \\ []) do
  meta = metadata()
  Request.request_post(client, meta, "ShareDirectory", input, options)
end
@doc """
Applies a schema extension to a Microsoft AD directory.
"""
def start_schema_extension(%Client{} = client, input, options \\ []) do
  meta = metadata()
  Request.request_post(client, meta, "StartSchemaExtension", input, options)
end
@doc """
Stops the directory sharing between the directory owner and consumer
accounts.
"""
def unshare_directory(%Client{} = client, input, options \\ []) do
  meta = metadata()
  Request.request_post(client, meta, "UnshareDirectory", input, options)
end
@doc """
Updates a conditional forwarder that has been set up for your Amazon Web
Services directory.
"""
def update_conditional_forwarder(%Client{} = client, input, options \\ []) do
  meta = metadata()
  Request.request_post(client, meta, "UpdateConditionalForwarder", input, options)
end
@doc """
Adds or removes domain controllers to or from the directory.

Domain controllers are added or removed based on the difference between the
current value and the new value provided through this API call. It may take
up to 45 minutes for any new domain controllers to become fully active once
the requested number of domain controllers is updated; during this time you
cannot make another update request.
"""
def update_number_of_domain_controllers(%Client{} = client, input, options \\ []) do
  meta = metadata()
  Request.request_post(client, meta, "UpdateNumberOfDomainControllers", input, options)
end
@doc """
Updates the Remote Authentication Dial In User Service (RADIUS) server
information for an AD Connector or Microsoft AD directory.
"""
def update_radius(%Client{} = client, input, options \\ []) do
  meta = metadata()
  Request.request_post(client, meta, "UpdateRadius", input, options)
end
@doc """
Updates the trust that has been set up between your Managed Microsoft AD
directory and a self-managed Active Directory.
"""
def update_trust(%Client{} = client, input, options \\ []) do
  meta = metadata()
  Request.request_post(client, meta, "UpdateTrust", input, options)
end
@doc """
Directory Service for Microsoft Active Directory allows you to configure
and verify trust relationships.

This action verifies a trust relationship between your Managed Microsoft AD
directory and an external domain.
"""
def verify_trust(%Client{} = client, input, options \\ []) do
  meta = metadata()
  Request.request_post(client, meta, "VerifyTrust", input, options)
end
end
|
lib/aws/generated/directory.ex
| 0.8789 | 0.447098 |
directory.ex
|
starcoder
|
defmodule Ash.CiString do
  @moduledoc """
  Represents a case insensitive string.

  While some data layers are aware of case insensitive string types, in order for values
  of this type to be used in other parts of Ash Framework, it has to be embedded in a module.
  This allows us to implement the `Comparable` protocol for it.

  For the type implementation, see `Ash.Type.CiString`.
  """

  # :string  - the raw (or already-normalized) string value
  # :casted? - true when the value was case-normalized at construction time
  # :case    - requested casing (:lower | :upper), or nil when unspecified
  defstruct [:string, casted?: false, case: nil]

  # Sigil support: ~i"foo"l builds a lower-cased CiString, ~i"foo"u an
  # upper-cased one, and ~i"foo" (no modifier) leaves casing unspecified.
  def sigil_i(value, mods) do
    cond do
      ?l in mods ->
        new(value, :lower)

      ?u in mods ->
        new(value, :upper)

      true ->
        new(value)
    end
  end

  # JSON-encode as the plain (normalized) string rather than the struct.
  defimpl Jason.Encoder do
    def encode(ci_string, opts) do
      ci_string
      |> Ash.CiString.value()
      |> Jason.Encode.string(opts)
    end
  end

  # String interpolation/`to_string/1` yields the normalized value.
  defimpl String.Chars do
    def to_string(ci_string) do
      Ash.CiString.value(ci_string)
    end
  end

  defimpl Inspect do
    import Inspect.Algebra

    def inspect(%Ash.CiString{string: string}, opts) do
      concat(["#Ash.CiString<", to_doc(string, opts), ">"])
    end
  end

  # Builds a CiString. When a casing is given, the value is normalized
  # eagerly and marked as casted; the `value && ...` form passes nil through
  # untouched.
  def new(value, casing \\ nil) do
    case casing do
      :upper ->
        %Ash.CiString{casted?: true, string: value && String.upcase(value)}

      :lower ->
        %Ash.CiString{casted?: true, string: value && String.downcase(value)}

      nil ->
        %Ash.CiString{casted?: false, string: value}
    end
  end

  # Returns the string value, applying the requested casing lazily when the
  # stored string has not been normalized yet. The final clause covers both
  # already-casted values and values with no casing requested.
  def value(%Ash.CiString{string: value, casted?: false, case: :lower}) do
    value && String.downcase(value)
  end

  def value(%Ash.CiString{string: value, casted?: false, case: :upper}) do
    value && String.upcase(value)
  end

  def value(%Ash.CiString{string: value}) do
    value
  end

  # Case-insensitive three-way comparison (:lt | :eq | :gt), used by the
  # Comparable implementations defined alongside this module.
  def compare(left, right) do
    do_compare(to_comparable_string(left), to_comparable_string(right))
  end

  defp do_compare(left, right) when left < right, do: :lt
  defp do_compare(left, right) when left == right, do: :eq
  defp do_compare(_, _), do: :gt

  @doc "Returns the downcased value, only downcasing if it hasn't already been downcased."
  def to_comparable_string(value) when is_binary(value) do
    String.downcase(value)
  end

  # Already lower-cased at construction: no extra work needed.
  def to_comparable_string(%__MODULE__{case: :lower, casted?: true, string: value}) do
    value
  end

  def to_comparable_string(%__MODULE__{string: value}) do
    value && String.downcase(value)
  end

  def to_comparable_string(nil), do: nil
end
# Comparable protocol wiring (via the Comp library): lets CiStrings be
# compared case-insensitively against plain strings and against each other.
use Comp

defcomparable left :: Ash.CiString, right :: BitString do
  Ash.CiString.compare(left, right)
end

defcomparable left :: Ash.CiString, right :: Ash.CiString do
  Ash.CiString.compare(left, right)
end
|
lib/ash/ci_string.ex
| 0.854582 | 0.525734 |
ci_string.ex
|
starcoder
|
defmodule CSQuery.FieldValue do
  @moduledoc """
  A representation of a field value pattern matcher for the AWS CloudSearch
  structured query syntax. If the `CSQuery.FieldValue` does not have a `name`,
  then all text and text-array fields will be searched. If it does have a
  `name`, then only the named field will be searched.
  """
  alias CSQuery.Range, as: CSRange
  alias CSQuery.{Expression, OperatorOption}

  @typedoc "Valid value types for a `t:CSQuery.FieldValue.t/0`."
  @type values ::
          String.t()
          | CSRange.t()
          | Expression.t()
          | number
          | DateTime.t()

  @typedoc """
  Valid name types for a `CSQuery.FieldValue` struct.
  Note that `t:CSQuery.Expression.operators/0` are not valid field names as
  they are reserved keywords in the structured query syntax.
  """
  @type names :: nil | String.t() | atom

  @typedoc "The struct for `CSQuery.FieldValue`."
  @type t :: %__MODULE__{name: names, value: values}

  @enforce_keys [:value]
  defstruct [:name, :value]

  @doc """
  Provide an unnamed `CSQuery.FieldValue` struct for the value provided.

  If the value provided is a `CSQuery.FieldValue` struct, it will be returned.
  If the value provided is a map, the first key and value combination will be
  used to construct the `CSQuery.FieldValue` struct.

      iex> new(%{title: "Star Wars", year: 1990})
      %CSQuery.FieldValue{name: :title, value: "Star Wars"}

  See `new/2` for more information.
  """
  @spec new(t | map | values | Range.t()) :: t
  def new(%__MODULE__{} = value), do: value

  # Any other struct (e.g. DateTime, CSQuery.Range) is treated as a bare
  # value, not as a key/value container.
  @spec new(struct) :: t
  def new(%_mod{} = value), do: new(nil, value)

  @spec new(%{required(atom | String.t()) => values | nil | Range.t()}) :: t | nil
  def new(%{} = value) do
    # Use the map's first key as the field name; an empty map yields nil.
    with [name | _] <- Map.keys(value), %{^name => value} <- value do
      new(name, value)
    else
      _ -> nil
    end
  end

  @spec new(values | nil | Range.t()) :: t
  def new(value), do: new(nil, value)

  @doc """
  Provide an optionally named `CSQuery.FieldValue` struct. The `value` may be
  one of `t:values/0`, `nil` (which will be converted to `""`), or a `Range`
  (which will be converted to a `CSQuery.Range`).

  If a `CSQuery.FieldValue` struct is provided, a struct with the `name`
  replaced will be returned, effectively naming or renaming the field.

      iex> new(:plot, %CSQuery.FieldValue{value: "war"})
      %CSQuery.FieldValue{name: :plot, value: "war"}

      iex> new(:plot, %CSQuery.FieldValue{name: :title, value: "war"})
      %CSQuery.FieldValue{name: :plot, value: "war"}

  As a special case, when one of the `t:CSQuery.Expression.operators/0` is
  provided as the `name`, a `CSQuery.Expression` will be built inside of a
  `CSQuery.FieldValue` struct.
  """
  def new(name, value)

  # Compile-time generation: one clause per reserved operator, so that e.g.
  # `new(:and, opts)` builds a CSQuery.Expression rather than a field named
  # `:and`. These clauses must precede the generic `new(name, value)` ones.
  for operator <- CSQuery.operators() do
    @spec new(unquote(operator), keyword) :: Expression.t()
    def new(unquote(operator), value), do: new(Expression.new(unquote(operator), value))
  end

  @spec new(names, t) :: t
  def new(name, %__MODULE__{} = value), do: %__MODULE__{value | name: name}

  @spec new(names, values) :: t
  def new(name, value) do
    %__MODULE__{name: name, value: convert(value)}
  end

  @doc """
  Convert the `t:CSQuery.FieldValue.t/0` to a string.
  """
  @spec to_value(t) :: String.t()
  def to_value(%__MODULE__{name: name, value: value}) do
    # Filtering out nil drops the name (and the joining colon) for unnamed
    # fields, and drops the value for OperatorOption (formatted as nil).
    [name, format(value)]
    |> Enum.filter(& &1)
    |> Enum.join(":")
  end

  # Normalize values at construction time.
  defp convert(nil), do: ""
  defp convert(%Range{} = value), do: CSRange.new(value)
  defp convert({_first, _last} = value), do: CSRange.new(value)
  defp convert(value), do: value

  # Render a stored value in structured-query syntax.
  defp format(%CSRange{} = value), do: CSRange.to_value(value)
  defp format(%Expression{} = value), do: Expression.to_query(value)
  defp format(value) when is_number(value), do: to_string(value)
  defp format(%DateTime{} = value), do: "'#{DateTime.to_iso8601(value)}'"
  defp format(%OperatorOption{}), do: nil

  defp format(value) when is_binary(value) do
    # Already-parenthesized expressions and range literals pass through
    # verbatim; everything else is quoted and escaped.
    cond do
      is_parenthesized?(value) -> value
      CSRange.is_range_string?(value) -> value
      true -> "'#{escape(value)}'"
    end
  end

  # Escape backslashes first so the quote-escaping pass cannot double-escape.
  defp escape(value), do: String.replace(String.replace(value, "\\", "\\\\"), "'", "\\'")

  defp is_parenthesized?(value) do
    String.starts_with?(value, "(") && String.ends_with?(value, ")")
  end
end
|
lib/csquery/field_value.ex
| 0.906798 | 0.694173 |
field_value.ex
|
starcoder
|
defmodule Cartograf do
  # Atom tags used to label mapping directives collected from a `map` block.
  @let :let
  @drop :drop
  @nest :nest
  @const :const

  @moduledoc """
  Cartograf is a set of elixir macros for mapping fields from
  one struct to another.
  The goal is to make these struct-to-struct translations more
  robust and less cumbersome to write and maintain.
  ## Basic Form
  The basic form for using this module is of the form:
  ```elixir
  map Proj.A, Proj.B, :one_to_one do
  let :a, :aa
  let :b, :bb
  let :c, :cc
  let :d, :dd
  end
  ```
  This structure would create a function called `one_to_one/1`
  within whatever module the macro was invoked within.
  The `one_to_one/1` function would expect a struct of type `Proj.A`,
  and a struct of type `Proj.B` would be returned.
  This Map generates a function that contains a native elixir struct syntax for
  the destination struct. For instance, for the *Basic Example*, the following
  function is generated.
  ```
  def one_to_one(bnd = %Proj.A{}) do
  %Proj.B{aa: bnd.a, bb: bnd.b, cc: bnd.c, dd: bnd.d}
  end
  ```
  # Design Philosophy
  `cartograf` is supposed to be a tool, not a hazard.
  The point of this project is to create robust mappings from
  one struct to another. As such, there are a few safeties in
  place to protect the developer.
  * `map()` does require that its input struct is of the
  correct type. The function generated leverages pattern
  matching on the argument to ensure that the struct
  type is the one declared when the map was specified.
  * All input fields *should* be handled. Each `map()`
  will report any unmapped fields as a warning at compile
  time. This can also be configured to not report a warning, or fail
  compilation; for more info see Configuration. In order to remove these
  warnings, a `drop(input_key)` should be added to the `map()`.
  The main purpose for this is to catch
  instances where developers add fields to structs, but fail
  to update the maps.
  * Maps do not automatically map identical keys from one struct
  to another by default. To enable this, the `auto: true` option must
  be set in the `map`'s options.
  # Configuration
  Cartograf by default will warn about any unmapped fields; to change this behaviour
  the following configuration changes can be made.
  * `config :cartograf, on_missing_key: :warn`
  Log warning for each unmapped field
  * `config :cartograf, on_missing_key: :ignore`
  Ignore unmapped fields, don't warn or throw
  * `config :cartograf, on_missing_key: :throw`
  Raise an exception on unmapped fields, halting compilation
  """

  defmacro __using__(_) do
    quote do
      import Cartograf
    end
  end

  # Normalize a `do` block body to a flat list of child AST nodes: a
  # multi-statement block arrives as {:__block__, _, elems}, a single
  # statement arrives bare.
  defp get_children({:__block__, _meta, elements}) do
    elements
  end

  defp get_children(element) do
    [element]
  end

  # Group directive tuples by kind into %{:let => [...], :const => [...], ...}.
  defp tokenize(children) do
    Enum.reduce(children, %{}, fn {atm, tup}, acc ->
      Map.update(acc, atm, [tup], fn val -> [tup | val] end)
    end)
  end

  # Fetch the directives of one kind, defaulting to an empty list.
  defp get_list(lst, atom) do
    Map.get(lst, atom, [])
  end

  # Build the AST for the destination struct literal (%To{...}), combining
  # `let` bindings (read from the bound input variable) and `const` values.
  defp make_struct_map(bindings, to, bound_var) do
    bound = Macro.var(bound_var, __MODULE__)

    const_mapping =
      Enum.reduce(get_list(bindings, @const), [], fn {t, v}, acc ->
        [{t, v} | acc]
      end)

    let_mapping =
      Enum.reduce(get_list(bindings, @let), [], fn {f, t}, acc ->
        [{t, quote(do: unquote(bound).unquote(f))} | acc]
      end)

    merged_mapping = let_mapping ++ const_mapping
    {:%, [], [to, {:%{}, [], merged_mapping}]}
  end

  # Return the source-struct keys not covered by any mapping (empty when the
  # source type is unknown, e.g. inside a nested map).
  defp verify_mappings(mapped_keys, from_t) do
    if(is_nil(from_t)) do
      []
    else
      from_keys = Map.keys(struct(from_t))

      not_mapped =
        Enum.filter(from_keys, fn key -> not (key in mapped_keys) and key != :__struct__ end)

      not_mapped
    end
  end

  # This is doing the recursive work needed for nest: each {:nest, ...}
  # directive is replaced by a {:const, {key, nested_struct_ast}} while the
  # nested map's mapped/unmapped key lists are accumulated.
  defp expand_child_macros(children, binding) do
    Macro.postwalk(children, {[], []}, fn mappings, {mapped, not_mapped} ->
      mappings = Macro.expand(mappings, __ENV__)

      case mappings do
        {@nest, {key, nest_fn}} ->
          {ast, {nest_mapped, not_nest_mapped}} = nest_fn.(binding)
          {{@const, {key, ast}}, {nest_mapped ++ mapped, not_nest_mapped ++ not_mapped}}

        other ->
          {other, {mapped, not_mapped}}
      end
    end)
  end

  # All source keys accounted for: let sources, const/drop targets, plus any
  # keys already handled by nested maps.
  defp get_mapped_fields(mappings, additional_mapped) do
    Keyword.keys(get_list(mappings, @let)) ++
      Enum.map(get_list(mappings, @const), &elem(&1, 0)) ++
      Enum.map(get_list(mappings, @drop), &elem(&1, 0)) ++ additional_mapped
  end

  # When auto mapping is on, add a `let` for every key shared by both structs
  # that is not already mapped; explicit directives take precedence.
  defp auto_map_fields(true, mappings, mapped_nested, to_t, from_t) do
    to_atoms = Map.keys(struct(to_t))
    from_atoms = Map.keys(struct(from_t))
    # Get keys that are already mapped
    mapped = get_mapped_fields(mappings, mapped_nested)
    mapped = Enum.uniq(mapped)
    # Get shared keys between to and from
    shared = Enum.filter(to_atoms, fn key -> key in from_atoms end)
    # Get shared keys that are not mapped
    shared = Enum.filter(shared, fn key -> not (key in mapped) && key != :__struct__ end)
    new_lets = Keyword.new(Enum.map(shared, fn a -> {a, a} end))
    # Add let entries for missing shared keys
    Map.update(mappings, @let, new_lets, fn val ->
      Keyword.merge(val, new_lets)
    end)
  end

  defp auto_map_fields(false, mappings, _, _, _) do
    mappings
  end

  # Orchestrates one map body: expand nested maps, collect directives,
  # optionally auto-map, and build the struct AST plus mapped/unmapped keys.
  defp create_map(children, to_t, binding, auto?, from_t \\ nil) do
    {children, {mapped_n, _not_mapped_n}} = expand_child_macros(children, binding)
    mappings = tokenize(children)
    mappings = auto_map_fields(auto?, mappings, mapped_n, to_t, from_t)
    mapped = get_mapped_fields(mappings, mapped_n)
    mapped = Enum.uniq(mapped)
    not_mapped = verify_mappings(mapped, from_t)
    {make_struct_map(mappings, to_t, binding), {mapped, not_mapped}}
  end

  # Emit the configured diagnostic (warn/throw/ignore) for unmapped keys.
  defp report_not_mapped(not_mapped, name, env) do
    if(Enum.any?(not_mapped)) do
      msg =
        "In map \"#{name}\" the following source keys are not mapped: \n#{inspect(not_mapped)}"

      stack = Macro.Env.stacktrace(env)

      case Application.get_env(:cartograf, :on_missing_key, :warn) do
        :warn ->
          IO.warn(msg, stack)

        :throw ->
          reraise(Cartograf.MappingException, [message: msg], stack)

        :ignore ->
          nil

        # Unrecognized configuration value: complain about it, then fall
        # back to warning behaviour.
        u ->
          IO.warn(
            "Cartograf expected config field :on_missing_key to
be either :warn, :throw, or :ignore, got :#{u}",
            stack
          )

          IO.warn(msg, stack)
      end
    end
  end

  # Generate the mapping function AST (and optionally a `<name>_map` variant
  # that returns a plain map instead of a struct).
  defp map_internal(from_t, to_t, name, auto?, map?, children, env) do
    binding_raw = :carto
    binding = Macro.var(binding_raw, __MODULE__)
    {created_map, {_mapped, not_mapped}} = create_map(children, to_t, binding_raw, auto?, from_t)
    report_not_mapped(not_mapped, name, env)

    main =
      quote do
        def unquote(name)(unquote(binding) = %unquote(from_t){}) do
          unquote(created_map)
        end
      end

    if(map?) do
      map =
        quote do
          def unquote(:"#{name}_map")(unquote(binding) = %unquote(from_t){}) do
            Map.from_struct(unquote(created_map))
          end
        end

      {main, map}
    else
      main
    end
  end

  @doc """
  Creates a function in the current module for mapping from one
  struct to another.
  ```elixir
  defmodule A, do: defstruct [:a, :b, :c]
  defmodule B, do: defstruct [:aa, :bb, :cc]
  defmodule YourModule do
  use Cartograf
  map A, B, :a_to_b do
  let :a, :aa
  let :b, :bb
  let :c, :cc
  end
  end
  ```
  ```elixir
  iex> YourModule.a_to_b(%A{a: 1, b: "2", c: :d})
  %B{aa: 1, bb: "2", cc: :d}
  ```
  The options:
  * `auto: true` create bindings for all matching keys of the two
  structs which are not already mapped
  * `map: true` create a second analogous method `'name'_map` which will return
  a map instead of the struct (some libraries rely on creating a struct for
  you from a map of fields)
  """
  @spec map(module(), module(), atom, [], do: any()) :: any()
  defmacro map(from_t, to_t, name, opts \\ [], do: block) do
    children = get_children(block)
    from_t = Macro.expand(from_t, __CALLER__)
    to_t = Macro.expand(to_t, __CALLER__)
    auto? = Keyword.get(opts, :auto, false)
    map? = Keyword.get(opts, :map, false)
    map_internal(from_t, to_t, name, auto?, map?, children, __CALLER__)
  end

  @doc """
  Specify where a field in the input should be mapped to
  in the output.
  """
  @spec let(atom(), atom()) :: any()
  defmacro let(source_key, dest_key) do
    {@let, {source_key, dest_key}}
  end

  @doc """
  Allow for a field in the output to be set to a constant
  value.
  """
  @spec const(atom(), any()) :: any()
  defmacro const(dest_key, val) do
    {@const, {dest_key, val}}
  end

  @doc """
  Used to specify a nested map within `map()`.
  The resulting struct will have the type of to_t.
  No options are available for this macro.
  """
  @spec nest(atom(), module(), do: any()) :: any()
  defmacro nest(dest_key, to_t, do: block) do
    children = get_children(block)
    to_t = Macro.expand(to_t, __CALLER__)
    # Defer expansion: the enclosing map supplies the bound input variable.
    nest_scope = fn binding -> create_map(children, to_t, binding, false) end
    {@nest, {dest_key, nest_scope}}
  end

  @doc """
  Allow for a field from the input to be excluded from
  the output.
  Most useful when using `auto`, however it
  is recommended to use this for any non-mapped field.
  """
  @spec drop(atom()) :: any()
  defmacro drop(src_key) do
    {@drop, {src_key}}
  end
end
|
lib/cartograf.ex
| 0.785514 | 0.882782 |
cartograf.ex
|
starcoder
|
defmodule MangoPay.Refund do
  @moduledoc """
  Functions for MangoPay [refund](https://docs.mangopay.com/endpoints/v2.01/refunds#e316_the-refund-object).
  """
  use MangoPay.Query.Base

  set_path "refunds"

  @doc """
  Get a refund by its id.

  ## Examples

      {:ok, refund} = MangoPay.Refund.get(id)

  """
  def get(id), do: _get(id)

  @doc """
  Get a refund by its id, raising on failure.

  ## Examples

      refund = MangoPay.Refund.get!(id)

  """
  def get!(id), do: _get!(id)

  defmodule PayIn do
    @moduledoc """
    Refund creation scoped to MangoPay [pay-ins](https://docs.mangopay.com/endpoints/v2.01/refunds#e316_the-refund-object).
    """
    use MangoPay.Query.Base, "refunds"

    @doc """
    Create a refund for a pay in.

    `params` supports keys such as `"Tag"`, `"AuthorId"`, `"DebitedFunds"`
    (a map with `"Currency"` and `"Amount"`) and `"Fees"`.

    ## Examples

        {:ok, refund} = MangoPay.Refund.PayIn.create(pay_in_id, params)

    """
    def create(pay_in_id, params) do
      _create(params, [MangoPay.PayIn.path(pay_in_id), resource()])
    end

    @doc """
    Same as `create/2` but raises on failure.

    ## Examples

        refund = MangoPay.Refund.PayIn.create!(pay_in_id, params)

    """
    def create!(pay_in_id, params) do
      _create!(params, [MangoPay.PayIn.path(pay_in_id), resource()])
    end
  end

  defmodule Transfer do
    @moduledoc """
    Refund creation scoped to MangoPay [transfers](https://docs.mangopay.com/endpoints/v2.01/refunds#e316_the-refund-object).
    """
    use MangoPay.Query.Base, "refunds"

    @doc """
    Create a refund for a transfer.

    `params` supports keys such as `"Tag"` and `"AuthorId"`.

    ## Examples

        {:ok, refund} = MangoPay.Refund.Transfer.create(transfer_id, params)

    """
    def create(transfer_id, params) do
      _create(params, [MangoPay.Transfer.path(transfer_id), resource()])
    end

    @doc """
    Same as `create/2` but raises on failure.

    ## Examples

        refund = MangoPay.Refund.Transfer.create!(transfer_id, params)

    """
    def create!(transfer_id, params) do
      _create!(params, [MangoPay.Transfer.path(transfer_id), resource()])
    end
  end

  @doc """
  List all refunds for a pay out.

  `query` supports pagination and filter keys such as `"Page"`,
  `"Per_Page"`, `"Sort"`, `"Status"` and `"ResultCode"`.

  ## Examples

      {:ok, refunds} = MangoPay.Refund.all_by_pay_out(pay_out_id, query)

  """
  def all_by_pay_out(id, query \\ %{}) do
    _all([MangoPay.PayOut.path(id), resource()], query)
  end

  @doc """
  Same as `all_by_pay_out/2` but raises on failure.

  ## Examples

      refunds = MangoPay.Refund.all_by_pay_out!(pay_out_id, query)

  """
  def all_by_pay_out!(id, query \\ %{}) do
    _all!([MangoPay.PayOut.path(id), resource()], query)
  end

  @doc """
  List all refunds for a pay in.

  See `all_by_pay_out/2` for the supported `query` keys.

  ## Examples

      {:ok, refunds} = MangoPay.Refund.all_by_pay_in(pay_in_id, query)

  """
  def all_by_pay_in(id, query \\ %{}) do
    _all([MangoPay.PayIn.path(id), resource()], query)
  end

  @doc """
  Same as `all_by_pay_in/2` but raises on failure.

  ## Examples

      refunds = MangoPay.Refund.all_by_pay_in!(pay_in_id, query)

  """
  def all_by_pay_in!(id, query \\ %{}) do
    _all!([MangoPay.PayIn.path(id), resource()], query)
  end

  @doc """
  List all refunds for a transfer.

  See `all_by_pay_out/2` for the supported `query` keys.

  ## Examples

      {:ok, refunds} = MangoPay.Refund.all_by_transfer(transfer_id, query)

  """
  def all_by_transfer(id, query \\ %{}) do
    _all([MangoPay.Transfer.path(id), resource()], query)
  end

  @doc """
  Same as `all_by_transfer/2` but raises on failure.

  ## Examples

      refunds = MangoPay.Refund.all_by_transfer!(transfer_id, query)

  """
  def all_by_transfer!(id, query \\ %{}) do
    _all!([MangoPay.Transfer.path(id), resource()], query)
  end

  @doc """
  List all refunds for a repudiation.

  See `all_by_pay_out/2` for the supported `query` keys.

  ## Examples

      {:ok, refunds} = MangoPay.Refund.all_by_repudiation(repudiation_id, query)

  """
  def all_by_repudiation(id, query \\ %{}) do
    _all([MangoPay.Repudiation.path(id), resource()], query)
  end

  @doc """
  Same as `all_by_repudiation/2` but raises on failure.

  ## Examples

      refunds = MangoPay.Refund.all_by_repudiation!(repudiation_id, query)

  """
  def all_by_repudiation!(id, query \\ %{}) do
    _all!([MangoPay.Repudiation.path(id), resource()], query)
  end
end
|
lib/mango_pay/refund.ex
| 0.682468 | 0.521959 |
refund.ex
|
starcoder
|
defmodule StaffNotes.Files do
  @moduledoc """
  Represents the business-logic layer of handling files.
  """
  require Logger

  alias ExAws.S3
  alias StaffNotes.ConfigurationError

  @doc """
  Accepts a Base64-encoded file and uploads it to S3.

  Returns `{:ok, url}` with the URL of the uploaded object, or
  `{:error, reason}` when the data cannot be decoded, the MIME type is
  unsupported, or the upload fails. Raises `StaffNotes.ConfigurationError`
  when no `:s3_bucket` is configured.

  ## Examples

  ```
  iex> upload_file(..., "image/png")
  {:ok, "https://image_bucket.s3.amazonaws.com/dbaaee81609747ba82bea2453cc33b83.png"}
  ```
  """
  @spec upload_file(String.t(), String.t()) ::
          {:ok, String.t()} | {:error, String.t()} | no_return()
  def upload_file(base64_data, mime_type), do: upload_file(base64_data, mime_type, [])

  # Three-arity variant additionally accepts options (currently only
  # :ex_aws_module, which is injectable for testing).
  @doc false
  def upload_file(base64_data, mime_type, options)
      when is_binary(base64_data) and is_binary(mime_type) do
    options = Keyword.merge(defaults(), options)

    case Base.decode64(base64_data) do
      :error -> {:error, "Error decoding base64 data"}
      {:ok, binary} -> do_upload(config(:s3_bucket), binary, image_extension(mime_type), options)
    end
  end

  defp defaults do
    [
      ex_aws_module: ExAws
    ]
  end

  # Reads a key from this module's application config. Defaults the whole
  # env to `[]` so that a completely missing configuration surfaces as the
  # descriptive ConfigurationError raised by do_upload/4 (bucket is nil)
  # instead of crashing inside Keyword.get/3 with a nil keyword list.
  defp config(key, default \\ nil) do
    Keyword.get(
      Application.get_env(:staff_notes, __MODULE__, []),
      key,
      default
    )
  end

  # No bucket configured: fail loudly with a descriptive error.
  defp do_upload(nil, _, _, _) do
    raise ConfigurationError,
      message: "No :s3_bucket configured for #{Mix.env()} in application :staff_notes"
  end

  # Propagate a failed MIME-type detection untouched.
  defp do_upload(_, _, {:error, _} = error, _), do: error

  defp do_upload(bucket, binary, {:ok, extension}, options) do
    filename = Path.join(config(:base_path, ""), unique_filename(extension))

    bucket
    |> S3.put_object(filename, binary)
    |> options[:ex_aws_module].request()
    |> handle_response(bucket, filename)
  end

  defp handle_response({:error, _} = error, _, _), do: error

  defp handle_response({:ok, response}, bucket, filename) do
    url = Path.join("https://#{bucket}.s3.amazonaws.com", filename)

    Logger.debug(fn -> "S3 upload result: #{inspect(response)}" end)
    Logger.debug(fn -> "File uploaded: #{url}" end)

    {:ok, url}
  end

  # Map the supported image MIME types to file extensions.
  defp image_extension("image/gif"), do: {:ok, ".gif"}
  defp image_extension("image/jpeg"), do: {:ok, ".jpg"}
  defp image_extension("image/png"), do: {:ok, ".png"}
  defp image_extension(_), do: {:error, "Could not determine file type"}

  # Collision-resistant filename based on a freshly generated UUID.
  defp unique_filename(extension) do
    Ecto.UUID.generate() <> extension
  end
end
|
lib/staff_notes/files.ex
| 0.845528 | 0.599808 |
files.ex
|
starcoder
|
defmodule Konvex do
  @moduledoc """
  Provides modular KV-storage abstractions.
  Each KV-storage shares a common set of abilities (e.g. get value by key),
  but at the same time can expose its own unique one.
  So to address this heterogeneity and be able to compose modular clients
  the library exposes these abilities on their own.
  Each client defines its own set of abilities it provides (and how each of them is implemented).
  As a consequence the least-privilege principle comes for free,
  and you can build clients along with your business-logic
  (e.g. you can build two clients backed by the same storage,
  but of different abilities: one for admin access with a full set of abilities
  and another for user read-only access without functionality to modify storage data).
  The main KV-storage abstraction in konvex is Ability, which defines a unit of storage functionality
  (that can be either unique for a particular backend or common for several ones).
  Ability implementations for particular backends can be found in the Konvex.Implementation module.
  Given ability implementations, a client is just a module that defines a subset of them to use.
  Example:
  defmodule ReadOnlyClient do
  use Konvex.Implementation.YetAnotherKeyValueStorage.Ability.ToGetTextValue,
  # Each backend can define its own specific set of metadata to implement communication with it
  connection_pool: get_connection_pool_for(:user_access)
  end
  defmodule AdminClient do
  use Konvex.Implementation.YetAnotherKeyValueStorage.Ability.ToGetTextValue,
  connection_pool: get_connection_pool_for(:admin_access)
  use Konvex.Implementation.YetAnotherKeyValueStorage.Ability.ToPutTextValue,
  connection_pool: get_connection_pool_for(:admin_access)
  use Konvex.Implementation.YetAnotherKeyValueStorage.Ability.ToDeleteKey,
  connection_pool: get_connection_pool_for(:admin_access)
  end
  Bonus level for those who are still reading (or library name motivation):
  konvex stands for convex hull of Abilities.
  The analogy here is that each client built with the library
  is at some point a convex hull of the abilities it implements.
  The name also satisfies the following requirements (joke in progress):
  * must include K, V letters as a KV-storage library
  * must include EX suffix as an Elixir-library
  Thank you for your attention (joke done)
  """
end
|
lib/konvex.ex
| 0.75183 | 0.463384 |
konvex.ex
|
starcoder
|
defmodule Yggdrasil.Backend do
  @moduledoc """
  Backend behaviour that defines how to subscribe, unsubscribe and publish as
  well as send messages of connection and disconnection to subscribers.
  By default, the implementation uses `Phoenix.PubSub` as backend.
  ## Backend alias
  When defining backends it is possible to define aliases for the module
  as follows:
  ```
  defmodule Yggdrasil.Backend.MyBackend do
    use Yggdrasil.Backend, name: :my_backend
    (... behaviour implementation ...)
  end
  ```
  And adding the following to our application supervision tree:
  ```
  Supervisor.start_link([
    {Yggdrasil.Backend.MyBackend, []}
    ...
  ])
  ```
  This will allow you to use the following as a `Channel` to subscribe and
  publish:
  ```
  %Channel{name: "my_channel", backend: :my_backend}
  ```
  """
  alias __MODULE__
  alias Phoenix.PubSub
  alias Yggdrasil.Channel
  alias Yggdrasil.Registry
  alias Yggdrasil.Subscriber.Manager
  @doc """
  Callback to define the subscription method. Receives the `channel`.
  """
  @callback subscribe(channel :: Channel.t()) :: :ok | {:error, term()}
  @doc """
  Callback to define the unsubscription method. Receives the `channel`.
  """
  @callback unsubscribe(channel :: Channel.t()) :: :ok | {:error, term()}
  @doc """
  Callback to publish the connected message in the `channel`. Receives a `pid`
  in case the message shouldn't be broadcasted.
  """
  # `nil` means "broadcast to every subscriber"; a pid or a registered name
  # (atom) targets a single process. The default implementation and the
  # module-level dispatcher both pass `nil`, so the spec must allow it.
  @callback connected(channel :: Channel.t(), pid :: nil | atom() | pid()) ::
              :ok | {:error, term()}
  @doc """
  Callback to publish the disconnected message in the `channel`. Receives a
  `pid` in case the message shouldn't be broadcasted.
  """
  # Same contract as `connected/2`: `nil` broadcasts, pid/atom targets one
  # process.
  @callback disconnected(channel :: Channel.t(), pid :: nil | atom() | pid()) ::
              :ok | {:error, term()}
  @doc """
  Callback to publish a `message` in a `channel` with some `metadata`.
  """
  @callback publish(
              channel :: Channel.t(),
              message :: term(),
              metadata :: term()
            ) :: :ok | {:error, term()}
  @doc """
  Macro for using `Yggdrasil.Backend`.
  The following are the available options:
  - `:name` - Name of the backend. Must be an atom.
  """
  defmacro __using__(options) do
    backend_alias =
      options[:name] ||
        raise ArgumentError,
          message: "missing :name option for Yggdrasil.Backend"
    quote do
      @behaviour Yggdrasil.Backend
      use Task, restart: :transient
      @doc """
      Start task to register the backend in the `Registry`.
      """
      @spec start_link(term()) :: {:ok, pid()}
      def start_link(_) do
        Task.start_link(__MODULE__, :register, [])
      end
      @doc """
      Registers backend in `Registry`.
      """
      @spec register() :: :ok
      def register do
        name = unquote(backend_alias)
        Registry.register_backend(name, __MODULE__)
      end
      @doc """
      Subscribes to `channel`.
      """
      @spec subscribe(Channel.t()) :: :ok | {:error, term()}
      def subscribe(channel)
      def subscribe(%Channel{} = channel) do
        # Subscribing twice to the same PubSub topic would duplicate
        # deliveries, so an already-subscribed channel is a no-op.
        if Manager.subscribed?(channel) do
          :ok
        else
          channel_name = Backend.transform_name(channel)
          PubSub.subscribe(Yggdrasil.PubSub, channel_name)
        end
      rescue
        reason ->
          {:error, reason}
      end
      @doc """
      Unsubscribe to `channel`.
      """
      @spec unsubscribe(Channel.t()) :: :ok | {:error, term()}
      def unsubscribe(channel)
      def unsubscribe(%Channel{} = channel) do
        if Manager.subscribed?(channel) do
          channel_name = Backend.transform_name(channel)
          PubSub.unsubscribe(Yggdrasil.PubSub, channel_name)
        else
          :ok
        end
      rescue
        reason ->
          {:error, reason}
      end
      @doc """
      Broadcast a connection message in a `channel` and optionally to a `pid`.
      """
      @spec connected(Channel.t(), nil | atom() | pid()) :: :ok | {:error, term()}
      def connected(channel, pid)
      def connected(%Channel{} = channel, nil) do
        real_message = {:Y_CONNECTED, channel}
        channel_name = Backend.transform_name(channel)
        PubSub.broadcast(Yggdrasil.PubSub, channel_name, real_message)
      rescue
        reason ->
          {:error, reason}
      end
      def connected(%Channel{} = channel, pid) do
        real_message = {:Y_CONNECTED, channel}
        send(pid, real_message)
        :ok
      end
      @doc """
      Broadcast a disconnection message in a `channel` and optionally to a
      `pid`.
      """
      @spec disconnected(Channel.t(), nil | atom() | pid()) :: :ok | {:error, term()}
      def disconnected(channel, pid)
      def disconnected(%Channel{} = channel, nil) do
        real_message = {:Y_DISCONNECTED, channel}
        channel_name = Backend.transform_name(channel)
        PubSub.broadcast(Yggdrasil.PubSub, channel_name, real_message)
      rescue
        reason ->
          {:error, reason}
      end
      def disconnected(%Channel{} = channel, pid) do
        real_message = {:Y_DISCONNECTED, channel}
        send(pid, real_message)
        :ok
      end
      @doc """
      Broadcasts a `message` in a `channel` with some `metadata`.
      """
      @spec publish(Channel.t(), term(), term()) :: :ok | {:error, term()}
      def publish(channel, message, metadata)
      def publish(%Channel{} = channel, message, metadata) do
        complete_channel = %Channel{channel | metadata: metadata}
        real_message = {:Y_EVENT, complete_channel, message}
        # NOTE: the topic is derived from the channel *without* the metadata,
        # so per-message metadata never changes the routing.
        channel_name = Backend.transform_name(channel)
        PubSub.broadcast(Yggdrasil.PubSub, channel_name, real_message)
      rescue
        reason ->
          {:error, reason}
      end
      defoverridable subscribe: 1,
                     unsubscribe: 1,
                     connected: 2,
                     disconnected: 2,
                     publish: 3
    end
  end
  @doc """
  Transforms name of the `channel` to a `binary`.
  """
  # `Phoenix.PubSub` topics are binaries; hashing the whole channel struct
  # gives a stable, collision-unlikely topic for any channel shape.
  @spec transform_name(Channel.t()) :: binary()
  def transform_name(channel)
  def transform_name(%Channel{} = channel) do
    channel
    |> :erlang.phash2()
    |> Integer.to_string()
  end
  @doc """
  Generic subscription in a `channel`.
  """
  @spec subscribe(Channel.t()) :: :ok | {:error, term()}
  def subscribe(channel)
  def subscribe(%Channel{backend: backend} = channel) do
    with {:ok, module} <- Registry.get_backend_module(backend) do
      module.subscribe(channel)
    end
  end
  @doc """
  Generic unsubscriptions in a `channel`.
  """
  @spec unsubscribe(Channel.t()) :: :ok | {:error, term()}
  def unsubscribe(channel)
  def unsubscribe(%Channel{backend: backend} = channel) do
    with {:ok, module} <- Registry.get_backend_module(backend) do
      module.unsubscribe(channel)
    end
  end
  @doc """
  Generic connected message sender to a `channel`. Optionally receives
  the `pid` of the specific subscriber.
  """
  @spec connected(Channel.t()) :: :ok | {:error, term()}
  @spec connected(Channel.t(), nil | pid()) :: :ok | {:error, term()}
  def connected(channel, pid \\ nil)
  def connected(%Channel{backend: backend} = channel, pid) do
    with {:ok, module} <- Registry.get_backend_module(backend) do
      module.connected(channel, pid)
    end
  end
  @doc """
  Generic disconnected message sender to a `channel`. Optionally receives
  the `pid` of the specific subscriber.
  """
  @spec disconnected(Channel.t()) :: :ok | {:error, term()}
  @spec disconnected(Channel.t(), nil | pid()) :: :ok | {:error, term()}
  def disconnected(channel, pid \\ nil)
  def disconnected(%Channel{backend: backend} = channel, pid) do
    with {:ok, module} <- Registry.get_backend_module(backend) do
      module.disconnected(channel, pid)
    end
  end
  @doc """
  Generic publish `message` in a `channel` with some optional `metadata`.
  """
  @spec publish(Channel.t(), term()) :: :ok | {:error, term()}
  @spec publish(Channel.t(), term(), term()) :: :ok | {:error, term()}
  def publish(channel, message, metadata \\ nil)
  def publish(%Channel{backend: backend} = channel, message, metadata) do
    with {:ok, module} <- Registry.get_backend_module(backend) do
      module.publish(channel, message, metadata)
    end
  end
end
|
lib/yggdrasil/backend.ex
| 0.882592 | 0.789112 |
backend.ex
|
starcoder
|
defmodule AWS.NetworkManager do
  @moduledoc """
  Transit Gateway Network Manager (Network Manager) enables you to create a global
  network, in which you can monitor your AWS and on-premises networks that are
  built around transit gateways.
  """
  @doc """
  Associates a customer gateway with a device and optionally, with a link.
  If you specify a link, it must be associated with the specified device.
  You can only associate customer gateways that are connected to a VPN attachment
  on a transit gateway. The transit gateway must be registered in your global
  network. When you register a transit gateway, customer gateways that are
  connected to the transit gateway are automatically included in the global
  network. To list customer gateways that are connected to a transit gateway, use
  the
  [DescribeVpnConnections](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeVpnConnections.html)
  EC2 API and filter by `transit-gateway-id`.
  You cannot associate a customer gateway with more than one device and link.
  """
  def associate_customer_gateway(client, global_network_id, input, options \\ []) do
    path_ = "/global-networks/#{URI.encode(global_network_id)}/customer-gateway-associations"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end
  @doc """
  Associates a link to a device.
  A device can be associated to multiple links and a link can be associated to
  multiple devices. The device and link must be in the same global network and the
  same site.
  """
  def associate_link(client, global_network_id, input, options \\ []) do
    path_ = "/global-networks/#{URI.encode(global_network_id)}/link-associations"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end
  @doc """
  Creates a new device in a global network.
  If you specify both a site ID and a location, the location of the site is used
  for visualization in the Network Manager console.
  """
  def create_device(client, global_network_id, input, options \\ []) do
    path_ = "/global-networks/#{URI.encode(global_network_id)}/devices"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end
  @doc """
  Creates a new, empty global network.
  """
  def create_global_network(client, input, options \\ []) do
    path_ = "/global-networks"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end
  @doc """
  Creates a new link for a specified site.
  """
  def create_link(client, global_network_id, input, options \\ []) do
    path_ = "/global-networks/#{URI.encode(global_network_id)}/links"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end
  @doc """
  Creates a new site in a global network.
  """
  def create_site(client, global_network_id, input, options \\ []) do
    path_ = "/global-networks/#{URI.encode(global_network_id)}/sites"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end
  @doc """
  Deletes an existing device.
  You must first disassociate the device from any links and customer gateways.
  """
  def delete_device(client, device_id, global_network_id, input, options \\ []) do
    path_ = "/global-networks/#{URI.encode(global_network_id)}/devices/#{URI.encode(device_id)}"
    headers = []
    query_ = []
    request(client, :delete, path_, query_, headers, input, options, nil)
  end
  @doc """
  Deletes an existing global network.
  You must first delete all global network objects (devices, links, and sites) and
  deregister all transit gateways.
  """
  def delete_global_network(client, global_network_id, input, options \\ []) do
    path_ = "/global-networks/#{URI.encode(global_network_id)}"
    headers = []
    query_ = []
    request(client, :delete, path_, query_, headers, input, options, nil)
  end
  @doc """
  Deletes an existing link.
  You must first disassociate the link from any devices and customer gateways.
  """
  def delete_link(client, global_network_id, link_id, input, options \\ []) do
    path_ = "/global-networks/#{URI.encode(global_network_id)}/links/#{URI.encode(link_id)}"
    headers = []
    query_ = []
    request(client, :delete, path_, query_, headers, input, options, nil)
  end
  @doc """
  Deletes an existing site.
  The site cannot be associated with any device or link.
  """
  def delete_site(client, global_network_id, site_id, input, options \\ []) do
    path_ = "/global-networks/#{URI.encode(global_network_id)}/sites/#{URI.encode(site_id)}"
    headers = []
    query_ = []
    request(client, :delete, path_, query_, headers, input, options, nil)
  end
  @doc """
  Deregisters a transit gateway from your global network.
  This action does not delete your transit gateway, or modify any of its
  attachments. This action removes any customer gateway associations.
  """
  def deregister_transit_gateway(client, global_network_id, transit_gateway_arn, input, options \\ []) do
    path_ = "/global-networks/#{URI.encode(global_network_id)}/transit-gateway-registrations/#{URI.encode(transit_gateway_arn)}"
    headers = []
    query_ = []
    request(client, :delete, path_, query_, headers, input, options, nil)
  end
  @doc """
  Describes one or more global networks.
  By default, all global networks are described. To describe the objects in your
  global network, you must use the appropriate `Get*` action. For example, to list
  the transit gateways in your global network, use
  `GetTransitGatewayRegistrations`.
  """
  def describe_global_networks(client, global_network_ids \\ nil, max_results \\ nil, next_token \\ nil, options \\ []) do
    path_ = "/global-networks"
    headers = []
    query_ = []
    query_ = if !is_nil(next_token) do
      [{"nextToken", next_token} | query_]
    else
      query_
    end
    query_ = if !is_nil(max_results) do
      [{"maxResults", max_results} | query_]
    else
      query_
    end
    query_ = if !is_nil(global_network_ids) do
      [{"globalNetworkIds", global_network_ids} | query_]
    else
      query_
    end
    request(client, :get, path_, query_, headers, nil, options, nil)
  end
  @doc """
  Disassociates a customer gateway from a device and a link.
  """
  def disassociate_customer_gateway(client, customer_gateway_arn, global_network_id, input, options \\ []) do
    path_ = "/global-networks/#{URI.encode(global_network_id)}/customer-gateway-associations/#{URI.encode(customer_gateway_arn)}"
    headers = []
    query_ = []
    request(client, :delete, path_, query_, headers, input, options, nil)
  end
  @doc """
  Disassociates an existing device from a link.
  You must first disassociate any customer gateways that are associated with the
  link.
  """
  def disassociate_link(client, global_network_id, input, options \\ []) do
    path_ = "/global-networks/#{URI.encode(global_network_id)}/link-associations"
    headers = []
    {query_, input} =
      [
        {"DeviceId", "deviceId"},
        {"LinkId", "linkId"},
      ]
      |> AWS.Request.build_params(input)
    request(client, :delete, path_, query_, headers, input, options, nil)
  end
  @doc """
  Gets the association information for customer gateways that are associated with
  devices and links in your global network.
  """
  def get_customer_gateway_associations(client, global_network_id, customer_gateway_arns \\ nil, max_results \\ nil, next_token \\ nil, options \\ []) do
    path_ = "/global-networks/#{URI.encode(global_network_id)}/customer-gateway-associations"
    headers = []
    query_ = []
    query_ = if !is_nil(next_token) do
      [{"nextToken", next_token} | query_]
    else
      query_
    end
    query_ = if !is_nil(max_results) do
      [{"maxResults", max_results} | query_]
    else
      query_
    end
    query_ = if !is_nil(customer_gateway_arns) do
      [{"customerGatewayArns", customer_gateway_arns} | query_]
    else
      query_
    end
    request(client, :get, path_, query_, headers, nil, options, nil)
  end
  @doc """
  Gets information about one or more of your devices in a global network.
  """
  def get_devices(client, global_network_id, device_ids \\ nil, max_results \\ nil, next_token \\ nil, site_id \\ nil, options \\ []) do
    path_ = "/global-networks/#{URI.encode(global_network_id)}/devices"
    headers = []
    query_ = []
    query_ = if !is_nil(site_id) do
      [{"siteId", site_id} | query_]
    else
      query_
    end
    query_ = if !is_nil(next_token) do
      [{"nextToken", next_token} | query_]
    else
      query_
    end
    query_ = if !is_nil(max_results) do
      [{"maxResults", max_results} | query_]
    else
      query_
    end
    query_ = if !is_nil(device_ids) do
      [{"deviceIds", device_ids} | query_]
    else
      query_
    end
    request(client, :get, path_, query_, headers, nil, options, nil)
  end
  @doc """
  Gets the link associations for a device or a link.
  Either the device ID or the link ID must be specified.
  """
  def get_link_associations(client, global_network_id, device_id \\ nil, link_id \\ nil, max_results \\ nil, next_token \\ nil, options \\ []) do
    path_ = "/global-networks/#{URI.encode(global_network_id)}/link-associations"
    headers = []
    query_ = []
    query_ = if !is_nil(next_token) do
      [{"nextToken", next_token} | query_]
    else
      query_
    end
    query_ = if !is_nil(max_results) do
      [{"maxResults", max_results} | query_]
    else
      query_
    end
    query_ = if !is_nil(link_id) do
      [{"linkId", link_id} | query_]
    else
      query_
    end
    query_ = if !is_nil(device_id) do
      [{"deviceId", device_id} | query_]
    else
      query_
    end
    request(client, :get, path_, query_, headers, nil, options, nil)
  end
  @doc """
  Gets information about one or more links in a specified global network.
  If you specify the site ID, you cannot specify the type or provider in the same
  request. You can specify the type and provider in the same request.
  """
  def get_links(client, global_network_id, link_ids \\ nil, max_results \\ nil, next_token \\ nil, provider \\ nil, site_id \\ nil, type \\ nil, options \\ []) do
    path_ = "/global-networks/#{URI.encode(global_network_id)}/links"
    headers = []
    query_ = []
    query_ = if !is_nil(type) do
      [{"type", type} | query_]
    else
      query_
    end
    query_ = if !is_nil(site_id) do
      [{"siteId", site_id} | query_]
    else
      query_
    end
    query_ = if !is_nil(provider) do
      [{"provider", provider} | query_]
    else
      query_
    end
    query_ = if !is_nil(next_token) do
      [{"nextToken", next_token} | query_]
    else
      query_
    end
    query_ = if !is_nil(max_results) do
      [{"maxResults", max_results} | query_]
    else
      query_
    end
    query_ = if !is_nil(link_ids) do
      [{"linkIds", link_ids} | query_]
    else
      query_
    end
    request(client, :get, path_, query_, headers, nil, options, nil)
  end
  @doc """
  Gets information about one or more of your sites in a global network.
  """
  def get_sites(client, global_network_id, max_results \\ nil, next_token \\ nil, site_ids \\ nil, options \\ []) do
    path_ = "/global-networks/#{URI.encode(global_network_id)}/sites"
    headers = []
    query_ = []
    query_ = if !is_nil(site_ids) do
      [{"siteIds", site_ids} | query_]
    else
      query_
    end
    query_ = if !is_nil(next_token) do
      [{"nextToken", next_token} | query_]
    else
      query_
    end
    query_ = if !is_nil(max_results) do
      [{"maxResults", max_results} | query_]
    else
      query_
    end
    request(client, :get, path_, query_, headers, nil, options, nil)
  end
  @doc """
  Gets information about the transit gateway registrations in a specified global
  network.
  """
  def get_transit_gateway_registrations(client, global_network_id, max_results \\ nil, next_token \\ nil, transit_gateway_arns \\ nil, options \\ []) do
    path_ = "/global-networks/#{URI.encode(global_network_id)}/transit-gateway-registrations"
    headers = []
    query_ = []
    query_ = if !is_nil(transit_gateway_arns) do
      [{"transitGatewayArns", transit_gateway_arns} | query_]
    else
      query_
    end
    query_ = if !is_nil(next_token) do
      [{"nextToken", next_token} | query_]
    else
      query_
    end
    query_ = if !is_nil(max_results) do
      [{"maxResults", max_results} | query_]
    else
      query_
    end
    request(client, :get, path_, query_, headers, nil, options, nil)
  end
  @doc """
  Lists the tags for a specified resource.
  """
  def list_tags_for_resource(client, resource_arn, options \\ []) do
    path_ = "/tags/#{URI.encode(resource_arn)}"
    headers = []
    query_ = []
    request(client, :get, path_, query_, headers, nil, options, nil)
  end
  @doc """
  Registers a transit gateway in your global network.
  The transit gateway can be in any AWS Region, but it must be owned by the same
  AWS account that owns the global network. You cannot register a transit gateway
  in more than one global network.
  """
  def register_transit_gateway(client, global_network_id, input, options \\ []) do
    path_ = "/global-networks/#{URI.encode(global_network_id)}/transit-gateway-registrations"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end
  @doc """
  Tags a specified resource.
  """
  def tag_resource(client, resource_arn, input, options \\ []) do
    path_ = "/tags/#{URI.encode(resource_arn)}"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end
  @doc """
  Removes tags from a specified resource.
  """
  def untag_resource(client, resource_arn, input, options \\ []) do
    path_ = "/tags/#{URI.encode(resource_arn)}"
    headers = []
    {query_, input} =
      [
        {"TagKeys", "tagKeys"},
      ]
      |> AWS.Request.build_params(input)
    request(client, :delete, path_, query_, headers, input, options, nil)
  end
  @doc """
  Updates the details for an existing device.
  To remove information for any of the parameters, specify an empty string.
  """
  def update_device(client, device_id, global_network_id, input, options \\ []) do
    path_ = "/global-networks/#{URI.encode(global_network_id)}/devices/#{URI.encode(device_id)}"
    headers = []
    query_ = []
    request(client, :patch, path_, query_, headers, input, options, nil)
  end
  @doc """
  Updates an existing global network.
  To remove information for any of the parameters, specify an empty string.
  """
  def update_global_network(client, global_network_id, input, options \\ []) do
    path_ = "/global-networks/#{URI.encode(global_network_id)}"
    headers = []
    query_ = []
    request(client, :patch, path_, query_, headers, input, options, nil)
  end
  @doc """
  Updates the details for an existing link.
  To remove information for any of the parameters, specify an empty string.
  """
  def update_link(client, global_network_id, link_id, input, options \\ []) do
    path_ = "/global-networks/#{URI.encode(global_network_id)}/links/#{URI.encode(link_id)}"
    headers = []
    query_ = []
    request(client, :patch, path_, query_, headers, input, options, nil)
  end
  @doc """
  Updates the information for an existing site.
  To remove information for any of the parameters, specify an empty string.
  """
  def update_site(client, global_network_id, site_id, input, options \\ []) do
    path_ = "/global-networks/#{URI.encode(global_network_id)}/sites/#{URI.encode(site_id)}"
    headers = []
    query_ = []
    request(client, :patch, path_, query_, headers, input, options, nil)
  end
  # Builds, signs and performs the HTTP request for every public API above.
  # `input` is `nil` for GET endpoints (no request body) and
  # `success_status_code` is `nil` at every call site in this module, so both
  # must be declared nilable in the spec.
  @spec request(AWS.Client.t(), binary(), binary(), list(), list(), map() | nil, list(), pos_integer() | nil) ::
          {:ok, map() | nil, map()}
          | {:error, term()}
  defp request(client, method, path, query, headers, input, options, success_status_code) do
    client = %{client | service: "networkmanager"}
    host = build_host("networkmanager", client)
    url = host
          |> build_url(path, client)
          |> add_query(query, client)
    additional_headers = [{"Host", host}, {"Content-Type", "application/x-amz-json-1.1"}]
    headers = AWS.Request.add_headers(additional_headers, headers)
    payload = encode!(client, input)
    headers = AWS.Request.sign_v4(client, method, url, headers, payload)
    perform_request(client, method, url, payload, headers, options, success_status_code)
  end
  defp perform_request(client, method, url, payload, headers, options, success_status_code) do
    case AWS.Client.request(client, method, url, payload, headers, options) do
      # Two stacked `when` clauses act as an OR: either no explicit success
      # code was requested and the status is a generic success, or the status
      # matches the requested success code exactly.
      {:ok, %{status_code: status_code, body: body} = response}
      when is_nil(success_status_code) and status_code in [200, 202, 204]
      when status_code == success_status_code ->
        # Empty bodies decode to `nil` rather than raising on "".
        body = if(body != "", do: decode!(client, body))
        {:ok, body, response}
      {:ok, response} ->
        {:error, {:unexpected_response, response}}
      error = {:error, _reason} -> error
    end
  end
  defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
    endpoint
  end
  defp build_host(_endpoint_prefix, %{region: "local"}) do
    "localhost"
  end
  defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
    "#{endpoint_prefix}.#{region}.#{endpoint}"
  end
  defp build_url(host, path, %{:proto => proto, :port => port}) do
    "#{proto}://#{host}:#{port}#{path}"
  end
  defp add_query(url, [], _client) do
    url
  end
  defp add_query(url, query, client) do
    querystring = encode!(client, query, :query)
    "#{url}?#{querystring}"
  end
  defp encode!(client, payload, format \\ :json) do
    AWS.Client.encode!(client, payload, format)
  end
  defp decode!(client, payload) do
    AWS.Client.decode!(client, payload, :json)
  end
end
|
lib/aws/generated/network_manager.ex
| 0.798619 | 0.424591 |
network_manager.ex
|
starcoder
|
defmodule AWS.SFN do
@moduledoc """
AWS Step Functions
AWS Step Functions is a service that lets you coordinate the components of
distributed applications and microservices using visual workflows.
You can use Step Functions to build applications from individual components,
each of which performs a discrete function, or *task*, allowing you to scale and
change applications quickly. Step Functions provides a console that helps
visualize the components of your application as a series of steps. Step
Functions automatically triggers and tracks each step, and retries steps when
there are errors, so your application executes predictably and in the right
order every time. Step Functions logs the state of each step, so you can quickly
diagnose and debug any issues.
Step Functions manages operations and underlying infrastructure to ensure your
application is available at any scale. You can run tasks on AWS, your own
servers, or any system that has access to AWS. You can access and use Step
Functions using the console, the AWS SDKs, or an HTTP API. For more information
about Step Functions, see the * [AWS Step Functions Developer Guide](https://docs.aws.amazon.com/step-functions/latest/dg/welcome.html) *.
"""
@doc """
Creates an activity.
An activity is a task that you write in any programming language and host on any
machine that has access to AWS Step Functions. Activities must poll Step
Functions using the `GetActivityTask` API action and respond using `SendTask*`
API actions. This function lets Step Functions know the existence of your
activity and returns an identifier for use in a state machine and when polling
from the activity.
This operation is eventually consistent. The results are best effort and may not
reflect very recent updates and changes.
`CreateActivity` is an idempotent API. Subsequent requests won’t create a
duplicate resource if it was already created. `CreateActivity`'s idempotency
check is based on the activity `name`. If a following request has different
`tags` values, Step Functions will ignore these differences and treat it as an
idempotent request of the previous. In this case, `tags` will not be updated,
even if they are different.
"""
def create_activity(client, input, options \\ []) do
request(client, "CreateActivity", input, options)
end
@doc """
Creates a state machine.
A state machine consists of a collection of states that can do work (`Task`
states), determine to which states to transition next (`Choice` states), stop an
execution with an error (`Fail` states), and so on. State machines are specified
using a JSON-based, structured language. For more information, see [Amazon States
Language](https://docs.aws.amazon.com/step-functions/latest/dg/concepts-amazon-states-language.html)
in the AWS Step Functions User Guide.
This operation is eventually consistent. The results are best effort and may not
reflect very recent updates and changes.
`CreateStateMachine` is an idempotent API. Subsequent requests won’t create a
duplicate resource if it was already created. `CreateStateMachine`'s idempotency
check is based on the state machine `name`, `definition`, `type`,
`LoggingConfiguration` and `TracingConfiguration`. If a following request has a
different `roleArn` or `tags`, Step Functions will ignore these differences and
treat it as an idempotent request of the previous. In this case, `roleArn` and
`tags` will not be updated, even if they are different.
"""
def create_state_machine(client, input, options \\ []) do
request(client, "CreateStateMachine", input, options)
end
@doc """
Deletes an activity.
"""
def delete_activity(client, input, options \\ []) do
request(client, "DeleteActivity", input, options)
end
@doc """
Deletes a state machine.
This is an asynchronous operation: It sets the state machine's status to
`DELETING` and begins the deletion process.
For `EXPRESS`state machines, the deletion will happen eventually (usually less
than a minute). Running executions may emit logs after `DeleteStateMachine` API
is called.
"""
def delete_state_machine(client, input, options \\ []) do
request(client, "DeleteStateMachine", input, options)
end
@doc """
Describes an activity.
This operation is eventually consistent. The results are best effort and may not
reflect very recent updates and changes.
"""
def describe_activity(client, input, options \\ []) do
request(client, "DescribeActivity", input, options)
end
@doc """
Describes an execution.
This operation is eventually consistent. The results are best effort and may not
reflect very recent updates and changes.
This API action is not supported by `EXPRESS` state machines.
"""
def describe_execution(client, input, options \\ []) do
request(client, "DescribeExecution", input, options)
end
@doc """
Describes a state machine.
This operation is eventually consistent. The results are best effort and may not
reflect very recent updates and changes.
"""
def describe_state_machine(client, input, options \\ []) do
request(client, "DescribeStateMachine", input, options)
end
@doc """
Describes the state machine associated with a specific execution.
This operation is eventually consistent. The results are best effort and may not
reflect very recent updates and changes.
This API action is not supported by `EXPRESS` state machines.
"""
def describe_state_machine_for_execution(client, input, options \\ []) do
request(client, "DescribeStateMachineForExecution", input, options)
end
@doc """
Used by workers to retrieve a task (with the specified activity ARN) which has
been scheduled for execution by a running state machine.
This initiates a long poll, where the service holds the HTTP connection open and
responds as soon as a task becomes available (i.e. an execution of a task of
this type is needed.) The maximum time the service holds on to the request
before responding is 60 seconds. If no task is available within 60 seconds, the
poll returns a `taskToken` with a null string.
Workers should set their client side socket timeout to at least 65 seconds (5
seconds higher than the maximum time the service may hold the poll request).
Polling with `GetActivityTask` can cause latency in some implementations. See
[Avoid Latency When Polling for Activity Tasks](https://docs.aws.amazon.com/step-functions/latest/dg/bp-activity-pollers.html)
in the Step Functions Developer Guide.
"""
def get_activity_task(client, input, options \\ []) do
request(client, "GetActivityTask", input, options)
end
@doc """
Returns the history of the specified execution as a list of events.
By default, the results are returned in ascending order of the `timeStamp` of
the events. Use the `reverseOrder` parameter to get the latest events first.
If `nextToken` is returned, there are more results available. The value of
`nextToken` is a unique pagination token for each page. Make the call again
using the returned token to retrieve the next page. Keep all other arguments
unchanged. Each pagination token expires after 24 hours. Using an expired
pagination token will return an *HTTP 400 InvalidToken* error.
This API action is not supported by `EXPRESS` state machines.
"""
def get_execution_history(client, input, options \\ []) do
request(client, "GetExecutionHistory", input, options)
end
@doc """
Lists the existing activities.
If `nextToken` is returned, there are more results available. The value of
`nextToken` is a unique pagination token for each page. Make the call again
using the returned token to retrieve the next page. Keep all other arguments
unchanged. Each pagination token expires after 24 hours. Using an expired
pagination token will return an *HTTP 400 InvalidToken* error.
This operation is eventually consistent. The results are best effort and may not
reflect very recent updates and changes.
"""
def list_activities(client, input, options \\ []) do
request(client, "ListActivities", input, options)
end
@doc """
Lists the executions of a state machine that meet the filtering criteria.
Results are sorted by time, with the most recent execution first.
If `nextToken` is returned, there are more results available. The value of
`nextToken` is a unique pagination token for each page. Make the call again
using the returned token to retrieve the next page. Keep all other arguments
unchanged. Each pagination token expires after 24 hours. Using an expired
pagination token will return an *HTTP 400 InvalidToken* error.
This operation is eventually consistent. The results are best effort and may not
reflect very recent updates and changes.
This API action is not supported by `EXPRESS` state machines.
"""
def list_executions(client, input, options \\ []) do
request(client, "ListExecutions", input, options)
end
@doc """
Lists the existing state machines.
If `nextToken` is returned, there are more results available. The value of
`nextToken` is a unique pagination token for each page. Make the call again
using the returned token to retrieve the next page. Keep all other arguments
unchanged. Each pagination token expires after 24 hours. Using an expired
pagination token will return an *HTTP 400 InvalidToken* error.
This operation is eventually consistent. The results are best effort and may not
reflect very recent updates and changes.
"""
def list_state_machines(client, input, options \\ []) do
request(client, "ListStateMachines", input, options)
end
@doc """
List tags for a given resource.
Tags may only contain Unicode letters, digits, white space, or these symbols: `_
. : / = + - @`.
"""
def list_tags_for_resource(client, input, options \\ []) do
request(client, "ListTagsForResource", input, options)
end
@doc """
Used by activity workers and task states using the
[callback](https://docs.aws.amazon.com/step-functions/latest/dg/connect-to-resource.html#connect-wait-token)
pattern to report that the task identified by the `taskToken` failed.
"""
def send_task_failure(client, input, options \\ []) do
request(client, "SendTaskFailure", input, options)
end
@doc """
Used by activity workers and task states using the
[callback](https://docs.aws.amazon.com/step-functions/latest/dg/connect-to-resource.html#connect-wait-token)
pattern to report to Step Functions that the task represented by the specified
`taskToken` is still making progress.

This action resets the `Heartbeat` clock. The `Heartbeat` threshold is
specified in the state machine's Amazon States Language definition
(`HeartbeatSeconds`). This action does not in itself create an event in the
execution history. However, if the task times out, the execution history
contains an `ActivityTimedOut` entry for activities, or a `TaskTimedOut`
entry for tasks using the [job
run](https://docs.aws.amazon.com/step-functions/latest/dg/connect-to-resource.html#connect-sync)
or
[callback](https://docs.aws.amazon.com/step-functions/latest/dg/connect-to-resource.html#connect-wait-token)
pattern.

The `Timeout` of a task, defined in the state machine's Amazon States
Language definition, is its maximum allowed duration, regardless of the
number of `SendTaskHeartbeat` requests received. Use `HeartbeatSeconds` to
configure the timeout interval for heartbeats.
"""
def send_task_heartbeat(client, input, options \\ []) do
  request(client, "SendTaskHeartbeat", input, options)
end
@doc """
Used by activity workers and task states using the
[callback](https://docs.aws.amazon.com/step-functions/latest/dg/connect-to-resource.html#connect-wait-token)
pattern to report that the task identified by the `taskToken` completed
successfully.
"""
def send_task_success(client, input, options \\ []),
  do: request(client, "SendTaskSuccess", input, options)
@doc """
Starts a state machine execution.

`StartExecution` is idempotent. If `StartExecution` is called with the same
name and input as a running execution, the call will succeed and return the
same response as the original request. If the execution is closed or if the
input is different, it will return a 400 `ExecutionAlreadyExists` error.
Names can be reused after 90 days.
"""
def start_execution(client, input, options \\ []),
  do: request(client, "StartExecution", input, options)
@doc """
Stops an execution.

This API action is not supported by `EXPRESS` state machines.
"""
def stop_execution(client, input, options \\ []),
  do: request(client, "StopExecution", input, options)
@doc """
Add a tag to a Step Functions resource.

An array of key-value pairs. For more information, see [Using Cost Allocation
Tags](https://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/cost-alloc-tags.html)
in the *AWS Billing and Cost Management User Guide*, and [Controlling Access
Using IAM
Tags](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_iam-tags.html).

Tags may only contain Unicode letters, digits, white space, or these
symbols: `_ . : / = + - @`.
"""
def tag_resource(client, input, options \\ []),
  do: request(client, "TagResource", input, options)
@doc """
Remove a tag from a Step Functions resource
"""
def untag_resource(client, input, options \\ []),
  do: request(client, "UntagResource", input, options)
@doc """
Updates an existing state machine by modifying its `definition`, `roleArn`,
or `loggingConfiguration`.

Running executions will continue to use the previous `definition` and
`roleArn`. You must include at least one of `definition` or `roleArn` or you
will receive a `MissingRequiredParameter` error.

All `StartExecution` calls within a few seconds will use the updated
`definition` and `roleArn`. Executions started immediately after calling
`UpdateStateMachine` may use the previous state machine `definition` and
`roleArn`.
"""
def update_state_machine(client, input, options \\ []),
  do: request(client, "UpdateStateMachine", input, options)
@spec request(AWS.Client.t(), binary(), map(), list()) ::
        {:ok, map() | nil, map()}
        | {:error, term()}
defp request(client, action, input, options) do
  # Every request targets the Step Functions ("states") service endpoint.
  client = %{client | service: "states"}
  host = build_host("states", client)
  url = build_url(host, client)

  # The body is serialized before signing because SigV4 covers the payload.
  body = encode!(client, input)

  unsigned_headers = [
    {"Host", host},
    {"Content-Type", "application/x-amz-json-1.0"},
    {"X-Amz-Target", "AWSStepFunctions.#{action}"}
  ]

  signed_headers = AWS.Request.sign_v4(client, "POST", url, unsigned_headers, body)
  post(client, url, body, signed_headers, options)
end
defp post(client, url, payload, headers, options) do
  case AWS.Client.request(client, :post, url, payload, headers, options) do
    # A 200 with an empty body decodes to `nil`, matching the
    # `{:ok, map() | nil, map()}` contract of `request/4`.
    {:ok, %{status_code: 200, body: ""} = response} ->
      {:ok, nil, response}

    {:ok, %{status_code: 200, body: body} = response} ->
      {:ok, decode!(client, body), response}

    {:ok, response} ->
      {:error, {:unexpected_response, response}}

    {:error, _reason} = error ->
      error
  end
end
# Builds the request host name from the client's region/endpoint settings.
# Clause order matters: the two "local" clauses must be tried before the
# general region clause, because the general clause would also match a
# client whose region is "local" and has an :endpoint key.

# Local development with an explicit endpoint override.
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
  endpoint
end

# Local development without an endpoint: default to localhost.
defp build_host(_endpoint_prefix, %{region: "local"}) do
  "localhost"
end

# Standard AWS host, e.g. "states.us-east-1.amazonaws.com".
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
  "#{endpoint_prefix}.#{region}.#{endpoint}"
end
# Builds the full request URL ("proto://host:port/") from the client's
# protocol and port settings.
defp build_url(host, %{:proto => proto, :port => port}) do
  "#{proto}://#{host}:#{port}/"
end
# Serializes `payload` as JSON using the client's configured encoder.
defp encode!(client, payload), do: AWS.Client.encode!(client, payload, :json)
# Deserializes a JSON response body using the client's configured decoder.
defp decode!(client, payload), do: AWS.Client.decode!(client, payload, :json)
end
|
lib/aws/generated/sfn.ex
| 0.934865 | 0.750553 |
sfn.ex
|
starcoder
|
defmodule Environment do
  @moduledoc """
  Environment name and value storage.
  """

  # Variables present in the global environment before any user code runs.
  # "parent_env" is a reserved key that links an environment to its parent.
  @initial_environment %{
    "parent_env" => nil,
    "null" => nil,
    "true" => true,
    "false" => false,
    "VERSION" => "0.1"
  }

  @doc """
  The global variables always defined at the start of the program.
  """
  @spec global() :: pid()
  def global() do
    {:ok, env} = Agent.start_link(fn -> @initial_environment end)
    env
  end

  @doc """
  Creates a new environment process.
  `parent` is an optional environment pid. The child environment can look up any
  variables that are defined in the parent.
  """
  @spec start_link(pid() | nil) :: pid()
  def start_link(parent \\ nil) do
    {:ok, env} = Agent.start_link(fn -> %{"parent_env" => parent} end)
    env
  end

  @doc """
  Creates a variable with the given name and value.
  ## Examples
      iex> pid = Environment.start_link()
      iex> Environment.define(pid, "x", 4)
      4
  """
  @spec define(pid(), String.t(), any()) :: any()
  def define(env, name, value) do
    :ok = Agent.update(env, &Map.put(&1, name, value))
    lookup(env, name)
  end

  @doc """
  Returns a variable value if it exists or returns `:undefined` if it does not
  exist.
  ## Examples
      iex> pid = Environment.start_link()
      iex> Environment.define(pid, "x", 10)
      10
      iex> Environment.lookup(pid, "x")
      10
  """
  @spec lookup(pid(), any()) :: any() | :undefined
  def lookup(env, name) do
    case resolve(env, name) do
      # `owner` is the environment (possibly an ancestor) that holds `name`.
      {:ok, owner} -> Agent.get(owner, &Map.get(&1, name))
      :undefined -> :undefined
    end
  end

  @doc """
  Assigns to a variable and returns the value or returns `:undefined` if the
  variable does not exist.
  ## Examples
      iex> pid = Environment.start_link()
      iex> Environment.define(pid, "x", 10)
      10
      iex> Environment.assign(pid, "x", 100)
      100
      iex> Environment.lookup(pid, "x")
      100
  """
  @spec assign(pid(), String.t(), any()) :: any() | :undefined
  def assign(env, name, value) do
    case resolve(env, name) do
      {:ok, owner} ->
        Agent.update(owner, &Map.put(&1, name, value))
        value

      :undefined ->
        :undefined
    end
  end

  # Walks the parent chain and returns `{:ok, pid}` for the nearest
  # environment that defines `name`, or `:undefined` when the chain ends.
  defp resolve(nil, _name), do: :undefined

  defp resolve(env, name) do
    if Agent.get(env, &Map.has_key?(&1, name)) do
      {:ok, env}
    else
      env
      |> Agent.get(&Map.get(&1, "parent_env"))
      |> resolve(name)
    end
  end
end
|
lib/environment.ex
| 0.80147 | 0.425486 |
environment.ex
|
starcoder
|
defmodule Numy.Vector do
  @moduledoc """
  Vector, basic implementation.

  Implements protocols: `Numy.Vc`
  """

  @enforce_keys [:nelm]
  defstruct [
    # length of the vector
    :nelm,
    # actual data, type is list
    :data
  ]

  # Create a new vector of `nelm` elements, all initialized to 0.0.
  def new(nelm) when is_integer(nelm) do
    %Numy.Vector{nelm: nelm, data: List.duplicate(0.0, nelm)}
  end

  # Create a new vector from a list; elements are normalized to floats.
  def new(list) when is_list(list) do
    %Numy.Vector{nelm: length(list), data: Numy.Enumy.all_to_float(list)}
  end

  @doc "Create new Vector as a copy of other Vector"
  def new(%Numy.Vector{nelm: sz, data: d} = _v) do
    %Numy.Vector{nelm: sz, data: d}
  end

  @doc "Create new Vector as a concatenation of 2"
  def new(%Numy.Vector{nelm: sz1, data: d1} = _v1, %Numy.Vector{nelm: sz2, data: d2} = _v2) do
    %Numy.Vector{nelm: sz1 + sz2, data: d1 ++ d2}
  end

  defimpl Numy.Vc do
    @doc "Make a clone"
    def clone(%Numy.Vector{} = v) do
      # Structs are immutable, so the value itself is already a valid copy.
      v
    end

    def assign_zeros(v) when is_map(v) do
      %{v | data: List.duplicate(0.0, v.nelm)}
    end

    def assign_ones(v) when is_map(v) do
      %{v | data: List.duplicate(1.0, v.nelm)}
    end

    def assign_random(v) when is_map(v) do
      %{v | data: Numy.Float.make_list_randoms(v.nelm)}
    end

    def assign_all(v, val) when is_map(v) and is_number(val) do
      %{v | data: List.duplicate(val, v.nelm)}
    end

    # Return the underlying list; a negative `nelm` means "all elements".
    def data(v, nelm) when is_map(v) and is_integer(nelm) do
      if nelm < 0 do
        v.data
      else
        Enum.take(v.data, nelm)
      end
    end

    def at(v, index, default \\ nil) when is_map(v) and is_integer(index) do
      Enum.at(v.data, index, default)
    end

    def empty?(v) when is_map(v) do
      v.nelm == 0
    end

    def size(v) when is_map(v) do
      v.nelm
    end

    def equal?(v1, v2) when is_map(v1) and is_map(v2) do
      Numy.Float.equal?(v1.data, v2.data)
    end

    @doc """
    Add two vectors.
    ## Examples
        iex(5)> v = Numy.Vector.new([1,2,3])
        %Numy.Vector{data: [1.0, 2.0, 3.0], nelm: 3}
        iex(6)> Numy.Vc.add(v, v)
        %Numy.Vector{data: [2.0, 4.0, 6.0], nelm: 3}
    """
    def add(v1, v2) when is_map(v1) and is_map(v2) do
      # Enum.zip truncates to the shorter list, hence min/2 for the new size.
      res = Enum.zip(v1.data, v2.data) |> Enum.map(fn {a, b} -> a + b end)
      %Numy.Vector{nelm: min(v1.nelm, v2.nelm), data: res}
    end

    def sub(v1, v2) when is_map(v1) and is_map(v2) do
      res = Enum.zip(v1.data, v2.data) |> Enum.map(fn {a, b} -> a - b end)
      %Numy.Vector{nelm: min(v1.nelm, v2.nelm), data: res}
    end

    def mul(v1, v2) when is_map(v1) and is_map(v2) do
      res = Enum.zip(v1.data, v2.data) |> Enum.map(fn {a, b} -> a * b end)
      %Numy.Vector{nelm: min(v1.nelm, v2.nelm), data: res}
    end

    def div(v1, v2) when is_map(v1) and is_map(v2) do
      res = Enum.zip(v1.data, v2.data) |> Enum.map(fn {a, b} -> a / b end)
      %Numy.Vector{nelm: min(v1.nelm, v2.nelm), data: res}
    end

    def scale(v, factor) when is_map(v) and is_number(factor) do
      res = Enum.map(v.data, fn x -> x * factor end)
      %Numy.Vector{nelm: v.nelm, data: res}
    end

    def offset(%Numy.Vector{nelm: nelm, data: data}, off) when is_number(off) do
      %Numy.Vector{nelm: nelm, data: Enum.map(data, fn x -> x + off end)}
    end

    def negate(%Numy.Vector{nelm: nelm, data: data}) do
      %Numy.Vector{nelm: nelm, data: Enum.map(data, fn x -> -x end)}
    end

    def dot(v1, v2) when is_map(v1) and is_map(v2) do
      Numy.Enumy.dot_product(v1.data, v2.data)
    end

    @doc "Sum of all elements, ∑aᵢ"
    def sum(v) do
      Enum.sum(v.data)
    end

    @doc "Average (∑aᵢ)/length"
    def mean(%Numy.Vector{nelm: nelm, data: _}) when nelm == 0, do: 0.0

    def mean(%Numy.Vector{nelm: nelm, data: _} = v) do
      Enum.sum(v.data) / nelm
    end

    @doc "Return max value"
    def max(v), do: Enum.max(v.data)

    @doc "Return min value"
    def min(v), do: Enum.min(v.data)

    @doc "Return index of max value"
    def max_index(v) when is_map(v) do
      max_val = Numy.Vc.max(v)
      Enum.find_index(v.data, fn x -> x == max_val end)
    end

    @doc "Return index of min value"
    def min_index(v) when is_map(v) do
      min_val = Numy.Vc.min(v)
      Enum.find_index(v.data, fn x -> x == min_val end)
    end

    @doc "Step function, aᵢ ← 0 if aᵢ < 0 else 1"
    def apply_heaviside(v, cutoff \\ 0.0) when is_map(v) do
      res = Enum.map(v.data, fn x -> if x < cutoff, do: 0.0, else: 1.0 end)
      %{v | data: res}
    end

    @doc "f(x) = 1/(1 + e⁻ˣ)"
    def apply_sigmoid(v) when is_map(v) do
      sigmoid = fn x -> 1.0 / (1.0 + :math.exp(-x)) end
      res = Enum.map(v.data, fn x -> sigmoid.(x) end)
      %{v | data: res}
    end

    def sort(v) when is_map(v) do
      Numy.Vector.new(Enum.sort(v.data))
    end

    def reverse(v) when is_map(v) do
      Numy.Vector.new(Enum.reverse(v.data))
    end

    @doc "Create new Vector as a concatenation of 2"
    def concat(%Numy.Vector{nelm: sz1, data: d1} = _v1, %Numy.Vector{nelm: sz2, data: d2} = _v2) do
      %Numy.Vector{nelm: sz1 + sz2, data: d1 ++ d2}
    end

    # `val / 1` coerces integers to floats so lookup matches the stored floats.
    def find(v, val) when is_map(v) and is_number(val) do
      Enum.find_index(v.data, fn x -> x == val / 1 end)
    end

    def contains?(v, val) when is_map(v) and is_number(val) do
      Enum.member?(v.data, val / 1)
    end

    def abs(%Numy.Vector{nelm: nelm, data: data}) do
      %Numy.Vector{nelm: nelm, data: Enum.map(data, fn x -> Kernel.abs(x) end)}
    end

    def pow2(%Numy.Vector{nelm: nelm, data: data}) do
      %Numy.Vector{nelm: nelm, data: Enum.map(data, fn x -> x * x end)}
    end

    def pow(%Numy.Vector{nelm: nelm, data: data}, p) do
      %Numy.Vector{nelm: nelm, data: Enum.map(data, fn x -> :math.pow(x, p) end)}
    end

    @doc "Euclidean (L2) norm, √(∑aᵢ²)"
    def norm2(v) do
      # BUGFIX: reduce over the data list, not the struct itself. The struct
      # does not implement Enumerable, so `Enum.reduce(v, ...)` raised
      # Protocol.UndefinedError.
      v.data |> Enum.reduce(0, fn x, acc -> acc + x * x end) |> :math.sqrt()
    end
  end

  # Mean squared error between two vectors (over their common length).
  def mean_sq_err(v1, v2) do
    sum_sq_err =
      Enum.zip(v1.data, v2.data)
      |> Enum.reduce(0, fn {a, b}, acc -> acc + (a - b) * (a - b) end)

    sum_sq_err / min(v1.nelm, v2.nelm)
  end

  # Root mean squared error, √(MSE).
  def root_mean_sq_err(v1, v2) do
    :math.sqrt(mean_sq_err(v1, v2))
  end
end
|
lib/vector/vector.ex
| 0.829596 | 0.850903 |
vector.ex
|
starcoder
|
defmodule Starship do
  @moduledoc """
  The Starship Webserver.
  This module is the starting point for the Starship Webserver.
  If you are an end user of Starship, this is the only thing that you need to worry about.
  Here is where you will `warp_in` your configuration (or use the default configuration provided) and start the webserver.
  ## A Note On SSL
  You may pass in options to the configuration of Starship that allow you to use SSL secured connections to connect to the server.
  The SSL options are passed directly into the Erlang SSL Application when the webserver is started, and on all requests,
  an SSL handshake will be initiated using the certificates and keys that you provided in the initial configuration.
  The SSL options should look something like this:
  ```elixir
  ssl_opts = [{:cacertfile, "cacerts.pem"}, {:certfile, "cert.pem"}, {:keyfile, "key.pem"}]
  ```
  """

  alias Starship.Errors

  # Fallback configuration: listen on all interfaces, port 4000, with
  # wildcard HTTP and websocket handlers, and SSL disabled.
  @default_configuration %{
    ip: {0, 0, 0, 0},
    port: 4000,
    hosts: %{
      {:http, "*"} => {Starship.Handler.Wildcard.Http, %{}},
      {:ws, "*"} => {Starship.Handler.Wildcard.Websocket, %{}}
    },
    ssl_opts: nil
  }

  @doc """
  Starts the Starship webserver with the default configuration.
  The default configuration listens on port 4000, with wildcard handlers that receive requests for any host,
  `Starship.Handler.Wildcard.Http` and `Starship.Handler.Wildcard.Websocket`.
  ## Examples
      iex> pid = Starship.warp_in()
      iex> is_pid(pid)
      true
      iex> Process.exit(pid, :closed)
  """
  @spec warp_in :: pid
  def warp_in, do: warp_in(@default_configuration)

  @doc """
  Starts the webserver with the desired configuration.
  The `config` passed to this function should be a map
  containing any configurations that you would like to
  start your webserver with.
  ## Examples
      iex(1)> config =
      ...(1)>   %{
      ...(1)>     ip: {0, 0, 0, 0},
      ...(1)>     port: 4000,
      ...(1)>     hosts: %{
      ...(1)>       {:http, "*"} => {Starship.Handler.Wildcard.Http, %{}},
      ...(1)>       {:ws, "*"} => {Starship.Handler.Wildcard.Websocket, %{}}
      ...(1)>     },
      ...(1)>     ssl_opts: nil
      ...(1)>   }
      %{
        ip: {0, 0, 0, 0},
        port: 4000,
        hosts: %{
          {:http, "*"} => {Starship.Handler.Wildcard.Http, %{}},
          {:ws, "*"} => {Starship.Handler.Wildcard.Websocket, %{}}
        },
        ssl_opts: nil
      }
      iex(2)> pid = Starship.warp_in(config)
      iex(3)> is_pid(pid)
      true
      iex(4)> Process.exit(pid, :closed)
  """
  @spec warp_in(config :: map) :: pid
  def warp_in(config) when is_map(config) do
    config = validate_config!(config)

    # The Erlang :ssl application must be running before any handshake
    # can be performed; only start it when SSL is actually configured.
    if config.ssl_opts != nil do
      :ssl.start()
    end

    # A `{:local, path}` ip means a Unix-domain socket; remove any stale
    # socket file so the bind below does not fail. Errors from File.rm/1
    # are deliberately ignored (the file may simply not exist).
    if elem(config.ip, 0) == :local do
      path = elem(config.ip, 1)
      File.rm(path)
    end

    # Caller-supplied listen options are prepended so ours act as defaults.
    listen_args = Map.get(config, :listen_args, [])

    {:ok, lsocket} =
      :gen_tcp.listen(
        config.port,
        listen_args ++
          [
            {:ifaddr, config.ip},
            {:active, false},
            {:reuseaddr, true},
            {:nodelay, true},
            {:recbuf, 4096},
            {:exit_on_close, false},
            :binary
          ]
      )

    # The acceptor loop owns the listen socket and an (initially empty)
    # receive buffer. NOTE(review): spawned unlinked — if the engine dies,
    # the caller is not notified; confirm this is intentional.
    config = Map.merge(config, %{listen_socket: lsocket, buf: <<>>})
    spawn(Starship.Drive.Engine, :start, [config])
  end

  @doc """
  Validates the configuration and adds any of the missing required information.
  ## Examples
      iex(1)> config = %{}
      %{}
      iex(2)> Starship.validate_config!(config)
      %{
        hosts: %{
          {:http, "*"} => {Starship.Handler.Wildcard.Http, %{}},
          {:ws, "*"} => {Starship.Handler.Wildcard.Websocket, %{}}
        },
        ip: {0, 0, 0, 0},
        port: 4000,
        ssl_opts: nil
      }
      iex(3)> config =
      ...(3)>   %{
      ...(3)>     ip: {1, 2, 3, 4}
      ...(3)>   }
      %{
        ip: {1, 2, 3, 4}
      }
      iex(4)> Starship.validate_config!(config)
      %{
        hosts: %{
          {:http, "*"} => {Starship.Handler.Wildcard.Http, %{}},
          {:ws, "*"} => {Starship.Handler.Wildcard.Websocket, %{}}
        },
        ip: {1, 2, 3, 4},
        port: 4000,
        ssl_opts: nil
      }
      iex(5)> config =
      ...(5)>   %{
      ...(5)>     ip: {1, 2, 3, 4},
      ...(5)>     port: 4001
      ...(5)>   }
      %{
        ip: {1, 2, 3, 4},
        port: 4001
      }
      iex(6)> Starship.validate_config!(config)
      %{
        hosts: %{
          {:http, "*"} => {Starship.Handler.Wildcard.Http, %{}},
          {:ws, "*"} => {Starship.Handler.Wildcard.Websocket, %{}}
        },
        ip: {1, 2, 3, 4},
        port: 4001,
        ssl_opts: nil
      }
      iex(7)> config =
      ...(7)>   %{
      ...(7)>     ip: {1, 2, 3, 4},
      ...(7)>     port: 4001,
      ...(7)>     hosts: %{
      ...(7)>       {:http, "*"} => {A.Different.Handler, %{}}
      ...(7)>     }
      ...(7)>   }
      %{
        hosts: %{
          {:http, "*"} => {A.Different.Handler, %{}},
        },
        ip: {1, 2, 3, 4},
        port: 4001
      }
      iex(8)> Starship.validate_config!(config)
      %{
        hosts: %{
          {:http, "*"} => {A.Different.Handler, %{}},
          {:ws, "*"} => {Starship.Handler.Wildcard.Websocket, %{}}
        },
        ip: {1, 2, 3, 4},
        port: 4001,
        ssl_opts: nil
      }
      iex(9)> config =
      ...(9)>   %{
      ...(9)>     ip: {1, 2, 3, 4},
      ...(9)>     port: 4001,
      ...(9)>     hosts: %{
      ...(9)>       {:http, "wow"} => {A.Different.Handler, %{}}
      ...(9)>     }
      ...(9)>   }
      %{
        hosts: %{
          {:http, "wow"} => {A.Different.Handler, %{}},
        },
        ip: {1, 2, 3, 4},
        port: 4001
      }
      iex(10)> Starship.validate_config!(config)
      %{
        hosts: %{
          {:http, "wow"} => {A.Different.Handler, %{}},
          {:http, "*"} => {Starship.Handler.Wildcard.Http, %{}},
          {:ws, "*"} => {Starship.Handler.Wildcard.Websocket, %{}}
        },
        ip: {1, 2, 3, 4},
        port: 4001,
        ssl_opts: nil
      }
  """
  @spec validate_config!(config :: map) :: map | no_return
  # ip, port and hosts present: keep the user's hosts but merge in the
  # default wildcard handlers for any {scheme, host} keys not provided.
  def validate_config!(%{ip: ip, port: port, hosts: hosts} = config)
      when is_tuple(ip) and is_integer(port) and is_map(hosts) do
    config
    |> Map.put(:hosts, Map.merge(@default_configuration.hosts, hosts))
    |> Map.put_new(:ssl_opts, @default_configuration.ssl_opts)
  end

  # ip and port present but no hosts: install the default handler map.
  def validate_config!(%{ip: ip, port: port} = config)
      when is_tuple(ip) and is_integer(port) do
    config
    |> Map.put(:hosts, @default_configuration.hosts)
    |> Map.put_new(:ssl_opts, @default_configuration.ssl_opts)
  end

  # Any other map: fill in every missing field from the defaults.
  def validate_config!(config) when is_map(config) do
    config
    |> Map.put_new(:ip, @default_configuration.ip)
    |> Map.put_new(:port, @default_configuration.port)
    |> Map.put(:hosts, @default_configuration.hosts)
    |> Map.put_new(:ssl_opts, @default_configuration.ssl_opts)
  end

  # Not a map at all: fail loudly with a descriptive error.
  def validate_config!(config) do
    raise Errors.InvalidConfigurationError,
      provided_config: config,
      default_config: @default_configuration
  end
end
|
lib/starship.ex
| 0.79736 | 0.659556 |
starship.ex
|
starcoder
|
defmodule Isbndbex.Api do
  @moduledoc false
  # Thin wrapper around the isbndb.com v2 JSON API. Each public function
  # issues a GET to one API path and returns the decoded JSON body with
  # atom keys (existing atoms only, via `keys: :atoms!`).

  use HTTPoison.Base

  @base_url "http://isbndb.com/api/v2/json"

  # HTTPoison.Base callback: every request path is prefixed with the base URL.
  # NOTE(review): this overrides an overridable `def` from HTTPoison.Base with
  # a `defp`, and the private `get/1`/`get/2` below shadow HTTPoison.Base's
  # generated `get/1..3` — verify this compiles cleanly on the pinned
  # HTTPoison version.
  defp process_url(url), do: @base_url <> url

  @doc """
  Gets the book with the given `id` using `key` as the API key.
  The `id` can be the books's isbn10, isbn13 or the internal isbndb book id.
  """
  def get_book(key, id), do: get("/#{key}/book/#{id}")

  @doc """
  Gets the author with the given `id` using `key` as the API key.
  The `id` corresponds to the internal isbndb author id.
  """
  def get_author(key, id), do: get("/#{key}/author/#{id}")

  @doc """
  Gets the publisher with the given `id` using `key` as the API key.
  The `id` corresponds to the internal isbndb publisher id.
  """
  def get_publisher(key, id), do: get("/#{key}/publisher/#{id}")

  @doc """
  Gets the subject with the given `id` using `key` as the API key.
  The `id` corresponds to the internal isbndb subject id.
  """
  def get_subject(key, id), do: get("/#{key}/subject/#{id}")

  @doc """
  Gets the category with the given `id` using `key` as the API key.
  The `id` corresponds to the internal isbndb category id.
  """
  def get_category(key, id), do: get("/#{key}/category/#{id}")

  @doc """
  Gets a list of the stores selling the book with the given `id` using `key` as the API key.
  The `id` can be the book's isbn10, isbn13 or the internal isbndb id.
  """
  def get_prices(key, id), do: get("/#{key}/prices/#{id}")

  @doc """
  Searches the given `index` for books matching `query` using `key` as the API key.
  """
  def get_books(key, query, index), do: get("/#{key}/books", params: %{q: query, i: index})

  @doc """
  Searches for authors who have names similar to `query` using `key` as the API key.
  """
  def get_authors(key, query), do: get("/#{key}/authors", params: %{q: query})

  @doc """
  Searches for publishers that have names similar to `query` using `key` as the API key.
  """
  def get_publishers(key, query), do: get("/#{key}/publishers", params: %{q: query})

  @doc """
  Searches for subjects that have names similar to `query` using `key` as the API key.
  """
  def get_subjects(key, query), do: get("/#{key}/subjects", params: %{q: query})

  @doc """
  Searches for categories that have names similar to `query` using `key` as the API key.
  """
  def get_categories(key, query), do: get("/#{key}/categories", params: %{q: query})

  # Performs the request (raising on transport errors via `get!`) and decodes
  # the JSON body. `keys: :atoms!` only converts to *existing* atoms, which
  # avoids unbounded atom creation from API responses.
  defp get(url) do
    get!(url).body
    |> Poison.decode!(keys: :atoms!)
  end

  defp get(url, options) do
    get!(url, [], options).body
    |> Poison.decode!(keys: :atoms!)
  end
end
|
lib/isbndbex/api.ex
| 0.874948 | 0.62986 |
api.ex
|
starcoder
|
defmodule Ecto.Migration do
@moduledoc """
Migrations are used to modify your database schema over time.
This module provides many helpers for migrating the database,
allowing developers to use Elixir to alter their storage in
a way that is database independent.
Here is an example:
defmodule MyRepo.Migrations.AddWeatherTable do
use Ecto.Migration
def up do
create table("weather") do
add :city, :string, size: 40
add :temp_lo, :integer
add :temp_hi, :integer
add :prcp, :float
timestamps()
end
end
def down do
drop table("weather")
end
end
Note that migrations have `up/0` and `down/0` instructions, where
`up/0` applies changes to the database and `down/0` rolls back
changes, returning the database schema to a previous state.
Ecto creates a table (see the `:migration_source` configuration option)
in the database in order to keep track of migrations and will add
an entry to this table for each migration you define. Ecto also
locks the table when adding/removing entries, guaranteeing two
different servers cannot run the same migration at the same time.
Ecto provides some mix tasks to help developers work with migrations:
* `mix ecto.gen.migration` - generates a
migration that the user can fill in with particular commands
* `mix ecto.migrate` - migrates a repository
* `mix ecto.rollback` - rolls back a particular migration
Run `mix help COMMAND` for more information on a particular command.
## Change
`change/0` is an abstraction that wraps both `up/0` and `down/0` for
automatically-reversible migrations. For example, the migration above
can be written as:
defmodule MyRepo.Migrations.AddWeatherTable do
use Ecto.Migration
def change do
create table("weather") do
add :city, :string, size: 40
add :temp_lo, :integer
add :temp_hi, :integer
add :prcp, :float
timestamps()
end
end
end
However, note that not all commands are reversible. Trying to rollback
a non-reversible command will raise an `Ecto.MigrationError`.
A notable command in this regard is `execute/2`, which accepts a pair
of plain SQL strings, the first to run on forward migrations (`up/0`)
and the second when rolling back (`down/0`).
If `up/0` and `down/0` are implemented in a migration, they take precedence, and
`change/0` isn't invoked.
## Field Types
The Ecto primitive types are mapped to the appropriate database
type by the various database adapters. For example, `:string` is converted to
`:varchar`, `:binary` to `:bits` or `:blob`, and so on.
Similarly, you can pass any field type supported by your database
as long as it maps to an Ecto type. For instance, you can use `:text`,
`:varchar`, or `:char` in your migrations as `add :field_name, :text`.
In your Ecto schema, they will all map to the same `:string` type.
Remember, atoms can contain arbitrary characters by enclosing in
double quotes the characters following the colon. So, if you want to use a
field type with database-specific options, you can pass atoms containing
these options like `:"int unsigned"`, `:"time without time zone"`, etc.
## Prefixes
Migrations support specifying a table prefix or index prefix which will
target either a schema (if using PostgreSQL) or a different database (if using
MySQL). If no prefix is provided, the default schema or database is used.
Any reference declared in the table migration refers by default to the table
with the same declared prefix. The prefix is specified in the table options:
def up do
create table("weather", prefix: "north_america") do
add :city, :string, size: 40
add :temp_lo, :integer
add :temp_hi, :integer
add :prcp, :float
add :group_id, references(:groups)
timestamps()
end
create index("weather", [:city], prefix: "north_america")
end
Note: if using MySQL with a prefixed table, you must use the same prefix
for the references since cross-database references are not supported.
When using a prefixed table with either MySQL or PostgreSQL, you must use the
same prefix for the index field to ensure that you index the prefix-qualified
table.
## Transactions
For PostgreSQL, Ecto always runs migrations inside a transaction, but that's not
always desired: for example, you cannot create/drop indexes concurrently inside
a transaction (see the [PostgreSQL docs](http://www.postgresql.org/docs/9.2/static/sql-createindex.html#SQL-CREATEINDEX-CONCURRENTLY)).
Migrations can be forced to run outside a transaction by setting the
`@disable_ddl_transaction` module attribute to `true`:
defmodule MyRepo.Migrations.CreateIndexes do
use Ecto.Migration
@disable_ddl_transaction true
def change do
create index("posts", [:slug], concurrently: true)
end
end
Since running migrations outside a transaction can be dangerous, consider
performing very few operations in such migrations.
### Transaction Callbacks
There are use cases that dictate adding some common behavior after beginning a
migration transaction, or before committing that transaction. For instance, one
might desire to set a `lock_timeout` for each lock in the transaction.
Another way these might be leveraged is by defining a custom migration module
so that these callbacks will run for *all* of your migrations, if you have special
requirements.
defmodule MyApp.Migration do
defmacro __using__(_) do
use Ecto.Migration
def after_begin() do
repo().query! "SET lock_timeout TO '5s'", "SET lock_timeout TO '10s'"
end
end
end
Then in your migrations you can `use MyApp.Migration` to share this behavior
among all your migrations.
## Comments
Migrations where you create or alter a table support specifying table
and column comments. The same can be done when creating constraints
and indexes. Not all databases support this feature.
def up do
create index("posts", [:name], comment: "Index Comment")
create constraint("products", "price_must_be_positive", check: "price > 0", comment: "Constraint Comment")
create table("weather", prefix: "north_america", comment: "Table Comment") do
add :city, :string, size: 40, comment: "Column Comment"
timestamps()
end
end
## Repo configuration
The following migration configuration options are available for a given repository:
* `:migration_source` - Version numbers of migrations will be saved in a
table named `schema_migrations` by default. You can configure the name of
the table via:
config :app, App.Repo, migration_source: "my_migrations"
* `:migration_primary_key` - By default, Ecto uses the `:id` column with type
`:bigserial`, but you can configure it via:
config :app, App.Repo, migration_primary_key: [name: :uuid, type: :binary_id]
* `:migration_timestamps` - By default, Ecto uses the `:naive_datetime` type, but
you can configure it via:
config :app, App.Repo, migration_timestamps: [type: :utc_datetime]
* `:migration_lock` - By default, Ecto will lock the migration table. This allows
multiple nodes to attempt to run migrations at the same time but only one will
succeed. However, this does not play well with other features, such as the
`:concurrently` option in PostgreSQL indexes. You can disable the `migration_lock`
by setting it to `nil`:
config :app, App.Repo, migration_lock: nil
* `:migration_default_prefix` - Ecto defaults to `nil` for the database prefix for
migrations, but you can configure it via:
config :app, App.Repo, migration_default_prefix: "my_prefix"
"""
@doc """
Migration code to run immediately after the transaction is opened.
Keep in mind that it is treated like any normal migration code, and should
consider both the up *and* down cases of the migration.
"""
@callback after_begin() :: term
@doc """
Migration code to run immediately before the transaction is closed.
Keep in mind that it is treated like any normal migration code, and should
consider both the up *and* down cases of the migration.
"""
@callback before_commit() :: term
@optional_callbacks after_begin: 0, before_commit: 0
defmodule Index do
  @moduledoc """
  Used internally by adapters.
  To define an index in a migration, see `Ecto.Migration.index/3`.
  """

  # Defaults: non-unique, non-concurrent index with no prefix/comment.
  defstruct table: nil,
            prefix: nil,
            name: nil,
            columns: [],
            unique: false,
            concurrently: false,
            using: nil,
            where: nil,
            comment: nil,
            options: nil

  @type t :: %__MODULE__{
          table: String.t,
          prefix: atom,
          name: atom,
          columns: [atom | String.t],
          unique: boolean,
          concurrently: boolean,
          using: atom | String.t,
          where: atom | String.t,
          comment: String.t | nil,
          options: String.t
        }
end
defmodule Table do
  @moduledoc """
  Used internally by adapters.
  To define a table in a migration, see `Ecto.Migration.table/2`.
  """

  # `primary_key: true` means an `:id` column is generated automatically.
  defstruct name: nil, prefix: nil, comment: nil, primary_key: true, engine: nil, options: nil

  @type t :: %__MODULE__{name: String.t, prefix: atom | nil, comment: String.t | nil, primary_key: boolean,
                         engine: atom, options: String.t}
end
defmodule Reference do
  @moduledoc """
  Used internally by adapters.
  To define a reference in a migration, see `Ecto.Migration.references/2`.
  """

  # Defaults to referencing the `:id` column with type `:bigserial` and
  # no action on delete/update.
  defstruct name: nil, prefix: nil, table: nil, column: :id, type: :bigserial, on_delete: :nothing, on_update: :nothing

  @type t :: %__MODULE__{table: String.t, prefix: atom | nil, column: atom, type: atom, on_delete: atom, on_update: atom}
end
defmodule Constraint do
  @moduledoc """
  Used internally by adapters.
  To define a constraint in a migration, see `Ecto.Migration.constraint/3`.
  """

  # Either `:check` or `:exclude` is set, depending on the constraint kind.
  defstruct name: nil, table: nil, check: nil, exclude: nil, prefix: nil, comment: nil

  @type t :: %__MODULE__{name: atom, table: String.t, prefix: atom | nil,
                         check: String.t | nil, exclude: String.t | nil, comment: String.t | nil}
end
defmodule Command do
  @moduledoc """
  Used internally by adapters.
  This represents the up and down legs of a reversible raw command
  that is usually defined with `Ecto.Migration.execute/1`.
  To define a reversible command in a migration, see `Ecto.Migration.execute/2`.
  """

  defstruct up: nil, down: nil

  @type t :: %__MODULE__{up: String.t, down: String.t}
end
alias Ecto.Migration.Runner
@doc false
defmacro __using__(_) do
  # Imports the migration DSL into the caller and registers the
  # `@disable_ddl_transaction` attribute (read back in __before_compile__).
  quote location: :keep do
    import Ecto.Migration
    @disable_ddl_transaction false
    @before_compile Ecto.Migration
  end
end
@doc false
defmacro __before_compile__(_env) do
  # Injects `__migration__/0` so the migration runner can read the final
  # value of `@disable_ddl_transaction` at runtime.
  quote do
    def __migration__,
      do: [disable_ddl_transaction: @disable_ddl_transaction]
  end
end
@doc """
Creates a table.
By default, the table will also include an `:id` primary key field that
has a type of `:bigserial`. Check the `table/2` docs for more information.
## Examples
    create table(:posts) do
      add :title, :string, default: "Untitled"
      add :body, :text
      timestamps()
    end
"""
defmacro create(object, do: block) do
  expand_create(object, :create, block)
end
@doc """
Creates a table if it does not exist.
Works just like `create/2` but does not raise an error when the table
already exists.
"""
defmacro create_if_not_exists(object, do: block) do
  expand_create(object, :create_if_not_exists, block)
end
# Shared expansion for the create/create_if_not_exists block macros.
# Emits code that opens a Runner command, optionally adds the configured
# primary-key column, evaluates the user's `do` block (whose `add` calls
# attach to the open command), then closes the command.
defp expand_create(object, command, block) do
  quote do
    table = %Table{} = unquote(object)
    Runner.start_command({unquote(command), Ecto.Migration.__prefix__(table)})

    if table.primary_key do
      # Primary-key name/type default to :id/:bigserial but can be
      # overridden via the repo's :migration_primary_key config.
      opts = Runner.repo_config(:migration_primary_key, [])
      opts = Keyword.put(opts, :primary_key, true)
      {name, opts} = Keyword.pop(opts, :name, :id)
      {type, opts} = Keyword.pop(opts, :type, :bigserial)
      add(name, type, opts)
    end

    unquote(block)
    Runner.end_command
    table
  end
end
@doc """
Alters a table.
## Examples
    alter table("posts") do
      add :summary, :text
      modify :title, :text
      remove :views
    end
"""
defmacro alter(object, do: block) do
  # Like expand_create/3, but never adds a primary key: the user's block
  # runs between start_command and end_command so its add/modify/remove
  # calls attach to the open :alter command.
  quote do
    table = %Table{} = unquote(object)
    Runner.start_command({:alter, Ecto.Migration.__prefix__(table)})
    unquote(block)
    Runner.end_command
  end
end
@doc """
Creates one of the following:

  * an index
  * a table with only an `:id` field
  * a constraint

When reversing (in a `change/0` running backwards), indexes are only dropped
if they exist, and no errors are raised. To enforce dropping an index, use
`drop/1`.

## Examples

    create index("posts", [:name])
    create table("version")
    create constraint("products", "price_must_be_positive", check: "price > 0")

"""
def create(%Index{} = index) do
  Runner.execute {:create, __prefix__(index)}
  index
end

def create(%Constraint{} = constraint) do
  Runner.execute {:create, __prefix__(constraint)}
  constraint
end

def create(%Table{} = table) do
  do_create table, :create
  table
end
@doc """
Creates an index or a table with only `:id` field if one does not yet exist.

Returns the given struct, mirroring `create/1`.

## Examples

    create_if_not_exists index("posts", [:name])
    create_if_not_exists table("version")

"""
def create_if_not_exists(%Index{} = index) do
  Runner.execute {:create_if_not_exists, __prefix__(index)}
  # Return the struct for consistency with `create/1`.
  index
end

def create_if_not_exists(%Table{} = table) do
  do_create table, :create_if_not_exists
  table
end
# Issues the table-creation command, prepending the default `:id` column
# when the table declares a primary key.
defp do_create(table, command) do
  pk_columns =
    if table.primary_key,
      do: [{:add, :id, :bigserial, primary_key: true}],
      else: []

  Runner.execute {command, __prefix__(table), pk_columns}
end
@doc """
Drops one of the following:

  * an index
  * a table
  * a constraint

## Examples

    drop index("posts", [:name])
    drop table("posts")
    drop constraint("products", "price_must_be_positive")

"""
def drop(%{} = index_or_table_or_constraint) do
  Runner.execute {:drop, __prefix__(index_or_table_or_constraint)}
  index_or_table_or_constraint
end
@doc """
Drops a table or index if it exists.

Does not raise an error if the specified table or index does not exist.

## Examples

    drop_if_exists index("posts", [:name])
    drop_if_exists table("posts")

"""
def drop_if_exists(%{} = index_or_table) do
  Runner.execute {:drop_if_exists, __prefix__(index_or_table)}
  index_or_table
end
@doc """
Returns a table struct that can be given to `create/2`, `alter/2`, `drop/1`,
etc.

## Examples

    create table("products") do
      add :name, :string
      add :price, :decimal
    end

    drop table("products")

    create table("products", primary_key: false) do
      add :name, :string
      add :price, :decimal
    end

## Options

  * `:primary_key` - when `false`, a primary key field is not generated on table
    creation.
  * `:engine` - customizes the table storage for supported databases. For MySQL,
    the default is InnoDB.
  * `:prefix` - the prefix for the table.
  * `:options` - provide custom options that will be appended after the generated
    statement. For example, "WITH", "INHERITS", or "ON COMMIT" clauses.

"""
def table(name, opts \\ [])

def table(name, opts) when is_atom(name) do
  # Atom names are normalized to strings so both spellings behave the same.
  table(Atom.to_string(name), opts)
end

def table(name, opts) when is_binary(name) and is_list(opts) do
  struct(%Table{name: name}, opts)
end
@doc ~S"""
Returns an index struct that can be given to `create/1`, `drop/1`, etc.

Expects the table name as the first argument and the index field(s) as
the second. The fields can be atoms, representing columns, or strings,
representing expressions that are sent as-is to the database.

## Options

  * `:name` - the name of the index. Defaults to "#{table}_#{column}_index".
  * `:unique` - indicates whether the index should be unique. Defaults to
    `false`.
  * `:concurrently` - indicates whether the index should be created/dropped
    concurrently.
  * `:using` - configures the index type.
  * `:prefix` - specify an optional prefix for the index.
  * `:where` - specify conditions for a partial index.

## Adding/dropping indexes concurrently

PostgreSQL supports adding/dropping indexes concurrently (see the
[docs](http://www.postgresql.org/docs/9.4/static/sql-createindex.html)).
In order to take advantage of this, the `:concurrently` option needs to be set
to `true` when the index is created/dropped.

**Note**: in order for the `:concurrently` option to work, the migration must
not be run inside a transaction. This means you need to set both
`@disable_ddl_transaction true` and set the `:migration_lock` repository
configuration to nil. For those reasons, we do recommend to run migrations
with concurrent indexes in isolation and disable those features only temporarily.
See the `Ecto.Migration` docs for more information on running migrations outside
of a transaction.

## Index types

When creating an index, the index type can be specified with the `:using`
option. The `:using` option can be an atom or a string, and its value is
passed to the generated `USING` clause as-is.

For example, PostgreSQL supports several index types like B-tree (the
default), Hash, GIN, and GiST. More information on index types can be found
in the [PostgreSQL docs]
(http://www.postgresql.org/docs/9.4/static/indexes-types.html).

## Partial indexes

Databases like PostgreSQL and MSSQL support partial indexes.

A partial index is an index built over a subset of a table. The subset
is defined by a conditional expression using the `:where` option.
The `:where` option can be an atom or a string; its value is passed
to the generated `WHERE` clause as-is.

More information on partial indexes can be found in the [PostgreSQL
docs](http://www.postgresql.org/docs/9.4/static/indexes-partial.html).

## Examples

    # With no name provided, the name of the below index defaults to
    # products_category_id_sku_index
    create index("products", [:category_id, :sku], unique: true)

    # The name can also be set explicitly
    drop index("products", [:category_id, :sku], name: :my_special_name)

    # Indexes can be added concurrently
    create index("products", [:category_id, :sku], concurrently: true)

    # The index type can be specified
    create index("products", [:name], using: :hash)

    # Partial indexes are created by specifying a :where option
    create index("products", [:user_id], where: "price = 0", name: :free_products_index)

Indexes also support custom expressions. Some databases may require the
index expression to be written between parentheses:

    # Create an index on a custom expression
    create index("products", ["(lower(name))"], name: :products_lower_name_index)

    # Create a tsvector GIN index on PostgreSQL
    create index("products", ["(to_tsvector('english', name))"],
                 name: :products_name_vector, using: "GIN")
"""
def index(table, columns, opts \\ [])

def index(table, columns, opts) when is_atom(table) do
  index(Atom.to_string(table), columns, opts)
end

def index(table, column, opts) when is_binary(table) and is_atom(column) do
  # A single column given as an atom is wrapped into a list.
  index(table, [column], opts)
end

def index(table, columns, opts) when is_binary(table) and is_list(columns) and is_list(opts) do
  validate_index_opts!(opts)
  index = struct(%Index{table: table, columns: columns}, opts)
  # Fall back to the auto-generated "<table>_<columns>_index" name.
  %{index | name: index.name || default_index_name(index)}
end
@doc """
Shortcut for creating a unique index.

See `index/3` for more information.
"""
def unique_index(table, columns, opts \\ [])

def unique_index(table, columns, opts) when is_list(opts) do
  # Prepend :unique so an explicit value in `opts` keeps its original
  # position (later duplicate keys win when building the struct).
  index(table, columns, [{:unique, true} | opts])
end
# Builds the default index name, "<table>_<col1>_..._index", sanitizing each
# segment down to word characters and trimming trailing underscores.
defp default_index_name(index) do
  [index.table, index.columns, "index"]
  |> List.flatten()
  |> Enum.map_join("_", fn segment ->
    segment
    |> to_string()
    |> String.replace(~r"[^\w_]", "_")
    |> String.replace_trailing("_", "")
  end)
  |> String.to_atom()
end
@doc """
Executes arbitrary SQL or a keyword command.

Reversible commands can be defined by calling `execute/2`.

## Examples

    execute "CREATE EXTENSION postgres_fdw"

    execute create: "posts", capped: true, size: 1024

"""
def execute(command) when is_binary(command) or is_list(command) do
  Runner.execute command
end
@doc """
Executes reversible SQL commands.

This is useful for database-specific functionality that does not
warrant special support in Ecto, for example, creating and dropping
a PostgreSQL extension. The `execute/2` form avoids having to define
separate `up/0` and `down/0` blocks that each contain an `execute/1`
expression.

## Examples

    execute "CREATE EXTENSION postgres_fdw", "DROP EXTENSION postgres_fdw"

"""
def execute(up, down) when (is_binary(up) or is_list(up)) and
                           (is_binary(down) or is_list(down)) do
  Runner.execute %Command{up: up, down: down}
end
@doc """
Gets the migrator direction.
"""
@spec direction :: :up | :down
def direction do
  Runner.migrator_direction
end
@doc """
Gets the migrator repo.
"""
@spec repo :: Ecto.Repo.t
def repo do
  Runner.repo()
end
@doc """
Gets the migrator prefix.
"""
def prefix do
  Runner.prefix
end
@doc """
Adds a column when creating or altering a table.

This function also accepts Ecto primitive types as column types
that are normalized by the database adapter. For example,
`:string` is converted to `:varchar`, `:binary` to `:bits` or `:blob`,
and so on.

However, the column type is not always the same as the type used in your
schema. For example, a schema that has a `:string` field can be supported by
columns of type `:char`, `:varchar`, `:text`, and others. For this reason,
this function also accepts `:text` and other type annotations that are native
to the database. These are passed to the database as-is.

To sum up, the column type may be either an Ecto primitive type,
which is normalized in cases where the database does not understand it,
such as `:string` or `:binary`, or a database type which is passed as-is.
Custom Ecto types like `Ecto.UUID` are not supported because
they are application-level concerns and may not always map to the database.

## Examples

    create table("posts") do
      add :title, :string, default: "Untitled"
    end

    alter table("posts") do
      add :summary, :text # Database type
      add :object, :map # Elixir type which is handled by the database
    end

## Options

  * `:primary_key` - when `true`, marks this field as the primary key.
    If multiple fields are marked, a composite primary key will be created.
  * `:default` - the column's default value. It can be a string, number, empty
    list, list of strings, list of numbers, or a fragment generated by
    `fragment/1`.
  * `:null` - when `false`, the column does not allow null values.
  * `:size` - the size of the type (for example, the number of characters).
    The default is no size, except for `:string`, which defaults to `255`.
  * `:precision` - the precision for a numeric type. Required when `:scale` is
    specified.
  * `:scale` - the scale of a numeric type. Defaults to `0`.

"""
def add(column, type, opts \\ [])

def add(column, :datetime, _opts) when is_atom(column) do
  # :datetime is ambiguous across databases, so it is rejected outright.
  raise ArgumentError, "the :datetime type in migrations is not supported, " <>
                       "please use :utc_datetime or :naive_datetime instead"
end

def add(column, type, opts) when is_atom(column) and is_list(opts) do
  # :scale is meaningless without :precision, so reject it early.
  if opts[:scale] && !opts[:precision] do
    raise ArgumentError, "column #{Atom.to_string(column)} is missing precision option"
  end

  validate_type!(type)
  Runner.subcommand {:add, column, type, opts}
end
@doc """
Renames a table.

## Examples

    rename table("posts"), to: table("new_posts")

"""
def rename(%Table{} = table_current, to: %Table{} = table_new) do
  Runner.execute {:rename, __prefix__(table_current), __prefix__(table_new)}
  table_new
end
@doc """
Renames a column outside of the `alter` statement.

## Examples

    rename table("posts"), :title, to: :summary

"""
def rename(%Table{} = table, current_column, to: new_column) when is_atom(current_column) and is_atom(new_column) do
  Runner.execute {:rename, __prefix__(table), current_column, new_column}
  table
end
@doc """
Generates a fragment to be used as a default value.

## Examples

    create table("posts") do
      add :inserted_at, :naive_datetime, default: fragment("now()")
    end

"""
def fragment(expr) when is_binary(expr), do: {:fragment, expr}
@doc """
Adds `:inserted_at` and `:updated_at` timestamp columns.

Those columns are of `:naive_datetime` type and by default cannot be null. A
list of `opts` can be given to customize the generated fields.

## Options

  * `:inserted_at` - the name of the column for storing insertion times.
    Setting it to `false` disables the column.
  * `:updated_at` - the name of the column for storing last-updated-at times.
    Setting it to `false` disables the column.
  * `:type` - the type of the `:inserted_at` and `:updated_at` columns.
    Defaults to `:naive_datetime`.

"""
def timestamps(opts \\ []) when is_list(opts) do
  # Explicit opts win over the repo-wide :migration_timestamps configuration.
  opts = Keyword.merge(Runner.repo_config(:migration_timestamps, []), opts)
  opts = Keyword.put_new(opts, :null, false)

  # Pop the config keys so the remaining opts are passed straight to add/3.
  {type, opts} = Keyword.pop(opts, :type, :naive_datetime)
  {inserted_at, opts} = Keyword.pop(opts, :inserted_at, :inserted_at)
  {updated_at, opts} = Keyword.pop(opts, :updated_at, :updated_at)

  # Setting a column name to `false` disables that column entirely.
  if inserted_at != false, do: add(inserted_at, type, opts)
  if updated_at != false, do: add(updated_at, type, opts)
end
@doc """
Modifies the type of a column when altering a table.

This command is not reversible unless the `:from` option is provided.
If the `:from` value is a `%Reference{}`, the adapter will try to drop
the corresponding foreign key constraints before modifying the type.

See `add/3` for more information on supported types.

## Examples

    alter table("posts") do
      modify :title, :text
    end

## Options

  * `:null` - determines whether the column accepts null values.
  * `:default` - changes the default value of the column.
  * `:from` - specifies the current type of the column.
  * `:size` - specifies the size of the type (for example, the number of characters).
    The default is no size.
  * `:precision` - the precision for a numeric type. Required when `:scale` is
    specified.
  * `:scale` - the scale of a numeric type. Defaults to `0`.

"""
def modify(column, type, opts \\ [])

def modify(column, :datetime, _opts) when is_atom(column) do
  # Same restriction as add/3: :datetime is ambiguous across databases.
  raise ArgumentError, "the :datetime type in migrations is not supported, " <>
                       "please use :utc_datetime or :naive_datetime instead"
end

def modify(column, type, opts) when is_atom(column) and is_list(opts) do
  if opts[:scale] && !opts[:precision] do
    raise ArgumentError, "column #{Atom.to_string(column)} is missing precision option"
  end

  validate_type!(type)
  Runner.subcommand {:modify, column, type, opts}
end
@doc """
Removes a column when altering a table.

This command is not reversible as Ecto does not know what type it should add
the column back as. See `remove/3` as a reversible alternative.

## Examples

    alter table("posts") do
      remove :title
    end

"""
def remove(column) when is_atom(column) do
  Runner.subcommand {:remove, column}
end
@doc """
Removes a column in a reversible way when altering a table.

`type` and `opts` are exactly the same as in `add/3`, and
they are used when the command is reversed.

If the `type` value is a `%Reference{}`, it is used to remove the constraint.

## Examples

    alter table("posts") do
      remove :title, :string, default: ""
    end

"""
def remove(column, type, opts \\ []) when is_atom(column) do
  validate_type!(type)
  Runner.subcommand {:remove, column, type, opts}
end
@doc ~S"""
Defines a foreign key.

## Examples

    create table("products") do
      add :group_id, references("groups")
    end

## Options

  * `:name` - The name of the underlying reference, which defaults to
    "#{table}_#{column}_fkey".
  * `:column` - The foreign key column name, which defaults to `:id`.
  * `:prefix` - The prefix for the reference. Defaults to the reference
    of the table if present, or `nil`.
  * `:type` - The foreign key type, which defaults to `:bigserial`.
  * `:on_delete` - What to do if the referenced entry is deleted. May be
    `:nothing` (default), `:delete_all`, `:nilify_all`, or `:restrict`.
  * `:on_update` - What to do if the referenced entry is updated. May be
    `:nothing` (default), `:update_all`, `:nilify_all`, or `:restrict`.

"""
def references(table, opts \\ [])

def references(table, opts) when is_atom(table) do
  references(Atom.to_string(table), opts)
end

def references(table, opts) when is_binary(table) and is_list(opts) do
  # The reference type follows the repo's :migration_primary_key type,
  # unless explicitly overridden in opts.
  repo_opts = Keyword.take(Runner.repo_config(:migration_primary_key, []), [:type])
  opts = Keyword.merge(repo_opts, opts)
  reference = struct(%Reference{table: table}, opts)

  unless reference.on_delete in [:nothing, :delete_all, :nilify_all, :restrict] do
    raise ArgumentError, "unknown :on_delete value: #{inspect reference.on_delete}"
  end

  unless reference.on_update in [:nothing, :update_all, :nilify_all, :restrict] do
    raise ArgumentError, "unknown :on_update value: #{inspect reference.on_update}"
  end

  reference
end
@doc ~S"""
Defines a constraint (either a check constraint or an exclusion constraint)
to be evaluated by the database when a row is inserted or updated.

## Examples

    create constraint("users", :price_must_be_positive, check: "price > 0")
    create constraint("size_ranges", :no_overlap, exclude: ~s|gist (int4range("from", "to", '[]') WITH &&)|)
    drop constraint("products", "price_must_be_positive")

## Options

  * `:check` - A check constraint expression. Required when creating a check constraint.
  * `:exclude` - An exclusion constraint expression. Required when creating an exclusion constraint.
  * `:prefix` - The prefix for the table.

"""
def constraint(table, name, opts \\ [])

def constraint(table, name, opts) when is_atom(table) do
  constraint(Atom.to_string(table), name, opts)
end

def constraint(table, name, opts) when is_binary(table) and is_list(opts) do
  struct(%Constraint{table: table, name: name}, opts)
end
@doc """
Executes queued migration commands.

Reverses the order in which commands are executed when doing a rollback
on a `change/0` function and resets the commands queue.
"""
def flush do
  Runner.flush
end
# Validation helpers

# Rejects Elixir module names (for example `Ecto.UUID`) used as column
# types; migrations must use database-level type atoms instead.
defp validate_type!(type) when is_atom(type) do
  if String.starts_with?(Atom.to_string(type), "Elixir.") do
    raise ArgumentError,
          "#{inspect type} is not a valid database type, " <>
            "please use an atom like :string, :text and so on"
  else
    :ok
  end
end

defp validate_type!({type, subtype}) when is_atom(type) and is_atom(subtype),
  do: validate_type!(subtype)

defp validate_type!({type, subtype}) when is_atom(type) and is_tuple(subtype),
  do: Enum.map(Tuple.to_list(subtype), &validate_type!/1)

defp validate_type!(%Reference{} = reference), do: reference
# Rejects option lists that declare :where more than once; a partial index
# must express all its conditions in a single WHERE clause.
defp validate_index_opts!(opts) when is_list(opts) do
  if match?([_, _ | _], Keyword.get_values(opts, :where)) do
    raise ArgumentError,
          "only one `where` keyword is supported when declaring a partial index. " <>
            "To specify multiple conditions, write a single WHERE clause using AND between them"
  else
    :ok
  end
end

defp validate_index_opts!(opts), do: opts
@doc false
# Resolves the effective prefix for an index/table against the migrator's
# prefix, raising when the two conflict.
def __prefix__(%{prefix: prefix} = index_or_table) do
  runner_prefix = Runner.prefix()

  cond do
    # No prefix given: inherit the migrator prefix or the repo default.
    is_nil(prefix) ->
      prefix = runner_prefix || Runner.repo_config(:migration_default_prefix, nil)
      %{index_or_table | prefix: prefix}

    # Prefix agrees with the migrator prefix (or the migrator has none).
    is_nil(runner_prefix) or runner_prefix == to_string(prefix) ->
      index_or_table

    # Conflicting prefixes: abort. (Bug fix: the message previously read
    # "does match" even though this branch is only reached on a mismatch.)
    true ->
      raise Ecto.MigrationError, message:
        "the :prefix option `#{prefix}` does not match the migrator prefix `#{runner_prefix}`"
  end
end
end
|
deps/ecto_sql/lib/ecto/migration.ex
| 0.838084 | 0.409664 |
migration.ex
|
starcoder
|
defmodule Kernel.ParallelCompiler do
  @moduledoc """
  A module responsible for compiling and requiring files in parallel.
  """

  @doc """
  Starts a task for parallel compilation.

  If you have a file that needs to compile other modules in parallel,
  the spawned processes need to be aware of the compiler environment.
  This function allows a developer to create a task that is aware of
  those environments.

  See `Task.async/1` for more information. The task spawned must be
  always awaited on by calling `Task.await/1`
  """
  @since "1.6.0"
  def async(fun) when is_function(fun) do
    if parent = :erlang.get(:elixir_compiler_pid) do
      file = :erlang.get(:elixir_compiler_file)
      {:error_handler, error_handler} = :erlang.process_info(self(), :error_handler)

      Task.async(fn ->
        # Propagate the compiler environment (parent pid, current file and
        # error handler) into the task process so it can participate in
        # the parallel compilation protocol.
        :erlang.put(:elixir_compiler_pid, parent)
        :erlang.put(:elixir_compiler_file, file)
        :erlang.process_flag(:error_handler, error_handler)
        fun.()
      end)
    else
      raise ArgumentError,
            "cannot spawn parallel compiler task because " <>
              "the current file is not being compiled/required"
    end
  end

  @doc """
  Compiles the given files.

  Those files are compiled in parallel and can automatically
  detect dependencies between them. Once a dependency is found,
  the current file stops being compiled until the dependency is
  resolved.

  It returns `{:ok, modules, warnings}` or `{:error, errors, warnings}`.
  Both errors and warnings are a list of three element tuples containing
  the file, line and the formatted error/warning.

  ## Options

    * `:each_file` - for each file compiled, invokes the callback passing the
      file

    * `:each_long_compilation` - for each file that takes more than a given
      timeout (see the `:long_compilation_threshold` option) to compile, invoke
      this callback passing the file as its argument

    * `:each_module` - for each module compiled, invokes the callback passing
      the file, module and the module bytecode

    * `:each_cycle` - after the given files are compiled, invokes this function
      that return a list with potentially more files to compile

    * `:long_compilation_threshold` - the timeout (in seconds) after the
      `:each_long_compilation` callback is invoked; defaults to `15`

    * `:dest` - the destination directory for the BEAM files. When using `files/2`,
      this information is only used to properly annotate the BEAM files before
      they are loaded into memory. If you want a file to actually be written to
      `dest`, use `compile_to_path/3` instead.
  """
  @since "1.6.0"
  def compile(files, options \\ []) when is_list(options) do
    spawn_workers(files, :compile, options)
  end

  @since "1.6.0"
  def compile_to_path(files, path, options \\ []) when is_binary(path) and is_list(options) do
    spawn_workers(files, {:compile, path}, options)
  end

  @doc """
  Requires the given files in parallel.

  Opposite to compile, dependencies are not attempted to be
  automatically solved between files.

  It returns `{:ok, modules, warnings}` or `{:error, errors, warnings}`.
  Both errors and warnings are a list of three element tuples containing
  the file, line and the formatted error/warning.

  ## Options

    * `:each_file` - for each file compiled, invokes the callback passing the
      file

    * `:each_module` - for each module compiled, invokes the callback passing
      the file, module and the module bytecode
  """
  @since "1.6.0"
  def require(files, options \\ []) when is_list(options) do
    spawn_workers(files, :require, options)
  end

  # TODO: Deprecate on Elixir v1.8
  @doc false
  def files(files, options \\ []) when is_list(options) do
    case spawn_workers(files, :compile, options) do
      {:ok, modules, _} -> modules
      {:error, _, _} -> exit({:shutdown, 1})
    end
  end

  # TODO: Deprecate on Elixir v1.8
  @doc false
  def files_to_path(files, path, options \\ []) when is_binary(path) and is_list(options) do
    case spawn_workers(files, {:compile, path}, options) do
      {:ok, modules, _} -> modules
      {:error, _, _} -> exit({:shutdown, 1})
    end
  end

  # Entry point of the worker loop: normalizes options into a state map,
  # runs the compilation, then folds in the --warnings-as-errors status.
  defp spawn_workers(files, output, options) do
    true = Code.ensure_loaded?(Kernel.ErrorHandler)
    compiler_pid = self()
    :elixir_code_server.cast({:reset_warnings, compiler_pid})
    schedulers = max(:erlang.system_info(:schedulers_online), 2)

    result =
      spawn_workers(files, [], [], [], [], %{
        dest: Keyword.get(options, :dest),
        each_cycle: Keyword.get(options, :each_cycle, fn -> [] end),
        each_file: Keyword.get(options, :each_file, fn _file -> :ok end),
        each_long_compilation: Keyword.get(options, :each_long_compilation, fn _file -> :ok end),
        each_module: Keyword.get(options, :each_module, fn _file, _module, _binary -> :ok end),
        output: output,
        long_compilation_threshold: Keyword.get(options, :long_compilation_threshold, 15),
        schedulers: schedulers
      })

    # In case --warning-as-errors is enabled and there was a warning,
    # compilation status will be set to error.
    compilation_status = :elixir_code_server.call({:compilation_status, compiler_pid})

    case {result, compilation_status} do
      {{:ok, _, warnings}, :error} ->
        message = "Compilation failed due to warnings while using the --warnings-as-errors option"
        IO.puts(:stderr, message)
        {:error, warnings, []}

      {{:error, errors, warnings}, :error} ->
        {:error, errors ++ warnings, []}

      _ ->
        result
    end
  end

  # The recursive worker loop. Arguments are:
  #   files    - entries still to process (file names or {ref, found} releases)
  #   waiting  - workers blocked on a module someone else must define
  #   queued   - {pid, monitor_ref, file, timer_ref} for every running worker
  #   result   - accumulated {:module, mod} / {:struct, mod} entries
  #   warnings - accumulated warnings (reversed)

  # We already have n=schedulers currently running, don't spawn new ones
  defp spawn_workers(files, waiting, queued, result, warnings, %{schedulers: schedulers} = state)
       when length(queued) - length(waiting) >= schedulers do
    wait_for_messages(files, waiting, queued, result, warnings, state)
  end

  # Release waiting processes
  defp spawn_workers([{ref, found} | t], waiting, queued, result, warnings, state) do
    waiting =
      case List.keytake(waiting, ref, 2) do
        {{_kind, pid, ^ref, _on, _defining}, waiting} ->
          send(pid, {ref, found})
          waiting

        nil ->
          waiting
      end

    spawn_workers(t, waiting, queued, result, warnings, state)
  end

  defp spawn_workers([file | files], waiting, queued, result, warnings, state) do
    %{output: output, long_compilation_threshold: threshold, dest: dest} = state
    parent = self()

    {pid, ref} =
      :erlang.spawn_monitor(fn ->
        :erlang.put(:elixir_compiler_pid, parent)
        :erlang.put(:elixir_compiler_file, file)

        result =
          try do
            _ =
              case output do
                {:compile, path} ->
                  :erlang.process_flag(:error_handler, Kernel.ErrorHandler)
                  :elixir_compiler.file_to_path(file, path)

                :compile ->
                  :erlang.process_flag(:error_handler, Kernel.ErrorHandler)
                  :elixir_compiler.file(file, dest)

                :require ->
                  Code.require_file(file)
              end

            :ok
          catch
            kind, reason ->
              # NOTE(review): System.stacktrace/0 is deprecated on Elixir >= 1.7
              # in favor of __STACKTRACE__ — confirm the minimum supported version.
              {kind, reason, System.stacktrace()}
          end

        send(parent, {:file_done, self(), file, result})
        exit(:shutdown)
      end)

    # Schedule the "long compilation" notification; canceled on completion.
    timer_ref = Process.send_after(self(), {:timed_out, pid}, threshold * 1000)
    queued = [{pid, ref, file, timer_ref} | queued]
    spawn_workers(files, waiting, queued, result, warnings, state)
  end

  # No more files, nothing waiting, queue is empty, this cycle is done
  defp spawn_workers([], [], [], result, warnings, state) do
    case state.each_cycle.() do
      [] ->
        modules = for {:module, mod} <- result, do: mod
        warnings = Enum.reverse(warnings)
        {:ok, modules, warnings}

      more ->
        spawn_workers(more, [], [], result, warnings, state)
    end
  end

  # Queued x, waiting for x: POSSIBLE ERROR! Release processes so we get the failures
  defp spawn_workers([], waiting, queued, result, warnings, state)
       when length(waiting) == length(queued) do
    pending =
      for {pid, _, _, _} <- queued,
          entry = waiting_on_without_definition(waiting, pid),
          {_, _, ref, on, _} = entry,
          do: {on, {ref, :not_found}}

    # Instead of releasing all files at once, we release them in groups
    # based on the module they are waiting on. We pick the module being
    # depended on with less edges, as it is the mostly likely source of
    # error (for example, someone made a typo). This may not always be
    # true though: for example, if there is a macro injecting code into
    # multiple modules and such code becomes faulty, now multiple modules
    # are waiting on the same module required by the faulty code. However,
    # since we need to pick something to be first, the one with fewer edges
    # sounds like a sane choice.
    pending
    |> Enum.group_by(&elem(&1, 0), &elem(&1, 1))
    |> Enum.sort_by(&length(elem(&1, 1)))
    |> case do
      [{_on, refs} | _] ->
        spawn_workers(refs, waiting, queued, result, warnings, state)

      [] ->
        errors = handle_deadlock(waiting, queued)
        {:error, errors, warnings}
    end
  end

  # No more files, but queue and waiting are not full or do not match
  defp spawn_workers([], waiting, queued, result, warnings, state) do
    wait_for_messages([], waiting, queued, result, warnings, state)
  end

  # Returns the waiting entry for `pid` unless the module it waits on is
  # being defined by some other waiting worker (in which case it will be
  # resolved naturally and must not be force-released).
  defp waiting_on_without_definition(waiting, pid) do
    {_, ^pid, _, on, _} = entry = List.keyfind(waiting, pid, 1)

    if Enum.any?(waiting, fn {_, _, _, _, defining} -> on in defining end) do
      nil
    else
      entry
    end
  end

  # Wait for messages from child processes
  defp wait_for_messages(files, waiting, queued, result, warnings, state) do
    %{output: output} = state

    receive do
      {:struct_available, module} ->
        # Release every worker blocked waiting for this struct.
        available =
          for {:struct, _, ref, waiting_module, _defining} <- waiting,
              module == waiting_module,
              do: {ref, :found}

        result = [{:struct, module} | result]
        spawn_workers(available ++ files, waiting, queued, result, warnings, state)

      {:module_available, child, ref, file, module, binary} ->
        state.each_module.(file, module, binary)

        # Release the module loader which is waiting for an ack
        send(child, {ref, :ack})

        available =
          for {:module, _, ref, waiting_module, _defining} <- waiting,
              module == waiting_module,
              do: {ref, :found}

        cancel_waiting_timer(queued, child)
        result = [{:module, module} | result]
        spawn_workers(available ++ files, waiting, queued, result, warnings, state)

      # If we are simply requiring files, we do not add to waiting.
      {:waiting, _kind, child, ref, _on, _defining} when output == :require ->
        send(child, {ref, :not_found})
        spawn_workers(files, waiting, queued, result, warnings, state)

      {:waiting, kind, child, ref, on, defining} ->
        # Oops, we already got it, do not put it on waiting.
        # Alternatively, we're waiting on ourselves,
        # send :found so that we can crash with a better error.
        waiting =
          if :lists.any(&match?({^kind, ^on}, &1), result) or on in defining do
            send(child, {ref, :found})
            waiting
          else
            [{kind, child, ref, on, defining} | waiting]
          end

        spawn_workers(files, waiting, queued, result, warnings, state)

      {:timed_out, child} ->
        # The long-compilation timer fired; notify the callback if the
        # worker is still running.
        case List.keyfind(queued, child, 0) do
          {^child, _, file, _} ->
            state.each_long_compilation.(file)

          _ ->
            :ok
        end

        spawn_workers(files, waiting, queued, result, warnings, state)

      {:warning, file, line, message} ->
        file = file && Path.absname(file)
        message = :unicode.characters_to_binary(message)
        warning = {file, line, message}
        wait_for_messages(files, waiting, queued, result, [warning | warnings], state)

      {:file_done, child_pid, file, :ok} ->
        discard_down(child_pid)
        state.each_file.(file)
        cancel_waiting_timer(queued, child_pid)

        # Sometimes we may have spurious entries in the waiting
        # list because someone invoked try/rescue UndefinedFunctionError
        # NOTE(review): `files` holds file names and {ref, found} tuples, so
        # deleting `child_pid` from it looks like a no-op — confirm intent.
        new_files = List.delete(files, child_pid)
        new_queued = List.keydelete(queued, child_pid, 0)
        new_waiting = List.keydelete(waiting, child_pid, 1)
        spawn_workers(new_files, new_waiting, new_queued, result, warnings, state)

      {:file_done, child_pid, file, {kind, reason, stack}} ->
        discard_down(child_pid)
        print_error(file, kind, reason, stack)
        terminate(queued)
        {:error, [to_error(file, kind, reason, stack)], warnings}

      {:DOWN, ref, :process, _pid, reason} ->
        case handle_down(queued, ref, reason) do
          :ok -> wait_for_messages(files, waiting, queued, result, warnings, state)
          {:error, errors} -> {:error, errors, warnings}
        end
    end
  end

  # Flushes the :DOWN message of a monitored worker that finished normally,
  # so it does not linger in the mailbox.
  defp discard_down(pid) do
    receive do
      {:DOWN, _, :process, ^pid, _} -> :ok
    end
  end

  defp handle_down(_queued, _ref, :normal) do
    :ok
  end

  # A worker died abnormally: report the error for its file and kill the
  # remaining workers.
  defp handle_down(queued, ref, reason) do
    case List.keyfind(queued, ref, 1) do
      {_child, ^ref, file, _timer_ref} ->
        print_error(file, :exit, reason, [])
        terminate(queued)
        {:error, [to_error(file, :exit, reason, [])]}

      _ ->
        :ok
    end
  end

  # Every remaining worker is waiting on some module: report each file as
  # deadlocked, kill the workers, and return the corresponding errors.
  defp handle_deadlock(waiting, queued) do
    deadlock =
      for {pid, _, file, _} <- queued do
        {:current_stacktrace, stacktrace} = Process.info(pid, :current_stacktrace)
        Process.exit(pid, :kill)

        {_kind, ^pid, _, on, _} = List.keyfind(waiting, pid, 1)
        description = "deadlocked waiting on module #{inspect(on)}"
        error = CompileError.exception(description: description, file: nil, line: nil)
        print_error(file, :error, error, stacktrace)
        {file, on, description}
      end

    IO.puts("""
    Compilation failed because of a deadlock between files.
    The following files depended on the following modules:
    """)

    # Pad file names so the "file => module" table lines up.
    max =
      deadlock
      |> Enum.map(&(&1 |> elem(0) |> String.length()))
      |> Enum.max()

    for {file, mod, _} <- deadlock do
      IO.puts([" ", String.pad_leading(file, max), " => " | inspect(mod)])
    end

    IO.puts("")
    for {file, _, description} <- deadlock, do: {Path.absname(file), nil, description}
  end

  # Kills every still-running worker.
  defp terminate(queued) do
    for {pid, _, _, _} <- queued do
      Process.exit(pid, :kill)
    end
  end

  defp print_error(file, kind, reason, stack) do
    IO.write([
      "\n== Compilation error in file #{Path.relative_to_cwd(file)} ==\n",
      Kernel.CLI.format_error(kind, reason, stack)
    ])
  end

  # Cancels the long-compilation timer for a finished worker and drains a
  # possibly already-delivered :timed_out message.
  defp cancel_waiting_timer(queued, child_pid) do
    case List.keyfind(queued, child_pid, 0) do
      {^child_pid, _ref, _file, timer_ref} ->
        Process.cancel_timer(timer_ref)

        # Let's flush the message in case it arrived before we canceled the
        # timeout.
        receive do
          {:timed_out, ^child_pid} -> :ok
        after
          0 -> :ok
        end

      nil ->
        :ok
    end
  end

  # Converts a caught error into the {file, line, message} tuple returned
  # to callers.
  defp to_error(file, kind, reason, stack) do
    line = get_line(file, reason, stack)
    file = Path.absname(file)
    message = :unicode.characters_to_binary(Kernel.CLI.format_error(kind, reason, stack))
    {file, line, message}
  end

  # Best-effort extraction of the error line: prefer the exception's own
  # :line field, then stacktrace entries that point at the compiled file.
  defp get_line(_file, %{line: line}, _stack) when is_integer(line) and line > 0 do
    line
  end

  defp get_line(file, :undef, [{_, _, _, []}, {_, _, _, info} | _]) do
    if Keyword.get(info, :file) == to_charlist(Path.relative_to_cwd(file)) do
      Keyword.get(info, :line)
    end
  end

  defp get_line(file, _reason, [{_, _, _, info} | _]) do
    if Keyword.get(info, :file) == to_charlist(Path.relative_to_cwd(file)) do
      Keyword.get(info, :line)
    end
  end

  defp get_line(_, _, _) do
    nil
  end
end
|
lib/elixir/lib/kernel/parallel_compiler.ex
| 0.707203 | 0.427397 |
parallel_compiler.ex
|
starcoder
|
defmodule Stargate.Receiver do
  @moduledoc """
  Provides a Stargate websocket process that can be either a
  reader or consumer connection based on the configuration passed
  when starting the process.
  """
  require Logger

  use Stargate.Connection
  import Stargate.Supervisor, only: [via: 2]
  alias Stargate.{Consumer, Reader}
  alias Stargate.Receiver.Dispatcher

  @typedoc "A string identifier assigned to each message by the cluster"
  @type message_id :: String.t()

  @doc """
  Sends an acknowledgement of the given message ID back to the Pulsar
  cluster via the provided websocket process connection. This is required
  for all Stargate consumers and readers: for consumers, acknowledgement
  signals the cluster to delete messages from the topic/subscription and
  send more, while readers require acknowledgement to signal readiness for
  more messages.
  """
  @spec ack(GenServer.server(), message_id()) :: :ok | {:error, term()}
  def ack(receiver, message_id) do
    ack = construct_response(message_id)
    WebSockex.send_frame(receiver, {:text, ack})
  end

  @doc """
  Sends a permit request to the Pulsar cluster via the provided websocket process
  connection. Used for consumers in pull mode to release up to the requested number
  of messages to be returned when available.
  """
  @spec pull_permit(GenServer.server(), non_neg_integer()) :: :ok | {:error, term()}
  def pull_permit(receiver, count) do
    permit = construct_permit(count)
    WebSockex.send_frame(receiver, {:text, permit})
  end

  defmodule State do
    @moduledoc """
    Defines the state stored by the consumer or reader websocket process. The
    Stargate receiver records the registry name associated to its supervision tree,
    the URL of the cluster and topic it connects to, as well as the individual
    components that make up the URL including the host, protocol (ws or wss), topic
    path parameters (persistent or non-persistent, tenant, namespace, and topic)
    and any query parameters configuring the connection.
    """
    defstruct [
      :registry,
      :url,
      :host,
      :protocol,
      :persistence,
      :tenant,
      :namespace,
      :topic,
      :query_params
    ]
  end

  @doc """
  Start a consumer or reader websocket process and link it to the current process.
  Consumer and reader options require, at minimum:
  * `host` is a tuple of the address or URL of the Pulsar cluster (broker service)
    and the port on which the service is exposed.
  * `tenant` is a string representing the tenant portion of the receiver URL path parameter.
  * `namespace` is a string representing the namespace portion of the receiver URL path parameter.
  * `topic` is a string representing the topic portion of the receiver URL path parameter.
  * `subscription` (for consumers) is a string representing the subscription portion of the
    receiver URL path parameter.
  * `registry` is the name of the process registry associated to the client's supervision tree.
    Stargate uses this to subscribe to the stages of the receiver and to send messages back
    and forth between them.
  * `handler` is the name of the handler module that implements the
    `Stargate.Receiver.MessageHandler` behaviour.

  Additional optional parameters to a consumer and reader are:
  * `protocol` can be one of "ws" or "wss"; defaults to "ws".
  * `persistence` can be one of "persistent" or "non-persistent" per the Pulsar specification
    of topics as being in-memory only or persisted to the brokers' disks. Defaults to "persistent".
  * `processors` is the number of GenStage processes in the "processor" stage to be created.
    This is the stage that performs the work of the message handler to perform processing logic
    on the received messages. Defaults to 1.
  * `handler_init_args` is any term that will be passed to the message handler to initialize
    its state when a stateful handler is desired. Defaults to an empty list.
  * `query_params` is a map containing any or all of the following:

      # Consumer
      * `ack_timeout` sets the timeout for unacked messages. Defaults to 0.
      * `subscription_type` can be one of `:exclusive`, `:failover`, or `:shared` to tell
        the Pulsar cluster if one or more consumers will be receiving messages on this topic
        and subscription. Defaults to exclusive.
      * `queue_size` sets the number of messages in the consumer's receive queue. Defaults to 1000.
      * `name` registers a name for the consumer client with the Pulsar cluster.
      * `priority` sets the priority with the cluster for the consumer client to receive messages.
      * `max_redeliver_count` defines a maximum number of times to attempt redelivery of a message
        to the consumer before sending it to a dead letter queue. Activates the dead letter topic feature.
      * `dead_letter_topic` defines a name for a topic's corresponding dead letter topic. Activates
        the dead letter topic feature. Defaults to "{topic}-{subscription}-DLQ".
      * `pull_mode` can be `true` or `false`. When a consumer is in pull mode, the cluster will hold
        messages on the subscription until it receives a permit request with an explicit number
        of desired messages to fulfill.

      # Reader
      * `name` registers a name for the reader client with the Pulsar cluster.
      * `queue_size` is the size of the queue maintained for the reader; defaults to 1000.
      * `starting_message` can be one of `:earliest`, `:latest`, or a message ID.
        Sets the reader's cursor to the desired message within the stream. Defaults to latest.
  """
  @spec start_link(keyword()) :: GenServer.on_start()
  def start_link(args) do
    type = Keyword.fetch!(args, :type)
    registry = Keyword.fetch!(args, :registry)
    query_params_config = Keyword.get(args, :query_params)

    # Consumers and readers accept different sets of query parameters.
    query_params =
      case type do
        :consumer -> Consumer.QueryParams.build_params(query_params_config)
        :reader -> Reader.QueryParams.build_params(query_params_config)
      end

    setup_state = %{
      registry: registry,
      query_params: query_params_config
    }

    state =
      args
      |> Stargate.Connection.connection_settings(type, query_params)
      |> Map.merge(setup_state)
      |> (fn fields -> struct(State, fields) end).()

    # Register the websocket under a via-tuple keyed by type and full topic path.
    server_opts =
      args
      |> Stargate.Connection.auth_settings()
      |> Keyword.put(
        :name,
        via(
          state.registry,
          {:"#{type}", "#{state.persistence}", "#{state.tenant}", "#{state.namespace}",
           "#{state.topic}"}
        )
      )

    WebSockex.start_link(state.url, __MODULE__, state, server_opts)
  end

  @impl WebSockex
  def handle_frame(
        {:text, msg},
        %{persistence: persistence, tenant: tenant, namespace: ns, topic: topic} = state
      ) do
    Logger.debug("Received frame : #{inspect(msg)}")

    # Hand the raw frame to the dispatcher stage registered for this topic.
    :ok =
      state.registry
      |> via({:dispatcher, "#{persistence}", "#{tenant}", "#{ns}", "#{topic}"})
      |> Dispatcher.push(msg)

    {:ok, state}
  end

  # JSON payload acknowledging a single message ID.
  defp construct_response(id), do: "{\"messageId\":\"#{id}\"}"

  # JSON payload requesting `count` more messages while in pull mode.
  defp construct_permit(count), do: "{\"type\":\"permit\",\"permitMessages\":#{count}}"
end
|
lib/stargate/receiver.ex
| 0.865793 | 0.446857 |
receiver.ex
|
starcoder
|
defmodule Plug.SSL do
  @moduledoc """
  A plug to force SSL connections.
  If the scheme of a request is https, it'll add a `strict-transport-security`
  header to enable HTTP Strict Transport Security.
  Otherwise, the request will be redirected to a corresponding location
  with the `https` scheme by setting the `location` header of the response.
  The status code will be 301 if the method of `conn` is `GET` or `HEAD`,
  or 307 in other situations.

  ## x-forwarded-proto

  If your Plug application is behind a proxy that handles HTTPS, you will
  need to tell Plug to parse the proper protocol from the "x-forwarded-proto"
  header. This can be done using the `:rewrite_on` option:

      plug Plug.SSL, rewrite_on: [:x_forwarded_proto]

  The command above will effectively change the value of `conn.scheme` by
  the one sent in "x-forwarded-proto".
  Since rewriting the scheme based on "x-forwarded-proto" can open up
  security vulnerabilities, only provide the option above if:

    * Your app is behind a proxy
    * Your proxy strips "x-forwarded-proto" headers from all incoming requests
    * Your proxy sets the "x-forwarded-proto" and sends it to Plug

  ## Options

    * `:rewrite_on` - rewrites the scheme to https based on the given headers
    * `:hsts` - a boolean on enabling HSTS or not, defaults to true.
    * `:expires` - seconds to expires for HSTS, defaults to 31536000 (a year).
    * `:subdomains` - a boolean on including subdomains or not in HSTS,
      defaults to false.
    * `:host` - a new host to redirect to if the request's scheme is `http`.

  ## Port

  It is not possible to directly configure the port in `Plug.SSL` because
  HSTS expects the port to be 443 for SSL. If you are not using HSTS and
  wants to redirect to HTTPS on another port, you can sneak it alongside
  the host, for example: `host: "example.com:443"`.
  """
  @behaviour Plug

  import Plug.Conn
  alias Plug.Conn

  # Precompute the HSTS header (or nil) at init time; carry host and rewrites.
  def init(opts) do
    host = Keyword.get(opts, :host)
    rewrites = Keyword.get(opts, :rewrite_on, [])
    {hsts_header(opts), host, rewrites}
  end

  def call(conn, {hsts, host, rewrites}) do
    case rewrite_on(conn, rewrites) do
      %Conn{scheme: :https} = conn -> put_hsts_header(conn, hsts)
      conn -> redirect_to_https(conn, host)
    end
  end

  # Applies each configured scheme rewrite in order.
  defp rewrite_on(conn, rewrites) do
    Enum.reduce(rewrites, conn, &apply_rewrite/2)
  end

  defp apply_rewrite(:x_forwarded_proto, conn) do
    case get_req_header(conn, "x-forwarded-proto") do
      ["https"] -> %{conn | scheme: :https}
      _ -> conn
    end
  end

  defp apply_rewrite(other, _conn) do
    raise "unknown rewrite: #{inspect(other)}"
  end

  # http://tools.ietf.org/html/draft-hodges-strict-transport-sec-02
  # Returns the header value, or nil when HSTS is disabled.
  defp hsts_header(opts) do
    if Keyword.get(opts, :hsts, true) do
      expires = Keyword.get(opts, :expires, 31_536_000)
      base = "max-age=#{expires}"

      if Keyword.get(opts, :subdomains, false) do
        base <> "; includeSubDomains"
      else
        base
      end
    end
  end

  defp put_hsts_header(conn, hsts_header) when is_binary(hsts_header) do
    put_resp_header(conn, "strict-transport-security", hsts_header)
  end

  defp put_hsts_header(conn, _), do: conn

  # Idempotent methods get a permanent redirect; others get 307 so the
  # method and body are preserved by the client.
  defp redirect_to_https(%Conn{host: host} = conn, custom_host) do
    status =
      case conn.method do
        method when method in ["HEAD", "GET"] -> 301
        _ -> 307
      end

    target_host = custom_host || host
    location = "https://" <> target_host <> conn.request_path <> qs(conn.query_string)

    conn
    |> put_resp_header("location", location)
    |> send_resp(status, "")
    |> halt()
  end

  defp qs(""), do: ""
  defp qs(query), do: "?" <> query
end
|
deps/plug/lib/plug/ssl.ex
| 0.854126 | 0.602617 |
ssl.ex
|
starcoder
|
defmodule Domo.TypeEnsurerFactory.Generator.MatchFunRegistry.Lists do
  # Generates quoted `do_match_spec/3` (and `do_match_list_elements/3`) clauses
  # for list-shaped typespecs: proper lists, keyword lists, and the improper
  # list family (`maybe_improper_list`, `nonempty_improper_list`,
  # `nonempty_maybe_improper_list`).
  @moduledoc false

  alias Domo.TypeEnsurerFactory.Precondition
  alias Domo.TypeEnsurerFactory.Generator.TypeSpec

  # Returns true when the given `{spec, precondition}` pair describes a list
  # type. A one-element list holding an `->` arrow is a function spec, not a list.
  def list_spec?(type_spec_precond) do
    {type_spec, _precond} = TypeSpec.split_spec_precond(type_spec_precond)

    case type_spec do
      [{:->, _, [_, _]}] -> false
      {:nonempty_list, _, [_]} -> true
      {:maybe_improper_list, _, [_, _]} -> true
      {:nonempty_improper_list, _, [_, _]} -> true
      {:nonempty_maybe_improper_list, _, [_, _]} -> true
      [_] -> true
      [_ | _] -> true
      _ -> false
    end
  end

  # Applies `fun` to each element/value spec nested inside the list spec while
  # keeping the outer list shape and the precondition intact.
  def map_value_type(type_spec_precond, fun) do
    {type_spec, precond} = TypeSpec.split_spec_precond(type_spec_precond)
    map_value_type(list_kind(type_spec), type_spec, precond, fun)
  end

  # Classifies the spec's list flavour; improper kinds keep their own tag atom.
  defp list_kind(type_spec) do
    case type_spec do
      [_element_spec] -> :proper_list
      [_ | _] -> :keyword_list
      {:nonempty_list, _, [_element_spec]} -> :proper_list
      {improper_kind, _, [_, _]} -> improper_kind
    end
  end

  defp map_value_type(:proper_list, type_spec, precond, fun) do
    {case type_spec do
       [element_spec] -> [fun.(element_spec)]
       {:nonempty_list, context, [element_spec]} -> {:nonempty_list, context, [fun.(element_spec)]}
     end, precond}
  end

  defp map_value_type(:keyword_list, type_spec, precond, fun) do
    {Enum.map(type_spec, fn {key, value} -> {key, fun.(value)} end), precond}
  end

  defp map_value_type(_improper_kind, type_spec, precond, fun) do
    {improper_kind, [], [head_element_spec, tail_element_spec]} = type_spec
    {{improper_kind, [], [fun.(head_element_spec), fun.(tail_element_spec)]}, precond}
  end

  # Entry point for the generator: returns `{quoted_clauses, nested_spec_preconds}`
  # so callers can emit the matcher functions and recurse into element specs.
  def match_spec_function_quoted(type_spec_precond) do
    {type_spec, precond} = TypeSpec.split_spec_precond(type_spec_precond)
    match_list_function_quoted(list_kind(type_spec), type_spec, precond)
  end

  # Keyword lists: each expected key is checked against every entry carrying
  # that key; matched entries are removed from the working list so leftovers
  # can be detected by the caller's precondition, if any.
  # credo:disable-for-lines:63
  defp match_list_function_quoted(:keyword_list, type_spec, precond) do
    type_spec_atom = TypeSpec.to_atom(type_spec)
    precond_atom = if precond, do: Precondition.to_atom(precond)

    {_keys, value_spec_preconds} = Enum.unzip(type_spec)

    kv_match_spec_attributes =
      Enum.map(type_spec, fn {key, value_spec_precond} ->
        {key, TypeSpec.match_spec_attributes(value_spec_precond)}
      end)
      |> Macro.escape()

    # Only bind `value`/`spec_string` when a precondition will actually use them,
    # to avoid unused-variable warnings in the generated code.
    {value_var, spec_string_var} =
      if precond do
        {quote(do: value), quote(do: spec_string)}
      else
        {quote(do: _value), quote(do: _spec_string)}
      end

    match_spec_quoted =
      quote do
        def do_match_spec({unquote(type_spec_atom), unquote(precond_atom)}, [] = unquote(value_var), unquote(spec_string_var)),
          do: unquote(Precondition.ok_or_precond_call_quoted(precond, quote(do: spec_string), quote(do: value)))

        def do_match_spec({unquote(type_spec_atom), unquote(precond_atom)}, [{_key, _value} | _] = value, spec_string) do
          reduced_list =
            Enum.reduce_while(unquote(kv_match_spec_attributes), value, fn {expected_key, value_attributes}, list ->
              {value_spec_atom, value_precond_atom, value_spec_string} = value_attributes
              {list_by_matching_key, filtered_list} = Enum.split_with(list, fn {key, _value} -> key == expected_key end)

              value_error =
                Enum.reduce_while(list_by_matching_key, :ok, fn {_key, value}, _acc ->
                  # credo:disable-for-lines:4
                  case do_match_spec({value_spec_atom, value_precond_atom}, value, value_spec_string) do
                    :ok -> {:cont, :ok}
                    {:error, _value, _messages} = error -> {:halt, error}
                  end
                end)

              case value_error do
                :ok ->
                  {:cont, filtered_list}

                {:error, value, messages} ->
                  message = {
                    "The element for the key %{key} has value %{value} that is invalid.",
                    [key: inspect(expected_key), value: inspect(value)]
                  }

                  {:halt, {:error, list, [message | messages]}}
              end
            end)

          if is_list(reduced_list) do
            unquote(Precondition.ok_or_precond_call_quoted(precond, quote(do: spec_string), quote(do: value)))
          else
            reduced_list
          end
        end
      end

    {match_spec_quoted, value_spec_preconds}
  end

  # Proper lists: elements are checked one by one by a generated
  # `do_match_list_elements` walker; `nonempty_list` additionally requires
  # a non-zero length via the guard.
  defp match_list_function_quoted(:proper_list, type_spec, precond) do
    {can_be_empty?, element_spec_precond} =
      case type_spec do
        [element_spec_precond] -> {true, element_spec_precond}
        {:nonempty_list, _, [element_spec_precond]} -> {false, element_spec_precond}
      end

    type_spec_atom = TypeSpec.to_atom(type_spec)
    precond_atom = if precond, do: Precondition.to_atom(precond)
    element_attributes = TypeSpec.match_spec_attributes(element_spec_precond)
    # Head and tail share the same element spec for a proper list.
    match_list_elements_quoted = match_el_quoted(type_spec_atom, element_attributes, element_attributes)
    guard_quoted = if can_be_empty?, do: quote(do: length(value) >= 0), else: quote(do: length(value) > 0)
    spec_string_var = if precond, do: quote(do: spec_string), else: quote(do: _spec_string)

    match_spec_quoted =
      quote do
        def do_match_spec({unquote(type_spec_atom), unquote(precond_atom)}, value, unquote(spec_string_var)) when unquote(guard_quoted) do
          case do_match_list_elements(unquote(type_spec_atom), value, 0) do
            {:proper, _length} ->
              unquote(Precondition.ok_or_precond_call_quoted(precond, quote(do: spec_string), quote(do: value)))

            {:element_error, messages} ->
              {:error, value, messages}
          end
        end
      end

    {[match_spec_quoted, match_list_elements_quoted], [element_spec_precond]}
  end

  # Builds the quoted `do_match_list_elements/3` walker. It recurses through
  # cons cells matching `head_attributes`; an empty tail means the list was
  # proper, while a non-list tail is checked against `tail_attributes` and
  # reported as `:improper`.
  defp match_el_quoted(type_spec_atom, head_attributes, tail_attributes) do
    {head_spec_atom, head_precond_atom, head_spec_string} = head_attributes
    {tail_spec_atom, tail_precond_atom, tail_spec_string} = tail_attributes

    quote do
      def do_match_list_elements(unquote(type_spec_atom), [head | tail], idx) do
        case do_match_spec({unquote(head_spec_atom), unquote(head_precond_atom)}, head, unquote(head_spec_string)) do
          :ok ->
            do_match_list_elements(unquote(type_spec_atom), tail, idx + 1)

          {:error, element_value, messages} ->
            {:element_error,
             [
               {"The element at index %{idx} has value %{element_value} that is invalid.", [idx: idx, element_value: inspect(element_value)]}
               | messages
             ]}
        end
      end

      def do_match_list_elements(unquote(type_spec_atom), [], idx) do
        {:proper, idx}
      end

      def do_match_list_elements(unquote(type_spec_atom), tail, idx) do
        case do_match_spec({unquote(tail_spec_atom), unquote(tail_precond_atom)}, tail, unquote(tail_spec_string)) do
          :ok ->
            {:improper, idx}

          {:error, element_value, messages} ->
            {:element_error,
             [
               {"The tail element has value %{element_value} that is invalid.", [element_value: inspect(element_value)]}
               | messages
             ]}
        end
      end
    end
  end

  # maybe_improper_list: either proper or improper termination is acceptable.
  defp match_list_function_quoted(:maybe_improper_list, type_spec, precond) do
    {:maybe_improper_list, [], [head_spec_precond, tail_spec_precond]} = type_spec
    type_spec_atom = TypeSpec.to_atom(type_spec)
    precond_atom = if precond, do: Precondition.to_atom(precond)
    head_attributes = TypeSpec.match_spec_attributes(head_spec_precond)
    tail_attributes = TypeSpec.match_spec_attributes(tail_spec_precond)
    match_list_elements_quoted = match_el_quoted(type_spec_atom, head_attributes, tail_attributes)
    spec_string_var = if precond, do: quote(do: spec_string), else: quote(do: _spec_string)

    match_spec_quoted =
      quote do
        def do_match_spec({unquote(type_spec_atom), unquote(precond_atom)}, value, unquote(spec_string_var)) when is_list(value) do
          case do_match_list_elements(unquote(type_spec_atom), value, 0) do
            {:element_error, messages} -> {:error, value, messages}
            {_kind, _length} -> unquote(Precondition.ok_or_precond_call_quoted(precond, quote(do: spec_string), quote(do: value)))
          end
        end
      end

    {[match_spec_quoted, match_list_elements_quoted], [head_spec_precond, tail_spec_precond]}
  end

  # nonempty_improper_list: must be non-empty AND end improperly.
  defp match_list_function_quoted(:nonempty_improper_list, type_spec, precond) do
    {:nonempty_improper_list, [], [head_spec_precond, tail_spec_precond]} = type_spec
    type_spec_atom = TypeSpec.to_atom(type_spec)
    precond_atom = if precond, do: Precondition.to_atom(precond)
    head_attributes = TypeSpec.match_spec_attributes(head_spec_precond)
    tail_attributes = TypeSpec.match_spec_attributes(tail_spec_precond)
    match_list_elements_quoted = match_el_quoted(type_spec_atom, head_attributes, tail_attributes)
    spec_string_var = if precond, do: quote(do: spec_string), else: quote(do: _spec_string)

    match_spec_quoted =
      quote do
        def do_match_spec({unquote(type_spec_atom), unquote(precond_atom)}, value, unquote(spec_string_var)) when is_list(value) do
          case do_match_list_elements(unquote(type_spec_atom), value, 0) do
            {:element_error, messages} ->
              {:error, value, messages}

            {_kind, 0} ->
              {:error, value, [{"Expected a nonempty list.", []}]}

            {:proper, _elem_count} ->
              {:error, value, [{"Expected an improper list.", []}]}

            {_kind, _length} ->
              unquote(Precondition.ok_or_precond_call_quoted(precond, quote(do: spec_string), quote(do: value)))
          end
        end
      end

    {[match_spec_quoted, match_list_elements_quoted], [head_spec_precond, tail_spec_precond]}
  end

  # nonempty_maybe_improper_list: must be non-empty; either termination is fine.
  defp match_list_function_quoted(:nonempty_maybe_improper_list, type_spec, precond) do
    {:nonempty_maybe_improper_list, [], [head_spec_precond, tail_spec_precond]} = type_spec
    type_spec_atom = TypeSpec.to_atom(type_spec)
    precond_atom = if precond, do: Precondition.to_atom(precond)
    head_attributes = TypeSpec.match_spec_attributes(head_spec_precond)
    tail_attributes = TypeSpec.match_spec_attributes(tail_spec_precond)
    match_list_elements_quoted = match_el_quoted(type_spec_atom, head_attributes, tail_attributes)
    spec_string_var = if precond, do: quote(do: spec_string), else: quote(do: _spec_string)

    match_spec_quoted =
      quote do
        def do_match_spec({unquote(type_spec_atom), unquote(precond_atom)}, value, unquote(spec_string_var)) when is_list(value) do
          case do_match_list_elements(unquote(type_spec_atom), value, 0) do
            {:element_error, messages} ->
              {:error, value, messages}

            {_kind, 0} ->
              {:error, value, [{"Expected a nonempty list.", []}]}

            {_kind, _length} ->
              unquote(Precondition.ok_or_precond_call_quoted(precond, quote(do: spec_string), quote(do: value)))
          end
        end
      end

    {[match_spec_quoted, match_list_elements_quoted], [head_spec_precond, tail_spec_precond]}
  end
end
|
lib/domo/type_ensurer_factory/generator/match_fun_registry/lists.ex
| 0.602763 | 0.416352 |
lists.ex
|
starcoder
|
defmodule ExAws do
  @moduledoc """
  Module for making and processing AWS request
  """
  use Application

  @behaviour ExAws.Behaviour

  @doc """
  Perform an AWS request.

  First build an operation from one of the services, and then pass it to this
  function to perform it.
  If you want to build an operation manually, see: `ExAws.Operation`

  This function takes an optional second parameter of configuration overrides,
  which is useful when certain configuration should change on a per-request
  basis.

  ## Examples

  If you have one of the service modules installed, you can just use those service
  modules like this:

      ExAws.S3.list_buckets |> ExAws.request
      ExAws.S3.list_buckets |> ExAws.request(region: "eu-west-1")
      ExAws.Dynamo.get_object("users", "<EMAIL>") |> ExAws.request

  Alternatively you can create operation structs manually for services
  that aren't supported:

      op = %ExAws.Operation.JSON{
        http_method: :post,
        service: :dynamodb,
        headers: [
          {"x-amz-target", "DynamoDB_20120810.ListTables"},
          {"content-type", "application/x-amz-json-1.0"}
        ],
      }
      ExAws.request(op)
  """
  @impl ExAws.Behaviour
  def request(op, config_overrides \\ []) do
    config = ExAws.Config.new(op.service, config_overrides)
    ExAws.Operation.perform(op, config)
  end

  @doc """
  Perform an AWS request, raise if it fails.

  Same as `request/1,2` except it will either return the successful response
  from AWS or raise an exception.
  """
  @impl ExAws.Behaviour
  def request!(op, config_overrides \\ []) do
    response = request(op, config_overrides)

    with {:ok, result} <- response do
      result
    else
      error ->
        raise ExAws.Error, """
        ExAws Request Error!
        #{inspect(error)}
        """
    end
  end

  @doc """
  Return a stream for the AWS resource.

  ## Examples

      ExAws.S3.list_objects("my-bucket") |> ExAws.stream!
  """
  @impl ExAws.Behaviour
  def stream!(op, config_overrides \\ []) do
    config = ExAws.Config.new(op.service, config_overrides)
    ExAws.Operation.stream!(op, config)
  end

  @doc false
  @impl Application
  def start(_type, _args) do
    # A single auth cache is supervised for credential refreshing.
    auth_cache = {ExAws.Config.AuthCache, [name: ExAws.Config.AuthCache]}
    Supervisor.start_link([auth_cache], strategy: :one_for_one, name: ExAws.Supervisor)
  end
end
|
lib/ex_aws.ex
| 0.799912 | 0.450541 |
ex_aws.ex
|
starcoder
|
defmodule ExSieve.Builder.Where do
  # Translates an ExSieve `Grouping` tree into a single `Ecto.Query.dynamic/2`
  # expression attached to the query with `where/2`. Every predicate has two
  # clause variants: one binding on the root query (`parent: :query`) and one
  # on a named join binding (`parent: parent`).
  @moduledoc false
  import Ecto.Query

  alias ExSieve.Node.{Attribute, Condition, Grouping}

  # Values accepted as "truthy" input for boolean-style predicates
  # (true/false/null/blank/present).
  @true_values [1, '1', 'T', 't', true, 'true', 'TRUE', "1", "T", "t", "true", "TRUE"]

  @spec build(Ecto.Queryable.t(), Grouping.t()) :: Ecto.Query.t()
  def build(query, %Grouping{combinator: combinator} = grouping) when combinator in ~w(and or)a do
    where(query, ^dynamic_grouping(grouping))
  end

  # Recursively folds a grouping's conditions and nested groupings into one
  # dynamic, combining siblings with the grouping's combinator.
  defp dynamic_grouping(%Grouping{conditions: conditions, groupings: groupings, combinator: combinator}) do
    conditions
    |> Enum.map(fn
      %Condition{attributes: attrs, values: vals, predicate: predicate, combinator: combinator} ->
        attrs
        |> Enum.map(fn attr -> dynamic_predicate(predicate, attr, vals) end)
        |> combine(combinator)
    end)
    |> Kernel.++(Enum.map(groupings, &dynamic_grouping/1))
    |> combine(combinator)
  end

  # Combines a list of dynamics with :and / :or; an empty list is always true.
  defp combine([], _), do: dynamic(true)
  defp combine([dynamic], _), do: dynamic

  defp combine([dyn | dynamics], :and) do
    Enum.reduce(dynamics, dyn, fn dyn, acc -> dynamic(^acc and ^dyn) end)
  end

  defp combine([dyn | dynamics], :or) do
    Enum.reduce(dynamics, dyn, fn dyn, acc -> dynamic(^acc or ^dyn) end)
  end

  # -- equality ---------------------------------------------------------------

  defp dynamic_predicate(:eq, %Attribute{parent: :query, name: name}, [value | _]) do
    dynamic([p], field(p, ^name) == ^value)
  end

  defp dynamic_predicate(:eq, %Attribute{parent: parent, name: name}, [value | _]) do
    dynamic([{^parent, p}], field(p, ^name) == ^value)
  end

  defp dynamic_predicate(:not_eq, %Attribute{parent: :query, name: name}, [value | _]) do
    dynamic([p], field(p, ^name) != ^value)
  end

  defp dynamic_predicate(:not_eq, %Attribute{parent: parent, name: name}, [value | _]) do
    dynamic([{^parent, p}], field(p, ^name) != ^value)
  end

  # -- case-insensitive substring (ILIKE %value%) -----------------------------

  defp dynamic_predicate(:cont, %Attribute{parent: :query, name: name}, [value | _]) do
    dynamic([p], ilike(field(p, ^name), ^"%#{value}%"))
  end

  defp dynamic_predicate(:cont, %Attribute{parent: parent, name: name}, [value | _]) do
    dynamic([{^parent, p}], ilike(field(p, ^name), ^"%#{value}%"))
  end

  defp dynamic_predicate(:not_cont, %Attribute{parent: :query, name: name}, [value | _]) do
    dynamic([p], not ilike(field(p, ^name), ^"%#{value}%"))
  end

  defp dynamic_predicate(:not_cont, %Attribute{parent: parent, name: name}, [value | _]) do
    dynamic([{^parent, p}], not ilike(field(p, ^name), ^"%#{value}%"))
  end

  # -- ordering comparisons ---------------------------------------------------

  defp dynamic_predicate(:lt, %Attribute{parent: :query, name: name}, [value | _]) do
    dynamic([p], field(p, ^name) < ^value)
  end

  defp dynamic_predicate(:lt, %Attribute{parent: parent, name: name}, [value | _]) do
    dynamic([{^parent, p}], field(p, ^name) < ^value)
  end

  defp dynamic_predicate(:lteq, %Attribute{parent: :query, name: name}, [value | _]) do
    dynamic([p], field(p, ^name) <= ^value)
  end

  defp dynamic_predicate(:lteq, %Attribute{parent: parent, name: name}, [value | _]) do
    dynamic([{^parent, p}], field(p, ^name) <= ^value)
  end

  defp dynamic_predicate(:gt, %Attribute{parent: :query, name: name}, [value | _]) do
    dynamic([p], field(p, ^name) > ^value)
  end

  defp dynamic_predicate(:gt, %Attribute{parent: parent, name: name}, [value | _]) do
    dynamic([{^parent, p}], field(p, ^name) > ^value)
  end

  defp dynamic_predicate(:gteq, %Attribute{parent: :query, name: name}, [value | _]) do
    dynamic([p], field(p, ^name) >= ^value)
  end

  defp dynamic_predicate(:gteq, %Attribute{parent: parent, name: name}, [value | _]) do
    dynamic([{^parent, p}], field(p, ^name) >= ^value)
  end

  # -- set membership (uses the full values list, not just the head) ----------

  defp dynamic_predicate(:in, %Attribute{parent: :query, name: name}, values) do
    dynamic([p], field(p, ^name) in ^values)
  end

  defp dynamic_predicate(:in, %Attribute{parent: parent, name: name}, values) do
    dynamic([{^parent, p}], field(p, ^name) in ^values)
  end

  defp dynamic_predicate(:not_in, %Attribute{parent: :query, name: name}, values) do
    dynamic([p], field(p, ^name) not in ^values)
  end

  defp dynamic_predicate(:not_in, %Attribute{parent: parent, name: name}, values) do
    dynamic([{^parent, p}], field(p, ^name) not in ^values)
  end

  # -- raw ILIKE pattern supplied by the caller -------------------------------

  defp dynamic_predicate(:matches, %Attribute{parent: :query, name: name}, [value | _]) do
    dynamic([p], ilike(field(p, ^name), ^value))
  end

  defp dynamic_predicate(:matches, %Attribute{parent: parent, name: name}, [value | _]) do
    dynamic([{^parent, p}], ilike(field(p, ^name), ^value))
  end

  defp dynamic_predicate(:does_not_match, %Attribute{parent: :query, name: name}, [value | _]) do
    dynamic([p], not ilike(field(p, ^name), ^value))
  end

  defp dynamic_predicate(:does_not_match, %Attribute{parent: parent, name: name}, [value | _]) do
    dynamic([{^parent, p}], not ilike(field(p, ^name), ^value))
  end

  # -- prefix / suffix matching (ILIKE value% / %value) -----------------------

  defp dynamic_predicate(:start, %Attribute{parent: :query, name: name}, [value | _]) do
    dynamic([p], ilike(field(p, ^name), ^"#{value}%"))
  end

  defp dynamic_predicate(:start, %Attribute{parent: parent, name: name}, [value | _]) do
    dynamic([{^parent, p}], ilike(field(p, ^name), ^"#{value}%"))
  end

  defp dynamic_predicate(:not_start, %Attribute{parent: :query, name: name}, [value | _]) do
    dynamic([p], not ilike(field(p, ^name), ^"#{value}%"))
  end

  defp dynamic_predicate(:not_start, %Attribute{parent: parent, name: name}, [value | _]) do
    dynamic([{^parent, p}], not ilike(field(p, ^name), ^"#{value}%"))
  end

  defp dynamic_predicate(:end, %Attribute{parent: :query, name: name}, [value | _]) do
    dynamic([p], ilike(field(p, ^name), ^"%#{value}"))
  end

  defp dynamic_predicate(:end, %Attribute{parent: parent, name: name}, [value | _]) do
    dynamic([{^parent, p}], ilike(field(p, ^name), ^"%#{value}"))
  end

  defp dynamic_predicate(:not_end, %Attribute{parent: :query, name: name}, [value | _]) do
    dynamic([p], not ilike(field(p, ^name), ^"%#{value}"))
  end

  defp dynamic_predicate(:not_end, %Attribute{parent: parent, name: name}, [value | _]) do
    dynamic([{^parent, p}], not ilike(field(p, ^name), ^"%#{value}"))
  end

  # -- boolean-style predicates: only fire when the input value is "truthy" ---
  # NOTE: the first two clauses match the literal atoms `true`/`false` as the
  # predicate name (i.e. `:true` / `:false`).

  defp dynamic_predicate(true, attribute, [value | _]) when value in @true_values do
    dynamic_predicate(:eq, attribute, [true])
  end

  defp dynamic_predicate(:not_true, attribute, [value | _]) when value in @true_values do
    dynamic_predicate(:not_eq, attribute, [true])
  end

  defp dynamic_predicate(false, attribute, [value | _]) when value in @true_values do
    dynamic_predicate(:eq, attribute, [false])
  end

  defp dynamic_predicate(:not_false, attribute, [value | _]) when value in @true_values do
    dynamic_predicate(:not_eq, attribute, [false])
  end

  defp dynamic_predicate(:blank, %Attribute{parent: :query, name: name}, [value | _]) when value in @true_values do
    dynamic([p], is_nil(field(p, ^name)) or field(p, ^name) == ^"")
  end

  defp dynamic_predicate(:blank, %Attribute{parent: parent, name: name}, [value | _]) when value in @true_values do
    dynamic([{^parent, p}], is_nil(field(p, ^name)) or field(p, ^name) == ^"")
  end

  defp dynamic_predicate(:null, %Attribute{parent: :query, name: name}, [value | _]) when value in @true_values do
    dynamic([p], is_nil(field(p, ^name)))
  end

  defp dynamic_predicate(:null, %Attribute{parent: parent, name: name}, [value | _]) when value in @true_values do
    dynamic([{^parent, p}], is_nil(field(p, ^name)))
  end

  defp dynamic_predicate(:not_null, %Attribute{parent: :query, name: name}, [value | _]) when value in @true_values do
    dynamic([p], not is_nil(field(p, ^name)))
  end

  defp dynamic_predicate(:not_null, %Attribute{parent: parent, name: name}, [value | _]) when value in @true_values do
    dynamic([{^parent, p}], not is_nil(field(p, ^name)))
  end

  defp dynamic_predicate(:present, %Attribute{parent: :query, name: name}, [value | _]) when value in @true_values do
    dynamic([p], not (is_nil(field(p, ^name)) or field(p, ^name) == ^""))
  end

  defp dynamic_predicate(:present, %Attribute{parent: parent, name: name}, [value | _]) when value in @true_values do
    dynamic([{^parent, p}], not (is_nil(field(p, ^name)) or field(p, ^name) == ^""))
  end

  # Compile-time generation of `<predicate>_all` / `<predicate>_any` variants
  # for every basic predicate: the predicate is applied to each value and the
  # results are combined with :and (all) or :or (any).
  for basic_predicate <- Condition.basic_predicates() do
    for {name, combinator} <- [all: :and, any: :or] do
      predicate = String.to_atom("#{basic_predicate}_#{name}")
      basic_predicate = String.to_atom(basic_predicate)

      defp dynamic_predicate(unquote(predicate), attribute, values) do
        values
        |> Enum.map(&dynamic_predicate(unquote(basic_predicate), attribute, List.wrap(&1)))
        |> combine(unquote(combinator))
      end
    end
  end
end
|
lib/ex_sieve/builder/where.ex
| 0.719088 | 0.457137 |
where.ex
|
starcoder
|
defmodule Slack do
  @moduledoc """
  A client for the Slack API.

  This library exports a module for each namespace on the Slack API. For each
  method in that namespace, a function is exported from its parent module.
  Nested namespaces are in nested modules.
  When looking at the Slack API documentation, if you see a method called
  `usergroups.users.list`, you know that you can call this API method with the
  function call `Slack.UserGroup.User.list`. Note that all module names are
  singular, regardless of whether they are singular or plural in the Slack API.

  ## Authentication

  For Slack API endpoints that require an authentication token (which is all of
  them save some special OAuth endpoints), a `Slack.Client.t` must be passed
  in as the first argument to a function call:

      access_token
      |> Slack.client # Creates a new `Slack.Client.t`
      |> Slack.Channel.list

  While the typespec currently expects a `Slack.Client.t`, anything can
  technically passed in on which the function can call `client.token`.

  ## Request Methods

  The Slack API allows clients to either issue `GET` or `POST` requests for any
  API method. In this client, `POST` is used whenever the request can
  potentially create or change data. Otherwise, a `GET` is used.

  ## Return Values

  For any successful method, the Slack API returns a key `"okay"` in the
  response body with `true` as the value. When handling a response, this library
  checks for this value and if it is present, returns `{:ok, response_body}`.
  An `HTTPoison.Response.t` is only returned when a method call fails, in a
  tuple: `{:error, response}`.

  ## Arbitrary Request

  To make some arbitrary request that is not in this library, you can use
  special HTTP methods on the `Slack` module:

      {:ok, response} =
        Slack.get "namespace.method", [], params: [token: access_token]
  """
  use HTTPoison.Base

  @type slack_response ::
          {:ok, map} |
          {:error, HTTPoison.Response.t} |
          {:error, HTTPoison.Error.t}

  @endpoint "https://slack.com/api"

  # Wraps a raw access token in a client struct.
  def client(token) do
    %Slack.Client{token: token}
  end

  # Drop any leading slash, then prefix the API endpoint.
  def process_url("/" <> path), do: process_url(path)
  def process_url(path), do: @endpoint <> "/" <> path

  # Every Slack API response body is JSON.
  def process_response_body(body), do: Poison.decode!(body)
end
|
lib/slack.ex
| 0.856453 | 0.537891 |
slack.ex
|
starcoder
|
defmodule Timex.Time do
  @moduledoc """
  Conversions between time units and Erlang-style timestamps of the form
  `{megasecs, secs, microsecs}` (as returned by `:os.timestamp/0`).

  Supported unit atoms: `:usecs`, `:msecs`, `:secs`, `:mins`, `:hours`,
  `:days`, `:weeks`, plus `:hms` for `{hours, minutes, seconds}` tuples and
  `:timestamp` for the raw triple.
  """

  @million 1_000_000

  # Generate `to_*/2` conversions from the sub-second units to every unit.
  Enum.each [usecs: @million, msecs: 1_000], fn {type, coef} ->
    def to_usecs(value, unquote(type)), do: value * @million / unquote(coef)
    def to_msecs(value, unquote(type)), do: value * 1000 / unquote(coef)
    def to_secs(value, unquote(type)), do: value / unquote(coef)
    def to_mins(value, unquote(type)), do: value / unquote(coef) / 60
    def to_hours(value, unquote(type)), do: value / unquote(coef) / 3600
    def to_days(value, unquote(type)), do: value / unquote(coef) / (3600 * 24)
    def to_weeks(value, unquote(type)), do: value / unquote(coef) / (3600 * 24 * 7)
  end

  # Generate `to_*/2` conversions from second-and-larger units, plus helpers
  # like `mins/1`/`hours/1` that convert a value of that unit into seconds.
  Enum.each [secs: 1, mins: 60, hours: 3600, days: 3600 * 24, weeks: 3600 * 24 * 7], fn {type, coef} ->
    def unquote(type)(value), do: value * unquote(coef)
    def to_usecs(value, unquote(type)), do: value * unquote(coef) * @million
    def to_msecs(value, unquote(type)), do: value * unquote(coef) * 1000
    def to_secs(value, unquote(type)), do: value * unquote(coef)
    def to_mins(value, unquote(type)), do: value * unquote(coef) / 60
    def to_hours(value, unquote(type)), do: value * unquote(coef) / 3600
    def to_days(value, unquote(type)), do: value * unquote(coef) / (3600 * 24)
    def to_weeks(value, unquote(type)), do: value * unquote(coef) / (3600 * 24 * 7)
  end

  # Every `to_*/2` also accepts an `{hours, minutes, seconds}` tuple as :hms.
  Enum.each [:to_usecs, :to_msecs, :to_secs, :to_mins, :to_hours, :to_days, :to_weeks], fn name ->
    def unquote(name)({hours, minutes, seconds}, :hms), do: unquote(name)(hours * 3600 + minutes * 60 + seconds, :secs)
  end

  @doc """
  Builds a `{megasecs, secs, microsecs}` timestamp from a value in the given
  unit. Equivalent to `to_timestamp/2`.

  BUGFIX: the original `:msecs` clause kept the millisecond remainder in the
  microseconds slot (off by a factor of 1000), and the `:secs` clause divided
  plain seconds by 1,000,000 (a copy-paste of the `:usecs` logic). Both now
  delegate to the correct `to_timestamp/2` implementation.
  """
  def from(value, type), do: to_timestamp(value, type)

  @doc "Converts a timestamp triple to total microseconds (integer)."
  def to_usecs({mega, sec, micro}), do: (mega * @million + sec) * @million + micro

  @doc "Converts a timestamp triple to total milliseconds."
  def to_msecs({mega, sec, micro}), do: (mega * @million + sec) * 1000 + micro / 1000

  @doc "Converts a timestamp triple to total seconds."
  def to_secs({mega, sec, micro}), do: mega * @million + sec + micro / @million

  def to_mins(timestamp), do: to_secs(timestamp) / 60
  def to_hours(timestamp), do: to_secs(timestamp) / 3600
  def to_days(timestamp), do: to_secs(timestamp) / (3600 * 24)
  def to_weeks(timestamp), do: to_secs(timestamp) / (3600 * 24 * 7)

  @doc """
  Converts a value in the given unit to a `{megasecs, secs, microsecs}`
  timestamp triple.
  """
  def to_timestamp(value, :usecs) do
    { secs, microsecs } = mdivmod(value)
    { megasecs, secs } = mdivmod(secs)
    {megasecs, secs, microsecs}
  end

  def to_timestamp(value, :msecs) do
    # The remainder of divmod/2 is in *milliseconds*; scale it to microseconds.
    # (The original stored it directly in the microseconds slot.)
    { secs, msecs } = divmod(value, 1000)
    { megasecs, secs } = mdivmod(secs)
    {megasecs, secs, msecs * 1000}
  end

  def to_timestamp(value, :secs) do
    # Accepts floats; the fractional part becomes microseconds.
    secs = trunc(value)
    microsecs = trunc((value - secs) * @million)
    { megasecs, secs } = mdivmod(secs)
    {megasecs, secs, microsecs}
  end

  def to_timestamp(value, :mins), do: to_timestamp(value * 60, :secs)
  def to_timestamp(value, :hours), do: to_timestamp(value * 3600, :secs)
  def to_timestamp(value, :days), do: to_timestamp(value * 3600 * 24, :secs)
  def to_timestamp(value, :weeks), do: to_timestamp(value * 3600 * 24 * 7, :secs)
  def to_timestamp(value, :hms), do: to_timestamp(to_secs(value, :hms), :secs)

  @doc "Adds two timestamps component-wise, normalizing the result."
  def add({mega1, sec1, micro1}, {mega2, sec2, micro2}) do
    normalize { mega1 + mega2, sec1 + sec2, micro1 + micro2 }
  end

  @doc "Subtracts the second timestamp from the first, normalizing the result."
  def sub({mega1, sec1, micro1}, {mega2, sec2, micro2}) do
    normalize { mega1 - mega2, sec1 - sec2, micro1 - micro2 }
  end

  @doc "Multiplies every component of a timestamp by `coef`, then normalizes."
  def scale({mega, secs, micro}, coef) do
    normalize { mega * coef, secs * coef, micro * coef }
  end

  @doc "Negates every component of a timestamp."
  def invert({mega, sec, micro}) do
    { -mega, -sec, -micro }
  end

  @doc "Returns the absolute value of a timestamp interval."
  def abs(timestamp = {mega, sec, micro}) do
    # The sign is decided by the most significant non-zero component.
    # (The original assigned `value` inside the cond clauses, which does not
    # leak out of the block in modern Elixir; bind the cond's result instead.)
    value =
      cond do
        mega != 0 -> mega
        sec != 0 -> sec
        true -> micro
      end

    if value < 0, do: invert(timestamp), else: timestamp
  end

  @doc """
  Return a timestamp representing a time lapse of length 0.
      Time.convert(Time.zero, :secs)
      #=> 0
  Can be useful for operations on collections of timestamps. For instance,
      Enum.reduce timestamps, Time.zero, &Time.add/2
  """
  def zero, do: {0, 0, 0}

  @doc """
  Convert timestamp in the form { megasecs, seconds, microsecs } to the
  specified time units.
  Supported units: microseconds (:usecs), milliseconds (:msecs), seconds
  (:secs), minutes (:mins), hours (:hours), days (:days), or weeks (:weeks).
  """
  def convert(timestamp, type \\ :timestamp)
  def convert(timestamp, :timestamp), do: timestamp
  # Delegate to the exact triple conversions (to_usecs/1 stays an integer,
  # avoiding the float precision loss of going through to_secs/1).
  def convert(timestamp, :usecs), do: to_usecs(timestamp)
  def convert(timestamp, :msecs), do: to_msecs(timestamp)
  def convert(timestamp, :secs), do: to_secs(timestamp)
  def convert(timestamp, :mins), do: to_mins(timestamp)
  def convert(timestamp, :hours), do: to_hours(timestamp)
  def convert(timestamp, :days), do: to_days(timestamp)
  def convert(timestamp, :weeks), do: to_weeks(timestamp)

  @doc """
  Return time interval since the first day of year 0 to Epoch.
  """
  def epoch(type \\ :timestamp)

  def epoch(:timestamp) do
    seconds = :calendar.datetime_to_gregorian_seconds({ {1970, 1, 1}, {0, 0, 0} })
    { mega, sec } = mdivmod(seconds)
    { mega, sec, 0 }
  end

  def epoch(type) do
    convert(epoch(), type)
  end

  @doc """
  Time interval since Epoch.
  The argument is an atom indicating the type of time units to return (see
  convert/2 for supported values).
  When the argument is omitted, the return value's format is { megasecs, seconds, microsecs }.
  """
  def now(type \\ :timestamp)

  def now(:timestamp) do
    :os.timestamp
  end

  def now(type) do
    convert(now(), type)
  end

  @doc """
  Time interval between timestamp and now. If timestamp is after now in time, the
  return value will be negative.
  The second argument is an atom indicating the type of time units to return
  (see convert/2 for supported values).
  When the second argument is omitted, the return value's format is { megasecs,
  seconds, microsecs }.
  """
  def elapsed(timestamp, type \\ :timestamp)

  def elapsed(timestamp, type) do
    diff(now(), timestamp, type)
  end

  @doc """
  Time interval between two timestamps. If the first timestamp comes before the
  second one in time, the return value will be negative.
  The third argument is an atom indicating the type of time units to return
  (see convert/2 for supported values).
  When the third argument is omitted, the return value's format is { megasecs,
  seconds, microsecs }.
  """
  def diff(t1, t2, type \\ :timestamp)

  def diff({mega1, secs1, micro1}, {mega2, secs2, micro2}, :timestamp) do
    # TODO: normalize the result
    {mega1 - mega2, secs1 - secs2, micro1 - micro2}
  end

  def diff(t1, t2, type) do
    convert(diff(t1, t2), type)
  end

  @doc "Measures the runtime of `fun`, returning `{timestamp, result}`."
  def measure(fun) do
    measure_result(:timer.tc(fun))
  end

  @doc "Measures `apply(fun, args)`, returning `{timestamp, result}`."
  def measure(fun, args) do
    measure_result(:timer.tc(fun, args))
  end

  @doc "Measures `apply(module, fun, args)`, returning `{timestamp, result}`."
  def measure(module, fun, args) do
    measure_result(:timer.tc(module, fun, args))
  end

  # :timer.tc/1,2,3 reports elapsed microseconds; convert to a triple.
  # BUGFIX: was `to_timestamp(micro, :usec)`, which matched no clause of
  # to_timestamp/2 (`:usecs` is the supported atom) and always raised.
  defp measure_result({micro, ret}) do
    { to_timestamp(micro, :usecs), ret }
  end

  # Carry overflow from the micro and sec fields upward so each field is in
  # range. Rebinds via the `if` result — the original rebound *inside* the
  # `if`, which does not leak out of the block in modern Elixir.
  defp normalize({mega, sec, micro}) do
    { sec, micro } = if micro >= @million, do: mdivmod(sec, micro), else: { sec, micro }
    { mega, sec } = if sec >= @million, do: mdivmod(mega, sec), else: { mega, sec }
    # TODO: check for negative values
    { mega, sec, micro }
  end

  # {quotient, remainder} of integer division.
  defp divmod(a, b) do
    { div(a, b), rem(a, b) }
  end

  # Same, with the quotient added onto an initial carry value.
  defp divmod(initial, a, b) do
    { initial + div(a, b), rem(a, b) }
  end

  defp mdivmod(a) do
    divmod(a, 1_000_000)
  end

  defp mdivmod(initial, a) do
    divmod(initial, a, 1_000_000)
  end
end
|
lib/time/time.ex
| 0.50415 | 0.774669 |
time.ex
|
starcoder
|
defmodule X509.CSR do
@moduledoc """
Implements PKCS#10 Certificate Signing Requests (CSRs), formally known by
their ASN.1 type CertificationRequest.
For conversion to and from PEM or DER format, use the generic functions in
the `X509` module.
"""
import X509.ASN1
alias X509.RDNSequence
@typedoc """
`:CertificationRequest` record, as used in Erlang's `:public_key` module
"""
@opaque t :: X509.ASN1.record(:certification_request)
# CertificationRequest record version
@version :v1
@doc """
Returns a `:CertificationRequest` record for the given key pair and subject.
Supports RSA and EC private keys. The public key is extracted from the
private key and encoded, together with the subject, in the CSR. The CSR is
then signed with the private key, using a configurable hash algorithm.
The default hash algorithm is `:sha256`. An alternative algorithm can be
specified using the `:hash` option. Possible values include `:sha224`,
`:sha256`, `:sha384`, `:sha512`.
Older hash algorithms, supported for compatibility with older software only,
include `:md5` (RSA only) and `:sha`. The use of these algorithms is
discouraged.
"""
@spec new(X509.PrivateKey.t(), String.t() | X509.RDNSequence.t(), Keyword.t()) :: t()
def new(private_key, subject, opts \\ []) do
hash = Keyword.get(opts, :hash, :sha256)
# Map the key type + hash to the ASN.1 signature-algorithm record.
algorithm = sign_type(hash, private_key)
# Convert subject to RDNSequence, if necessary
subject_rdn_sequence =
case subject do
{:rdnSequence, _} -> subject
rdn -> RDNSequence.new(rdn)
end
# CertificationRequestInfo to be signed
info =
certification_request_info(
version: @version,
subject: subject_rdn_sequence,
subjectPKInfo:
private_key
|> X509.PublicKey.derive()
|> X509.PublicKey.wrap(:CertificationRequestInfo_subjectPKInfo),
attributes: []
)
# The signature is computed over the DER encoding of the info block.
info_der = :public_key.der_encode(:CertificationRequestInfo, info)
signature = :public_key.sign(info_der, hash, private_key)
certification_request(
certificationRequestInfo: info,
signatureAlgorithm: algorithm,
signature: signature
)
end
@doc """
Extracts the public key from the CSR.
"""
@spec public_key(t()) :: X509.PublicKey.t()
def public_key(certification_request(certificationRequestInfo: info)) do
info
|> certification_request_info(:subjectPKInfo)
|> X509.PublicKey.unwrap()
end
@doc """
Returns the Subject field of the CSR.
"""
@spec subject(t()) :: X509.RDNSequence.t()
def subject(certification_request(certificationRequestInfo: info)) do
info
|> certification_request_info(:subject)
end
@doc """
Verifies whether a CSR has a valid signature.
"""
@spec valid?(t()) :: boolean()
def valid?(
certification_request(
certificationRequestInfo: info,
signatureAlgorithm: algorithm,
signature: signature
) = csr
) do
# Re-encode the info block; the signature must verify over these bytes
# with the public key embedded in the CSR itself.
info_der = :public_key.der_encode(:CertificationRequestInfo, info)
{digest_type, _} =
algorithm
|> certification_request_signature_algorithm(:algorithm)
|> :public_key.pkix_sign_types()
:public_key.verify(info_der, digest_type, signature, public_key(csr))
end
@doc """
Converts a CSR to DER (binary) format.
"""
# @doc since: "0.3.0"
@spec to_der(t()) :: binary()
def to_der(certification_request() = csr) do
:public_key.der_encode(:CertificationRequest, csr)
end
@doc """
Converts a CSR to PEM format.
"""
# @doc since: "0.3.0"
@spec to_pem(t()) :: String.t()
def to_pem(certification_request() = csr) do
:public_key.pem_entry_encode(:CertificationRequest, csr)
|> List.wrap()
|> :public_key.pem_encode()
end
@doc """
Attempts to parse a CSR in DER (binary) format. Raises in case of failure.
"""
# @doc since: "0.3.0"
@spec from_der!(binary()) :: t() | no_return()
def from_der!(der) do
:public_key.der_decode(:CertificationRequest, der)
end
@doc """
Parses a CSR in DER (binary) format.
Returns an `:ok` tuple in case of success, or an `:error` tuple in case of
failure. Possible error reasons are:
* `:malformed` - the data could not be decoded as a CSR
"""
# @doc since: "0.3.0"
@spec from_der(binary()) :: {:ok, t()} | {:error, :malformed}
def from_der(der) do
{:ok, from_der!(der)}
rescue
# NOTE(review): only MatchError is rescued here; presumably that is what
# `:public_key.der_decode/2` raises on bad input — confirm against the
# OTP version in use.
MatchError -> {:error, :malformed}
end
@doc """
Attempts to parse a CSR in PEM format. Raises in case of failure.
Processes the first PEM entry of type CERTIFICATE REQUEST found in the input.
"""
# @doc since: "0.3.0"
@spec from_pem!(String.t()) :: t() | no_return()
def from_pem!(pem) do
{:ok, csr} = from_pem(pem)
csr
end
@doc """
Parses a CSR in PEM format.
Processes the first PEM entry of type CERTIFICATE REQUEST found in the input.
Returns an `:ok` tuple in case of success, or an `:error` tuple in case of
failure. Possible error reasons are:
* `:not_found` - no PEM entry of type CERTIFICATE REQUEST was found
* `:malformed` - the entry could not be decoded as a CSR
"""
# @doc since: "0.3.0"
@spec from_pem(String.t()) :: {:ok, t()} | {:error, :malformed | :not_found}
def from_pem(pem) do
pem
|> :public_key.pem_decode()
|> Enum.find(&match?({:CertificationRequest, _, :not_encrypted}, &1))
|> case do
nil -> {:error, :not_found}
{:CertificationRequest, der, :not_encrypted} -> from_der(der)
end
end
# Returns a :CertificationRequest_signatureAlgorithm record for the given
# public key type and hash algorithm; this is essentially the reverse
# of `:public_key.pkix_sign_types/1`
# The first two clauses dispatch on the private key's record type (RSA/EC).
defp sign_type(hash, rsa_private_key()) do
sign_type(hash, :rsa)
end
defp sign_type(hash, ec_private_key()) do
sign_type(hash, :ecdsa)
end
# RSA signature algorithms carry an explicit NULL parameters field.
defp sign_type(:md5, :rsa) do
certification_request_signature_algorithm(
algorithm: oid(:md5WithRSAEncryption),
parameters: null()
)
end
defp sign_type(:sha, :rsa) do
certification_request_signature_algorithm(
algorithm: oid(:sha1WithRSAEncryption),
parameters: null()
)
end
defp sign_type(:sha224, :rsa) do
certification_request_signature_algorithm(
algorithm: oid(:sha224WithRSAEncryption),
parameters: null()
)
end
defp sign_type(:sha256, :rsa) do
certification_request_signature_algorithm(
algorithm: oid(:sha256WithRSAEncryption),
parameters: null()
)
end
defp sign_type(:sha384, :rsa) do
certification_request_signature_algorithm(
algorithm: oid(:sha384WithRSAEncryption),
parameters: null()
)
end
defp sign_type(:sha512, :rsa) do
certification_request_signature_algorithm(
algorithm: oid(:sha512WithRSAEncryption),
parameters: null()
)
end
defp sign_type(hash, :rsa) do
raise ArgumentError, "Unsupported hashing algorithm for RSA signing: #{inspect(hash)}"
end
# ECDSA signature algorithms omit the parameters field (and :md5 is not
# supported, so it falls through to the raising clause below).
defp sign_type(:sha, :ecdsa) do
certification_request_signature_algorithm(algorithm: oid(:"ecdsa-with-SHA1"))
end
defp sign_type(:sha224, :ecdsa) do
certification_request_signature_algorithm(algorithm: oid(:"ecdsa-with-SHA224"))
end
defp sign_type(:sha256, :ecdsa) do
certification_request_signature_algorithm(algorithm: oid(:"ecdsa-with-SHA256"))
end
defp sign_type(:sha384, :ecdsa) do
certification_request_signature_algorithm(algorithm: oid(:"ecdsa-with-SHA384"))
end
defp sign_type(:sha512, :ecdsa) do
certification_request_signature_algorithm(algorithm: oid(:"ecdsa-with-SHA512"))
end
defp sign_type(hash, :ecdsa) do
raise ArgumentError, "Unsupported hashing algorithm for ECDSA signing: #{inspect(hash)}"
end
end
|
lib/x509/csr.ex
| 0.891941 | 0.523968 |
csr.ex
|
starcoder
|
defmodule Axon do
@moduledoc """
A high-level interface for creating neural network models.
Axon is built entirely on top of Nx numerical definitions,
so every neural network can be JIT or AOT compiled using
any Nx compiler, or even transformed into high-level neural
network formats like TensorFlow Lite and ONNX.
All Axon models start with an input layer, specifying the
expected input shape of the training data:
input = Axon.input({nil, 784})
Notice you can specify the batch dimension as `nil`. You can
then compose inputs with other layers:
model =
input
|> Axon.dense(128, activation: :relu)
|> Axon.batch_norm()
|> Axon.dropout(rate: 0.8)
|> Axon.dense(64)
|> Axon.tanh()
|> Axon.dense(10)
|> Axon.activation(:softmax)
You can inspect the model for a nice summary:
IO.inspect(model)
-----------------------------------------------------
Model
=====================================================
Layer Shape Parameters
=====================================================
input_1 (input) {nil, 784} 0
dense_2 (dense) {nil, 128} 100480
relu_3 (relu) {nil, 128} 0
batch_norm_4 (batch_norm) {nil, 128} 256
dropout_5 (dropout) {nil, 128} 0
dense_6 (dense) {nil, 64} 8256
tanh_7 (tanh) {nil, 64} 0
dense_8 (dense) {nil, 10} 650
softmax_9 (softmax) {nil, 10} 0
-----------------------------------------------------
Under the hood, Axon models are represented as Elixir structs. You
can initialize and apply models using the macros `Axon.init/2` and
`Axon.predict/4`:
params = Axon.init(model, compiler: EXLA)
Axon.predict(model, params, inputs, compiler: EXLA)
Both `Axon.init/2` and `Axon.predict/4` can be used from within
Nx defn or outside.
Combining the Axon model creation API with the optimization and training
APIs, you can create and train neural networks with ease:
model =
Axon.input({nil, 784})
|> Axon.dense(128, activation: :relu)
|> Axon.layer_norm()
|> Axon.dropout()
|> Axon.dense(10, activation: :softmax)
IO.inspect model
final_params =
model
|> Axon.Training.step(:categorical_cross_entropy, Axon.Optimizers.adamw(0.005))
|> Axon.Training.train(train_images, train_labels, epochs: 10, compiler: EXLA)
"""
alias __MODULE__, as: Axon
@type t :: %__MODULE__{}
@doc false
defstruct [:id, :name, :output_shape, :parent, :op, :params, :opts]
@doc """
Custom Axon layer with given parent.
Applies `op` on `parent` with parameters `parameters`. `parameters`
is a map of trainable `parameters` created using `Axon.param`. Assumes
`op` is a function of the following form:
op = fn input, params -> ... end
If `opts` is not empty, it is treated as input options to the layer
method:
op = fn input, params, opts -> ... end
Parameters are accessed using the same key referenced in the `parameters`
map passed to `Axon.layer`:
w1 = Axon.param("weight", {})
b1 = Axon.param("bias", {})
op = fn input, params -> params["weight"] * input + params["bias"] end
Axon.layer(parent, op, {}, %{"weight" => w1, "bias" => b1})
"""
@doc type: :special
def layer(parent, op, output_shape, parameters, name \\ nil, opts \\ [])
    when is_atom(op) or (is_function(op) and is_map(parameters)) do
  # Built-in layers use their op atom for id/name generation; custom
  # function layers fall back to the generic :layer name.
  base_op = if is_atom(op), do: op, else: :layer
  {id, layer_name} = unique_identifiers(base_op, name)

  # Prefix each trainable parameter's name with the layer name so parameter
  # names are unique across the whole model.
  scoped_parameters =
    Map.new(parameters, fn {key, %{name: param_name} = parameter} ->
      {key, %{parameter | name: "#{layer_name}_#{param_name}"}}
    end)

  %Axon{
    id: id,
    name: layer_name,
    output_shape: output_shape,
    parent: parent,
    op: op,
    params: scoped_parameters,
    opts: opts
  }
end
@doc """
Trainable Axon parameter used to create custom layers.
Parameters are specified in usages of `Axon.layer` and will
be automatically initialized and used in subsequent applications
of Axon models.
Parameters *must* be specified in order of their usage.
## Options
* `initializer` - parameter initializer. Defaults to `:glorot_uniform`.
* `:regularizer` - parameter regularizer. Defaults to `:none`.
"""
def param(name, shape, opts \\ []) do
  # `||` (not Keyword.get/3 with default) so an explicit nil also falls
  # back to the default, matching the rest of the option handling here.
  init = opts[:initializer] || :glorot_uniform
  validate_initializer!(init)

  reg = opts[:regularizer] || :none
  validate_regularizer!(reg)

  %Axon.Parameter{
    id: System.unique_integer([:positive, :monotonic]),
    name: name,
    shape: shape,
    initializer: init,
    regularizer: reg
  }
end
@doc """
Adds an input layer to the network.
Input layers specify a model's inputs. Input layers are
always the root layers of the neural network.
## Options
* `name` - Layer name.
"""
@doc type: :special
def input(input_shape, opts \\ []) do
  # Normalize/validate the declared shape (batch dimension may be nil) and
  # create a root node with no parent and no trainable parameters.
  shape = Axon.Shape.input(input_shape)
  layer(nil, :input, shape, %{}, opts[:name], opts)
end
@doc """
Adds a dense layer to the network.
The dense layer implements:
output = activation(dot(input, kernel) + bias)
where `activation` is given by the `:activation` option and both
`kernel` and `bias` are layer parameters. `units` specifies the
number of output units.
Compiles to `Axon.Layers.dense/3`.
## Options
* `name` - Layer name.
* `kernel_initializer` - Initializer for `kernel` weights.
* `bias_initializer` - Initializer for `bias` weights.
* `activation` - Element-wise activation function.
"""
@doc type: :linear
def dense(%Axon{output_shape: parent_shape} = x, units, opts \\ [])
    when is_integer(units) and units > 0 do
  act = opts[:activation]

  # Derive parameter and output shapes from the parent's output shape.
  kernel_shape = Axon.Shape.dense_kernel(parent_shape, units)
  bias_shape = Axon.Shape.dense_bias(parent_shape, units)
  output_shape = Axon.Shape.dense(parent_shape, units)

  kernel =
    param("kernel", kernel_shape,
      initializer: opts[:kernel_initializer],
      regularizer: opts[:kernel_regularizer]
    )

  bias =
    param("bias", bias_shape,
      initializer: opts[:bias_initializer] || :zeros,
      regularizer: opts[:bias_regularizer]
    )

  dense_node =
    layer(x, :dense, output_shape, %{"kernel" => kernel, "bias" => bias}, opts[:name])

  # Optionally append an element-wise activation node.
  if act, do: activation(dense_node, act), else: dense_node
end
@doc """
Adds a convolution layer to the network.
The convolution layer implements a general dimensional
convolutional layer - which convolves a kernel over the input
to produce an output.
Compiles to `Axon.Layers.conv/4`.
## Options
* `name` - Layer name.
* `kernel_initializer` - Initializer for `kernel` weights.
* `bias_initializer` - Initializer for `bias` weights.
* `activation` - Element-wise activation function.
* `kernel_size` - Size of the kernel spatial dimensions.
* `strides` - Stride during convolution.
* `padding` - Padding to the spatial dimensions of the input.
* `input_dilation` - Dilation to apply to input.
* `kernel_dilation` - Dilation to apply to kernel.
"""
@doc type: :convolution
def conv(%Axon{output_shape: parent_shape} = x, units, opts \\ [])
    when is_integer(units) and units > 0 do
  act = opts[:activation]
  padding = opts[:padding] || :valid

  # Scalar options are broadcast over every spatial dimension of the input.
  inner_rank = Nx.rank(parent_shape) - 2
  kernel_size = tuple_or_duplicate(:kernel_size, opts[:kernel_size] || 1, inner_rank)
  strides = list_or_duplicate(:strides, opts[:strides] || 1, inner_rank)
  input_dilation = list_or_duplicate(:input_dilation, opts[:input_dilation] || 1, inner_rank)
  kernel_dilation = list_or_duplicate(:kernel_dilation, opts[:kernel_dilation] || 1, inner_rank)

  kernel_shape = Axon.Shape.conv_kernel(parent_shape, units, kernel_size)
  bias_shape = Axon.Shape.conv_bias(parent_shape, units, kernel_size)

  output_shape =
    Axon.Shape.conv(
      parent_shape,
      kernel_shape,
      strides,
      padding,
      input_dilation,
      kernel_dilation
    )

  kernel =
    param("kernel", kernel_shape,
      initializer: opts[:kernel_initializer],
      regularizer: opts[:kernel_regularizer]
    )

  bias =
    param("bias", bias_shape,
      initializer: opts[:bias_initializer] || :zeros,
      regularizer: opts[:bias_regularizer]
    )

  conv_node =
    layer(x, :conv, output_shape, %{"kernel" => kernel, "bias" => bias}, opts[:name],
      strides: strides,
      padding: padding,
      input_dilation: input_dilation,
      kernel_dilation: kernel_dilation
    )

  # Optionally append an element-wise activation node.
  if act, do: activation(conv_node, act), else: conv_node
end
@doc """
Adds a transposed convolution layer to the network.
The transposed convolution layer is sometimes referred to as a
fractionally strided convolution or (incorrectly) as a deconvolution.
Compiles to `Axon.Layers.conv_transpose/4`.
## Options
* `name` - Layer name.
* `kernel_initializer` - Initializer for `kernel` weights.
* `bias_initializer` - Initializer for `bias` weights.
* `activation` - Element-wise activation function.
* `kernel_size` - Size of the kernel spatial dimensions.
* `strides` - Stride during convolution.
* `padding` - Padding to the spatial dimensions of the input.
* `kernel_dilation` - Dilation to apply to kernel.
"""
@doc type: :convolution
def conv_transpose(%Axon{output_shape: parent_shape} = x, units, opts \\ []) do
activation = opts[:activation]
# Option defaults; scalars are broadcast over the spatial dimensions below.
kernel_size = opts[:kernel_size] || 1
strides = opts[:strides] || 1
padding = opts[:padding] || :valid
kernel_dilation = opts[:kernel_dilation] || 1
# Number of spatial dimensions (input is {batch, channels, spatial...}
# — assumed from the rank arithmetic; TODO confirm channel ordering).
inner_rank = Nx.rank(parent_shape) - 2
kernel_size = tuple_or_duplicate(:kernel_size, kernel_size, inner_rank)
strides = list_or_duplicate(:strides, strides, inner_rank)
kernel_dilation = list_or_duplicate(:kernel_dilation, kernel_dilation, inner_rank)
# Parameter shapes derived from the parent's output shape.
kernel_shape = Axon.Shape.conv_kernel(parent_shape, units, kernel_size)
bias_shape = Axon.Shape.conv_bias(parent_shape, units, kernel_size)
kernel_initializer = opts[:kernel_initializer]
kernel_regularizer = opts[:kernel_regularizer]
kernel =
param("kernel", kernel_shape,
initializer: kernel_initializer,
regularizer: kernel_regularizer
)
bias_initializer = opts[:bias_initializer] || :zeros
bias_regularizer = opts[:bias_regularizer]
bias = param("bias", bias_shape, initializer: bias_initializer, regularizer: bias_regularizer)
output_shape =
Axon.Shape.conv_transpose(
parent_shape,
kernel_shape,
strides,
padding,
kernel_dilation
)
node =
layer(x, :conv_transpose, output_shape, %{"kernel" => kernel, "bias" => bias}, opts[:name],
strides: strides,
padding: padding,
kernel_dilation: kernel_dilation
)
# Optionally chain an element-wise activation node after the convolution.
if activation do
node
|> activation(activation)
else
node
end
end
@doc """
Adds a depthwise convolution layer to the network.
The depthwise convolution layer implements a general
dimensional depthwise convolution - which is a convolution
where the feature group size is equal to the number of
input channels.
Channel multiplier grows the input channels by the given
factor. An input factor of 1 means the output channels
are the same as the input channels.
Compiles to `Axon.Layers.depthwise_conv/4`.
## Options
* `name` - Layer name.
* `kernel_initializer` - Initializer for `kernel` weights.
* `bias_initializer` - Initializer for `bias` weights.
* `activation` - Element-wise activation function.
* `kernel_size` - Size of the kernel spatial dimensions.
* `strides` - Stride during convolution.
* `padding` - Padding to the spatial dimensions of the input.
* `input_dilation` - Dilation to apply to input.
* `kernel_dilation` - Dilation to apply to kernel.
"""
@doc type: :convolution
def depthwise_conv(%Axon{output_shape: parent_shape} = x, channel_multiplier, opts \\ [])
    when is_integer(channel_multiplier) and channel_multiplier >= 1 do
  activation = opts[:activation]
  # Option defaults; scalars are broadcast over the spatial dimensions below.
  kernel_size = opts[:kernel_size] || 1
  strides = opts[:strides] || 1
  padding = opts[:padding] || :valid
  input_dilation = opts[:input_dilation] || 1
  kernel_dilation = opts[:kernel_dilation] || 1

  # Number of spatial dimensions of the input.
  inner_rank = Nx.rank(parent_shape) - 2

  kernel_size = tuple_or_duplicate(:kernel_size, kernel_size, inner_rank)
  strides = list_or_duplicate(:strides, strides, inner_rank)
  input_dilation = list_or_duplicate(:input_dilation, input_dilation, inner_rank)
  kernel_dilation = list_or_duplicate(:kernel_dilation, kernel_dilation, inner_rank)

  # Parameter and output shapes; output channels = input channels * multiplier.
  kernel_shape = Axon.Shape.depthwise_conv_kernel(parent_shape, channel_multiplier, kernel_size)
  bias_shape = Axon.Shape.depthwise_conv_bias(parent_shape, channel_multiplier, kernel_size)

  output_shape =
    Axon.Shape.depthwise_conv(
      parent_shape,
      kernel_shape,
      strides,
      padding,
      input_dilation,
      kernel_dilation
    )

  kernel_initializer = opts[:kernel_initializer]
  kernel_regularizer = opts[:kernel_regularizer]

  # BUGFIX: this parameter was registered as "weight" while the params map
  # key below and every sibling conv layer use "kernel" — made consistent.
  kernel =
    param("kernel", kernel_shape,
      initializer: kernel_initializer,
      regularizer: kernel_regularizer
    )

  bias_initializer = opts[:bias_initializer] || :zeros
  bias_regularizer = opts[:bias_regularizer]
  bias = param("bias", bias_shape, initializer: bias_initializer, regularizer: bias_regularizer)

  node =
    layer(x, :depthwise_conv, output_shape, %{"kernel" => kernel, "bias" => bias}, opts[:name],
      strides: strides,
      padding: padding,
      input_dilation: input_dilation,
      kernel_dilation: kernel_dilation
    )

  # Optionally chain an element-wise activation node after the convolution.
  if activation do
    node
    |> activation(activation)
  else
    node
  end
end
@doc """
Adds a depthwise separable 2-dimensional convolution to the
network.
Depthwise separable convolutions break the kernel into kernels
for each dimension of the input and perform a depthwise conv
over the input with each kernel.
Compiles to `Axon.Layers.separable_conv2d/6`.
## Options
* `name` - Layer name.
* `kernel_initializer` - Initializer for `kernel` weights.
* `bias_initializer` - Initializer for `bias` weights.
* `activation` - Element-wise activation function.
* `kernel_size` - Size of the kernel spatial dimensions.
* `strides` - Stride during convolution.
* `padding` - Padding to the spatial dimensions of the input.
* `input_dilation` - Dilation to apply to input.
* `kernel_dilation` - Dilation to apply to kernel.
"""
@doc type: :convolution
def separable_conv2d(%Axon{output_shape: parent_shape} = x, channel_multiplier, opts \\ [])
when is_integer(channel_multiplier) and channel_multiplier >= 1 do
activation = opts[:activation]
# Option defaults; scalars are broadcast over the spatial dimensions below.
kernel_size = opts[:kernel_size] || 1
strides = opts[:strides] || 1
padding = opts[:padding] || :valid
input_dilation = opts[:input_dilation] || 1
kernel_dilation = opts[:kernel_dilation] || 1
# Number of spatial dimensions of the input (expected to be 2 here).
inner_rank = Nx.rank(parent_shape) - 2
kernel_size = tuple_or_duplicate(:kernel_size, kernel_size, inner_rank)
strides = list_or_duplicate(:strides, strides, inner_rank)
input_dilation = list_or_duplicate(:input_dilation, input_dilation, inner_rank)
kernel_dilation = list_or_duplicate(:kernel_dilation, kernel_dilation, inner_rank)
# One factored kernel per spatial dimension (positions 1 and 2).
k1_shape =
Axon.Shape.separable_conv2d_kernel(parent_shape, channel_multiplier, kernel_size, 1)
k2_shape =
Axon.Shape.separable_conv2d_kernel(parent_shape, channel_multiplier, kernel_size, 2)
# NOTE(review): both bias shapes are computed identically — presumably
# intentional (each factored conv has the same bias shape); confirm.
b1_shape = Axon.Shape.separable_conv2d_bias(parent_shape, channel_multiplier, kernel_size)
b2_shape = Axon.Shape.separable_conv2d_bias(parent_shape, channel_multiplier, kernel_size)
# Output shape equals that of the equivalent (unfactored) depthwise conv.
output_shape =
Axon.Shape.depthwise_conv(
parent_shape,
Axon.Shape.depthwise_conv_kernel(parent_shape, channel_multiplier, kernel_size),
strides,
padding,
input_dilation,
kernel_dilation
)
kernel_initializer = opts[:kernel_initializer]
kernel_regularizer = opts[:kernel_regularizer]
k1 =
param("kernel_1", k1_shape, initializer: kernel_initializer, regularizer: kernel_regularizer)
k2 =
param("kernel_2", k2_shape, initializer: kernel_initializer, regularizer: kernel_regularizer)
bias_initializer = opts[:bias_initializer] || :zeros
bias_regularizer = opts[:bias_regularizer]
b1 = param("bias_1", b1_shape, initializer: bias_initializer, regularizer: bias_regularizer)
b2 = param("bias_2", b2_shape, initializer: bias_initializer, regularizer: bias_regularizer)
node =
layer(
x,
:separable_conv2d,
output_shape,
%{"k1" => k1, "b1" => b1, "k2" => k2, "b2" => b2},
opts[:name],
strides: strides,
padding: padding,
input_dilation: input_dilation,
kernel_dilation: kernel_dilation
)
# Optionally chain an element-wise activation node after the convolution.
if activation do
node
|> activation(activation)
else
node
end
end
@doc """
Adds a depthwise separable 3-dimensional convolution to the
network.
Depthwise separable convolutions break the kernel into kernels
for each dimension of the input and perform a depthwise conv
over the input with each kernel.
Compiles to `Axon.Layers.separable_conv3d/8`.
## Options
* `name` - Layer name.
* `kernel_initializer` - Initializer for `kernel` weights.
* `bias_initializer` - Initializer for `bias` weights.
* `activation` - Element-wise activation function.
* `kernel_size` - Size of the kernel spatial dimensions.
* `strides` - Stride during convolution.
* `padding` - Padding to the spatial dimensions of the input.
* `input_dilation` - Dilation to apply to input.
* `kernel_dilation` - Dilation to apply to kernel.
"""
@doc type: :convolution
def separable_conv3d(%Axon{output_shape: parent_shape} = x, channel_multiplier, opts \\ [])
when is_integer(channel_multiplier) and channel_multiplier >= 1 do
activation = opts[:activation]
kernel_size = opts[:kernel_size] || 1
strides = opts[:strides] || 1
padding = opts[:padding] || :valid
input_dilation = opts[:input_dilation] || 1
kernel_dilation = opts[:kernel_dilation] || 1
inner_rank = Nx.rank(parent_shape) - 2
kernel_size = tuple_or_duplicate(:kernel_size, kernel_size, inner_rank)
strides = list_or_duplicate(:strides, strides, inner_rank)
input_dilation = list_or_duplicate(:input_dilation, input_dilation, inner_rank)
kernel_dilation = list_or_duplicate(:kernel_dilation, kernel_dilation, inner_rank)
k1_shape =
Axon.Shape.separable_conv3d_kernel(parent_shape, channel_multiplier, kernel_size, 1)
k2_shape =
Axon.Shape.separable_conv3d_kernel(parent_shape, channel_multiplier, kernel_size, 2)
k3_shape =
Axon.Shape.separable_conv3d_kernel(parent_shape, channel_multiplier, kernel_size, 3)
b1_shape = Axon.Shape.separable_conv3d_bias(parent_shape, channel_multiplier, kernel_size)
b2_shape = Axon.Shape.separable_conv3d_bias(parent_shape, channel_multiplier, kernel_size)
b3_shape = Axon.Shape.separable_conv3d_bias(parent_shape, channel_multiplier, kernel_size)
output_shape =
Axon.Shape.depthwise_conv(
parent_shape,
Axon.Shape.depthwise_conv_kernel(parent_shape, channel_multiplier, kernel_size),
strides,
padding,
input_dilation,
kernel_dilation
)
kernel_initializer = opts[:kernel_initializer]
kernel_regularizer = opts[:kernel_regularizer]
k1 =
param("kernel_1", k1_shape, initializer: kernel_initializer, regularizer: kernel_regularizer)
k2 =
param("kernel_2", k2_shape, initializer: kernel_initializer, regularizer: kernel_regularizer)
k3 =
param("kernel_3", k3_shape, initializer: kernel_initializer, regularizer: kernel_regularizer)
bias_initializer = opts[:bias_initializer] || :zeros
bias_regularizer = opts[:bias_regularizer]
b1 = param("bias_1", b1_shape, initializer: bias_initializer, regularizer: bias_regularizer)
b2 = param("bias_2", b2_shape, initializer: bias_initializer, regularizer: bias_regularizer)
b3 = param("bias_3", b3_shape, initializer: bias_initializer, regularizer: bias_regularizer)
node =
layer(
x,
:separable_conv3d,
output_shape,
%{"k1" => k1, "b1" => b1, "k2" => k2, "b2" => b2, "k3" => k3, "b3" => b3},
opts[:name],
strides: strides,
padding: padding,
input_dilation: input_dilation,
kernel_dilation: kernel_dilation
)
if activation do
node
|> activation(activation)
else
node
end
end
# Catalog of activation layers to generate below.
# Each entry is {function_atom, human-readable name, indefinite article for docs}.
@activation_layers [
  {:celu, "Continuously-differentiable exponential linear unit", "a"},
  {:elu, "Exponential linear unit", "an"},
  {:exp, "Exponential", "an"},
  {:gelu, "Gaussian error linear unit", "a"},
  {:hard_sigmoid, "Hard sigmoid", "a"},
  {:hard_silu, "Hard sigmoid weighted linear unit", "a"},
  {:hard_tanh, "Hard hyperbolic tangent", "a"},
  {:leaky_relu, "Leaky rectified linear unit", "a"},
  {:linear, "Linear", "a"},
  {:log_sigmoid, "Log-sigmoid", "a"},
  {:mish, "Mish", "a"},
  {:relu, "Rectified linear unit", "a"},
  {:relu6, "Rectified linear unit 6", "a"},
  {:sigmoid, "Sigmoid", "a"},
  {:silu, "Sigmoid weighted linear unit", "a"},
  {:selu, "Scaled exponential linear unit", "a"},
  {:softmax, "Softmax", "a"},
  {:softplus, "Softplus", "a"},
  {:softsign, "Softsign", "a"},
  {:tanh, "Hyperbolic tangent", "a"}
]
@doc """
Adds an activation layer to the network.
Activation layers are element-wise functions typically called
after the output of another layer.
## Options
- `name` - Layer name.
"""
@doc type: :activation
def activation(x, activation, opts \\ [])
def activation(%Axon{output_shape: shape} = x, activation, opts) when is_atom(activation) do
layer(x, activation, shape, %{}, opts[:name], opts)
end
def activation(%Axon{output_shape: shape} = x, activation, opts)
when is_function(activation, 1) do
layer(x, activation, shape, %{}, opts[:name], opts)
end
## Activation

# Generate one convenience function per activation in @activation_layers,
# each simply delegating to activation/3 with the corresponding atom.
for {activation, name, a_or_an} <- @activation_layers do
  @doc """
  Adds #{a_or_an} #{name} activation layer to the network.

  See `Axon.Activations.#{Atom.to_string(activation)}/1` for more details.

  ## Options

    - `name` - Layer name.
  """
  @doc type: :activation
  def unquote(activation)(%Axon{} = x, opts \\ []) do
    activation(x, unquote(activation), opts)
  end
end
## Dropout

# Catalog of dropout layers to generate below.
# Each entry is {function_atom, human-readable name, indefinite article for docs}.
@dropout_layers [
  {:dropout, "Dropout", "a"},
  {:feature_alpha_dropout, "Feature alpha dropout", "a"},
  {:spatial_dropout, "Spatial dropout", "a"},
  {:alpha_dropout, "Alpha dropout", "an"}
]

# Generate one public function per dropout variant, delegating to the
# shared private dropout/3 implementation.
for {dropout, name, a_or_an} <- @dropout_layers do
  @doc """
  Adds #{a_or_an} #{name} layer to the network.

  See `Axon.Layers.#{Atom.to_string(dropout)}/2` for more details.

  ## Options

    * `:name` - Layer name.
    * `:rate` - Dropout rate.
  """
  @doc type: :dropout
  def unquote(dropout)(%Axon{} = x, opts \\ []) do
    dropout(x, unquote(dropout), opts)
  end
end
# Shared implementation for all dropout variants. Dropout does not change
# the shape of its input; the rate defaults to 0.5.
defp dropout(%Axon{output_shape: shape} = input, dropout, opts) do
  layer(input, dropout, shape, %{}, opts[:name], rate: opts[:rate] || 0.5)
end
## Pooling

# Catalog of pooling layers to generate below.
@pooling_layers [
  {:max_pool, "Max pooling", "a"},
  {:avg_pool, "Average pooling", "an"},
  {:lp_pool, "Power average pooling", "a"}
]

# Generate one public function per pooling variant, delegating to the
# shared private pool/3 implementation.
for {pool, name, a_or_an} <- @pooling_layers do
  @doc """
  Adds #{a_or_an} #{name} layer to the network.

  See `Axon.Layers.#{Atom.to_string(pool)}/2` for more details.

  ## Options

    * `:name` - Layer name.
    * `:kernel_size` - Pooling kernel size.
    * `:strides` - Pooling strides.
  """
  @doc type: :pooling
  def unquote(pool)(%Axon{} = x, opts \\ []) do
    pool(x, unquote(pool), opts)
  end
end
# Shared implementation for max/avg/lp pooling. Normalizes kernel size and
# strides across the spatial dimensions, then infers the pooled output shape.
defp pool(%Axon{output_shape: parent_shape} = input, pool, opts) do
  # Spatial rank excludes batch and channel dimensions.
  spatial_rank = Nx.rank(parent_shape) - 2

  padding = opts[:padding] || :valid
  window = tuple_or_duplicate(:kernel_size, opts[:kernel_size] || 1, spatial_rank)
  steps = list_or_duplicate(:strides, opts[:strides] || 1, spatial_rank)

  pooled_shape = Axon.Shape.pool(parent_shape, window, steps, padding)

  layer(input, pool, pooled_shape, %{}, opts[:name],
    kernel_size: window,
    strides: steps,
    padding: padding
  )
end
## Adaptive Pooling

# Catalog of adaptive pooling layers to generate below.
@adaptive_pooling_layers [
  {:adaptive_avg_pool, "Adaptive average pooling", "an"},
  {:adaptive_max_pool, "Adaptive max pooling", "an"}
]

# Generate one public function per adaptive pooling variant, delegating to
# the shared private adaptive_pool/3 implementation.
for {pool, name, a_or_an} <- @adaptive_pooling_layers do
  @doc """
  Adds #{a_or_an} #{name} layer to the network.

  See `Axon.Layers.#{Atom.to_string(pool)}/2` for more details.

  ## Options

    * `:name` - Layer name.
    * `:output_size` - Layer output size.
  """
  @doc type: :pooling
  def unquote(pool)(%Axon{} = x, opts \\ []) do
    adaptive_pool(x, unquote(pool), opts)
  end
end

# Shared implementation for adaptive pooling layers.
# Fix: helper was previously misspelled `adaptative_pool`; renamed for
# consistency with the generated layer names (private, so callers outside
# this module are unaffected).
defp adaptive_pool(%Axon{output_shape: parent_shape} = x, pool, opts) do
  # Spatial rank excludes batch and channel dimensions.
  inner_rank = Nx.rank(parent_shape) - 2
  # `:output_size` may be a single integer (duplicated per spatial dim)
  # or a tuple with one entry per spatial dimension.
  output_size = tuple_or_duplicate(:output_size, opts[:output_size], inner_rank)
  output_shape = Axon.Shape.adaptive_pool(parent_shape, output_size)
  layer(x, pool, output_shape, %{}, opts[:name], output_size: output_size)
end
## Normalization

# Catalog of normalization layers to generate below.
@normalization_layers [
  {:batch_norm, "Batch normalization", "a"},
  {:layer_norm, "Layer normalization", "a"},
  {:instance_norm, "Instance normalization", "an"}
]

# Generate one public function per normalization variant, delegating to the
# shared private norm/3 implementation.
for {norm, name, a_or_an} <- @normalization_layers do
  @doc """
  Adds #{a_or_an} #{name} layer to the network.

  See `Axon.Layers.#{Atom.to_string(norm)}/4` for more details.

  ## Options

    * `:name` - Layer name.
    * `:gamma_initializer` - Gamma parameter initializer.
    * `:beta_initializer` - Beta parameter initializer.
    * `:channel_index` - Input feature index used for calculating
      mean and variance.
    * `:epsilon` - Numerical stability term.
  """
  @doc type: :normalization
  def unquote(norm)(%Axon{} = x, opts \\ []) do
    norm(x, unquote(norm), opts)
  end
end
# Shared implementation for batch/layer/instance normalization. Builds the
# learnable gamma (scale) and beta (shift) parameters, both shaped from the
# channel dimension, and emits a norm layer with the same output shape.
defp norm(%Axon{output_shape: shape} = input, norm, opts) do
  channel_index = opts[:channel_index] || 1
  epsilon = opts[:epsilon] || 1.0e-5

  # Gamma and beta share the same per-channel parameter shape.
  param_shape = Axon.Shape.norm_param(shape, channel_index)

  gamma =
    param("gamma", param_shape,
      initializer: opts[:gamma_initializer],
      regularizer: opts[:gamma_regularizer]
    )

  beta =
    param("beta", param_shape,
      initializer: opts[:beta_initializer] || :zeros,
      regularizer: opts[:beta_regularizer]
    )

  layer(input, norm, shape, %{"gamma" => gamma, "beta" => beta}, opts[:name],
    epsilon: epsilon,
    channel_index: channel_index
  )
end
@doc """
Adds a group normalization layer to the network.
See `Axon.Layers.group_norm/4` for more details.
## Options
* `:name` - Layer name.
* `:gamma_initializer` - Gamma parameter initializer.
* `:beta_initializer` - Beta parameter initializer.
* `:channel_index` - Input feature index used for calculating
mean and variance.
* `:epsilon` - Numerical stability term.
"""
@doc type: :normalization
def group_norm(%Axon{output_shape: shape} = x, group_size, opts \\ [])
when is_integer(group_size) and group_size >= 1 do
channel_index = opts[:channel_index] || 1
epsilon = opts[:epsilon] || 1.0e-5
gamma_shape = Axon.Shape.norm_param(shape, channel_index)
beta_shape = Axon.Shape.norm_param(shape, channel_index)
gamma_initializer = opts[:gamma_initializer]
gamma_regularizer = opts[:gamma_regularizer]
gamma =
param("gamma", gamma_shape, initializer: gamma_initializer, regularizer: gamma_regularizer)
beta_initializer = opts[:beta_initializer] || :zeros
beta_regularizer = opts[:beta_regularizer]
beta = param("beta", beta_shape, initializer: beta_initializer, regularizer: beta_regularizer)
layer(x, :group_norm, shape, %{"gamma" => gamma, "beta" => beta}, opts[:name],
epsilon: epsilon,
channel_index: channel_index,
group_size: group_size
)
end
@doc """
Applies the given `Nx` expression to the input.
## Options
* `name` - Layer name.
"""
@doc type: :special
def nx(%Axon{output_shape: shape} = x, fun, opts \\ []) when is_function(fun, 1) do
# Some shape rules will not like nil batch shape
batch_size = elem(shape, 0)
shape = Tuple.delete_at(shape, 0)
param = Nx.Defn.Expr.parameter(:nx, {:f, 32}, shape, 0)
expr = Nx.Defn.jit(fun, [param], compiler: Axon.Defn)
output_shape = Tuple.insert_at(expr.shape, 0, batch_size)
layer(x, fun, output_shape, %{}, opts[:name])
end
@doc """
Adds a flatten layer to the network.
This layer will flatten all but the batch dimensions
of the input into a single layer. Typically called to flatten
the output of a convolution for use with a dense layer.
## Options
* `:name` - Layer name.
"""
@doc type: :shape
def flatten(%Axon{output_shape: shape} = x, opts \\ []) do
output_shape = Axon.Shape.flatten(shape)
layer(x, :flatten, output_shape, %{}, opts[:name])
end
@doc """
Adds a reshape layer to the network.
This layer will reshape non-batch dimensions of the input.
New shape must be compatible with old shape.
## Options
* `:name` - Layer name.
"""
@doc type: :shape
def reshape(%Axon{output_shape: shape} = x, new_shape, opts \\ []) do
output_shape = Axon.Shape.reshape(shape, new_shape)
layer(x, :reshape, output_shape, %{}, opts[:name])
end
@doc """
Adds a transpose layer to the network.
This layer will transpose non-batch dimensions of the input.
## Options
* `:name` - Layer name.
"""
@doc type: :shape
def transpose(%Axon{output_shape: shape} = x, permutation, opts \\ []) do
output_shape = Axon.Shape.transpose(shape, permutation)
layer(x, :transpose, output_shape, %{}, opts[:name], permutation: permutation)
end
@doc """
Adds a pad layer to the network.
This layer will pad the spatial dimensions of the input.
Padding configuration is a list of tuples for each spatial
dimension.
## Options
* `:name` - Layer name.
"""
@doc type: :shape
def pad(%Axon{output_shape: shape} = x, config, value \\ 0.0, opts \\ [])
when is_list(config) and is_number(value) do
output_shape = Axon.Shape.pad(shape, config)
layer(x, :pad, output_shape, %{}, opts[:name], padding_config: config, value: value)
end
@doc """
Adds a concatenate layer to the network.
This layer will concatenate inputs along the last
dimension unless specified otherwise.
## Options
* `:name` - Layer name.
* `:axis` - Concatenate axis.
"""
@doc type: :composition
def concatenate(%Axon{output_shape: x_shape} = x, %Axon{output_shape: y_shape} = y, opts)
when is_list(opts) do
axis = opts[:axis] || Nx.rank(x_shape) - 1
output_shape = Axon.Shape.concatenate([x_shape, y_shape], axis)
layer([x, y], :concatenate, output_shape, %{}, opts[:name], axis: axis)
end
@doc type: :composition
def concatenate([%Axon{output_shape: shape} | _] = inputs, opts)
when is_list(inputs) and is_list(opts) do
axis = opts[:axis] || Nx.rank(shape) - 1
input_shapes = inputs |> Enum.map(fn %Axon{output_shape: shape} -> shape end)
output_shape = Axon.Shape.concatenate(input_shapes, axis)
layer(inputs, :concatenate, output_shape, %{}, opts[:name], axis: axis)
end
@doc false
def concatenate(%Axon{} = x, %Axon{} = y), do: concatenate(x, y, [])
@doc false
def concatenate(inputs) when is_list(inputs), do: concatenate(inputs, [])
# Element-wise combinators generated for each op below.
@element_wise_layers [:add, :subtract, :multiply]

for op <- @element_wise_layers do
  @doc """
  Adds a #{op} layer to the network.

  This layer performs an element-wise #{Atom.to_string(op)} operation
  on input layers. All input layers must be the same shape.

  ## Options

    * `:name` - Layer name.
  """
  @doc type: :composition
  # Two-input form: matching `shape` twice enforces equal shapes.
  def unquote(op)(%Axon{output_shape: shape} = x, %Axon{output_shape: shape} = y, opts) do
    Axon.layer([x, y], unquote(op), shape, %{}, opts[:name])
  end

  @doc """
  Adds a #{op} layer to the network.

  This layer performs an element-wise #{Atom.to_string(op)} operation
  on all input layers. All input layers must be the same shape.

  ## Options

    * `:name` - Layer name.
  """
  @doc type: :composition
  def unquote(op)([%Axon{output_shape: shape} | rest] = inputs, opts)
      when is_list(inputs) and is_list(opts) do
    # Verify every input shape matches the first; raise otherwise.
    output_shape =
      Enum.reduce(rest, shape, fn %Axon{output_shape: shape}, acc ->
        unless shape == acc do
          raise ArgumentError, "all input shapes must match"
        end

        shape
      end)

    # Fix: name and opts were previously swapped -- the call passed `[]` as
    # the layer name and `opts[:name]` as the option list. Matches the
    # two-input clause above and the concatenate/2 call site.
    layer(inputs, unquote(op), output_shape, %{}, opts[:name])
  end

  @doc false
  def unquote(op)(%Axon{output_shape: shape} = x, %Axon{output_shape: shape} = y) do
    unquote(op)(x, y, [])
  end

  @doc false
  def unquote(op)([%Axon{} | _] = inputs), do: unquote(op)(inputs, [])
end
@doc """
Adds a long short-term memory (LSTM) layer to the network.
LSTMs apply `Axon.Recurrent.lstm_cell/7` over an entire input
sequence and return:
{{new_cell, new_hidden}, output_sequence}
You can use the output state as the hidden state of another
LSTM layer with the `:hidden_state` option.
## Options
* `:activation` - recurrent activation. Defaults to `:tanh`.
* `:gate` - recurrent gate function. Defaults to `:sigmoid`.
* `:hidden_state` - initial hidden state. Defaults to `nil`.
* `:unroll` - `:dynamic` (loop preserving) or `:static` (compiled)
unrolling of RNN.
"""
@doc type: :recurrent
def lstm(%Axon{output_shape: shape} = x, units, opts \\ [])
when is_integer(units) and units > 0 do
activation = opts[:activation] || :tanh
gate = opts[:gate] || :sigmoid
hidden_state = opts[:hidden_state]
unroll = opts[:unroll] || :dynamic
output_shape = Axon.Shape.rnn(shape, units, "LSTM")
input_kernel_shape = Axon.Shape.rnn_input_kernel(shape, units, "LSTM")
hidden_kernel_shape = Axon.Shape.rnn_hidden_kernel(shape, units, "LSTM")
bias_shape = Axon.Shape.rnn_bias(shape, units, "LSTM")
hidden_state_shape = Axon.Shape.rnn_hidden_state(shape, units, "LSTM")
kernel_initializer = opts[:kernel_initializer] || :glorot_uniform
recurrent_initializer = opts[:recurrent_initializer] || :glorot_uniform
bias_initializer = opts[:bias_initializer] || :zeros
# Parameters
wii = param("wii", input_kernel_shape, initializer: kernel_initializer)
wif = param("wif", input_kernel_shape, initializer: kernel_initializer)
wig = param("wig", input_kernel_shape, initializer: kernel_initializer)
wio = param("wio", input_kernel_shape, initializer: kernel_initializer)
whi = param("whi", hidden_kernel_shape, initializer: kernel_initializer)
whf = param("whf", hidden_kernel_shape, initializer: kernel_initializer)
whg = param("whg", hidden_kernel_shape, initializer: kernel_initializer)
who = param("who", hidden_kernel_shape, initializer: kernel_initializer)
bi = param("bi", bias_shape, initializer: bias_initializer)
bf = param("bf", bias_shape, initializer: bias_initializer)
bg = param("bg", bias_shape, initializer: bias_initializer)
bo = param("bo", bias_shape, initializer: bias_initializer)
output =
layer(
x,
:lstm,
{{hidden_state_shape, hidden_state_shape}, output_shape},
%{
"wii" => wii,
"wif" => wif,
"wig" => wig,
"wio" => wio,
"whi" => whi,
"whf" => whf,
"whg" => whg,
"who" => who,
"bi" => bi,
"bf" => bf,
"bg" => bg,
"bo" => bo
},
opts[:name],
activation: activation,
gate: gate,
hidden_state: hidden_state,
hidden_state_shape: hidden_state_shape,
recurrent_initializer: recurrent_initializer,
unroll: unroll
)
new_c = layer(output, fn x, _ -> elem(elem(x, 0), 0) end, hidden_state_shape, %{})
new_h = layer(output, fn x, _ -> elem(elem(x, 0), 1) end, hidden_state_shape, %{})
output_sequence = layer(output, fn x, _ -> elem(x, 1) end, output_shape, %{})
{{new_c, new_h}, output_sequence}
end
@doc """
Adds a gated recurrent unit (GRU) layer to the network.
GRUs apply `Axon.Recurrent.gru_cell/7` over an entire input
sequence and return:
{{new_hidden}, output_sequence}
You can use the output state as the hidden state of another
LSTM layer with the `:hidden_state` option.
## Options
* `:activation` - recurrent activation. Defaults to `:tanh`.
* `:gate` - recurrent gate function. Defaults to `:sigmoid`.
* `:hidden_state` - initial hidden state. Defaults to `nil`.
* `:unroll` - `:dynamic` (loop preserving) or `:static` (compiled)
unrolling of RNN.
"""
@doc type: :recurrent
def gru(%Axon{output_shape: shape} = x, units, opts \\ [])
when is_integer(units) and units > 0 do
activation = opts[:activation] || :tanh
gate = opts[:gate] || :sigmoid
hidden_state = opts[:hidden_state]
unroll = opts[:unroll] || :dynamic
output_shape = Axon.Shape.rnn(shape, units, "GRU")
input_kernel_shape = Axon.Shape.rnn_input_kernel(shape, units, "GRU")
hidden_kernel_shape = Axon.Shape.rnn_hidden_kernel(shape, units, "GRU")
bias_shape = Axon.Shape.rnn_bias(shape, units, "GRU")
hidden_state_shape = Axon.Shape.rnn_hidden_state(shape, units, "GRU")
kernel_initializer = opts[:kernel_initializer] || :glorot_uniform
recurrent_initializer = opts[:recurrent_initializer] || :glorot_uniform
bias_initializer = opts[:bias_initializer] || :zeros
wir = param("wir", input_kernel_shape, initializer: kernel_initializer)
wiz = param("wiz", input_kernel_shape, initializer: kernel_initializer)
win = param("win", input_kernel_shape, initializer: kernel_initializer)
whr = param("whr", hidden_kernel_shape, initializer: kernel_initializer)
whz = param("whz", hidden_kernel_shape, initializer: kernel_initializer)
whn = param("whn", hidden_kernel_shape, initializer: kernel_initializer)
br = param("br", bias_shape, initializer: bias_initializer)
bz = param("bz", bias_shape, initializer: bias_initializer)
bin = param("bin", bias_shape, initializer: bias_initializer)
bhn = param("bhn", bias_shape, initializer: bias_initializer)
output =
layer(
x,
:gru,
{{hidden_state_shape}, output_shape},
%{
"wir" => wir,
"wiz" => wiz,
"win" => win,
"whr" => whr,
"whz" => whz,
"whn" => whn,
"br" => br,
"bz" => bz,
"bin" => bin,
"bhn" => bhn
},
opts[:name],
activation: activation,
gate: gate,
hidden_state: hidden_state,
hidden_state_shape: hidden_state_shape,
recurrent_initializer: recurrent_initializer,
unroll: unroll
)
new_h = layer(output, fn x, _ -> elem(elem(x, 0), 0) end, hidden_state_shape, %{})
output_sequence = layer(output, fn x, _ -> elem(x, 1) end, output_shape, %{})
{{new_h}, output_sequence}
end
@doc """
Adds a convolutional long short-term memory (LSTM) layer to the network.
ConvLSTMs apply `Axon.Recurrent.conv_lstm_cell/5` over an entire input
sequence and return:
{{new_cell, new_hidden}, output_sequence}
You can use the output state as the hidden state of another
LSTM layer with the `:hidden_state` option.
## Options
* `:padding` - convolutional padding. Defaults to `:same`.
* `:kernel_size` - convolutional kernel size. Defaults to `1`.
* `:strides` - convolutional strides. Defaults to `1`.
* `:hidden_state` - initial hidden state. Defaults to `nil`.
* `:unroll` - `:dynamic` (loop preserving) or `:static` (compiled)
unrolling of RNN.
"""
@doc type: :recurrent
def conv_lstm(%Axon{output_shape: shape} = x, units, opts \\ []) do
padding = opts[:padding] || :same
kernel_size = opts[:kernel_size] || 1
strides = opts[:strides] || 1
hidden_state = opts[:hidden_state]
unroll = opts[:unroll] || :dynamic
kernel_size = tuple_or_duplicate(:kernel_size, kernel_size, 1)
strides = list_or_duplicate(:strides, strides, 1)
hidden_state_shape = Axon.Shape.rnn_hidden_state(shape, units, "ConvLSTM")
input_kernel_shape = Axon.Shape.conv_kernel(shape, 4 * units, kernel_size)
hidden_kernel_shape = Axon.Shape.conv_kernel(hidden_state_shape, 4 * units, kernel_size)
bias_shape = Axon.Shape.conv_bias(shape, 4 * units, kernel_size)
output_shape = Axon.Shape.rnn(shape, units, "ConvLSTM")
kernel_initializer = opts[:kernel_initializer] || :glorot_uniform
recurrent_initializer = opts[:recurrent_initializer] || :glorot_uniform
bias_initializer = opts[:bias_initializer] || :zeros
wi = param("wi", input_kernel_shape, initializer: kernel_initializer)
wh = param("wh", hidden_kernel_shape, initializer: kernel_initializer)
b = param("b", bias_shape, initializer: bias_initializer)
output =
layer(
x,
:conv_lstm,
{{hidden_state_shape, hidden_state_shape}, output_shape},
%{"wi" => wi, "wh" => wh, "b" => b},
opts[:name],
strides: strides,
padding: padding,
hidden_state: hidden_state,
hidden_state_shape: hidden_state_shape,
recurrent_initializer: recurrent_initializer,
unroll: unroll
)
new_c = layer(output, fn x, _ -> elem(elem(x, 0), 0) end, hidden_state_shape, %{})
new_h = layer(output, fn x, _ -> elem(elem(x, 0), 1) end, hidden_state_shape, %{})
output_sequence = layer(output, fn x, _ -> elem(x, 1) end, output_shape, %{})
{{new_c, new_h}, output_sequence}
end
@doc """
Freezes parameters returned from `fun` in the given
model. `fun` takes the model's parameter list and returns
the list of parameters it wishes to freeze. `fun` defaults
to the identity function, freezing all of the parameters in
`model`.
Freezing parameters is useful when performing transfer learning
to leverage features learned from another problem in a new problem.
For example, it's common to combine the convolutional base from
larger models trained on ImageNet with fresh fully-connected classifiers.
The combined model is then trained on fresh data, with the convolutional
base frozen so as not to lose information. You can see this example in code
here:
cnn_base = get_pretrained_cnn_base()
model =
cnn_base
|> Axon.freeze()
|> Axon.flatten()
|> Axon.dense(1024, activation: :relu)
|> Axon.dropout()
|> Axon.dense(1000, activation: :softmax)
model
|> Axon.Training.step(:categorical_cross_entropy, Axon.Optimizers.adam(0.005))
|> Axon.Training.train(input, targets, epochs: 10)
When compiled, frozen parameters are wrapped in `Nx.Defn.Kernel.stop_grad/1`,
which zeros out the gradient with respect to the frozen parameter. Gradients
of frozen parameters will return `0.0`, meaning they won't be changed during
the update process.
"""
def freeze(%Axon{} = model, fun \\ & &1) when is_function(fun, 1) do
parameters =
model
|> get_params([])
|> Enum.uniq()
parameters_to_freeze = fun.(parameters)
do_freeze(model, parameters_to_freeze)
end
# Accumulates all %Axon.Parameter{} structs reachable from a model graph.
# Handles tuple outputs (e.g. RNN layers return tuples of nodes).
defp get_params(model, acc) when is_tuple(model) do
  model
  |> Tuple.to_list()
  |> Enum.reduce(acc, &get_params/2)
end

# Input layers have no parameters and terminate the recursion.
defp get_params(%Axon{op: :input}, acc), do: acc

# Multi-parent nodes (e.g. concatenate, element-wise ops).
defp get_params(%Axon{parent: x}, acc) when is_list(x) do
  Enum.reduce(x, acc, &get_params/2)
end

defp get_params(%Axon{parent: x, params: params, opts: opts}, acc) do
  # Recurrent layers may carry an initial hidden state that is itself a
  # tuple of Axon nodes; collect its parameters as well.
  acc =
    case opts[:hidden_state] do
      state when is_tuple(state) ->
        state
        |> Tuple.to_list()
        |> Enum.reduce(acc, &get_params/2)

      nil ->
        acc
    end

  # Prepend this node's own parameters, then recurse into the parent.
  get_params(x, Enum.reduce(Map.values(params), acc, fn x, ls -> [x | ls] end))
end
# Walks the graph and sets `frozen: true` on every parameter whose name
# appears in `parameters_to_freeze`. Input layers terminate the recursion.
defp do_freeze(%Axon{op: :input} = x, _), do: x

# Multi-parent nodes: freeze each parent branch.
defp do_freeze(%Axon{parent: parent} = x, parameters_to_freeze) when is_list(parent) do
  parent = Enum.map(parent, &do_freeze(&1, parameters_to_freeze))
  %{x | parent: parent}
end

defp do_freeze(%Axon{parent: parent, params: params} = x, parameters_to_freeze) do
  parent = do_freeze(parent, parameters_to_freeze)

  # Parameters are matched by name against the freeze list.
  params =
    params
    |> Map.new(fn {k, %{name: param_name} = v} ->
      if Enum.any?(parameters_to_freeze, fn %{name: name} -> name == param_name end) do
        {k, %{v | frozen: true}}
      else
        {k, v}
      end
    end)

  %{x | parent: parent, params: params}
end

# Tuple outputs (e.g. from RNN layers): freeze each element.
defp do_freeze(x, parameters_to_freeze) when is_tuple(x) do
  x
  |> Tuple.to_list()
  |> Enum.map(&do_freeze(&1, parameters_to_freeze))
end
@doc """
Compiles the given model to `{init_fn, predict_fn}`.
"""
@doc type: :compilation
def compile(model) do
Axon.Compiler.__compile__(model)
end
@doc """
Compiles and runs the given models initialization function
with the given compiler options.
"""
@doc type: :execution
defmacro init(model, opts \\ []) do
define_init(model, :init, [], opts)
end
@doc """
Compiles and runs the given Axon model with `params` on
`input` with the given compiler options.
"""
@doc type: :execution
defmacro predict(model, params, input, opts \\ []) do
define_predict(model, :predict, [params, input], opts)
end
@doc """
Compiles and runs the given Axon model's penalty function
on `params` with the given compiler options.
"""
@doc type: :execution
defmacro penalty(model, params, opts \\ []) do
define_penalty(model, :penalty, [params], opts)
end
## Implementation

# Each helper below returns quoted code that defers to the Axon compiler at
# defn-transform time. The model/opts/caller are unquoted inside the
# transform so they are evaluated in the caller's context.
defp define_init(model, caller, args, opts \\ []) do
  quote do
    Nx.Defn.Kernel.transform(unquote(args), fn args ->
      model = unquote(model)
      opts = unquote(opts)
      caller = unquote(caller)
      Axon.Compiler.__jit_init__(model, caller, args, opts)
    end)
  end
end

defp define_predict(model, caller, args, opts \\ []) do
  quote do
    Nx.Defn.Kernel.transform(unquote(args), fn args ->
      model = unquote(model)
      opts = unquote(opts)
      caller = unquote(caller)
      Axon.Compiler.__jit_predict__(model, caller, args, opts)
    end)
  end
end

defp define_penalty(model, caller, args, opts \\ []) do
  quote do
    Nx.Defn.Kernel.transform(unquote(args), fn args ->
      model = unquote(model)
      opts = unquote(opts)
      caller = unquote(caller)
      Axon.Compiler.__jit_penalty__(model, caller, args, opts)
    end)
  end
end
## Inspection

# Renders the model as a table (via TableRex) with one row per graph node:
# layer name/op, output shape, and parameter count.
defimpl Inspect do
  import Inspect.Algebra

  def inspect(axon, _opts) do
    title = "Model"
    header = ["Layer", "Shape", "Parameters"]

    # The cache maps node id -> row, deduplicating shared sub-graphs.
    {_, cache} = axon_to_rows(axon, %{})

    # Sorting by id yields insertion/topological order of the graph nodes.
    rows =
      cache
      |> Enum.sort()
      |> Enum.unzip()
      |> Kernel.elem(1)

    rows
    |> TableRex.Table.new(header, title)
    |> TableRex.Table.render!(
      header_separator_symbol: "=",
      title_separator_symbol: "=",
      vertical_style: :off
    )
    |> string()
  end

  # Memoized conversion of a node to a table row; each node is rendered once.
  defp axon_to_rows(%{id: id} = graph, cache) do
    case cache do
      %{^id => row} ->
        {row, cache}

      %{} ->
        {row, cache} = do_axon_to_rows(graph, cache)
        cache = Map.put(cache, id, row)
        {row, cache}
    end
  end

  # Multi-parent node: list the parent layer names alongside the op.
  # NOTE(review): the parameter cell here is the integer 0 while the other
  # clause renders a string ("#{num_params}") -- inconsistent cell types;
  # TableRex appears to accept both, but confirm before relying on it.
  defp do_axon_to_rows(%Axon{op: op, parent: parents, name: name, output_shape: shape}, cache)
       when is_list(parents) do
    {names, cache} =
      Enum.map_reduce(parents, cache, fn %Axon{name: name} = graph, cache ->
        {_, cache} = axon_to_rows(graph, cache)
        {name, cache}
      end)

    row = [name <> " ( #{Atom.to_string(op)} #{inspect(names)} )", "#{inspect(shape)}", 0]
    {row, cache}
  end

  # Single-parent (or parentless) node: render the parent first, then this
  # node with its total parameter element count.
  defp do_axon_to_rows(
         %Axon{op: op, params: params, parent: parent, name: name, output_shape: shape},
         cache
       ) do
    cache =
      if parent do
        {_, cache} = axon_to_rows(parent, cache)
        cache
      else
        cache
      end

    num_params =
      params
      |> Enum.reduce(0, fn {_, %Axon.Parameter{shape: shape}}, acc -> acc + Nx.size(shape) end)

    row = [name <> " ( #{Atom.to_string(op)} )", "#{inspect(shape)}", "#{num_params}"]
    {row, cache}
  end
end
## Helpers

@valid_initializers [:zeros, :ones, :uniform, :normal, :identity] ++
                      [:lecun_uniform, :lecun_normal, :he_uniform, :he_normal] ++
                      [:glorot_uniform, :glorot_normal, :variance_scaling]

# Validates that an initializer is one of the known atoms or an arity-1
# function; raises ArgumentError otherwise.
defp validate_initializer!(initializer)
     when is_atom(initializer) and initializer in @valid_initializers do
  :ok
end

defp validate_initializer!(initializer) when is_function(initializer, 1) do
  :ok
end

defp validate_initializer!(initializer) do
  # Fix: the message previously read "...initializer options got ..." --
  # missing the comma separator present in the regularizer error message.
  raise ArgumentError,
        "initializer must be one of #{inspect(@valid_initializers)}," <>
          " or an arity-1 function accepting initializer options," <>
          " got #{inspect(initializer)}"
end
@valid_regularizers [:l1, :l2, :l1l2, :none]

# A regularizer is either one of the known atoms above or any function;
# anything else raises ArgumentError.
defp validate_regularizer!(regularizer) do
  cond do
    is_atom(regularizer) and regularizer in @valid_regularizers ->
      :ok

    is_function(regularizer) ->
      :ok

    true ->
      raise ArgumentError,
            "regularizer must be one of #{inspect(@valid_regularizers)}," <>
              " or a function accepting a parameter to regularize," <>
              " got #{inspect(regularizer)}"
  end
end
# Normalizes a per-dimension option into a `rank`-element tuple: tuples are
# validated for size, integers are duplicated, anything else raises.
defp tuple_or_duplicate(key, value, rank) when is_tuple(value) do
  unless tuple_size(value) == rank do
    raise ArgumentError,
          "expected #{inspect(key)} to be a #{rank}-element tuple, " <>
            "got: #{inspect(value)}"
  end

  value
end

defp tuple_or_duplicate(_key, value, rank) when is_integer(value) do
  Tuple.duplicate(value, rank)
end

defp tuple_or_duplicate(key, value, _rank) do
  raise ArgumentError,
        "expected #{inspect(key)} to be an integer or a tuple, " <>
          "got: #{inspect(value)}"
end
# Normalizes a per-dimension option into a `rank`-element list: lists are
# validated for length, integers are duplicated, anything else raises.
defp list_or_duplicate(key, value, rank) when is_list(value) do
  unless length(value) == rank do
    raise ArgumentError,
          "expected #{inspect(key)} to be a #{rank}-element list, " <>
            "got: #{inspect(value)}"
  end

  value
end

defp list_or_duplicate(_key, value, rank) when is_integer(value) do
  List.duplicate(value, rank)
end

defp list_or_duplicate(key, value, _rank) do
  raise ArgumentError,
        "expected #{inspect(key)} to be an integer or a list, " <>
          "got: #{inspect(value)}"
end
# Produces a `{unique_id, name}` pair for a new layer. When no name is
# given, one is derived from the op type and the monotonic unique integer.
defp unique_identifiers(type, nil) do
  id = System.unique_integer([:positive, :monotonic])
  {id, "#{type}_#{id}"}
end

defp unique_identifiers(_type, name) do
  {System.unique_integer([:positive, :monotonic]), name}
end
end
|
lib/axon.ex
| 0.946312 | 0.696694 |
axon.ex
|
starcoder
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.