| code (stringlengths 114-1.05M) | path (stringlengths 3-312) | quality_prob (float64 0.5-0.99) | learning_prob (float64 0.2-1) | filename (stringlengths 3-168) | kind (stringclasses, 1 value) |
|---|---|---|---|---|---|
defmodule ExType.Type do
@moduledoc false
alias ExType.Assert
@type t ::
ExType.Type.Any.t()
| ExType.Type.None.t()
| ExType.Type.Union.t()
| ExType.Type.Intersection.t()
| ExType.Type.Protocol.t()
| ExType.Type.GenericProtocol.t()
| ExType.Type.Float.t()
| ExType.Type.Integer.t()
| ExType.Type.Atom.t()
| ExType.Type.Reference.t()
| ExType.Type.AnyFunction.t()
| ExType.Type.RawFunction.t()
| ExType.Type.TypedFunction.t()
| ExType.Type.Port.t()
| ExType.Type.PID.t()
| ExType.Type.AnyTuple.t()
| ExType.Type.TypedTuple.t()
| ExType.Type.SpecVariable.t()
# Map.Empty
| ExType.Type.Map.t()
| ExType.Type.Struct.t()
| ExType.Type.StructLikeMap.t()
# List.Empty
| ExType.Type.List.t()
| ExType.Type.BitString.t()
defmodule Any do
@moduledoc false
@type t :: %__MODULE__{}
defstruct []
end
def any, do: %Any{}
defmodule None do
@moduledoc false
@type t :: %__MODULE__{}
defstruct []
end
def none, do: %None{}
defmodule Union do
@moduledoc false
@type t :: %__MODULE__{
types: [ExType.Type.t()]
}
defstruct [:types]
end
def union(types) when is_list(types) do
Assert.is_list_of_type_structs!(types)
%Union{types: types}
end
# assert_type_struct
defmodule Intersection do
@moduledoc false
@type t :: %__MODULE__{
types: [ExType.Type.t()]
}
defstruct [:types]
end
def intersection(types) when is_list(types) do
Assert.is_list_of_type_structs!(types)
%Intersection{types: types}
end
defmodule SpecVariable do
@moduledoc false
@type t :: %__MODULE__{
name: atom(),
type: ExType.Type.t(),
spec: {atom(), atom(), pos_integer()},
id: integer()
}
defstruct [:name, :type, :spec, :id]
end
defmodule Protocol do
@moduledoc false
@type t :: %__MODULE__{
module: ExType.Type.Atom.t()
}
defstruct [:module]
end
defmodule GenericProtocol do
@moduledoc false
@type t :: %__MODULE__{
module: atom(),
generic: ExType.Type.t()
}
defstruct [:module, :generic]
end
def generic_protocol(module, generic) do
Assert.is_type_struct!(generic)
%GenericProtocol{module: module, generic: generic}
end
defmodule Float do
@moduledoc false
@type t :: %__MODULE__{}
defstruct []
end
def float, do: %Float{}
defmodule Integer do
@moduledoc false
@type t :: %__MODULE__{}
defstruct []
end
def integer, do: %Integer{}
defmodule Atom do
@moduledoc false
@type t :: %__MODULE__{
literal: boolean(),
value: atom()
}
defstruct [:literal, :value]
end
def atom, do: %Atom{}
defmodule Reference do
@moduledoc false
@type t :: %__MODULE__{}
defstruct []
end
def reference, do: %Reference{}
defmodule AnyFunction do
@moduledoc false
@type t :: %__MODULE__{}
defstruct []
end
def any_function, do: %AnyFunction{}
defmodule RawFunction do
@moduledoc false
@type t :: %__MODULE__{
arity: integer(),
clauses: [{[any()], any(), any()}],
context: ExType.Context.t(),
meta: Keyword.t()
}
defstruct [:arity, :clauses, :context, :meta]
end
defmodule TypedFunction do
@moduledoc false
@type t :: %__MODULE__{
inputs: [ExType.Type.t()],
output: ExType.Type.t()
}
defstruct [:inputs, :output]
end
defmodule List do
@moduledoc false
@type t :: %__MODULE__{
type: ExType.Type.t()
}
defstruct [:type]
end
def list(type) do
Assert.is_type_struct!(type)
%List{type: type}
end
# StructLikeMap
# Map.StructLike => it's map, not struct, but it has all atom as key,
# so it's struct like map
defmodule Map do
@moduledoc false
@type t :: %__MODULE__{
key: ExType.Type.t(),
value: ExType.Type.t()
}
defstruct [:key, :value]
end
def map(key_type, value_type) do
Assert.is_type_struct!(key_type)
Assert.is_type_struct!(value_type)
%Map{key: key_type, value: value_type}
end
# Struct and TypedStruct ?
defmodule Struct do
@moduledoc false
@type t :: %__MODULE__{
struct: atom(),
types: %{required(atom()) => ExType.Type.t()}
}
defstruct [:struct, :types]
end
defmodule StructLikeMap do
@moduledoc false
@type t :: %__MODULE__{
types: %{required(atom()) => ExType.Type.t()}
}
defstruct [:types]
end
defmodule Port do
@moduledoc false
@type t :: %__MODULE__{}
defstruct []
end
def port, do: %Port{}
defmodule PID do
@moduledoc false
@type t :: %__MODULE__{}
defstruct []
end
def pid, do: %PID{}
defmodule AnyTuple do
@moduledoc false
@type t :: %__MODULE__{}
defstruct []
end
def any_tuple, do: %AnyTuple{}
defmodule TypedTuple do
@moduledoc false
@type t :: %__MODULE__{
types: [ExType.Type.t()]
}
defstruct [:types]
end
def typed_tuple(types) when is_list(types) do
Assert.is_list_of_type_structs!(types)
%TypedTuple{types: types}
end
# TODO: distinguish bitstring and binary ???
defmodule BitString do
@moduledoc false
@type t :: %__MODULE__{}
defstruct []
end
def bit_string, do: %BitString{}
end
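# Usage sketch (illustrative, not part of the module): composing types with the
# constructor helpers above, assuming ExType.Assert accepts these built-in structs.
#
# ExType.Type.union([ExType.Type.integer(), ExType.Type.float()])
# #=> %ExType.Type.Union{types: [%ExType.Type.Integer{}, %ExType.Type.Float{}]}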
| lib/ex_type/type.ex | 0.713531 | 0.714504 | type.ex | starcoder |
defmodule Cizen.EffectHandler do
@moduledoc """
Handles effects.
"""
alias Cizen.Effect
alias Cizen.Event
alias Cizen.Request.Response
alias Cizen.SagaID
@type state :: %{
handler: SagaID.t(),
effect: Effect.t() | nil,
effect_state: term,
event_buffer: list(Event.t())
}
@type resolve :: {:resolve, term, state}
@spec init(SagaID.t()) :: state
def init(handler) do
%{handler: handler, effect: nil, effect_state: nil, event_buffer: []}
end
@spec perform_effect(state, Effect.t()) :: resolve | state
def perform_effect(state, effect) do
case Effect.init(state.handler, effect) do
{:resolve, value} ->
{:resolve, value, state}
{effect, effect_state} ->
state = %{state | effect: effect, effect_state: effect_state}
case feed_events(state, state.event_buffer) do
{:resolve, value, state, events} ->
{:resolve, value, %{state | event_buffer: events}}
{state, events} ->
%{state | event_buffer: events}
end
end
end
@spec feed_event(state, Event.t()) :: resolve | state
def feed_event(state, event) do
case state do
%{effect: nil} ->
append_to_buffer(state, event)
state ->
{resolved, value, state, events} =
case feed_events(state, [event]) do
{:resolve, value, state, events} -> {true, value, state, events}
{state, events} -> {false, nil, state, events}
end
# length must be 0 or 1
state =
if events == [] do
state
else
append_to_buffer(state, event)
end
if resolved do
{:resolve, value, state}
else
state
end
end
end
defp append_to_buffer(state, %Event{body: %Response{}}), do: state
defp append_to_buffer(state, event) do
event_buffer = state.event_buffer ++ [event]
%{state | event_buffer: event_buffer}
end
defp feed_events(state, []), do: {state, []}
defp feed_events(state, [event | tail]) do
case Effect.handle_event(state.handler, event, state.effect, state.effect_state) do
{:resolve, value} ->
state = %{state | effect: nil}
{:resolve, value, state, tail}
{:consume, effect_state} ->
state = %{state | effect_state: effect_state}
feed_events(state, tail)
effect_state ->
state = %{state | effect_state: effect_state}
{resolved, value, state, tail} =
case feed_events(state, tail) do
{:resolve, value, state, events} -> {true, value, state, events}
{state, events} -> {false, nil, state, events}
end
tail = [event | tail]
if resolved do
{:resolve, value, state, tail}
else
{state, tail}
end
end
end
end
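# Usage sketch (illustrative; `saga_id` and `effect` are assumed to come from
# the surrounding Cizen runtime, and handle/2 and wait/1 are hypothetical):
#
# state = Cizen.EffectHandler.init(saga_id)
# case Cizen.EffectHandler.perform_effect(state, effect) do
#   {:resolve, value, state} -> handle(value, state)
#   state -> wait(state)
# end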
| lib/cizen/effect_handler.ex | 0.823541 | 0.446193 | effect_handler.ex | starcoder |
defmodule Crawly do
@moduledoc """
Crawly is a fast high-level web crawling & scraping framework for Elixir.
"""
@doc """
Fetches a given url. This function is mainly used during spider development,
when you need to get individual pages and parse them.
The fetched URL is converted to a request, and the request is piped
through the middlewares specified in the config (with the exception of
`Crawly.Middlewares.DomainFilter` and `Crawly.Middlewares.RobotsTxt`).
Provide a spider with the `:with` option to fetch a given webpage using that spider.
### Fetching with a spider
To fetch a response from a url with a spider, define your spider, and pass the module name to the `:with` option.
iex> Crawly.fetch("https://www.example.com", with: MySpider)
{%HTTPoison.Response{...}, %{...}, [...], %{...}}
Using the `:with` option will return a 4-item tuple:
1. The HTTPoison response
2. The result returned from the `parse_item/1` callback
3. The list of items that have been processed by the declared item pipelines.
4. The pipeline state, included for debugging purposes.
"""
@type with_opt :: {:with, nil | module()}
@type request_opt :: {:request_options, list(Crawly.Request.option())}
@type headers_opt :: {:headers, list(Crawly.Request.header())}
@type parsed_item_result :: Crawly.ParsedItem.t()
@type parsed_items :: list(any())
@type pipeline_state :: %{optional(atom()) => any()}
@type spider :: module()
@spec fetch(url, opts) ::
HTTPoison.Response.t()
| {HTTPoison.Response.t(), parsed_item_result, parsed_items,
pipeline_state}
when url: binary(),
opts: [
with_opt
| request_opt
| headers_opt
]
def fetch(url, opts \\ []) do
opts = Enum.into(opts, %{with: nil, request_options: [], headers: []})
request0 =
Crawly.Request.new(url, opts[:headers], opts[:request_options])
|> Map.put(
:middlewares,
Crawly.Utils.get_settings(:middlewares, opts[:with], [])
)
ignored_middlewares = [
Crawly.Middlewares.DomainFilter,
Crawly.Middlewares.RobotsTxt
]
new_middlewares = request0.middlewares -- ignored_middlewares
request0 =
Map.put(
request0,
:middlewares,
new_middlewares
)
{%{} = request, _} = Crawly.Utils.pipe(request0.middlewares, request0, %{})
{:ok, {response, _}} = Crawly.Worker.get_response({request, opts[:with]})
case opts[:with] do
nil ->
# no spider provided, return response as is
response
_ ->
# spider provided, send response through parse_item callback, pipe through the pipelines
with {:ok, {parsed_result, _, _}} <-
Crawly.Worker.parse_item({response, opts[:with]}),
pipelines <-
Crawly.Utils.get_settings(
:pipelines,
opts[:with]
),
items <- Map.get(parsed_result, :items, []),
{pipeline_result, pipeline_state} <-
Enum.reduce(items, {[], %{}}, fn item, {acc, state} ->
{piped, state} = Crawly.Utils.pipe(pipelines, item, state)
if piped == false do
# dropped
{acc, state}
else
{[piped | acc], state}
end
end) do
{response, parsed_result, pipeline_result, pipeline_state}
end
end
end
@doc """
Parses a given response with a given spider, allowing you to quickly see the
outcome of the spider's `parse_item/1` implementation.
"""
@spec parse(response, spider) :: {:ok, result}
when response: Crawly.Response.t(),
spider: atom(),
result: Crawly.ParsedItem.t()
def parse(response, spider) do
case Kernel.function_exported?(spider, :parse_item, 1) do
false ->
{:error, :spider_not_found}
true ->
spider.parse_item(response)
end
end
@doc """
Returns a list of known modules which implement the Crawly.Spider behaviour.
Should not be used for spider management. Use functions defined in `Crawly.Engine` for that.
"""
@spec list_spiders() :: [module()]
def list_spiders(), do: Crawly.Utils.list_spiders()
end
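# Usage sketch (illustrative): fetching a page with and without a spider.
# MySpider is a hypothetical module implementing Crawly.Spider.
#
# response = Crawly.fetch("https://www.example.com")
# {response, parsed, items, state} = Crawly.fetch("https://www.example.com", with: MySpider)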
| lib/crawly.ex | 0.848737 | 0.50891 | crawly.ex | starcoder |
defmodule Artour.Admin do
@moduledoc """
The Admin context.
"""
import Ecto.Query, warn: false
alias Artour.Repo
alias Artour.Post
alias Artour.PostTag
alias Artour.Format
# alias Artour.Category
alias Artour.Image
# alias Artour.PostImage
@doc """
Returns the list of posts.
"""
def list_posts do
from(
p in Post,
join: category in assoc(p, :category),
join: cover_image in assoc(p, :cover_image),
preload: [category: category, cover_image: cover_image],
order_by: [desc: :id]
)
|> Repo.all
end
@doc """
Gets a single post by id
Raises `Ecto.NoResultsError` if the Post does not exist.
"""
def get_post_for_show!(id) do
from(
post in Post,
join: category in assoc(post, :category),
left_join: tag in assoc(post, :tags),
where: post.id == ^id,
preload: [category: category, tags: tag],
order_by: [tag.name]
)
|> Repo.one!
end
@doc """
Returns the list of images.
"""
def list_images do
from(
i in Image,
join: format in assoc(i, :format),
preload: [format: format],
order_by: [desc: :id]
)
|> Repo.all
end
@doc """
Returns a single image selected by image_id
"""
def get_image!(image_id) do
from(
i in Image,
join: format in assoc(i, :format),
preload: [format: format],
where: i.id == ^image_id,
limit: 1
)
|> Repo.one!
end
@doc """
Creates an image.
## Examples
iex> create_image(%{field: value})
{:ok, %Image{}}
iex> create_image(%{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def create_image(attrs \\ %{}) do
%Image{}
|> Image.changeset(attrs)
|> Repo.insert()
end
@doc """
Updates an image.
## Examples
iex> update_image(image, %{field: new_value})
{:ok, %Image{}}
iex> update_image(image, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def update_image(%Image{} = image, attrs) do
image
|> Image.changeset(attrs)
|> Repo.update()
end
@doc """
Deletes an image.
## Examples
iex> delete_image(image)
{:ok, %Image{}}
iex> delete_image(image)
{:error, %Ecto.Changeset{}}
"""
def delete_image(%Image{} = image) do
Repo.delete(image)
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking image changes.
## Examples
iex> change_image(image)
%Ecto.Changeset{source: %Image{}}
"""
def change_image(%Image{} = image) do
Image.changeset(image, %{})
end
@doc """
Deletes a single post_tag, given post_id and tag_id
"""
def delete_post_tag(post_id, tag_id) do
#use transaction to ensure only 1 row gets deleted
Repo.transaction(fn ->
{1, nil} = from(pt in PostTag, where: pt.post_id == ^post_id and pt.tag_id ==^tag_id)
|> Repo.delete_all()
end)
end
@doc """
Returns list of formats
"""
def list_formats() do
from(Format, order_by: :name)
|> Repo.all
end
end
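# Usage sketch (illustrative; the attribute maps are hypothetical):
#
# {:ok, image} = Artour.Admin.create_image(%{title: "Sunset"})
# {:ok, image} = Artour.Admin.update_image(image, %{title: "Sunrise"})
# {:ok, _image} = Artour.Admin.delete_image(image)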
| apps/artour/lib/artour/admin/admin.ex | 0.729134 | 0.416945 | admin.ex | starcoder |
defmodule Identicon do
@moduledoc """
Converts a string into an identicon.
This module uses MD5 hashing to convert the string into a binary,
which we store as a list of bytes.
The first 3 elements in the list determine the identicon's color.
The full list maps onto the identicon's first 3 x 5 cells; the last element is tossed.
Even numbers produce colored cells; odd numbers produce white cells.
To complete the 5x5 identicon grid, the first 2 columns are mirrored.
"""
@doc """
Main function that assembles the processing pipeline.
## Examples
iex> Identicon.main("Eduardo")
:ok
"""
def main(input) do
# even = color, odd = white
input
|> hash_input
|> pick_color
|> grid_constructor
|> filter_odd_squares
|> build_pixel_map
|> draw_square
|> save_image(input)
end
@doc """
Hashes the string input with MD5 and stores the resulting bytes in an Image struct.
"""
def hash_input(input) do
hex =
:crypto.hash(:md5, input)
|> :binary.bin_to_list()
%Identicon.Image{hex: hex}
end
@doc """
Takes the first 3 elements from the hex list and creates a 3-element tuple
that acts as the RGB color.
Returns a new Image struct with the color assigned.
Note that we can pattern match directly in the function head.
"""
def pick_color(%Identicon.Image{hex: hex_list} = input) do
[red, green, blue | _tail] = hex_list
# the | update syntax copies the previous struct into a new one with :color set
%Identicon.Image{
input
| color: {red, green, blue}
}
end
@doc """
Builds the grid: chunks the byte list into rows, mirrors each row, and returns
a flat, indexed list whose even/odd values decide color/white (1/0) cells.
"""
def grid_constructor(%Identicon.Image{hex: hex_list} = input) do
grid_list =
hex_list
|> Identicon.converts_to_string()
|> Enum.chunk_every(3, 3, :discard)
# passing a function reference; the /1 arity pins down which mirror_row we mean,
# which is useful when more than one mirror function exists
|> Enum.map(&Identicon.mirror_row/1)
# flatten into a single list rather than a nested one
|> List.flatten
|> Enum.map(&Identicon.converts_to_number/1)
|> Enum.with_index
%Identicon.Image{input | grid: grid_list}
end
@doc """
converts every element in a list into a string element
"""
def converts_to_string(list) do
for element <- list do
"#{element}"
end
end
@doc """
Mirrors a row: the first 2 elements are appended again in reverse order,
so [a, b, c] becomes [a, b, c, b, a].
"""
def mirror_row(row) do
[first, second | _tail] = row
# mirroring the row; ++ concatenates lists
row ++ [second, first]
end
@doc """
Converts a string list into integer list
"""
def converts_to_number(string_num) do
{number, _rest} = Integer.parse(string_num)
number
end
@doc """
Keeps only the cells whose value is even (colored); odd-valued (white)
cells are filtered out.
"""
def filter_odd_squares(%Identicon.Image{grid: grid} = input) do
# run the predicate for every {value, index} tuple in the grid
grid = Enum.filter grid, fn({value, _index}) ->
rem(value, 2) == 0
end
%Identicon.Image{input | grid: grid}
end
@doc """
Creates the pixel map: every element in the list represents a colored
cell in the final image.
250 x 250 identicon,
5 x 5 grid
50 x 50 pxls individual grid
"""
def build_pixel_map(%Identicon.Image{grid: grid} = input) do
pixel_map = Enum.map grid, fn({_value, index}) ->
# top left
horizontal = rem(index, 5) * 50
vertical = div(index, 5) * 50
# point
top_left = { horizontal, vertical }
bottom_right = {horizontal + 50, vertical + 50}
# pixel pairs
{top_left, bottom_right}
end
%Identicon.Image{input | pixel_map: pixel_map}
end
@doc """
Draws the identicon itself.
It requires the color and pixel_map from the Image struct.
"""
def draw_square(%Identicon.Image{color: color_list, pixel_map: pixel_map}) do
image = :egd.create(250, 250)
fill = :egd.color(color_list)
Enum.each pixel_map, fn({start, stop}) ->
:egd.filledRectangle(image, start, stop, fill)
end
:egd.render(image)
end
@doc """
Takes the rendered binary and saves it to the file system,
naming the image after the input string.
"""
def save_image(binary, filename) do
File.write("#{filename}.png", binary)
end
end
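# Worked example (illustrative) of the mirroring step that widens each
# 3-element row into a symmetric 5-element row:
#
# Identicon.mirror_row([145, 46, 200])
# #=> [145, 46, 200, 46, 145]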
| identicon/lib/identicon.ex | 0.868381 | 0.649155 | identicon.ex | starcoder |
require Utils
require Program
defmodule D11 do
@moduledoc """
--- Day 11: Space Police ---
On the way to Jupiter, you're pulled over by the Space Police.
"Attention, unmarked spacecraft! You are in violation of Space Law! All spacecraft must have a clearly visible registration identifier! You have 24 hours to comply or be sent to Space Jail!"
Not wanting to be sent to Space Jail, you radio back to the Elves on Earth for help. Although it takes almost three hours for their reply signal to reach you, they send instructions for how to power up the emergency hull painting robot and even provide a small Intcode program (your puzzle input) that will cause it to paint your ship appropriately.
There's just one problem: you don't have an emergency hull painting robot.
You'll need to build a new emergency hull painting robot. The robot needs to be able to move around on the grid of square panels on the side of your ship, detect the color of its current panel, and paint its current panel black or white. (All of the panels are currently black.)
The Intcode program will serve as the brain of the robot. The program uses input instructions to access the robot's camera: provide 0 if the robot is over a black panel or 1 if the robot is over a white panel. Then, the program will output two values:
First, it will output a value indicating the color to paint the panel the robot is over: 0 means to paint the panel black, and 1 means to paint the panel white.
Second, it will output a value indicating the direction the robot should turn: 0 means it should turn left 90 degrees, and 1 means it should turn right 90 degrees.
After the robot turns, it should always move forward exactly one panel. The robot starts facing up.
The robot will continue running for a while like this and halt when it is finished drawing. Do not restart the Intcode computer inside the robot during this process.
Before you deploy the robot, you should probably have an estimate of the area it will cover: specifically, you need to know the number of panels it paints at least once, regardless of color. In the example above, the robot painted 6 panels at least once. (It painted its starting panel twice, but that panel is still only counted once; it also never painted the panel it ended on.)
Build a new emergency hull painting robot and run the Intcode program on it. How many panels does it paint at least once?
--- Part Two ---
You're not sure what it's trying to paint, but it's definitely not a registration identifier. The Space Police are getting impatient.
Checking your external ship cameras again, you notice a white panel marked "emergency hull painting robot starting panel". The rest of the panels are still black, but it looks like the robot was expecting to start on a white panel, not a black one.
Based on the Space Law Space Brochure that the Space Police attached to one of your windows, a valid registration identifier is always eight capital letters. After starting the robot on a single white panel instead, what registration identifier does it paint on your hull?
"""
@behaviour Day
def run_program(_program, :halt, map, _location, _direction), do: map
def run_program(program, :block, map, {x, y}, direction) do
%Program{output: [turn | [color | _output]]} = program
# paint
map = Map.put(map, {x, y}, color)
# 0 = up, 1 = right, 2 = down, 3 = left
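# turn = 0 (left): 2 * 0 + 3 ≡ -1 (mod 4), i.e. rotate counter-clockwise;
# turn = 1 (right): 2 * 1 + 3 ≡ +1 (mod 4), i.e. rotate clockwise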
direction = rem(direction + turn * 2 + 3, 4)
location =
case direction do
0 -> {x, y - 1}
1 -> {x + 1, y}
2 -> {x, y + 1}
3 -> {x - 1, y}
end
current = Map.get(map, location, 0)
{status, program} = Program.run_blocking(%{program | input: [current]})
run_program(program, status, map, location, direction)
end
def run_program(program, initial_input) do
program = %{program | input: [initial_input]}
{:block, program} = Program.run_blocking(program)
run_program(program, :block, %{}, {0, 0}, 0)
end
def solve(input) do
input = input |> Utils.to_ints()
program = Program.new(input)
[a, b, c, d, e, f, g, h, i, j] =
input
|> Enum.with_index()
|> Enum.filter(fn {v, _i} -> v == 108 or v == 1008 end)
|> Enum.map(fn {_v, i} -> i end)
|> Enum.sort()
|> Enum.drop(-1)
|> Enum.map(fn i -> Enum.slice(input, i + 1, 2) end)
|> Enum.map(fn [a, b] -> if a == 8, do: b, else: a end)
part_1_hack =
"3,8,1005,8,325,1106,0,11,0,0,0,104,1,104,0,3,8,102,-1,8,10,1001,10,1,10,4,10,108,#{a},8,10,4,10,101,0,8,28,3,8,102,-1,8,10,101,1,10,10,4,10,1008,8,#{
b
},10,4,10,101,0,8,51,3,8,1002,8,-1,10,1001,10,1,10,4,10,108,#{c},8,10,4,10,101,0,8,72,3,8,102,-1,8,10,101,1,10,10,4,10,1008,8,#{
d
},10,4,10,1002,8,1,95,3,8,102,-1,8,10,1001,10,1,10,4,10,1008,8,#{e},10,4,10,101,0,8,117,3,8,1002,8,-1,10,101,1,10,10,4,10,108,#{
f
},8,10,4,10,1001,8,0,138,3,8,102,-1,8,10,1001,10,1,10,4,10,108,#{g},8,10,4,10,1001,8,0,160,3,8,102,-1,8,10,101,1,10,10,4,10,1008,8,#{
h
},10,4,10,102,1,8,183,3,8,102,-1,8,10,101,1,10,10,4,10,108,#{i},8,10,4,10,102,1,8,204,3,8,1002,8,-1,10,1001,10,1,10,4,10,1008,8,#{
j
},10,4,10,102,1,8,227,1006,0,74,2,1003,2,10,1,107,1,10,101,1,9,9,1007,9,1042,10,1005,10,15,99"
|> Utils.to_ints()
part_1_hacked = Program.hack(program, 0, part_1_hack)
part_1 =
part_1_hacked
|> run_program(0)
|> map_size
part_2 =
program
|> run_program(1)
|> Utils.output_to_string()
{
part_1,
part_2
}
end
end
| lib/days/11.ex | 0.742235 | 0.695454 | 11.ex | starcoder |
defmodule Sanbase.Utils.Transform do
def to_bang(result) do
case result do
{:ok, result} -> result
{:error, error} -> raise error
end
end
@doc ~s"""
Combine all the MapSets from the mapsets_list either by
taking their intersection or their union. The decision is
made based on the `:combinator` field in the opts
## Examples:
iex> Sanbase.Utils.Transform.combine_mapsets([MapSet.new([1,2,3]), MapSet.new([2,3,4,5])], combinator: "or")
MapSet.new([1,2,3,4,5])
iex> Sanbase.Utils.Transform.combine_mapsets([MapSet.new([1,2,3]), MapSet.new([2,3,4,5])], combinator: "and")
MapSet.new([2,3])
"""
def combine_mapsets(mapsets_list, opts) do
case Keyword.fetch!(opts, :combinator) do
c when c in ["or", :or] ->
mapsets_list
|> Enum.reduce(&MapSet.union(&1, &2))
c when c in ["and", :and] ->
mapsets_list
|> Enum.reduce(&MapSet.intersection(&1, &2))
end
end
@doc ~s"""
Simply wrap anything in an :ok tuple
"""
@spec wrap_ok(any()) :: {:ok, any()}
def wrap_ok(data), do: {:ok, data}
@doc ~s"""
Transform the maps from the :ok tuple data so the `old_key` is duplicated under
the `new_key` name, preserving the original value.
## Examples:
iex> Sanbase.Utils.Transform.duplicate_map_keys({:ok, [%{a: 1}, %{a: 2}]}, old_key: :a, new_key: :b)
{:ok, [%{a: 1, b: 1}, %{a: 2, b: 2}]}
iex> Sanbase.Utils.Transform.duplicate_map_keys({:ok, [%{a: 1}, %{d: 2}]}, old_key: :a, new_key: :b)
{:ok, [%{a: 1, b: 1}, %{d: 2}]}
iex> Sanbase.Utils.Transform.duplicate_map_keys({:error, "bad"}, old_key: :a, new_key: :b)
{:error, "bad"}
"""
@spec duplicate_map_keys({:ok, list(map)}, keyword(atom)) :: {:ok, list(map)}
@spec duplicate_map_keys({:error, any()}, keyword(atom)) :: {:error, any()}
def duplicate_map_keys({:ok, data}, opts) do
old_key = Keyword.fetch!(opts, :old_key)
new_key = Keyword.fetch!(opts, :new_key)
result =
data
|> Enum.map(fn
%{^old_key => value} = elem -> elem |> Map.put(new_key, value)
elem -> elem
end)
{:ok, result}
end
def duplicate_map_keys({:error, error}, _opts) do
{:error, error}
end
@doc ~s"""
Transform the maps from the :ok tuple data so the `key` duplicated with a key
named `new_key`
## Examples:
iex> Sanbase.Utils.Transform.rename_map_keys({:ok, [%{a: 1}, %{a: 2}]}, old_key: :a, new_key: :b)
{:ok, [%{b: 1}, %{b: 2}]}
iex> Sanbase.Utils.Transform.rename_map_keys({:ok, [%{a: 1}, %{d: 2}]}, old_key: :a, new_key: :b)
{:ok, [%{b: 1}, %{d: 2}]}
iex> Sanbase.Utils.Transform.rename_map_keys({:error, "bad"}, old_key: :a, new_key: :b)
{:error, "bad"}
"""
@spec rename_map_keys({:ok, list(map)}, keyword(atom())) :: {:ok, list(map)}
@spec rename_map_keys({:error, any()}, keyword(atom())) :: {:error, any()}
def rename_map_keys({:ok, data}, opts) do
old_key = Keyword.fetch!(opts, :old_key)
new_key = Keyword.fetch!(opts, :new_key)
result =
data
|> Enum.map(fn
%{^old_key => value} = elem ->
elem |> Map.delete(old_key) |> Map.put(new_key, value)
elem ->
elem
end)
{:ok, result}
end
def rename_map_keys({:error, error}, _) do
{:error, error}
end
def rename_map_keys!(map, old_keys: old_keys, new_keys: new_keys) do
old_new_keys_map = Enum.zip(old_keys, new_keys) |> Enum.into(%{})
map
|> Enum.map(fn {k, v} -> {old_new_keys_map[k] || k, v} end)
|> Enum.into(%{})
end
@doc ~s"""
Transform an :ok tuple containing a list of a single value to an :ok tuple
that unwraps the value in the list. Handles the cases of errors
or empty list.
## Examples:
iex> Sanbase.Utils.Transform.maybe_unwrap_ok_value({:ok, [5]})
{:ok, 5}
iex> Sanbase.Utils.Transform.maybe_unwrap_ok_value({:error, "error"})
{:error, "error"}
iex> Sanbase.Utils.Transform.maybe_unwrap_ok_value({:ok, 5})
** (RuntimeError) Unsupported format given to maybe_unwrap_ok_value/1: 5
"""
@spec maybe_unwrap_ok_value({:ok, any}) :: {:ok, any()} | {:error, String.t()}
@spec maybe_unwrap_ok_value({:error, any()}) :: {:error, any()}
def maybe_unwrap_ok_value({:ok, [value]}), do: {:ok, value}
def maybe_unwrap_ok_value({:ok, []}), do: {:ok, nil}
def maybe_unwrap_ok_value({:ok, value}),
do: raise("Unsupported format given to maybe_unwrap_ok_value/1: #{inspect(value)}")
def maybe_unwrap_ok_value({:error, error}), do: {:error, error}
def maybe_apply_function({:ok, list}, fun) when is_function(fun, 1),
do: {:ok, fun.(list)}
def maybe_apply_function({:error, error}, _), do: {:error, error}
@doc ~s"""
Sums the values of all keys with the same datetime
## Examples:
iex> Sanbase.Utils.Transform.sum_by_datetime([%{datetime: ~U[2019-01-01 00:00:00Z], val: 2}, %{datetime: ~U[2019-01-01 00:00:00Z], val: 3}, %{datetime: ~U[2019-01-02 00:00:00Z], val: 2}], :val)
[%{datetime: ~U[2019-01-01 00:00:00Z], val: 5}, %{datetime: ~U[2019-01-02 00:00:00Z], val: 2}]
iex> Sanbase.Utils.Transform.sum_by_datetime([], :key)
[]
"""
@spec sum_by_datetime(list(map), atom()) :: list(map)
def sum_by_datetime(data, key) do
data
|> Enum.group_by(& &1[:datetime], & &1[key])
|> Enum.map(fn {datetime, list} ->
value =
case list do
[] -> 0
[_ | _] = list -> Enum.sum(list)
end
%{:datetime => datetime, key => value}
end)
|> Enum.sort_by(&DateTime.to_unix(&1[:datetime]))
end
@doc ~s"""
Combine the values of `key` in list1 and list2 by using `func`. Entries with
the same `datetime` values are merged.
## Example
iex> Sanbase.Utils.Transform.merge_by_datetime([%{a: 3.5, datetime: ~U[2020-01-01 00:00:00Z]}, %{a: 2, datetime: ~U[2020-01-02 00:00:00Z]}], [%{a: 10, datetime: ~U[2020-01-01 00:00:00Z]}, %{a: 6, datetime: ~U[2020-01-02 00:00:00Z]}], &Kernel.*/2, :a)
[%{a: 35.0, datetime: ~U[2020-01-01 00:00:00Z]}, %{a: 12, datetime: ~U[2020-01-02 00:00:00Z]}]
"""
@spec merge_by_datetime(list(), list(), fun(), any()) :: list()
def merge_by_datetime(list1, list2, func, key) do
map = list2 |> Enum.into(%{}, fn %{datetime: dt} = item2 -> {dt, item2[key]} end)
list1
|> Enum.map(fn %{datetime: datetime} = item1 ->
value2 = Map.get(map, datetime, 0)
new_value = func.(item1[key], value2)
%{key => new_value, datetime: datetime}
end)
|> Enum.reject(&(&1[key] == 0))
end
@doc ~s"""
Transform some addresses to a name representation
"""
@spec maybe_transform_from_address(String.t()) :: String.t()
def maybe_transform_from_address("0x0000000000000000000000000000000000000000"), do: "mint"
def maybe_transform_from_address(address), do: address
def maybe_transform_to_address("0x0000000000000000000000000000000000000000"), do: "burn"
def maybe_transform_to_address(address), do: address
@doc ~s"""
Remove the `separator` inside the value of the key `key` in the map `map`
## Examples:
iex> Sanbase.Utils.Transform.remove_separator(%{a: "100,000"}, :a, ",")
%{a: "100000"}
iex> Sanbase.Utils.Transform.remove_separator(%{a: "100,000", b: "5,000"}, :a, ",")
%{a: "100000", b: "5,000"}
iex> Sanbase.Utils.Transform.remove_separator(%{a: "100,000"}, :c, ",")
%{a: "100,000"}
"""
def remove_separator(map, key, separator) do
case Map.fetch(map, key) do
:error -> map
{:ok, value} -> Map.put(map, key, String.replace(value, separator, ""))
end
end
@doc ~s"""
Takes a list of maps that must have a key named `key` whose value may not have
been computed. Such values are recognized by the `has_changed` key, which can
be either 0 or 1.
In case the value of a key is missing it is filled with the last known
value by its order in the list
## Example
iex> Sanbase.Utils.Transform.maybe_fill_gaps_last_seen({:ok, [%{a: 1, has_changed: 1}, %{a: nil, has_changed: 0}, %{a: 5, has_changed: 1}]}, :a)
{:ok, [%{a: 1}, %{a: 1}, %{a: 5}]}
iex> Sanbase.Utils.Transform.maybe_fill_gaps_last_seen({:ok, [%{a: nil, has_changed: 0}, %{a: nil, has_changed: 0}, %{a: 5, has_changed: 1}]}, :a)
{:ok, [%{a: 0}, %{a: 0}, %{a: 5}]}
iex> Sanbase.Utils.Transform.maybe_fill_gaps_last_seen({:ok, [%{a: 1, has_changed: 1}, %{a: 2, has_changed: 1}, %{a: 5, has_changed: 1}]}, :a)
{:ok, [%{a: 1}, %{a: 2}, %{a: 5}]}
iex> Sanbase.Utils.Transform.maybe_fill_gaps_last_seen({:ok, [%{a: 1, has_changed: 1}, %{a: nil, has_changed: 0}, %{a: nil, has_changed: 0}]}, :a)
{:ok, [%{a: 1}, %{a: 1}, %{a: 1}]}
"""
def maybe_fill_gaps_last_seen(result_tuple, key, unknown_previous_value \\ 0)
def maybe_fill_gaps_last_seen({:ok, values}, key, unknown_previous_value) do
result =
values
|> Enum.reduce({[], unknown_previous_value}, fn
%{has_changed: 0} = elem, {acc, last_seen} ->
elem = Map.put(elem, key, last_seen) |> Map.delete(:has_changed)
{[elem | acc], last_seen}
%{has_changed: 1} = elem, {acc, _last_seen} ->
elem = Map.delete(elem, :has_changed)
{[elem | acc], elem[key]}
end)
|> elem(0)
|> Enum.reverse()
{:ok, result}
end
def maybe_fill_gaps_last_seen({:error, error}, _key, _unknown_previous_value),
do: {:error, error}
@spec opts_to_limit_offset(page: non_neg_integer(), page_size: pos_integer()) ::
{pos_integer(), non_neg_integer()}
def opts_to_limit_offset(opts) do
page = Keyword.get(opts, :page, 1)
page_size = Keyword.get(opts, :page_size, 10)
offset = (page - 1) * page_size
{page_size, offset}
end
end
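# Usage sketch (illustrative): translating pagination options into a
# SQL-style {limit, offset} pair.
#
# Sanbase.Utils.Transform.opts_to_limit_offset(page: 2, page_size: 20)
# #=> {20, 20}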
| lib/sanbase/utils/transform.ex | 0.877503 | 0.641478 | transform.ex | starcoder |
defmodule EWallet.Web.Paginator do
@moduledoc """
The Paginator allows querying of records by page. It takes in a query, breaks the query down,
then selectively queries only records that are within the given page's scope.
"""
import Ecto.Query
alias EWalletDB.Repo
@default_per_page 10
@default_max_per_page 100
defstruct [
data: [],
pagination: %{
per_page: nil,
current_page: nil,
is_first_page: nil,
is_last_page: nil,
},
]
@doc """
Paginate a query by attempting to extract `page` and `per_page`
from the given map of attributes and returns a paginator.
Note that this function is made to allow an easy passing of user inputs
without the caller needing any knowledge of the pagination attributes
(so long as the attribute keys don't conflict). Therefore this function
expects attribute keys to be strings, not atoms.
"""
def paginate_attrs(_, %{"page" => page}) when not is_integer(page) or page < 0 do
{:error, :invalid_parameter, "`page` must be non-negative integer"}
end
def paginate_attrs(_, %{"per_page" => per_page}) when not is_integer(per_page) or per_page < 1 do
{:error, :invalid_parameter, "`per_page` must be a non-negative, non-zero integer"}
end
def paginate_attrs(queryable, attrs) do
page = Map.get(attrs, "page", 1)
per_page = get_per_page(attrs)
paginate(queryable, page, per_page)
end
# Returns the per_page number or default, but never greater than the system's defined limit
defp get_per_page(attrs) do
per_page = Map.get(attrs, "per_page", @default_per_page)
max_per_page = Application.get_env(:ewallet, :max_per_page, @default_max_per_page)
case per_page do
n when n > max_per_page -> max_per_page
_ -> per_page
end
end
@doc """
Paginate a query using the given `page` and `per_page` and returns a paginator.
"""
def paginate(queryable, page, per_page) do
{records, more_page} = fetch(queryable, page, per_page)
pagination = %{
per_page: per_page,
current_page: page,
is_first_page: page <= 1,
is_last_page: !more_page, # It's the last page if there are no more records
}
%__MODULE__{data: records, pagination: pagination}
end
@doc """
Paginate a query by explicitly specifying `page` and `per_page`
and returns a tuple of records and a flag whether there are more pages.
"""
def fetch(queryable, page, per_page) do
offset =
case page do
n when n > 0 -> (page - 1) * per_page
_ -> 0
end
limit = per_page + 1 # + 1 to see if it is the last page yet
records =
queryable
|> offset(^offset)
|> limit(^limit)
|> Repo.all
# If an extra record is found, remove last one and inform there is more.
case Enum.count(records) do
n when n > per_page ->
{List.delete_at(records, -1), true}
_ ->
{records, false}
end
end
end
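# Usage sketch (illustrative; SomeSchema stands in for any Ecto queryable):
#
# paginator = EWallet.Web.Paginator.paginate(SomeSchema, 3, 10)
# # fetches rows 21..31 (offset 20, limit 11); the extra row only signals
# # that a 4th page exists and is dropped from paginator.data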
| apps/ewallet/lib/ewallet/web/paginator.ex | 0.827793 | 0.418905 | paginator.ex | starcoder |
defmodule NewRelic.Util.AttrStore do
# This is an abstraction around an ETS table that lets us efficiently
# store and access arbitrary key->value pairs for all Transactions we are tracking
@moduledoc false
def new(table) do
:ets.new(table, [
:named_table,
:duplicate_bag,
:public,
read_concurrency: true,
write_concurrency: true
])
end
def track(table, pid) do
insert(table, {{pid, :tracking}, true})
end
def link(table, parent, child, attrs \\ []) do
root = find_root(table, parent)
extras = Enum.map(attrs, fn {key, value} -> {child, {key, value}} end)
insert(table, [
{{child, :tracking}, true},
{{child, :child_of}, root},
{{root, :root_of}, child} | extras
])
end
def add(table, pid, attrs)
when is_list(attrs) do
items = Enum.map(attrs, fn {key, value} -> {pid, {key, value}} end)
insert(table, items)
end
def incr(table, pid, attrs)
when is_list(attrs) do
items = Enum.map(attrs, fn {key, value} -> {pid, {key, {:counter, value}}} end)
insert(table, items)
end
def tracking?(table, pid) do
member?(table, {pid, :tracking})
end
def collect(table, pid) do
pids = [pid | find_children(table, pid)]
table
|> find_attributes(pids)
|> Enum.reduce(%{}, &collect_attr/2)
end
def untrack(table, pid) do
delete(table, {pid, :tracking})
end
def purge(table, pid) do
[pid | find_children(table, pid)]
|> Enum.each(fn pid ->
delete(table, pid)
delete(table, {pid, :tracking})
delete(table, {pid, :child_of})
delete(table, {pid, :root_of})
end)
end
def find_root(table, pid) do
lookup(table, {pid, :child_of})
|> case do
[{_, root} | _] -> root
[] -> pid
end
end
def find_children(table, root_pid) do
lookup(table, {root_pid, :root_of})
|> Enum.map(fn {_, child} -> child end)
end
def find_attributes(table, pids) do
Enum.flat_map(pids, fn pid ->
lookup(table, pid)
end)
end
defp collect_attr({_pid, {k, {:list, item}}}, acc), do: Map.update(acc, k, [item], &[item | &1])
defp collect_attr({_pid, {k, {:counter, n}}}, acc), do: Map.update(acc, k, n, &(&1 + n))
defp collect_attr({_pid, {k, v}}, acc), do: Map.put(acc, k, v)
defp lookup(table, term) do
:ets.lookup(table, term)
rescue
ArgumentError -> []
end
defp insert(table, term) do
:ets.insert(table, term)
rescue
ArgumentError -> false
end
defp delete(table, term) do
:ets.delete(table, term)
rescue
ArgumentError -> false
end
defp member?(table, term) do
:ets.member(table, term)
rescue
ArgumentError -> false
end
end
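# Usage sketch (illustrative; the table and attribute names are hypothetical):
#
# table = NewRelic.Util.AttrStore.new(:tx_attrs)
# NewRelic.Util.AttrStore.track(table, self())
# NewRelic.Util.AttrStore.add(table, self(), name: "/orders", status: 200)
# NewRelic.Util.AttrStore.incr(table, self(), db_call_count: 1)
# NewRelic.Util.AttrStore.collect(table, self())
# #=> %{name: "/orders", status: 200, db_call_count: 1}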
| lib/new_relic/util/attr_store.ex | 0.599954 | 0.595404 | attr_store.ex | starcoder |
defmodule GGityDemoWeb.ScatterLive do
use GGityDemoWeb, :live_view
alias GGity.Plot
import GGity.Element.{Line, Rect, Text}
@mtcars_data GGity.Examples.mtcars()
|> Enum.map(fn record ->
Enum.map(record, fn {key, value} -> {to_string(key), value} end)
|> Enum.into(%{})
end)
@default_theme [
text: [family: "Helvetica, Arial, sans-serif"],
axis_text_x: [angle: 0],
panel_background: [fill: "#EEEEEE"],
legend_key: [fill: "#EEEEEE"],
axis_line: [size: 0.5, color: nil],
axis_ticks_length: 2,
panel_grid: [color: "#FFFFFF"],
panel_grid_major: [size: 1]
]
# @mapping_params [:x, :y, :color, :color_scale_option, :shape]
# @fixed_aesthetic_params [:color, :alpha, :size]
@impl true
def mount(_params, _session, socket) do
{:ok,
assign(
socket,
mapping: %{x: "qsec", y: "mpg", color: "gear"},
scales: [color_scale_option: :cividis],
fixed_aesthetics: [alpha: 1],
theme: @default_theme
)}
end
@impl true
def handle_event("update_mapping", %{"mapping" => params}, socket) do
mapping =
for {key, value} <- params,
key != "color_scale_option",
value != "none",
do: {String.to_existing_atom(key), cast(value)},
into: %{}
color_scale_option = String.to_existing_atom(params["color_scale_option"]) || :viridis
{:noreply, assign(socket, mapping: mapping, scales: [color_scale_option: color_scale_option])}
end
@impl true
def handle_event("update_fixed", %{"fixed_aesthetics" => params}, socket) do
fixed_aesthetics =
for {key, value} <- params,
do: {String.to_existing_atom(key), cast(value)}
{:noreply, assign(socket, fixed_aesthetics: fixed_aesthetics)}
end
@impl true
def handle_event("update_theme", %{"theme" => params}, socket) do
params = for {key, value} <- params, do: {key, cast(value)}, into: %{}
theme = [
text: [family: params["base_font_family"]],
axis_line: [size: 0.5, color: params["axis_line_color"]],
axis_text_x: [angle: params["angle"]],
axis_ticks_length: params["axis_ticks_length"],
legend_key: [fill: params["legend_key_fill"]],
panel_background: [fill: params["panel_background_fill"]],
panel_grid: [color: params["panel_grid_color"]],
panel_grid_major: [size: params["panel_grid_major_size"]]
]
{:noreply, assign(socket, theme: theme)}
end
defp cast(""), do: nil
defp cast(value) do
try do
String.to_integer(value)
rescue
ArgumentError ->
case Float.parse(value) do
{float, _binary} -> float
:error -> value
end
end
end
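# Illustrative behaviour of cast/1:
# cast("") #=> nil
# cast("42") #=> 42
# cast("0.5") #=> 0.5
# cast("qsec") #=> "qsec"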
defp discrete_variable_options do
[:cyl, :am, :gear]
end
defp continuous_variable_options do
[:wt, :mpg, :qsec, :disp]
end
defp font_options do
[
Default: "Helvetica, Arial, sans-serif",
"Roboto Slab": "Roboto Slab",
Oswald: "Oswald",
serif: "serif",
"sans-serif": "sans-serif",
monospace: "monospace"
]
end
defp draw_chart(mapping, fixed_aesthetics, scales, theme) do
@mtcars_data
|> Plot.new()
|> Plot.geom_point(mapping, fixed_aesthetics)
|> Plot.scale_color_viridis(option: scales[:color_scale_option])
|> Plot.scale_size_discrete()
|> Plot.labs([{:title, "Motor Trend Statistics"} | pretty_label_list(mapping)])
|> Plot.theme(
text: element_text(family: theme[:text][:family]),
axis_line: element_line(theme[:axis_line]),
axis_text_x: element_text(theme[:axis_text_x]),
axis_ticks_length: theme[:axis_ticks_length],
panel_background: element_rect(theme[:panel_background]),
legend_key: element_rect(theme[:legend_key]),
panel_grid_major: element_line(theme[:panel_grid_major]),
panel_grid: element_line(theme[:panel_grid])
)
|> Plot.plot()
end
defp generated_code(mapping, fixed_aesthetics, scales, theme) do
~s"""
alias GGity.{Examples, Plot}
import GGity.Element.{Line, Rect, Text}
Examples.mtcars()
|> Plot.new(#{code_for_x_y(mapping)})
|> Plot.geom_point(#{code_for_geom(mapping, fixed_aesthetics)})
#{code_for_color_scale(scales, mapping)}
#{code_for_labels()}
#{code_for_theme(theme)}
|> Plot.plot()
"""
|> String.replace("\n\n", "\n")
|> Makeup.highlight()
end
defp code_for_x_y(mapping) do
"%{x: :#{mapping.x}, y: :#{mapping.y}}"
end
defp code_for_geom(mapping, fixed_aesthetics) when map_size(mapping) == 1 do
code_for_fixed_aes(fixed_aesthetics)
end
defp code_for_geom(mapping, alpha: 1) do
code_for_mapped_aes(mapping)
end
defp code_for_geom(mapping, fixed_aesthetics) do
actually_fixed_aesthetics =
for {key, value} <- fixed_aesthetics,
key not in Map.keys(mapping),
{key, value} != {:alpha, 1},
{key, value} != {:size, 4},
do: {key, value}
case actually_fixed_aesthetics do
[] ->
code_for_mapped_aes(mapping)
actually_fixed_aesthetics ->
Enum.join(
[code_for_mapped_aes(mapping), code_for_fixed_aes(actually_fixed_aesthetics)],
", "
)
end
end
defp code_for_mapped_aes(mapping) do
mapped =
mapping
|> Map.drop([:x, :y])
|> inspect()
|> String.replace(": \"", ": :")
|> String.replace("\"}", "}")
|> String.replace("\", ", ", ")
case mapped do
"%{}" -> ""
mapped -> mapped
end
end
defp code_for_fixed_aes([]), do: ""
defp code_for_fixed_aes(alpha: 1), do: ""
defp code_for_fixed_aes(fixed_aesthetics) do
fixed_aesthetics
|> List.delete({:alpha, 1})
|> inspect()
|> strip_list_brackets()
end
defp code_for_color_scale([color_scale_option: :viridis], _mapping), do: ""
defp code_for_color_scale(scales, mapping) when is_map_key(mapping, :color) do
"|> Plot.scale_color_viridis(option: :#{to_string(scales[:color_scale_option])})"
end
defp code_for_color_scale(_scales, _mapping), do: ""
defp code_for_labels() do
"""
|> Plot.labs(title: "Motor Trend Statistics")
"""
|> String.trim()
end
defp strip_list_brackets("[" <> printed_list) do
String.trim(printed_list, "]")
end
defp strip_list_brackets(not_a_list), do: not_a_list
def pretty_label(variable), do: variable
defp pretty_label_list(mapping) do
Enum.map(mapping, fn {aesthetic, variable} ->
{aesthetic, pretty_label(variable)}
end)
end
# defp pretty_labels(mapping) do
# mapping
# |> Enum.reverse()
# |> Enum.map_join(",\n ", fn {key, value} ->
# "#{Atom.to_string(key)}: \"#{pretty_label(value)}\""
# end)
# end
defp code_for_theme(theme) do
custom_theme_elements =
for element <- theme, code_for_element(element) != "", do: code_for_element(element)
case custom_theme_elements do
[] -> ""
elements -> "|> Plot.theme(\n " <> Enum.join(elements, ", \n ") <> "\n)"
end
end
defp code_for_element({key, options} = element) when is_list(options) do
changed = for option <- options, option not in @default_theme[key], do: option
if same_as_default?(element) or is_nil(changed) do
""
else
"#{key}: #{element_wrapper({key, changed})}"
end
end
defp code_for_element({key, options} = element) do
if same_as_default?(element) or is_nil(options) do
""
else
"#{key}: #{element_wrapper(element)}"
end
end
defp element_wrapper({key, value}) do
case key do
:panel_background -> "element_rect(#{list_without_brackets(value)})"
:legend_key -> "element_rect(#{list_without_brackets(value)})"
:text -> "element_text(#{list_without_brackets(value)})"
:axis_text_x -> "element_text(#{list_without_brackets(value)})"
_key when is_list(value) -> "element_line(#{list_without_brackets(value)})"
_key -> value
end
end
defp list_without_brackets(list) do
list
|> inspect()
|> strip_list_brackets()
end
defp same_as_default?({key, value}) do
@default_theme[key] == value
end
end
| lib/ggity_demo_web/live/scatter_live.ex | 0.709221 | 0.450843 | scatter_live.ex | starcoder |
defrecord EEx.State, engine: EEx.SmartEngine, dict: [], file: 'nofile', line: 1, start_line: 1
defmodule EEx.Compiler do
@moduledoc false
@doc """
This is the compilation entry point. It glues the tokenizer
and the engine together by handling the tokens and invoking
the engine every time a full expression or text is received.
"""
def compile(source, options) do
line = Keyword.get(options, :line, 1)
tokens = EEx.Tokenizer.tokenize(source, line)
state = EEx.State.new(options)
generate_buffer(tokens, "", [], state)
end
# Generates the buffers by handling each expression from the tokenizer
defp generate_buffer([{ :text, _line, chars }|t], buffer, scope, state) do
buffer = state.engine.handle_text(buffer, chars)
generate_buffer(t, buffer, scope, state)
end
defp generate_buffer([{ :expr, line, mark, chars }|t], buffer, scope, state) do
expr = maybe_block :elixir_translator.forms!(chars, line, state.file, [])
buffer = state.engine.handle_expr(buffer, mark, expr)
generate_buffer(t, buffer, scope, state)
end
defp generate_buffer([{ :start_expr, line, mark, chars }|t], buffer, scope, state) do
{ contents, t } = generate_buffer(t, "", [chars|scope], state.dict([]).line(line).start_line(line))
buffer = state.engine.handle_expr(buffer, mark, contents)
generate_buffer(t, buffer, scope, state.dict([]))
end
defp generate_buffer([{ :middle_expr, line, _, chars }|t], buffer, [current|scope], state) do
{ wrapped, state } = wrap_expr(current, line, buffer, chars, state)
generate_buffer(t, "", [wrapped|scope], state.line(line))
end
defp generate_buffer([{ :end_expr, line, _, chars }|t], buffer, [current|_], state) do
{ wrapped, state } = wrap_expr(current, line, buffer, chars, state)
tuples = maybe_block :elixir_translator.forms!(wrapped, state.start_line, state.file, [])
buffer = insert_quotes(tuples, state.dict)
{ buffer, t }
end
defp generate_buffer([{ :end_expr, line, _, chars }|_], _buffer, [], _state) do
raise EEx.SyntaxError, message: "unexpected token: #{inspect chars} at line #{inspect line}"
end
defp generate_buffer([], buffer, [], _state) do
buffer
end
defp generate_buffer([], _buffer, _scope, _state) do
raise EEx.SyntaxError, message: "unexpected end of string. expecting a closing <% end %>."
end
# Creates a placeholder and wraps it inside the expression block
defp wrap_expr(current, line, buffer, chars, state) do
new_lines = List.duplicate(?\n, line - state.line)
if state.dict == [] and is_empty?(buffer) do
{ current ++ new_lines ++ chars, state }
else
key = length(state.dict)
placeholder = '__EEX__(' ++ integer_to_list(key) ++ ');'
{ current ++ placeholder ++ new_lines ++ chars, state.update_dict([{key, buffer}|&1]) }
end
end
# Check if the syntax node represents an empty string
defp is_empty?(bin) when is_binary(bin) do
bc(<<c>> inbits bin, not c in [?\s, ?\t, ?\r, ?\n], do: <<c>>) == ""
end
defp is_empty?({ :<>, _, [left, right] }) do
is_empty?(left) and is_empty?(right)
end
defp is_empty?(_) do
false
end
# Block wrapping
defp maybe_block([]), do: nil
defp maybe_block([h]), do: h
defp maybe_block(other), do: { :__block__, [], other }
# Changes placeholder to real expression
defp insert_quotes({ :__EEX__, _, [key] }, dict) do
{ ^key, value } = List.keyfind dict, key, 0
value
end
defp insert_quotes({ left, line, right }, dict) do
{ insert_quotes(left, dict), line, insert_quotes(right, dict) }
end
defp insert_quotes({ left, right }, dict) do
{ insert_quotes(left, dict), insert_quotes(right, dict) }
end
defp insert_quotes(list, dict) when is_list(list) do
Enum.map list, insert_quotes(&1, dict)
end
defp insert_quotes(other, _dict) do
other
end
end
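# Usage sketch (illustrative; this is the pre-1.0 internal API shown above):
#
# quoted = EEx.Compiler.compile("Hello <%= name %>!", line: 1)
# # `quoted` is the Elixir AST produced by the engine's handle_text/handle_expr
# # callbacks; callers such as EEx.function_from_string evaluate or compile it.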
| lib/eex/lib/eex/compiler.ex | 0.709623 | 0.575409 | compiler.ex | starcoder |
defmodule ExDgraph.Expr.Uid do
@moduledoc """
Helper functions to deal with uids. Not in use yet. Intended for query builder.
https://docs.dgraph.io/query-language/#uid
## Syntax Examples:
q(func: uid(<uid>))
predicate @filter(uid(<uid1>, ..., <uidn>))
predicate @filter(uid(a)) for variable a
q(func: uid(a,b)) for variables a and b
"""
alias ExDgraph.Expr.Uid
alias ExDgraph.Utils
defstruct [
:value,
:type
]
defmacro __using__(_) do
quote do
def uid(value) do
ExDgraph.Expr.Uid.new(value)
end
end
end
@types [
:literal,
:expression
]
@doc false
def new(value) when is_binary(value) do
new(value, :literal)
end
@doc false
def new(value) when is_atom(value) do
new(value, :expression)
end
@doc false
def new(uids) when is_list(uids) do
# lists of uid literals are rendered inside a `uid(<uids_here>)` function (as in @filter)
# lists of uid variables are rendered inside a `uid(<uids_here>)` function (as in @filter)
# therefore any list is a uid expression
new(uids, :expression)
end
@doc false
def new(value, type)
when (is_atom(value) or is_binary(value) or is_list(value)) and type in @types do
%Uid{
value: value,
type: type
}
end
@doc false
def as_expression(%Uid{} = u) do
%{u | type: :expression}
end
@doc false
def as_literal(%Uid{} = u) do
%{u | type: :literal}
end
@doc false
def as_naked(%Uid{} = u) do
%{u | type: :naked}
end
@doc false
def render(%Uid{value: value}) when is_atom(value) do
render_expression([value])
end
@doc false
def render(%Uid{value: value, type: :literal}) when is_binary(value) do
{:ok, uid_literal} = Utils.as_literal(value, :uid)
uid_literal
end
@doc false
def render(%Uid{value: value, type: :naked}) when is_binary(value) do
value
end
@doc false
def render(%Uid{value: value, type: :expression}) when is_atom(value) or is_binary(value) do
render_expression([value])
end
@doc false
def render(%Uid{value: value, type: :expression}) when is_list(value) do
render_expression(value)
end
@doc false
defp render_expression(uids) when is_list(uids) do
args =
uids
|> Enum.map(&to_string/1)
|> Enum.join(", ")
"uid(" <> args <> ")"
end
end
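# Usage sketch (illustrative): atoms are treated as query variables and
# rendered inside a uid(...) expression.
#
# ExDgraph.Expr.Uid.new(:a) |> ExDgraph.Expr.Uid.render()
# #=> "uid(a)"
# ExDgraph.Expr.Uid.new([:a, :b]) |> ExDgraph.Expr.Uid.render()
# #=> "uid(a, b)"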
defimpl String.Chars, for: ExDgraph.Expr.Uid do
def to_string(uid) do
ExDgraph.Expr.Uid.render(uid)
end
end
# Source https://github.com/elbow-jason/dgraph_ex
# Copyright (c) 2017 <NAME>
| lib/exdgraph/expr/uid.ex | 0.786869 | 0.562717 | uid.ex | starcoder |
import TypeClass
defclass Witchcraft.Ord do
@moduledoc ~S"""
`Ord` describes how to order elements of a data type.
This is a total order, so all elements are either `:equal`, `:greater`, or `:lesser`
than each other.
## Type Class
An instance of `Witchcraft.Ord` must also implement `Witchcraft.Setoid`,
and define `Witchcraft.Ord.compare/2`.
Setoid [equivalent?/2]
↓
Ord [compare/2]
"""
extend Witchcraft.Setoid
@type t :: any()
@type ordering :: :lesser | :equal | :greater
alias __MODULE__
import Kernel, except: [<: 2, >: 2, <=: 2, >=: 2]
defmacro __using__(opts \\ []) do
{:ok, new_opts} =
Keyword.get_and_update(opts, :except, fn except ->
{:ok, [<: 2, >: 2, <=: 2, >=: 2] ++ (except || [])}
end)
if Access.get(opts, :override_kernel, true) do
quote do
import Kernel, unquote(new_opts)
use Witchcraft.Semigroupoid, unquote(opts)
import unquote(__MODULE__), unquote(opts)
end
else
quote do
use Witchcraft.Semigroupoid, unquote(opts)
import unquote(__MODULE__), unquote(new_opts)
end
end
end
where do
@doc """
Get the ordering relationship between two elements.
Possible results are `:lesser`, `:equal`, and `:greater`
## Examples
iex> compare(1, 1)
:equal
iex> compare([1], [2])
:lesser
iex> compare([1, 2], [3])
:lesser
iex> compare([3, 2, 1], [1, 2, 3, 4, 5])
:greater
"""
@spec compare(Ord.t(), Ord.t()) :: Ord.ordering()
def compare(ord_a, ord_b)
end
properties do
def reflexivity(data) do
a = generate(data)
comparison = Ord.compare(a, a)
equal?(comparison, :equal) or equal?(comparison, :lesser)
end
def transitivity(data) do
x = generate(data)
y = generate(data)
z = generate(data)
x_y = Ord.compare(x, y)
y_z = Ord.compare(y, z)
x_z = Ord.compare(x, z)
if x_y != :greater and y_z != :greater do
equal?(x_z, :lesser) or equal?(x_z, :equal)
else
true
end
end
def antisymmetry(data) do
a = generate(data)
b = generate(data)
a_b = Ord.compare(a, b)
b_a = Ord.compare(b, a)
if a_b != :greater and b_a != :greater, do: a_b == :equal, else: true
end
end
@doc """
Determine if two elements are `:equal`.
## Examples
iex> equal?(1, 1.0)
true
iex> equal?(1, 2)
false
"""
@spec equal?(Ord.t(), Ord.t()) :: boolean()
def equal?(a, b), do: compare(a, b) == :equal
@doc """
Determine if an element is `:greater` than another.
## Examples
iex> greater?(1, 1)
false
iex> greater?(1.1, 1)
true
"""
@spec greater?(Ord.t(), Ord.t()) :: boolean()
def greater?(a, b), do: compare(a, b) == :greater
defalias a > b, as: :greater?
@doc """
Determine if an element is `:lesser` than another.
## Examples
iex> lesser?(1, 1)
false
iex> lesser?(1, 1.1)
true
"""
@spec lesser?(Ord.t(), Ord.t()) :: boolean()
def lesser?(a, b), do: compare(a, b) == :lesser
defalias a < b, as: :lesser?
@doc """
Determine if an element is `:lesser` or `:equal` to another.
## Examples
iex> use Witchcraft.Ord
...> 1 <= 2
true
...> [] <= [1, 2, 3]
false
...> [1] <= [1, 2, 3]
true
...> [4] <= [1, 2, 3]
false
"""
# credo:disable-for-next-line Credo.Check.Warning.OperationOnSameValues
@spec Ord.t() <= Ord.t() :: boolean()
def a <= b, do: compare(a, b) != :greater
@doc """
Determine if an element is `:greater` or `:equal` to another.
## Examples
iex> use Witchcraft.Ord
...> 2 >= 1
true
...> [1, 2, 3] >= []
true
...> [1, 2, 3] >= [1]
true
...> [1, 2, 3] >= [4]
false
"""
# credo:disable-for-next-line Credo.Check.Warning.OperationOnSameValues
@spec Ord.t() >= Ord.t() :: boolean()
def a >= b, do: compare(a, b) != :lesser
end
| lib/witchcraft/ord.ex | 0.896109 | 0.669454 | ord.ex | starcoder |
defmodule CloudEvents.Event do
@moduledoc """
Module that defines a struct and functions for handling CloudEvents.
Currently implements the 1.0 specversion of CloudEvents.
https://github.com/cloudevents/spec/blob/v1.0/spec.md
"""
@specversion "1.0"
defstruct [
:id,
:source,
:type,
:datacontenttype,
:data,
:dataschema,
:subject,
:time,
specversion: "1.0",
extensions: %{},
encoding_fn: nil
]
use Accessible
import CloudEvents.Encoding
@doc """
Returns a new, uninitialized `%CloudEvents.Event{}` struct.
"""
@spec new() :: %CloudEvents.Event{}
def new(), do: %CloudEvents.Event{encoding_fn: &identity_encoding/1}
@doc """
Returns the CloudEvents `id` attribute for this event.
"""
@spec id(%CloudEvents.Event{}) :: String.t()
def id(%CloudEvents.Event{id: id}), do: id
@doc """
Modifies the passed in CloudEvent to have the new specified `id` attribute.
"""
@spec with_id(%CloudEvents.Event{}, String.t()) :: %CloudEvents.Event{}
def with_id(event, id), do: %{event | id: id}
@doc """
Returns the CloudEvents `source` attribute as a string.
"""
@spec source(%CloudEvents.Event{}) :: String.t()
def source(%CloudEvents.Event{source: source}), do: source
def with_source(event, source) when is_map(source) do
source_str = URI.to_string(source)
%{event | source: source_str}
end
def with_source(event, source), do: %{event | source: source}
@doc """
Returns the CloudEvents `source` attribute as a parsed `%URI{}`.
"""
@spec source_uri(%CloudEvents.Event{}) :: %URI{}
def source_uri(%CloudEvents.Event{source: source}), do: URI.parse(source)
@doc """
Returns the CloudEvents `type` attribute.
"""
@spec type(%CloudEvents.Event{}) :: String.t()
def type(%CloudEvents.Event{type: type}), do: type
@spec with_type(%CloudEvents.Event{}, String.t()) :: %CloudEvents.Event{}
def with_type(event, type), do: %{event | type: type}
@spec specversion(%CloudEvents.Event{}) :: String.t()
def specversion(%CloudEvents.Event{specversion: sv}), do: sv
@spec with_specversion(%CloudEvents.Event{}, String.t()) :: %CloudEvents.Event{}
def with_specversion(event = %CloudEvents.Event{}, sv) do
%{event | specversion: sv}
end
@doc """
Returns the CloudEvents `datacontenttype` attribute.
If this attribute is not present, nil is returned.
"""
@spec datacontenttype(%CloudEvents.Event{}) :: nil | String.t()
def datacontenttype(%CloudEvents.Event{datacontenttype: dct}), do: dct
@spec with_datacontenttype(%CloudEvents.Event{}, String.t()) :: %CloudEvents.Event{}
def with_datacontenttype(event, dct), do: %{event | datacontenttype: dct}
@doc """
Returns the CloudEvents 'data' attribute without decoding.
If this attribute is not present, nil is returned.
"""
@spec data(%CloudEvents.Event{}) :: nil | any()
def data(%CloudEvents.Event{data: data}), do: data
def data_encoded(%CloudEvents.Event{encoding_fn: fun, data: data}) when is_nil(fun) do
data
end
def data_encoded(%CloudEvents.Event{encoding_fn: fun, data: data}) do
fun.(data)
end
@doc """
Sets the data payload with no encoding function
"""
@spec with_data(%CloudEvents.Event{}, any()) :: %CloudEvents.Event{}
def with_data(event, data), do: %{event | data: data}
@doc """
Encodes the set data as a JSON object when sending.
"""
@spec with_data_json_encoding(%CloudEvents.Event{}) :: %CloudEvents.Event{}
def with_data_json_encoding(event) do
%{event | datacontenttype: "application/json", encoding_fn: &json_encoding/1}
end
def with_data_binary_encoding(event) do
%{event | datacontenttype: "text/plain", encoding_fn: &base64_encoding/1}
end
@doc """
Returns the CloudEvents `datacontenttype` attribute as parsed by
`ContentType.content_type`
"""
@spec content_type(%CloudEvents.Event{}) ::
{:ok, type :: binary, subtype :: binary, ContentType.params()} | :error
def content_type(%CloudEvents.Event{datacontenttype: dct}) do
ContentType.content_type(dct)
end
@doc """
Returns the CloudEvents `dataschema` attribute as a string.
"""
@spec dataschema(%CloudEvents.Event{}) :: String.t()
def dataschema(%CloudEvents.Event{dataschema: schema}), do: schema
@spec with_dataschema(%CloudEvents.Event{}, String.t()) :: %CloudEvents.Event{}
def with_dataschema(event, dataschema), do: %{event | dataschema: dataschema}
@doc """
Returns the CloudEvents `dataschema` attribute as a parsed `%URI{}`
"""
@spec dataschema_uri(%CloudEvents.Event{}) :: %URI{}
def dataschema_uri(%CloudEvents.Event{dataschema: schema}), do: URI.parse(schema)
def subject(%CloudEvents.Event{subject: subject}), do: subject
def with_subject(event, subject), do: %{event | subject: subject}
def time(%CloudEvents.Event{time: time}), do: time
def datetime(%CloudEvents.Event{time: time}) do
case DateTime.from_iso8601(time) do
{:ok, dt, _} -> {:ok, dt}
{:error, reason} -> {:error, reason}
end
end
def with_time(event, dt), do: %{event | time: DateTime.to_iso8601(dt)}
def with_time_now(event) do
%{event | time: DateTime.to_iso8601(DateTime.utc_now())}
end
@spec extensions(%CloudEvents.Event{}) :: nil | map()
def extensions(%CloudEvents.Event{extensions: ext}), do: ext
def delete_extension(event = %CloudEvents.Event{extensions: ext}, extension) do
%{event | extensions: Map.delete(ext, extension)}
end
@doc """
Returns an extension attribute by key. All extensions are returned as their
`String.t()` representation. `nil` is returned if the extension attribute
is not present.
"""
@spec get_extension(%CloudEvents.Event{}, String.t()) :: nil | String.t()
def get_extension(%CloudEvents.Event{extensions: ext}, key) do
Map.get(ext, key)
end
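@doc """
Adds an extension attribute to the event. The extension name must be a
lowercase alphanumeric string and the value must be a valid string;
otherwise `{:error, reason}` is returned instead of the updated event.
"""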
def with_extension(event = %CloudEvents.Event{extensions: ext}, extension, value) do
if valid_extension?(extension) do
if String.valid?(value) do
%{event | extensions: Map.put(ext, extension, value)}
else
{:error, "Invalid extension attribute value, must be a string"}
end
else
{:error, "Invalid extension attribute name"}
end
end
@doc """
Determines if a given `%Event{}` struct represents a valid CloudEvent.
If the `%Event{}` is valid, `:ok` is returned, otherwise `{:error, reasons}`
is returned, where `reasons` is a list of error messages.
"""
@spec validate(%CloudEvents.Event{}) :: :ok | {:error, [String.t()]}
def validate(event) do
errors =
[]
|> validate_id(event)
|> validate_source(event)
|> validate_specversion(event)
|> validate_type(event)
|> validate_datacontenttype(event)
|> validate_dataschema(event)
|> validate_subject(event)
case length(errors) do
0 -> :ok
_ -> {:error, errors}
end
end
@doc """
Simple true/false evaluation of if an `%Event{}` struct represents a valid
CloudEvent. Specific error messages are not returned.
"""
@spec valid?(%CloudEvents.Event{}) :: boolean()
def valid?(event), do: :ok == validate(event)
## Internal methods for validation.
defp valid_extension?(ext) when is_nil(ext), do: false
defp valid_extension?(ext) do
if String.valid?(ext) do
String.match?(ext, ~r/^([[:lower:]]|[[:digit:]])+$/u)
else
false
end
end
defp validate_id(errors, %CloudEvents.Event{id: id}) when is_nil(id) do
errors ++ ["CloudEvents attribute `id` must be present"]
end
defp validate_id(errors, %CloudEvents.Event{id: id}) do
validate_string(errors, id, "CloudEvents attribute `id` must be a non-empty string")
end
defp validate_source(errors, %CloudEvents.Event{source: source}) when is_nil(source) do
errors ++ ["CloudEvents attribute `source` must be present"]
end
defp validate_source(errors, %CloudEvents.Event{source: source}) do
validate_string(errors, source, "CloudEvents attribute `source` must be a non-empty string")
end
defp validate_specversion(errors, %CloudEvents.Event{specversion: sv}) when is_nil(sv) do
errors ++ ["CloudEvents attribute `specversion` must be present"]
end
defp validate_specversion(errors, %CloudEvents.Event{specversion: sv}) do
if sv == @specversion do
errors
else
errors ++ ["CloudEvents attribute `specversion` must have the value of `1.0`"]
end
end
defp validate_type(errors, %CloudEvents.Event{type: type}) when is_nil(type) do
errors ++ ["CloudEvents attribute `type` must be present"]
end
defp validate_type(errors, %CloudEvents.Event{type: type}) do
validate_string(errors, type, "CloudEvents attribute `type` must be a non-empty string")
end
defp validate_datacontenttype(errors, %CloudEvents.Event{datacontenttype: dct})
when is_nil(dct) do
errors
end
defp validate_datacontenttype(errors, %CloudEvents.Event{datacontenttype: dct}) do
if !String.valid?(dct) || !valid_contenttype?(dct) do
errors ++
["CloudEvents attribute `datacontenttype` must be a valid RFC 2046 string if present."]
else
errors
end
end
defp valid_contenttype?(content_type) do
case ContentType.content_type(content_type) do
{:ok, _, _, _} -> true
_ -> false
end
end
defp validate_dataschema(errors, %CloudEvents.Event{dataschema: schema}) when is_nil(schema) do
errors
end
defp validate_dataschema(errors, %CloudEvents.Event{dataschema: schema}) do
validate_string(
errors,
schema,
"CloudEvents attribute `dataschema`, if present, must be a non-empty URI string"
)
end
defp validate_subject(errors, %CloudEvents.Event{subject: subject}) when is_nil(subject) do
errors
end
defp validate_subject(errors, %CloudEvents.Event{subject: subject}) do
validate_string(
errors,
subject,
"CloudEvents attribute `subject`, if present, must be a non-empty string"
)
end
defp validate_string(errors, str, message) do
if String.valid?(str) && String.length(str) > 0 do
errors
else
errors ++ [message]
end
end
end
|
lib/cloudevents/event.ex
| 0.92361 | 0.420064 |
event.ex
|
starcoder
|
defmodule Extractly.Xtra do
alias Extractly.Messages, as: M
alias Extractly.Toc.Options
@moduledoc """
This wraps `Extractly's` API by putting all messages to be logged to the
`Extractly.Messages` module.
Its primary use case is for `Mix.Tasks.Xtra`, which will pass this module
as a param into the `EEx` template.
The general idea is
```elixir
Extractly.Messages.start_agent
process_input_template # which will collect messages by means of this module's API
Extractly.Messages.get_all |> emit_messages(options)
```
The wrapping works as follows
```elixir
def some_function(some_params) do
case Extractly.some_function(some_params) do
{:ok, result} -> result
{:error, message} -> add_message_to_messages_and_return_html_comment_describing_the_error()
end
end
```
"""
@doc ~S"""
Just a delegator to `Extractly.do_not_edit_warning`
As there can be no error condition, no wrapping is needed
iex(1)> do_not_edit_warning()
"<!--\nDO NOT EDIT THIS FILE\nIt has been generated from a template by Extractly (https://github.com/RobertDober/extractly.git)\nand any changes you make in this file will most likely be lost\n-->"
"""
defdelegate do_not_edit_warning(opts \\ []), to: Extractly
@doc false
defdelegate version, to: Extractly
@doc ~S"""
Wraps call to `Extractly.functiondoc` as described above
iex(2)> functiondoc(["Support.Module2.function/0", "Support.Module1.hello/0"])
"A function\nA nice one\n\nFunctiondoc of Module1.hello\n"
"""
def functiondoc(name, opts \\ []) do
M.add_debug("functiondoc called for #{name} #{inspect opts}")
Extractly.functiondoc(name, opts)
|> _split_outputs([])
end
@doc ~S"""
Wraps call to `Extractly.moduledoc` as described above
iex(3)> moduledoc("Support.Module2")
"<!-- module Support.Module2 does not have a moduledoc -->"
"""
def moduledoc(name, opts \\ []) do
M.add_debug("moduledoc called for #{name} #{inspect opts}")
case Extractly.moduledoc(name, opts) do
{:ok, result} -> result
{:error, message} -> _add_error(message)
list -> _split_outputs(list, [])
end
end
@doc ~S"""
Wraps a call to `Extractly.toc`
If a filename is provided the TOC is extracted from the filename and inserted into the document
However, if `:self` is provided as an argument, a placeholder is inserted into the document
which allows the `Mix` task `xtra` to replace the placeholder with the TOC created from the
very same document
iex(4)> toc(:self)
"<!---- Extractly Self TOC #{inspect %Extractly.Toc.Options{}} ---->"
iex(5)> toc("test/fixtures/toc-short.md", min_level: 2)
"- Level2 1.first\n- Level2 1.second\n - Level3 1.second(i)"
Errors are handled with comments and output to stderr, as usual
iex(5)> toc("surelythisfiledoesnotexist.eajeza963q1hf")
"<!-- could not read from \"surelythisfiledoesnotexist.eajeza963q1hf\" -->"
"""
def toc(filename_or_self, opts \\ [])
def toc(:self, opts) do
M.add_debug("toc called for :self #{inspect opts}")
Extractly.Toc.placeholder(Options.new!(opts))
end
def toc(name, opts) do
M.add_debug("toc called for #{name} #{inspect opts}")
case Extractly.toc(name, opts) do
{:ok, result} -> result |> Enum.join("\n")
{:error, message} -> _add_error(message)
end
end
defp _add_error(message) do
M.add_error(message)
"<!-- #{message} -->"
end
defp _split_outputs(fdoc_tuples, result)
defp _split_outputs([], result), do: result |> Enum.reverse |> Enum.join("\n")
defp _split_outputs([{:error, message}|rest], result) do
_add_error(message)
_split_outputs(rest, result)
end
defp _split_outputs([{:ok, doc}|rest], result) do
_split_outputs(rest, [doc|result])
end
end
|
lib/extractly/xtra.ex
| 0.78838 | 0.782455 |
xtra.ex
|
starcoder
|
defmodule Eunomo.Formatter.AlphabeticalExpressionSorter do
@moduledoc false
# This module is only meant to be used for alias & import expressions!
alias Eunomo.LineMap
defmodule LineModificationConflict do
@moduledoc false
defexception message: """
Please report a bug.
Same line got mapped twice in `Eunomo.Formatter.AlphabeticalExpressionSorter`.
"""
end
@spec format(LineMap.t(), atom) :: LineMap.t()
def format(line_map, expression_atom) when is_map(line_map) and is_atom(expression_atom) do
{_, res} =
line_map
|> LineMap.to_quoted()
|> Macro.postwalk(%{}, fn
{:__block__, _, _} = triple, acc ->
acc =
Map.merge(
acc,
ast_block_to_modifications(triple, expression_atom),
fn _, _, _ -> raise LineModificationConflict end
)
{triple, acc}
triple, acc ->
{triple, acc}
end)
line_map
|> Enum.map(fn {line_number, line} ->
line_number = Map.get(res, line_number, line_number)
{line_number, line}
end)
|> Enum.into(%{})
end
# Splits into expression blocks:
# - separated by a blank line (more than one newline between expressions)
# - separated by a non-"split" expression
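#
# For example (an informal sketch, with `:alias` as the split expression):
#
#   alias B
#   alias A
#   <blank line>
#   alias C
#
# yields two expression blocks, one holding B and A and one holding C,
# which are then sorted independently.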
@spec split_into_expression_blocks([Macro.t()], atom) :: [Macro.t()]
defp split_into_expression_blocks(args, split_expression) do
%{expression_blocks: expression_blocks, current: current} =
Enum.reduce(args, %{expression_blocks: [], current: []}, fn
{^split_expression, [{:end_of_expression, [{:newlines, newline} | _]} | _], _} = element,
acc ->
if newline > 1 do
%{expression_blocks: [[element | acc.current] | acc.expression_blocks], current: []}
else
%{expression_blocks: acc.expression_blocks, current: [element | acc.current]}
end
{^split_expression, _, _} = element, acc ->
%{expression_blocks: acc.expression_blocks, current: [element | acc.current]}
_, %{current: []} = acc ->
acc
_, acc ->
%{expression_blocks: [acc.current | acc.expression_blocks], current: []}
end)
[current | expression_blocks]
end
@typep modifications :: %{optional(LineMap.line_number()) => LineMap.line_number()}
@spec ast_block_to_modifications({:__block__, Macro.metadata(), [Macro.t()]}, atom) ::
modifications()
defp ast_block_to_modifications({:__block__, _, args}, split_expression) do
args
|> split_into_expression_blocks(split_expression)
|> Enum.map(&Enum.sort_by(&1, fn t -> Macro.to_string(t) end))
|> accumulate_modifications(split_expression)
end
# Takes all sorted expression blocks and transforms them into concrete line changes.
@spec accumulate_modifications([Macro.t()], atom) :: modifications
defp accumulate_modifications(expression_blocks, split_expression) do
Enum.reduce(expression_blocks, %{}, fn expression_block, acc ->
first_line =
expression_block
|> Enum.map(fn {^split_expression, meta, _} ->
Keyword.fetch!(meta, :line)
end)
|> Enum.min(fn -> 0 end)
inner_acc = expression_block_to_modification(expression_block, first_line, split_expression)
Map.merge(acc, inner_acc, fn _, _, _ -> raise LineModificationConflict end)
end)
end
# Takes a sorted expression block and moves it to the `start_line`. The `start_line` == the
# original start of the block. So only lines _within_ a block are shuffled; the blocks
# themselves remain static in the file layout.
@spec expression_block_to_modification(Macro.t(), non_neg_integer, atom) :: modifications
defp expression_block_to_modification(expression_block, start_line, split_expression) do
{acc, _} =
Enum.reduce(expression_block, {%{}, start_line}, fn {^split_expression, meta, _},
{acc, current_line} ->
from = Keyword.fetch!(meta, :line)
to = meta |> Keyword.get(:end_of_expression, line: from) |> Keyword.fetch!(:line)
{inner_acc, current_line} = range_to_modification(from..to, current_line)
acc = Map.merge(acc, inner_acc, fn _, _, _ -> raise LineModificationConflict end)
{acc, current_line}
end)
acc
end
# Takes a range and a start line and creates a mapping from each line in the range to start
# line. e.g. 2..3, 5 -> %{2 => 5, 3 => 6} & the last new line number
@spec range_to_modification(Range.t(), non_neg_integer) ::
{modifications, LineMap.line_number()}
defp range_to_modification(block_range, start_line) do
Enum.reduce(block_range, {%{}, start_line}, fn old_line_number, {acc, new_line_number} ->
acc = Map.put(acc, old_line_number, new_line_number)
{acc, new_line_number + 1}
end)
end
end
|
lib/eunomo/formatter/alphabetical_expression_sorter.ex
| 0.817392 | 0.509642 |
alphabetical_expression_sorter.ex
|
starcoder
|
defmodule Adventofcode.Day19Tubes do
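@moduledoc """
Advent of Code, day 19: follows an ASCII routing diagram. Starting at the
first `|` of the top row, the walker moves along `|`, `-` and `+` (turns),
collecting any letters it passes. `what_letters/1` returns the letters in
order; `how_many_steps/1` returns the total number of steps taken.
"""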
defstruct grid: nil,
width: 0,
height: 0,
seen: [],
steps: 0,
position: {0, 0},
direction: {0, 1}
def what_letters(input) do
input
|> new()
|> travel()
|> seen_letters()
end
def how_many_steps(input) do
input
|> new()
|> travel()
|> Map.get(:steps)
end
defp new(input) do
starting_pos = {index_of_first_pipe(input), 0}
lines = String.split(input, "\n")
width = lines |> Enum.map(&String.length/1) |> Enum.max()
height = length(lines)
chars = Enum.flat_map(lines, &String.codepoints/1)
%__MODULE__{
grid: build_grid(chars, width),
width: width,
height: height,
position: starting_pos
}
end
defp travel(state) do
char = state.grid[state.position]
case char do
" " ->
state
nil ->
state
"|" ->
state |> forward() |> travel()
"-" ->
state |> forward() |> travel()
"+" ->
state |> turn() |> forward() |> travel()
letter ->
state |> store_letter(letter) |> forward() |> travel()
end
end
defp forward(state) do
%{state | position: next_pos(state.position, state.direction), steps: state.steps + 1}
end
defp next_pos({x, y}, {dir_x, dir_y}) do
{x + dir_x, y + dir_y}
end
defp store_letter(state, letter) do
%{state | seen: [letter | state.seen]}
end
defp turn(state) do
case state.direction do
{_, 0} -> state |> do_turn([{0, -1}, {0, 1}])
{0, _} -> state |> do_turn([{-1, 0}, {1, 0}])
end
end
defp do_turn(state, directions) do
%{state | direction: next_direction(state, directions)}
end
defp next_direction(state, directions) do
Enum.find(directions, fn direction ->
position = next_pos(state.position, direction)
case state.grid[position] do
" " ->
false
_ ->
true
end
end)
end
defp build_grid(chars, width) do
Enum.reduce(Enum.with_index(chars), %{}, fn {char, index}, acc ->
x = rem(index, width)
y = div(index, width)
Map.put(acc, {x, y}, char)
end)
end
defp index_of_first_pipe(input) do
input |> String.codepoints() |> Enum.find_index(&(&1 == "|"))
end
defp seen_letters(state) do
state
|> Map.get(:seen)
|> Enum.reverse()
|> Enum.join()
end
end
|
lib/day_19_tubes.ex
| 0.64131 | 0.627923 |
day_19_tubes.ex
|
starcoder
|
defmodule ExAudit.Repo do
@moduledoc """
Adds ExAudit version tracking to your Ecto.Repo actions. The following functions are
extended to detect if the given struct or changeset is in the list of :tracked_schemas
given in :ex_audit config:
insert: 2,
update: 2,
insert_or_update: 2,
delete: 2,
insert!: 2,
update!: 2,
insert_or_update!: 2,
delete!: 2
If the given struct or changeset is not tracked then the original function from Ecto.Repo is
executed, i.e., the functions are marked as overridable and the overriding implementations
call `Kernel.super/1` when the given struct or changeset is not tracked.
## How to use it.
Just `use ExAudit.Repo` after `Ecto.Repo`
```elixir
defmodule MyApp.Repo do
use Ecto.Repo,
otp_app: :my_app,
adapter: Ecto.Adapters.Postgres
use ExAudit.Repo
end
```
## Shared options
All normal Ecto.Repo options will work the same; however, there are additional options specific to ex_audit:
* `:ex_audit_custom` - Keyword list of custom data that should be placed in new version entries. Entries in this
list overwrite data with the same keys from the ExAudit.track call
* `:ignore_audit` - If true, ex_audit will not track changes made to entities
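For example (a sketch; the repo module and the custom keys are placeholders):
```elixir
MyApp.Repo.insert(changeset, ex_audit_custom: [user_id: current_user.id])
MyApp.Repo.update(changeset, ignore_audit: true)
```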
"""
defmacro __using__(opts) do
quote location: :keep do
@behaviour ExAudit.Repo
@tracker_repo Keyword.get(unquote(opts), :tracker_repo)
def tracker_repo, do: @tracker_repo
# These are the Ecto.Repo functions that ExAudit "extends" but these are not
# marked as overridable in Ecto.Repo. (ecto v3.4.2)
defoverridable(
insert: 2,
update: 2,
insert_or_update: 2,
delete: 2,
insert!: 2,
update!: 2,
insert_or_update!: 2,
delete!: 2
)
require Logger
defp log(text) do
Logger.debug(text, ansi_color: :blue)
end
defp is_in_tracked_schemas?(struct_or_changeset) do
tracked_schemas = Application.get_env(:ex_audit, :tracked_schemas)
schema =
case struct_or_changeset do
%Ecto.Changeset{} = changeset ->
Map.get(changeset.data, :__struct__)
_ ->
Map.get(struct_or_changeset, :__struct__)
end
if tracked_schemas do
schema in tracked_schemas
else
true
end
end
def is_in_ignored_schemas?(struct_or_changeset) do
schemas = Application.get_env(:ex_audit, :ignored_schemas)
schema =
case struct_or_changeset do
%Ecto.Changeset{} = changeset ->
Map.get(changeset.data, :__struct__)
_ ->
Map.get(struct_or_changeset, :__struct__)
end
if schemas do
schema in schemas
else
false
end
end
defp tracked?(struct_or_changeset, opts) do
ignored = Process.get(:ignore_audit, false)
log(
"should track?: #{!ignored && is_in_tracked_schemas?(struct_or_changeset)} -- ignored: #{
ignored
} in_schemas? #{is_in_tracked_schemas?(struct_or_changeset)} in ignore: #{
is_in_ignored_schemas?(struct_or_changeset)
}"
)
!ignored && !is_in_ignored_schemas?(struct_or_changeset) &&
is_in_tracked_schemas?(struct_or_changeset)
end
@compile {:inline, tracked?: 2}
defp wrap_ignore(struct, opts, func) do
prev_val = Process.get(:ignore_audit)
log("before #{inspect(struct)} #{tracked?(struct, opts)} #{inspect(self())}")
if opts != nil && Keyword.get(opts, :ignore_audit) != nil do
Process.put(:ignore_audit, Keyword.get(opts, :ignore_audit))
end
result = func.()
if prev_val do
Process.put(:ignore_audit, prev_val)
else
Process.delete(:ignore_audit)
end
log("after #{inspect(struct)} #{tracked?(struct, opts)} #{inspect(self())}")
result
end
def insert(struct, opts) do
wrap_ignore(struct, opts, fn ->
log("insert call #{inspect(struct)}")
if tracked = tracked?(struct, opts) do
log("Is tracked? #{inspect(tracked)} #{inspect(struct)}")
ExAudit.Schema.insert(
__MODULE__,
get_dynamic_repo(),
struct,
opts
)
else
super(struct, opts)
end
end)
end
def update(struct, opts) do
wrap_ignore(struct, opts, fn ->
if tracked?(struct, opts) do
ExAudit.Schema.update(
__MODULE__,
get_dynamic_repo(),
struct,
opts
)
else
super(struct, opts)
end
end)
end
def insert_or_update(changeset, opts) do
wrap_ignore(changeset, opts, fn ->
if tracked?(changeset, opts) do
ExAudit.Schema.insert_or_update(
__MODULE__,
get_dynamic_repo(),
changeset,
opts
)
else
super(changeset, opts)
end
end)
end
def delete(struct, opts) do
wrap_ignore(struct, opts, fn ->
if tracked?(struct, opts) do
ExAudit.Schema.delete(
__MODULE__,
get_dynamic_repo(),
struct,
opts
)
else
super(struct, opts)
end
end)
end
def insert!(struct, opts) do
wrap_ignore(struct, opts, fn ->
log("insert! call #{struct.__struct__}")
if tracked?(struct, opts) do
ExAudit.Schema.insert!(
__MODULE__,
get_dynamic_repo(),
struct,
opts
)
else
super(struct, opts)
end
end)
end
def update!(struct, opts) do
wrap_ignore(struct, opts, fn ->
if tracked?(struct, opts) do
ExAudit.Schema.update!(
__MODULE__,
get_dynamic_repo(),
struct,
opts
)
else
super(struct, opts)
end
end)
end
def insert_or_update!(changeset, opts) do
wrap_ignore(changeset, opts, fn ->
if tracked?(changeset, opts) do
ExAudit.Schema.insert_or_update!(
__MODULE__,
get_dynamic_repo(),
changeset,
opts
)
else
super(changeset, opts)
end
end)
end
def delete!(struct, opts) do
wrap_ignore(struct, opts, fn ->
if tracked?(struct, opts) do
ExAudit.Schema.delete!(
__MODULE__,
get_dynamic_repo(),
struct,
opts
)
else
super(struct, opts)
end
end)
end
# ExAudit.Repo behaviour
def history(struct, opts \\ []) do
ExAudit.Queryable.history(__MODULE__, struct, opts)
end
def revert(version, opts \\ []) do
ExAudit.Queryable.revert(__MODULE__, version, opts)
end
end
end
@doc """
Gathers the version history for the given struct, ordered by the time the changes
happened from newest to oldest.
### Options
* `:render_structs` if true, renders the _resulting_ struct of the patch for every version in its history.
This will shift the ids of the versions one down, so visualisations are correct and corresponding "Revert"
buttons revert the struct back to the visualized state.
Will append an additional version that contains the oldest ID and the oldest struct known. In most cases, the
`original` will be `nil` which means if this version would be reverted, the struct would be deleted.
`false` by default.
"""
@callback history(struct, opts :: list) :: [version :: struct]
@doc """
Undoes the changes made in the given version, as well as all of the following versions.
Inserts a new version entry in the process, with the `:rollback` flag set to true
### Options
* `:preload` if your changeset depends on assocs being preloaded on the struct before
updating it, you can define a list of assocs to be preloaded with this option
"""
@callback revert(version :: struct, opts :: list) ::
{:ok, struct} | {:error, changeset :: Ecto.Changeset.t()}
end
|
lib/repo/repo.ex
| 0.827932 | 0.716529 |
repo.ex
|
starcoder
|
defmodule ExTwilio.Notify.Binding do
@moduledoc """
Represents a Binding resource in the Twilio Notify.
- [Twilio docs](https://www.twilio.com/docs/notify/api/bindings)
- identity The Identity to which this Binding belongs to. Identity is defined
by your application. Up to 20 Bindings can be created for the same Identity
in a given Service.
- binding_type The type of the Binding. This determines the transport technology to use. Allowed values: `apn`, `fcm`, `gcm`, `sms`, and `facebook-messenger`.
- address The address specific to the channel. For APNS it is the device
token. For FCM and GCM it is the registration token. For SMS it is a phone
number in E.164 format. For Facebook Messenger it is the Messenger ID of the user or a phone number in E.164 format.
- tag The list of tags associated with this Binding. Tags can be used to
select the Bindings to use when sending a notification. Maximum 20 tags are
allowed.
- notification_protocol_version The version of the protocol (data format)
used to send the notification. This defaults to the value of
DefaultXXXNotificationProtocolVersion in the `ExTwilio.Notify.Service`.
The current version is `"3"` for `apn`, `fcm`, and `gcm` type Bindings. The
parameter is not applicable to `sms` and `facebook-messenger` type Bindings as the data format is fixed.
- credential_sid The unique identifier (SID) of the
`ExTwilio.Notify.Credential` resource to be used to send notifications to
this Binding. If present, this overrides the Credential specified in the
Service resource. Applicable only to `apn`, `fcm`, and `gcm` type Bindings.
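For illustration, a sketch based on `ExTwilio.Resource` conventions (the
service SID below is a made-up placeholder):
```elixir
ExTwilio.Notify.Binding.all(service: "ISxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx")
```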
"""
defstruct sid: nil,
account_sid: nil,
service_sid: nil,
credential_sid: nil,
date_created: nil,
date_updated: nil,
notification_protocol_version: nil,
identity: nil,
binding_type: nil,
address: nil,
tags: nil,
tag: nil,
url: nil,
links: nil
use ExTwilio.Resource,
import: [
:stream,
:all,
:find,
:create,
:update,
:destroy
]
def parents,
do: [
%ExTwilio.Parent{module: ExTwilio.Notify.Service, key: :service}
]
end
|
lib/ex_twilio/resources/notify/binding.ex
| 0.729134 | 0.447158 |
binding.ex
|
starcoder
|
defmodule Bricks.Socket do
@moduledoc """
A Socket represents a *connected* Socket - that is, it is
manufactured *after* a connection has been established.
As in Erlang/OTP, a Socket is owned by a single process, the
owner. No other process may interact with the Socket, but the owner
can hand off ownership to another process. If the owner dies, the
socket will be closed.
A Socket is at any given time (as per OTP) in one of these modes:
- `passive` mode - data can only be received when you call `recv`.
- `active` mode - the socket transforms packets into messages as
they arrive and transmits them to the owner.
- `bounded active` mode - like `active` mode, but caps the number of
packets that will be buffered in the owner mailbox before
requiring a reset by the owner.
You should not expect a Socket to be in a particular mode; rather,
you should explicitly set it when you receive the Socket with
`set_active/2`. The default activity depends upon the type of Socket
and probably the OTP release you're running under.
## Picking a mode
- `passive` mode: *a safe default* when you don't have aggressive
performance requirements. The kernel will buffer some amount of
data and the rest will be rejected via (TCP) backpressure.
- `active` mode: the process mailbox becomes an effectively
unlimited sized buffer. If you fail to keep up, you might buffer
until the BEAM has eaten all of the memory and gets killed. But
while it doesn't, it's low latency and high throughput.
*WARNING: I hope you know what you're doing, use bounded active*
- `bounded active` mode: like active mode, but caps the number of
packets that will be buffered in the owner mailbox. Achieves
better performance than `passive` mode but without the risks
associated with `active` mode.
## Bounded Active Mode
In addition to taking a boolean value, `set_active` can take an
integer, entering what I call `bounded active` mode (BAM). BAM
provides most of the performance benefits of `active` mode with the
security of not having your BEAM get OOMkilled from overbuffering.
If passive mode doesn't meet your needs, use BAM - avoid OOM!
In BAM, the socket behaves as in `active` mode, sending messages
that are received to the owner. However, it also maintains an
internal `active window` counter of the number of packets that may
be received in active mode, after which the socket is placed into
passive mode and a notification message is sent to the owner who can
put the socket back into BAM with `set_active/2`.
The `bam_window` field in the `Socket` struct holds the intended active
window. It is used by `extend_active/1` to easily reset BAM.
### BAM and setting active
`set_active/2` can take the following values:
- `true` - enable `active` mode
- `false` - enable `passive` mode
- `:once` - enable BAM for one packet
- `integer()` - adjust the internal active window counter by this
many, send a message to notify the owner when the
socket is made passive
Note that while the first three all set the value, providing an
integer can behave in two ways, depending on the current mode:
- You are not in BAM, the active counter is *set* to this
value. If it is lower than zero, the socket is made passive.
- You are in BAM, the active counter is *adjusted by* this value
(by addition). If you pass a negative number and cause the
counter to go 0 or lower, the socket is made passive.
We recommend only using `set_active/2` with a positive integer and
when the socket is in passive mode (such as when you have been
informed the socket has just been made passive). This simplifies the
problem and you can pretend that it *does* set the internal counter.
If you ignore the last paragraph, you'll have to keep track of the
current active window to know what the new one will be after calling
`set_active/2`. The (undocumented) `decr_active/1` function may be
useful to call when you receive a packet.
This behaviour is inherited from OTP. We would not choose to implement
it this way ourselves.
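### Example
A minimal owner receive loop, as a sketch (it assumes a TCP-backed socket in
binary mode whose tags are `:tcp`, `:tcp_error`, `:tcp_closed` and
`:tcp_passive`, and a hypothetical `handle_data/1` application callback):
```elixir
def loop(%Bricks.Socket{handle: h} = socket) do
receive do
{:tcp, ^h, data} ->
handle_data(data)
loop(socket)
{:tcp_passive, ^h} ->
# active window exhausted; extend BAM by the socket's bam_window
{:ok, socket} = Bricks.Socket.extend_active(socket)
loop(socket)
{:tcp_closed, ^h} ->
:ok
{:tcp_error, ^h, reason} ->
{:error, reason}
end
end
```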
"""
@enforce_keys [
:module,
:host,
:port,
:handle,
:active,
:data_tag,
:error_tag,
:closed_tag,
:passive_tag,
:receive_timeout,
:bam_window
]
defstruct @enforce_keys
alias Bricks.{Socket, Util}
alias Bricks.Error.{BadOption, Closed, NotActive, Posix, UnknownOptions}
import Bricks.Guards
## Defaults
@default_bam_window 10
## Types
@typedoc "The activity mode of the socket. See module documentation."
@type active :: boolean() | :once | -32768..32767
@typedoc "The type of data received from the socket. Depends upon socket binary mode"
@type data :: binary() | charlist()
@typedoc "An IPv4 address"
@type ipv4 :: {byte(), byte(), byte(), byte()}
@typedoc "An IPv6 address"
@type ipv6 :: {char(), char(), char(), char(), char(), char(), char(), char()}
@typedoc "An IPv4 or IPv6 address"
@type ip :: ipv4() | ipv6()
@typedoc "A host to connect to (or a filepath to a unix socket)"
@type host :: binary() | ipv4() | ipv6()
@typedoc "A port number"
@type port_num :: pos_integer()
@typedoc "The activity mode used for extending Bounded Active Mode"
@type window :: :once | pos_integer()
@typedoc "A connected socket"
@type t :: %Socket{
module: atom(),
host: host(),
port: port_num() | :local,
handle: term(),
active: active(),
data_tag: atom(),
error_tag: atom(),
closed_tag: atom(),
passive_tag: atom(),
receive_timeout: timeout(),
bam_window: window() | nil
}
@typedoc "Options provided to `new/1`"
@type new_opts :: %{
:module => atom(),
:host => host(),
:port => port_num(),
:handle => term(),
:active => active(),
:data_tag => term(),
:error_tag => term(),
:closed_tag => term(),
:passive_tag => term(),
optional(:bam_window) => window(),
optional(:receive_timeout) => timeout() | nil
}
@typedoc "The errors that `new/1` may return"
@type new_error :: BadOption.t() | UnknownOptions.t()
@spec new(new_opts()) :: {:ok, t()} | {:error, new_error()}
@doc """
Creates a new Socket from a map of options
Required keys:
- `module`: callback module for the given socket type
- `host`: the host the socket is connected to
- `port`: the port the socket is connected to
- `handle`: underlying reference to the socket
- `active`: current activity mode
- `data_tag`: tag used to identify a data message from the socket
- `error_tag`: tag used to identify an error message from the socket
- `closed_tag`: tag used to identify a closed message from the socket
- `passive_tag`: tag used to identify a passive message from the socket
Optional keys:
- `receive_timeout`: default timeout used for calls to `recv`
- `bam_window`: default active window to use for resetting bounded active mode
"""
def new(
%{
module: module,
host: host,
port: port,
handle: handle,
active: active,
data_tag: data_tag,
error_tag: error_tag,
closed_tag: closed_tag,
passive_tag: passive_tag
} = opts
) do
receive_timeout = Map.get(opts, :receive_timeout, 5000)
bam_window = Map.get(opts, :bam_window, @default_bam_window)
extra =
Map.keys(
Map.drop(opts, [
:module,
:port,
:handle,
:active,
:data_tag,
:error_tag,
:closed_tag,
:passive_tag,
:receive_timeout,
:bam_window,
:host
])
)
cond do
[] != extra ->
{:error, UnknownOptions.new(extra)}
not host?(host) ->
{:error, BadOption.new(:host, host, [:ipv4, :ipv6, :binary])}
not is_atom(module) ->
{:error, BadOption.new(:module, module, [:atom])}
not is_atom(data_tag) ->
{:error, BadOption.new(:data_tag, data_tag, [:atom])}
not is_atom(error_tag) ->
{:error, BadOption.new(:error_tag, error_tag, [:atom])}
not is_atom(closed_tag) ->
{:error, BadOption.new(:closed_tag, closed_tag, [:atom])}
not is_atom(passive_tag) ->
{:error, BadOption.new(:passive_tag, passive_tag, [:atom])}
not is_active(active) ->
{:error, BadOption.new(:active, active, [:bool, :non_neg_int, :once])}
not is_timeout(receive_timeout) ->
{:error, BadOption.new(:receive_timeout, receive_timeout, [:infinity, :non_neg_int])}
not is_window(bam_window) ->
{:error, BadOption.new(:bam_window, bam_window, [:once, :pos_integer])}
true ->
{:ok,
%Socket{
module: module,
host: host,
port: port,
handle: handle,
active: active,
data_tag: data_tag,
error_tag: error_tag,
closed_tag: closed_tag,
passive_tag: passive_tag,
receive_timeout: receive_timeout,
bam_window: bam_window
}}
end
end
# Functions for manipulating the struct directly
@doc false
@spec decr_active(t()) :: t()
def decr_active(%Socket{active: active} = socket) do
case active do
1 -> tweak_active(socket, false)
:once -> tweak_active(socket, false)
n when is_integer(n) and n > 1 -> %{socket | active: active - 1}
_ -> throw(NotActive.new())
end
end
@doc false
@spec tweak_active(t(), active()) :: t()
def tweak_active(%Socket{} = socket, active), do: %{socket | active: active}
# Receiving data
@typedoc "The types of error `recv/1`, `recv/2` and `recv/3` may return"
@type recv_error :: Closed.t() | Posix.t()
@typedoc "The return of `recv/1`, `recv/2` and `recv/3`"
@type recv_result :: {:ok, data(), t()} | {:error, recv_error()}
@spec recv(t(), non_neg_integer()) :: recv_result()
@doc """
Receives all available data from the socket.
Times out if the Socket's `receive_timeout` is breached.
"""
def recv(%Socket{receive_timeout: timeout} = socket) do
recv(socket, 0, timeout)
end
@doc """
Receives data from the socket
If the socket is not in raw mode (default) or `size` is zero:
Receives all available data from the socket
If the socket is in raw mode and `size` is not zero:
Receives exactly `size` bytes of data from the socket
Times out if the Socket's `receive_timeout` is breached.
"""
def recv(%Socket{receive_timeout: timeout} = socket, size) do
recv(socket, size, timeout)
end
@doc "Receive from the Socket"
@callback recv(t(), non_neg_integer(), timeout()) :: recv_result()
@spec recv(t(), non_neg_integer(), timeout()) :: recv_result()
@doc """
Receives data from the socket
- If the socket is not in raw mode (default) or `size` is zero:
Receives all available data from the socket
- If the socket is in raw mode and `size` is not zero:
Receives exactly `size` bytes of data from the socket
Times out if `timeout` is breached.
"""
def recv(%Socket{module: module, active: false} = socket, size, timeout) do
apply(module, :recv, [socket, size, timeout])
end
# Sending data
@typedoc "The errors `send_data/2` may return"
@type send_error :: Closed.t() | Posix.t()
@typedoc "The return type of `send_data/2`"
@type send_result :: :ok | {:error, send_error()}
@doc "Send the data down the socket"
@callback send_data(t(), iodata()) :: send_result()
@spec send_data(t(), iodata()) :: send_result()
@doc """
Sends the given iolist `data` down the socket.
There is no timeout for this operation unless one was specified
during construction or set after construction.
"""
def send_data(%Socket{module: module} = socket, data) do
apply(module, :send_data, [socket, data])
end
# Setting the socket activity
@typedoc "The return type of `set_active/2`"
@type set_active_return :: {:ok, t()} | {:error, Posix.t()}
@doc "Set the activity mode of the socket"
@callback set_active(t(), active()) :: set_active_return()
@spec set_active(t(), active()) :: set_active_return()
@doc """
Changes the socket's activity mode. Valid activities:
- `true` - enable `active` mode
- `false` - enable `passive` mode
- `:once` - enable BAM for one packet
- `integer()` - adjust the internal active window counter by this many
Note that while the first three all set the value, providing an
integer can behave in two ways, depending on the current mode:
- You are not in BAM, the active counter is *set* to this
value. If it is lower than zero, the socket is made passive.
- You are in BAM, the active counter is *adjusted by* this value
(by addition). If you pass a negative number and cause the
counter to go below 0, the socket is made passive.
We recommend only using `set_active/2` with a positive integer and
when the socket is in passive mode (such as when you have been
informed the socket has just been made passive). This simplifies the
problem and you can pretend that it *does* set the internal counter.
If you ignore the last paragraph, you'll have to keep track of the
current active window to know what the new one will be after calling
`set_active/2`. The (undocumented) `decr_active/1` function may be
useful to call when you receive a packet. You could also use
`fetch_active/1`, but this will be slower.
This behaviour is inherited from OTP. We would not choose to implement
it this way ourselves.
"""
def set_active(%Socket{module: module} = socket, active) do
apply(module, :set_active, [socket, active])
end
# Fetching the socket activity
@typedoc "The errors `fetch_active/1` may return"
@type fetch_active_error :: Closed.t() | Posix.t()
@typedoc "The return type of `fetch_active/1`"
@type fetch_active_return :: {:ok, active()} | {:error, fetch_active_error()}
@doc "Fetches the active mode of the socket"
@callback fetch_active(t()) :: fetch_active_return()
@spec fetch_active(t()) :: fetch_active_return()
@doc """
Fetches the current activity of the socket
"""
def fetch_active(%Socket{module: module} = socket) do
apply(module, :fetch_active, [socket])
end
# Closing the socket
@doc "Closes the socket"
@callback close(t()) :: :ok
@spec close(t()) :: :ok
@doc """
Closes the socket. Always returns `:ok`
"""
def close(%Socket{module: module} = socket) do
apply(module, :close, [socket])
end
# Handing off the socket to another process
@typedoc "The errors `handoff/2` may return"
@type handoff_error :: BadOption.t() | Closed.t() | Posix.t()
@typedoc "The return type of `handoff/2`"
@type handoff_return :: {:ok, t()} | {:error, handoff_error()}
@doc "Makes another process the new owner of the Socket"
@callback handoff(t(), pid()) :: handoff_return()
@spec handoff(t(), pid()) :: handoff_return()
@doc """
Hands a Socket off to a new process, which becomes the owner
"""
def handoff(%Socket{module: module} = socket, pid) when is_pid(pid) do
apply(module, :handoff, [socket, pid])
end
@spec passify(t()) :: {:ok, binary(), Socket.t()} | {:closed, binary()} | {:error, term()}
@doc """
Turns the socket passive, clearing any active data out of the
mailbox and returning it as one binary.
Note: Assumes binary mode!
"""
def passify(%Socket{} = socket) do
with {:ok, socket} <- set_active(socket, false),
do: passify_h(socket, "")
end
defp passify_h(
%Socket{handle: handle, data_tag: d, error_tag: e, closed_tag: c, passive_tag: p} =
socket,
acc
) do
if Util.active?(socket) do
receive do
{^p, ^handle} ->
{:ok, acc, %{socket | active: false}}
{^e, ^handle, reason} ->
{:error, reason}
{^c, ^handle} ->
{:closed, acc}
{^d, ^handle, msg} ->
case msg do
{:data, data} -> passify_h(socket, acc <> data)
:closed -> {:closed, acc}
{:error, reason} -> {:error, reason}
end
after
0 -> {:ok, acc, %{socket | active: false}}
end
else
{:ok, acc, %{socket | active: false}}
end
end
@doc """
Extends the bounded active mode (BAM) of a socket by its bam_window
"""
@spec extend_active(t()) :: set_active_return()
def extend_active(%Socket{bam_window: window} = socket) do
extend_active(socket, window)
end
@doc """
Extends the active of a socket by the given window
"""
@spec extend_active(t(), window()) :: set_active_return()
def extend_active(%Socket{} = socket, window) do
Socket.set_active(socket, window)
end
end
|
bricks/lib/socket.ex
| 0.911017 | 0.503906 |
socket.ex
|
starcoder
|
defprotocol Flop.Schema do
@moduledoc """
This protocol allows you to set query options in your Ecto schemas.
## Usage
Derive `Flop.Schema` in your Ecto schema and set the filterable and sortable
fields.
defmodule Flop.Pet do
use Ecto.Schema
@derive {Flop.Schema,
filterable: [:name, :species],
sortable: [:name, :age]}
schema "pets" do
field :name, :string
field :age, :integer
field :species, :string
end
end
After that, you can pass the module as the `:for` option to `Flop.validate/2`.
iex> Flop.validate(%Flop{order_by: [:name]}, for: Flop.Pet)
{:ok,
%Flop{
filters: [],
limit: nil,
offset: nil,
order_by: [:name],
order_directions: nil,
page: nil,
page_size: nil
}}
iex> {:error, changeset} = Flop.validate(
...> %Flop{order_by: [:species]}, for: Flop.Pet
...> )
iex> changeset.valid?
false
iex> changeset.errors
[
order_by: {"has an invalid entry",
[validation: :subset, enum: [:name, :age]]}
]
### Defining default and maximum limits
To define a default or maximum limit, you can set the `default_limit` and
`max_limit` option when deriving `Flop.Schema`. The maximum limit will be
validated and the default limit applied by `Flop.validate/1`.
@derive {Flop.Schema,
filterable: [:name, :species],
sortable: [:name, :age],
max_limit: 100,
default_limit: 50}
### Defining a default sort order
To define a default sort order, you can set the `default_order_by` and
`default_order_directions` options when deriving `Flop.Schema`. The default
values are applied by `Flop.validate/1`. If no order directions are set,
`:asc` is assumed for all fields.
@derive {Flop.Schema,
filterable: [:name, :species],
sortable: [:name, :age],
default_order_by: [:name, :age],
default_order_directions: [:asc, :desc]}
### Restricting pagination types
By default, `page`/`page_size`, `offset`/`limit` and cursor-based pagination
(`first`/`after` and `last`/`before`) are enabled. If you want to restrict the
pagination type for a schema, you can do that by setting the
`pagination_types` option.
@derive {Flop.Schema,
filterable: [:name, :species],
sortable: [:name, :age],
pagination_types: [:first, :last]}
See also `t:Flop.option/0` and `t:Flop.pagination_type/0`. Setting the value
to `nil` allows all pagination types.
"""
@fallback_to_any true
@doc """
Returns the filterable fields of a schema.
iex> Flop.Schema.filterable(%Flop.Pet{})
[:name, :species]
"""
@spec filterable(any) :: [atom]
def filterable(data)
@doc """
Returns the allowed pagination types of a schema.
iex> Flop.Schema.pagination_types(%Flop.Fruit{})
[:first, :last, :offset]
"""
@doc since: "0.9.0"
@spec pagination_types(any) :: [Flop.pagination_type()] | nil
def pagination_types(data)
@doc """
Returns the sortable fields of a schema.
iex> Flop.Schema.sortable(%Flop.Pet{})
[:name, :age]
"""
@spec sortable(any) :: [atom]
def sortable(data)
@doc """
Returns the default limit of a schema.
iex> Flop.Schema.default_limit(%Flop.Fruit{})
50
"""
@doc since: "0.3.0"
@spec default_limit(any) :: pos_integer | nil
def default_limit(data)
@doc """
Returns the default order of a schema.
iex> Flop.Schema.default_order(%Flop.Fruit{})
%{order_by: [:name], order_directions: [:asc]}
"""
@doc since: "0.7.0"
@spec default_order(any) :: %{
order_by: [atom] | nil,
order_directions: [Flop.order_direction()] | nil
}
def default_order(data)
@doc """
Returns the maximum limit of a schema.
iex> Flop.Schema.max_limit(%Flop.Pet{})
20
"""
@doc since: "0.2.0"
@spec max_limit(any) :: pos_integer | nil
def max_limit(data)
end
defimpl Flop.Schema, for: Any do
@instructions """
Flop.Schema protocol must always be explicitly implemented.
To do this, you have to derive Flop.Schema in your Ecto schema module. You
have to set both the filterable and the sortable option.
@derive {Flop.Schema,
filterable: [:name, :species], sortable: [:name, :age, :species]}
schema "pets" do
field :name, :string
field :age, :integer
field :species, :string
end
"""
defmacro __deriving__(module, _struct, options) do
filterable_fields = Keyword.get(options, :filterable)
sortable_fields = Keyword.get(options, :sortable)
if is_nil(filterable_fields) || is_nil(sortable_fields),
do: raise(ArgumentError, @instructions)
default_limit = Keyword.get(options, :default_limit)
max_limit = Keyword.get(options, :max_limit)
pagination_types = Keyword.get(options, :pagination_types)
default_order = %{
order_by: Keyword.get(options, :default_order_by),
order_directions: Keyword.get(options, :default_order_directions)
}
quote do
defimpl Flop.Schema, for: unquote(module) do
def default_limit(_) do
unquote(default_limit)
end
def default_order(_) do
unquote(Macro.escape(default_order))
end
def filterable(_) do
unquote(filterable_fields)
end
def max_limit(_) do
unquote(max_limit)
end
def pagination_types(_) do
unquote(pagination_types)
end
def sortable(_) do
unquote(sortable_fields)
end
end
end
end
def default_limit(struct) do
raise Protocol.UndefinedError,
protocol: @protocol,
value: struct,
description: @instructions
end
def default_order(struct) do
raise Protocol.UndefinedError,
protocol: @protocol,
value: struct,
description: @instructions
end
def filterable(struct) do
raise Protocol.UndefinedError,
protocol: @protocol,
value: struct,
description: @instructions
end
def max_limit(struct) do
raise Protocol.UndefinedError,
protocol: @protocol,
value: struct,
description: @instructions
end
def pagination_types(struct) do
raise Protocol.UndefinedError,
protocol: @protocol,
value: struct,
description: @instructions
end
def sortable(struct) do
raise Protocol.UndefinedError,
protocol: @protocol,
value: struct,
description: @instructions
end
end
|
lib/flop/schema.ex
| 0.881155 | 0.546012 |
schema.ex
|
starcoder
|
defmodule Tenantex.Repo do
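@moduledoc """
Wraps an `Ecto.Repo` so that operations on tenanted schemas must carry a
tenant prefix, raising `Tenantex.TenantMissingError` otherwise. A sketch of
typical use (module names are placeholders):
```elixir
defmodule MyApp.TenantRepo do
use Tenantex.Repo,
repo: MyApp.Repo,
untenanted: [MyApp.GlobalSetting]
end
```
`Ecto.Migration.SchemaMigration` is always treated as untenanted.
"""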
import Application, only: [get_env: 2]
import Tenantex.Prefix
import Mix.Tenantex
defmacro __using__(opts) do
quote bind_quoted: [opts: opts] do
@behaviour Ecto.Repo
alias Tenantex.TenantMissingError
@repo Keyword.fetch!(opts, :repo)
@untenanted [Ecto.Migration.SchemaMigration] ++ Keyword.get(opts, :untenanted, [])
# From Ecto.Repo
defdelegate __adapter__, to: @repo
defdelegate __log__(entry), to: @repo
defdelegate config(), to: @repo
defdelegate start_link(opts \\ []), to: @repo
defdelegate stop(pid, timeout \\ 5000), to: @repo
defdelegate transaction(fun_or_multi, opts \\ []), to: @repo
defdelegate in_transaction?(), to: @repo
defdelegate rollback(value), to: @repo
defdelegate load(schema_or_types, data), to: @repo
# From Ecto.Adapters.SQL
defdelegate __pool__, to: @repo
defdelegate __sql__, to: @repo
def all(queryable, opts \\ []) do
assert_tenant(queryable, opts)
@repo.all(queryable, coerce_prefix(opts))
end
def stream(queryable, opts \\ []) do
assert_tenant(queryable, opts)
@repo.stream(queryable, coerce_prefix(opts))
end
def get(queryable, id, opts \\ []) do
assert_tenant(queryable, opts)
@repo.get(queryable, id, coerce_prefix(opts))
end
def get!(queryable, id, opts \\ []) do
assert_tenant(queryable, opts)
@repo.get!(queryable, id, coerce_prefix(opts))
end
def get_by(queryable, clauses, opts \\ []) do
assert_tenant(queryable, opts)
@repo.get_by(queryable, clauses, coerce_prefix(opts))
end
def get_by!(queryable, clauses, opts \\ []) do
assert_tenant(queryable, opts)
@repo.get_by!(queryable, clauses, coerce_prefix(opts))
end
def one(queryable, opts \\ []) do
assert_tenant(queryable, opts)
@repo.one(queryable, coerce_prefix(opts))
end
def one!(queryable, opts \\ []) do
assert_tenant(queryable, opts)
@repo.one!(queryable, coerce_prefix(opts))
end
def preload(struct_or_structs, preloads, opts \\ []) do
assert_tenant(struct_or_structs, opts)
@repo.preload(struct_or_structs, preloads, coerce_prefix(opts))
end
def aggregate(queryable, aggregate, field, opts \\ []) do
assert_tenant(queryable, opts)
@repo.aggregate(queryable, aggregate, field, coerce_prefix(opts))
end
@insert_all_error """
For insert_all
- For tenanted tables
- Your first parameter must be a tuple with the prefix, and the table name
- OR
- pass in the 'prefix' value in 'opts'
- **Note**
- Your first parameter may not be the string name of the table, because we can't
check the associated model to see if it requires a tenant.
"""
def insert_all(schema_or_source, entries, opts \\ [])
def insert_all({nil, source} = schema_or_source, entries, opts) do
if requires_tenant?(source) do
raise TenantMissingError, message: @insert_all_error
end
@repo.insert_all(schema_or_source, entries, coerce_prefix(opts))
end
def insert_all({_prefix, _source} = schema_or_source, entries, opts), do: @repo.insert_all(schema_or_source, entries, coerce_prefix(opts))
def insert_all(schema_or_source, _entries, _opts) when is_binary(schema_or_source), do: raise TenantMissingError, message: @insert_all_error
def insert_all(schema_or_source, entries, [prefix: prefix] = opts) when is_atom(schema_or_source) and not is_nil(prefix) do
@repo.insert_all(schema_or_source, entries, coerce_prefix(opts))
end
def insert_all(schema_or_source, entries, opts) when is_atom(schema_or_source) do
if requires_tenant?(schema_or_source) do
raise TenantMissingError, message: @insert_all_error
end
@repo.insert_all(schema_or_source, entries, coerce_prefix(opts))
end
def update_all(queryable, updates, opts \\ []) do
assert_tenant(queryable, opts)
@repo.update_all(queryable, updates, coerce_prefix(opts))
end
def delete_all(queryable, opts \\ []) do
assert_tenant(queryable, opts)
@repo.delete_all(queryable, coerce_prefix(opts))
end
def insert(struct, opts \\ []) do
assert_tenant(struct, opts)
@repo.insert(struct, coerce_prefix(opts))
end
def update(struct, opts \\ []) do
assert_tenant(struct, opts)
@repo.update(struct, coerce_prefix(opts))
end
def insert_or_update(changeset, opts \\ []) do
assert_tenant(changeset, opts)
@repo.insert_or_update(changeset, coerce_prefix(opts))
end
def delete(struct, opts \\ []) do
assert_tenant(struct, opts)
@repo.delete(struct, coerce_prefix(opts))
end
def insert!(struct, opts \\ []) do
assert_tenant(struct, opts)
@repo.insert!(struct, coerce_prefix(opts))
end
def update!(struct, opts \\ []) do
assert_tenant(struct, opts)
@repo.update!(struct, coerce_prefix(opts))
end
def insert_or_update!(changeset, opts \\ []) do
assert_tenant(changeset, opts)
@repo.insert_or_update!(changeset, coerce_prefix(opts))
end
def delete!(struct, opts \\ []) do
assert_tenant(struct, opts)
@repo.delete!(struct, coerce_prefix(opts))
end
defp assert_tenant(_, [prefix: prefix]) when not is_nil(prefix), do: nil
defp assert_tenant(%Ecto.Changeset{} = changeset, opts) do
assert_tenant(changeset.data, opts)
end
defp assert_tenant(%{__meta__: _} = model, _) do
if requires_tenant?(model) && !has_prefix?(model) do
raise TenantMissingError, message: "No tenant specified in #{model.__struct__}"
end
end
defp assert_tenant([], _), do: nil
defp assert_tenant([ %{__meta__: _} = model| _tail], opts), do: assert_tenant(model, opts)
defp assert_tenant(queryable, _) do
query = Ecto.Queryable.to_query(queryable)
if requires_tenant?(query) && !has_prefix?(query) do
raise TenantMissingError, message: "No tenant specified in #{get_model_from_query(query)}"
end
end
defp coerce_prefix([prefix: prefix]=opts) do
Keyword.put(opts, :prefix, schema_name(prefix))
end
defp coerce_prefix(opts), do: opts
defp has_prefix?(%{__meta__: _} = model) do
if Ecto.get_meta(model, :prefix), do: true, else: false
end
defp has_prefix?(%{prefix: nil}), do: false
defp has_prefix?(%{prefix: _}), do: true
defp get_model_from_query(%{from: {_, model}}), do: model
defp requires_tenant?(%{from: {_, model}}), do: model not in @untenanted
defp requires_tenant?(%{__struct__: model}), do: model not in @untenanted
defp requires_tenant?(model), do: model not in @untenanted
end
end
def new_tenant(tenant) do
create_schema(tenant)
Ecto.Migrator.run(get_repo(), tenant_migrations_path(get_repo()), :up, [prefix: schema_name(tenant), all: true])
end
def create_schema(tenant) do
schema = schema_name(tenant)
case get_repo().__adapter__ do
Ecto.Adapters.Postgres -> Ecto.Adapters.SQL.query(get_repo(), "CREATE SCHEMA \"#{schema}\"", [])
Ecto.Adapters.MySQL -> Ecto.Adapters.SQL.query(get_repo(), "CREATE DATABASE #{schema}", [])
end
end
def drop_tenant(tenant) do
schema = schema_name(tenant)
case get_repo().__adapter__ do
Ecto.Adapters.Postgres -> Ecto.Adapters.SQL.query(get_repo(), "DROP SCHEMA \"#{schema}\" CASCADE", [])
Ecto.Adapters.MySQL -> Ecto.Adapters.SQL.query(get_repo(), "DROP DATABASE #{schema}", [])
end
end
def list_tenants do
statement = case get_repo().__adapter__ do
Ecto.Adapters.Postgres ->
"SELECT schema_name FROM information_schema.schemata"
Ecto.Adapters.MySQL ->
"SHOW DATABASES LIKE '" <> get_prefix() <> "%'"
Ecto.Adapters.SQL ->
""
end
get_repo()
|> Ecto.Adapters.SQL.query!(statement)
|> Map.fetch!(:rows)
|> Enum.flat_map(&(&1))
|> Enum.filter(&(String.starts_with?(&1, get_prefix())))
end
def get_repo do
case get_env(:tenantex, Tenantex)[:repo] do
nil -> Mix.Ecto.parse_repo([]) |> hd
repo -> repo
end
end
end
|
lib/tenantex/repo.ex
| 0.594904 | 0.539347 |
repo.ex
|
starcoder
|
defmodule Witai do
alias Witai.Client, as: Client
@base_url "https://api.wit.ai"
@version "20201023"
def message("") do
{:noreply, %{}}
end
@spec message(binary) :: {atom, %{}}
def message(term) when is_binary(term) do
%{
"entities" => entities,
"intents" => intents,
"text" => _text,
"traits" => _traits
} = Client.get "#{ @base_url }/message?v=#{ @version }&q=#{ Client.encode_message(term) }"
{
extract_intent(intents),
extract_entities(entities)
}
end
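# For illustration (a sketch; the intent and entity names depend entirely on
# how the Wit.ai app is trained):
#
#   Witai.message("I want to buy a bread")
#   #=> {:buy_bread, %{"location" => "sfo"}}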
defp extract_intent(intents) when is_list(intents) do
intents
|> Enum.filter(fn (%{"id" => _, "name" => _name, "confidence" => confidence})-> confidence > 0.5 end)
|> Enum.at(0)
|> to_atom
end
defp to_atom(%{"id" => _, "name" => name, "confidence" => _}) do
String.to_existing_atom(name)
end
defp to_atom(nil) do
:no_intent
end
defp extract_entities(entities) when is_map(entities) do
entities
|> Map.values
|> List.flatten
|> build_entity_map
end
defp build_entity_map(entities) when is_list(entities) do
build_entity_map(entities, 0, %{})
end
defp build_entity_map(entities, index, result) when is_list(entities) and is_integer(index) and is_map(result) do
case Enum.at(entities, index) do
nil -> result
entity ->
new_result = Map.put(result, Map.get(entity, "name"), Map.get(entity, "value"))
build_entity_map(entities, index + 1, new_result)
end
end
# Intents
@spec create_intent(binary) :: map
def create_intent(name) when is_binary(name) do
Client.post "#{ @base_url }/intents?v=#{ @version }", %{"name" => name}
end
@spec create_intents(list) :: list
def create_intents(names) when is_list(names) do
names
|> Enum.map(fn (name)-> create_intent(name) end)
end
@spec delete_intent(binary) :: :ok | :fail
def delete_intent(name) when is_binary(name) do
Client.delete "#{ @base_url }/intents/#{ name }?v=#{ @version }"
end
@spec get_intent(binary) :: map
def get_intent(name) when is_binary(name) do
Client.get "#{ @base_url }/intents/#{ name }?v=#{ @version }"
end
# Entities
@spec get_entities :: list
def get_entities() do
Client.get "#{ @base_url }/entities?v=#{ @version }"
end
@spec get_entity(binary) :: map
def get_entity(name) when is_binary(name) do
Client.get "#{ @base_url }/entities/#{ name }?v=#{ @version }"
end
@doc """
### Request body:
```
%{
"name" => "entity_name",
"roles" => [],
"lookups" => [
"free-text",
"keywords"
]
}
```
"""
@spec create_entity(map) :: map
def create_entity(req_body) when is_map(req_body) do
Client.post "#{ @base_url }/entities?v=#{ @version }", req_body
end
@spec delete_entity(binary) :: :ok | :fail
def delete_entity(name) when is_binary(name) do
Client.delete "#{ @base_url }/entities/#{ name }?v=#{ @version }"
end
@spec update_entity(binary, map) :: :ok | :fail
def update_entity(name, req_body) when is_binary(name) and is_map(req_body) do
Client.put "#{ @base_url }/entities/#{ name }?v=#{ @version }", req_body
end
# Utterances
@spec get_utterances(integer) :: list
def get_utterances(limit) do
Client.get "#{ @base_url }/utterances?v=#{ @version }&limit=#{ to_string(limit) }"
end
@spec get_utterances(integer, integer) :: list
def get_utterances(limit, offset) do
Client.get "#{ @base_url }/utterances?v=#{ @version }&limit=#{ to_string(limit) }&offset=#{ offset }"
end
@spec get_utterances(integer, integer, list) :: list
def get_utterances(limit, offset, intents) do
intents_str = Enum.join intents, ","
Client.get "#{ @base_url }/utterances?v=#{ @version }&limit=#{ to_string(limit) }&offset=#{ offset }&intents=#{ intents_str }"
end
@doc """
### Request body:
```
[
%{
"text" => "I want to buy a bread",
"intent" => "buy_bread",
"entities" => [
%{
"entity" => "wit$location:to",
"start" => 17,
"end" => 20,
"body" => "sfo",
"entities" => []
}
],
"traits" => []
}
]
```
"""
@spec create_utterances(list) :: map
def create_utterances(list) when is_list(list) do
Client.post "#{ @base_url }/utterances?v=#{ @version }", list
end
@doc """
### Request body:
```
[
%{
"text" => "I want to buy some bread"
}
]
```
"""
@spec delete_utterances(list) :: :ok | :fail
def delete_utterances(list) when is_list(list) do
Client.delete "#{ @base_url }/utterances?v=#{ @version }", list
end
end
|
lib/witai.ex
| 0.722233 | 0.562927 |
witai.ex
|
starcoder
|
defmodule Cuid do
@moduledoc """
Collision-resistant ids.
Usage:
```elixir
Cuid.generate()
```
"""
@opaque state :: {binary(), :ets.tid()}
@base 36
@discrete_values @base * @base * @base * @base
@max_discrete_value @discrete_values - 1
@two_discrete_values @discrete_values * @discrete_values
@max_two_discrete_value @two_discrete_values - 1
@doc """
Generates and returns a new CUID.
"""
@spec generate() :: String.t()
@spec generate(state) :: String.t()
def generate({fingerprint, table} \\ global_state()) do
count = :ets.update_counter(table, :counter, {2, 1, @max_discrete_value, 0})
IO.iodata_to_binary([
?c,
timestamp(),
format_counter(count),
fingerprint,
random_block()
])
end
@doc """
Gets the global state.
If you're generating a lot of IDs in the same process, fetch this once and pass it to `generate/1` to avoid re-fetching the state on each call.
"""
@spec global_state :: state
def global_state() do
:persistent_term.get(__MODULE__)
end
@doc """
Creates a new generator state.
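The result is typically stored under the `Cuid` key in `:persistent_term`
so that `generate/1` can find it via `global_state/0`; a minimal sketch
(assuming this runs once at application start):
    :persistent_term.put(Cuid, Cuid.new())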
"""
def new() do
fingerprint = get_fingerprint()
tab =
:ets.new(__MODULE__, [:public, :set, {:read_concurrency, true}, {:write_concurrency, true}])
:ets.insert(tab, {:counter, 0})
{fingerprint, tab}
end
## Helpers
defp format_counter(num) do
num
|> Integer.to_charlist(@base)
|> zero_pad_down()
end
defp timestamp do
microseconds = :os.system_time(:microsecond)
rem(microseconds, @two_discrete_values)
|> Integer.to_charlist(@base)
|> zero_pad_down_big()
end
defp random_block do
@max_two_discrete_value
|> :rand.uniform()
|> Integer.to_charlist(@base)
|> zero_pad_down_big()
end
@operator @base * @base
defp get_fingerprint do
pid = rem(String.to_integer(System.get_pid()), @operator) * @operator
hostname = to_charlist(:net_adm.localhost())
hostid = rem(Enum.sum(hostname) + Enum.count(hostname) + @base, @operator)
(pid + hostid)
|> Integer.to_charlist(@base)
|> zero_pad_down()
end
@compile {:inline, zero_pad_down: 1, zero_pad_down_big: 1, downcase_num: 1}
defp zero_pad_down(charlist) do
case charlist do
[a, b, c, d] ->
[downcase_num(a), downcase_num(b), downcase_num(c), downcase_num(d)]
[a, b, c] ->
[?0, downcase_num(a), downcase_num(b), downcase_num(c)]
[a, b] ->
["00", downcase_num(a), downcase_num(b)]
[a] ->
["000", downcase_num(a)]
end
end
defp zero_pad_down_big(charlist) do
case charlist do
[a, b, c, d, e, f, g, h] ->
[
downcase_num(a),
downcase_num(b),
downcase_num(c),
downcase_num(d),
downcase_num(e),
downcase_num(f),
downcase_num(g),
downcase_num(h)
]
[a, b, c, d, e, f, g] ->
[
?0,
downcase_num(a),
downcase_num(b),
downcase_num(c),
downcase_num(d),
downcase_num(e),
downcase_num(f),
downcase_num(g)
]
[a, b, c, d, e, f] ->
[
"00",
downcase_num(a),
downcase_num(b),
downcase_num(c),
downcase_num(d),
downcase_num(e),
downcase_num(f)
]
[a, b, c, d, e] ->
[
"000",
downcase_num(a),
downcase_num(b),
downcase_num(c),
downcase_num(d),
downcase_num(e)
]
[a, b, c, d] ->
["0000", downcase_num(a), downcase_num(b), downcase_num(c), downcase_num(d)]
[a, b, c] ->
["00000", downcase_num(a), downcase_num(b), downcase_num(c)]
[a, b] ->
["000000", downcase_num(a), downcase_num(b)]
[a] ->
["0000000", downcase_num(a)]
end
end
@downcase_index ?a - ?A
defp downcase_num(letter) when letter > ?9, do: letter + @downcase_index
defp downcase_num(number), do: number
end
|
lib/cuid.ex
| 0.750187 | 0.775775 |
cuid.ex
|
starcoder
|
defmodule KB do
import Utils
#---- Knowledge base
@doc """
Initializes an empty knowledge base.
"""
def init(), do: []
@doc """
Returns all facts in the knowledge base.
"""
def facts(kb), do: Enum.filter(kb, fn thing -> elem(thing, 0) == :fact end)
@doc """
Returns all rules in the knowledge base.
"""
def rules(kb), do: Enum.filter(kb, fn thing -> elem(thing, 0) == :rule end)
@doc """
Checks if the given fact is in the knowledge base.
"""
def kb_fact?(kb, fact), do: Enum.member?(facts(kb), fact)
@doc """
Returns all facts matching the given word.
"""
def lookup_fact(kb, word) do
Enum.filter(facts(kb), fn {:fact, {:predicate, fact_word, _}} -> fact_word == word end)
end
@doc """
Returns all rules matching the given word.
"""
def lookup_rule(kb, word) do
Enum.filter(rules(kb), fn {:rule, {:predicate, rule_word, _}, _} -> rule_word == word end)
end
@doc """
Returns all rules matching the given query.
"""
def matching_rules(kb, {:predicate, word, subjects}) do
Enum.filter(
rules(kb),
fn rule ->
{:rule, {:predicate, rule_word, rule_subjects}, _} = rule
rule_word == word && can_unify?(rule_subjects, subjects)
end
)
end
@doc """
Checks if the given word matches a fact in the knowledge base.
"""
def matches_fact?(kb, word) do
Enum.any?(facts(kb), fn {:fact, {:predicate, fact_word, _}} -> fact_word == word end)
end
@doc """
Checks if the given word matches a rule in the knowledge base.
"""
def matches_rule?(kb, word) do
Enum.any?(rules(kb), fn {:rule, {:predicate, rule_word, _}, _} -> rule_word == word end)
end
@doc """
Returns all possible subjects for a predicate with the given word.
(More precisely, it returns one list of subjects per matching fact.)
"""
def possible_subjects(kb, word) do
List.foldl(
facts(kb),
[],
fn({:fact, {:predicate, fact_word, fact_subjects}}, subjects) ->
if fact_word == word, do: subjects ++ [fact_subjects], else: subjects
end
)
end
#---- Predicate
@doc """
Constructs a predicate with one or more subjects.
E.g. cityOf(toronto, canada)
-> word: cityOf
-> subjects: toronto, canada
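In code (return value shown for illustration):
    predicate("cityOf", ["toronto", "canada"])
    #=> {:predicate, "cityOf", ["toronto", "canada"]}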
"""
def predicate(word, subjects), do: {:predicate, word, subjects}
def replace_vars_with_const(predicates, var, const) do
List.foldl(
predicates,
[],
fn({:predicate, word, subjects}, new_predicates) ->
new_subjects = List.foldl(
subjects,
[],
fn(subject, new_subjects) ->
new_subjects ++ [(if subject == var, do: const, else: subject)]
end
)
new_predicates ++ [{:predicate, word, new_subjects}]
end
)
end
#---- Fact
@doc """
Constructs a fact consisting of a predicate.
"""
def fact(predicate), do: {:fact, predicate}
#---- Rule
@doc """
Constructs a rule with a consequent (head predicate)
and one or more antecedents (body predicates).
E.g. mortal(X) :- man(X).
-> consequent: mortal(X)
-> antecedent: man(X)
"""
def rule(head, body), do: {:rule, head, body}
@doc """
Returns the antecedents of the given rule.
"""
def antecedents_of_rule({:rule, _, antecedents}), do: antecedents
@doc """
Returns the antecedents of the given rules.
"""
def antecedents_of_rules(rules) do
List.foldl(
rules,
[],
fn(rule, all_antecedents) ->
all_antecedents ++ antecedents_of_rule(rule)
end
)
end
#---- Unification
@doc """
Checks if two lists can be unified.
"""
def can_unify?(list1, list2), do: unify(list1, list2) != :cannot_unify
@doc """
Attempts to unify two lists.
If successful, returns the unified list along with the variable resolutions.
Otherwise, returns :cannot_unify.
(Note: invalid variable or constant names should be already filtered out.)
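For example (illustrative; assumes `Utils.starts_with_lowercase?/1` and
`starts_with_uppercase?/1` behave as their names suggest, so lowercase
names are constants and uppercase names are variables):
    unify(["toronto", "X"], ["toronto", "canada"])
    #=> {["toronto", "canada"], [{"X", "canada"}]}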
"""
def unify(list1, list2) do
if length(list1) != length(list2) do
:cannot_unify
else
{unified, matches} = List.foldl(
List.zip([list1, list2]),
{[], []},
fn({s1, s2}, {unified, matches}) ->
if starts_with_lowercase?(s1) do
if starts_with_lowercase?(s2) do
if s1 != s2 do
{unified ++ [:cannot_unify], matches}
else
# Unified two constants
{unified ++ [s1], matches}
end
else
# Unified constant (s1) and variable (s2)
{unified ++ [s1], matches ++ [{s2, s1}]}
end
else
if starts_with_uppercase?(s2) do
if s1 != s2 do
{unified ++ [:cannot_unify], matches}
else
# Unified two variables
{unified ++ [s2], matches}
end
else
# Unified variable (s1) and constant (s2)
{unified ++ [s2], matches ++ [{s1, s2}]}
end
end
end
)
if Enum.member?(unified, :cannot_unify) do
:cannot_unify
else
{unified, matches}
end
end
end
end
|
lib/kb.ex
| 0.671686 | 0.477493 |
kb.ex
|
starcoder
|
defmodule Dicon do
@moduledoc """
Simple release deliverer for Elixir.
Dicon gets most of the information needed to deploy and manage releases from
the configuration of the `:dicon` application. For example, in your
application's configuration (`my_app/config/config.exs`):
config :dicon,
target_dir: "/home/deploy/my_app"
## Configuration options
* `:otp_app` - an atom that specifies the name of the application being
deployed.
* `:target_dir` - a binary that specifies the directory where the release
tarball will be extracted into.
* `:hosts` - a list of `host_name` atoms that specifies which
servers the release should be deployed to. Each host should be then
configured under the `:dicon` application. See the "Configuration for hosts"
section below.
* `:executor` - a module that will be used to execute commands on servers.
By default, it's `Dicon.SecureShell`.
### Configuration for hosts
Each host listed in the `:hosts` configuration option mentioned above can be
configured under the `:dicon` application. For example, take this configuration:
config :dicon,
hosts: [:app01, :app02]
Now the `:app01` and `:app02` hosts can be configured like this:
config :dicon, :app01,
authority: "<EMAIL>"
These are the supported host configuration options:
* `:authority` - (binary) an "authority" according to [this
RFC](https://tools.ietf.org/html/rfc3986#section-3.2), that is, a binary with
an optional userinfo followed by `@`, a hostname, and an optional port
preceded by `:`. For example, `"me:<EMAIL>:22"`.
* `:os_env` - (map) a map of environment variable name (as a binary) to
value (as a binary). These environment variables will be used when running
commands on the target host.
* `:apps_env` - (keyword list) a keyword list of application to configuration
that can be used to override the configuration for some applications on
the target host.
### Configuration for executors
Each executor can be configured differently; to configure an executor, specify
the configuration for that executor under the configuration for the `:dicon`
application.
config :dicon, Dicon.SecureShell,
dir: "..."
"""
@doc false
def config(key, opts \\ [])
def config(:hosts, opts) do
only = Keyword.get_values(opts, :only) |> Enum.map(&String.to_atom/1)
skip = Keyword.get_values(opts, :skip) |> Enum.map(&String.to_atom/1)
hosts = Application.fetch_env!(:dicon, :hosts)
assert_filtered_hosts_exist(hosts, only ++ skip)
Enum.filter(hosts, hosts_selector(only, skip))
end
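# For example (illustrative; assumes `config :dicon, hosts: [:app01, :app02]`
# and that CLI-style `:only`/`:skip` options arrive as strings):
#
#     Dicon.config(:hosts, only: "app01")
#     #=> [:app01]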
def config(key, _opts) do
Application.fetch_env!(:dicon, key)
end
def host_config(name) do
Application.fetch_env!(:dicon, name)
end
defp assert_filtered_hosts_exist(hosts, filtered_hosts) do
if unknown_host = Enum.find(filtered_hosts, &not(&1 in hosts)) do
Mix.raise "unknown host: #{inspect(Atom.to_string(unknown_host))}"
end
end
defp hosts_selector([], skip) do
&not(&1 in skip)
end
defp hosts_selector(only, skip) do
only = only -- skip
&(&1 in only)
end
end
|
lib/dicon.ex
| 0.880855 | 0.515559 |
dicon.ex
|
starcoder
|
defmodule Dicon.Executor do
@moduledoc """
Behaviour for executors.
This behaviour specifies the callbacks that executors must implement. Look at
the documentation for the `Dicon` module for more information about executors.
"""
alias Dicon.SecureShell
@type conn :: identifier | struct
@type t :: %__MODULE__{executor: module, conn: conn}
defstruct [:executor, :conn]
@doc """
Connects to the given authority, returning a term that identifies the
connection.
"""
@callback connect(authority :: binary) :: {:ok, conn} | {:error, binary}
@doc """
Executes the given `command` on the given connection, writing the output of
`command` to `device`.
"""
@callback exec(conn, command :: charlist, device :: atom | pid) :: :ok | {:error, binary}
@callback write_file(conn, target :: charlist, content :: iodata, :write | :append) ::
:ok | {:error, binary}
@doc """
Copies the local file `source` over to the destination `target` on the given
connection.
"""
@callback copy(conn, source :: charlist, target :: charlist) :: :ok | {:error, binary}
@doc """
Connects to authority.
The connection happens through the executor configured in the configuration
for the `:dicon` application; see the documentation for the `Dicon` module for
more information.
## Examples
%Dicon.Executor{} = Dicon.Executor.connect("meg:<EMAIL>")
"""
@spec connect(binary) :: t
def connect(authority) do
executor = Application.get_env(:dicon, :executor, SecureShell)
case executor.connect(authority) do
{:ok, conn} ->
Mix.shell().info("Connected to #{authority}")
%__MODULE__{executor: executor, conn: conn}
{:error, reason} ->
raise_error(executor, reason)
end
end
@doc """
Executes the given `command` on the connection in `state`.
## Examples
state = Dicon.Executor.connect("meg:<EMAIL>")
Dicon.Executor.exec(state, "ls -la")
#=> :ok
"""
def exec(%__MODULE__{} = state, command, device \\ Process.group_leader()) do
Mix.shell().info("==> EXEC #{command}")
run(state, :exec, [command, device])
end
@doc """
Copies the `source` file on the local machine to the `target` on the remote
machine on the connection in `state`.
## Examples
state = Dicon.Executor.connect("meg:<EMAIL>")
Dicon.Executor.copy(state, "hello.txt", "uploaded-hello.txt")
#=> :ok
"""
def copy(%__MODULE__{} = state, source, target) do
Mix.shell().info("==> COPY #{source} #{target}")
run(state, :copy, [source, target])
end
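@doc """
Writes `content` to the `target` file on the connection in `state`.
`mode` may be `:write` or `:append`.
"""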
def write_file(%__MODULE__{} = state, target, content, mode \\ :write)
when mode in [:write, :append] and (is_binary(content) or is_list(content)) do
Mix.shell().info("==> WRITE #{target}")
run(state, :write_file, [target, content, mode])
end
defp run(%{executor: executor, conn: conn}, fun, args) do
case apply(executor, fun, [conn | args]) do
{:error, reason} -> raise_error(executor, reason)
:ok -> :ok
end
end
defp raise_error(executor, reason) when is_binary(reason) do
Mix.raise("(in #{inspect(executor)}) " <> reason)
end
end
|
lib/dicon/executor.ex
| 0.880508 | 0.489137 |
executor.ex
|
starcoder
|
defmodule EctoTemp.Factory do
@doc """
Inserts values into a temporary table.
## Params:
* struct (optional) - a struct defining the schema used by the data migration.
* table_name
* attrs (optional) - a keyword list of attributes to insert
## Notes:
* If not given a struct, and the temporary table has a primary key, then we return the `id` of the inserted row.
* If given a struct, and the temporary table has a primary key, then we do a `Repo.get` using the `id` of the
inserted row, and return the result as a struct.
* If the temporary table has no primary key, then we return the list of values returned by Postgres. This list
appears to follow the order in which the columns are defined on the temp table.
## Examples
import EctoTemp.Factory
insert(:thing_with_no_primary_key) == []
insert(:thing_with_no_primary_key, some_thing: "hi") == ["hi"]
insert(:thing_with_primary_key) == 1
insert(:thing_with_primary_key, some_thing: "hi") == 2
%MyDataMigration.Cycle{id: 1} = insert(MyDataMigration.Cycle, :cycles, started_at: ~N[2020-02-03 00:00:00])
"""
defmacro insert(struct_or_table, table_or_params \\ nil, params \\ []) do
quote bind_quoted: [struct_or_table: struct_or_table, table_or_params: table_or_params, params: params] do
cond do
is_atom(struct_or_table) and is_nil(table_or_params) and is_list(params) ->
EctoTemp.Helpers.insert_temporary(
@repo,
@ecto_temporary_tables,
nil,
struct_or_table,
params
)
is_atom(struct_or_table) and is_atom(table_or_params) and is_list(params) ->
EctoTemp.Helpers.insert_temporary(
@repo,
@ecto_temporary_tables,
struct_or_table,
table_or_params,
params
)
is_atom(struct_or_table) and is_list(table_or_params) ->
EctoTemp.Helpers.insert_temporary(
@repo,
@ecto_temporary_tables,
nil,
struct_or_table,
table_or_params
)
end
end
end
end
|
lib/ecto_temp/factory.ex
| 0.747063 | 0.640369 |
factory.ex
|
starcoder
|
defmodule Harnais.Form do
@moduledoc ~S"""
Functions for Testing Quoted Forms
## Documentation Terms
In the documentation these terms, usually in *italics*, are used to mean the same thing.
### *form* and *forms*
A *form* is a quoted form (`Macro.t`). A *forms* is a list of zero, one or more *form*s
### *opts*
An *opts* is a `Keyword` list.
## Bang and Query Functions
All functions have a bang peer.
`harnais_form/1`, `harnais_forms/1` and all compare functions
(e.g. `harnais_form_compare/3`) have a query peer
(e.g. `harnais_form_compare?/3`).
These functions do not appear in the function list to reduce clutter. (They are `@doc false`.)
"""
use Plymio.Codi
alias Harnais.Error, as: HEE
alias Harnais.Form.Schatten, as: HAS
alias Harnais.List, as: HUL
use Harnais.Error.Attribute
use Harnais.Form.Attribute
use Harnais.Form.Attribute.Schatten
@codi_opts [
{@plymio_codi_key_vekil, Plymio.Vekil.Codi.__vekil__()}
]
import Plymio.Fontais.Option,
only: [
opts_normalise: 1,
opts_validate: 1
]
import Plymio.Fontais.Form,
only: [
forms_edit: 2
]
@type ast :: Harnais.ast()
@type asts :: Harnais.asts()
@type form :: Harnais.form()
@type forms :: Harnais.forms()
@type opts :: Harnais.opts()
@type error :: Harnais.error()
@harnais_form_message_text_form_compare_failed "form compare failed"
@doc ~S"""
`harnais_form_test_forms/2` takes a *forms* and optional
*opts*, and evaluates the *forms* after processing any
options.
It returns `{:ok, {answer, forms}}` if evaluation succeeds, else
`{:error, error}`.
The `forms` in the result will be the forms after any transforms have
been applied.
> This function calls `Harnais.Form.Schatten.produce_schatten/2` with common options prepended to any supplied *opts*. Its documentation should be read to understand the production process and allowed options.
## Examples
iex> harnais_form_test_forms(42)
{:ok, {42, 42}}
iex> quote(do: x = 42) |> harnais_form_test_forms
{:ok, {42, quote(do: x = 42)}}
iex> quote(do: x = 42) |> harnais_form_test_forms(
...> transform_opts: [transform: fn _ -> 42 end])
{:ok, {42, 42}}
iex> quote(do: x = 42) |> harnais_form_test_forms(
...> transform: [fn ast -> {ast, 42} end, fn {_ast,ndx} -> ndx end])
{:ok, {42, 42}}
iex> {:error, error} = quote(do: x = 42) |> harnais_form_test_forms(
...> transform: fn _ -> %{a: 1} end)
...> error |> Harnais.Error.export_exception
{:ok, [error: [[m: "form invalid, got: %{a: 1}"]]]}
iex> quote(do: x = 42) |> harnais_form_test_forms(
...> postwalk: fn _ -> 43 end)
{:ok, {43, 43}}
iex> quote(do: x = 42) |> harnais_form_test_forms(
...> postwalk: fn snippet ->
...> case snippet do
...> {:x, [], module} when is_atom(module) -> quote(do: a)
...> # passthru
...> x -> x
...> end
...> end)
{:ok, {42, quote(do: a = 42)}}
iex> quote(do: x = 42) |> harnais_form_test_forms(
...> replace_vars: [x: quote(do: a)])
{:ok, {42, quote(do: a = 42)}}
iex> quote(do: x = var!(a)) |> harnais_form_test_forms(
...> eval_binding: [b: 99],
...> replace_vars: [a: quote(do: b)])
{:ok, {99, quote(do: x = var!(b))}}
iex> quote(do: x = a) |> harnais_form_test_forms(
...> eval_binding: [b: 99],
...> replace_vars: [a: Macro.var(:b, nil)])
{:ok, {99, quote(do: x = unquote(Macro.var(:b, nil)))}}
iex> {:error, error} = harnais_form_test_forms(%{a: 1})
...> error |> Harnais.Error.export_exception
{:ok, [error: [[m: "form invalid, got: %{a: 1}"]]]}
Bang examples:
iex> harnais_form_test_forms!(42)
{42, 42}
iex> quote(do: x = 42) |> harnais_form_test_forms!(
...> transform: fn _ -> %{a: 1} end)
** (ArgumentError) form invalid, got: %{a: 1}
iex> quote(do: x = 42) |> harnais_form_test_forms!(
...> postwalk: fn _ -> 42 end)
{42, 42}
iex> quote(do: x = a) |> harnais_form_test_forms!(
...> eval_binding: [b: 99],
...> replace_vars: [a: Macro.var(:b, nil)])
{99, quote(do: x = unquote(Macro.var(:b, nil)))}
iex> harnais_form_test_forms!(%{a: 1})
** (ArgumentError) form invalid, got: %{a: 1}
"""
@spec harnais_form_test_forms(forms, opts) :: {:ok, any} | {:error, error}
def harnais_form_test_forms(forms, opts \\ [])
def harnais_form_test_forms(nil, _opts) do
{:ok, nil}
end
def harnais_form_test_forms(forms, opts) do
with {:ok, opts} <- opts |> opts_normalise,
{:ok, {values, %HAS{}}} <-
forms |> HAS.produce_schatten(@harnais_form_test_forms_default_opts ++ opts) do
{:ok, values |> Keyword.values() |> List.to_tuple()}
else
{:error, %{__exception__: true}} = result -> result
end
end
@doc ~S"""
`harnais_form_transform_forms/2` takes a *forms* together with optional
*opts* and transforms the *forms* returning `{:ok, forms}` or `{:error, error}`
Note the transformed *forms* do *not* have to be quoted forms.
## Transformations
The function is a convenience wrapper for `Plymio.Fontais.Form.forms_edit/2` and the transforms include :
1. `:postwalk`
1. `:prewalk`
1. `:transform`
1. `:replace_vars`
1. `:rename_vars`
1. `:rename_atoms`
1. `:rename_funs`
## Examples
iex> harnais_form_transform_forms(42)
{:ok, [42]}
iex> quote(do: x = 42) |> harnais_form_transform_forms
{:ok, [quote(do: x = 42)]}
iex> quote(do: x = 42) |> harnais_form_transform_forms(
...> transform: fn _ -> 42 end)
{:ok, [42]}
iex> quote(do: x = 42) |> harnais_form_transform_forms(
...> transform: [fn form -> {form, 42} end, fn {_form,ndx} -> ndx end])
{:ok, [42]}
iex> quote(do: x = 42) |> harnais_form_transform_forms(
...> transform: fn _ -> %{a: 1} end)
{:ok, [%{a: 1}]}
iex> quote(do: x = 42) |> harnais_form_transform_forms(
...> postwalk: fn _ -> 42 end)
{:ok, [42]}
The next two examples show the `x` var being renamed. The first
uses an explicit `:postwalk` while the second uses `:rename_vars`.
iex> quote(do: x = 42) |> harnais_form_transform_forms(
...> postwalk: fn snippet ->
...> case snippet do
...> {:x, [], module} when is_atom(module) -> quote(do: a)
...> # passthru
...> x -> x
...> end
...> end)
{:ok, [quote(do: a = 42)]}
iex> quote(do: x = 42) |> harnais_form_transform_forms(
...> replace_vars: [x: quote(do: a)])
{:ok, [quote(do: a = 42)]}
The *opts*, if any, are validated:
iex> {:error, error} = quote(do: x = 42) |> harnais_form_transform_forms(:opts_not_keyword)
...> error |> Exception.message
"transform failed, got: opts invalid, got: :opts_not_keyword"
The *forms* are validated:
iex> {:error, error} = %{a: 1} |> harnais_form_transform_forms
...> error |> Exception.message
"forms invalid, got invalid indices: [0]"
Bang examples:
iex> harnais_form_transform_forms!(42)
[42]
iex> quote(do: x = 42) |> harnais_form_transform_forms!(
...> transform: [fn ast -> {ast, 42} end, fn {_ast,ndx} -> ndx end])
[42]
iex> harnais_form_transform_forms!(quote(do: x = 42),
...> replace_vars: [x: quote(do: a)])
[quote(do: a = 42)]
iex> quote(do: x = 42) |> harnais_form_transform_forms!(
...> replace_vars: [x: quote(do: a)])
[quote(do: a = 42)]
iex> {:error, error} = harnais_form_transform_forms(%{a: 1})
...> error |> Exception.message
"forms invalid, got invalid indices: [0]"
iex> {:error, error} = harnais_form_transform_forms(quote(do: x = 42), :opts_not_keyword)
...> error |> Exception.message
"transform failed, got: opts invalid, got: :opts_not_keyword"
"""
@spec harnais_form_transform_forms(forms, opts) :: {:ok, any} | {:error, error}
def harnais_form_transform_forms(forms, opts \\ [])
def harnais_form_transform_forms(forms, []) do
forms |> harnais_forms
end
def harnais_form_transform_forms(forms, opts) do
with {:ok, opts} <- opts |> opts_validate,
{:ok, forms} <- forms |> harnais_forms,
{:ok, _forms} = result <- forms |> forms_edit(opts) do
result
else
{:error, %{__exception__: true} = error} ->
HEE.new_error_result(m: "transform failed", v: error)
end
end
@doc_harnais_form_format_form ~S"""
`harnais_form_format_form/2` takes either a *form* or string as its argument, together with optional *opts*.
A form is converted first to text using `Macro.to_string/1`, reduced
(`Plymio.Fontais.Form.forms_reduce/1`) and then the text is passed
through the Elixir code formatter (`Code.format_string!/2`),
together with the *opts*.
It returns `{:ok, text}` if normalisation succeeds, else `{:error, error}`
## Examples
iex> harnais_form_format_form(42)
{:ok, "42"}
iex> harnais_form_format_form(:atom)
{:ok, ":atom"}
iex> harnais_form_format_form("string")
{:ok, "string"}
Quoted form:
iex> quote(do: Map.get( %{a: 1}, :a, 42)) |> harnais_form_format_form
{:ok, "Map.get(%{a: 1}, :a, 42)"}
Already text but "untidy":
iex> "Map.get( %{a: 1}, :a, 42) " |> harnais_form_format_form
{:ok, "Map.get(%{a: 1}, :a, 42)"}
"""
@doc_harnais_form_format_forms ~S"""
`harnais_form_format_forms/2` takes a *forms*, and optional *opts*,
and formats each *form* using `harnais_form_format_form/2` returning
`{:ok, texts}`.
## Examples
iex> [quote(do: x = x + 1),
...> quote(do: x = x * x ),
...> quote(do: x=x-1 )
...> ] |> harnais_form_format_forms
{:ok, ["x = x + 1", "x = x * x", "x = x - 1"]}
iex> [quote(do: x = x + 1),
...> quote(do: x = x * x),
...> quote(do: x = x - 1)
...> ] |> harnais_form_format_forms!
["x = x + 1", "x = x * x", "x = x - 1"]
"""
@doc ~S"""
`harnais_form_compare_texts/3` takes two arguments, either forms or strings, together with (optional) *opts*.
Each argument is normalised to text using `harnais_form_format_form/2` and then compared (`Kernel.==/2`).
It returns `{:ok, text}` if the compare succeeds, else `{:error, error}`
## Examples
iex> harnais_form_compare_texts(42, 42)
{:ok, "42"}
iex> harnais_form_compare_texts(
...> quote(do: Map.get( %{a: 1}, :a, 42)),
...> "Map.get(%{a: 1}, :a, 42)")
{:ok, "Map.get(%{a: 1}, :a, 42)"}
iex> harnais_form_compare_texts(
...> quote(do: Map.get(%{a: 1}, :a, 42)),
...> "Map.get( %{a: 1}, :a, 42)")
{:ok, "Map.get(%{a: 1}, :a, 42)"}
iex> {:error, error} = harnais_form_compare_texts(quote(do: x = 42), "x = 41")
...> error |> Exception.message
"form compare failed, reason=:mismatch, type=:arg, value1=x = 42, value2=x = 41"
iex> {:error, error} = harnais_form_compare_texts("x = 42", quote(do: x = 41))
...> error |> Harnais.Error.export_exception
{:ok, [error: [[m: "form compare failed", r: :mismatch, t: :arg, v1: "x = 42", v2: "x = 41"]]]}
Query examples:
iex> harnais_form_compare_texts?(42, 42)
true
iex> harnais_form_compare_texts?(
...> quote(do: (def f(x,y), do: x + y)),
...> "def(f(x, y)) do\n x + y\n end")
true
iex> harnais_form_compare_texts?(quote(do: x = 42), "x = 41")
false
iex> harnais_form_compare_texts?("x = 42", quote(do: x = 41))
false
Bang examples:
iex> harnais_form_compare_texts!(42, 42)
"42"
iex> harnais_form_compare_texts!(
...> quote(do: Map.get(%{a: 1}, :a, 42)),
...> "Map.get( %{a: 1}, :a, 42)")
"Map.get(%{a: 1}, :a, 42)"
iex> harnais_form_compare_texts!(
...> quote(do: (def f(x,y), do: x + y)),
...> "def(f(x, y)) do\n x + y\n end")
"def(f(x, y)) do\n x + y\nend"
iex> harnais_form_compare_texts!(quote(do: x = 42), "x = 41")
** (Harnais.Error) form compare failed, reason=:mismatch, type=:arg, value1=x = 42, value2=x = 41
"""
@spec harnais_form_compare_texts(any, any, opts) :: {:ok, binary} | {:error, error}
def harnais_form_compare_texts(actual_code, expect_code, opts \\ [])
def harnais_form_compare_texts(actual_code, expect_code, opts) do
with {:ok, actual_text} <- actual_code |> harnais_form_format_form(opts),
{:ok, expect_text} <- expect_code |> harnais_form_format_form(opts) do
case actual_text == expect_text do
true ->
{:ok, actual_text}
_ ->
HEE.new_error_result(
m: @harnais_form_message_text_form_compare_failed,
t: @harnais_error_value_field_type_arg,
r: @harnais_error_reason_mismatch,
v1: actual_text,
v2: expect_text
)
end
else
{:error, %{__exception__: true}} = result -> result
end
end
@doc ~S"""
`harnais_form_compare_forms/3` takes two arguments and optional *opts*.
If either argument is text (i.e binary), it performs a textual
comparison using `harnais_form_compare_texts/3` and returns `{:ok, first_argument}`
if the compare succeeds, else returns the `{:error, error}`.
If both arguments are forms, it runs `Macro.postwalk/3` on both,
collecting each form's `snippets` in the accumulator, and then calls
`Harnais.List.harnais_list_compare/2` to compare the two
accumulators returning `{:ok, first_argument}` else `{:error, error}`
from the list compare.
## Examples
Mixed form and text:
iex> harnais_form_compare_forms(42, "42")
{:ok, 42}
iex> harnais_form_compare_forms(
...> quote(do: Map.get( %{a: 1}, :a, 42)),
...> "Map.get(%{a: 1}, :a, 42)")
{:ok, quote(do: Map.get(%{a: 1}, :a, 42))}
iex> harnais_form_compare_forms(
...> quote(do: (def f(x,y), do: x + y)),
...> "def(f(x, y)) do\n x + y\n end")
{:ok, quote(do: (def f(x,y), do: x + y))}
Both forms:
iex> harnais_form_compare_forms(42, 42)
{:ok, 42}
iex> harnais_form_compare_forms(
...> quote(do: Map.get( %{a: 1}, :a, 42)),
...> quote(do: Map.get(%{a: 1}, :a, 42)))
{:ok, quote(do: Map.get(%{a: 1}, :a, 42))}
iex> {:error, error} = harnais_form_compare_forms(
...> quote(do: Map.get( %{a: 1}, :a, 42)),
...> quote(do: Map.get(%{a: 1}, :a, 41)))
...> error |> Exception.message
"form compare failed, reason=:mismatch, type=:value, location=9, value1=42, value2=41"
Note vars with the same name (`:x`) but in different modules (`ModA` vs `ModB`) will be caught:
iex> {:error, error} = harnais_form_compare_forms(
...> Macro.var(:x, ModA),
...> Macro.var(:x, ModB))
...> error |> Exception.message
"form compare failed, reason=:mismatch, type=:value, location=0, value1={:x, [], ModA}, value2={:x, [], ModB}"
Query examples:
iex> harnais_form_compare_forms?(
...> quote(do: Map.get( %{a: 1}, :a, 42)),
...> "Map.get(%{a: 1}, :a, 42)")
true
iex> harnais_form_compare_forms?(quote(do: x = 42), "x = 41")
false
iex> harnais_form_compare_forms?(
...> Macro.var(:x, ModA),
...> Macro.var(:x, ModB))
false
Bang examples:
iex> harnais_form_compare_forms!(42, "42")
42
iex> harnais_form_compare_forms!(quote(do: x = 42), "x = 41")
** (Harnais.Error) form compare failed, reason=:mismatch, type=:arg, value1=x = 42, value2=x = 41
iex> harnais_form_compare_forms!(
...> Macro.var(:x, ModA),
...> Macro.var(:x, ModB))
** (Harnais.Error) form compare failed, reason=:mismatch, type=:value, location=0, value1={:x, [], ModA}, value2={:x, [], ModB}
"""
@spec harnais_form_compare_forms(ast, any, opts) ::
{:ok, form} | {:ok, binary} | {:error, error}
def harnais_form_compare_forms(actual_code, expect_code, opts \\ [])
def harnais_form_compare_forms(actual_code, expect_code, opts)
when is_binary(actual_code) or is_binary(expect_code) do
# do a text compare
harnais_form_compare_texts(actual_code, expect_code, opts)
|> case do
{:ok, _} -> {:ok, actual_code}
x -> x
end
end
def harnais_form_compare_forms(actual_code, expect_code, opts) do
with {:ok, actual_form} <- actual_code |> harnais_form,
{:ok, expect_form} <- expect_code |> harnais_form do
actual_snippets =
actual_form
|> Macro.postwalk([], fn snippet, snippets -> {nil, [snippet | snippets]} end)
|> elem(1)
|> Enum.reverse()
expect_snippets =
expect_form
|> Macro.postwalk([], fn snippet, snippets -> {nil, [snippet | snippets]} end)
|> elem(1)
|> Enum.reverse()
case HUL.harnais_list_compare(actual_snippets, expect_snippets, opts) do
{:ok, _} ->
{:ok, actual_code}
{:error, %Harnais.Error{} = error} ->
{:error,
error
|> struct!([
{@harnais_error_field_message, @harnais_form_message_text_form_compare_failed}
])}
x ->
x
end
end
end
@doc_harnais_form ~S"""
`harnais_form/1` tests whether the argument is a quoted form and,
if true, returns `{:ok, form}` else returns `{:error, error}`.
(Delegated to `Harnais.Utility.form_validate/1`)
## Examples
iex> harnais_form(42)
{:ok, 42}
iex> harnais_form(:atom)
{:ok, :atom}
iex> {:error, error} = harnais_form(%{a: 1})
...> error |> Exception.message
"form invalid, got: %{a: 1}"
iex> Macro.escape(%{a: 1}) |> harnais_form
{:ok, Macro.escape(%{a: 1})}
Query examples:
iex> harnais_form?(42)
true
iex> harnais_form?(quote(do: x = 42))
true
iex> harnais_form?(%{a: 1})
false
Bang examples:
iex> harnais_form!(42)
42
iex> harnais_form!(quote(do: x = 42))
quote(do: x = 42)
iex> harnais_form!(%{a: 1})
** (ArgumentError) form invalid, got: %{a: 1}
"""
@doc_harnais_forms ~S"""
`harnais_forms/1` validates the *forms* returning `{:ok, forms}` if
all are valid, else `{:error, error}`.
(Delegated to `Harnais.Utility.forms_normalise/1`)
## Examples
iex> [1, 2, 3] |> harnais_forms
{:ok, [1, 2, 3]}
iex> 1 |> harnais_forms
{:ok, [1]}
iex> [1, {2, 2}, :three] |> harnais_forms
{:ok, [1, {2, 2}, :three]}
Query examples:
iex> [1, 2, 3] |> harnais_forms?
true
iex> [1, {2, 2}, :three] |> harnais_forms?
true
iex> [1, {2, 2, 2}, %{c: 3}] |> harnais_forms?
false
Bang examples:
iex> [1, 2, 3] |> harnais_forms!
[1, 2, 3]
iex> [1, {2, 2}, :three] |> harnais_forms!
[1, {2, 2}, :three]
iex> [1, {2, 2, 2}, %{c: 3}] |> harnais_forms!
** (ArgumentError) forms invalid, got invalid indices: [1, 2]
"""
@quote_result_list_no_return quote(do: list | no_return)
@quote_result_text_no_return quote(do: String.t() | no_return)
@quote_result_texts_no_return quote(do: [String.t()] | no_return)
@quote_result_form_no_return quote(do: form | no_return)
@quote_result_forms_no_return quote(do: forms | no_return)
@quote_result_form_result quote(do: {:ok, form} | {:error, error})
@quote_result_forms_texts_result quote(do: {:ok, [String.t()]} | {:error, error})
[
delegate: [
name: :harnais_form,
as: :form_validate,
to: Harnais.Utility,
doc: @doc_harnais_form,
args: :form,
spec_args: :any,
since: "0.1.0",
result: @quote_result_form_result
],
bang: [
doc: false,
as: :harnais_form,
args: :form,
since: "0.1.0",
result: @quote_result_form_no_return
],
query: [doc: false, as: :harnais_form, args: :form, since: "0.1.0", result: true],
delegate: [
name: :harnais_forms,
as: :forms_normalise,
to: Harnais.Utility,
doc: @doc_harnais_forms,
args: :forms,
spec_args: :any,
since: "0.1.0",
result: @quote_result_form_result
],
bang: [
doc: false,
as: :harnais_forms,
args: :forms,
since: "0.1.0",
result: @quote_result_forms_no_return
],
query: [doc: false, as: :harnais_forms, args: :forms, since: "0.1.0", result: true],
bang: [
doc: false,
as: :harnais_form_transform_forms,
args: :form,
since: "0.1.0",
result: @quote_result_list_no_return
],
bang: [
doc: false,
as: :harnais_form_transform_forms,
args: [:form, :opts],
since: "0.1.0",
result: @quote_result_list_no_return
],
bang: [
doc: false,
as: :harnais_form_compare_forms,
args: [:form1, :form2],
since: "0.1.0",
result: @quote_result_forms_no_return
],
bang: [
doc: false,
as: :harnais_form_compare_forms,
args: [:form1, :form2, :opts],
since: "0.1.0",
result: @quote_result_forms_no_return
],
query: [
doc: false,
as: :harnais_form_compare_forms,
args: [:form1, :form2],
since: "0.1.0",
result: true
],
query: [
doc: false,
as: :harnais_form_compare_forms,
args: [:form1, :form2, :opts],
since: "0.1.0",
result: true
],
bang: [
doc: false,
as: :harnais_form_compare_texts,
args: [:code1, :code2],
since: "0.1.0",
result: @quote_result_text_no_return
],
bang: [
doc: false,
as: :harnais_form_compare_texts,
args: [:code1, :code2, :opts],
since: "0.1.0",
result: @quote_result_text_no_return
],
query: [
doc: false,
as: :harnais_form_compare_texts,
args: [:code1, :code2],
since: "0.1.0",
result: true
],
query: [
doc: false,
as: :harnais_form_compare_texts,
args: [:code1, :code2, :opts],
since: "0.1.0",
result: true
],
bang: [
doc: false,
as: :harnais_form_test_forms,
args: :forms,
since: "0.1.0",
result: @quote_result_forms_no_return
],
bang: [
doc: false,
as: :harnais_form_test_forms,
args: [:forms, :opts],
since: "0.1.0",
result: @quote_result_forms_no_return
],
delegate: [
name: :harnais_form_format_form,
as: :form_format,
to: Harnais.Form.Utility,
doc: false,
args: :form,
spec_args: :any,
spec_result: @quote_result_forms_texts_result
],
delegate: [
name: :harnais_form_format_form,
as: :form_format,
to: Harnais.Form.Utility,
doc: @doc_harnais_form_format_form,
args: [:form, :opts],
spec_args: [:any, :any],
spec_result: @quote_result_forms_texts_result
],
bang: [
doc: false,
as: :harnais_form_format_form,
args: :form,
since: "0.1.0",
result: @quote_result_text_no_return
],
bang: [
doc: false,
as: :harnais_form_format_form,
args: [:form, :opts],
since: "0.1.0",
result: @quote_result_text_no_return
],
delegate: [
name: :harnais_form_format_forms,
as: :forms_format,
to: Harnais.Form.Utility,
doc: false,
args: :forms,
spec_args: :any,
spec_result: @quote_result_forms_texts_result
],
delegate: [
name: :harnais_form_format_forms,
as: :forms_format,
to: Harnais.Form.Utility,
doc: @doc_harnais_form_format_forms,
args: [:forms, :opts],
spec_args: [:any, :any],
spec_result: @quote_result_forms_texts_result
],
bang: [
doc: false,
as: :harnais_form_format_forms,
args: :forms,
since: "0.1.0",
result: @quote_result_texts_no_return
],
bang: [
doc: false,
as: :harnais_form_format_forms,
args: [:forms, :opts],
since: "0.1.0",
result: @quote_result_texts_no_return
]
]
|> Enum.flat_map(fn {pattern, opts} ->
[pattern: [pattern: pattern] ++ opts]
end)
|> CODI.reify_codi(@codi_opts)
end
|
lib/form.ex
| 0.88397 | 0.565779 |
form.ex
|
starcoder
|
defmodule Primer.Elements do
@moduledoc """
Functions for displaying common elements that are present on GitHub-authored sites.
"""
use Phoenix.HTML
defmodule MissingConfigurationError do
defexception [:missing_keys]
def exception(key) when is_atom(key), do: exception([key])
def exception(keys) when is_list(keys) do
%__MODULE__{missing_keys: keys}
end
def message(%{missing_keys: missing_keys}) do
"application configuration missing: #{inspect(missing_keys)}"
end
end
@doc """
Renders the GitHub-style `<> with ♥ by [author link]` footer item.
Retrieves the author's name and URL from the application configuration before passing to
`code_with_heart/3`. This information can be added to the application configuration by adding the
following to your `config.exs`:
```
config :primer,
code_with_heart: [
name: "<NAME>",
url: "https://example.com"
]
```
Raises a `Primer.Elements.MissingConfigurationError` if any of the required
application configuration information is not specified and this function is called.
"""
@spec code_with_heart(keyword) :: Phoenix.HTML.safe()
def code_with_heart(options \\ []) do
config = Application.get_env(Application.get_application(__MODULE__), :code_with_heart)
name = config[:name]
url = config[:url]
unless name && url, do: raise(MissingConfigurationError, :code_with_heart)
code_with_heart(name, url, options)
end
@doc """
Renders the GitHub-style `<> with ♥ by [author link]` footer item.
The text in this element is intentionally left untranslated because the form of the element is
intended to be recognizable in its specific format.
## Options
All options are passed to the underlying HTML `a` element.
"""
@spec code_with_heart(binary, binary, keyword) :: Phoenix.HTML.safe()
def code_with_heart(name, url, options \\ []) do
link_options = Keyword.merge([to: url, class: "link-gray-dark"], options)
html_escape([
PhoenixOcticons.octicon(:code),
" with ",
PhoenixOcticons.octicon(:heart),
" by ",
link(name, link_options)
])
end
@doc """
Renders a link to the project on GitHub.
Retrieves the project name or URL from the application configuration. This configuration
information can be added to the application configuration by adding the following to your
`config.exs`:
```
config :primer,
github_link: "owner/name"
```
If the configuration information is missing, a `Primer.Elements.MissingConfigurationError` is
raised.
"""
@spec github_link(binary | keyword) :: Phoenix.HTML.safe()
def github_link(project_or_options \\ [])
def github_link(project) when is_binary(project), do: github_link(project, [])
def github_link(options) when is_list(options) do
url = Application.get_env(Application.get_application(__MODULE__), :github_link)
unless url, do: raise(MissingConfigurationError, :github_link)
github_link(url, options)
end
@doc """
Renders a link to the project on GitHub.
`project` can be either a full URL or just the GitHub `owner/name` specification.
## Options
* `:tooltip_text` - Description text to display in the tooltip _(default: "View this project on
GitHub")_
All other options are passed to the underlying HTML `a` element.
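For example (values are illustrative):
```
github_link("owner/name", tooltip_text: "Source on GitHub")
```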
"""
def github_link(project, options) when is_binary(project) do
# Prepend the `https://github.com/` if only the name with owner is specified
url = if project =~ ~r{^[^/]+/[^/]+$}, do: "https://github.com/#{project}", else: project
link_options =
Keyword.merge(
[
to: url,
"aria-label": options[:tooltip_text] || "View this project on GitHub",
class: "link-gray-dark tooltipped tooltipped-n"
],
options
)
link(link_options) do
PhoenixOcticons.octicon("mark-github")
end
end
end
|
lib/primer/elements.ex
| 0.838993 | 0.84228 |
elements.ex
|
starcoder
|
defmodule NewtonIteration do
@moduledoc """
The NewtonIteration module implements the computation of roots of
real functions using Newton's method.
"""
@doc ~S"""
Runs Newton's iteration
`f` is an anonymous function of one real variable and `fp` is the
derivative of `f`.
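Each step applies the Newton update `x_next = x - f.(x) / fp.(x)`.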
`start_value` is the first value used in the iteration process.
Usually, this is a guess of where the wanted root of `f` may be
located.
`max_iteration_difference` and `max_iterations` are used to
terminate the iteration process.
If the absolute value of the difference between two consecutive
values produced by the iteration process is smaller than
`max_iteration_difference`, then `iterate/5` returns `{:ok, x_k}`
where `x_k` is the latest value produced by the iteration.
If the absolute value of the difference between two consecutive
values after `max_iterations` is still larger than
`max_iteration_difference`, then `iterate/5` returns `:error`.
## Examples
The root of the identity function `fn x -> x end` is `0`.
```
iex> {:ok, root} = NewtonIteration.iterate(
...> fn x -> x end,
...> fn _x -> 1 end,
...> 1.0,
...> 1.0e-9,
...> 4
...> )
...> Float.round(root, 6)
0.0
```
The roots of the quadratic function `fn x -> x * x - 4 end` are `2`
and `-2` but `4` iterations are not sufficient to compute a root
with the required accuracy.
```
iex> f = fn x -> x * x - 4 end
...> fp = fn x -> 2 * x end
...> NewtonIteration.iterate(f, fp, 4.0, 1.0e-9, 4)
:error
...> {:ok, root} = NewtonIteration.iterate(f, fp, 4.0, 1.0e-9, 8)
...> Float.round(root, 6)
2.0
```
"""
def iterate(f, fp, start_value, max_iteration_difference, max_iterations) do
iterate(f, fp, start_value, max_iteration_difference, max_iterations, 0)
end
defp iterate(_, _, _, _, max_iterations, iteration_count)
when iteration_count > max_iterations do
:error
end
defp iterate(
f,
fp,
previous_iteration,
max_iteration_difference,
max_iterations,
iteration_count
) do
iteration = previous_iteration - f.(previous_iteration) / fp.(previous_iteration)
if abs(iteration - previous_iteration) <= max_iteration_difference do
{:ok, iteration}
else
iterate(f, fp, iteration, max_iteration_difference, max_iterations, iteration_count + 1)
end
end
end
|
lib/newton_iteration.ex
| 0.946324 | 0.951639 |
newton_iteration.ex
|
starcoder
|
defmodule TtrCore.Mechanics do
@moduledoc """
Game play mechanics, rules and state transformer.
"""
@type cost :: integer
@type count :: integer
alias TtrCore.{
Board,
Cards,
Players
}
alias TtrCore.Mechanics.{
Context,
OtherPlayer,
Score,
State
}
alias TtrCore.Players.{
Player,
User
}
# API
@max_players 4
@doc """
Checks to see if the state allows going to the `setup` stage.
Returns `:ok`, if it can.
Returns `{:error, :not_owner}` if the user id used is not the owner
requesting the stage change.
Returns `{:error, :not_enough_players}` if the game does not have
more than 1 player joined.
Returns `{:error, :not_in_unstarted}` if the game is not in the
`unstarted` stage.
"""
@spec can_setup?(State.t, User.id) :: :ok |
{:error, :not_owner | :not_enough_players | :not_in_unstarted}
def can_setup?(%{owner_id: owner_id, players: players} = state, user_id) do
[]
|> validate_owner(owner_id, user_id)
|> validate_enough_players(players)
|> validate_game_unstarted(state)
|> handle_result()
end
@doc """
Checks to see if the state allows going to the `begin` stage.
Returns `:ok`, if it can.
Returns `{:error, :not_owner}` if the user id used is not the owner
requesting the stage change.
Returns `{:error, :not_enough_players}` if the game does not have
more than 1 player joined.
Returns `{:error, :not_in_setup}` if the game is not in the
`unstarted` stage.
Returns `{:error, :tickets_not_selected}` if any of the players have
not selected their initial tickets.
"""
@spec can_begin?(State.t, User.id) :: :ok |
{:error, :not_owner | :not_enough_players | :not_in_setup | :tickets_not_selected}
def can_begin?(%{owner_id: owner_id, players: players} = state, user_id) do
[]
|> validate_tickets_selected(state)
|> validate_owner(owner_id, user_id)
|> validate_enough_players(players)
|> validate_game_in_setup(state)
|> handle_result()
end
@doc """
Checks to see if a player can join.
Returns `:ok` if it is possible.
Returns `{:error, :game_full}` if the game has reach the maximum
number of players (4).
Returns `{:error, :already_joined}` if the player has already
joined.
"""
@spec can_join?(State.t, User.id) :: :ok | {:error, :game_full | :already_joined}
def can_join?(%{players: players}, user_id) do
[]
|> validate_not_full(players)
|> validate_no_duplicate_players(players, user_id)
|> handle_result()
end
@doc """
Checks to see if a player has already joined and returns a boolean.
"""
@spec is_joined?(State.t, User.id) :: boolean()
def is_joined?(%{players: players}, user_id) do
Map.has_key?(players, user_id)
end
@doc """
Adds a player to the state.
"""
@spec add_player(State.t, User.id) :: State.t
def add_player(%{players: players, turn_order: order} = state, user_id) do
%{state |
players: Players.add_player(players, user_id),
turn_order: order ++ [user_id]}
end
@doc """
Removes a player from the state. If the player is the owner, then
another player is automatically assigned as the owner.
"""
@spec remove_player(State.t, User.id) :: State.t
def remove_player(%{players: players, turn_order: order} = state, user_id) do
updated_players = Players.remove_player(players, user_id)
state
|> Map.put(:players, updated_players)
|> Map.put(:turn_order, List.delete(order, user_id))
|> transfer_ownership_if_host_left
end
@doc """
Randomly chooses starting player.
"""
@spec choose_starting_player(State.t) :: State.t
def choose_starting_player(%{players: players} = state) do
%Player{id: id} = Players.select_random_player(players)
%{state | current_player: id}
end
@doc """
Transforms game state to a `setup` stage. This stage will:
* Deal initial trains to players (4 to each player)
* Deal tickets for selections to players (3 to each player)
* Displays 5 trains face up for all user to select during normal
gameplay
"""
@spec setup_game(State.t) :: State.t
def setup_game(state) do
state
|> deal_trains()
|> deal_tickets()
|> display_trains()
|> Map.put(:stage, :setup)
end
@doc """
Transforms game state to a `started` stage. This stage will:
* Choose a starting player
"""
@spec start_game(State.t) :: State.t
def start_game(state) do
state
|> choose_starting_player()
|> Map.put(:stage, :started)
|> Map.put(:stage_meta, [])
end
@doc """
Deals trains to all players during the `setup` stage or a normal turn
(`started` stage). Called by `setup_game/1`.
"""
@spec deal_trains(State.t) :: State.t
def deal_trains(%{train_deck: train_deck, players: players} = state) do
{remaining, updated} = Cards.deal_initial_trains(train_deck, players)
state
|> Map.put(:train_deck, remaining)
|> Map.put(:players, updated)
end
@doc """
Draws trains to a player from the a train deck. Can draw 1 or 2 cards.
Returns `{:ok, state}` if the draw was successful.
Returns `{:error, :not_turn}` if the user asking for tickets is not
in possession of the turn.
"""
@spec draw_trains(State.t, User.id, count()) :: {:ok, State.t} | {:error, :not_turn | :user_not_found}
def draw_trains(%{current_player: id}, user_id, _) when id != user_id, do: {:error, :not_turn}
def draw_trains(%{train_deck: [], discard_deck: []} = state, _, _), do: {:ok, state}
def draw_trains(%{train_deck: [], discard_deck: deck} = state, id, count) do
new_deck = Enum.shuffle(deck)
new_state = %{state | train_deck: new_deck, discard_deck: []}
draw_trains(new_state, id, count)
end
def draw_trains(%{train_deck: deck, players: players} = state, id, count) do
if player = players[id] do
{remainder, updated_player} = Cards.draw_trains(deck, player, count)
updated_players = Players.replace_player(players, updated_player)
new_state = state
|> Map.put(:train_deck, remainder)
|> Map.put(:players, updated_players)
{:ok, new_state}
else
{:error, :user_not_found}
end
end
@doc """
Deals tickets to all players during the `setup` stage. Called by
`setup_game/1`.
"""
@spec deal_tickets(State.t) :: State.t
def deal_tickets(%{ticket_deck: deck, players: players} = state) do
{remaining, updated} = Cards.deal_tickets(deck, players)
state
|> Map.put(:ticket_deck, remaining)
|> Map.put(:players, updated)
end
@doc """
Draw tickets from deck to a player for selections. Always draws 3
and places them in the players selection buffer.
Returns `{:ok, state}` if draw was successful.
Returns `{:error, :not_turn}` if the user asking for tickets is not
in possession of the turn.
"""
@spec draw_tickets(State.t, User.id) :: {:ok, State.t} | {:error, :not_turn | :user_not_found}
def draw_tickets(%{current_player: id}, user_id) when id != user_id, do: {:error, :not_turn}
def draw_tickets(%{ticket_deck: deck, players: players} = state, user_id) do
if player = players[user_id] do
{new_deck, updated_player} = Cards.draw_tickets(deck, player)
updated_players = Players.replace_player(players, updated_player)
new_state = state
|> Map.put(:ticket_deck, new_deck)
|> Map.put(:players, updated_players)
{:ok, new_state}
else
{:error, :user_not_found}
end
end
@doc """
Select tickets that were drawn into buffer for a player.
Returns `{:ok, state}` if selections were successful.
Returns `{:error, :invalid_tickets}` if the tickets selected were
not available to be chosen.
Returns `{:error, :not_turn}` if the user asking for tickets is not
in possession of the turn. Turn is only checked when the game has
started or when on the last round.
"""
@spec select_tickets(State.t, User.id, [TicketCard.t]) ::
{:ok, State.t} | {:error, :invalid_tickets | :not_turn | :user_not_found}
def select_tickets(%{current_player: id, stage: stage}, user_id, _)
when (stage == :started or stage == :last_round) and id != user_id, do: {:error, :not_turn}
def select_tickets(%{ticket_deck: ticket_deck, players: players} = state, user_id, tickets) do
if player = players[user_id] do
if Players.has_tickets?(player, tickets) do
{updated_player, removed} = player
|> Players.add_tickets(tickets)
|> Players.remove_tickets_from_buffer(tickets)
updated_players = Players.replace_player(players, updated_player)
updated_tickets = Cards.return_tickets(ticket_deck, removed)
new_state = state
|> Map.put(:ticket_deck, updated_tickets)
|> Map.put(:players, updated_players)
|> update_meta(user_id)
{:ok, new_state}
else
{:error, :invalid_tickets}
end
else
{:error, :user_not_found}
end
end
@doc """
Select trains from the display deck and replenish train display.
Returns `{:ok, state}` if selections were successful.
Returns `{:error, :invalid_trains}` if the trains selected were
not available to be chosen.
Returns `{:error, :not_turn}` if the user asking for trains is not
in possession of the turn.
"""
@spec select_trains(State.t, User.id, [TrainCard.t]) ::
{:ok, State.t} | {:error, :invalid_trains | :not_turn | :user_not_found}
def select_trains(%{current_player: id}, user_id, _) when id != user_id, do: {:error, :not_turn}
def select_trains(%{players: players, train_deck: train_deck, displayed_trains: displayed} = state, user_id, trains) do
if player = players[user_id] do
selected = Enum.take(trains, 2) # Only grab up to 2 trains, ignore the rest
if Cards.has_cards?(displayed, selected) do
updated_player = Players.add_trains_on_turn(player, selected)
updated_players = Players.replace_player(players, updated_player)
{new_display, new_deck} = displayed
|> Cards.remove_from_display(selected)
|> Cards.replenish_display(train_deck)
new_state = state
|> Map.put(:players, updated_players)
|> Map.put(:displayed_trains, new_display)
|> Map.put(:train_deck, new_deck)
{:ok, new_state}
else
{:error, :invalid_trains}
end
else
{:error, :user_not_found}
end
end
@doc """
Claims a route for a player and pays out the cost in trains.
Returns `{:ok, state}` if successful.
Returns `{:error, :unavailable}` if the route is not eligible to be
claimed.
Returns `{:error, :not_turn}` if user is not in possession of the
turn.
"""
@spec claim_route(State.t, User.id, Route.t, [TrainCard.t]) ::
{:ok, State.t} | {:error, :unavailable | :not_turn | :user_not_found}
def claim_route(%{current_player: id}, user_id, _, _) when id != user_id, do: {:error, :not_turn}
def claim_route(%{players: players, discard_deck: discard} = state, user_id, route, trains_used) do
if player = players[user_id] do
claimed = Players.get_claimed_routes(players)
claimable = Board.get_claimable_routes(claimed, player, Enum.count(players))
has_stake = Enum.member?(claimable, route)
has_trains = Players.can_use_trains_for_route?(player, route, trains_used)
has_pieces = Players.has_enough_pieces?(player, route)
if has_stake and has_trains and has_pieces do
{updated_player, removed} = player
|> Players.add_route(route)
|> Players.remove_trains(trains_used)
updated_players = Players.replace_player(players, updated_player)
new_discard = Cards.add_trains_to_discard(discard, removed)
new_state = state
|> Map.put(:discard_deck, new_discard)
|> Map.put(:players, updated_players)
{:ok, new_state}
else
{:error, :unavailable}
end
else
{:error, :user_not_found}
end
end
@doc """
Generates a player's context from the game state. This includes the
view a player has of other players (not including their secrets or
the details of decks).
"""
@spec generate_context(State.t, User.id) :: {:ok, Context.t} | {:error, :user_not_found}
def generate_context(%{players: players} = state, user_id) do
if player = players[user_id] do
other_players = players
|> Enum.reject(fn {id, _} -> id == user_id end)
|> Enum.map(fn {_, player} ->
%OtherPlayer{
name: player.name,
tickets: Enum.count(player.tickets),
trains: Enum.count(player.trains),
pieces: player.pieces,
routes: player.routes
}
end)
{:ok, %Context{
id: player.id,
stage: state.stage,
game_id: state.id,
name: player.name,
pieces: player.pieces,
tickets: player.tickets,
tickets_buffer: player.tickets_buffer,
trains: player.trains,
trains_selected: player.trains_selected,
routes: player.routes,
train_deck: Enum.count(state.train_deck),
ticket_deck: Enum.count(state.ticket_deck),
displayed_trains: state.displayed_trains,
current_player: state.current_player,
other_players: other_players,
longest_path_owner: state.longest_path_owner
}
}
else
{:error, :user_not_found}
end
end
@doc """
End a player's turn.
Returns `{:ok, state}` if successful.
Returns `{:error, :not_turn}` if it is not the user id of the
current player.
"""
@spec end_turn(State.t, User.id()) :: {:ok, State.t} | {:error, :not_turn}
def end_turn(%{current_player: current_id} = state, user_id) do
if user_id == current_id do
{:ok, force_end_turn(state)}
else
{:error, :not_turn}
end
end
@doc """
Force the end of a turn regardless of player identification. Used by
the `Ticker` timer.
"""
@spec force_end_turn(State.t) :: State.t
def force_end_turn(%{current_player: current_id, turn_order: order} = state) do
count = Enum.count(order)
index = Enum.find_index(order, fn id -> id == current_id end)
# Find out the next player's id and set it
next = if index == count - 1, do: 0, else: index + 1
id = Enum.at(order, next)
new_state = state
|> reset_players_selections()
|> Map.put(:current_player, id)
|> move_stage()
new_state
end
# Private
defp move_stage(%{current_player: id, stage: :last_round, stage_meta: meta, players: players} = state) do
if all_players_played_last_round?(players, meta) do
# Get baseline scores for every player
scores = Enum.map(players, fn {_, player} -> calculate_score(player) end)
# Get longest route length from player pool
{_, _, _, longest} = Enum.max_by(scores, fn {_, _, _, length} -> length end)
# Separate all players that scored the longest length route from everyone else
{high_scorers, remainder} = Enum.split_with(scores, fn {_, _, _, length} ->
length == longest
end)
# Apply bonus points to high scorers and calculate final scores
achievers = Enum.map(high_scorers, fn {id, route_score, ticket_score, _} ->
{id, ticket_score + route_score + 10}
end)
others = Enum.map(remainder, fn {id, route_score, ticket_score, _} ->
{id, ticket_score + route_score}
end)
finals = achievers ++ others
# Calculate winner
{winner_id, score} = Enum.max_by(finals, fn {_, score} -> score end)
%{state |
winner_id: winner_id,
winner_score: score,
scores: finals,
stage: :finished,
stage_meta: []}
else
%{state | stage_meta: [id|meta]}
end
end
defp move_stage(%{stage: :started, players: players} = state) do
if Players.any_out_of_stock?(players) do
%{state | stage: :last_round, stage_meta: []}
else
state
end
end
defp move_stage(%{stage: _} = state), do: state
defp calculate_score(player), do: Score.calculate(player)
defp all_players_played_last_round?(players, meta) do
ids = players |> Map.keys() |> Enum.sort()
meta_ids = Enum.sort(meta)
ids == meta_ids
end
defp reset_players_selections(%{players: players} = state) do
updated = Enum.reduce(players, %{}, fn {id, player}, acc ->
Map.put(acc, id, Players.reset_selections(player))
end)
%{state | players: updated}
end
defp display_trains(%{displayed_trains: displayed, train_deck: deck} = state) do
{display, new_deck} = Cards.replenish_display(displayed, deck)
%{state| train_deck: new_deck, displayed_trains: display}
end
defp transfer_ownership_if_host_left(%{players: players, owner_id: owner_id} = state) do
if players[owner_id] do
state
else
%{id: new_owner_id} = Players.select_random_player(players)
%{state | owner_id: new_owner_id}
end
end
defp update_meta(%{stage: :setup} = state, user_id) do
if Enum.member?(state.stage_meta, user_id) do
state
else
%{state | stage_meta: [user_id | state.stage_meta]}
end
end
defp update_meta(%{stage: _} = state, _), do: state
defp validate_owner(errors, owner_id, user_id) do
if owner_id == user_id do
errors
else
[{:error, :not_owner} | errors]
end
end
defp validate_enough_players(errors, players) do
if Enum.count(players) > 1 do
errors
else
[{:error, :not_enough_players} | errors]
end
end
defp validate_game_unstarted(errors, %{stage: stage}) do
if stage == :unstarted do
errors
else
[{:error, :not_in_unstarted} | errors]
end
end
defp validate_game_in_setup(errors, %{stage: stage}) do
if stage == :setup do
errors
else
[{:error, :not_in_setup} | errors]
end
end
defp validate_not_full(errors, players) do
if Enum.count(players) >= @max_players do
[{:error, :game_full} | errors]
else
errors
end
end
defp validate_tickets_selected(errors, %{players: players, stage_meta: meta}) do
ids = Map.keys(players)
if Enum.all?(ids, fn id -> Enum.member?(meta, id) end) do
errors
else
[{:error, :tickets_not_selected} | errors]
end
end
defp validate_no_duplicate_players(errors, players, user_id) do
if Map.has_key?(players, user_id) do
[{:error, :already_joined} | errors]
else
errors
end
end
defp handle_result(results) do
case results do
[error|_] -> error
_ -> :ok
end
end
end
|
lib/ttr_core/mechanics.ex
| 0.914477 | 0.512022 |
mechanics.ex
|
starcoder
|
defmodule Exhort.SAT.Constraint do
@moduledoc """
A constraint on the model.
> #### Suggestion {: .warning}
>
> Consider using `Exhort.SAT.Expr` instead of using this module directly.
The binary constraints are:
```
:< | :<= | :== | :>= | :> | :"abs=="
```
The list constraints are:
```
:"all!=" | :no_overlap
```
The expression must include a boundary: `<`, `<=`, `==`, `>=`, `>`.
```
"x" < "y"
```
The components of the expression may be simple mathematical expressions,
including the use of `+` and `*`:
```
"x" * "y" = "z"
```
The `sum/1` function may be used to sum over a series of terms:
```
sum("x" + "y") == "z"
```
The variables in the expression may be model variables or Elixir variables.
For example, where `"x"` is a model variable (e.g., `def_int_var(x, {0, 3}`))
and `y` is an Elixir variable (e.g., `y = 2`):
```
"x" < y
```
A `for` comprehension may be used to generate list values:
```
sum(for {x, y} <- list, do: "x" * "y") == "z"
```
As a larger example:
```
y = 20
z = [{0, 1}, {2, 3}, {4, 5}]
Builder.new()
|> Builder.def_int_var("x", {0, 3})
|> Builder.constrain(sum(for {a, b} <- z, do: a * b) < "y")
|> Builder.build()
...
```
"""
alias __MODULE__
alias Exhort.SAT.BoolVar
alias Exhort.SAT.DSL
alias Exhort.SAT.IntVar
alias Exhort.SAT.LinearExpression
@type constraint :: :< | :<= | :== | :>= | :> | :"abs==" | :"all!=" | :no_overlap
@type t :: %__MODULE__{}
defstruct [:res, :defn]
@doc """
Define a bounded constraint.
"""
defmacro new(expr, opts \\ []) do
expr =
case expr do
{:==, m1, [lhs, {:abs, _m2, [var]}]} ->
{:"abs==", m1, [lhs, var]}
expr ->
expr
end
{op, _, [lhs, rhs]} = expr
lhs = DSL.transform_expression(lhs)
rhs = DSL.transform_expression(rhs)
opts = Enum.map(opts, &DSL.transform_expression(&1))
quote do
%Constraint{defn: {unquote(lhs), unquote(op), unquote(rhs), unquote(opts)}}
end
end
@doc false
@spec constrain(
lhs :: atom() | String.t() | BoolVar.t() | IntVar.t() | LinearExpression.t(),
constraint :: Constraint.constraint(),
rhs :: atom() | String.t() | BoolVar.t() | IntVar.t() | LinearExpression.t(),
opts :: [{:if, BoolVar.t()}] | [{:unless, BoolVar.t()}]
) :: Constraint.t()
def constrain(lhs, constraint, rhs, opts \\ []) do
%Constraint{defn: {lhs, constraint, rhs, opts}}
end
@doc """
Add an implication constraint where `bool1` implies `bool2`.
"""
def implication(bool1, bool2) do
%Constraint{defn: {:implication, bool1, bool2}}
end
@doc """
Create a constraint that requires one of the booleans in the list to be true.
"""
@spec bool_or(list()) :: Exhort.SAT.Constraint.t()
def bool_or(list) do
%Constraint{defn: {:or, list}}
end
@doc """
Create a logical AND constraint on the list of booleans.
"""
@spec bool_and(list()) :: Exhort.SAT.Constraint.t()
def bool_and(list) do
%Constraint{defn: {:and, list}}
end
@doc """
Create a constraint that ensures no overlap among the variables.
"""
@spec no_overlap(list(), Keyword.t()) :: Exhort.SAT.Constraint.t()
def no_overlap(list, opts \\ []) do
%Constraint{defn: {:no_overlap, list, opts}}
end
@doc """
Create a constraint that ensures each item in the list is different in the
solution.
"""
@spec all_different(list(), Keyword.t()) :: Exhort.SAT.Constraint.t()
def all_different(list, opts \\ []) do
%Constraint{defn: {:"all!=", list, opts}}
end
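# Usage sketch (hypothetical model variable names):
#   Constraint.bool_or(["p", "q"])        # at least one of p, q is true
#   Constraint.bool_and(["p", "q"])       # both p and q are true
#   Constraint.all_different(["x", "y"])  # x != y in the solution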
end
|
lib/exhort/sat/constraint.ex
| 0.889942 | 0.960287 |
constraint.ex
|
starcoder
|
defmodule CImg.Builder do
@moduledoc """
Build and execute an image processing sequence.
** under construction **
"""
alias __MODULE__
alias CImg.NIF
# builder object
# :handle - work image.
# :src - source image.
# :script - image operations
defstruct handle: nil, src: nil, script: []
@doc """
Building image processing sequence. It allows to execute mutable operation in
this sequence.
## Parameters
* image - %CImg{} or %Builder{} object. If %CImg{} is passed, `builder` duplicates
the image object and returns it wrapped with %Builder{}.
## Examples
```Elixir
img = CImg.load("sample.jpg")
res = CImg.builder(img)
|> CImg.draw_circle(100, 100, 30, {0, 255, 0}) # draw a circle on the duplicated img.
```
"""
def builder(%Builder{}=builder) do
builder
end
def builder(cimg) do
dup = CImg.dup(cimg)
%Builder{handle: dup.handle}
end
@doc """
Building image processing sequence. This one creates an empty image object
instead of receiving an image.
## Parameters
* x, y, z, c - image shape.
* val - filling value
## Examples
```Elixir
res = CImg.builder(100, 100, 1, 3, 0) # 0 filled color image {100, 100, 1, 3}
|> CImg.draw_circle(100, 100, 30, {0, 255, 0}) # draw a circle on the created image.
```
"""
def builder(x, y, z, c, val) do
with {:ok, h} <- NIF.cimg_create(x, y, z, c, val),
do: %Builder{handle: h}
end
@doc """
Create a %Builder{} from jpeg/png format binary.
You can create an image from a loaded binary of a JPEG/PNG file.
## Parameters
* jpeg_or_png - loaded binary of the image file.
## Examples
```Elixir
jpeg = File.read!("sample.jpg")
img = Builder.from_binary(jpeg)
```
"""
def from_binary(jpeg_or_png) do
with {:ok, h} <- NIF.cimg_load_from_memory(jpeg_or_png),
do: %Builder{handle: h}
end
@doc """
Return %CImg{} converted from %Builder{}. Of course, mutable operations cannot
be applied to %CImg{}.
## Parameters
* builder - %Builder{} object.
## Examples
```Elixir
cimg = CImg.builder(100, 100, 1, 3, 0)
|> CImg.draw_circle(100, 100, 30, {0, 255, 0}) # draw a circle on the created image.
|> CImg.runit()
# cimg is %CImg{} object with a circle drawn on it.
```
"""
def runit(%Builder{handle: h}) do
%CImg{handle: h}
end
def resize(%Builder{handle: h}=builder, {x, y}=_size, align, fill) do
align = case align do
:none -> 0
:ul -> 1
:br -> 2
_ -> raise(ArgumentError, "unknown align '#{align}'.")
end
#[{:resize, x, y, align, fill} | builder.script]
with {:ok, packed} <- NIF.cimg_get_resize(%CImg{handle: h}, x, y, align, fill),
do: %Builder{handle: packed}
end
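# Usage sketch (assumes a "sample.jpg" file exists; align/fill values are
# illustrative):
#   CImg.load("sample.jpg")
#   |> Builder.builder()
#   |> Builder.resize({640, 480}, :none, 0)
#   |> Builder.runit()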
end
|
lib/cimg/builder.ex
| 0.879283 | 0.762778 |
builder.ex
|
starcoder
|
defmodule Noegle.Schema do
@moduledoc """
Add authentication support to your User schema module.
Example:
```elixir
defmodule MyApp.User do
use MyApp.Web, :model
use Noegle.Schema
```
The following functions are available:
* `hash_password/1` - hashes a password string using `Comeonin.Bcrypt.hashpwsalt`
* `valid_password?/2` - checks that a clear text `password` matches the `hashed_password`
## Examples
The following is a full example of what is required to use Noegle for your authentication
```elixir
defmodule MyApp.Noegle do
use MyApp.Web, :model
use Noegle.Schema
schema "users" do
field :name, :string
field :email, :string
# These are the fields required for Noegle to work
field :password_digest, :string
field :password, :string, virtual: true
field :password_confirmation, :string, virtual: true
timestamps
end
@required_fields ~w(name email)
@optional_fields ~w(password password_confirmation)
def changeset(model, params \\ %{}) do
model
|> cast(params, @required_fields, @optional_fields)
|> unique_constraint(:email)
|> validate_confirmation(:password)
|> hash_password() # Function that comes from Noegle which hashes the password
end
```
"""
defmacro __using__(_opts \\ []) do
quote do
import unquote(__MODULE__)
import Ecto.Changeset
import Comeonin.Bcrypt, only: [hashpwsalt: 1]
@doc """
Hashes password in a changeset using `Comeonin.Bcrypt.hashpwsalt/1`
Returns the changeset
"""
def hash_password(changeset) do
if password = get_change(changeset, :password) do
changeset
|> put_change(:password_digest, hashpwsalt(password))
else
changeset
end
end
@doc """
Takes a clear text `password` and matches it against `hashed_password`
Returns true/false
"""
def valid_password?(password, hashed_password) do
try do
Comeonin.Bcrypt.checkpw(password, hashed_password)
rescue
_ -> false
end
end
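# Usage sketch (inside a module that `use`s Noegle.Schema, with the field
# names from the moduledoc example):
#   changeset |> hash_password()
#   valid_password?("secret", user.password_digest) #=> true or false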
end
end
end
|
lib/noegle/schema.ex
| 0.884984 | 0.622459 |
schema.ex
|
starcoder
|
defmodule Esolix.Langs.Brainfuck do
@moduledoc """
Documentation for the Brainfuck Module.
"""
# > Move the pointer to the right
# < Move the pointer to the left
# + Increment the memory cell at the pointer
# - Decrement the memory cell at the pointer
# . Output the character signified by the cell at the pointer
# , Input a character and store it in the cell at the pointer
# [ Jump past the matching ] if the cell at the pointer is 0
# ] Jump back to the matching [ if the cell at the pointer is nonzero
alias Esolix.DataStructures.Tape
import ExUnit.CaptureIO
defmodule BrainfuckTape do
@moduledoc false
defstruct code: "",
instruction_pointer: 0,
tape: Tape
end
@default_tape_params [
width: 300_000,
loop: false,
cell_byte_size: 1,
initial_cell_value: 0,
initial_pointer: 0
]
# Custom Module Errors
defmodule UnbalancedBracketsError do
@moduledoc false
defexception message: "Invalid Brainfuck Code caused by unbalanced square brackets"
end
defmodule WrongFileExtensionError do
@moduledoc false
defexception [:message]
def exception(file) do
msg = "File #{file} does not have the .bf extension"
%WrongFileExtensionError{message: msg}
end
end
@spec eval(String.t(), keyword()) :: String.t()
@doc """
Runs Brainfuck Code and returns the IO output as a string.
## Examples
iex> Brainfuck.eval("some hello world code")
"Hello World!"
"""
def eval(code, params \\ []) do
capture_io(fn ->
execute(code, params)
end)
end
@spec eval_file(String.t(), keyword()) :: String.t()
@doc """
Runs Brainfuck Code from file and returns the IO output as a string.
## Examples
iex> Brainfuck.eval_file("path/to/some/hello_world.file")
"Hello World!"
"""
def eval_file(file, params \\ []) do
validate_file(file)
|> extract_file_contents()
|> eval(params)
end
@spec execute_alt(String.t(), keyword()) :: :ok
def execute_alt(code, params \\ []) do
code =
clean_code(code)
|> validate_code()
bf_code = %BrainfuckTape{code: code, tape: init_tape(params)}
run_step(bf_code)
:ok
end
@spec execute(String.t(), keyword()) :: :ok
@doc """
Runs Brainfuck code, printing its output ("Hello World!" below) to stdout.
## Examples
iex> Brainfuck.execute("++++++++[>++++[>++>+++>+++>+<<<<-]>+>+>->>+[<]<-]>>.>---.+++++++..+++.>>.<-.<.+++.------.--------.>>+.>++.")
:ok
"""
def execute(code, params \\ []) do
code =
clean_code(code)
|> validate_code()
|> List.to_string()
tape = init_tape(params)
code
|> group_by_brackets()
|> Enum.reduce(tape, fn section, tape_acc ->
run_section(section, tape_acc)
end)
:ok
end
@spec execute_file(String.t(), keyword()) :: :ok
@doc """
Runs Brainfuck code from a file, printing its output to stdout.
## Examples
iex> Brainfuck.execute_file("path/to/hello_world.bf")
:ok
"""
def execute_file(file, params \\ []) do
validate_file(file)
|> extract_file_contents()
|> execute(params)
end
defp run_step(
%BrainfuckTape{code: code, tape: tape, instruction_pointer: instruction_pointer} =
bf_code
) do
instruction = Enum.at(code, instruction_pointer)
# debug(bf_code, code: false)
tape = run_instruction(instruction, tape)
instruction_pointer =
case instruction do
?[ ->
# Skip to next ']' if current cell 0
if Tape.value(tape) == 0 do
Enum.split(code, instruction_pointer)
|> elem(1)
|> Enum.reduce_while({0, 0}, fn char, {index, open_brackets} ->
open_brackets =
case char do
?[ ->
open_brackets + 1
?] ->
open_brackets - 1
_ ->
open_brackets
end
if open_brackets == 0 do
{:halt, index}
else
{:cont, {index + 1, open_brackets}}
end
end)
|> Kernel.+(instruction_pointer)
else
instruction_pointer + 1
end
?] ->
# Jump back to previous '[' if current cell not zero
if Tape.value(tape) != 0 do
subtractor =
Enum.split(code, instruction_pointer)
|> elem(0)
|> Enum.reverse()
|> Enum.reduce_while({0, 1}, fn char, {index, open_brackets} ->
open_brackets =
case char do
?[ ->
open_brackets - 1
?] ->
open_brackets + 1
_ ->
open_brackets
end
if open_brackets == 0 do
{:halt, index}
else
{:cont, {index + 1, open_brackets}}
end
end)
|> Kernel.+(1)
instruction_pointer - subtractor
else
instruction_pointer + 1
end
_ ->
instruction_pointer + 1
end
if instruction_pointer < length(code) do
run_step(%{bf_code | instruction_pointer: instruction_pointer, tape: tape})
end
end
defp init_tape(params \\ []) do
tape_params = params[:tape_params] || @default_tape_params
input = params[:input] || ""
tape_params = tape_params ++ [input: input]
params[:tape] || Tape.init(tape_params)
end
defp validate_file(file) do
if String.ends_with?(file, ".bf"), do: file, else: raise(WrongFileExtensionError, file)
end
defp extract_file_contents(file) do
File.read!(file)
end
defp clean_code(code) do
symbols = '[]+-,.<>'
String.to_charlist(code)
|> Enum.filter(fn char ->
Enum.any?(symbols, &(&1 == char))
end)
end
defp validate_code(code) do
# TODO: if brackets are balanced, also check they are positioned correctly to catch cases like "]+++["
unless Enum.count(code, &(&1 == ?[)) == Enum.count(code, &(&1 == ?])) do
raise UnbalancedBracketsError
end
code
end
defp run_section(code, tape) do
cond do
# Case 1: Skip Section and jump behind corresponding "]"
String.starts_with?(code, "[") && Tape.value(tape) == 0 ->
tape
# Case 2: Run Section between []-brackets, at the end decide if the bracket section needs to be done another time
String.starts_with?(code, "[") ->
tape =
String.slice(code, 1..-2)
|> group_by_brackets()
|> Enum.reduce(tape, fn section, tape_acc ->
run_section(section, tape_acc)
end)
# Reached end of bracket section, if current cell != 0 do it again
if Tape.value(tape) != 0, do: run_section(code, tape), else: tape
# Case 3: Run single instructions
true ->
code
|> String.to_charlist()
|> Enum.reduce(tape, fn char, tape_acc ->
run_instruction(char, tape_acc)
end)
end
end
defp group_by_brackets(code) do
# https://stackoverflow.com/a/19863847/12954117
# https://www.regular-expressions.info/recurse.html#balanced
regex = ~r/\[(?>[^\[\]]|(?R))*\]/
Regex.split(regex, code, include_captures: true)
end
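# For example (sketch): the regex splits code into top-level bracket
# sections and the instruction runs between them:
#   group_by_brackets("+[->+<].")
#   #=> ["+", "[->+<]", "."]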
defp run_instruction(char, tape) do
case char do
?> -> Tape.right(tape)
?< -> Tape.left(tape)
?+ -> Tape.inc(tape)
?- -> Tape.dec(tape)
?. -> Tape.print(tape, mode: :ascii)
?, -> Tape.handle_input(tape)
?[ -> tape
?] -> tape
_ -> tape
end
end
defp debug(
%BrainfuckTape{code: code, instruction_pointer: instruction_pointer, tape: tape} =
bf_code,
opts \\ []
) do
if opts[:code] do
IO.inspect(
List.replace_at(
code,
instruction_pointer,
" '''' #{[Enum.at(code, instruction_pointer)]} '''' "
)
|> List.to_string(),
label: "code"
)
end
IO.inspect("#{[Enum.at(code, instruction_pointer)]}", label: "#{instruction_pointer}")
IO.inspect("#{Tape.cell(tape)}", label: "cell#{tape.pointer}")
IO.puts("\n")
IO.puts("\n")
IO.puts("\n")
end
end
|
lib/langs/brainfuck.ex
| 0.639286 | 0.514583 |
brainfuck.ex
|
starcoder
|
defmodule Blockchain.Extensions.SmartContracts do
@moduledoc """
Smart contracts
"""
alias Blockchain.Transaction
alias Blockchain.Wallet
@typedoc """
Represents the possible operators that can be used in a contract
"""
@type operator :: :+ | :* | :- | :== | :> | :< | :and | :or
@typedoc """
Represents a contract expression
"""
@type contract ::
number()
| String.t()
| []
| {}
| %{}
| binary()
| {:if, contract(), contract(), contract()}
| {operator(), contract(), contract()}
| :from
| :to
| :value
| any()
@doc """
Determines if both the transcation and the contract are valid or not
"""
@spec valid_transaction_contract?(Transaction.t(), contract()) :: boolean()
def valid_transaction_contract?(%Transaction{} = transaction, contract) do
eval(transaction, contract) and Transaction.valid?(transaction)
end
@doc """
Evaluates a contract against a transaction
"""
@spec eval(Transaction.t(), contract()) ::
number() | String.t() | boolean() | contract() | Wallet.t()
def eval(%Transaction{from: from, to: to, value: value} = t, contract) do
case contract do
x when is_number(x) -> x
s when is_binary(s) -> s
[] -> true
{} -> true
%{} -> true
true -> true
false -> false
{:if, condition, tr, fa} -> if eval(t, condition), do: eval(t, tr), else: eval(t, fa)
{:+, left, right} -> eval(t, left) + eval(t, right)
{:*, left, right} -> eval(t, left) * eval(t, right)
{:-, left, right} -> eval(t, left) - eval(t, right)
{:==, left, right} -> eval(t, left) == eval(t, right)
{:>, left, right} -> eval(t, left) > eval(t, right)
{:<, left, right} -> eval(t, left) < eval(t, right)
{:and, left, right} -> eval(t, left) and eval(t, right)
{:or, left, right} -> eval(t, left) or eval(t, right)
:from -> from
:to -> to
:value -> value
_ -> false
end
end
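# Example (hypothetical transaction; %Transaction{} fields beyond these
# are omitted for brevity):
#   t = %Transaction{from: "alice", to: "bob", value: 150}
#   eval(t, {:if, {:>, :value, 100}, true, false}) #=> true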
end
|
lib/blockchain/extensions/smart_contracts.ex
| 0.758242 | 0.575051 |
smart_contracts.ex
|
starcoder
|
defmodule Droll do
@moduledoc """
Simple implementation of standard dice notation
See [The Wikipedia Page](https://en.wikipedia.org/wiki/Dice_notation) for
more information.
"""
alias Droll.{Result, Formula}
@doc """
Parse a standard dice notation formula
Examples:
iex> Droll.parse("d20")
{:ok, %Droll.Formula{num_dice: 1, num_sides: 20, modifier: 0, operation: :+}}
iex> Droll.parse("4d6")
{:ok, %Droll.Formula{num_dice: 4, num_sides: 6, modifier: 0, operation: :+}}
iex> Droll.parse("1d6+1")
{:ok, %Droll.Formula{num_dice: 1, num_sides: 6, modifier: 1, operation: :+}}
iex> Droll.parse("10d5-2")
{:ok, %Droll.Formula{num_dice: 10, num_sides: 5, modifier: 2, operation: :-}}
iex> Droll.parse("1d10/1")
{:ok, %Droll.Formula{num_dice: 1, num_sides: 10, modifier: 1, operation: :/}}
iex> Droll.parse("1d10x5")
{:ok, %Droll.Formula{num_dice: 1, num_sides: 10, modifier: 5, operation: :x}}
"""
@spec parse(iodata()) :: {:ok, Formula.t()} | {:error, String.t()}
def parse(formula) do
with {:ok, tokens, _} <- :dice_lexer.string(to_charlist(formula)),
{:ok, {num_dice, :d, num_sides, operation, modifier}} <- :dice_parser.parse(tokens) do
{:ok,
%Formula{
num_dice: num_dice,
num_sides: num_sides,
modifier: modifier,
operation: operation
}}
else
{:error, {_, :dice_lexer, {:illegal, chars}}, _} ->
{:error, "could not decode formula. unexpected input: #{inspect(chars)}"}
{:error, {_, :dice_parser, _}} ->
{:error, "could not decode formula"}
{:error, reason} ->
{:error, reason}
e when is_list(e) ->
{:error, "could not decode formula"}
end
end
@doc """
Execute a roll based on a formula. See `Droll.parse/1` for more information
"""
def roll(formula_str) do
with {:ok, formula} <- parse(formula_str) do
{:ok,
%Result{}
|> apply_roll(formula)
|> apply_modifier(formula)
|> total()
|> min()
|> max()
|> avg()}
end
end
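# Example (rolls are random; this shows only the shape of the result for
# a hypothetical "4d6" roll):
#   {:ok, %Droll.Result{rolls: [3, 5, 1, 6], total: 15, min: 1, max: 6, avg: 3.75}} =
#     Droll.roll("4d6")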
@spec apply_roll(Result.t(), Formula.t()) :: Result.t()
defp apply_roll(result, formula) do
rolls =
Enum.map(1..formula.num_dice, fn _ ->
1 + :erlang.floor(:rand.uniform() * formula.num_sides)
end)
%{result | rolls: rolls}
end
@spec total(Result.t()) :: Result.t()
defp total(result), do: %{result | total: Enum.sum(result.rolls)}
@spec min(Result.t()) :: Result.t()
defp min(result), do: %{result | min: Enum.min(result.rolls)}
@spec max(Result.t()) :: Result.t()
defp max(result), do: %{result | max: Enum.max(result.rolls)}
@spec avg(Result.t()) :: Result.t()
defp avg(result), do: %{result | avg: result.total / Enum.count(result.rolls)}
@spec apply_modifier(Result.t(), Formula.t()) :: Result.t()
defp apply_modifier(result, %{modifier: modifier, operation: :+}),
do: %{result | total: result.total + modifier}
defp apply_modifier(result, %{modifier: modifier, operation: :-}),
do: %{result | total: result.total - modifier}
defp apply_modifier(result, %{modifier: modifier, operation: :/}),
do: %{result | total: result.total / modifier}
defp apply_modifier(result, %{modifier: modifier, operation: :x}),
do: %{result | total: result.total * modifier}
end
|
lib/droll.ex
| 0.857037 | 0.615521 |
droll.ex
|
starcoder
|
defmodule TimeZoneInfo.DataPersistence.FileSystem do
@moduledoc """
An implementation for the behaviour `TimeZoneInfo.DataPersistence` to persist
data in the file system.
"""
@behaviour TimeZoneInfo.DataPersistence
alias File.Stat
alias TimeZoneInfo.ExternalTermFormat
@impl true
def put(data) do
with {:ok, path} <- fetch_path(),
{:ok, data} <- ExternalTermFormat.encode(data) do
File.write(path, data)
end
end
@impl true
def fetch do
with {:ok, path} <- fetch_path(),
{:ok, data} <- File.read(path) do
ExternalTermFormat.decode(data)
end
end
@impl true
def checksum do
with {:ok, path} <- fetch_path(),
{:ok, data} <- File.read(path) do
ExternalTermFormat.checksum(data)
end
end
@impl true
def fetch_last_update do
with {:ok, path} <- fetch_path(),
{:ok, %Stat{mtime: mtime}} <- File.stat(path, time: :posix) do
{:ok, mtime}
end
end
@impl true
def put_last_update(time) do
with {:ok, path} <- fetch_path() do
case File.exists?(path) do
true -> File.touch(path, time)
false -> {:error, :enoent}
end
end
end
@impl true
def info do
with {:ok, path} <- fetch_path(),
{:ok, stat} <- File.stat(path),
{:ok, data} <- File.read(path) do
%{
stat: stat,
path: path,
checksum: ExternalTermFormat.checksum(data)
}
end
end
@spec fetch_path ::
{:ok, Path.t()} | {:error, {:invalid_config, Keyword.key() | [Keyword.key()]}}
defp fetch_path do
with {:file_system, {:ok, file_system}} <-
{:file_system, Application.fetch_env(:time_zone_info, :file_system)},
{:path, {:ok, path}} when is_binary(path) <-
{:path, Keyword.fetch(file_system, :path)} do
{:ok, path}
else
{:file_system, :error} ->
{:error, {:invalid_config, :file_system}}
{:path, :error} ->
{:error, {:invalid_config, [:file_system, :path]}}
{:path, {:ok, path}} ->
{:error, {:invalid_config, [file_system: [path: path]]}}
end
end
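# Expected configuration (sketch; the path value is illustrative):
#   config :time_zone_info, file_system: [path: "priv/time_zone_info.etf"]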
end
|
lib/time_zone_info/data_persistence/file_system.ex
| 0.76999 | 0.435421 |
file_system.ex
|
starcoder
|
defmodule OpenGraph do
defstruct [
# Basic Metadata
:title,
:type,
:image,
:url,
# Optional Metadata
:audio,
:description,
:determiner,
:locale,
:site_name,
:video
]
@type value() :: String.t() | nil
@type t() :: %__MODULE__{
title: value(),
type: value(),
image: value(),
url: value(),
audio: value(),
description: value(),
determiner: value(),
locale: value(),
site_name: value(),
video: value()
}
@doc """
Fetch URL and parse Open Graph protocol.
Returns `{:ok, %OpenGraph{}}` for a successful request; otherwise, returns `{:error, %OpenGraph.Error{}}`.
"""
@spec fetch(String.t()) :: {:ok, OpenGraph.t()} | {:error, OpenGraph.Error.t()}
def fetch(url) do
case Finch.build(:get, url) |> Finch.request(OpenGraph.Finch) do
{:ok, %Finch.Response{status: status} = response} when status in 200..299 ->
{:ok, parse(response.body)}
{:ok, %Finch.Response{status: status} = response} when status in 300..399 ->
case List.keyfind(response.headers, "location", 0) do
{_, location} ->
fetch(location)
nil ->
reason = {:missing_redirect_location, status}
{:error, %OpenGraph.Error{reason: reason}}
end
{:ok, %Finch.Response{status: status}} ->
reason = {:unexpected_status_code, status}
{:error, %OpenGraph.Error{reason: reason}}
{:error, error} ->
reason = {:request_error, Exception.message(error)}
{:error, %OpenGraph.Error{reason: reason}}
end
end
@doc """
Similar to `fetch/1` but raises a `OpenGraph.Error` exception if request failed.
Returns `%OpenGraph{}`.
"""
@spec fetch!(String.t()) :: OpenGraph.t()
def fetch!(url) do
case fetch(url) do
{:ok, result} ->
result
{:error, error} ->
raise error
end
end
@doc """
Parse Open Graph protocol.
Returns `%OpenGraph{}`.
## Examples
iex> OpenGraph.parse("<meta property='og:title' content='GitHub' />")
%OpenGraph{title: "GitHub"}
"""
@spec parse(String.t()) :: OpenGraph.t()
def parse(html) do
{:ok, document} = Floki.parse_document(html)
og_elements = Floki.find(document, "meta[property^='og:'][content]")
properties = Floki.attribute(og_elements, "property")
contents = Floki.attribute(og_elements, "content")
fields =
[properties, contents]
|> List.zip()
|> Enum.reduce(%{}, &put_field/2)
struct(__MODULE__, fields)
end
defp put_field({"og:" <> property, content}, acc) do
Map.put_new(acc, String.to_existing_atom(property), content)
rescue
ArgumentError ->
acc
end
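# Usage sketch (requires the OpenGraph.Finch pool to be started; the
# fetched values depend on the remote page):
#   {:ok, og} = OpenGraph.fetch("https://github.com")
#   og.title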
end
|
lib/open_graph.ex
| 0.895573 | 0.481941 |
open_graph.ex
|
starcoder
|
defmodule SharedSettings do
@moduledoc ~S"""
SharedSettings is a library for fetching and updating settings at runtime.
The goal of this is to provide a simple, language-agnostic storage interface
as well as an accompanying Ruby gem (TODO) and UI. This is not intended
to be a fully-fledged feature flagging library (see FunWithFlags if you need that).
Instead, this is geared toward updating settings represented by a string, integer, etc.,
for the purpose of easing runtime tweaking of knobs.
"""
alias SharedSettings.Config
alias SharedSettings.Setting
@cache Config.cache_adapter()
@store Config.storage_adapter()
@type setting_name :: atom() | String.t()
@doc ~S"""
Creates or updates a setting.
Settings are unique by name and creating a second setting with the same name will overwrite the original.
## Arguments
* `name` - An atom or string representing the name of the setting. Used for fetching/deleting
* `value` - Any data of type string, number, boolean, or range
## Returns
If a setting is successfully stored, a tuple of `:ok` and the setting name as a string is returned.
If `value`'s type isn't supported, {:error, :unsupported_type} is returned
Any other failures (say, from the storage adaptor) will be returned as-is.
Failures to write to cache will not be returned as an error so long as writing to storage succeeds.
"""
@spec put(setting_name(), any()) :: {:ok, String.t()} | {:error, any()}
def put(_name, _value, opts \\ [])
def put(name, value, opts) when is_atom(name) do
name
|> Atom.to_string()
|> put(value, opts)
end
def put(name, value, opts) when is_binary(name) do
name
|> Setting.build(value, opts)
|> do_put()
end
defp do_put(setting_result) do
case setting_result do
{:ok, setting} ->
@cache.put(setting)
@store.put(setting)
error ->
error
end
end
@doc ~S"""
Fetches a setting by name.
Fetches from cache first and falls back to storage if a setting isn't found/is expired.
## Arguments
* `name` - An atom or string representing the name of the setting to fetch
## Returns
If a setting is found, returns a tuple of `:ok` and the stored value
If a setting is not found, returns `{:error, :not_found}`
If there is an error with the storage adaptor, that error is passed straight through as `{:error, any()}`
"""
@spec get(setting_name()) :: {:ok, any()} | {:error, any()}
def get(name) when is_atom(name) do
name
|> Atom.to_string()
|> do_get()
end
def get(name) when is_binary(name) do
do_get(name)
end
defp do_get(stringified_name) do
case @cache.get(stringified_name) do
{:ok, setting} -> Setting.restore(setting)
{:error, :miss, _} -> fetch_from_persistence(stringified_name)
end
end
@doc ~S"""
Fetches all stored settings.
This method differs from others in the fact that:
1) The cache isn't hit, only the source of truth (ie: the store)
2) The raw `Setting.t()` is returned instead of the final re-hydrated value (save for decryption)
Both of these changes come from the fact that this is meant to feed the UI.
The reason it's exposed on the main module is that there's a secondary use case:
validating setting presence on app boot.
Since this hits the store directly, consider how frequently it is called.
## Returns
If successful (even if no settings are found), returns `{:ok, [Setting.t()]}`
If there is an error with the storage adaptor, that error is passed straight through as `{:error, any()}`
"""
@spec get_all() :: {:ok, [Setting.t()]} | {:error, any()}
def get_all do
{:ok, raw_settings} = @store.get_all()
settings =
Enum.map(raw_settings, fn setting ->
# Handles decryption if needed.
# Maintains `encrypted` state for UI purposes
{:ok, value} = Setting.restore(setting)
%Setting{setting | value: value}
end)
{:ok, settings}
end
@doc ~S"""
Deletes a setting by name from cache and storage.
## Arguments
* `name` - An atom representing the name of the setting to delete
## Returns
If the setting was deleted `:ok` is returned.
This method returns `:ok` if the setting wasn't found so it's safe to match on `:ok`
"""
@spec delete(setting_name()) :: :ok
def delete(name) when is_atom(name) do
name
|> Atom.to_string()
|> do_delete()
end
def delete(name) when is_binary(name) do
do_delete(name)
end
defp do_delete(stringified_name) do
@cache.delete(stringified_name)
@store.delete(stringified_name)
end
@doc ~S"""
Checks whether a given setting exists
## Arguments
* `name` - An atom or string representing the name of the setting to check
## Returns
Returns a boolean based on if the setting was found.
This uses the same logic as `get` so cache is hit first
"""
@spec exists?(setting_name()) :: boolean()
def exists?(name) when is_atom(name) or is_binary(name) do
case get(name) do
{:ok, _} -> true
_ -> false
end
end
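# Usage sketch (setting name and value are illustrative):
#   {:ok, "timeout"} = SharedSettings.put(:timeout, 30)
#   {:ok, 30} = SharedSettings.get(:timeout)
#   true = SharedSettings.exists?(:timeout)
#   :ok = SharedSettings.delete(:timeout)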
defp fetch_from_persistence(name) do
case @store.get(name) do
{:ok, setting} -> Setting.restore(setting)
error -> error
end
end
end
|
lib/shared_settings.ex
| 0.807081 | 0.522202 |
shared_settings.ex
|
starcoder
|
defmodule Geometry.WKT.Parser do
@moduledoc false
import Geometry.WKT.Parsers
@spec parse(Geometry.wkt()) ::
{:ok, Geometry.t() | {Geometry.t(), Geometry.srid()}} | Geometry.wkt_error()
def parse(string) do
with {:ok, [info], rest, _context, line, byte_offset} <- geometry(string),
{:ok, geometry, _rest, _context, _line, _byte_offset} <-
geometry_text({info.geometry, info.tag}, rest, line: line, byte_offset: byte_offset) do
case Map.fetch(info, :srid) do
{:ok, srid} -> {:ok, {geometry, srid}}
:error -> {:ok, geometry}
end
else
{:error, message, rest, _context, line, byte_offset} ->
{:error, message, rest, line, byte_offset}
end
end
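# Usage sketch (WKT input; the returned struct depends on the geometry
# found, and an SRID prefix yields a {geometry, srid} tuple):
#   {:ok, point} = Geometry.WKT.Parser.parse("Point (1.1 2.2)")
#   {:ok, {point, 4326}} = Geometry.WKT.Parser.parse("SRID=4326;Point (1.1 2.2)")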
defp geometry_collection_item(string, opts) do
with {:ok, [info], rest, context, line, byte_offset} <-
geometry(string, opts) do
case {info.tag == opts[:tag], Map.get(info, :srid)} do
{true, nil} ->
geometry_text({info.geometry, info.tag}, rest, line: line, byte_offset: byte_offset)
{false, nil} ->
{:error, "unexpected geometry in collection", rest, context, line, byte_offset}
{_tag, _srid} ->
{:error, "unexpected SRID in collection", rest, context, line, byte_offset}
end
end
end
[
"point",
"polygon",
"line_string",
"multi_point",
"multi_line_string",
"multi_polygon",
"geometry_collection"
]
|> Enum.map(fn parser ->
module = Macro.camelize(parser)
modules = [
"Elixir.Geometry.#{module}",
"Elixir.Geometry.#{module}Z",
"Elixir.Geometry.#{module}M",
"Elixir.Geometry.#{module}ZM"
]
{String.to_atom(parser), Enum.map(modules, &String.to_atom/1)}
end)
|> Enum.each(fn {parser, [module, module_z, module_m, module_zm]} ->
defp geometry_text({unquote(parser), tag}, rest, opts) do
opts = Keyword.put(opts, :tag, tag)
case tag do
:xy ->
geometry_text(unquote(module), unquote(:"#{parser}_xy")(rest, opts))
:xyz ->
geometry_text(unquote(module_z), unquote(:"#{parser}_xyz")(rest, opts))
:xym ->
geometry_text(unquote(module_m), unquote(:"#{parser}_xyz")(rest, opts))
:xyzm ->
geometry_text(unquote(module_zm), unquote(:"#{parser}_xyzm")(rest, opts))
end
end
end)
defp geometry_text(module, data) do
case data do
{:ok, [], rest, context, line, byte_offset} ->
{:ok, struct(module), rest, context, line, byte_offset}
{:ok, {:geometries, geometries}, rest, context, line, byte_offset} ->
{:ok, module.new(geometries), rest, context, line, byte_offset}
{:ok, coordinates, rest, context, line, byte_offset} ->
{:ok, module.from_coordinates(coordinates), rest, context, line, byte_offset}
error ->
error
end
end
Enum.each(
[:geometry_collection_xy, :geometry_collection_xyz, :geometry_collection_xyzm],
fn geometry_collection ->
defp unquote(geometry_collection)(string, opts, acc \\ []) do
tag = Keyword.get(opts, :tag)
case next(string, opts) do
{:ok, [:empty], rest, context, line, byte_offset} ->
{:ok, {:geometries, acc}, rest, context, line, byte_offset}
{:ok, [:next], rest, _context, line, byte_offset} ->
with {:ok, geometry, rest, _context, line, byte_offset} <-
geometry_collection_item(rest,
line: line,
byte_offset: byte_offset,
tag: tag
) do
unquote(geometry_collection)(
rest,
[line: line, byte_offset: byte_offset, tag: tag],
[geometry | acc]
)
end
{:ok, [:halt], rest, context, line, byte_offset} ->
{:ok, {:geometries, acc}, rest, context, line, byte_offset}
error ->
error
end
end
end
)
end
|
lib/geometry/wkt/parser.ex
| 0.787237 | 0.57093 |
parser.ex
|
starcoder
|
defmodule Mix do
@moduledoc ~S"""
Mix is a build tool that provides tasks for creating, compiling,
and testing Elixir projects, managing its dependencies, and more.
## Mix.Project
The foundation of Mix is a project. A project can be defined by using
`Mix.Project` in a module, usually placed in a file named `mix.exs`:
defmodule MyApp.MixProject do
use Mix.Project
def project do
[
app: :my_app,
version: "1.0.0"
]
end
end
See the `Mix.Project` module for detailed documentation on Mix projects.
Once the project is defined, a number of default Mix tasks can be run
directly from the command line:
* `mix compile` - compiles the current project
* `mix test` - runs tests for the given project
* `mix run` - runs a particular command inside the project
Each task has its own options and sometimes specific configuration
to be defined in the `project/0` function. You can use `mix help`
to list all available tasks and `mix help NAME` to show help for
a particular task.
The best way to get started with your first project is by calling
`mix new my_project` from the command line.
## Mix.Task
Tasks are what make Mix extensible.
Projects can extend Mix behaviour by adding their own tasks. For
example, adding the task below inside your project will
make it available to everyone that uses your project:
defmodule Mix.Tasks.Hello do
use Mix.Task
def run(_) do
Mix.shell.info "hello"
end
end
The task can now be invoked with `mix hello`.
## Dependencies
Mix also manages your dependencies and integrates nicely with the [Hex package
manager](https://hex.pm).
In order to use dependencies, you need to add a `:deps` key
to your project configuration. We often extract the list of dependencies
into its own function:
defmodule MyApp.MixProject do
use Mix.Project
def project do
[
app: :my_app,
version: "1.0.0",
deps: deps()
]
end
defp deps do
[
{:ecto, "~> 2.0"},
{:plug, github: "elixir-lang/plug"}
]
end
end
You can run `mix help deps` to learn more about dependencies in Mix.
## Environments
Mix supports different environments. Environments allow developers to prepare
and organize their project specifically for different scenarios. By default,
Mix provides three environments:
* `:dev` - the default environment
* `:test` - the environment `mix test` runs on
* `:prod` - the environment your dependencies run on
The environment can be changed via the command line by setting
the `MIX_ENV` environment variable, for example:
$ MIX_ENV=prod mix run server.exs
## Aliases
Aliases are shortcuts or tasks specific to the current project.
In the `Mix.Task` section, we have defined a task that would be
available to everyone using our project as a dependency. What if
we wanted the task to only be available for our project? Just
define an alias:
defmodule MyApp.MixProject do
use Mix.Project
def project do
[
app: :my_app,
version: "1.0.0",
aliases: aliases()
]
end
defp aliases do
[
c: "compile",
hello: &hello/1
]
end
defp hello(_) do
Mix.shell.info "Hello world"
end
end
In the example above, we have defined two aliases. One is `mix c`
which is a shortcut for `mix compile`. The other is named
`mix hello`, which is the equivalent to the `Mix.Tasks.Hello`
we have defined in the `Mix.Task` section.
Aliases may also be lists, specifying multiple tasks to be run
consecutively:
[all: [&hello/1, "deps.get --only #{Mix.env}", "compile"]]
In the example above, we have defined an alias named `mix all`,
that prints hello, then fetches dependencies specific to the
current environment and compiles it.
Arguments given to the alias will be appended to the arguments
of the last task in the list, if the last task is a function
they will be given as a list of strings to the function.
Finally, aliases can also be used to augment existing tasks.
Let's suppose you want to augment `mix clean` to clean another
directory Mix does not know about:
[clean: ["clean", &clean_extra/1]]
Where `&clean_extra/1` would be a function in your `mix.exs`
with extra cleanup logic.
Note aliases do not show up on `mix help`.
Aliases defined in the current project do not affect its dependencies and aliases defined in dependencies are not accessible from the current project.
## Environment variables
Several environment variables can be used to modify Mix's behaviour.
Mix responds to the following variables:
* `MIX_ARCHIVES` - specifies the directory into which the archives should be installed
* `MIX_DEBUG` - outputs debug information about each task before running it
* `MIX_ENV` - specifies which environment should be used. See [Environments](#module-environments)
* `MIX_EXS` - changes the full path to the `mix.exs` file
* `MIX_HOME` - path to mix's home directory, stores configuration files and scripts used by mix
* `MIX_PATH` - appends extra code paths
* `MIX_QUIET` - does not print information messages to the terminal
* `MIX_REBAR` - path to rebar command that overrides the one mix installs
* `MIX_REBAR3` - path to rebar3 command that overrides the one mix installs
Environment variables that are not meant to hold a value (and act basically as
flags) should be set to either `1` or `true`, for example:
$ MIX_DEBUG=1 mix compile
"""
use Application
@doc false
def start do
{:ok, _} = Application.ensure_all_started(:mix)
:ok
end
@doc false
def start(_type, []) do
children = [Mix.State, Mix.TasksServer, Mix.ProjectStack]
opts = [strategy: :one_for_one, name: Mix.Supervisor, max_restarts: 0]
Supervisor.start_link(children, opts)
end
@doc """
Returns the Mix environment.
This function should not be used at runtime in application code (as opposed
to infrastructure and build code like Mix tasks). Mix is a build tool and may
not be available after the code is compiled (for example in a release).
To differentiate the program behavior depending on the environment, it is
recommended to use application environment through `Application.get_env/3`.
Proper configuration can be set in `Mix.Config` files, often per-environment
(see `Mix.Config.import_config/1` for more information).
"""
def env do
# env is not available on bootstrapping, so set a :dev default
Mix.State.get(:env, :dev)
end
@doc """
Changes the current Mix environment to `env`.
Be careful when invoking this function as any project
configuration won't be reloaded.
This function should not be used at runtime in application code
(see `env/0` for more information).
"""
def env(env) when is_atom(env) do
Mix.State.put(:env, env)
end
@doc """
Returns the default compilers used by Mix.
It can be used in your `mix.exs` to prepend or
append new compilers to Mix:
def project do
[compilers: Mix.compilers ++ [:foo, :bar]]
end
"""
def compilers do
[:yecc, :leex, :erlang, :elixir, :xref, :app]
end
@doc """
Returns the current shell.
`shell/0` can be used as a wrapper for the current shell. It contains
conveniences for requesting information from the user, printing to the shell and so
forth. The Mix shell is swappable (see `shell/1`), allowing developers to use
a test shell that simply sends messages to the current process instead of
performing IO (see `Mix.Shell.Process`).
By default, this returns `Mix.Shell.IO`.
"""
def shell do
Mix.State.get(:shell, Mix.Shell.IO)
end
@doc """
Sets the current shell.
After calling this function, `shell` becomes the shell that is returned by
`shell/0`.
"""
def shell(shell) do
Mix.State.put(:shell, shell)
end
@doc """
Returns true if Mix is in debug mode.
"""
def debug? do
Mix.State.get(:debug, false)
end
@doc """
Sets Mix debug mode.
"""
def debug(debug) when is_boolean(debug) do
Mix.State.put(:debug, debug)
end
@doc """
Raises a Mix error that is nicely formatted.
"""
@spec raise(binary) :: no_return
def raise(message) when is_binary(message) do
Kernel.raise(Mix.Error, mix: true, message: message)
end
end
|
lib/mix/lib/mix.ex
| 0.880348 | 0.642853 |
mix.ex
|
starcoder
|
defmodule CoursePlanner.Classes.Summaries do
@moduledoc """
Provides helper functions for the summary page
"""
import Ecto.Query
alias CoursePlanner.{Repo, Terms.Term, Courses.OfferedCourse, Tasks.Task, Settings}
alias Ecto.DateTime
def get_term_offered_course_for_user(user) do
get_term_offered_course_for_user(user, Settings.utc_to_system_timezone(Timex.now()))
end
def get_term_offered_course_for_user(%{id: user_id, role: role}, time) do
case role do
"Student" -> get_student_registered_data(user_id, time)
"Teacher" -> get_teacher_registered_data(user_id, time)
"Coordinator" -> get_all_terms_and_offered_courses(time)
"Volunteer" -> get_all_terms_and_offered_courses(time)
_ -> extract_data_from_offered_courses([])
end
end
def get_all_terms_and_offered_courses(time) do
offered_courses =
Repo.all(from oc in OfferedCourse,
join: t in assoc(oc, :term),
preload: [:course, :classes, term: t],
where: t.end_date >= ^time)
terms =
Repo.all(from t in Term,
where: t.end_date >= ^time)
%{terms: terms, offered_courses: offered_courses}
end
def get_student_registered_data(student_id, time) do
offered_courses =
Repo.all(from oc in OfferedCourse,
join: s in assoc(oc, :students),
join: t in assoc(oc, :term),
preload: [:course, :classes, term: t, students: s],
where: s.id == ^student_id and t.end_date >= ^time)
extract_data_from_offered_courses(offered_courses)
end
def get_teacher_registered_data(teacher_id, time) do
offered_courses =
Repo.all(from oc in OfferedCourse,
join: t in assoc(oc, :teachers),
join: te in assoc(oc, :term),
preload: [:term, :course, :classes, term: te, teachers: t],
where: t.id == ^teacher_id and te.end_date >= ^time)
extract_data_from_offered_courses(offered_courses)
end
defp extract_data_from_offered_courses(offered_courses)
when is_list(offered_courses) and length(offered_courses) > 0 do
terms =
offered_courses
|> Enum.map(&(&1.term))
|> Enum.uniq()
%{terms: terms, offered_courses: offered_courses}
end
defp extract_data_from_offered_courses(_offered_courses), do: %{terms: [], offered_courses: []}
def get_next_class(offered_courses) do
get_next_class(offered_courses, Settings.utc_to_system_timezone(Timex.now()))
end
def get_next_class(offered_courses, time)
when is_list(offered_courses) and length(offered_courses) > 0 do
offered_courses
|> Enum.flat_map(&(&1.classes))
|> Enum.filter(fn(class) ->
class_starting_at_datetime =
class.date
|> DateTime.from_date_and_time(class.starting_at)
|> Timex.Ecto.DateTime.cast!
Timex.compare(class_starting_at_datetime, time) >= 0
end)
|> Enum.sort(fn(class1, class2) ->
class1_datetime = DateTime.from_date_and_time(class1.date, class1.starting_at)
class2_datetime = DateTime.from_date_and_time(class2.date, class2.starting_at)
DateTime.compare(class1_datetime, class2_datetime) != :gt
end)
|> List.first
end
def get_next_class(_offered_courses, _time), do: nil
def get_next_task(user) do
get_next_task(user, Settings.utc_to_system_timezone(Timex.now()))
end
def get_next_task(%{id: user_id, role: "Volunteer"}, time) do
Repo.one(from t in Task,
join: v in assoc(t, :volunteers),
preload: [volunteers: v],
where: v.id == ^user_id and t.start_time >= ^time,
order_by: [:start_time],
limit: 1)
end
def get_next_task(_user, _time), do: nil
end
|
lib/course_planner/classes/summaries.ex
| 0.568775 | 0.512937 |
summaries.ex
|
starcoder
|
defmodule BPXE.Engine.PrecedenceGateway do
@moduledoc """
*Note: This gateway is not described in BPMN 2.0. However, it's available through
BPXE's extension schema.*
This gateway will only process the first arrival of a received token
(tracked by token_id) and send it out to a corresponding output. The
correspondence is achieved by requiring the same number of incoming and
outgoing sequence flows; they are mapped directly, so that the Nth incoming
flow will trigger the Nth outgoing flow.
"""
use GenServer
use BPXE.Engine.FlowNode
alias BPXE.Engine.Process
alias BPXE.Engine.Process.Log
defstate precedence: %{}
@persist_state :precedence
def start_link(element, attrs, model, process) do
GenServer.start_link(__MODULE__, {element, attrs, model, process})
end
def init({_element, attrs, model, process}) do
state =
%__MODULE__{}
|> put_state(Base, %{attrs: attrs, model: model, process: process})
state = initialize(state)
{:ok, state}
end
def handle_token({%BPXE.Token{} = token, id}, state) do
base_state = get_state(state, BPXE.Engine.Base)
Process.log(base_state.process, %Log.PrecedenceGatewayActivated{
pid: self(),
id: base_state.attrs["id"],
token_id: token.token_id
})
case state.precedence[token.token_id] do
nil ->
Process.log(base_state.process, %Log.PrecedenceGatewayPrecedenceEstablished{
pid: self(),
id: base_state.attrs["id"],
token_id: token.token_id
})
state = %{state | precedence: Map.put(state.precedence, token.token_id, [id])}
case corresponds_to(id, state) do
nil ->
# There's no mapping between these flows
# Drop the token
{:dontsend, state}
outgoing ->
# There's a mapping, send it there
{:send, token, [outgoing], state}
end
precedence ->
Process.log(base_state.process, %Log.PrecedenceGatewayTokenDiscarded{
pid: self(),
id: base_state.attrs["id"],
token_id: token.token_id
})
new_precedence = [id | precedence]
flow_node_state = get_state(state, BPXE.Engine.FlowNode)
if length(new_precedence) == length(flow_node_state.incoming) and
length(flow_node_state.incoming) == length(flow_node_state.outgoing) do
# We've received them all, drop it from the state
{:dontsend, %{state | precedence: Map.delete(state.precedence, token.token_id)}}
else
# Drop the token
{:dontsend,
%{state | precedence: Map.put(state.precedence, token.token_id, new_precedence)}}
end
end
end
defp corresponds_to(id, state) do
flow_node_state = get_state(state, BPXE.Engine.FlowNode)
index = Enum.find_index(flow_node_state.incoming, fn x -> x == id end)
Enum.at(flow_node_state.outgoing, index)
end
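# Example (hypothetical flow ids): with incoming ["in1", "in2"] and
# outgoing ["out1", "out2"], a token first arriving on "in2" is forwarded
# to "out2"; later arrivals of the same token_id are discarded.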
end
|
lib/bpxe/engine/precedence_gateway.ex
| 0.73914 | 0.408395 |
precedence_gateway.ex
|
starcoder
|
defmodule Geocalc.Calculator do
@moduledoc false
alias Geocalc.Point
@earth_radius 6_371_000
@pi :math.pi()
@epsilon 2.220446049250313e-16
@intersection_not_found "No intersection point found"
def distance_between(point_1, point_2, radius \\ @earth_radius) do
fo_1 = degrees_to_radians(Point.latitude(point_1))
fo_2 = degrees_to_radians(Point.latitude(point_2))
diff_fo = degrees_to_radians(Point.latitude(point_2) - Point.latitude(point_1))
diff_la = degrees_to_radians(Point.longitude(point_2) - Point.longitude(point_1))
a =
:math.sin(diff_fo / 2) * :math.sin(diff_fo / 2) +
:math.cos(fo_1) * :math.cos(fo_2) * :math.sin(diff_la / 2) * :math.sin(diff_la / 2)
c = 2 * :math.atan2(:math.sqrt(a), :math.sqrt(1 - a))
radius * c
end
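# Example (assumes the List implementation of Geocalc.Point): Berlin to
# London is roughly 930 km on the default earth radius:
#   distance_between([52.5200, 13.4050], [51.5074, -0.1278])
#   #=> ~9.3e5 (meters)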
def bearing(point_1, point_2) do
fo_1 = degrees_to_radians(Point.latitude(point_1))
fo_2 = degrees_to_radians(Point.latitude(point_2))
la_1 = degrees_to_radians(Point.longitude(point_1))
la_2 = degrees_to_radians(Point.longitude(point_2))
y = :math.sin(la_2 - la_1) * :math.cos(fo_2)
x =
:math.cos(fo_1) * :math.sin(fo_2) -
:math.sin(fo_1) * :math.cos(fo_2) * :math.cos(la_2 - la_1)
:math.atan2(y, x)
end
def destination_point(point_1, brng, distance) do
destination_point(point_1, brng, distance, @earth_radius)
end
defp destination_point(point_1, brng, distance, radius) when is_number(brng) do
fo_1 = degrees_to_radians(Point.latitude(point_1))
la_1 = degrees_to_radians(Point.longitude(point_1))
rad_lat =
:math.asin(
:math.sin(fo_1) * :math.cos(distance / radius) +
:math.cos(fo_1) * :math.sin(distance / radius) * :math.cos(brng)
)
rad_lng =
la_1 +
:math.atan2(
:math.sin(brng) * :math.sin(distance / radius) * :math.cos(fo_1),
:math.cos(distance / radius) - :math.sin(fo_1) * :math.sin(rad_lat)
)
{:ok, [radians_to_degrees(rad_lat), radians_to_degrees(rad_lng)]}
end
defp destination_point(point_1, point_2, distance, radius) do
brng = bearing(point_1, point_2)
destination_point(point_1, brng, distance, radius)
end
def intersection_point(point_1, bearing_1, point_2, bearing_2)
when is_number(bearing_1) and is_number(bearing_2) do
intersection_point!(point_1, bearing_1, point_2, bearing_2)
catch
message -> {:error, message}
end
def intersection_point(point_1, bearing_1, point_3, point_4) when is_number(bearing_1) do
brng_3 = bearing(point_3, point_4)
intersection_point(point_1, bearing_1, point_3, brng_3)
end
def intersection_point(point_1, point_2, point_3, bearing_2) when is_number(bearing_2) do
brng_1 = bearing(point_1, point_2)
intersection_point(point_1, brng_1, point_3, bearing_2)
end
def intersection_point(point_1, point_2, point_3, point_4) do
brng_1 = bearing(point_1, point_2)
brng_3 = bearing(point_3, point_4)
intersection_point(point_1, brng_1, point_3, brng_3)
end
defp intersection_point!(point_1, bearing_1, point_2, bearing_2) do
fo_1 = degrees_to_radians(Point.latitude(point_1))
la_1 = degrees_to_radians(Point.longitude(point_1))
fo_2 = degrees_to_radians(Point.latitude(point_2))
la_2 = degrees_to_radians(Point.longitude(point_2))
bo_13 = bearing_1
bo_23 = bearing_2
diff_fo = fo_2 - fo_1
diff_la = la_2 - la_1
# angular distance point_1 - point_2
be_12 =
2 *
:math.asin(
:math.sqrt(
:math.sin(diff_fo / 2) * :math.sin(diff_fo / 2) +
:math.cos(fo_1) * :math.cos(fo_2) * :math.sin(diff_la / 2) * :math.sin(diff_la / 2)
)
)
if abs(be_12) < @epsilon do
{:ok, [Point.latitude(point_1), Point.longitude(point_1)]}
else
cos_fo_a = (:math.sin(fo_2) - :math.sin(fo_1) * :math.cos(be_12)) / (:math.sin(be_12) * :math.cos(fo_1))
cos_fo_b = (:math.sin(fo_1) - :math.sin(fo_2) * :math.cos(be_12)) / (:math.sin(be_12) * :math.cos(fo_2))
bo_1 = :math.acos(min(max(cos_fo_a, -1), 1))
bo_2 = :math.acos(min(max(cos_fo_b, -1), 1))
{bo_12, bo_21} =
if :math.sin(la_2 - la_1) > 0 do
{bo_1, 2 * :math.pi() - bo_2}
else
{2 * :math.pi() - bo_1, bo_2}
end
a_1 = bo_13 - bo_12
a_2 = bo_21 - bo_23
# infinite intersections
if :math.sin(a_1) == 0 && :math.sin(a_2) == 0, do: throw(@intersection_not_found)
# ambiguous intersection
if :math.sin(a_1) * :math.sin(a_2) < 0, do: throw(@intersection_not_found)
a_3 =
:math.acos(
-:math.cos(a_1) * :math.cos(a_2) + :math.sin(a_1) * :math.sin(a_2) * :math.cos(be_12)
)
be_13 =
:math.atan2(
:math.sin(be_12) * :math.sin(a_1) * :math.sin(a_2),
:math.cos(a_2) + :math.cos(a_1) * :math.cos(a_3)
)
fo_3 =
:math.asin(
:math.sin(fo_1) * :math.cos(be_13) + :math.cos(fo_1) * :math.sin(be_13) * :math.cos(bo_13)
)
diff_la_13 =
:math.atan2(
:math.sin(bo_13) * :math.sin(be_13) * :math.cos(fo_1),
:math.cos(be_13) - :math.sin(fo_1) * :math.sin(fo_3)
)
la_3 = la_1 + diff_la_13
{:ok, [radians_to_degrees(fo_3), radians_to_degrees(la_3)]}
end
end
def rem_float(float_1, float_2) when float_1 < 0 do
float_1 - Float.ceil(float_1 / float_2) * float_2
end
def rem_float(float_1, float_2) do
float_1 - Float.floor(float_1 / float_2) * float_2
end
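# rem_float/2 is a sign-safe remainder used below for angle wrapping, e.g.
# mapping 190 degrees into the (-180, 180] range:
#   rem_float(190 + 540, 360) - 180 #=> -170.0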
def degrees_to_radians(degrees) do
normalize_degrees(degrees) * :math.pi() / 180
end
defp normalize_degrees(degrees) when degrees < -180 do
normalize_degrees(degrees + 2 * 180)
end
defp normalize_degrees(degrees) when degrees > 180 do
normalize_degrees(degrees - 2 * 180)
end
defp normalize_degrees(degrees) do
degrees
end
def radians_to_degrees(radians) do
normalize_radians(radians) * 180 / :math.pi()
end
defp normalize_radians(radians) when radians < -@pi do
normalize_radians(radians + 2 * :math.pi())
end
defp normalize_radians(radians) when radians > @pi do
normalize_radians(radians - 2 * :math.pi())
end
defp normalize_radians(radians) do
radians
end
def bounding_box(point, radius_in_m) do
lat = degrees_to_radians(Point.latitude(point))
lon = degrees_to_radians(Point.longitude(point))
radius = earth_radius(lat)
pradius = radius * :math.cos(lat)
lat_min = lat - radius_in_m / radius
lat_max = lat + radius_in_m / radius
lon_min = lon - radius_in_m / pradius
lon_max = lon + radius_in_m / pradius
[
[radians_to_degrees(lat_min), radians_to_degrees(lon_min)],
[radians_to_degrees(lat_max), radians_to_degrees(lon_max)]
]
end
def bounding_box_for_points([]) do
[[0, 0], [0, 0]]
end
def bounding_box_for_points([point]) do
bounding_box(point, 0)
end
def bounding_box_for_points([point | points]) do
extend_bounding_box(bounding_box(point, 0), bounding_box_for_points(points))
end
def extend_bounding_box([sw_point_1, ne_point_1], [sw_point_2, ne_point_2]) do
sw_lat = Kernel.min(Point.latitude(sw_point_2), Point.latitude(sw_point_1))
sw_lon = Kernel.min(Point.longitude(sw_point_2), Point.longitude(sw_point_1))
ne_lat = Kernel.max(Point.latitude(ne_point_2), Point.latitude(ne_point_1))
ne_lon = Kernel.max(Point.longitude(ne_point_2), Point.longitude(ne_point_1))
[
[sw_lat, sw_lon],
[ne_lat, ne_lon]
]
end
def contains_point?([sw_point, ne_point], point) do
Point.latitude(point) >= Point.latitude(sw_point) &&
Point.latitude(point) <= Point.latitude(ne_point) &&
Point.longitude(point) >= Point.longitude(sw_point) &&
Point.longitude(point) <= Point.longitude(ne_point)
end
def intersects_bounding_box?([sw_point_1, ne_point_1], [sw_point_2, ne_point_2]) do
Point.latitude(ne_point_2) >= Point.latitude(sw_point_1) &&
Point.latitude(sw_point_2) <= Point.latitude(ne_point_1) &&
Point.longitude(ne_point_2) >= Point.longitude(sw_point_1) &&
Point.longitude(sw_point_2) <= Point.longitude(ne_point_1)
end
def overlaps_bounding_box?([sw_point_1, ne_point_1], [sw_point_2, ne_point_2]) do
Point.latitude(ne_point_2) > Point.latitude(sw_point_1) &&
Point.latitude(sw_point_2) < Point.latitude(ne_point_1) &&
Point.longitude(ne_point_2) > Point.longitude(sw_point_1) &&
Point.longitude(sw_point_2) < Point.longitude(ne_point_1)
end
# Semi-axes of WGS-84 geoidal reference
# Major semiaxis [m]
@wgsa 6_378_137.0
# Minor semiaxis [m]
@wgsb 6_356_752.3
defp earth_radius(lat) do
# http://en.wikipedia.org/wiki/Earth_radius
an = @wgsa * @wgsa * :math.cos(lat)
bn = @wgsb * @wgsb * :math.sin(lat)
ad = @wgsa * :math.cos(lat)
bd = @wgsb * :math.sin(lat)
:math.sqrt((an * an + bn * bn) / (ad * ad + bd * bd))
end
def geographic_center(points) do
[xa, ya, za] =
points
|> Enum.map(fn point ->
[degrees_to_radians(Point.latitude(point)), degrees_to_radians(Point.longitude(point))]
end)
|> Enum.reduce([[], [], []], fn point, [x, y, z] ->
x = [:math.cos(Point.latitude(point)) * :math.cos(Point.longitude(point)) | x]
y = [:math.cos(Point.latitude(point)) * :math.sin(Point.longitude(point)) | y]
z = [:math.sin(Point.latitude(point)) | z]
[x, y, z]
end)
|> Enum.map(fn list -> Enum.sum(list) / length(list) end)
lon = :math.atan2(ya, xa)
hyp = :math.sqrt(xa * xa + ya * ya)
lat = :math.atan2(za, hyp)
[radians_to_degrees(lat), radians_to_degrees(lon)]
end
def max_latitude(point, bearing) do
lat = degrees_to_radians(Point.latitude(point))
max_lat = :math.acos(Kernel.abs(:math.sin(bearing) * :math.cos(lat)))
radians_to_degrees(max_lat)
end
def cross_track_distance_to(point, path_start_point, path_end_point, radius \\ @earth_radius) do
dist_13 = distance_between(path_start_point, point, radius) / radius
be_13 = bearing(path_start_point, point)
be_12 = bearing(path_start_point, path_end_point)
:math.asin(:math.sin(dist_13) * :math.sin(be_13 - be_12)) * radius
end
def crossing_parallels(point_1, point_2, latitude) do
lat = degrees_to_radians(latitude)
lat_1 = degrees_to_radians(Point.latitude(point_1))
lon_1 = degrees_to_radians(Point.longitude(point_1))
lat_2 = degrees_to_radians(Point.latitude(point_2))
lon_2 = degrees_to_radians(Point.longitude(point_2))
diff_lon = lon_2 - lon_1
x = :math.sin(lat_1) * :math.cos(lat_2) * :math.cos(lat) * :math.sin(diff_lon)
y =
:math.sin(lat_1) * :math.cos(lat_2) * :math.cos(lat) * :math.cos(diff_lon) -
:math.cos(lat_1) * :math.sin(lat_2) * :math.cos(lat)
z = :math.cos(lat_1) * :math.cos(lat_2) * :math.sin(lat) * :math.sin(diff_lon)
if z * z > x * x + y * y do
{:error, "Not found"}
else
lon_max = :math.atan2(-y, x)
diff_lon_i = :math.acos(z / :math.sqrt(x * x + y * y))
lon_i_1 = lon_1 + lon_max - diff_lon_i
lon_i_2 = lon_1 + lon_max + diff_lon_i
{:ok, rem_float(radians_to_degrees(lon_i_1) + 540, 360) - 180,
rem_float(radians_to_degrees(lon_i_2) + 540, 360) - 180}
end
end
end
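# A minimal usage sketch for the functions above. It assumes this module is
# geocalc's Geocalc.Calculator and that the Point protocol is implemented for
# [lat, lon] lists, as in the geocalc library; the coordinates are made up:
#
#     points = [[52.5, 13.4], [51.5, -0.1], [48.9, 2.3]]
#     [sw, ne] = Geocalc.Calculator.bounding_box_for_points(points)
#     Geocalc.Calculator.contains_point?([sw, ne], [51.0, 5.0])
#     # => true for coordinates inside the box, false otherwise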
# lib/geocalc/calculator.ex
defmodule ShEx.TripleConstraint do
@moduledoc false
defstruct [
# tripleExprLabel?
:id,
# shapeExpr?
:value_expr,
# IRIREF
:predicate,
# BOOL?
:inverse,
# INTEGER?
:min,
# INTEGER?
:max,
# [SemAct+]?
:sem_acts,
# [Annotation+]?
:annotations
]
import ShEx.TripleExpression.Shared
def matches(
%__MODULE__{inverse: true} = triple_constraint,
{arcs_in, arcs_out},
graph,
schema,
association,
state
) do
with {:ok, matched, remainder} <-
matches(triple_constraint, arcs_in, graph, schema, association, state) do
{:ok, matched, {remainder, arcs_out}}
end
end
def matches(triple_constraint, {arcs_in, arcs_out}, graph, schema, association, state) do
with {:ok, matched, remainder} <-
matches(triple_constraint, arcs_out, graph, schema, association, state) do
{:ok, matched, {arcs_in, remainder}}
end
end
def matches(triple_constraint, triples, graph, schema, association, state) do
with {matched, mismatched, remainder, violations} <-
find_matches(triples, triple_constraint, graph, schema, association, state),
:ok <-
check_cardinality(
length(matched),
ShEx.TripleExpression.min_cardinality(triple_constraint),
triple_constraint,
violations
) do
{:ok, matched, mismatched ++ remainder}
else
violation ->
{:error, violation}
end
end
defp find_matches(triples, triple_constraint, graph, schema, association, state) do
do_find_matches(
{[], [], triples, []},
triple_constraint.value_expr,
triple_constraint.predicate,
triple_constraint.inverse,
ShEx.TripleExpression.max_cardinality(triple_constraint),
{graph, schema, association, state}
)
end
defp do_find_matches(acc, value_expr, predicate, inverse, max, match_context)
defp do_find_matches({_, _, [], _} = acc, _, _, _, _, _), do: acc
defp do_find_matches({matched, _, _, _} = acc, _, _, _, max, _)
when length(matched) == max,
do: acc
defp do_find_matches(
{matched, mismatched, [{_, predicate, _} = statement | remainder], violations},
nil,
predicate,
inverse,
max,
match_context
) do
{[statement | matched], mismatched, remainder, violations}
|> do_find_matches(nil, predicate, inverse, max, match_context)
end
defp do_find_matches(
{matched, mismatched, [{subject, predicate, object} = statement | remainder],
violations},
value_expr,
predicate,
inverse,
max,
{graph, schema, _association, state} = match_context
) do
value = if inverse, do: subject, else: object
ShEx.ShapeExpression.satisfies(
value_expr,
graph,
schema,
ShEx.ShapeMap.Association.new(value, value_expr),
state
)
|> case do
%{status: :conformant} ->
{[statement | matched], mismatched, remainder, violations}
%{status: :nonconformant} = nonconformant ->
{matched, [statement | mismatched], remainder,
violations ++
List.wrap(nonconformant.reason)}
end
|> do_find_matches(value_expr, predicate, inverse, max, match_context)
end
defp do_find_matches(
{matched, mismatched, [statement | remainder], violations},
value_expr,
predicate,
inverse,
max,
match_context
) do
{matched, [statement | mismatched], remainder, violations}
|> do_find_matches(value_expr, predicate, inverse, max, match_context)
end
defimpl ShEx.TripleExpression do
def matches(triple_constraint, triples, graph, schema, association, state) do
ShEx.TripleConstraint.matches(triple_constraint, triples, graph, schema, association, state)
end
def min_cardinality(triple_constraint),
do: ShEx.TripleExpression.Shared.min_cardinality(triple_constraint)
def max_cardinality(triple_constraint),
do: ShEx.TripleExpression.Shared.max_cardinality(triple_constraint)
def predicates(%ShEx.TripleConstraint{predicate: predicate}, _), do: [predicate]
def triple_constraints(triple_constraint, _), do: [triple_constraint]
def required_arcs(%ShEx.TripleConstraint{inverse: true}, _), do: {:ok, :arcs_in}
def required_arcs(_, _), do: {:ok, :arcs_out}
end
defimpl ShEx.Operator do
def children(triple_constraint) do
cond do
is_nil(triple_constraint.value_expr) ->
[]
RDF.term?(triple_constraint.value_expr) ->
[{:shape_expression_label, triple_constraint.value_expr}]
true ->
[triple_constraint.value_expr]
end
end
def triple_expression_label_and_operands(triple_constraint),
do: {triple_constraint.id, List.wrap(triple_constraint.value_expr)}
end
end
# lib/shex/shape_expressions/triple_constraint.ex
defmodule AWS.Lightsail do
@moduledoc """
Amazon Lightsail is the easiest way to get started with AWS for developers
who just need virtual private servers. Lightsail includes everything you
need to launch your project quickly - a virtual machine, SSD-based storage,
data transfer, DNS management, and a static IP - for a low, predictable
price. You manage those Lightsail servers through the Lightsail console or
by using the API or command-line interface (CLI).
For more information about Lightsail concepts and tasks, see the [Lightsail
Dev Guide](https://lightsail.aws.amazon.com/ls/docs/all).
To use the Lightsail API or the CLI, you will need to use AWS Identity and
Access Management (IAM) to generate access keys. For details about how to
set this up, see the [Lightsail Dev
Guide](http://lightsail.aws.amazon.com/ls/docs/how-to/article/lightsail-how-to-set-up-access-keys-to-use-sdk-api-cli).
"""
@doc """
Allocates a static IP address.
"""
def allocate_static_ip(client, input, options \\ []) do
request(client, "AllocateStaticIp", input, options)
end
@doc """
Attaches a block storage disk to a running or stopped Lightsail instance
and exposes it to the instance with the specified disk name.
"""
def attach_disk(client, input, options \\ []) do
request(client, "AttachDisk", input, options)
end
@doc """
Attaches one or more Lightsail instances to a load balancer.
After some time, the instances are attached to the load balancer and the
health check status is available.
"""
def attach_instances_to_load_balancer(client, input, options \\ []) do
request(client, "AttachInstancesToLoadBalancer", input, options)
end
@doc """
Attaches a Transport Layer Security (TLS) certificate to your load
balancer. TLS is just an updated, more secure version of Secure Socket
Layer (SSL).
Once you create and validate your certificate, you can attach it to your
load balancer. You can also use this API to rotate the certificates on your
account. Use the `AttachLoadBalancerTlsCertificate` operation with the
non-attached certificate, and it will replace the existing one and become
the attached certificate.
"""
def attach_load_balancer_tls_certificate(client, input, options \\ []) do
request(client, "AttachLoadBalancerTlsCertificate", input, options)
end
@doc """
Attaches a static IP address to a specific Amazon Lightsail instance.
"""
def attach_static_ip(client, input, options \\ []) do
request(client, "AttachStaticIp", input, options)
end
@doc """
Closes the public ports on a specific Amazon Lightsail instance.
"""
def close_instance_public_ports(client, input, options \\ []) do
request(client, "CloseInstancePublicPorts", input, options)
end
@doc """
Creates a block storage disk that can be attached to a Lightsail instance
in the same Availability Zone (e.g., `us-east-2a`). The disk is created in
the regional endpoint that you send the HTTP request to. For more
information, see [Regions and Availability Zones in
Lightsail](https://lightsail.aws.amazon.com/ls/docs/overview/article/understanding-regions-and-availability-zones-in-amazon-lightsail).
"""
def create_disk(client, input, options \\ []) do
request(client, "CreateDisk", input, options)
end
@doc """
Creates a block storage disk from a disk snapshot that can be attached to a
Lightsail instance in the same Availability Zone (e.g., `us-east-2a`). The
disk is created in the regional endpoint that you send the HTTP request to.
For more information, see [Regions and Availability Zones in
Lightsail](https://lightsail.aws.amazon.com/ls/docs/overview/article/understanding-regions-and-availability-zones-in-amazon-lightsail).
"""
def create_disk_from_snapshot(client, input, options \\ []) do
request(client, "CreateDiskFromSnapshot", input, options)
end
@doc """
Creates a snapshot of a block storage disk. You can use snapshots for
backups, to make copies of disks, and to save data before shutting down a
Lightsail instance.
You can take a snapshot of an attached disk that is in use; however,
snapshots only capture data that has been written to your disk at the time
the snapshot command is issued. This may exclude any data that has been
cached by any applications or the operating system. If you can pause any
file systems on the disk long enough to take a snapshot, your snapshot
should be complete. Nevertheless, if you cannot pause all file writes to
the disk, you should unmount the disk from within the Lightsail instance,
issue the create disk snapshot command, and then remount the disk to ensure
a consistent and complete snapshot. You may remount and use your disk while
the snapshot status is pending.
"""
def create_disk_snapshot(client, input, options \\ []) do
request(client, "CreateDiskSnapshot", input, options)
end
@doc """
Creates a domain resource for the specified domain (e.g., example.com).
"""
def create_domain(client, input, options \\ []) do
request(client, "CreateDomain", input, options)
end
@doc """
Creates one of the following entry records associated with the domain: A
record, CNAME record, TXT record, or MX record.
"""
def create_domain_entry(client, input, options \\ []) do
request(client, "CreateDomainEntry", input, options)
end
@doc """
Creates a snapshot of a specific virtual private server, or *instance*. You
can use a snapshot to create a new instance that is based on that snapshot.
"""
def create_instance_snapshot(client, input, options \\ []) do
request(client, "CreateInstanceSnapshot", input, options)
end
@doc """
Creates one or more Amazon Lightsail virtual private servers, or
*instances*.
"""
def create_instances(client, input, options \\ []) do
request(client, "CreateInstances", input, options)
end
@doc """
Uses a specific snapshot as a blueprint for creating one or more new
instances that are based on that identical configuration.
"""
def create_instances_from_snapshot(client, input, options \\ []) do
request(client, "CreateInstancesFromSnapshot", input, options)
end
@doc """
Creates an SSH key pair.
"""
def create_key_pair(client, input, options \\ []) do
request(client, "CreateKeyPair", input, options)
end
@doc """
Creates a Lightsail load balancer. To learn more about deciding whether to
load balance your application, see [Configure your Lightsail instances for
load
balancing](https://lightsail.aws.amazon.com/ls/docs/how-to/article/configure-lightsail-instances-for-load-balancing).
You can create up to 5 load balancers per AWS Region in your account.
When you create a load balancer, you can specify a unique name and port
settings. To change additional load balancer settings, use the
`UpdateLoadBalancerAttribute` operation.
"""
def create_load_balancer(client, input, options \\ []) do
request(client, "CreateLoadBalancer", input, options)
end
@doc """
Creates a Lightsail load balancer TLS certificate.
TLS is just an updated, more secure version of Secure Socket Layer (SSL).
"""
def create_load_balancer_tls_certificate(client, input, options \\ []) do
request(client, "CreateLoadBalancerTlsCertificate", input, options)
end
@doc """
Deletes the specified block storage disk. The disk must be in the
`available` state (not attached to a Lightsail instance).
Note: The disk may remain in the `deleting` state for several minutes.
"""
def delete_disk(client, input, options \\ []) do
request(client, "DeleteDisk", input, options)
end
@doc """
Deletes the specified disk snapshot.
When you make periodic snapshots of a disk, the snapshots are incremental,
and only the blocks on the device that have changed since your last
snapshot are saved in the new snapshot. When you delete a snapshot, only
the data not needed for any other snapshot is removed. So regardless of
which prior snapshots have been deleted, all active snapshots will have
access to all the information needed to restore the disk.
"""
def delete_disk_snapshot(client, input, options \\ []) do
request(client, "DeleteDiskSnapshot", input, options)
end
@doc """
Deletes the specified domain recordset and all of its domain records.
"""
def delete_domain(client, input, options \\ []) do
request(client, "DeleteDomain", input, options)
end
@doc """
Deletes a specific domain entry.
"""
def delete_domain_entry(client, input, options \\ []) do
request(client, "DeleteDomainEntry", input, options)
end
@doc """
Deletes a specific Amazon Lightsail virtual private server, or *instance*.
"""
def delete_instance(client, input, options \\ []) do
request(client, "DeleteInstance", input, options)
end
@doc """
Deletes a specific snapshot of a virtual private server (or *instance*).
"""
def delete_instance_snapshot(client, input, options \\ []) do
request(client, "DeleteInstanceSnapshot", input, options)
end
@doc """
Deletes a specific SSH key pair.
"""
def delete_key_pair(client, input, options \\ []) do
request(client, "DeleteKeyPair", input, options)
end
@doc """
Deletes a Lightsail load balancer and all its associated SSL/TLS
certificates. Once the load balancer is deleted, you will need to create a
new load balancer, create a new certificate, and verify domain ownership
again.
"""
def delete_load_balancer(client, input, options \\ []) do
request(client, "DeleteLoadBalancer", input, options)
end
@doc """
Deletes an SSL/TLS certificate associated with a Lightsail load balancer.
"""
def delete_load_balancer_tls_certificate(client, input, options \\ []) do
request(client, "DeleteLoadBalancerTlsCertificate", input, options)
end
@doc """
Detaches a stopped block storage disk from a Lightsail instance. Make sure
to unmount any file systems on the device within your operating system
before stopping the instance and detaching the disk.
"""
def detach_disk(client, input, options \\ []) do
request(client, "DetachDisk", input, options)
end
@doc """
Detaches the specified instances from a Lightsail load balancer.
This operation waits until the instances are no longer needed before they
are detached from the load balancer.
"""
def detach_instances_from_load_balancer(client, input, options \\ []) do
request(client, "DetachInstancesFromLoadBalancer", input, options)
end
@doc """
Detaches a static IP from the Amazon Lightsail instance to which it is
attached.
"""
def detach_static_ip(client, input, options \\ []) do
request(client, "DetachStaticIp", input, options)
end
@doc """
Downloads the default SSH key pair from the user's account.
"""
def download_default_key_pair(client, input, options \\ []) do
request(client, "DownloadDefaultKeyPair", input, options)
end
@doc """
Returns the names of all active (not deleted) resources.
"""
def get_active_names(client, input, options \\ []) do
request(client, "GetActiveNames", input, options)
end
@doc """
Returns the list of available instance images, or *blueprints*. You can use
a blueprint to create a new virtual private server already running a
specific operating system, as well as a preinstalled app or development
stack. The software each instance is running depends on the blueprint image
you choose.
"""
def get_blueprints(client, input, options \\ []) do
request(client, "GetBlueprints", input, options)
end
@doc """
Returns the list of bundles that are available for purchase. A bundle
describes the specs for your virtual private server (or *instance*).
"""
def get_bundles(client, input, options \\ []) do
request(client, "GetBundles", input, options)
end
@doc """
Returns information about a specific block storage disk.
"""
def get_disk(client, input, options \\ []) do
request(client, "GetDisk", input, options)
end
@doc """
Returns information about a specific block storage disk snapshot.
"""
def get_disk_snapshot(client, input, options \\ []) do
request(client, "GetDiskSnapshot", input, options)
end
@doc """
Returns information about all block storage disk snapshots in your AWS
account and region.
If you are describing a long list of disk snapshots, you can paginate the
output to make the list more manageable. You can use the pageToken and
nextPageToken values to retrieve the next items in the list.
"""
def get_disk_snapshots(client, input, options \\ []) do
request(client, "GetDiskSnapshots", input, options)
end
@doc """
Returns information about all block storage disks in your AWS account and
region.
If you are describing a long list of disks, you can paginate the output to
make the list more manageable. You can use the pageToken and nextPageToken
values to retrieve the next items in the list.
"""
def get_disks(client, input, options \\ []) do
request(client, "GetDisks", input, options)
end
@doc """
Returns information about a specific domain recordset.
"""
def get_domain(client, input, options \\ []) do
request(client, "GetDomain", input, options)
end
@doc """
Returns a list of all domains in the user's account.
"""
def get_domains(client, input, options \\ []) do
request(client, "GetDomains", input, options)
end
@doc """
Returns information about a specific Amazon Lightsail instance, which is a
virtual private server.
"""
def get_instance(client, input, options \\ []) do
request(client, "GetInstance", input, options)
end
@doc """
Returns temporary SSH keys you can use to connect to a specific virtual
private server, or *instance*.
"""
def get_instance_access_details(client, input, options \\ []) do
request(client, "GetInstanceAccessDetails", input, options)
end
@doc """
Returns the data points for the specified Amazon Lightsail instance metric,
given an instance name.
"""
def get_instance_metric_data(client, input, options \\ []) do
request(client, "GetInstanceMetricData", input, options)
end
@doc """
Returns the port states for a specific virtual private server, or
*instance*.
"""
def get_instance_port_states(client, input, options \\ []) do
request(client, "GetInstancePortStates", input, options)
end
@doc """
Returns information about a specific instance snapshot.
"""
def get_instance_snapshot(client, input, options \\ []) do
request(client, "GetInstanceSnapshot", input, options)
end
@doc """
Returns all instance snapshots for the user's account.
"""
def get_instance_snapshots(client, input, options \\ []) do
request(client, "GetInstanceSnapshots", input, options)
end
@doc """
Returns the state of a specific instance. Works on one instance at a time.
"""
def get_instance_state(client, input, options \\ []) do
request(client, "GetInstanceState", input, options)
end
@doc """
Returns information about all Amazon Lightsail virtual private servers, or
*instances*.
"""
def get_instances(client, input, options \\ []) do
request(client, "GetInstances", input, options)
end
@doc """
Returns information about a specific key pair.
"""
def get_key_pair(client, input, options \\ []) do
request(client, "GetKeyPair", input, options)
end
@doc """
Returns information about all key pairs in the user's account.
"""
def get_key_pairs(client, input, options \\ []) do
request(client, "GetKeyPairs", input, options)
end
@doc """
Returns information about the specified Lightsail load balancer.
"""
def get_load_balancer(client, input, options \\ []) do
request(client, "GetLoadBalancer", input, options)
end
@doc """
Returns information about health metrics for your Lightsail load balancer.
"""
def get_load_balancer_metric_data(client, input, options \\ []) do
request(client, "GetLoadBalancerMetricData", input, options)
end
@doc """
Returns information about the TLS certificates that are associated with the
specified Lightsail load balancer.
TLS is just an updated, more secure version of Secure Socket Layer (SSL).
You can have a maximum of 2 certificates associated with a Lightsail load
balancer. One is active and the other is inactive.
"""
def get_load_balancer_tls_certificates(client, input, options \\ []) do
request(client, "GetLoadBalancerTlsCertificates", input, options)
end
@doc """
Returns information about all load balancers in an account.
If you are describing a long list of load balancers, you can paginate the
output to make the list more manageable. You can use the pageToken and
nextPageToken values to retrieve the next items in the list.
"""
def get_load_balancers(client, input, options \\ []) do
request(client, "GetLoadBalancers", input, options)
end
@doc """
Returns information about a specific operation. Operations include events
such as when you create an instance, allocate a static IP, attach a static
IP, and so on.
"""
def get_operation(client, input, options \\ []) do
request(client, "GetOperation", input, options)
end
@doc """
Returns information about all operations.
Results are returned from oldest to newest, up to a maximum of 200. Results
can be paged by making each subsequent call to `GetOperations` use the
maximum (last) `statusChangedAt` value from the previous request.
"""
def get_operations(client, input, options \\ []) do
request(client, "GetOperations", input, options)
end
@doc """
Gets operations for a specific resource (e.g., an instance or a static IP).
"""
def get_operations_for_resource(client, input, options \\ []) do
request(client, "GetOperationsForResource", input, options)
end
@doc """
Returns a list of all valid regions for Amazon Lightsail. Use the `include
availability zones` parameter to also return the availability zones in a
region.
"""
def get_regions(client, input, options \\ []) do
request(client, "GetRegions", input, options)
end
@doc """
Returns information about a specific static IP.
"""
def get_static_ip(client, input, options \\ []) do
request(client, "GetStaticIp", input, options)
end
@doc """
Returns information about all static IPs in the user's account.
"""
def get_static_ips(client, input, options \\ []) do
request(client, "GetStaticIps", input, options)
end
@doc """
Imports a public SSH key from a specific key pair.
"""
def import_key_pair(client, input, options \\ []) do
request(client, "ImportKeyPair", input, options)
end
@doc """
Returns a Boolean value indicating whether your Lightsail VPC is peered.
"""
def is_vpc_peered(client, input, options \\ []) do
request(client, "IsVpcPeered", input, options)
end
@doc """
Adds public ports to an Amazon Lightsail instance.
"""
def open_instance_public_ports(client, input, options \\ []) do
request(client, "OpenInstancePublicPorts", input, options)
end
@doc """
Tries to peer the Lightsail VPC with the user's default VPC.
"""
def peer_vpc(client, input, options \\ []) do
request(client, "PeerVpc", input, options)
end
@doc """
Sets the specified open ports for an Amazon Lightsail instance, and closes
all ports for every protocol not included in the current request.
"""
def put_instance_public_ports(client, input, options \\ []) do
request(client, "PutInstancePublicPorts", input, options)
end
@doc """
Restarts a specific instance. When your Amazon Lightsail instance is
finished rebooting, Lightsail assigns a new public IP address. To use the
same IP address after restarting, create a static IP address and attach it
to the instance.
"""
def reboot_instance(client, input, options \\ []) do
request(client, "RebootInstance", input, options)
end
@doc """
Deletes a specific static IP from your account.
"""
def release_static_ip(client, input, options \\ []) do
request(client, "ReleaseStaticIp", input, options)
end
@doc """
Starts a specific Amazon Lightsail instance from a stopped state. To
restart an instance, use the reboot instance operation.
"""
def start_instance(client, input, options \\ []) do
request(client, "StartInstance", input, options)
end
@doc """
Stops a specific Amazon Lightsail instance that is currently running.
"""
def stop_instance(client, input, options \\ []) do
request(client, "StopInstance", input, options)
end
@doc """
Attempts to unpeer the Lightsail VPC from the user's default VPC.
"""
def unpeer_vpc(client, input, options \\ []) do
request(client, "UnpeerVpc", input, options)
end
@doc """
Updates a domain recordset after it is created.
"""
def update_domain_entry(client, input, options \\ []) do
request(client, "UpdateDomainEntry", input, options)
end
@doc """
Updates the specified attribute for a load balancer. You can only update
one attribute at a time.
"""
def update_load_balancer_attribute(client, input, options \\ []) do
request(client, "UpdateLoadBalancerAttribute", input, options)
end
@spec request(map(), binary(), map(), list()) ::
{:ok, Poison.Parser.t | nil, Poison.Response.t} |
{:error, Poison.Parser.t} |
{:error, HTTPoison.Error.t}
defp request(client, action, input, options) do
client = %{client | service: "lightsail"}
host = get_host("lightsail", client)
url = get_url(host, client)
headers = [{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "Lightsail_20161128.#{action}"}]
payload = Poison.Encoder.encode(input, [])
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
case HTTPoison.post(url, payload, headers, options) do
{:ok, response=%HTTPoison.Response{status_code: 200, body: ""}} ->
{:ok, nil, response}
{:ok, response=%HTTPoison.Response{status_code: 200, body: body}} ->
{:ok, Poison.Parser.parse!(body), response}
{:ok, _response=%HTTPoison.Response{body: body}} ->
error = Poison.Parser.parse!(body)
exception = error["__type"]
message = error["message"]
{:error, {exception, message}}
{:error, %HTTPoison.Error{reason: reason}} ->
{:error, %HTTPoison.Error{reason: reason}}
end
end
defp get_host(endpoint_prefix, client) do
if client.region == "local" do
"localhost"
else
"#{endpoint_prefix}.#{client.region}.#{client.endpoint}"
end
end
defp get_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
end
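# A hedged usage sketch. The %AWS.Client{} shape below is an assumption
# inferred from the fields this module reads (region, endpoint, proto, port)
# plus the aws-elixir signing convention; it is not confirmed by this file:
#
#     client = %AWS.Client{access_key_id: "AKIA...", secret_access_key: "...",
#                          region: "us-east-1", endpoint: "amazonaws.com",
#                          proto: "https", port: 443}
#     {:ok, result, _http_response} = AWS.Lightsail.get_instances(client, %{})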
# lib/aws/lightsail.ex
defmodule Braintree.PaymentMethod do
@moduledoc """
Create, update, find and delete payment methods. Payment methods
may be a `CreditCard` or a `PaypalAccount`.
"""
alias Braintree.{AndroidPayCard, ApplePayCard, CreditCard, HTTP, PaypalAccount}
alias Braintree.ErrorResponse, as: Error
@doc """
Create a payment method record, or return an error response after failed
validation.
## Example
{:ok, customer} = Braintree.Customer.create(%{
first_name: "Jen",
last_name: "Smith"
})
{:ok, credit_card} = Braintree.PaymentMethod.create(%{
customer_id: customer.id,
payment_method_nonce: Braintree.Testing.Nonces.transactable
})
credit_card.type # "Visa"
"""
@spec create(map, Keyword.t()) ::
{:ok, CreditCard.t()} | {:ok, PaypalAccount.t()} | {:error, Error.t()}
def create(params \\ %{}, opts \\ []) do
with {:ok, payload} <- HTTP.post("payment_methods", %{payment_method: params}, opts) do
{:ok, new(payload)}
end
end
@doc """
Update a payment method record, or return an error response after failed
validation.
## Example
{:ok, customer} = Braintree.Customer.create(%{
first_name: "Jen",
last_name: "Smith"
})
{:ok, credit_card} = Braintree.PaymentMethod.create(%{
customer_id: customer.id,
cardholder_name: "<NAME>",
payment_method_nonce: Braintree.Testing.Nonces.transactable
})
{:ok, payment_method} = Braintree.PaymentMethod.update(
credit_card.token,
%{cardholder_name: "NEW"}
)
payment_method.cardholder_name # "NEW"
"""
@spec update(String.t(), map, Keyword.t()) ::
{:ok, CreditCard.t()} | {:ok, PaypalAccount.t()} | {:error, Error.t()}
def update(token, params \\ %{}, opts \\ []) do
path = "payment_methods/any/" <> token
with {:ok, payload} <- HTTP.put(path, %{payment_method: params}, opts) do
{:ok, new(payload)}
end
end
@doc """
Delete a payment method record, or return an error response if the token is invalid.
## Example
{:ok, "Success"} = Braintree.PaymentMethod.delete(token)
"""
@spec delete(String.t(), Keyword.t()) :: :ok | {:error, Error.t()}
def delete(token, opts \\ []) do
path = "payment_methods/any/" <> token
with {:ok, _response} <- HTTP.delete(path, opts) do
:ok
end
end
@doc """
Find a payment method record, or return an error response if the token is invalid.
## Example
{:ok, payment_method} = Braintree.PaymentMethod.find(token)
payment_method.type # CreditCard
"""
@spec find(String.t(), Keyword.t()) ::
{:ok, CreditCard.t()} | {:ok, PaypalAccount.t()} | {:error, Error.t()}
def find(token, opts \\ []) do
path = "payment_methods/any/" <> token
with {:ok, payload} <- HTTP.get(path, opts) do
{:ok, new(payload)}
end
end
@spec new(map) :: AndroidPayCard.t() | ApplePayCard.t() | CreditCard.t() | PaypalAccount.t()
defp new(%{"android_pay_card" => android_pay_card}) do
AndroidPayCard.new(android_pay_card)
end
defp new(%{"apple_pay_card" => apple_pay_card}) do
ApplePayCard.new(apple_pay_card)
end
defp new(%{"credit_card" => credit_card}) do
CreditCard.new(credit_card)
end
defp new(%{"paypal_account" => paypal_account}) do
PaypalAccount.new(paypal_account)
end
end
# lib/payment_method.ex
defmodule Advent.D7 do
@steps_keywords ["Step ", " must be finished before step ", " can begin."]
def instructions() do
steps =
"inputs/d7.txt"
|> file_to_steps
steps
|> order_steps([])
|> Enum.reverse()
|> Enum.join()
end
def order_steps(steps, order) do
next = next_step(steps, order)
if is_list(next) do
order
else
steps
|> order_steps([next] ++ order)
end
end
def next_step(steps, order) do
cond do
all_steps_done?(steps, order) ->
order
{letter, _} = next_available_item(steps, order) ->
letter
end
end
def all_steps_done?(steps, order) do
length(order) == length(to_do_list(steps))
end
@doc """
get next available item based on the requirements
"""
def next_available_item(steps, order) do
steps
|> Enum.filter(fn {_letter, %{requirements: requirements}} ->
Enum.all?(requirements, &(&1 in order))
end)
|> Enum.find(fn {letter, _} -> letter not in order end)
end
def to_do_list(steps) do
requirements =
steps
|> Enum.map(fn {_k, %{requirements: requirements}} -> requirements end)
|> List.flatten()
letters = steps |> Enum.map(fn {letter, _} -> letter end)
Enum.concat(letters, requirements) |> Enum.uniq() |> Enum.sort()
end
def file_to_steps(file_path) do
file_path
|> File.stream!()
|> Stream.map(&String.trim/1)
|> Stream.map(&steps_from_line/1)
|> Enum.take_every(1)
|> convert_to_steps()
|> sort_requirements()
|> add_letters_with_no_requirements()
end
def steps_from_line(line_stream) do
line_stream
|> String.splitter(@steps_keywords, trim: true)
|> Enum.take_every(1)
|> List.to_tuple()
end
def sort_requirements(steps) do
Enum.reduce(steps, steps, fn {letter1, %{requirements: requirements}}, acc ->
put_in(acc[letter1][:requirements], Enum.sort(requirements))
end)
end
def add_letters_with_no_requirements(steps) do
steps
|> to_do_list()
|> Enum.filter(&(steps[&1] == nil))
|> Enum.reduce(steps, fn letter, acc -> Map.put(acc, letter, %{requirements: []}) end)
end
@doc """
converts a list of tuples to a steps structure
"""
def convert_to_steps(steps) when is_list(steps) do
Enum.reduce(steps, %{}, fn {letter1, letter2}, acc ->
case acc do
%{^letter2 => %{requirements: to_do}} ->
put_in(acc[letter2][:requirements], [letter1] ++ to_do)
_ ->
Map.put(acc, letter2, %{requirements: [letter1]})
end
end)
end
end
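# A minimal sketch of the expected input and entry point. The hard-coded path
# "inputs/d7.txt" above would hold lines matching the @steps_keywords
# delimiters, for example:
#
#     Step C must be finished before step A can begin.
#     Step C must be finished before step F can begin.
#
# Then:
#
#     Advent.D7.instructions()
#     # => a string such as "CAF", the step letters joined in dependency order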
# lib/advent/d7.ex
defmodule Blockchain.Block.HolisticValidity do
@moduledoc """
This module is responsible for holistic validity check, as defined in Eq.(31),
section 4.3.2, of the Yellow Paper - Byzantium Version e94ebda.
"""
alias Blockchain.{Block, Chain, Genesis}
alias MerklePatriciaTree.TrieStorage
@doc """
Determines whether or not a block is valid. This is
defined in Eq.(29) of the Yellow Paper.
This is an intensive operation because we must run all transactions in the
block to validate it
## Examples
iex> db = MerklePatriciaTree.Test.random_ets_db()
iex> chain = Blockchain.Test.ropsten_chain()
iex> beneficiary = <<0x05::160>>
iex> private_key = <<1::256>>
iex> sender = <<126, 95, 69, 82, 9, 26, 105, 18, 93, 93, 252, 183, 184, 194, 101, 144, 41, 57, 91, 223>> # based on simple private key
iex> machine_code = EVM.MachineCode.compile([:push1, 3, :push1, 5, :add, :push1, 0x00, :mstore, :push1, 0, :push1, 32, :return])
iex> trx = %Blockchain.Transaction{nonce: 5, gas_price: 3, gas_limit: 100_000, to: <<>>, value: 5, init: machine_code}
...> |> Blockchain.Transaction.Signature.sign_transaction(private_key)
iex> state = MerklePatriciaTree.Trie.new(db)
...> |> Blockchain.Account.put_account(sender, %Blockchain.Account{balance: 400_000, nonce: 5})
iex> parent_block = %Blockchain.Block{header: %Block.Header{number: 50, state_root: state.root_hash, difficulty: 50_000, timestamp: 9999, gas_limit: 125_001}}
iex> block = Blockchain.Block.gen_child_block(parent_block, chain, beneficiary: beneficiary, timestamp: 10000, gas_limit: 125_001)
...> |> Blockchain.Block.add_transactions([trx], db)
...> |> Blockchain.Block.add_rewards(db)
iex> Blockchain.Block.HolisticValidity.validate(block, chain, parent_block, db)
:valid
iex> db = MerklePatriciaTree.Test.random_ets_db()
iex> chain = Blockchain.Test.ropsten_chain()
iex> beneficiary = <<0x05::160>>
iex> private_key = <<1::256>>
iex> sender = <<126, 95, 69, 82, 9, 26, 105, 18, 93, 93, 252, 183, 184, 194, 101, 144, 41, 57, 91, 223>> # based on simple private key
iex> machine_code = EVM.MachineCode.compile([:push1, 3, :push1, 5, :add, :push1, 0x00, :mstore, :push1, 0, :push1, 32, :return])
iex> trx = %Blockchain.Transaction{nonce: 5, gas_price: 3, gas_limit: 100_000, to: <<>>, value: 5, init: machine_code}
...> |> Blockchain.Transaction.Signature.sign_transaction(private_key)
iex> state = MerklePatriciaTree.Trie.new(db)
...> |> Blockchain.Account.put_account(sender, %Blockchain.Account{balance: 400_000, nonce: 5})
iex> parent_block = %Blockchain.Block{header: %Block.Header{number: 50, state_root: state.root_hash, difficulty: 50_000, timestamp: 9999, gas_limit: 125_001}}
iex> block = Blockchain.Block.gen_child_block(parent_block, chain, beneficiary: beneficiary, timestamp: 10000, gas_limit: 125_001)
...> |> Blockchain.Block.add_transactions([trx], db)
iex> %{block | header: %{block.header | state_root: <<1,2,3>>, ommers_hash: <<2,3,4>>, transactions_root: <<3,4,5>>, receipts_root: <<4,5,6>>}}
...> |> Blockchain.Block.validate(chain, parent_block, db)
{:invalid, [:receipts_root_mismatch, :transactions_root_mismatch, :ommers_hash_mismatch, :state_root_mismatch]}
"""
@spec validate(Block.t(), Chain.t(), Block.t() | nil, TrieStorage.t()) ::
{:valid, TrieStorage.t()} | {:invalid, [atom()]}
def validate(block, chain, parent_block, trie) do
{base_block, state} =
if is_nil(parent_block) do
Genesis.create_block(chain, trie)
else
{Block.gen_child_block(
parent_block,
chain,
beneficiary: block.header.beneficiary,
timestamp: block.header.timestamp,
gas_limit: block.header.gas_limit,
extra_data: block.header.extra_data
), trie}
end
child_block_with_ommers = Block.add_ommers(base_block, block.ommers)
{child_block_with_transactions, updated_trie} =
Block.add_transactions(child_block_with_ommers, block.transactions, state, chain)
{child_block, updated_trie} =
Block.add_rewards(child_block_with_transactions, updated_trie, chain)
# The following checks Holistic Validity,
# as defined in Eq.(31), section 4.3.2 of Yellow Paper
errors =
[]
|> check_state_root_validity(child_block, block)
|> check_ommers_hash_validity(child_block, block)
|> check_transactions_root_validity(child_block, block)
|> check_gas_used(child_block, block)
|> check_receipts_root_validity(child_block, block)
|> check_logs_bloom(child_block, block)
if errors == [] do
{:valid, updated_trie}
else
{:invalid, errors}
end
end
@spec check_state_root_validity([atom()], Block.t(), Block.t()) :: [atom()]
defp check_state_root_validity(errors, child_block, block) do
if child_block.header.state_root == block.header.state_root do
errors
else
[:state_root_mismatch | errors]
end
end
@spec check_ommers_hash_validity([atom()], Block.t(), Block.t()) :: [atom()]
defp check_ommers_hash_validity(errors, child_block, block) do
if child_block.header.ommers_hash == block.header.ommers_hash do
errors
else
[:ommers_hash_mismatch | errors]
end
end
@spec check_transactions_root_validity([atom()], Block.t(), Block.t()) :: [atom()]
defp check_transactions_root_validity(errors, child_block, block) do
if child_block.header.transactions_root == block.header.transactions_root do
errors
else
[:transactions_root_mismatch | errors]
end
end
@spec check_receipts_root_validity([atom()], Block.t(), Block.t()) :: [atom()]
defp check_receipts_root_validity(errors, child_block, block) do
if child_block.header.receipts_root == block.header.receipts_root do
errors
else
[:receipts_root_mismatch | errors]
end
end
@spec check_gas_used([atom()], Block.t(), Block.t()) :: [atom()]
defp check_gas_used(errors, child_block, block) do
if child_block.header.gas_used == block.header.gas_used do
errors
else
[:gas_used_mismatch | errors]
end
end
@spec check_logs_bloom([atom()], Block.t(), Block.t()) :: [atom()]
defp check_logs_bloom(errors, child_block, block) do
if child_block.header.logs_bloom == block.header.logs_bloom do
errors
else
[:logs_bloom_mismatch | errors]
end
end
end
# apps/blockchain/lib/blockchain/block/holistic_validity.ex
defmodule GraphQL.Type.Interface do
alias GraphQL.Type.AbstractType
@type t :: %GraphQL.Type.Interface{
name: binary,
description: binary | nil,
fields: Map.t | function,
resolver: (any -> GraphQL.Type.ObjectType.t) | nil
}
defstruct name: "", description: "", fields: %{}, resolver: nil
def new(map) do
struct(GraphQL.Type.Interface, map)
end
defimpl AbstractType do
@doc """
Returns a boolean indicating if the provided type implements the interface
"""
def possible_type?(interface, object) do
GraphQL.Type.implements?(object, interface)
end
@doc """
Unlike Union, Interfaces don't explicitly declare what Types implement them,
so we have to iterate over a full typemap and filter the Types in the Schema
down to just those that implement the provided interface.
"""
def possible_types(interface, schema) do
# get the complete typemap from this schema
schema.type_cache
# filter them down to a list of types that implement this interface
|> Enum.filter(fn {_, typedef} -> GraphQL.Type.implements?(typedef, interface) end)
# then return the type, instead of the {name, type} tuple that comes from
# the type_cache
|> Enum.map(fn({_,v}) -> v end)
end
@doc """
Returns the typedef of the provided ObjectType using either the Interface's
resolve function (if it exists), or by iterating over all the typedefs that
implement this Interface and returning the first one that matches against
the ObjectType's isTypeOf function.
"""
def get_object_type(interface, object, schema) do
if interface.resolver do
interface.resolver.(object)
else
AbstractType.possible_types(interface, schema)
|> Enum.find(fn(x) -> x.isTypeOf.(object) end)
end
end
end
defimpl String.Chars do
def to_string(iface), do: iface.name
end
defimpl GraphQL.Execution.Completion do
alias GraphQL.Execution.Selection
def complete_value(return_type, context, field_asts, info, result) do
runtime_type = AbstractType.get_object_type(return_type, result, info.schema)
Selection.complete_sub_fields(runtime_type, context, field_asts, result)
end
end
end
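# A hedged sketch of defining an interface. The %GraphQL.Type.String{} scalar
# and the shape of the fields map follow graphql-elixir conventions and are
# assumptions, not confirmed by this file:
#
#     named = GraphQL.Type.Interface.new(%{
#       name: "Named",
#       description: "Anything with a name",
#       fields: %{name: %{type: %GraphQL.Type.String{}}}
#     })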
# lib/graphql/type/interface.ex
defmodule Sobelow.Misc.FilePath do
@moduledoc ~S"""
# Insecure use of `File` and `Path`
In Elixir, `File` methods are null-terminated, while `Path`
functions are not. This may cause security issues in certain
situations. For example:
```
user_input = "/var/www/secret.txt\0/name"
path = Path.dirname(user_input)
public_file = path <> "/public.txt"
File.read(public_file)
```
Because `Path` functions are not null-terminated, this
will attempt to read the file, "/var/www/secret.txt\\0/public.txt".
However, due to the null-byte termination of `File` functions
"secret.txt" will ultimately be read.
`File/Path` checks can be ignored with the following command:
$ mix sobelow -i Misc.FilePath
"""
alias Sobelow.Utils
use Sobelow.Finding
def run(fun, meta_file) do
{vars, params, {fun_name, [{_, line_no}]}} = parse_def(fun)
Enum.each(vars, fn var ->
add_finding(line_no, meta_file.filename, fun_name, fun, var, Utils.get_sev(params, var))
end)
end
defp add_finding(line_no, filename, fun_name, fun, var, severity) do
type = "Insecure use of `File` and `Path`"
case Sobelow.format() do
"json" ->
finding = [
type: type,
file: filename,
function: "#{fun_name}:#{line_no}",
variable: var
]
Sobelow.log_finding(finding, severity)
"txt" ->
Sobelow.log_finding(type, severity)
IO.puts(Utils.finding_header(type, severity))
IO.puts(Utils.finding_file_metadata(filename, fun_name, line_no))
IO.puts(Utils.finding_variable(var))
Utils.maybe_print_file_path_code(fun, var)
IO.puts(Utils.finding_break())
"compact" ->
Utils.log_compact_finding(type, filename, line_no, severity)
_ ->
Sobelow.log_finding(type, severity)
end
end
def parse_def(fun) do
{params, {fun_name, line_no}} = Utils.get_fun_declaration(fun)
file_assigns = Utils.get_assigns_from(fun, [:File])
path_assigns = Utils.get_assigns_from(fun, [:Path])
path_vars =
Utils.get_funs_by_module(fun, [:Path])
|> Enum.map(&Utils.extract_opts(&1, 0))
|> List.flatten()
file_vars =
Utils.get_funs_by_module(fun, [:File])
|> Enum.map(&Utils.extract_opts(&1, 0))
|> List.flatten()
shared_path =
Enum.filter(path_vars, fn var ->
Enum.member?(file_assigns, var)
end)
shared_file =
Enum.filter(file_vars, fn var ->
Enum.member?(path_assigns, var)
end)
vars =
Enum.filter(file_vars, fn var ->
Enum.member?(path_vars, var)
end)
{vars ++ shared_file ++ shared_path, params, {fun_name, line_no}}
end
end
# lib/sobelow/misc/file_path.ex
defmodule Cizen.Filter do
@moduledoc """
Creates a filter.
## Basic
Filter.new(
fn %Event{body: %SomeEvent{field: value}} ->
value == :a
end
)
Filter.new(
fn %Event{body: %SomeEvent{field: :a}} -> true end
)
value = :a
Filter.new(
fn %Event{body: %SomeEvent{field: ^value}} -> true end
)
## With guard
Filter.new(
fn %Event{source_saga_id: source} when not is_nil(source) -> true end
)
## Matches all
Filter.new(fn _ -> true end)
## Matches the specific type of struct
Filter.new(
fn %Event{source_saga: %SomeSaga{}} -> true end
)
## Compose filters
Filter.new(
fn %Event{body: %SomeEvent{field: value}} ->
Filter.match?(other_filter, value)
end
)
## Multiple filters
Filter.any([
Filter.new(fn %Event{body: %Resolve{id: id}} -> id == "some id" end),
Filter.new(fn %Event{body: %Reject{id: id}} -> id == "some id" end)
])
## Multiple cases
Filter.new(fn
%Event{body: %SomeEvent{field: :ignore}} -> false
%Event{body: %SomeEvent{field: value}} -> true
end)
"""
@type t :: %__MODULE__{}
defstruct code: true
alias Cizen.Event
alias Cizen.Filter.Code
@doc """
Creates a filter with the given anonymous function.
"""
defmacro new(filter) do
filter
|> Macro.prewalk(fn
{:when, _, [args, _guard]} ->
args
{:->, meta, [args, _expression]} ->
{:->, meta, [args, true]}
{:^, _, [{_var, _, _}]} ->
{:_, [], nil}
{_var, _, args} when not is_list(args) ->
{:_, [], nil}
node ->
node
end)
|> Elixir.Code.eval_quoted([], __CALLER__)
code = filter |> Code.generate(__CALLER__) |> remove_assertion()
quote do
%unquote(__MODULE__){
code: unquote(code)
}
end
end
defp remove_assertion(
{:and,
[{:and, [{:is_map, [{:access, []}]}, {:==, [{:access, [:__struct__]}, Event]}]}, second]}
),
do: remove_assertion(second)
defp remove_assertion({:and, [{:is_map, [{:access, [:body]}]}, second]}),
do: second
defp remove_assertion({:and, [first, second]}),
do: {:and, [remove_assertion(first), second]}
defp remove_assertion(code), do: code
@doc """
Checks whether the given struct matches or not.
"""
@spec match?(t, term) :: boolean
def match?(%__MODULE__{code: code}, struct) do
if eval(code, struct), do: true, else: false
end
@doc """
Joins the given filters with `and`.
"""
@spec all([t()]) :: t()
def all(filters) do
code = filters |> Enum.map(& &1.code) |> Code.all()
%__MODULE__{code: code}
end
@doc """
Joins the given filters with `or`.
"""
@spec any([t()]) :: t()
def any(filters) do
code = filters |> Enum.map(& &1.code) |> Code.any()
%__MODULE__{code: code}
end
def eval({:access, keys}, struct) do
Enum.reduce(keys, struct, fn key, struct ->
Map.get(struct, key)
end)
end
def eval({:call, [{module, fun} | args]}, struct) do
args = args |> Enum.map(&eval(&1, struct))
apply(module, fun, args)
end
@macro_unary_operators [:is_nil, :to_string, :to_charlist, :not, :!]
for operator <- @macro_unary_operators do
def eval({unquote(operator), [arg]}, struct) do
Kernel.unquote(operator)(eval(arg, struct))
end
end
@macro_binary_operators [:and, :&&, :or, :||, :in, :.., :<>]
for operator <- @macro_binary_operators do
def eval({unquote(operator), [arg1, arg2]}, struct) do
Kernel.unquote(operator)(eval(arg1, struct), eval(arg2, struct))
end
end
def eval({operator, args}, struct) do
args = args |> Enum.map(&eval(&1, struct))
apply(Kernel, operator, args)
end
def eval(value, _struct) do
value
end
end
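# A minimal round-trip sketch built from the moduledoc examples above.
# Filter.new/1 is a macro, so the caller must `require Cizen.Filter` first;
# SomeEvent stands in for any struct with a :field key:
#
#     filter = Filter.new(fn %Event{body: %SomeEvent{field: value}} -> value == :a end)
#     Filter.match?(filter, %Event{body: %SomeEvent{field: :a}})  # => true
#     Filter.match?(filter, %Event{body: %SomeEvent{field: :b}})  # => false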
# lib/cizen/filter.ex
require Pedro.Helpers, as: H
defmodule Pedro.Db.Repo do
@moduledoc """
Helpers to interact with Mnesia in single node mode.
"""
@tables [Pedro.Db.EntryQueue, Pedro.Db.Messages, Pedro.Db.Throttles]
@doc """
create_schema and then start Mnesia in single node mode
Returns `:ok` or `{:error, reason}`
"""
def init() do
create_schema()
|> start_mnesia()
|> create_all_tables()
end
defp create_schema do
case :mnesia.create_schema([node()]) do
:ok -> :ok
{:error, {_, {:already_exists, _}}} -> :ok
any -> any
end
end
defp start_mnesia(:ok), do: :mnesia.start()
defp start_mnesia(any), do: any # error piping
defp create_all_tables(:ok) do
results = @tables
|> Enum.map(&(apply(&1, :table_definition, [])))
|> Enum.map(&create_table/1)
case Enum.all?(results, &(match?(:ok,&1))) do
true -> :ok
false -> {:error, results}
end
end
defp create_all_tables(any), do: any # error piping
defp create_table([name: name, opts: opts]) when is_atom(name) do
opts = opts
|> H.requires([:attributes])
|> H.defaults(disc_copies: [node()])
case :mnesia.create_table(name, opts) do
{:atomic, :ok} -> :ok
{:aborted, {:already_exists, _}} -> :ok
{:aborted, reason} -> {:error, reason}
end
end
# Use `:ets.fun2ms` to get the spec
def select(table, spec) do
table
|> :mnesia.select(spec)
|> Enum.map(&(record2struct(table, &1)))
end
# Use `:ets.fun2ms` to get the spec
def dirty_select(table, spec) do
table
|> :mnesia.dirty_select(spec)
|> Enum.map(&(record2struct(table, &1)))
end
# :ets.fun2ms(fn(x)-> x end)
def all(table), do: select(table, [{:"$1", [], [:"$1"]}])
def dirty_all(table), do: dirty_select(table, [{:"$1", [], [:"$1"]}])
def write(struct) when is_map(struct) do
struct |> struct2record |> :mnesia.write
end
def dirty_write(struct) when is_map(struct) do
struct |> struct2record |> :mnesia.dirty_write
end
defp struct2record(struct) do
module = struct.__struct__
struct
|> Map.from_struct # remove struct's name
|> Map.values
|> List.insert_at(0, module)
|> List.to_tuple
end
defp record2struct(module, record) do
module
|> module2attributes
|> List.insert_at(0, :__struct__)
|> Enum.zip(Tuple.to_list(record)) # zip struct's keys with record's values
|> attributes2struct(module)
end
def module2attributes(module) do
module.__struct__ # an empty struct
|> Map.from_struct # remove struct's name
|> Map.keys
end
defp attributes2struct(attributes, module) do
attributes
|> List.insert_at(0, {:__struct__, module}) # add struct's name
|> Enum.into(%{}) # to struct
end
end
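# A hedged usage sketch. Pedro.Db.Messages is one of the @tables above, and
# struct2record/record2struct assume each table module defines a struct whose
# fields mirror its Mnesia attributes:
#
#     :ok = Pedro.Db.Repo.init()
#     :mnesia.transaction(fn ->
#       Pedro.Db.Repo.write(%Pedro.Db.Messages{})  # write inside a transaction
#     end)
#     Pedro.Db.Repo.dirty_all(Pedro.Db.Messages)   # read without a transaction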
# lib/pedro/db/repo.ex
defmodule Pbkdf2 do
@moduledoc """
Pbkdf2 password hashing library main module.
For a lower-level API, see Pbkdf2.Base.
## Pbkdf2
Pbkdf2 is a password-based key derivation function
that uses a password, a variable-length salt and an iteration
count and applies a pseudorandom function to these to
produce a key.
The original implementation used SHA-1 as the pseudorandom function,
but this version uses HMAC-SHA-512, the default, or HMAC-SHA-256.
## Warning
It is recommended that you set a maximum length for the password
when using the `hash_pwd_salt`, `verify_pass` and `Base.hash_password`
functions. This maximum length should not prevent valid users from setting
long passwords. It is instead needed to combat denial-of-service attacks.
As an example, Django sets the maximum length to 4096 bytes.
For more information, see [this link](https://www.djangoproject.com/weblog/2013/sep/15/security/).
"""
alias Pbkdf2.Base
@doc """
Generate a random salt.
The minimum length of the salt is 8 bytes and the maximum length is
1024. The default length for the salt is 16 bytes. We do not recommend
using a salt shorter than the default.
"""
def gen_salt(salt_length \\ 16)
def gen_salt(salt_length) when salt_length in 8..1024 do
:crypto.strong_rand_bytes(salt_length)
end
def gen_salt(_) do
raise ArgumentError, """
The salt is the wrong length. It should be between 8 and 1024 bytes long.
"""
end
@doc """
Generate a random salt and hash a password using Pbkdf2.
## Options
For more information about the options for the underlying hash function,
see the documentation for Pbkdf2.Base.hash_password/3.
This function has the following additional option:
* salt_len - the length of the random salt
* the default is 16 (the minimum is 8) bytes
* we do not recommend using a salt less than 16 bytes long
"""
def hash_pwd_salt(password, opts \\ []) do
Base.hash_password(password, Keyword.get(opts, :salt_len, 16) |> gen_salt, opts)
end
@doc """
Check the password by comparing it with the stored hash.
The check is performed in constant time to avoid timing attacks.
"""
def verify_pass(password, stored_hash) do
[alg, rounds, salt, hash] = String.split(stored_hash, "$", trim: true)
{digest, length} = if alg =~ "sha512", do: {:sha512, 64}, else: {:sha256, 32}
Base.verify_pass(password, hash, salt, digest, rounds, length, output(stored_hash))
end
@doc """
A dummy verify function to help prevent user enumeration.
This always returns false. The reason for implementing this check is
in order to make it more difficult for an attacker to identify users
by timing responses.
"""
def no_user_verify(opts \\ []) do
hash_pwd_salt("password", opts)
false
end
defp output("$pbkdf2" <> _), do: :modular
defp output("pbkdf2" <> _), do: :django
end
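# A minimal round-trip sketch of the two main entry points above:
#
#     hash = Pbkdf2.hash_pwd_salt("password")
#     Pbkdf2.verify_pass("password", hash)  # => true
#     Pbkdf2.verify_pass("wrong", hash)     # => false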
# deps/pbkdf2_elixir/lib/pbkdf2.ex
defmodule Mix.Tasks.Firmware.Unpack do
use Mix.Task
import Mix.Nerves.Utils
alias Mix.Nerves.Preflight
@shortdoc "Unpack a firmware bundle for inspection"
@moduledoc """
Unpack the firmware so that its contents can be inspected locally.
## Usage
mix firmware.unpack [--output output directory] [--fw path to firmware]
## Command line options
* `--fw` - (Optional) The path to the .fw file for unpacking.
Defaults to `Nerves.Env.firmware_path/1`
* `--output` - (Optional) The output directory for the unpacked firmware.
Defaults to the name of the firmware bundle with the extension replaced
with `.unpacked`.
## Examples
```
# Create a firmware bundle. It will be under the _build directory
mix firmware
# Unpack the built firmware
mix firmware.unpack --output firmware_contents
# Unpack a specified fw file
mix firmware.unpack --fw hello_nerves.fw
# Inspect it
ls hello_nerves.unpacked/
```
"""
@switches [output: :string, fw: :string]
@aliases [o: :output, f: :fw]
@impl true
def run(args) do
Preflight.check!()
debug_info("Nerves Firmware Unpack")
config = Mix.Project.config()
{opts, _, _} = OptionParser.parse(args, strict: @switches, aliases: @aliases)
fw = opts[:fw] || Nerves.Env.firmware_path(config)
output = opts[:output] || "#{Path.rootname(Path.basename(fw))}.unpacked"
_ = check_nerves_system_is_set!()
_ = check_nerves_toolchain_is_set!()
unless File.exists?(fw) do
Mix.raise("""
Firmware not found.
Please supply a valid firmware path with `--fw` or run `mix firmware`
""")
end
unpack(fw, output)
end
defp unpack(fw, output_path) do
abs_output_path = Path.expand(output_path)
rootfs_output_path = Path.join(abs_output_path, "rootfs")
rootfs_image = Path.join([abs_output_path, "data", "rootfs.img"])
Mix.shell().info("Unpacking to #{output_path}...")
_ = File.rm_rf!(abs_output_path)
File.mkdir_p!(abs_output_path)
{_, 0} = shell("unzip", [fw, "-d", abs_output_path])
{_, 0} = shell("unsquashfs", ["-d", rootfs_output_path, "-no-xattrs", rootfs_image])
:ok
end
end
# lib/mix/tasks/firmware.unpack.ex
defmodule TreeSet do
defstruct set: nil
@behaviour Set
@implementation :gb_sets
def delete(set, el) do
%{set | :set => @implementation.del_element(el, set.set)}
end
def difference(set1, set2) do
%{set1 | :set => @implementation.difference(set1.set, set2.set)}
end
def disjoint?(set1, set2) do
@implementation.is_disjoint(set1.set, set2.set)
end
def equal?(set1, set2) do
(size(set1) == size(set2)) && subset?(set1, set2)
end
def intersection(set1, set2) do
%{set1 | :set => @implementation.intersection(set1.set, set2.set)}
end
def member?(set, el) do
@implementation.is_element(el, set.set)
end
def new do
%__MODULE__{set: @implementation.new}
end
def put(set, el) do
%{set | :set => @implementation.add_element(el, set.set)}
end
def size(set) do
@implementation.size(set.set)
end
def subset?(set1, set2) do
@implementation.is_subset(set1.set, set2.set)
end
def to_list(set) do
@implementation.to_list(set.set)
end
def union(set1, set2) do
%{set1 | :set => @implementation.union(set1.set, set2.set)}
end
if @implementation == :gb_sets do
# Lazily walk the gb_sets iterator as a Stream and delegate to the
# Enumerable reduce contract.
def reduce(set, acc, reducer) do
set.set
|> @implementation.iterator()
|> Stream.unfold(fn iterator ->
case @implementation.next(iterator) do
{el, iterator} -> {el, iterator}
:none -> nil
end
end)
|> Enumerable.reduce(acc, reducer)
end
else
def reduce(set, acc, reducer) do
Enumerable.List.reduce(Set.to_list(set), acc, reducer)
end
end
end
defimpl Enumerable, for: TreeSet do
def reduce(set, acc, fun), do: TreeSet.reduce(set, acc, fun)
def member?(set, v), do: {:ok, TreeSet.member?(set, v)}
def count(set), do: {:ok, TreeSet.size(set)}
end
defimpl Inspect, for: TreeSet do
import Inspect.Algebra
def inspect(set, opts) do
concat ["#TreeSet<", Inspect.List.inspect(TreeSet.to_list(set), opts), ">"]
end
end
defimpl Collectable, for: TreeSet do
def empty(_dict) do
TreeSet.new
end
def into(original) do
{original, fn
set, {:cont, x} -> TreeSet.put(set, x)
set, :done -> set
_, :halt -> :ok
end}
end
end
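# A minimal usage sketch (this targets the Elixir era that still ships the Set
# behaviour and the two-function Collectable protocol used above):
#
#     set = Enum.into([3, 1, 2, 2], TreeSet.new())
#     TreeSet.to_list(set)  # => [1, 2, 3]
#     Enum.member?(set, 2)  # => true
#     Enum.count(set)       # => 3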
# lib/structurez/tree_set.ex
defmodule Membrane.Core.Child.PadsSpecs do
@moduledoc false
# Functions parsing element and bin pads specifications, generating functions and docs
# based on them.
use Bunch
alias Membrane.Caps
alias Membrane.Core.OptionsSpecs
alias Membrane.Pad
require Pad
@spec def_pads([{Pad.name_t(), raw_spec :: Macro.t()}], Pad.direction_t(), :element | :bin) ::
Macro.t()
def def_pads(pads, direction, component) do
pads
|> Enum.reduce(
quote do
end,
fn {name, spec}, acc ->
pad_def = def_pad(component, name, direction, spec)
quote do
unquote(acc)
unquote(pad_def)
end
end
)
end
@doc """
Returns documentation string common for both input and output pads
"""
@spec def_pad_docs(Pad.direction_t(), :bin | :element) :: String.t()
def def_pad_docs(direction, component) do
{entity, pad_type_spec} =
case component do
:bin -> {"bin", "bin_spec_t/0"}
:element -> {"element", "#{direction}_spec_t/0"}
end
"""
Macro that defines #{direction} pad for the #{entity}.
Allows the use of `one_of/1` and `range/2` functions from `Membrane.Caps.Matcher`
without the module prefix.
It automatically generates documentation from the given definition
and adds compile-time caps specs validation.
The type `t:Membrane.Pad.#{pad_type_spec}` describes how the definition of pads should look.
"""
end
@doc """
Returns AST inserted into element's or bin's module defining a pad
"""
@spec def_pad(Pad.name_t(), Pad.direction_t(), Macro.t(), :element | :bin) :: Macro.t()
def def_pad(pad_name, direction, raw_specs, component) do
Code.ensure_loaded(Caps.Matcher)
specs =
raw_specs
|> Bunch.Macro.inject_calls([
{Caps.Matcher, :one_of},
{Caps.Matcher, :range}
])
{escaped_pad_opts, pad_opts_typedef} = OptionsSpecs.def_pad_options(pad_name, specs[:options])
specs =
specs
|> Keyword.put(:options, escaped_pad_opts)
quote do
unquote(do_ensure_default_membrane_pads())
@membrane_pads unquote(__MODULE__).parse_pad_specs!(
{unquote(pad_name), unquote(specs)},
unquote(direction),
unquote(component),
__ENV__
)
unquote(pad_opts_typedef)
end
end
defmacro ensure_default_membrane_pads() do
do_ensure_default_membrane_pads()
end
defp do_ensure_default_membrane_pads() do
quote do
if Module.get_attribute(__MODULE__, :membrane_pads) == nil do
Module.register_attribute(__MODULE__, :membrane_pads, accumulate: true)
@before_compile {unquote(__MODULE__), :generate_membrane_pads}
end
end
end
@doc """
Generates `membrane_pads/0` function, along with docs and typespecs.
"""
defmacro generate_membrane_pads(env) do
pads = Module.get_attribute(env.module, :membrane_pads, []) |> Enum.reverse()
:ok = validate_pads!(pads, env)
alias Membrane.Pad
quote do
@doc """
Returns pad descriptions for `#{inspect(__MODULE__)}`
"""
@spec membrane_pads() :: [{unquote(Pad).name_t(), unquote(Pad).description_t()}]
def membrane_pads() do
unquote(pads |> Macro.escape())
end
end
end
@spec validate_pads!(
pads :: [{Pad.name_t(), Pad.description_t()}],
env :: Macro.Env.t()
) :: :ok
defp validate_pads!(pads, env) do
with [] <- pads |> Keyword.keys() |> Bunch.Enum.duplicates() do
:ok
else
dups ->
raise CompileError, file: env.file, description: "Duplicate pad names: #{inspect(dups)}"
end
end
@spec parse_pad_specs!(
specs :: Pad.spec_t(),
direction :: Pad.direction_t(),
:element | :bin,
declaration_env :: Macro.Env.t()
) :: {Pad.name_t(), Pad.description_t()}
def parse_pad_specs!(specs, direction, component, env) do
with {:ok, specs} <- parse_pad_specs(specs, direction, component) do
specs
else
{:error, reason} ->
raise CompileError,
file: env.file,
line: env.line,
description: """
Error parsing pad specs defined in #{inspect(env.module)}.def_#{direction}_pads/1,
reason: #{inspect(reason)}
"""
end
end
@spec parse_pad_specs(Pad.spec_t(), Pad.direction_t(), :element | :bin) ::
{Pad.name_t(), Pad.description_t()} | {:error, reason :: any}
def parse_pad_specs(spec, direction, component) do
withl spec: {name, config} when Pad.is_pad_name(name) and is_list(config) <- spec,
config:
{:ok, config} <-
config
|> Bunch.Config.parse(
availability: [in: [:always, :on_request], default: :always],
caps: [validate: &Caps.Matcher.validate_specs/1],
mode: [in: [:pull, :push], default: :pull],
demand_mode: [
in: [:auto, :manual],
default: :manual
],
demand_unit: [
in: [:buffers, :bytes],
require_if:
&(&1.mode == :pull and &1.demand_mode != :auto and
(component == :bin or direction == :input)),
default: :buffers
],
options: [default: nil]
) do
config = if component == :bin, do: Map.delete(config, :demand_mode), else: config
config
|> Map.put(:direction, direction)
|> Map.put(:name, name)
~> {:ok, {name, &1}}
else
spec: spec -> {:error, {:invalid_pad_spec, spec}}
config: {:error, reason} -> {:error, {reason, pad: name}}
end
end
@doc """
Generates docs describing pads based on pads specification.
"""
@spec generate_docs_from_pads_specs([{Pad.name_t(), Pad.description_t()}]) :: Macro.t()
def generate_docs_from_pads_specs([]) do
quote do
"""
There are no pads.
"""
end
end
def generate_docs_from_pads_specs(pads_specs) do
pads_docs =
pads_specs
|> Enum.sort_by(fn {_, config} -> config[:direction] end)
|> Enum.map(&generate_docs_from_pad_specs/1)
|> Enum.reduce(fn x, acc ->
quote do
"""
#{unquote(acc)}
#{unquote(x)}
"""
end
end)
quote do
"""
## Pads
#{unquote(pads_docs)}
"""
end
end
defp generate_docs_from_pad_specs({name, config}) do
{pad_opts, config} = config |> Map.pop(:options)
config_doc =
config
|> Enum.map(fn {k, v} ->
{
k |> to_string() |> String.replace("_", " ") |> String.capitalize(),
generate_pad_property_doc(k, v)
}
end)
|> Enum.map_join("\n", fn {k, v} ->
"<tr><td>#{k}</td> <td>#{v}</td></tr>"
end)
options_doc =
if pad_opts != nil do
quote do
"""
#{Bunch.Markdown.indent("Options:")}
#{unquote(OptionsSpecs.generate_opts_doc(pad_opts))}
"""
end
else
quote_expr("")
end
quote do
"""
### `#{inspect(unquote(name))}`
<table>
#{unquote(config_doc)}
</table>
""" <> unquote(options_doc)
end
end
defp generate_pad_property_doc(:caps, caps) do
caps
|> Bunch.listify()
|> Enum.map(fn
{module, params} ->
params_doc =
Enum.map_join(params, ",<br/>", fn {k, v} ->
Bunch.Markdown.hard_indent("<code>#{k}: #{inspect(v)}</code>")
end)
"<code>#{inspect(module)}</code>, restrictions:<br/>#{params_doc}"
module ->
"<code>#{inspect(module)}</code>"
end)
~> (
[doc] -> doc
docs -> docs |> Enum.join(",<br/>")
)
end
defp generate_pad_property_doc(_k, v) do
"<code>#{inspect(v)}</code>"
end
end
|
lib/membrane/core/child/pads_specs.ex
| 0.847684 | 0.560253 |
pads_specs.ex
|
starcoder
|
defmodule Phone do
@moduledoc ~S"""
Phone is a real telephone number parser that helps you extract useful information from numbers.
## How to use
Very simple to use:
iex> Phone.parse("555132345678")
{:ok, %{a2: "BR", a3: "BRA", country: "Brazil", international_code: "55", area_code: "51", number: "32345678", area_abbreviation: "RS", area_type: "state", area_name: "Rio Grande do Sul"}}
"""
import Helper.Parser
@doc """
Parses a string or integer and returns a map with information about that number.
```
iex> Phone.parse("555132345678")
{:ok, %{a2: "BR", a3: "BRA", country: "Brazil", international_code: "55", area_code: "51", number: "32345678", area_abbreviation: "RS", area_type: "state", area_name: "Rio Grande do Sul"}}
iex> Phone.parse("+55(51)3234-5678")
{:ok, %{a2: "BR", a3: "BRA", country: "Brazil", international_code: "55", area_code: "51", number: "32345678", area_abbreviation: "RS", area_type: "state", area_name: "Rio Grande do Sul"}}
iex> Phone.parse("55 51 3234-5678")
{:ok, %{a2: "BR", a3: "BRA", country: "Brazil", international_code: "55", area_code: "51", number: "32345678", area_abbreviation: "RS", area_type: "state", area_name: "Rio Grande do Sul"}}
iex> Phone.parse(555132345678)
{:ok, %{a2: "BR", a3: "BRA", country: "Brazil", international_code: "55", area_code: "51", number: "32345678", area_abbreviation: "RS", area_type: "state", area_name: "Rio Grande do Sul"}}
```
"""
@spec parse(String.t()) :: {:ok, map()}
def parse(number) when is_bitstring(number) do
number
|> prepare_number
|> Phone.Countries.build()
end
@spec parse(pos_integer) :: {:ok, map()}
def parse(number) when is_integer(number) do
number = Integer.to_string(number)
parse(number)
end
def parse(_) do
{:error, "Not a valid parameter, only string or integer."}
end
defp prepare_number(number) do
number = clear(number)
try do
number |> String.to_integer() |> Integer.to_string()
rescue
_ -> ""
end
end
defp clear(number) when is_bitstring(number) do
number
|> String.graphemes()
|> Enum.filter(fn n -> valid_char(n) end)
|> Enum.join("")
end
defp valid_char("+"), do: false
defp valid_char("("), do: false
defp valid_char(")"), do: false
defp valid_char("-"), do: false
defp valid_char(" "), do: false
defp valid_char(_), do: true
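# For illustration: clear("+55(51)3234-5678") strips the characters matched
# above and yields "555132345678".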
@doc """
Same as `parse/1`, except it raises on error.
```
iex> Phone.parse!("555132345678")
%{a2: "BR", a3: "BRA", country: "Brazil", international_code: "55", area_code: "51", number: "32345678", area_abbreviation: "RS", area_type: "state", area_name: "Rio Grande do Sul"}
iex> Phone.parse!("+55(51)3234-5678")
%{a2: "BR", a3: "BRA", country: "Brazil", international_code: "55", area_code: "51", number: "32345678", area_abbreviation: "RS", area_type: "state", area_name: "Rio Grande do Sul"}
iex> Phone.parse!("55 51 3234-5678")
%{a2: "BR", a3: "BRA", country: "Brazil", international_code: "55", area_code: "51", number: "32345678", area_abbreviation: "RS", area_type: "state", area_name: "Rio Grande do Sul"}
iex> Phone.parse!(555132345678)
%{a2: "BR", a3: "BRA", country: "Brazil", international_code: "55", area_code: "51", number: "32345678", area_abbreviation: "RS", area_type: "state", area_name: "Rio Grande do Sul"}
```
"""
@spec parse!(String.t()) :: map()
def parse!(number) when is_bitstring(number) do
  number
  |> prepare_number
  |> Phone.Countries.build!()
end
@spec parse!(pos_integer) :: map()
def parse!(number) when is_integer(number) do
number = Integer.to_string(number)
parse!(number)
end
def parse!(_) do
raise ArgumentError, message: "Not a valid parameter, only string or integer."
end
@spec parse(String.t(), atom()) :: {:ok, map()}
@spec parse(pos_integer, atom()) :: {:ok, map()}
@spec parse!(String.t(), atom()) :: map()
@spec parse!(pos_integer, atom()) :: map()
country_parser()
@doc """
Returns `true` if the number can be parsed, otherwise returns `false`.
```
iex> Phone.valid?("555132345678")
true
iex> Phone.valid?("+55(51)3234-5678")
true
iex> Phone.valid?("55 51 3234-5678")
true
iex> Phone.valid?(555132345678)
true
```
"""
@spec valid?(String.t()) :: boolean
def valid?(number) when is_bitstring(number) do
number
|> prepare_number
|> Phone.Countries.match?()
end
@spec valid?(pos_integer) :: boolean
def valid?(number) when is_integer(number) do
number
|> to_string
|> valid?
end
end
|
lib/phone.ex
| 0.78609 | 0.906901 |
phone.ex
|
starcoder
|
defmodule Snap.Bulk do
@moduledoc """
Supports streaming bulk operations against a `Snap.Cluster`.
"""
@default_page_size 5000
@default_page_wait 15_000
alias Snap.Bulk.Actions
@doc """
Performs a bulk operation.
Takes an `Enumerable` of action structs, where each struct is one of:
* `Snap.Bulk.Action.Create`
* `Snap.Bulk.Action.Index`
* `Snap.Bulk.Action.Update`
* `Snap.Bulk.Action.Delete`
```
actions = [
%Snap.Bulk.Action.Create{_id: 1, doc: %{foo: "bar"}},
%Snap.Bulk.Action.Create{_id: 2, doc: %{foo: "bar"}},
%Snap.Bulk.Action.Create{_id: 3, doc: %{foo: "bar"}}
]
actions
|> Snap.Bulk.perform(Cluster, "index")
```
It chunks the `Enumerable` into pages, and pauses between pages for
Elasticsearch to catch up. Uses `Stream` under the hood, so you can lazily
feed it a stream of actions, such as out of an `Ecto.Repo` to bulk load
documents from an SQL database.
If no errors occur on any page it returns `:ok`. If any errors occur, on
any page, it returns `{:error, %Snap.BulkError{}}`, containing a list of
the errors. It will continue to the end of the stream, even if errors
occur.
Options:
* `page_size` - defines the size of each page, defaulting to 5000 actions.
* `page_wait` - defines wait period between pages in ms, defaulting to
15000ms.
* `max_errors` - aborts when the number of errors returned exceeds this
count (defaults to `nil`, which will run to the end)
* `request_opts` - defines the options to be used with `Snap.Request`
Any other options, such as `pipeline: "foo"` are passed through as query
parameters to the [Bulk
API](https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-bulk.html)
endpoint.
"""
@spec perform(
stream :: Enumerable.t(),
cluster :: module(),
index :: String.t(),
opts :: Keyword.t()
) ::
:ok | Snap.Cluster.error() | {:error, Snap.BulkError.t()}
def perform(stream, cluster, index, opts) do
{page_size, opts} = Keyword.pop(opts, :page_size, @default_page_size)
{page_wait, opts} = Keyword.pop(opts, :page_wait, @default_page_wait)
{max_errors, opts} = Keyword.pop(opts, :max_errors, nil)
{request_opts, request_params} = Keyword.pop(opts, :request_opts, [])
stream
|> Stream.chunk_every(page_size)
|> Stream.intersperse({:wait, page_wait})
|> Stream.transform(
0,
&process_chunk(&1, cluster, index, request_params, request_opts, &2, max_errors)
)
|> Enum.to_list()
|> handle_result()
end
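  # For illustration: with page_size: 2, a stream of actions [a, b, c] becomes
  # [[a, b], {:wait, page_wait}, [c]], so process_chunk/7 sleeps between pages
  # and posts each page to the _bulk endpoint.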
defp process_chunk(
{:wait, 0},
_cluster,
_index,
_params,
_request_opts,
error_count,
_max_errors
) do
{[], error_count}
end
defp process_chunk(
{:wait, wait},
_cluster,
_index,
_params,
_request_opts,
error_count,
_max_errors
) do
:ok = :timer.sleep(wait)
{[], error_count}
end
defp process_chunk(_actions, _cluster, _index, _params, _request_opts, error_count, max_errors)
when is_integer(max_errors) and error_count > max_errors do
{:halt, error_count}
end
defp process_chunk(actions, cluster, index, params, request_opts, error_count, _max_errors) do
body = Actions.encode(actions)
headers = [{"content-type", "application/x-ndjson"}]
result = Snap.post(cluster, "/#{index}/_bulk", body, params, headers, request_opts)
add_errors =
case result do
{:ok, %{"errors" => true, "items" => items}} ->
process_errors(items)
{:ok, _} ->
[]
{:error, error} ->
[error]
end
error_count = error_count + Enum.count(add_errors)
{add_errors, error_count}
end
defp handle_result([]), do: :ok
defp handle_result(errors) do
err = Snap.BulkError.exception(errors)
{:error, err}
end
defp process_errors(items) do
items
|> Enum.map(&process_item/1)
|> Enum.reject(&is_nil/1)
end
defp process_item(%{"create" => %{"error" => error} = item}) when is_map(error) do
Snap.ResponseError.exception_from_json(item)
end
defp process_item(%{"index" => %{"error" => error} = item}) when is_map(error) do
Snap.ResponseError.exception_from_json(item)
end
defp process_item(%{"update" => %{"error" => error} = item}) when is_map(error) do
Snap.ResponseError.exception_from_json(item)
end
defp process_item(%{"delete" => %{"error" => error} = item}) when is_map(error) do
Snap.ResponseError.exception_from_json(item)
end
defp process_item(_), do: nil
end
|
lib/snap/bulk/bulk.ex
| 0.855941 | 0.912436 |
bulk.ex
|
starcoder
|
defmodule Euler do
@moduledoc """
This module defines general-purpose functions used in solving Project Euler challenges.
"""
defmacro divides?(d, n) do
quote do
rem(unquote(n), unquote(d)) == 0
end
end
@doc """
Calculates the sum of all natural numbers less than max which have any of the given factors.
"""
@spec sum_multiples([pos_integer, ...], pos_integer) :: non_neg_integer
def sum_multiples(factors, max) do
Enum.filter(1..max-1, &Enum.any?(factors, fn f -> rem(&1, f) == 0 end))
|> Enum.sum
end
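  # For illustration: sum_multiples([3, 5], 10) #=> 23 (3 + 5 + 6 + 9)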
def factors(n), do: factors(n, Primes.stream)
def factors(1, _), do: [1]
def factors(n, primes) when n > 1 do
pf = Primes.factorize(n, primes) |> Enum.to_list
expand_factors(pf, 1)
|> Enum.sort
end
defp expand_factors([], f), do: [f]
defp expand_factors(pf, f) do
Enum.dedup(pf)
|> Enum.flat_map(fn p -> Enum.drop_while(pf, &(&1 < p)) |> tl |> expand_factors(f*p) end)
|> List.insert_at(-1, f)
end
def factor_pairs(n) do
f = factors(n)
Enum.zip(f, Enum.reverse(f))
|> Enum.take(round(length(f)/2))
end
def palindromes(x) do
Stream.unfold(x, fn x ->
Integer.digits(x)
|> Enum.concat(Integer.digits(x) |> Enum.reverse)
|> Integer.undigits
|> List.wrap
|> List.insert_at(-1, x-1)
|> List.to_tuple
end)
end
def digits?(n, d), do: length(Integer.digits(n)) == d
def digit_sum(n) when is_integer(n), do: Integer.digits(n) |> Enum.sum
def product([]), do: 1
def product(list), do: Enum.reduce(list, &*/2)
def stream_product(digits, k) do
Stream.transform(digits, {0, []}, fn
0, _ -> {[], {0, []}}
x, {0, _} -> {[], {x, [x]}}
x, {p, buffer} when length(buffer) == k ->
p = x*div(p, hd(buffer))
{[p], {p, tl(buffer) ++ [x]}}
x, {p, buffer} -> {[], {x*p, buffer ++ [x]}}
end)
end
def factorial(0), do: 1
def factorial(n) when n > 0, do: product(1..n)
def pythagorean_triples() do
Stream.iterate(2, &(&1+2))
|> Stream.flat_map(fn r ->
Euler.factor_pairs(div(r*r, 2))
|> Enum.map(fn {s, t} -> {r+s, r+t, r+s+t} end)
end)
end
def spiral_stream(n) do
Stream.iterate(n, &next_spiral/1)
end
def next_spiral(n), do: (trunc(:math.sqrt(n) + 1) |> div(2)) * 2 + n
def is_square(x), do: :math.sqrt(x) |> trunc |> :math.pow(2) |> Kernel.==(x)
def count_stream(), do: count_stream(1)
def count_stream(x), do: Stream.iterate(x, &(&1+1))
def max_index(x) do
max = Enum.max(x)
Enum.find_index(x, &(&1 == max))
end
def triangle_stream(), do: count_stream() |> Stream.scan(&(&1+&2))
def collatz(1), do: nil
def collatz(n) when rem(n, 2) == 1, do: 3*n+1
def collatz(n), do: div(n, 2)
def collatz_count(m, x) when x > 0 do
if Map.has_key?(m, x) do
m
else
y = collatz(x)
m = collatz_count(m, y)
Map.put(m, x, Map.get(m, y) + 1)
end
end
def comb(_, 0), do: 1
def comb(n, 1), do: n
def comb(n, r) when n == r, do: 1
def comb(n, r) when n > r, do: div(product(r+1..n), product(1..n-r))
def letter_count(n) do
cond do
n < 10 -> Enum.at([0, 3, 3, 5, 4, 4, 3, 5, 5, 4], n)
n < 20 -> Enum.at([3, 6, 6, 8, 8, 7, 7, 9, 8, 8], n-10)
n < 100 -> Enum.at([6, 6, 5, 5, 5, 7, 6, 6], div(n, 10)-2) + letter_count(rem(n, 10))
n < 1_000 -> letter_count(div(n, 100)) + if(rem(n, 100) == 0, do: 7, else: 10 + letter_count(rem(n, 100)))
n < 1_000_000 -> letter_count(div(n, 1000)) + 8 + letter_count(rem(n, 1000))
end
end
def abundant?(n), do: abundant?(n, Primes.stream)
def abundant?(n, primes), do: factors(n, primes) |> Enum.sum |> Kernel.-(n) |> Kernel.>(n)
def nth_lex_perm(chars, n), do: find_lex_perm(Enum.sort(chars), n-1)
defp find_lex_perm([], 0), do: []
defp find_lex_perm(chars, n) do
r = factorial(length(chars)-1)
{d, chars} = List.pop_at(chars, div(n, r))
[d | find_lex_perm(chars, rem(n, r))]
end
def pow(_, 0), do: 1
def pow(n, 1), do: n
def pow(n, k) when rem(k, 2) == 0, do: pow(n*n, div(k, 2))
def pow(n, k), do: n * pow(n, k-1)
def mult_order(n, p) do
Stream.iterate(n, &(&1*n))
|> Enum.reduce_while([], fn x, a ->
k = rem(x, p)
case Enum.find_index(a, &(&1 == k)) do
nil -> {:cont, a ++ [k]}
i -> {:halt, length(a) - i}
end
end)
end
def repetend_period(n) do
f = Primes.factorize(n) |> Enum.to_list
g = Enum.reject(f, &(&1 in [2, 5]))
cond do
g == [] -> 0
hd(f) == n -> mult_order(10, n)
length(Enum.uniq(f)) == 1 ->
if divides?(product(f), pow(10, n-1)) do
repetend_period(hd(f))
else
repetend_period(hd(f)) * product(f)
end
true ->
Enum.chunk_by(g, &(&1))
|> Enum.map(&repetend_period(product(&1)))
|> lcm
end
end
  # gcd(0, b) == b and gcd(a, 0) == a; the original returned 0 for the first
  # case and looped forever on the second. The subtraction-based Euclidean
  # algorithm below handles the remaining cases.
  def gcd(0, b), do: b
  def gcd(a, 0), do: a
  def gcd(a, b) when a == b, do: a
  def gcd(a, b) when b > a, do: gcd(b, a)
  def gcd(a, b), do: gcd(a-b, b)
def lcm(a, b), do: div(a*b, gcd(a, b))
def lcm([]), do: 1
def lcm(list) when is_list(list), do: lcm(hd(list), lcm(tl(list)))
def digit_bases(n, d) do
Stream.iterate(pow(10, n) + 1, fn x ->
case rem(x, 10) do
3 -> x + 4
_ -> x + 2
end
end)
|> Stream.filter(fn x ->
digits = Integer.digits(x)
zeros = Enum.count(digits, &(&1 == 0))
ones = Enum.count(digits, &(&1 == 1))
zeros >= d or (hd(digits) == 1 and ones >= d)
end)
end
@doc """
Returns a list of digit replacement families of size n or greater that can be generated from x.
A digit replacement family is generated by replacing all copies of a given digit with each greater value.
An example family for 20056 is [21156, 22256, 23356, 24456, 25556, 26656, 27756, 28856, 29956]
"""
def digit_replacements(x, n) do
digits = Integer.digits(x) |> Enum.reverse
Enum.flat_map(0..10-n, fn d ->
find_indices(digits, d)
|> superset
|> Enum.map(&Enum.reduce(&1, 0, fn y, a -> a + pow(10, y) end))
|> Enum.map(fn y -> for(i <- 0..9-d, do: x + i * y) end)
end)
end
@doc """
Returns a list of all indices where x is found within list.
"""
def find_indices(list, x) do
Enum.with_index(list)
|> Enum.filter(fn {y, _} -> x == y end)
|> Enum.unzip
|> elem(1)
end
@doc """
Returns the superset of the input, excluding the null set.
"""
def superset([]), do: []
def superset(x) do
s = superset(tl(x))
Enum.concat([[[hd(x)]], s, Enum.map(s, &([hd(x)] ++ &1))])
end
def root_cont_fraction(n) do
a0 = trunc(:math.sqrt(n))
Stream.unfold({a0, a0, 1, []}, fn {an, b, c, history} ->
if {an, b, c} in history do
nil
else
history = history ++ [{an, b, c}]
c = div(n - b*b, c)
a = trunc((:math.sqrt(n) + b) / c)
b = a*c - b
{an, {a, b, c, history}}
end
end)
|> Enum.to_list
end
def root_convergents(n) do
a0 = trunc(:math.sqrt(n))
Stream.unfold({a0, 1}, fn {b, c} ->
c = div(n - b*b, c)
a = trunc((:math.sqrt(n) + b) / c)
b = a*c - b
{a, {b, c}}
end)
|> Stream.transform({{1, a0}, {0, 1}}, fn a, {{p0, p1}, {q0, q1}} ->
{p2, q2} = {a*p1 + p0, a*q1 + q0}
{[{p2, q2}], {{p1, p2}, {q1, q2}}}
end)
end
def totient(), do: totient(Primes.stream)
def totient(primes) do
count_stream(2)
|> Stream.map(fn x ->
Primes.factorize(x, primes)
|> Enum.dedup
|> Enum.reduce(x, &(div(&2, &1) * (&1-1)))
end)
end
def perm?(x, y), do: Enum.sort(x) == Enum.sort(y)
def min_grid_path(col) do
hd(col)
|> Tuple.to_list
|> Enum.sum
|> List.wrap
|> Kernel.++(tl(col))
|> Enum.scan(fn {l, r}, p -> min(l + r, l + p) end)
end
def sums(n) when n > 2 do
sums_memoized(n, 3, 2, %{})
|> Map.get({n, n-1})
end
def sums_memoized(0, _k, _memo), do: 1
def sums_memoized(1, _k, _memo), do: 1
def sums_memoized(_m, 1, _memo), do: 1
def sums_memoized(m, k, memo) when m <= k, do: sums_memoized(m, m-1, memo) + 1
def sums_memoized(m, k, memo), do: Map.get(memo, {m, k})
def sums_memoized(n, _m, k, memo) when n == k, do: memo
def sums_memoized(n, m, k, memo) when m == k, do: sums_memoized(n, m+1, 2, memo)
def sums_memoized(n, m, k, memo) do
sum = Stream.unfold(m, fn
x when x < 0 -> nil
x -> {x, x-k}
end)
|> Stream.map(&sums_memoized(&1, k-1, memo))
|> Enum.sum
sums_memoized(n, m, k+1, Map.put(memo, {m, k}, sum))
end
def coin_piles(n), do: coin_piles(n, 3, 2, %{})
def coin_piles(0, _k, _memo), do: 1
def coin_piles(1, _k, _memo), do: 1
def coin_piles(_m, 1, _memo), do: 1
def coin_piles(m, k, memo) when m <= k, do: coin_piles(m, m-1, memo) + 1
def coin_piles(m, k, memo), do: Map.get(memo, {m, k})
def coin_piles(n, m, k, memo) when m == k do
if rem(coin_piles(m, k, memo), n) == 0 do
m
else
coin_piles(n, m+1, 2, memo)
end
end
def coin_piles(n, m, k, memo) do
sum = Stream.unfold(m, fn
x when x < 0 -> nil
x -> {x, x-k}
end)
|> Stream.map(&coin_piles(&1, k-1, memo))
|> Enum.sum
coin_piles(n, m, k+1, Map.put(memo, {m, k}, sum))
end
def digit_factorial_chain(x, memo) do
with :error <- Map.fetch(memo, x),
dfs <- Integer.digits(x) |> Enum.map(&factorial/1) |> Enum.sum,
false <- dfs == x,
{n, memo} <- digit_factorial_chain(dfs, memo)
do
{n+1, Map.put(memo, x, n+1)}
else
{:ok, n} -> {n, memo}
true -> {1, Map.put(memo, x, 1)}
end
end
def rectangles(m, n) when m < n, do: rectangles(n, m)
def rectangles(m, n) do
Enum.map(0..m-1, fn i ->
Enum.map(0..n-1, &((i+1)*(&1+1)))
|> Enum.sum
end)
|> Enum.sum
end
end
|
lib/euler.ex
| 0.695338 | 0.623148 |
euler.ex
|
starcoder
|
defmodule Hologram.Compiler.IR do
# TYPES
defmodule AnonymousFunctionType do
defstruct arity: nil, params: [], bindings: [], body: nil
end
defmodule AtomType do
defstruct value: nil
end
defmodule BinaryType do
defstruct parts: []
end
defmodule BooleanType do
defstruct value: nil
end
defmodule FloatType do
defstruct value: nil
end
defmodule IntegerType do
defstruct value: nil
end
defmodule ListType do
defstruct data: []
end
defmodule MapType do
defstruct data: []
end
defmodule ModuleType do
defstruct module: nil
end
defmodule NilType do
defstruct []
end
defmodule StringType do
defstruct value: nil
end
defmodule StructType do
defstruct module: nil, data: []
end
defmodule TupleType do
defstruct data: []
end
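  # For illustration (a hypothetical shape): the expression `%{a: 1}` could be
  # represented as:
  #
  #     %MapType{data: [{%AtomType{value: :a}, %IntegerType{value: 1}}]}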
# OPERATORS
defmodule AccessOperator do
defstruct data: nil, key: nil
end
defmodule AdditionOperator do
defstruct left: nil, right: nil
end
defmodule ConsOperator do
defstruct head: nil, tail: nil
end
defmodule DivisionOperator do
defstruct left: nil, right: nil
end
defmodule DotOperator do
defstruct left: nil, right: nil
end
defmodule EqualToOperator do
defstruct left: nil, right: nil
end
defmodule LessThanOperator do
defstruct left: nil, right: nil
end
defmodule ListConcatenationOperator do
defstruct left: nil, right: nil
end
defmodule ListSubtractionOperator do
defstruct left: nil, right: nil
end
defmodule MatchOperator do
defstruct bindings: [], left: nil, right: nil
end
defmodule MembershipOperator do
defstruct left: nil, right: nil
end
defmodule ModuleAttributeOperator do
defstruct name: nil
end
defmodule MultiplicationOperator do
defstruct left: nil, right: nil
end
defmodule NotEqualToOperator do
defstruct left: nil, right: nil
end
defmodule RelaxedBooleanAndOperator do
defstruct left: nil, right: nil
end
defmodule RelaxedBooleanNotOperator do
defstruct value: nil
end
defmodule RelaxedBooleanOrOperator do
defstruct left: nil, right: nil
end
defmodule StrictBooleanAndOperator do
defstruct left: nil, right: nil
end
defmodule SubtractionOperator do
defstruct left: nil, right: nil
end
defmodule TypeOperator do
defstruct left: nil, right: nil
end
defmodule UnaryNegativeOperator do
defstruct value: nil
end
defmodule UnaryPositiveOperator do
defstruct value: nil
end
# DEFINITIONS
defmodule FunctionDefinition do
defstruct module: nil,
name: nil,
arity: nil,
params: [],
bindings: [],
body: nil,
visibility: nil
end
defmodule FunctionDefinitionVariants do
defstruct name: nil, variants: []
end
# DEFER: implement
defmodule FunctionHead do
defstruct []
end
defmodule MacroDefinition do
defstruct module: nil, name: nil, arity: nil, params: [], bindings: [], body: nil
end
defmodule ModuleDefinition do
defstruct module: nil,
uses: [],
imports: [],
requires: [],
aliases: [],
attributes: [],
functions: [],
macros: [],
component?: nil,
layout?: nil,
page?: nil,
templatable?: nil
end
defmodule ModuleAttributeDefinition do
defstruct name: nil, value: nil
end
# DEFER: implement
defmodule ProtocolDefinition do
defstruct []
end
# DIRECTIVES
defmodule AliasDirective do
defstruct module: nil, as: nil
end
defmodule ImportDirective do
defstruct module: nil, only: nil
end
defmodule RequireDirective do
defstruct module: nil
end
defmodule UseDirective do
defstruct module: nil, opts: []
end
# CONTROL FLOW
defmodule AnonymousFunctionCall do
defstruct name: nil, args: []
end
defmodule CaseExpression do
defstruct condition: nil, clauses: []
end
defmodule FunctionCall do
defstruct module: nil, function: nil, args: []
end
defmodule IfExpression do
defstruct condition: nil, do: nil, else: nil, ast: nil
end
# BINDINGS
defmodule Binding do
defstruct name: nil, access_path: []
end
defmodule CaseConditionAccess do
defstruct []
end
defmodule ListIndexAccess do
defstruct index: nil
end
defmodule ListTailAccess do
defstruct []
end
defmodule MapAccess do
defstruct key: nil
end
defmodule MatchAccess do
defstruct []
end
defmodule ParamAccess do
defstruct index: nil
end
defmodule TupleAccess do
defstruct index: nil
end
# OTHER
defmodule Block do
defstruct expressions: []
end
defmodule Quote do
defstruct body: nil
end
defmodule ModulePseudoVariable do
defstruct []
end
defmodule Unquote do
defstruct expression: nil
end
defmodule Typespec do
defstruct []
end
defmodule Variable do
defstruct name: nil
end
# NOT SUPPORTED
defmodule NotSupportedExpression do
defstruct ast: nil, type: nil
end
end
|
lib/hologram/compiler/ir.ex
| 0.518059 | 0.839537 |
ir.ex
|
starcoder
|
defmodule Evaluation.Adaptations.CombineWithGuaranteesEval do
@moduledoc false
use Observables.GenObservable
alias Reactivity.Processing.Matching
require Logger
def init([imap, tmap, gmap, rtype]) do
Logger.debug("CombineWithGuaranteesEval: #{inspect(self())}")
{:ok, {:buffer, imap, :types, tmap, :guarantees, gmap, :result, rtype}}
end
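  # The state is a flat labelled tuple:
  # {:buffer, buffer, :types, tmap, :guarantees, gmap, :result, rtype},
  # pattern-matched by the handle_event/2 clauses below.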
def handle_event({:newvalue, index, msg},
{:buffer, buffer, :types, tmap, :guarantees, gmap, :result, rtype}) do
updated_buffer = %{buffer | index => Map.get(buffer, index) ++ [msg]}
case Matching.match(updated_buffer, msg, index, tmap, gmap) do
:nomatch ->
{:novalue, {:buffer, updated_buffer, :types, tmap, :guarantees, gmap, :result, rtype}}
{:ok, match, contexts, new_buffer} ->
{vals, _contextss} =
match
|> Enum.unzip()
if first_value?(index, buffer) and
Map.get(tmap, index) == :behaviour and
rtype == :event_stream do
Process.send(self(), {:event, {:spit, index}}, [])
end
{val, _cxts} = msg
#ADAPTATION: RETURN THE VALUE OF THE CURRENT MESSAGE AS WELL
{:value, {vals, val, contexts},
{:buffer, new_buffer, :types, tmap, :guarantees, gmap, :result, rtype}}
end
end
def handle_event({:spit, index},
{:buffer, buffer, :types, tmap, :guarantees, gmap, :result, rtype}) do
msg =
buffer
|> Map.get(index)
|> List.first()
case Matching.match(buffer, msg, index, tmap, gmap) do
:nomatch ->
{:novalue, {:buffer, buffer, :types, tmap, :guarantees, gmap, :result, rtype}}
{:ok, match, contexts, new_buffer} ->
{vals, _contextss} =
match
|> Enum.unzip()
Process.send(self(), {:event, {:spit, index}}, [])
{val, _cxts} = msg
#ADAPTATION: RETURN THE VALUE OF THE CURRENT MESSAGE AS WELL
{:value, {vals, val, contexts},
{:buffer, new_buffer, :types, tmap, :guarantees, gmap, :result, rtype}}
end
end
def handle_done(_pid, _state) do
Logger.debug("#{inspect(self())}: combinelatestn has one dead dependency, going on.")
{:ok, :continue}
end
defp first_value?(index, buffer) do
buffer
|> Map.get(index)
|> Enum.empty?()
end
end
|
lib/evaluation/adaptations/combine_with_guarantees_eval.ex
| 0.646125 | 0.417093 |
combine_with_guarantees_eval.ex
|
starcoder
|
defmodule Hive.MQ.NodeAgent do
use Agent
require Logger
@moduledoc """
This agent is responsible for keeping track of all nodes that are connected
to the network.
"""
@doc """
Starts the agent with an empty map
"""
def start_link(opts \\ []) do
Agent.start_link(fn -> %{} end, opts)
end
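  # A minimal usage sketch (the exchange/queue/routing-key values are
  # hypothetical):
  #
  #     {:ok, pid} = Hive.MQ.NodeAgent.start_link()
  #     :ok = Hive.MQ.NodeAgent.registerSelf(pid, "hive.exchange", "node.queue", "node.key")
  #     Hive.MQ.NodeAgent.getSelf(pid)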
@doc """
This endpoint adds a node to the agent. It only accepts a
`%Hive.MQ.Message.Greet{}`; anything else returns `{:error, :wrngmsg}`.
If an entry with the same hostname already exists, it is updated.
Returns `:ok` on success.
"""
def add(pid, greet_msg) do
if greet_msg.__struct__ == Hive.MQ.Message.Greet do
%{hostname: hostname} = greet_msg
node_info = greet_msg
|> Map.from_struct
|> Map.delete(:reply)
Logger.info "Adding #{hostname} to Node Agent: " <> inspect pid
Agent.update(pid, &Map.put(&1, hostname, node_info))
else
{:error, :wrngmsg}
end
end
@doc """
This endpoint gets the requested node by hostname. A map with fields
similar to the `Hive.MQ.Message.Greet` struct is returned if found;
otherwise `:notfound` is returned.
"""
def get(pid, key) do
getter = fn map ->
case Map.fetch(map, key) do
{:ok, value} -> value
:error -> :notfound
end
end
Agent.get(pid, getter)
end
  defp getIPAddress() do
    # :inet.getifaddrs/0 returns interface names as charlists, so convert
    # the configured (binary) interface name before matching.
    interface = Application.get_env(:hive, :interface, "lo") |> to_charlist()
    {:ok, lst} = :inet.getifaddrs()
    getIPAddress(interface, lst)
  end
defp getIPAddress(interface, [head | tail]) do
case head do
{^interface, lst} ->
{a, b, c, d} = Keyword.get(lst, :addr)
"#{a}.#{b}.#{c}.#{d}"
_ -> getIPAddress(interface, tail)
end
end
defp getIPAddress(_interface, []) do
:notfound
end
@doc """
This endpoint just registers the current node in the agent's state. It takes
parameters based on the RabbitMQ connection and is supposed to be called
when the connection to RabbitMQ is established.
"""
def registerSelf(pid, exchange, queue, routing_key) do
os_version = case :os.version do
{maj, min, _} -> "#{maj}.#{min}"
version -> inspect version
end
os = case :os.type do
{:unix, type} -> "#{type}"
type -> inspect type
end
{:ok, hostname} = :inet.gethostname
greet = %Hive.MQ.Message.Greet{
routing_key: routing_key,
hostname: hostname,
ip_address: getIPAddress(),
exchange: exchange,
queue: queue,
os: os,
os_version: os_version,
purpose: Application.get_env(:hive, :purpose, "UNKNOWN"),
}
add(pid, greet)
end
@doc """
This endpoint is a simple lookup of the current node's own entry.
"""
def getSelf(pid) do
{:ok, hostname} = :inet.gethostname
get(pid, hostname)
end
end
|
lib/hive/mq/node_agent.ex
| 0.64579 | 0.442456 |
node_agent.ex
|
starcoder
|
defmodule AWS.CognitoIdentity do
@moduledoc """
Amazon Cognito Federated Identities
Amazon Cognito Federated Identities is a web service that delivers scoped
temporary credentials to mobile devices and other untrusted environments.
It uniquely identifies a device and supplies the user with a consistent identity
over the lifetime of an application.
Using Amazon Cognito Federated Identities, you can enable authentication with
one or more third-party identity providers (Facebook, Google, or Login with
Amazon) or an Amazon Cognito user pool, and you can also choose to support
unauthenticated access from your app. Cognito delivers a unique identifier for
each user and acts as an OpenID token provider trusted by AWS Security Token
Service (STS) to access temporary, limited-privilege AWS credentials.
For a description of the authentication flow from the Amazon Cognito Developer
Guide see [Authentication Flow](https://docs.aws.amazon.com/cognito/latest/developerguide/authentication-flow.html).
For more information see [Amazon Cognito Federated Identities](https://docs.aws.amazon.com/cognito/latest/developerguide/cognito-identity.html).
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: nil,
api_version: "2014-06-30",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "cognito-identity",
global?: false,
protocol: "json",
service_id: "Cognito Identity",
signature_version: "v4",
signing_name: "cognito-identity",
target_prefix: "AWSCognitoIdentityService"
}
end
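  # For illustration, a call might look like this (a sketch; client setup
  # follows aws-elixir conventions and the pool ID is hypothetical):
  #
  #     client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")
  #     AWS.CognitoIdentity.get_id(client, %{"IdentityPoolId" => "us-east-1:example-pool"})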
@doc """
Creates a new identity pool.
The identity pool is a store of user identity information that is specific to
your AWS account. The keys for `SupportedLoginProviders` are as follows:
* Facebook: `graph.facebook.com`
* Google: `accounts.google.com`
* Amazon: `www.amazon.com`
* Twitter: `api.twitter.com`
* Digits: `www.digits.com`
You must use AWS Developer credentials to call this API.
"""
def create_identity_pool(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateIdentityPool", input, options)
end
@doc """
Deletes identities from an identity pool.
You can specify a list of 1-60 identities that you want to delete.
You must use AWS Developer credentials to call this API.
"""
def delete_identities(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteIdentities", input, options)
end
@doc """
Deletes an identity pool.
Once a pool is deleted, users will not be able to authenticate with the pool.
You must use AWS Developer credentials to call this API.
"""
def delete_identity_pool(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteIdentityPool", input, options)
end
@doc """
Returns metadata related to the given identity, including when the identity was
created and any associated linked logins.
You must use AWS Developer credentials to call this API.
"""
def describe_identity(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeIdentity", input, options)
end
@doc """
Gets details about a particular identity pool, including the pool name, ID,
description, creation date, and current number of users.
You must use AWS Developer credentials to call this API.
"""
def describe_identity_pool(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeIdentityPool", input, options)
end
@doc """
Returns credentials for the provided identity ID.
Any provided logins will be validated against supported login providers. If the
token is for cognito-identity.amazonaws.com, it will be passed through to AWS
Security Token Service with the appropriate role for the token.
This is a public API. You do not need any credentials to call this API.
"""
def get_credentials_for_identity(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetCredentialsForIdentity", input, options)
end
@doc """
Generates (or retrieves) a Cognito ID.
Supplying multiple logins will create an implicit linked account.
This is a public API. You do not need any credentials to call this API.
"""
def get_id(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetId", input, options)
end
@doc """
Gets the roles for an identity pool.
You must use AWS Developer credentials to call this API.
"""
def get_identity_pool_roles(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetIdentityPoolRoles", input, options)
end
@doc """
Gets an OpenID token, using a known Cognito ID.
This known Cognito ID is returned by `GetId`. You can optionally add additional
logins for the identity. Supplying multiple logins creates an implicit link.
The OpenID token is valid for 10 minutes.
This is a public API. You do not need any credentials to call this API.
"""
def get_open_id_token(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetOpenIdToken", input, options)
end
@doc """
Registers (or retrieves) a Cognito `IdentityId` and an OpenID Connect token for
a user authenticated by your backend authentication process.
Supplying multiple logins will create an implicit linked account. You can only
specify one developer provider as part of the `Logins` map, which is linked to
the identity pool. The developer provider is the "domain" by which Cognito will
refer to your users.
You can use `GetOpenIdTokenForDeveloperIdentity` to create a new identity and to
link new logins (that is, user credentials issued by a public provider or
developer provider) to an existing identity. When you want to create a new
identity, the `IdentityId` should be null. When you want to associate a new
login with an existing authenticated/unauthenticated identity, you can do so by
providing the existing `IdentityId`. This API will create the identity in the
specified `IdentityPoolId`.
You must use AWS Developer credentials to call this API.
"""
def get_open_id_token_for_developer_identity(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetOpenIdTokenForDeveloperIdentity", input, options)
end
@doc """
Use `GetPrincipalTagAttributeMap` to list all mappings between `PrincipalTags`
and user attributes.
"""
def get_principal_tag_attribute_map(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetPrincipalTagAttributeMap", input, options)
end
@doc """
Lists the identities in an identity pool.
You must use AWS Developer credentials to call this API.
"""
def list_identities(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListIdentities", input, options)
end
@doc """
Lists all of the Cognito identity pools registered for your account.
You must use AWS Developer credentials to call this API.
"""
def list_identity_pools(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListIdentityPools", input, options)
end
@doc """
Lists the tags that are assigned to an Amazon Cognito identity pool.
A tag is a label that you can apply to identity pools to categorize and manage
them in different ways, such as by purpose, owner, environment, or other
criteria.
You can use this action up to 10 times per second, per account.
"""
def list_tags_for_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListTagsForResource", input, options)
end
@doc """
Retrieves the `IdentityID` associated with a `DeveloperUserIdentifier` or the
list of `DeveloperUserIdentifier` values associated with an `IdentityId` for an
existing identity.
Either `IdentityID` or `DeveloperUserIdentifier` must not be null. If you supply
only one of these values, the other value will be searched in the database and
returned as a part of the response. If you supply both,
`DeveloperUserIdentifier` will be matched against `IdentityID`. If the values
are verified against the database, the response returns both values and is the
same as the request. Otherwise a `ResourceConflictException` is thrown.
`LookupDeveloperIdentity` is intended for low-throughput control plane
operations: for example, to enable customer service to locate an identity ID by
username. If you are using it for higher-volume operations such as user
authentication, your requests are likely to be throttled.
`GetOpenIdTokenForDeveloperIdentity` is a better option for higher-volume
operations for user authentication.
You must use AWS Developer credentials to call this API.
"""
def lookup_developer_identity(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "LookupDeveloperIdentity", input, options)
end
@doc """
Merges two users having different `IdentityId`s, existing in the same identity
pool, and identified by the same developer provider.
You can use this action to request that discrete users be merged and identified
as a single user in the Cognito environment. Cognito associates the given source
user (`SourceUserIdentifier`) with the `IdentityId` of the
`DestinationUserIdentifier`. Only developer-authenticated users can be merged.
If the users to be merged are associated with the same public provider, but as
two different users, an exception will be thrown.
The number of linked logins is limited to 20. So, the number of linked logins
for the source user, `SourceUserIdentifier`, and the destination user,
`DestinationUserIdentifier`, together should not be larger than 20. Otherwise,
an exception will be thrown.
You must use AWS Developer credentials to call this API.
"""
def merge_developer_identities(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "MergeDeveloperIdentities", input, options)
end
@doc """
Sets the roles for an identity pool.
These roles are used when making calls to `GetCredentialsForIdentity` action.
You must use AWS Developer credentials to call this API.
"""
def set_identity_pool_roles(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "SetIdentityPoolRoles", input, options)
end
@doc """
You can use this operation to apply the default (username and clientID)
attribute mappings or custom attribute mappings.
"""
def set_principal_tag_attribute_map(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "SetPrincipalTagAttributeMap", input, options)
end
@doc """
Assigns a set of tags to the specified Amazon Cognito identity pool.
A tag is a label that you can use to categorize and manage identity pools in
different ways, such as by purpose, owner, environment, or other criteria.
Each tag consists of a key and value, both of which you define. A key is a
general category for more specific values. For example, if you have two versions
of an identity pool, one for testing and another for production, you might
assign an `Environment` tag key to both identity pools. The value of this key
might be `Test` for one identity pool and `Production` for the other.
Tags are useful for cost tracking and access control. You can activate your tags
so that they appear on the Billing and Cost Management console, where you can
track the costs associated with your identity pools. In an IAM policy, you can
constrain permissions for identity pools based on specific tags or tag values.
You can use this action up to 5 times per second, per account. An identity pool
can have as many as 50 tags.
"""
def tag_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "TagResource", input, options)
end
@doc """
Unlinks a `DeveloperUserIdentifier` from an existing identity.
Unlinked developer users will be considered new identities next time they are
seen. If, for a given Cognito identity, you remove all federated identities as
well as the developer user identifier, the Cognito identity becomes
inaccessible.
You must use AWS Developer credentials to call this API.
"""
def unlink_developer_identity(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UnlinkDeveloperIdentity", input, options)
end
@doc """
Unlinks a federated identity from an existing account.
Unlinked logins will be considered new identities next time they are seen.
Removing the last linked login will make this identity inaccessible.
This is a public API. You do not need any credentials to call this API.
"""
def unlink_identity(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UnlinkIdentity", input, options)
end
@doc """
Removes the specified tags from the specified Amazon Cognito identity pool.
You can use this action up to 5 times per second, per account
"""
def untag_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UntagResource", input, options)
end
@doc """
Updates an identity pool.
You must use AWS Developer credentials to call this API.
"""
def update_identity_pool(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateIdentityPool", input, options)
end
end
|
lib/aws/generated/cognito_identity.ex
| 0.89742 | 0.474753 |
cognito_identity.ex
|
starcoder
|
defmodule TypeCheck.Builtin.Map do
defstruct [:key_type, :value_type]
use TypeCheck
@opaque! t :: %__MODULE__{key_type: TypeCheck.Type.t(), value_type: TypeCheck.Type.t()}
@type! problem_tuple ::
{t(), :not_a_map, %{}, any()}
| {t(), :key_error,
%{problem: lazy(TypeCheck.TypeError.Formatter.problem_tuple()), key: any()}, any()}
| {t(), :value_error,
%{problem: lazy(TypeCheck.TypeError.Formatter.problem_tuple()), key: any()}, any()}
defimpl TypeCheck.Protocols.ToCheck do
def to_check(s, param) do
quote generated: true, location: :keep do
case unquote(param) do
x when not is_map(x) ->
{:error, {unquote(Macro.escape(s)), :not_a_map, %{}, unquote(param)}}
_ ->
unquote(build_keypairs_check(s.key_type, s.value_type, param, s))
end
end
end
defp build_keypairs_check(%TypeCheck.Builtin.Any{}, %TypeCheck.Builtin.Any{}, param, _s) do
quote generated: true, location: :keep do
{:ok, [], unquote(param)}
end
end
defp build_keypairs_check(key_type, value_type, param, s) do
key_check =
TypeCheck.Protocols.ToCheck.to_check(key_type, Macro.var(:single_field_key, __MODULE__))
value_check =
TypeCheck.Protocols.ToCheck.to_check(
value_type,
Macro.var(:single_field_value, __MODULE__)
)
quote generated: true, location: :keep do
orig_param = unquote(param)
res =
orig_param
|> Enum.reduce_while({:ok, [], []}, fn {key, value}, {:ok, bindings, altered_param} ->
var!(single_field_key, unquote(__MODULE__)) = key
var!(single_field_value, unquote(__MODULE__)) = value
case {unquote(key_check), unquote(value_check)} do
{{:ok, key_bindings, altered_key}, {:ok, value_bindings, altered_value}} ->
altered_param = [{altered_key, altered_value} | altered_param]
res = {:ok, value_bindings ++ key_bindings ++ bindings, altered_param}
{:cont, res}
{{:error, problem}, _} ->
res =
{:error,
{unquote(Macro.escape(s)), :key_error, %{problem: problem, key: key}, orig_param}}
{:halt, res}
{_, {:error, problem}} ->
res =
{:error,
{unquote(Macro.escape(s)), :value_error, %{problem: problem, key: key},
orig_param}}
{:halt, res}
end
end)
case res do
{:ok, bindings, altered_param} ->
{:ok, bindings, :maps.from_list(altered_param)}
other -> other
end
end
end
end
defimpl TypeCheck.Protocols.Inspect do
def inspect(list, opts) do
Inspect.Algebra.container_doc(
"map(",
[
TypeCheck.Protocols.Inspect.inspect(list.key_type, opts),
TypeCheck.Protocols.Inspect.inspect(list.value_type, opts)
],
")",
opts,
fn x, _ -> x end,
separator: ",",
break: :maybe
)
|> Inspect.Algebra.color(:builtin_type, opts)
end
end
if Code.ensure_loaded?(StreamData) do
defimpl TypeCheck.Protocols.ToStreamData do
def to_gen(s) do
key_gen = TypeCheck.Protocols.ToStreamData.to_gen(s.key_type)
value_gen = TypeCheck.Protocols.ToStreamData.to_gen(s.value_type)
StreamData.map_of(key_gen, value_gen)
end
end
end
end
|
lib/type_check/builtin/map.ex
| 0.643889 | 0.482673 |
map.ex
|
starcoder
|
defmodule Tz.PeriodsBuilder do
@moduledoc false
def build_periods(zone_lines, rule_records, mode \\ :compilation, prev_period \\ nil, periods \\ [])
def build_periods([], _rule_records, _mode, _prev_period, periods), do: Enum.reverse(periods)
def build_periods([zone_line | rest_zone_lines], rule_records, mode, prev_period, periods) do
rules = Map.get(rule_records, zone_line.rules, zone_line.rules)
periods =
build_periods_for_zone_line(zone_line, rules, mode, prev_period)
|> concat_dedup_periods(periods)
build_periods(rest_zone_lines, rule_records, mode, hd(periods), periods)
end
defp concat_dedup_periods(periods, []), do: periods
defp concat_dedup_periods(periods1, [first_period2 | tail_period2] = periods2) do
last_period1 = List.last(periods1)
compare_keys = [:std_offset_from_utc_time, :local_offset_from_std_time, :zone_abbr]
cond do
Map.take(last_period1, compare_keys) == Map.take(first_period2, compare_keys) ->
(periods1 |> Enum.reverse() |> tl() |> Enum.reverse())
++ [%{first_period2 | to: last_period1.to} | tail_period2]
true ->
periods1 ++ periods2
end
end
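  # For illustration: two consecutive periods that share the same standard
  # offset, local (DST) offset and zone abbreviation - say, back-to-back
  # "CET" periods - are merged above into a single period covering both ranges.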
defp offset_diff_from_prev_period(_zone_line, _local_offset, nil), do: 0
defp offset_diff_from_prev_period(zone_line, local_offset, prev_period) do
total_offset = zone_line.std_offset_from_utc_time + local_offset
prev_total_offset = prev_period.std_offset_from_utc_time + prev_period.local_offset_from_std_time
total_offset - prev_total_offset
end
defp build_periods_for_zone_line(zone_line, offset, _mode, prev_period) when is_integer(offset) do
if zone_line.from != :min && prev_period != nil do
{zone_from, zone_from_modifier} = zone_line.from
if prev_period.to[zone_from_modifier] != zone_from do
raise "logic error"
end
end
offset_diff = offset_diff_from_prev_period(zone_line, offset, prev_period)
period_from =
if zone_line.from == :min do
:min
else
add_to_and_convert_date_tuple({prev_period.to.wall, :wall}, offset_diff, zone_line.std_offset_from_utc_time, offset)
end
[%{
from: period_from,
to: convert_date_tuple(zone_line.to, zone_line.std_offset_from_utc_time, offset),
std_offset_from_utc_time: zone_line.std_offset_from_utc_time,
local_offset_from_std_time: offset,
zone_abbr: zone_abbr(zone_line, offset)
}]
end
defp build_periods_for_zone_line(zone_line, rules, mode, prev_period) when is_list(rules) do
if zone_line.from != :min && prev_period != nil do
{zone_from, zone_from_modifier} = zone_line.from
if prev_period.to[zone_from_modifier] != zone_from do
raise "logic error"
end
end
if mode == :dynamic_far_future do
rules
else
rules
|> filter_rules_for_zone_line(zone_line, prev_period, if(prev_period == nil, do: 0, else: prev_period.local_offset_from_std_time))
|> maybe_pad_left_rule(zone_line, prev_period)
|> trim_zone_rules(zone_line, prev_period)
end
|> do_build_periods_for_zone_line(zone_line, prev_period, [])
end
defp filter_rules_for_zone_line(rules, zone_line, prev_period, prev_local_offset_from_std_time, filtered_rules \\ [])
defp filter_rules_for_zone_line(rules, %{from: :min, to: :max}, _, _, _), do: rules
defp filter_rules_for_zone_line([], _zone_line, _, _, filtered_rules), do: Enum.reverse(filtered_rules)
defp filter_rules_for_zone_line([rule | rest_rules], zone_line, prev_period, prev_local_offset_from_std_time, filtered_rules) do
is_rule_included =
cond do
zone_line.to == :max && rule.to == :max ->
true
zone_line.to == :max ->
{rule_to, rule_to_modifier} = rule.to
prev_period == nil || NaiveDateTime.compare(prev_period.to[rule_to_modifier], rule_to) == :lt
zone_line.from == :min || rule.to == :max ->
{zone_to, zone_to_modifier} = zone_line.to
rule_from = convert_date_tuple(rule.from, prev_period.std_offset_from_utc_time, prev_local_offset_from_std_time)
NaiveDateTime.compare(zone_to, rule_from[zone_to_modifier]) == :gt
true ->
{zone_to, zone_to_modifier} = zone_line.to
{rule_to, rule_to_modifier} = rule.to
rule_from = convert_date_tuple(rule.from, prev_period.std_offset_from_utc_time, prev_local_offset_from_std_time)
NaiveDateTime.compare(prev_period.to[rule_to_modifier], rule_to) == :lt
&& NaiveDateTime.compare(zone_to, rule_from[zone_to_modifier]) == :gt
end
if is_rule_included do
filter_rules_for_zone_line(rest_rules, zone_line, prev_period, rule.local_offset_from_std_time, [rule | filtered_rules])
else
filter_rules_for_zone_line(rest_rules, zone_line, prev_period, prev_local_offset_from_std_time, filtered_rules)
end
end
defp trim_zone_rules([], _zone_line, _), do: []
defp trim_zone_rules([first_rule | tail_rules] = rules, zone_line, prev_period) do
rules =
if rule_starts_before_zone_line_range?(zone_line, first_rule, if(prev_period == nil, do: 0, else: prev_period.local_offset_from_std_time)) do
[%{first_rule | from: zone_line.from} | tail_rules]
else
rules
end
last_rule = List.last(rules)
if rule_ends_after_zone_line_range?(zone_line, last_rule) do
[%{last_rule | to: zone_line.to} | (Enum.reverse(rules) |> tl())]
|> Enum.reverse()
else
rules
end
end
defp rule_starts_before_zone_line_range?(%{from: :min}, _rule, _), do: false
defp rule_starts_before_zone_line_range?(zone_line, rule, prev_local_offset_from_std_time) do
rule_from = convert_date_tuple(rule.from, zone_line.std_offset_from_utc_time, prev_local_offset_from_std_time)
%{from: {zone_from, zone_from_modifier}} = zone_line
NaiveDateTime.compare(rule_from[zone_from_modifier], zone_from) == :lt
end
defp rule_ends_after_zone_line_range?(%{to: :max}, _rule), do: false
defp rule_ends_after_zone_line_range?(_zone_line, %{to: :max}), do: true
defp rule_ends_after_zone_line_range?(zone_line, rule) do
rule_to = convert_date_tuple(rule.to, zone_line.std_offset_from_utc_time, rule.local_offset_from_std_time)
%{to: {zone_to, zone_to_modifier}} = zone_line
NaiveDateTime.compare(rule_to[zone_to_modifier], zone_to) == :gt
end
defp maybe_pad_left_rule([], _zone_line, _), do: []
defp maybe_pad_left_rule([first_rule | _] = rules, %{from: :min}, _) do
rule = %{
record_type: :rule,
from: :min,
name: "",
local_offset_from_std_time: 0,
letter: Enum.find(rules, & &1.local_offset_from_std_time == 0).letter,
to: first_rule.from
}
[rule | rules]
end
defp maybe_pad_left_rule(rules, _zone_line, nil), do: rules
defp maybe_pad_left_rule([first_rule | _] = rules, zone_line, prev_period) do
{rule_from, rule_from_modifier} = first_rule.from
if NaiveDateTime.compare(prev_period.to[rule_from_modifier], rule_from) == :lt do
letter =
# find the first rule with a local offset of 0
case Enum.find(rules, & &1.local_offset_from_std_time == 0) do
%{letter: letter} -> letter
_ -> ""
end
rule = %{
record_type: :rule,
from: zone_line.from,
name: first_rule.name,
local_offset_from_std_time: 0,
letter: letter,
to: first_rule.from
}
[rule | rules]
else
rules
end
end
defp do_build_periods_for_zone_line([], _zone_line, _prev_period, periods), do: periods
defp do_build_periods_for_zone_line([rule | rest_rules], zone_line, prev_period, periods) do
offset_diff = offset_diff_from_prev_period(zone_line, rule.local_offset_from_std_time, prev_period)
period_from =
case prev_period do
nil ->
convert_date_tuple(zone_line.from, zone_line.std_offset_from_utc_time, 0)
%{to: :max} ->
convert_date_tuple(rule.from, zone_line.std_offset_from_utc_time, prev_period.local_offset_from_std_time)
_ ->
add_to_and_convert_date_tuple({prev_period.to.wall, :wall}, offset_diff, zone_line.std_offset_from_utc_time, rule.local_offset_from_std_time)
end
period_to = convert_date_tuple(rule.to, zone_line.std_offset_from_utc_time, rule.local_offset_from_std_time)
if period_from != :min && period_to != :max && period_from.utc_gregorian_seconds == period_to.utc_gregorian_seconds do
raise "logic error"
end
period = %{
from: period_from,
to: period_to,
std_offset_from_utc_time: zone_line.std_offset_from_utc_time,
local_offset_from_std_time: rule.local_offset_from_std_time,
zone_abbr: zone_abbr(zone_line, rule.local_offset_from_std_time, rule.letter),
rules_and_template:
if(period_to == :max && prev_period && prev_period.to == :max) do
{zone_line.rules, zone_line.format_time_zone_abbr}
end
}
periods = concat_dedup_periods([period], periods)
do_build_periods_for_zone_line(rest_rules, zone_line, period, periods)
end
defp zone_abbr(zone_line, offset, letter \\ "") do
is_standard_time = offset == 0
cond do
String.contains?(zone_line.format_time_zone_abbr, "/") ->
[zone_abbr_std_time, zone_abbr_dst_time] = String.split(zone_line.format_time_zone_abbr, "/")
if(is_standard_time, do: zone_abbr_std_time, else: zone_abbr_dst_time)
String.contains?(zone_line.format_time_zone_abbr, "%s") ->
String.replace(zone_line.format_time_zone_abbr, "%s", letter)
true ->
zone_line.format_time_zone_abbr
end
end
defp add_to_and_convert_date_tuple(:min, _, _, _), do: :min
defp add_to_and_convert_date_tuple(:max, _, _, _), do: :max
defp add_to_and_convert_date_tuple({date, time_modifier}, add_seconds, std_offset_from_utc_time, local_offset_from_std_time) do
date = NaiveDateTime.add(date, add_seconds, :second)
convert_date_tuple({date, time_modifier}, std_offset_from_utc_time, local_offset_from_std_time)
end
defp convert_date_tuple(:min, _, _), do: :min
defp convert_date_tuple(:max, _, _), do: :max
defp convert_date_tuple({date, time_modifier}, std_offset_from_utc_time, local_offset_from_std_time) do
utc = convert_date(date, std_offset_from_utc_time, local_offset_from_std_time, time_modifier, :utc)
%{
utc: utc,
wall: convert_date(date, std_offset_from_utc_time, local_offset_from_std_time, time_modifier, :wall),
standard: convert_date(date, std_offset_from_utc_time, local_offset_from_std_time, time_modifier, :standard)
}
|> Map.put(:utc_gregorian_seconds, naive_datetime_to_gregorian_seconds(utc))
end
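# Shrink each period map into a compact tuple:
# {from_utc_gregorian_seconds, {std_offset, local_offset, zone_abbr},
#  the previous period's offset/abbr tuple (or nil), rules_and_template}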
def periods_to_tuples_and_reverse(periods, shrank_periods \\ [], prev_period \\ nil)
def periods_to_tuples_and_reverse([], shrank_periods, _), do: shrank_periods
def periods_to_tuples_and_reverse([period | tail], shrank_periods, prev_period) do
period = {
if(period.from == :min, do: 0, else: period.from.utc_gregorian_seconds),
{
period.std_offset_from_utc_time,
period.local_offset_from_std_time,
period.zone_abbr
},
prev_period && elem(prev_period, 1),
period[:rules_and_template]
}
periods_to_tuples_and_reverse(tail, [period | shrank_periods], period)
end
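# Convert a naive datetime between time standards (:utc, :wall, :standard),
# given the zone's standard offset from UTC and the local (DST) offset from
# standard time. First clause: source and target standards match, so no-op.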
defp convert_date(ndt, _, _, modifier, modifier), do: ndt
defp convert_date(ndt, standard_offset_from_utc_time, local_offset_from_standard_time, :wall, :utc) do
NaiveDateTime.add(ndt, -1 * (standard_offset_from_utc_time + local_offset_from_standard_time), :second)
end
defp convert_date(ndt, _standard_offset_from_utc_time, local_offset_from_standard_time, :wall, :standard) do
NaiveDateTime.add(ndt, -1 * local_offset_from_standard_time, :second)
end
defp convert_date(ndt, standard_offset_from_utc_time, local_offset_from_standard_time, :utc, :wall) do
NaiveDateTime.add(ndt, standard_offset_from_utc_time + local_offset_from_standard_time, :second)
end
defp convert_date(ndt, standard_offset_from_utc_time, _local_offset_from_standard_time, :utc, :standard) do
NaiveDateTime.add(ndt, standard_offset_from_utc_time, :second)
end
defp convert_date(ndt, standard_offset_from_utc_time, _local_offset_from_standard_time, :standard, :utc) do
NaiveDateTime.add(ndt, -1 * standard_offset_from_utc_time, :second)
end
defp convert_date(ndt, _standard_offset_from_utc_time, local_offset_from_standard_time, :standard, :wall) do
NaiveDateTime.add(ndt, local_offset_from_standard_time, :second)
end
defp naive_datetime_to_gregorian_seconds(datetime) do
NaiveDateTime.to_erl(datetime)
|> :calendar.datetime_to_gregorian_seconds()
end
end
| lib/periods_builder.ex | 0.644673 | 0.588002 | periods_builder.ex | starcoder |
defmodule Shake do
@moduledoc """
A Plug-like middleware optimized for search.
Full-text search is built on layers of dependencies:
results <- query <- index <- catalog <- params
The data-processing requirements resemble 'make', where you assemble
results out of intermediate parts.
At every step of the build chain, you want parameter validation and caching
so you don't have to re-generate everything from scratch.
As we go forward, other types of intermediate data structures and indexes
will be introduced. We need a data-processing abstraction that is flexible,
pluggable and composable.
That's the idea of Shake.
## Inspired by Plug
The architecture of Shake is inspired by Plug. Here are some differences between
Shake and Plug...
### Terminology
| Plug | Shake |
|--------------|--------------|
| Plug.Builder | Shake.Module |
| Plug.Conn | Shake.Frame |
| plug | step |
| pipeline | job |
| conn | frame |
| call | call |
### The Shake Frame
Plug is centered around the Conn struct. Shake uses the Frame struct.
See the documentation for `Shake.Frame` for more info.
### Search Focus
Shake omits Plug's HTTP-oriented helpers, and adds search-specific helpers:
- validate - a method to perform data validations
- digest - to create content digests
- cache - a LRU cache for Elixir terms
Going forward, we'll add helpers to accommodate different types of
intermediate caches, document and configuration sources.
## Use Modes
Right now `Searchex` is a CLI so everything works in 'batch processing' mode.
Single-collection operations on small datasets.
At some point we'll add 'server mode' where the catalogs and indexes stay
resident in memory (or JIT retrieved from disk) and handle live updates.
In 'server mode', we want the ability to handle thousands of gigabyte-sized
collections running distributed across a cluster.
## Search Architecture
There are two big data structures:
1) The Catalog is like a table. The key field is 'docid' (the content
digest). Other fields are document location (filepath, byte offset, doc
length) and the various fields that are extracted in the catalog process.
2) The Index is organized in a three-level tree: 1) all the stemmed words in
the collection, 2) for each word, a list of the documents that contain it,
3) for each document, a list of the positions where the word occurs.
For a document database of 1GB, the catalog would typically be 100MB, and the
index typically 1GB.
## Data Characterization
Data comes in two forms:
- DataTerm - any Elixir term: map, list, atom, etc.
- ProcessTree - a supervisor and all its children
Data may be in one of three states:
- Active - in-memory ready to use
- Cache - in-memory hot standby
- Backup - on-disk survives executable restart
| | DataTerm | ProcessTree |
|--------|-----------|-------------------|
| Active | variable | PID / ProcessName |
| Cached | LRU Cache | LRU Registry(TBD) |
| Backup | DETS | DETS(?) |
Right now we have a nice LRU-Cache based on ETS and DETS. Going forward
we will need an LRU-Registry based on ETS and the Process Registry in Elixir 1.4.
## Work in Progress
We're using an iterative design process. `Build > Use > Learn > Repeat` The
design is evolving rapidly. Feedback/ideas/contributions are welcome!
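
## Example Step (sketch)

A minimal sketch of what a Shake step module might look like. The `step`
macro and the `halt/2`, `assign/3`, and `build_catalog/1` helpers are
assumed to behave like their Plug counterparts; they are illustrative, not
verbatim API:

```elixir
defmodule Searchex.Build.Catalog do
  use Shake.Module

  step :validate_params
  step :generate_catalog

  # Halt the job early when a required param is missing (assumed helper).
  def validate_params(%Shake.Frame{params: params} = frame, _opts) do
    if Map.has_key?(params, :collection),
      do: frame,
      else: halt(frame, "missing :collection")
  end

  # Stash the built catalog on the frame for downstream steps (assumed helper).
  def generate_catalog(frame, _opts) do
    assign(frame, :catalog, build_catalog(frame.params))
  end

  # Placeholder for the real catalog builder.
  defp build_catalog(_params), do: :catalog
end
```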
"""
@type opts :: binary | tuple | atom | integer | float | [opts] | %{opts => opts}
@callback init(opts) :: opts
@callback call(Shake.Job.t, opts) :: Shake.Job.t
end
| lib/shake.ex | 0.858719 | 0.660125 | shake.ex | starcoder |
if Code.ensure_loaded?(:fuse) do
defmodule JsonApiClient.Middleware.Fuse do
@moduledoc """
Circuit Breaker middleware using [fuse](https://github.com/jlouis/fuse). In order to use this middleware the
fuse package must be added to your mix project and the `fuse` and `sasl` applications must be started, e.g.:
```elixir
defp deps do
[
{:fuse, "~> 2.4"}
]
end
defp applications do
[
extra_applications: [:sasl, :fuse]
]
end
```
### Options
- `service_name` - fuse options for that specific service, keyed by service name
- `:opts` - default fuse options, used when a service has no options of its own (see fuse docs for reference)
```elixir
config :json_api_client,
middlewares: [
{JsonApiClient.Middleware.Fuse,
opts: {{:standard, 2, 10_000}, {:reset, 60_000}},
service1: {{:standard, 10, 5_000}, {:reset, 120_000}},
}
]
```
In this example we're specifying the default fuse options with `opts` and
then specifying different fuse options for the `service1` fuse. Fuses are
named based on the `service_name` of the request, if present.
"""
@behaviour JsonApiClient.Middleware
alias JsonApiClient.{Request, RequestError}
@defaults {{:standard, 2, 10_000}, {:reset, 60_000}}
@impl JsonApiClient.Middleware
def call(%Request{service_name: service_name, base_url: base_url} = request, next, options) do
opts = options || []
name = service_name || base_url || "json_api_client"
case :fuse.ask(name, :sync) do
:ok ->
run(request, next, name)
:blown ->
{:error, %RequestError{
original_error: "Unavailable - #{name} circuit blown",
message: "Unavailable - #{name} circuit blown",
status: nil
}}
{:error, :not_found} ->
:fuse.install(name, fuse_options(service_name, opts))
run(request, next, name)
end
end
defp fuse_options(nil, opts), do: Keyword.get(opts, :opts, @defaults)
defp fuse_options(service_name, opts) do
Keyword.get_lazy(opts, service_name, fn -> fuse_options(nil, opts) end)
end
defp run(env, next, name) do
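# On an error response, melt the fuse; enough melts inside the configured
# window blow the circuit, so later :fuse.ask/2 calls return :blown.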
case next.(env) do
{:error, error} ->
:fuse.melt(name)
{:error, error}
success ->
success
end
end
end
end
| lib/json_api_client/middleware/fuse.ex | 0.860457 | 0.708893 | fuse.ex | starcoder |
defmodule ValidatorsRo.CIF do
@moduledoc """
See `ValidatorsRo`
"""
import ValidatorsRo.Utils, only: [control_sum: 2]
defmacro __using__(_opts) do
quote location: :keep do
@cif_test_key 753217532
|> Integer.digits
|> Enum.reverse
@doc """
Provides validation of Romanian CIFs
(cod de identificare fiscala - fiscal identification code).
https://ro.wikipedia.org/wiki/Cod_de_Identificare_Fiscal%C4%83
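
Example - values consistent with the `cif_stream/1` doc below
(10004 is a valid CIF, 10005 is not):
```
iex> ValidatorsRo.valid_cif?(10004)
true
iex> ValidatorsRo.valid_cif?(10005)
false
```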
"""
@spec valid_cif?(String.t) :: boolean
def valid_cif?(cif) when is_bitstring(cif) do
cif_well_formed?(cif) && cif_valid_control_sum?(cif)
end
@spec valid_cif?(cif :: integer) :: boolean
def valid_cif?(cif) when is_integer(cif) do
valid_cif?(cif |> to_string)
end
@doc """
Given an integer (or a string representation of one),
returns the next highest valid CIF
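
Example - consistent with the `cif_stream/1` doc below:
```
iex> ValidatorsRo.next_valid_cif(10_000)
10004
```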
"""
@spec next_valid_cif(int :: String.t) :: integer
def next_valid_cif(int) when is_bitstring(int) do
next_valid_cif(int |> String.to_integer)
end
@spec next_valid_cif(int :: integer) :: integer
def next_valid_cif(int) when is_integer(int) do
{control, sum} = control_sum(int |> to_string, @cif_test_key)
last_digit_of_next_cif =
sum
|> Kernel.*(10)
|> rem(11)
|> rem(10)
if last_digit_of_next_cif < control do
next_valid_cif((div(int, 10) + 1) * 10)
else
replace_last_digit(int, last_digit_of_next_cif)
end
end
@doc """
Returns a `Stream` of valid CIFs, starting at `start` (defaults to `1` if missing)
Example:
```
iex> ValidatorsRo.cif_stream(10_000) |> Enum.take(10)
[10004, 10012, 10020, 10039, 10047, 10055, 10063, 10071, 10080, 10098]
```
"""
@spec cif_stream(start :: integer) :: Enumerable.t()
def cif_stream(start \\ 1)
def cif_stream(start) when (is_integer(start) and start < 1) do
cif_stream(1)
end
@spec cif_stream(start :: integer) :: Enumerable.t()
def cif_stream(start) when is_integer(start) and start >= 1 do
Stream.unfold start, fn cif ->
next = next_valid_cif(cif)
{next, next + 1}
end
end
@doc false
defp cif_well_formed?(cif) do
Regex.match?(~r/^\d{2,10}$/, cif)
end
@doc false
defp cif_valid_control_sum?(cif) do
{control, sum} = control_sum(cif, @cif_test_key)
sum
|> Kernel.*(10)
|> rem(11)
|> rem(10)
|> Kernel.===(control)
end
@doc false
defp replace_last_digit(number, digit) when is_bitstring(number) do
replace_last_digit(number |> String.to_integer, digit)
end
@doc false
defp replace_last_digit(number, digit) when is_integer(number) do
(Integer.to_charlist(div(number, 10)) ++ to_charlist(digit))
|> to_string()
|> String.to_integer()
end
end
end
end
| lib/cif/cif.ex | 0.876112 | 0.77907 | cif.ex | starcoder |
defmodule ElixirRigidPhysics.Geometry.Hull do
@moduledoc """
Hull geometry module.
Hulls are lists of coplanar faces wound CCW (to find normal, follow right-hand rule).
Their center is assumed to be at the origin, but in the absence of a frame that doesn't matter.
"""
alias Graphmath.Vec3
require Record
Record.defrecord(:hull, faces: [], verts: [])
@type hull_face :: [Vec3.vec3()]
@type hull :: record(:hull, faces: [hull_face], verts: [Vec3.vec3()])
@doc """
Creates a hull geometry in the shape of a box.
"""
@spec create_box(number, number, number) :: hull
def create_box(w, h, d) do
hw = w / 2.0
hh = h / 2.0
hd = d / 2.0
top = [{hw, hh, hd}, {hw, hh, -hd}, {-hw, hh, -hd}, {-hw, hh, hd}]
bottom = [{hw, -hh, hd}, {-hw, -hh, hd}, {-hw, -hh, -hd}, {hw, -hh, -hd}]
front = [{hw, hh, hd}, {-hw, hh, hd}, {-hw, -hh, hd}, {hw, -hh, hd}]
back = [{hw, hh, -hd}, {hw, -hh, -hd}, {-hw, -hh, -hd}, {-hw, hh, -hd}]
left = [{-hw, hh, hd}, {-hw, hh, -hd}, {-hw, -hh, -hd}, {-hw, -hh, hd}]
right = [{hw, hh, hd}, {hw, -hh, hd}, {hw, -hh, -hd}, {hw, hh, -hd}]
hull(
faces: [
top,
bottom,
front,
back,
left,
right
],
verts: [
{hw, hh, hd},
{hw, -hh, hd},
{-hw, hh, hd},
{-hw, -hh, hd},
{hw, hh, -hd},
{hw, -hh, -hd},
{-hw, hh, -hd},
{-hw, -hh, -hd}
]
)
end
@near_infinite 1.0e280
@doc """
Finds the support point (for GJK usually) of a convex hull.
Again, we thank <NAME> and Reactphysics3d, and also Shiny Pixel (see [point cloud support section](http://vec3.ca/gjk/)).
## Examples
iex> alias ElixirRigidPhysics.Geometry.Hull, as: Hull
iex> h = Hull.create_box(2, 2, 2)
iex> Hull.support_point(h, {0.2, 0.2, 0.2})
{1.0, 1.0, 1.0}
iex> alias ElixirRigidPhysics.Geometry.Hull, as: Hull
iex> h = Hull.create_box(2, 2, 2)
iex> Hull.support_point(h, {-0.15, -0.15, -0.25})
{-1.0, -1.0, -1.0}
"""
@spec support_point(hull, Vec3.vec3()) :: Vec3.vec3()
def support_point(hull(verts: verts), direction) do
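# Linear scan: keep the vertex with the largest dot product against the
# query direction, i.e. the point of the hull farthest along it.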
{_max_dot_product, best_vert} =
Enum.reduce(
verts,
{-@near_infinite, {-@near_infinite, -@near_infinite, -@near_infinite}},
fn vert, {best_dot, _best_guess} = acc ->
dot = Vec3.dot(direction, vert)
if dot > best_dot do
{dot, vert}
else
acc
end
end
)
best_vert
end
end
| lib/geometry/hull.ex | 0.845209 | 0.560253 | hull.ex | starcoder |
defmodule SwotEx.AcademicTlds do
@moduledoc """
List of TLDs that belong to educational institutions.
"""
@academic_tlds ~w(
ac.ae
ac.at
ac.bd
ac.be
ac.cn
ac.cr
ac.cy
ac.fj
ac.gg
ac.gn
ac.id
ac.il
ac.in
ac.ir
ac.jp
ac.ke
ac.kr
ac.ma
ac.me
ac.mu
ac.mw
ac.mz
ac.ni
ac.nz
ac.om
ac.pa
ac.pg
ac.pr
ac.rs
ac.ru
ac.rw
ac.sz
ac.th
ac.tz
ac.ug
ac.uk
ac.yu
ac.za
ac.zm
ac.zw
cc.al.us
cc.ar.us
cc.az.us
cc.ca.us
cc.co.us
cc.fl.us
cc.ga.us
cc.hi.us
cc.ia.us
cc.id.us
cc.il.us
cc.in.us
cc.ks.us
cc.ky.us
cc.la.us
cc.md.us
cc.me.us
cc.mi.us
cc.mn.us
cc.mo.us
cc.ms.us
cc.mt.us
cc.nc.us
cc.nd.us
cc.ne.us
cc.nj.us
cc.nm.us
cc.nv.us
cc.ny.us
cc.oh.us
cc.ok.us
cc.or.us
cc.pa.us
cc.ri.us
cc.sc.us
cc.sd.us
cc.tx.us
cc.va.us
cc.vi.us
cc.wa.us
cc.wi.us
cc.wv.us
cc.wy.us
ed.ao
ed.cr
ed.jp
edu
edu.af
edu.al
edu.ar
edu.au
edu.az
edu.ba
edu.bb
edu.bd
edu.bh
edu.bi
edu.bn
edu.bo
edu.br
edu.bs
edu.bt
edu.bz
edu.ck
edu.cn
edu.co
edu.cu
edu.do
edu.dz
edu.ec
edu.ee
edu.eg
edu.er
edu.es
edu.et
edu.ge
edu.gh
edu.gr
edu.gt
edu.hk
edu.hn
edu.ht
edu.in
edu.iq
edu.jm
edu.jo
edu.kg
edu.kh
edu.kn
edu.kw
edu.ky
edu.kz
edu.la
edu.lb
edu.lr
edu.lv
edu.ly
edu.me
edu.mg
edu.mk
edu.ml
edu.mm
edu.mn
edu.mo
edu.mt
edu.mv
edu.mw
edu.mx
edu.my
edu.ni
edu.np
edu.om
edu.pa
edu.pe
edu.ph
edu.pk
edu.pl
edu.pr
edu.ps
edu.pt
edu.pw
edu.py
edu.qa
edu.rs
edu.ru
edu.sa
edu.sc
edu.sd
edu.sg
edu.sh
edu.sl
edu.sv
edu.sy
edu.tr
edu.tt
edu.tw
edu.ua
edu.uy
edu.ve
edu.vn
edu.ws
edu.ye
edu.zm
es.kr
g12.br
hs.kr
ms.kr
sc.kr
sc.ug
sch.ae
sch.gg
sch.id
sch.ir
sch.je
sch.jo
sch.lk
sch.ly
sch.my
sch.om
sch.ps
sch.sa
sch.uk
school.nz
school.za
tec.ar.us
tec.az.us
tec.co.us
tec.fl.us
tec.ga.us
tec.ia.us
tec.id.us
tec.il.us
tec.in.us
tec.ks.us
tec.ky.us
tec.la.us
tec.ma.us
tec.md.us
tec.me.us
tec.mi.us
tec.mn.us
tec.mo.us
tec.ms.us
tec.mt.us
tec.nc.us
tec.nd.us
tec.nh.us
tec.nm.us
tec.nv.us
tec.ny.us
tec.oh.us
tec.ok.us
tec.pa.us
tec.sc.us
tec.sd.us
tec.tx.us
tec.ut.us
tec.vi.us
tec.wa.us
tec.wi.us
tec.wv.us
vic.edu.au
)
@doc """
Returns the list of academic TLDs.
"""
@spec academic_tlds() :: [String.t(), ...]
def academic_tlds, do: @academic_tlds
end
| lib/swotex/academic_tlds.ex | 0.527073 | 0.560433 | academic_tlds.ex | starcoder |
defmodule Data.NPC do
@moduledoc """
NPC Schema
"""
use Data.Schema
alias Data.Script
alias Data.Event
alias Data.Stats
alias Data.NPCItem
alias Data.NPCSpawner
@fields [
:level,
:name,
:tags,
:status_line,
:status_listen,
:description,
:experience_points,
:currency,
:is_quest_giver,
:is_trainer,
:stats,
:events,
:script,
:notes
]
schema "npcs" do
field(:original_id, :integer, virtual: true)
field(:name, :string)
field(:level, :integer, default: 1)
# given after defeat
field(:experience_points, :integer, default: 0)
field(:stats, Data.Stats)
field(:events, {:array, Event}, default: [])
field(:script, {:array, Script.Line})
field(:notes, :string)
field(:tags, {:array, :string}, default: [])
field(:status_line, :string, default: "[name] is here.")
field(:status_listen, :string)
field(:description, :string, default: "[status_line]")
field(:is_quest_giver, :boolean, default: false)
field(:is_trainer, :boolean, default: false)
field(:trainable_skills, {:array, :integer}, default: [])
field(:currency, :integer, default: 0)
has_many(:npc_items, NPCItem)
has_many(:npc_spawners, NPCSpawner)
timestamps()
end
@doc """
Get fields for an NPC, used for cloning.
"""
@spec fields() :: [atom()]
def fields(), do: @fields
def changeset(struct, params) do
struct
|> cast(params, [
:name,
:level,
:experience_points,
:stats,
:currency,
:notes,
:tags,
:events,
:script,
:status_line,
:status_listen,
:description,
:is_quest_giver,
:is_trainer
])
|> validate_required([
:name,
:level,
:experience_points,
:stats,
:currency,
:tags,
:events,
:status_line,
:description,
:is_quest_giver,
:is_trainer
])
|> validate_stats()
|> Event.validate_events()
|> Script.validate_script()
|> validate_script()
|> validate_status_line()
end
def trainable_skills_changeset(struct, params) do
struct
|> cast(params, [:trainable_skills])
|> validate_required([:trainable_skills])
|> validate_is_trainer()
end
defp validate_stats(changeset) do
case changeset do
%{changes: %{stats: stats}} when stats != nil ->
case Stats.valid_character?(stats) do
true -> changeset
false -> add_error(changeset, :stats, "are invalid")
end
_ ->
changeset
end
end
defp validate_status_line(changeset) do
changeset
|> validate_status_line_ends_in_period()
|> validate_status_line_includes_name()
end
defp validate_status_line_ends_in_period(changeset) do
case Regex.match?(~r/\.$/, get_field(changeset, :status_line)) do
true -> changeset
false -> add_error(changeset, :status_line, "must end with a period.")
end
end
defp validate_status_line_includes_name(changeset) do
# match the literal "[name]" placeholder (unescaped, [name] is a character class)
case Regex.match?(~r/\[name\]/, get_field(changeset, :status_line)) do
true -> changeset
false -> add_error(changeset, :status_line, "must include `[name]`")
end
end
defp validate_script(changeset) do
case get_field(changeset, :script) do
nil -> changeset
script -> _validate_script(changeset, script)
end
end
defp _validate_script(changeset, script) do
case Script.valid_for_npc?(script) do
true ->
changeset
false ->
add_error(
changeset,
:script,
"cannot include a conversation that has a trigger with quest"
)
end
end
defp validate_is_trainer(changeset) do
case get_field(changeset, :is_trainer) do
true -> changeset
false -> add_error(changeset, :trainable_skills, "not a trainer")
end
end
end
| lib/data/npc.ex | 0.721841 | 0.453806 | npc.ex | starcoder |
defmodule AWS.ElastiCache do
@moduledoc """
Amazon ElastiCache
Amazon ElastiCache is a web service that makes it easier to set up,
operate, and scale a distributed cache in the cloud.
With ElastiCache, customers get all of the benefits of a high-performance,
in-memory cache with less of the administrative burden involved in
launching and managing a distributed cache. The service makes setup,
scaling, and cluster failure handling much simpler than in a self-managed
cache deployment.
In addition, through integration with Amazon CloudWatch, customers get
enhanced visibility into the key performance statistics associated with
their cache and can receive alarms if a part of their cache runs hot.
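
## Usage

A minimal sketch of calling one of the generated actions. How the
`AWS.Client` struct is constructed depends on your aws-elixir version;
the fields below are illustrative assumptions, not a fixed API:

```elixir
client = %AWS.Client{
  access_key_id: System.get_env("AWS_ACCESS_KEY_ID"),
  secret_access_key: System.get_env("AWS_SECRET_ACCESS_KEY"),
  region: "us-east-1",
  endpoint: "amazonaws.com",
  proto: "https",
  port: 443
}

# Each action takes the client and a map of request parameters.
{:ok, result, _http_response} =
  AWS.ElastiCache.describe_cache_clusters(client, %{"MaxRecords" => 20})
```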
"""
@doc """
Adds up to 50 cost allocation tags to the named resource. A cost allocation
tag is a key-value pair where the key and value are case-sensitive. You can
use cost allocation tags to categorize and track your AWS costs.
When you apply tags to your ElastiCache resources, AWS generates a cost
allocation report as a comma-separated value (CSV) file with your usage and
costs aggregated by your tags. You can apply tags that represent business
categories (such as cost centers, application names, or owners) to organize
your costs across multiple services. For more information, see [Using Cost
Allocation Tags in Amazon
ElastiCache](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Tagging.html)
in the *ElastiCache User Guide*.
"""
def add_tags_to_resource(client, input, options \\ []) do
request(client, "AddTagsToResource", input, options)
end
@doc """
Allows network ingress to a cache security group. Applications using
ElastiCache must be running on Amazon EC2, and Amazon EC2 security groups
are used as the authorization mechanism.
<note> You cannot authorize ingress from an Amazon EC2 security group in
one region to an ElastiCache cluster in another region.
</note>
"""
def authorize_cache_security_group_ingress(client, input, options \\ []) do
request(client, "AuthorizeCacheSecurityGroupIngress", input, options)
end
@doc """
Apply the service update. For more information on service updates and
applying them, see [Applying Service
Updates](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/applying-updates.html).
"""
def batch_apply_update_action(client, input, options \\ []) do
request(client, "BatchApplyUpdateAction", input, options)
end
@doc """
Stop the service update. For more information on service updates and
stopping them, see [Stopping Service
Updates](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/stopping-self-service-updates.html).
"""
def batch_stop_update_action(client, input, options \\ []) do
request(client, "BatchStopUpdateAction", input, options)
end
@doc """
Complete the migration of data.
"""
def complete_migration(client, input, options \\ []) do
request(client, "CompleteMigration", input, options)
end
@doc """
Makes a copy of an existing snapshot.
<note> This operation is valid for Redis only.
</note> <important> Users or groups that have permissions to use the
`CopySnapshot` operation can create their own Amazon S3 buckets and copy
snapshots to it. To control access to your snapshots, use an IAM policy to
control who has the ability to use the `CopySnapshot` operation. For more
information about using IAM to control the use of ElastiCache operations,
see [Exporting
Snapshots](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/backups-exporting.html)
and [Authentication & Access
Control](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/IAM.html).
</important> You could receive the following error messages.
<p class="title"> **Error Messages**
<ul> <li> **Error Message:** The S3 bucket %s is outside of the region.
**Solution:** Create an Amazon S3 bucket in the same region as your
snapshot. For more information, see [Step 1: Create an Amazon S3
Bucket](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/backups-exporting.html#backups-exporting-create-s3-bucket)
in the ElastiCache User Guide.
</li> <li> **Error Message:** The S3 bucket %s does not exist.
**Solution:** Create an Amazon S3 bucket in the same region as your
snapshot. For more information, see [Step 1: Create an Amazon S3
Bucket](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/backups-exporting.html#backups-exporting-create-s3-bucket)
in the ElastiCache User Guide.
</li> <li> **Error Message:** The S3 bucket %s is not owned by the
authenticated user.
**Solution:** Create an Amazon S3 bucket in the same region as your
snapshot. For more information, see [Step 1: Create an Amazon S3
Bucket](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/backups-exporting.html#backups-exporting-create-s3-bucket)
in the ElastiCache User Guide.
</li> <li> **Error Message:** The authenticated user does not have
sufficient permissions to perform the desired activity.
**Solution:** Contact your system administrator to get the needed
permissions.
</li> <li> **Error Message:** The S3 bucket %s already contains an object
with key %s.
**Solution:** Give the `TargetSnapshotName` a new and unique value. If
exporting a snapshot, you could alternatively create a new Amazon S3 bucket
and use this same value for `TargetSnapshotName`.
</li> <li> **Error Message: ** ElastiCache has not been granted READ
permissions %s on the S3 Bucket.
**Solution:** Add List and Read permissions on the bucket. For more
information, see [Step 2: Grant ElastiCache Access to Your Amazon S3
Bucket](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/backups-exporting.html#backups-exporting-grant-access)
in the ElastiCache User Guide.
</li> <li> **Error Message: ** ElastiCache has not been granted WRITE
permissions %s on the S3 Bucket.
**Solution:** Add Upload/Delete permissions on the bucket. For more
information, see [Step 2: Grant ElastiCache Access to Your Amazon S3
Bucket](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/backups-exporting.html#backups-exporting-grant-access)
in the ElastiCache User Guide.
</li> <li> **Error Message: ** ElastiCache has not been granted READ_ACP
permissions %s on the S3 Bucket.
**Solution:** Add View Permissions on the bucket. For more information, see
[Step 2: Grant ElastiCache Access to Your Amazon S3
Bucket](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/backups-exporting.html#backups-exporting-grant-access)
in the ElastiCache User Guide.
</li> </ul>
"""
def copy_snapshot(client, input, options \\ []) do
request(client, "CopySnapshot", input, options)
end
@doc """
Creates a cluster. All nodes in the cluster run the same protocol-compliant
cache engine software, either Memcached or Redis.
This operation is not supported for Redis (cluster mode enabled) clusters.
"""
def create_cache_cluster(client, input, options \\ []) do
request(client, "CreateCacheCluster", input, options)
end
@doc """
Creates a new Amazon ElastiCache cache parameter group. An ElastiCache
cache parameter group is a collection of parameters and their values that
are applied to all of the nodes in any cluster or replication group using
the CacheParameterGroup.
A newly created CacheParameterGroup is an exact duplicate of the default
parameter group for the CacheParameterGroupFamily. To customize the newly
created CacheParameterGroup you can change the values of specific
parameters. For more information, see:
<ul> <li>
[ModifyCacheParameterGroup](https://docs.aws.amazon.com/AmazonElastiCache/latest/APIReference/API_ModifyCacheParameterGroup.html)
in the ElastiCache API Reference.
</li> <li> [Parameters and Parameter
Groups](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/ParameterGroups.html)
in the ElastiCache User Guide.
</li> </ul>
"""
def create_cache_parameter_group(client, input, options \\ []) do
request(client, "CreateCacheParameterGroup", input, options)
end
@doc """
Creates a new cache security group. Use a cache security group to control
access to one or more clusters.
Cache security groups are only used when you are creating a cluster outside
of an Amazon Virtual Private Cloud (Amazon VPC). If you are creating a
cluster inside of a VPC, use a cache subnet group instead. For more
information, see
[CreateCacheSubnetGroup](https://docs.aws.amazon.com/AmazonElastiCache/latest/APIReference/API_CreateCacheSubnetGroup.html).
"""
def create_cache_security_group(client, input, options \\ []) do
request(client, "CreateCacheSecurityGroup", input, options)
end
@doc """
Creates a new cache subnet group.
Use this parameter only when you are creating a cluster in an Amazon
Virtual Private Cloud (Amazon VPC).
"""
def create_cache_subnet_group(client, input, options \\ []) do
request(client, "CreateCacheSubnetGroup", input, options)
end
@doc """
Global Datastore for Redis offers fully managed, fast, reliable and secure
cross-region replication. Using Global Datastore for Redis, you can create
cross-region read replica clusters for ElastiCache for Redis to enable
low-latency reads and disaster recovery across regions. For more
information, see [Replication Across Regions Using Global
Datastore](/AmazonElastiCache/latest/red-ug/Redis-Global-Clusters.html).
<ul> <li> The **GlobalReplicationGroupIdSuffix** is the name of the Global
Datastore.
</li> <li> The **PrimaryReplicationGroupId** represents the name of the
primary cluster that accepts writes and will replicate updates to the
secondary cluster.
</li> </ul>
"""
def create_global_replication_group(client, input, options \\ []) do
request(client, "CreateGlobalReplicationGroup", input, options)
end
@doc """
Creates a Redis (cluster mode disabled) or a Redis (cluster mode enabled)
replication group.
This API can be used to create a standalone regional replication group or a
secondary replication group associated with a Global Datastore.
A Redis (cluster mode disabled) replication group is a collection of
clusters, where one of the clusters is a read/write primary and the others
are read-only replicas. Writes to the primary are asynchronously propagated
to the replicas.
A Redis (cluster mode enabled) replication group is a collection of 1 to 90
node groups (shards). Each node group (shard) has one read/write primary
node and up to 5 read-only replica nodes. Writes to the primary are
asynchronously propagated to the replicas. Redis (cluster mode enabled)
replication groups partition the data across node groups (shards).
When a Redis (cluster mode disabled) replication group has been
successfully created, you can add one or more read replicas to it, up to a
total of 5 read replicas. If you need to increase or decrease the number of
node groups (console: shards), you can avail yourself of ElastiCache for
Redis' scaling. For more information, see [Scaling ElastiCache for Redis
Clusters](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Scaling.html)
in the *ElastiCache User Guide*.
<note> This operation is valid for Redis only.
</note>
"""
def create_replication_group(client, input, options \\ []) do
request(client, "CreateReplicationGroup", input, options)
end
@doc """
Creates a copy of an entire cluster or replication group at a specific
moment in time.
<note> This operation is valid for Redis only.
</note>
"""
def create_snapshot(client, input, options \\ []) do
request(client, "CreateSnapshot", input, options)
end
@doc """
For Redis engine version 6.04 onwards: Creates a Redis user. For more
information, see [Using Role Based Access Control
(RBAC)](http://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Clusters.RBAC.html).
"""
def create_user(client, input, options \\ []) do
request(client, "CreateUser", input, options)
end
@doc """
For Redis engine version 6.04 onwards: Creates a Redis user group. For more
information, see [Using Role Based Access Control
(RBAC)](http://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Clusters.RBAC.html)
"""
def create_user_group(client, input, options \\ []) do
request(client, "CreateUserGroup", input, options)
end
@doc """
Decreases the number of node groups in a Global Datastore
"""
def decrease_node_groups_in_global_replication_group(client, input, options \\ []) do
request(client, "DecreaseNodeGroupsInGlobalReplicationGroup", input, options)
end
@doc """
Dynamically decreases the number of replicas in a Redis (cluster mode
disabled) replication group or the number of replica nodes in one or more
node groups (shards) of a Redis (cluster mode enabled) replication group.
This operation is performed with no cluster down time.
"""
def decrease_replica_count(client, input, options \\ []) do
request(client, "DecreaseReplicaCount", input, options)
end
@doc """
Deletes a previously provisioned cluster. `DeleteCacheCluster` deletes all
associated cache nodes, node endpoints and the cluster itself. When you
receive a successful response from this operation, Amazon ElastiCache
immediately begins deleting the cluster; you cannot cancel or revert this
operation.
This operation is not valid for:
<ul> <li> Redis (cluster mode enabled) clusters
</li> <li> A cluster that is the last read replica of a replication group
</li> <li> A node group (shard) that has Multi-AZ mode enabled
</li> <li> A cluster from a Redis (cluster mode enabled) replication group
</li> <li> A cluster that is not in the `available` state
</li> </ul>
"""
def delete_cache_cluster(client, input, options \\ []) do
request(client, "DeleteCacheCluster", input, options)
end
@doc """
Deletes the specified cache parameter group. You cannot delete a cache
parameter group if it is associated with any cache clusters.
"""
def delete_cache_parameter_group(client, input, options \\ []) do
request(client, "DeleteCacheParameterGroup", input, options)
end
@doc """
Deletes a cache security group.
<note> You cannot delete a cache security group if it is associated with
any clusters.
</note>
"""
def delete_cache_security_group(client, input, options \\ []) do
request(client, "DeleteCacheSecurityGroup", input, options)
end
@doc """
Deletes a cache subnet group.
<note> You cannot delete a cache subnet group if it is associated with any
clusters.
</note>
"""
def delete_cache_subnet_group(client, input, options \\ []) do
request(client, "DeleteCacheSubnetGroup", input, options)
end
@doc """
Deleting a Global Datastore is a two-step process:
<ul> <li> First, you must `DisassociateGlobalReplicationGroup` to remove
the secondary clusters in the Global Datastore.
</li> <li> Once the Global Datastore contains only the primary cluster, you
can use DeleteGlobalReplicationGroup API to delete the Global Datastore
while retaining the primary cluster using Retain…= true.
</li> </ul> Since the Global Datastore has only a primary cluster, you can
delete the Global Datastore while retaining the primary by setting
`RetainPrimaryCluster=true`.
When you receive a successful response from this operation, Amazon
ElastiCache immediately begins deleting the selected resources; you cannot
cancel or revert this operation.
"""
def delete_global_replication_group(client, input, options \\ []) do
request(client, "DeleteGlobalReplicationGroup", input, options)
end
@doc """
Deletes an existing replication group. By default, this operation deletes
the entire replication group, including the primary/primaries and all of
the read replicas. If the replication group has only one primary, you can
optionally delete only the read replicas, while retaining the primary by
setting `RetainPrimaryCluster=true`.
When you receive a successful response from this operation, Amazon
ElastiCache immediately begins deleting the selected resources; you cannot
cancel or revert this operation.
<note> This operation is valid for Redis only.
</note>
"""
def delete_replication_group(client, input, options \\ []) do
request(client, "DeleteReplicationGroup", input, options)
end
@doc """
Deletes an existing snapshot. When you receive a successful response from
this operation, ElastiCache immediately begins deleting the snapshot; you
cannot cancel or revert this operation.
<note> This operation is valid for Redis only.
</note>
"""
def delete_snapshot(client, input, options \\ []) do
request(client, "DeleteSnapshot", input, options)
end
@doc """
For Redis engine version 6.04 onwards: Deletes a user. The user will be
removed from all user groups and in turn removed from all replication
groups. For more information, see [Using Role Based Access Control
(RBAC)](http://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Clusters.RBAC.html).
"""
def delete_user(client, input, options \\ []) do
request(client, "DeleteUser", input, options)
end
@doc """
For Redis engine version 6.04 onwards: Deletes a user group. The user group
must first be disassociated from the replication group before it can be
deleted. For more information, see [Using Role Based Access Control
(RBAC)](http://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Clusters.RBAC.html).
"""
def delete_user_group(client, input, options \\ []) do
request(client, "DeleteUserGroup", input, options)
end
@doc """
Returns information about all provisioned clusters if no cluster identifier
is specified, or about a specific cache cluster if a cluster identifier is
supplied.
By default, abbreviated information about the clusters is returned. You can
use the optional *ShowCacheNodeInfo* flag to retrieve detailed information
about the cache nodes associated with the clusters. These details include
the DNS address and port for the cache node endpoint.
If the cluster is in the *creating* state, only cluster-level information
is displayed until all of the nodes are successfully provisioned.
If the cluster is in the *deleting* state, only cluster-level information
is displayed.
If cache nodes are currently being added to the cluster, node endpoint
information and creation time for the additional nodes are not displayed
until they are completely provisioned. When the cluster state is
*available*, the cluster is ready for use.
If cache nodes are currently being removed from the cluster, no endpoint
information for the removed nodes is displayed.
"""
def describe_cache_clusters(client, input, options \\ []) do
request(client, "DescribeCacheClusters", input, options)
end
@doc """
Returns a list of the available cache engines and their versions.
"""
def describe_cache_engine_versions(client, input, options \\ []) do
request(client, "DescribeCacheEngineVersions", input, options)
end
@doc """
Returns a list of cache parameter group descriptions. If a cache parameter
group name is specified, the list contains only the descriptions for that
group.
"""
def describe_cache_parameter_groups(client, input, options \\ []) do
request(client, "DescribeCacheParameterGroups", input, options)
end
@doc """
Returns the detailed parameter list for a particular cache parameter group.
"""
def describe_cache_parameters(client, input, options \\ []) do
request(client, "DescribeCacheParameters", input, options)
end
@doc """
Returns a list of cache security group descriptions. If a cache security
group name is specified, the list contains only the description of that
group. This is applicable only when you have ElastiCache in a Classic setup.
"""
def describe_cache_security_groups(client, input, options \\ []) do
request(client, "DescribeCacheSecurityGroups", input, options)
end
@doc """
Returns a list of cache subnet group descriptions. If a subnet group name
is specified, the list contains only the description of that group. This is
applicable only when you have ElastiCache in VPC setup. All ElastiCache
clusters now launch in VPC by default.
"""
def describe_cache_subnet_groups(client, input, options \\ []) do
request(client, "DescribeCacheSubnetGroups", input, options)
end
@doc """
Returns the default engine and system parameter information for the
specified cache engine.
"""
def describe_engine_default_parameters(client, input, options \\ []) do
request(client, "DescribeEngineDefaultParameters", input, options)
end
@doc """
Returns events related to clusters, cache security groups, and cache
parameter groups. You can obtain events specific to a particular cluster,
cache security group, or cache parameter group by providing the name as a
parameter.
By default, only the events occurring within the last hour are returned;
however, you can retrieve up to 14 days' worth of events if necessary.
"""
def describe_events(client, input, options \\ []) do
request(client, "DescribeEvents", input, options)
end
@doc """
Returns information about a particular global replication group. If no
identifier is specified, returns information about all Global Datastores.
"""
def describe_global_replication_groups(client, input, options \\ []) do
request(client, "DescribeGlobalReplicationGroups", input, options)
end
@doc """
Returns information about a particular replication group. If no identifier
is specified, `DescribeReplicationGroups` returns information about all
replication groups.
<note> This operation is valid for Redis only.
</note>
"""
def describe_replication_groups(client, input, options \\ []) do
request(client, "DescribeReplicationGroups", input, options)
end
@doc """
Returns information about reserved cache nodes for this account, or about a
specified reserved cache node.
"""
def describe_reserved_cache_nodes(client, input, options \\ []) do
request(client, "DescribeReservedCacheNodes", input, options)
end
@doc """
Lists available reserved cache node offerings.
"""
def describe_reserved_cache_nodes_offerings(client, input, options \\ []) do
request(client, "DescribeReservedCacheNodesOfferings", input, options)
end
@doc """
Returns details of the service updates
"""
def describe_service_updates(client, input, options \\ []) do
request(client, "DescribeServiceUpdates", input, options)
end
@doc """
Returns information about cluster or replication group snapshots. By
default, `DescribeSnapshots` lists all of your snapshots; it can optionally
describe a single snapshot, or just the snapshots associated with a
particular cache cluster.
<note> This operation is valid for Redis only.
</note>
"""
def describe_snapshots(client, input, options \\ []) do
request(client, "DescribeSnapshots", input, options)
end
@doc """
Returns details of the update actions
"""
def describe_update_actions(client, input, options \\ []) do
request(client, "DescribeUpdateActions", input, options)
end
@doc """
Returns a list of user groups.
"""
def describe_user_groups(client, input, options \\ []) do
request(client, "DescribeUserGroups", input, options)
end
@doc """
Returns a list of users.
"""
def describe_users(client, input, options \\ []) do
request(client, "DescribeUsers", input, options)
end
@doc """
Remove a secondary cluster from the Global Datastore using the Global
Datastore name. The secondary cluster will no longer receive updates from
the primary cluster, but will remain as a standalone cluster in that AWS
region.
"""
def disassociate_global_replication_group(client, input, options \\ []) do
request(client, "DisassociateGlobalReplicationGroup", input, options)
end
@doc """
Used to failover the primary region to a selected secondary region. The
selected secondary region will become primary, and all other clusters will
become secondary.
"""
def failover_global_replication_group(client, input, options \\ []) do
request(client, "FailoverGlobalReplicationGroup", input, options)
end
@doc """
Increase the number of node groups in the Global Datastore
"""
def increase_node_groups_in_global_replication_group(client, input, options \\ []) do
request(client, "IncreaseNodeGroupsInGlobalReplicationGroup", input, options)
end
@doc """
Dynamically increases the number of replicas in a Redis (cluster mode
disabled) replication group or the number of replica nodes in one or more
node groups (shards) of a Redis (cluster mode enabled) replication group.
This operation is performed with no cluster down time.
"""
def increase_replica_count(client, input, options \\ []) do
request(client, "IncreaseReplicaCount", input, options)
end
@doc """
Lists all available node types that you can scale your Redis cluster's or
replication group's current node type up to.
When you use the `ModifyCacheCluster` or `ModifyReplicationGroup`
operations to scale your cluster or replication group, the value of the
`CacheNodeType` parameter must be one of the node types returned by this
operation.
"""
def list_allowed_node_type_modifications(client, input, options \\ []) do
request(client, "ListAllowedNodeTypeModifications", input, options)
end
@doc """
Lists all cost allocation tags currently on the named resource. A `cost
allocation tag` is a key-value pair where the key is case-sensitive and the
value is optional. You can use cost allocation tags to categorize and track
your AWS costs.
If the cluster is not in the *available* state, `ListTagsForResource`
returns an error.
You can have a maximum of 50 cost allocation tags on an ElastiCache
resource. For more information, see [Monitoring Costs with
Tags](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Tagging.html).
"""
def list_tags_for_resource(client, input, options \\ []) do
request(client, "ListTagsForResource", input, options)
end
@doc """
Modifies the settings for a cluster. You can use this operation to change
one or more cluster configuration parameters by specifying the parameters
and the new values.
"""
def modify_cache_cluster(client, input, options \\ []) do
request(client, "ModifyCacheCluster", input, options)
end
@doc """
Modifies the parameters of a cache parameter group. You can modify up to 20
parameters in a single request by submitting a list parameter name and
value pairs.
"""
def modify_cache_parameter_group(client, input, options \\ []) do
request(client, "ModifyCacheParameterGroup", input, options)
end
@doc """
Modifies an existing cache subnet group.
"""
def modify_cache_subnet_group(client, input, options \\ []) do
request(client, "ModifyCacheSubnetGroup", input, options)
end
@doc """
Modifies the settings for a Global Datastore.
"""
def modify_global_replication_group(client, input, options \\ []) do
request(client, "ModifyGlobalReplicationGroup", input, options)
end
@doc """
Modifies the settings for a replication group.
<ul> <li> [Scaling for Amazon ElastiCache for Redis (cluster mode
enabled)](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/scaling-redis-cluster-mode-enabled.html)
in the ElastiCache User Guide
</li> <li>
[ModifyReplicationGroupShardConfiguration](https://docs.aws.amazon.com/AmazonElastiCache/latest/APIReference/API_ModifyReplicationGroupShardConfiguration.html)
in the ElastiCache API Reference
</li> </ul> <note> This operation is valid for Redis only.
</note>
"""
def modify_replication_group(client, input, options \\ []) do
request(client, "ModifyReplicationGroup", input, options)
end
@doc """
Modifies a replication group's shards (node groups) by allowing you to add
shards, remove shards, or rebalance the keyspaces among existing shards.
"""
def modify_replication_group_shard_configuration(client, input, options \\ []) do
request(client, "ModifyReplicationGroupShardConfiguration", input, options)
end
@doc """
Changes user password(s) and/or access string.
"""
def modify_user(client, input, options \\ []) do
request(client, "ModifyUser", input, options)
end
@doc """
Changes the list of users that belong to the user group.
"""
def modify_user_group(client, input, options \\ []) do
request(client, "ModifyUserGroup", input, options)
end
@doc """
Allows you to purchase a reserved cache node offering.
"""
def purchase_reserved_cache_nodes_offering(client, input, options \\ []) do
request(client, "PurchaseReservedCacheNodesOffering", input, options)
end
@doc """
Redistribute slots to ensure uniform distribution across existing shards in
the cluster.
"""
def rebalance_slots_in_global_replication_group(client, input, options \\ []) do
request(client, "RebalanceSlotsInGlobalReplicationGroup", input, options)
end
@doc """
Reboots some, or all, of the cache nodes within a provisioned cluster. This
operation applies any modified cache parameter groups to the cluster. The
reboot operation takes place as soon as possible, and results in a
momentary outage to the cluster. During the reboot, the cluster status is
set to REBOOTING.
The reboot causes the contents of the cache (for each cache node being
rebooted) to be lost.
When the reboot is complete, a cluster event is created.
Rebooting a cluster is currently supported on Memcached and Redis (cluster
mode disabled) clusters. Rebooting is not supported on Redis (cluster mode
enabled) clusters.
If you make changes to parameters that require a Redis (cluster mode
enabled) cluster reboot for the changes to be applied, see [Rebooting a
Cluster](http://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Clusters.Rebooting.html)
for an alternate process.
"""
def reboot_cache_cluster(client, input, options \\ []) do
request(client, "RebootCacheCluster", input, options)
end
@doc """
Removes the tags identified by the `TagKeys` list from the named resource.
"""
def remove_tags_from_resource(client, input, options \\ []) do
request(client, "RemoveTagsFromResource", input, options)
end
@doc """
Modifies the parameters of a cache parameter group to the engine or system
default value. You can reset specific parameters by submitting a list of
parameter names. To reset the entire cache parameter group, specify the
`ResetAllParameters` and `CacheParameterGroupName` parameters.
"""
def reset_cache_parameter_group(client, input, options \\ []) do
request(client, "ResetCacheParameterGroup", input, options)
end
@doc """
Revokes ingress from a cache security group. Use this operation to disallow
access from an Amazon EC2 security group that had been previously
authorized.
"""
def revoke_cache_security_group_ingress(client, input, options \\ []) do
request(client, "RevokeCacheSecurityGroupIngress", input, options)
end
@doc """
Start the migration of data.
"""
def start_migration(client, input, options \\ []) do
request(client, "StartMigration", input, options)
end
@doc """
Represents the input of a `TestFailover` operation which test automatic
failover on a specified node group (called shard in the console) in a
replication group (called cluster in the console).
<p class="title"> **Note the following**
<ul> <li> A customer can use this operation to test automatic failover on
up to 5 shards (called node groups in the ElastiCache API and AWS CLI) in
any rolling 24-hour period.
</li> <li> If calling this operation on shards in different clusters
(called replication groups in the API and CLI), the calls can be made
concurrently.
</li> <li> If calling this operation multiple times on different shards in
the same Redis (cluster mode enabled) replication group, the first node
replacement must complete before a subsequent call can be made.
</li> <li> To determine whether the node replacement is complete you can
check Events using the Amazon ElastiCache console, the AWS CLI, or the
ElastiCache API. Look for the following automatic failover related events,
listed here in order of occurrence:
<ol> <li> Replication group message: `Test Failover API called for node
group <node-group-id>`
</li> <li> Cache cluster message: `Failover from master node
<primary-node-id> to replica node <node-id> completed`
</li> <li> Replication group message: `Failover from master node
<primary-node-id> to replica node <node-id> completed`
</li> <li> Cache cluster message: `Recovering cache nodes <node-id>`
</li> <li> Cache cluster message: `Finished recovery for cache nodes
<node-id>`
</li> </ol> For more information see:
<ul> <li> [Viewing ElastiCache
Events](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/ECEvents.Viewing.html)
in the *ElastiCache User Guide*
</li> <li>
[DescribeEvents](https://docs.aws.amazon.com/AmazonElastiCache/latest/APIReference/API_DescribeEvents.html)
in the ElastiCache API Reference
</li> </ul> </li> </ul> Also see, [Testing Multi-AZ
](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/AutoFailover.html#auto-failover-test)
in the *ElastiCache User Guide*.
"""
def test_failover(client, input, options \\ []) do
request(client, "TestFailover", input, options)
end
@spec request(AWS.Client.t(), binary(), map(), list()) ::
{:ok, map() | nil, map()}
| {:error, term()}
defp request(client, action, input, options) do
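# Every action funnels through here: merge in Action/Version, form-encode
# the input as the AWS query protocol, sign with SigV4, and POST.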
client = %{client | service: "elasticache"}
host = build_host("elasticache", client)
url = build_url(host, client)
headers = [
{"Host", host},
{"Content-Type", "application/x-www-form-urlencoded"}
]
input = Map.merge(input, %{"Action" => action, "Version" => "2015-02-02"})
payload = encode!(client, input)
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
post(client, url, payload, headers, options)
end
defp post(client, url, payload, headers, options) do
case AWS.Client.request(client, :post, url, payload, headers, options) do
{:ok, %{status_code: 200, body: body} = response} ->
body = if body != "", do: decode!(client, body)
{:ok, body, response}
{:ok, response} ->
{:error, {:unexpected_response, response}}
error = {:error, _reason} -> error
end
end
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
endpoint
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
defp encode!(client, payload) do
AWS.Client.encode!(client, payload, :query)
end
defp decode!(client, payload) do
AWS.Client.decode!(client, payload, :xml)
end
end
| lib/aws/generated/elasticache.ex | 0.775775 | 0.606673 | elasticache.ex | starcoder |
defmodule EllipticCurve.PublicKey do
@moduledoc """
Used to convert public keys between struct and .der or .pem formats.
Functions:
- toPem()
- toDer()
- fromPem()
- fromPem!()
- fromDer()
- fromDer!()
"""
alias EllipticCurve.Utils.{Der, BinaryAscii, Point}
alias EllipticCurve.{Curve}
alias EllipticCurve.PublicKey.{Data}
@doc """
Converts a public key in decoded struct format into a pem string
Parameters:
- publicKey [%EllipticCurve.PublicKey.Data]: decoded public key struct;
Returns:
- pem [string]: public key in pem format
## Example:
iex> EllipticCurve.PublicKey.toPem(%EllipticCurve.PublicKey.Data{...})
"-----<KEY>"
"""
def toPem(publicKey) do
publicKey
|> toDer()
|> Der.toPem("PUBLIC KEY")
end
@doc """
Converts a public key in decoded struct format into a der string (raw binary)
Parameters:
- publicKey [%EllipticCurve.PublicKey.Data]: decoded public key struct;
Returns:
- der [string]: public key in der format
## Example:
iex> EllipticCurve.PublicKey.toDer(%EllipticCurve.PublicKey.Data{...})
<<48, 86, 48, 16, 6, 7, 42, 134, 72, 206, 61, ...>>
"""
def toDer(publicKey) do
Der.encodeSequence([
Der.encodeSequence([
Der.encodeOid([1, 2, 840, 10045, 2, 1]),
Der.encodeOid(publicKey.curve.oid)
]),
Der.encodeBitString(toString(publicKey, true))
])
end
@doc false
def toString(publicKey, encoded \\ false) do
curveLength = Curve.getLength(publicKey.curve)
xString =
BinaryAscii.stringFromNumber(
publicKey.point.x,
curveLength
)
yString =
BinaryAscii.stringFromNumber(
publicKey.point.y,
curveLength
)
if encoded do
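# Leading \x00 is the DER bit string's unused-bits byte; \x04 marks an
# uncompressed SEC1 point (X coordinate followed by Y coordinate).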
"\x00\x04" <> xString <> yString
else
xString <> yString
end
end
@doc """
Converts a public key in pem format into decoded struct format
Parameters:
- pem [string]: public key in pem format
Returns {:ok, publicKey}:
- publicKey [%EllipticCurve.PublicKey.Data]: decoded public key struct;
## Example:
iex> EllipticCurve.PublicKey.fromPem("-----<KEY> KEY-----\n")
{:ok, %EllipticCurve.PublicKey.Data{...}}
"""
def fromPem(pem) do
{:ok, fromPem!(pem)}
rescue
e in RuntimeError -> {:error, e}
end
@doc """
Converts a public key in pem format into decoded struct format
Parameters:
- pem [string]: public key in pem format
Returns:
- publicKey [%EllipticCurve.PublicKey.Data]: decoded public key struct;
## Example:
iex> EllipticCurve.PublicKey.fromPem!("-----BEGIN PUBLIC KEY-----\<KEY> PUBLIC KEY-----\n")
%EllipticCurve.PublicKey.Data{...}
"""
def fromPem!(pem) do
pem
|> Der.fromPem()
|> fromDer!()
end
@doc """
Converts a public key in der (raw binary) format into decoded struct format
Parameters:
- der [string]: public key in der format
Returns {:ok, publicKey}:
- publicKey [%EllipticCurve.PublicKey.Data]: decoded public key struct;
## Example:
iex> EllipticCurve.PublicKey.fromDer(<<48, 86, 48, 16, 6, 7, 42, 134, ...>>)
{:ok, %EllipticCurve.PublicKey.Data{...}}
"""
def fromDer(der) do
{:ok, fromDer!(der)}
rescue
e in RuntimeError -> {:error, e}
end
@doc """
Converts a public key in der (raw binary) format into decoded struct format
Parameters:
- der [string]: public key in der format
Returns:
- publicKey [%EllipticCurve.PublicKey.Data]: decoded public key struct;
## Example:
iex> EllipticCurve.PublicKey.fromDer!(<<48, 86, 48, 16, 6, 7, 42, 134, ...>>)
%EllipticCurve.PublicKey.Data{...}
"""
def fromDer!(der) do
{s1, empty} = Der.removeSequence(der)
if byte_size(empty) != 0 do
raise "trailing junk after DER public key: #{BinaryAscii.hexFromBinary(empty)}"
end
{s2, pointBitString} = Der.removeSequence(s1)
{_oidPublicKey, rest} = Der.removeObject(s2)
{oidCurve, empty} = Der.removeObject(rest)
if byte_size(empty) != 0 do
raise "trailing junk after DER public key objects: #{BinaryAscii.hexFromBinary(empty)}"
end
curveData = Curve.KnownCurves.getCurveByOid(oidCurve)
{pointString, empty} = Der.removeBitString(pointBitString)
if byte_size(empty) != 0 do
raise "trailing junk after public key point-string: #{BinaryAscii.hexFromBinary(empty)}"
end
binary_part(pointString, 2, byte_size(pointString) - 2)
|> fromString!(curveData.name)
end
@doc false
def fromString(string, curve \\ :secp256k1, validatePoint \\ true) do
{:ok, fromString!(string, curve, validatePoint)}
rescue
e in RuntimeError -> {:error, e}
end
@doc false
def fromString!(string, curve \\ :secp256k1, validatePoint \\ true) do
curveData = Curve.KnownCurves.getCurveByName(curve)
baseLength = Curve.getLength(curveData)
xs = binary_part(string, 0, baseLength)
ys = binary_part(string, baseLength, byte_size(string) - baseLength)
point = %Point{
x: BinaryAscii.numberFromString(xs),
y: BinaryAscii.numberFromString(ys)
}
if validatePoint and !Curve.contains?(curveData, point) do
# raise (not throw) so fromString/3 can rescue and return {:error, e}
raise "point (#{point.x}, #{point.y}) is not valid for curve #{curveData.name}"
end
%Data{point: point, curve: curveData}
end
end
| lib/publicKey/publicKey.ex | 0.925361 | 0.522019 | publicKey.ex | starcoder |
defmodule EdgehogWeb.Schema.AstarteTypes do
use Absinthe.Schema.Notation
use Absinthe.Relay.Schema.Notation, :modern
alias EdgehogWeb.Middleware
alias EdgehogWeb.Resolvers
@desc """
Describes a set of filters to apply when fetching a list of devices.
When multiple filters are specified, they are applied in an AND fashion to \
further refine the results.
"""
input_object :device_filter do
@desc "Whether to return devices connected or not to Astarte."
field :online, :boolean
@desc """
A string to match against the device ID. The match is case-insensitive \
and tests whether the string is included in the device ID.
"""
field :device_id, :string
@desc """
A string to match against the part number of the device's system model.
The match is case-insensitive and tests whether the string is included in \
the part number of the device's system model.
"""
field :system_model_part_number, :string
@desc """
A string to match against the handle of the device's system model.
The match is case-insensitive and tests whether the string is included in \
the handle of the device's system model.
"""
field :system_model_handle, :string
@desc """
A string to match against the name of the device's system model.
The match is case-insensitive and tests whether the string is included in \
the name of the device's system model.
"""
field :system_model_name, :string
@desc """
A string to match against the part number of the device's hardware type.
The match is case-insensitive and tests whether the string is included in \
the part number of the device's hardware type.
"""
field :hardware_type_part_number, :string
@desc """
A string to match against the handle of the device's hardware type.
The match is case-insensitive and tests whether the string is included in \
the handle of the device's hardware type.
"""
field :hardware_type_handle, :string
@desc """
A string to match against the name of the device's hardware type.
The match is case-insensitive and tests whether the string is included in \
the name of the device's hardware type.
"""
field :hardware_type_name, :string
end
@desc """
Describes hardware-related info of a device.
It exposes data read by a device's operating system about the underlying \
hardware.
"""
object :hardware_info do
@desc "The architecture of the CPU."
field :cpu_architecture, :string
@desc "The reference code of the CPU model."
field :cpu_model, :string
@desc "The display name of the CPU model."
field :cpu_model_name, :string
@desc "The vendor's name."
field :cpu_vendor, :string
@desc "The Bytes count of memory."
field :memory_total_bytes, :integer
end
@desc "Describes the current usage of a storage unit on a device."
object :storage_unit do
@desc "The label of the storage unit."
field :label, non_null(:string)
@desc "The total number of bytes of the storage unit."
field :total_bytes, :integer
@desc "The number of free bytes of the storage unit."
field :free_bytes, :integer
end
@desc """
Describes the position of a device.
The position is estimated by means of Edgehog's Geolocation modules and the \
data published by the device.
"""
object :device_location do
@desc "The latitude coordinate."
field :latitude, non_null(:float)
@desc "The longitude coordinate."
field :longitude, non_null(:float)
@desc "The accuracy of the measurement, in meters."
field :accuracy, :float
@desc "The formatted address estimated for the position."
field :address, :string
@desc "The date at which the measurement was made."
field :timestamp, non_null(:datetime)
end
@desc "Describes an operating system's base image for a device."
object :base_image do
@desc "The name of the image."
field :name, :string
@desc "The version of the image."
field :version, :string
@desc "Human readable build identifier of the image."
field :build_id, :string
@desc """
A unique string that identifies the release, usually the image hash.
"""
field :fingerprint, :string
end
@desc "Describes an operating system of a device."
object :os_info do
@desc "The name of the operating system."
field :name, :string
@desc "The version of the operating system."
field :version, :string
end
@desc """
Describes the current status of the operating system of a device.
"""
object :system_status do
@desc "The identifier of the performed boot sequence."
field :boot_id, :string
@desc "The number of free bytes of memory."
field :memory_free_bytes, :integer
@desc "The number of running tasks on the system."
field :task_count, :integer
@desc "The number of milliseconds since the last system boot."
field :uptime_milliseconds, :integer
@desc "The date at which the system status was read."
field :timestamp, non_null(:datetime)
end
@desc """
Describes the list of WiFi Access Points found by the device.
"""
object :wifi_scan_result do
@desc "The channel used by the Access Point."
field :channel, :integer
@desc "The ESSID advertised by the Access Point."
field :essid, :string
@desc "The MAC address advertised by the Access Point."
field :mac_address, :string
@desc "The power of the radio signal, measured in dBm."
field :rssi, :integer
@desc "The date at which the device found the Access Point."
field :timestamp, non_null(:datetime)
end
@desc """
The current status of the battery.
"""
enum :battery_status do
@desc "The battery is charging."
value :charging
@desc "The battery is discharging."
value :discharging
@desc "The battery is idle."
value :idle
@desc """
The battery is either in a charging or in an idle state, \
since the hardware cannot distinguish between the two.
"""
value :either_idle_or_charging
@desc "The battery is in a failed state."
value :failure
@desc "The battery is removed."
value :removed
@desc "The battery status cannot be determined."
value :unknown
end
@desc "Describes a battery slot of a device."
object :battery_slot do
@desc "The identifier of the battery slot."
field :slot, non_null(:string)
@desc "Battery level estimated percentage [0.0%-100.0%]"
field :level_percentage, :float
@desc "Battery level measurement absolute error [0.0-100.0]"
field :level_absolute_error, :float
@desc "The current status of the battery."
field :status, :battery_status do
resolve &Resolvers.Astarte.battery_status_to_enum/3
middleware Middleware.ErrorHandler
end
end
@desc """
The current GSM/LTE registration status of the modem.
"""
enum :modem_registration_status do
@desc "Not registered, modem is not currently searching a new operator to register to."
value :not_registered
@desc "Registered, home network."
value :registered
@desc "Not registered, but modem is currently searching a new operator to register to."
value :searching_operator
@desc "Registration denied."
value :registration_denied
@desc "Unknown (e.g. out of GERAN/UTRAN/E-UTRAN coverage)."
value :unknown
@desc "Registered, roaming."
value :registered_roaming
end
@desc """
The current access technology of the serving cell.
"""
enum :modem_technology do
@desc "GSM."
value :gsm
@desc "GSM Compact."
value :gsm_compact
@desc "UTRAN."
value :utran
@desc "GSM with EGPRS."
value :gsm_egprs
@desc "UTRAN with HSDPA."
value :utran_hsdpa
@desc "UTRAN with HSUPA."
value :utran_hsupa
@desc "UTRAN with HSDPA and HSUPA."
value :utran_hsdpa_hsupa
@desc "E-UTRAN."
value :eutran
end
@desc "Describes a modem of a device."
object :modem do
@desc "The identifier of the modem."
field :slot, non_null(:string)
@desc "The operator apn address."
field :apn, :string
@desc "The modem IMEI code."
field :imei, :string
@desc "The SIM IMSI code."
field :imsi, :string
@desc "Carrier operator name."
field :carrier, :string
@desc "Unique identifier of the cell."
field :cell_id, :integer
@desc "The cell tower's Mobile Country Code (MCC)."
field :mobile_country_code, :integer
@desc "The cell tower's Mobile Network Code."
field :mobile_network_code, :integer
@desc "The Local Area Code."
field :local_area_code, :integer
@desc "The current registration status of the modem."
field :registration_status, :modem_registration_status do
resolve &Resolvers.Astarte.modem_registration_status_to_enum/3
middleware Middleware.ErrorHandler
end
@desc "Signal strength in dBm."
field :rssi, :float
@desc "Access Technology"
field :technology, :modem_technology do
resolve &Resolvers.Astarte.modem_technology_to_enum/3
middleware Middleware.ErrorHandler
end
end
@desc "Describes an Edgehog runtime."
object :runtime_info do
@desc "The name of the Edgehog runtime."
field :name, :string
@desc "The version of the Edgehog runtime."
field :version, :string
@desc "The environment of the Edgehog runtime."
field :environment, :string
@desc "The URL that uniquely identifies the Edgehog runtime implementation."
field :url, :string
end
@desc """
Denotes a device instance that connects and exchanges data.
Each Device is associated to a specific SystemModel, which in turn is \
associated to a specific HardwareType.
A Device also exposes info about its connection status and some sets of \
data read by its operating system.
"""
node object(:device) do
@desc "The display name of the device."
field :name, non_null(:string)
@desc "The device ID used to connect to the Astarte cluster."
field :device_id, non_null(:string)
@desc "Tells whether the device is connected or not to Astarte."
field :online, non_null(:boolean)
@desc "The date at which the device last connected to Astarte."
field :last_connection, :datetime
@desc "The date at which the device last disconnected from Astarte."
field :last_disconnection, :datetime
@desc "The system model of the device."
field :system_model, :system_model
@desc "Info read from the device's hardware."
field :hardware_info, :hardware_info do
resolve &Resolvers.Astarte.get_hardware_info/3
middleware Middleware.ErrorHandler
end
@desc "The estimated location of the device."
field :location, :device_location do
resolve &Resolvers.Astarte.fetch_device_location/3
middleware Middleware.ErrorHandler
end
@desc "The current usage of the storage units of the device."
field :storage_usage, list_of(non_null(:storage_unit)) do
resolve &Resolvers.Astarte.fetch_storage_usage/3
middleware Middleware.ErrorHandler
end
@desc "The current status of the operating system of the device."
field :system_status, :system_status do
resolve &Resolvers.Astarte.fetch_system_status/3
middleware Middleware.ErrorHandler
end
@desc "The list of WiFi Access Points found by the device."
field :wifi_scan_results, list_of(non_null(:wifi_scan_result)) do
resolve &Resolvers.Astarte.fetch_wifi_scan_results/3
middleware Middleware.ErrorHandler
end
@desc "The status of the battery slots of the device."
field :battery_status, list_of(non_null(:battery_slot)) do
resolve &Resolvers.Astarte.fetch_battery_status/3
middleware Middleware.ErrorHandler
end
@desc "Information about the operating system's base image for the device."
field :base_image, :base_image do
resolve &Resolvers.Astarte.fetch_base_image/3
middleware Middleware.ErrorHandler
end
@desc "Information about the operating system of the device."
field :os_info, :os_info do
resolve &Resolvers.Astarte.fetch_os_info/3
middleware Middleware.ErrorHandler
end
@desc "The existing OTA operations for this device"
field :ota_operations, non_null(list_of(non_null(:ota_operation))) do
# TODO: this causes an N+1 if used on the device list, we should use dataloader instead
resolve &Resolvers.OSManagement.ota_operations_for_device/3
end
@desc "The status of cellular connection of the device."
field :cellular_connection, list_of(non_null(:modem)) do
resolve &Resolvers.Astarte.fetch_cellular_connection/3
middleware Middleware.ErrorHandler
end
@desc "Information about the Edgehog runtime running on the device."
field :runtime_info, :runtime_info do
resolve &Resolvers.Astarte.fetch_runtime_info/3
middleware Middleware.ErrorHandler
end
end
object :astarte_queries do
@desc "Fetches the list of all devices."
field :devices, non_null(list_of(non_null(:device))) do
@desc "An optional set of filters to apply when fetching the devices."
arg :filter, :device_filter
resolve &Resolvers.Astarte.list_devices/3
end
@desc "Fetches a single device."
field :device, :device do
@desc "The ID of the device."
arg :id, non_null(:id)
middleware Absinthe.Relay.Node.ParseIDs, id: :device
resolve &Resolvers.Astarte.find_device/2
end
end
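# Illustrative GraphQL queries for the fields above (filter values and IDs
# are placeholders; client-side field names assumed from the schema definitions):
#
#   query {
#     devices(filter: {online: true, systemModelHandle: "esp32"}) {
#       name
#       online
#     }
#   }
#
#   query {
#     device(id: "RGV2aWNlOjE=") {
#       name
#       lastConnection
#     }
#   }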
@desc "Led behavior"
enum :led_behavior do
@desc "Blink for 60 seconds."
value :blink
@desc "Double blink for 60 seconds."
value :double_blink
@desc "Slow blink for 60 seconds."
value :slow_blink
end
object :astarte_mutations do
@desc "Sets led behavior."
payload field :set_led_behavior do
input do
@desc "The GraphQL ID (not the Astarte Device ID) of the target device"
field :device_id, non_null(:id)
@desc "The led behavior"
field :behavior, non_null(:led_behavior)
end
output do
@desc "The resulting led behavior."
field :behavior, non_null(:led_behavior)
end
middleware Absinthe.Relay.Node.ParseIDs, device_id: :device
resolve &Resolvers.Astarte.set_led_behavior/2
end
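# Illustrative mutation for the payload above (the ID is a placeholder;
# field names assumed from the schema definitions):
#
#   mutation {
#     setLedBehavior(input: {deviceId: "RGV2aWNlOjE=", behavior: BLINK}) {
#       behavior
#     }
#   }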
@desc "Updates a device."
payload field :update_device do
input do
@desc "The GraphQL ID (not the Astarte Device ID) of the device to be updated."
field :device_id, non_null(:id)
@desc "The display name of the device."
field :name, :string
end
output do
@desc "The updated device."
field :device, non_null(:device)
end
middleware Absinthe.Relay.Node.ParseIDs, device_id: :device
resolve &Resolvers.Astarte.update_device/2
end
end
end
|
backend/lib/edgehog_web/schema/astarte_types.ex
|
defmodule Exonerate.Filter.Items do
@moduledoc false
@behaviour Exonerate.Filter
@derive Exonerate.Compiler
@derive {Inspect, except: [:context]}
alias Exonerate.Validator
import Validator, only: [fun: 2]
defstruct [:context, :schema, :additional_items, :prefix_size]
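# Shapes of the "items" schemas handled below, as illustrative Elixir maps:
#
#   %{"items" => true}                                   # any array validates
#   %{"items" => false, "prefixItems" => [...]}          # nothing allowed past the prefix
#   %{"items" => %{"type" => "string"}}                  # one schema for every item
#   %{"items" => [%{"type" => "integer"}, %{"type" => "string"}]}  # per-index schemas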
def parse(artifact = %{context: context}, %{"items" => true}) do
# true means any array is valid
# this header clause is provided as an optimization.
%{artifact | filters: [%__MODULE__{context: context, schema: true} | artifact.filters]}
end
def parse(artifact = %{context: context}, schema = %{"items" => false}) do
# false means any item beyond the prefixItems fails validation.
if prefix_items = schema["prefixItems"] do
filter = %__MODULE__{context: context, schema: false, prefix_size: length(prefix_items)}
%{artifact |
needs_accumulator: true,
accumulator_pipeline: [fun(artifact, "items") | artifact.accumulator_pipeline],
accumulator_init: Map.put(artifact.accumulator_init, :index, 0),
filters: [filter | artifact.filters]}
else
# this is provided as an optimization.
filter = %__MODULE__{context: context, schema: false, prefix_size: 0}
%{artifact | filters: [filter]}
end
end
def parse(artifact = %{context: context}, %{"items" => s}) when is_map(s) do
fun = fun(artifact, "items")
schema = Validator.parse(context.schema,
["items" | context.pointer],
authority: context.authority,
format: context.format,
draft: context.draft)
%{artifact |
needs_accumulator: true,
accumulator_pipeline: [fun | artifact.accumulator_pipeline],
accumulator_init: Map.put(artifact.accumulator_init, :index, 0),
filters: [
%__MODULE__{
context: context,
schema: schema} | artifact.filters]}
end
def parse(artifact = %{context: context}, %{"items" => s}) when is_list(s) do
fun = fun(artifact, "items")
schemas = Enum.map(0..(length(s) - 1),
&Validator.parse(context.schema,
["#{&1}", "items" | context.pointer],
authority: context.authority,
format: context.format,
draft: context.draft))
%{artifact |
needs_accumulator: true,
accumulator_pipeline: [fun | artifact.accumulator_pipeline],
accumulator_init: Map.put(artifact.accumulator_init, :index, 0),
filters: [
%__MODULE__{
context: artifact.context,
schema: schemas,
additional_items: artifact.additional_items} | artifact.filters]}
end
def compile(%__MODULE__{schema: true}), do: {[], []}
def compile(filter = %__MODULE__{schema: false, prefix_size: 0}) do
{[quote do
defp unquote(fun(filter, []))(array, path) when is_list(array) and array != [] do
Exonerate.mismatch(array, path, guard: "items")
end
end], []}
end
def compile(filter = %__MODULE__{schema: false}) do
{[], [
quote do
defp unquote(fun(filter, "items"))(acc = %{index: index}, {_path, _array})
when index < unquote(filter.prefix_size) do
acc
end
defp unquote(fun(filter, "items"))(%{index: index}, {path, array}) do
Exonerate.mismatch(array, path, guard: to_string(index))
end
end
]}
end
def compile(filter = %__MODULE__{schema: schema}) when is_map(schema) do
{[], [
quote do
defp unquote(fun(filter, "items"))(acc, {path, item}) do
unquote(fun(filter, "items"))(item, Path.join(path, to_string(acc.index)))
acc
end
unquote(Validator.compile(schema))
end
]}
end
def compile(filter = %__MODULE__{schema: schemas}) when is_list(schemas) do
{trampolines, children} = schemas
|> Enum.with_index()
|> Enum.map(fn {schema, index} ->
{quote do
defp unquote(fun(filter, "items"))(acc = %{index: unquote(index)}, {path, item}) do
unquote(fun(filter, ["items", to_string(index)]))(item, Path.join(path, unquote("#{index}")))
acc
end
end,
Validator.compile(schema)}
end)
|> Enum.unzip()
additional_item_filter = if filter.additional_items do
quote do
defp unquote(fun(filter, "items"))(acc = %{index: index}, {path, item}) do
unquote(fun(filter, "additionalItems"))(item, Path.join(path, to_string(index)))
acc
end
end
else
quote do
defp unquote(fun(filter, "items"))(acc = %{index: _}, {_path, _item}), do: acc
end
end
{[], trampolines ++ [additional_item_filter] ++ children}
end
end
|
lib/exonerate/filter/items.ex
|
defmodule ForthVM.Words.Math do
@moduledoc """
Math words
"""
alias ForthVM.Process
# ---------------------------------------------
# Basic math operations
# ---------------------------------------------
@doc """
+: ( x y -- n ) adds y to x
"""
def plus(tokens, [y, x | data_stack], return_stack, dictionary, meta) do
Process.next(tokens, [x + y | data_stack], return_stack, dictionary, meta)
end
@doc """
-: ( x y -- n ) subtracts y from x
"""
def minus(tokens, [y, x | data_stack], return_stack, dictionary, meta) do
Process.next(tokens, [x - y | data_stack], return_stack, dictionary, meta)
end
@doc """
*: ( x y -- n ) multiplies x by y
"""
def mult(tokens, [y, x | data_stack], return_stack, dictionary, meta) do
Process.next(tokens, [x * y | data_stack], return_stack, dictionary, meta)
end
@doc """
/: ( x y -- n ) divides x by y
"""
def div(tokens, [y, x | data_stack], return_stack, dictionary, meta) do
Process.next(tokens, [x / y | data_stack], return_stack, dictionary, meta)
end
@doc """
/mod: ( x y -- rem div ) divides x by y, placing the quotient and remainder on top of the data stack
"""
def mod(tokens, [y, x | data_stack], return_stack, dictionary, meta) do
Process.next(tokens, [div(x, y), rem(x, y) | data_stack], return_stack, dictionary, meta)
end
@doc """
**: ( x y -- n ) raises x to the power y
"""
def pow(tokens, [y, x | data_stack], return_stack, dictionary, meta) do
Process.next(tokens, [:math.pow(x, y) | data_stack], return_stack, dictionary, meta)
end
@doc """
*/: ( a b x -- n ) multiplies a by b, then divides the result by x
"""
def mult_div(tokens, [x | data_stack], return_stack, dictionary, meta) do
Process.next(["*", x, "/" | tokens], data_stack, return_stack, dictionary, meta)
end
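# Illustrative walk-through: with data stack [4, 5, 10] (top first), "*/"
# pops 4 and expands to the tokens ["*", 4, "/"], so 10 * 5 = 50 is computed
# first and then divided by 4, leaving [12.5] on the stack.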
@doc """
1+: ( x -- n ) adds 1 to x
"""
def one_plus(tokens, [x | data_stack], return_stack, dictionary, meta) do
Process.next(tokens, [x + 1 | data_stack], return_stack, dictionary, meta)
end
@doc """
1-: ( x -- n ) subtracts 1 from x
"""
def one_minus(tokens, [x | data_stack], return_stack, dictionary, meta) do
Process.next(tokens, [x - 1 | data_stack], return_stack, dictionary, meta)
end
@doc """
@-: ( x -- n ) negates x
"""
def negate(tokens, [x | data_stack], return_stack, dictionary, meta) do
Process.next(tokens, [-x | data_stack], return_stack, dictionary, meta)
end
@doc """
abs: ( x -- n ) absolute value of x
"""
def abs(tokens, [x | data_stack], return_stack, dictionary, meta) do
Process.next(tokens, [abs(x) | data_stack], return_stack, dictionary, meta)
end
@doc """
rand: ( -- n ) puts a random float between 0.0 and 1.0 on the stack
"""
def rand(tokens, data_stack, return_stack, dictionary, meta) do
Process.next(tokens, [:rand.uniform() | data_stack], return_stack, dictionary, meta)
end
@doc """
min: ( x y -- n ) minimum between two values
"""
def min(tokens, [y, x | data_stack], return_stack, dictionary, meta) do
Process.next(tokens, [min(x, y) | data_stack], return_stack, dictionary, meta)
end
@doc """
max: ( x y -- n ) maximum between two values
"""
def max(tokens, [y, x | data_stack], return_stack, dictionary, meta) do
Process.next(tokens, [max(x, y) | data_stack], return_stack, dictionary, meta)
end
@doc """
sqrt: ( x -- n ) sqrt of x
"""
def sqrt(tokens, [x | data_stack], return_stack, dictionary, meta) do
Process.next(tokens, [:math.sqrt(x) | data_stack], return_stack, dictionary, meta)
end
@doc """
sin: ( x -- n ) sin of x
"""
def sin(tokens, [x | data_stack], return_stack, dictionary, meta) do
Process.next(tokens, [:math.sin(x) | data_stack], return_stack, dictionary, meta)
end
@doc """
cos: ( x -- n ) cos of x
"""
def cos(tokens, [x | data_stack], return_stack, dictionary, meta) do
Process.next(tokens, [:math.cos(x) | data_stack], return_stack, dictionary, meta)
end
@doc """
tan: ( x -- n ) tan of x
"""
def tan(tokens, [x | data_stack], return_stack, dictionary, meta) do
Process.next(tokens, [:math.tan(x) | data_stack], return_stack, dictionary, meta)
end
@doc """
pi: ( -- n ) puts Pi value on top of stack
"""
def pi(tokens, data_stack, return_stack, dictionary, meta) do
Process.next(tokens, [:math.pi() | data_stack], return_stack, dictionary, meta)
end
end
|
lib/forthvm/words/math.ex
|
defmodule KiteConnect do
@moduledoc """
Elixir module for the Zerodha Kite Connect API
## Installation
Add `kite_connect` to your list of dependencies in `mix.exs`:
```elixir
def deps do
[
{:kite_connect, "~> 0.1"}
]
end
```
## Setup
Your project should be an OTP application. In your `application.ex`
file start `KiteConnect.State` as a worker.
```elixir
# Define workers and child supervisors to be supervised
children = [
# Start your own worker
worker(KiteConnect.State, []),
]
```
A sample iex session:
```
iex(1)> KiteConnect.init(your_api_key, your_api_secret)
:ok
iex(2)> KiteConnect.set_access_token(your_request_token)
:ok
iex(3)> KiteConnect.Quote.ltp("NSE:INFY")
724.3
```
"""
@api_version 3
@module_map %{
"quote.ltp" => "quote/ltp",
"quote.market" => "quote",
"session.token" => "session/token",
"user.margins" => "user/margins",
"instruments.historical" => "instruments/historical",
"orders.regular" => "orders/regular",
"orders.bo" => "orders/bo",
"orders.history" => "orders",
"orders" => "orders",
}
def init(api_key, api_secret) do
KiteConnect.State.set(:api_key, api_key)
KiteConnect.State.set(:api_secret, api_secret)
end
def api_key do
KiteConnect.State.get(:api_key)
end
def api_secret do
KiteConnect.State.get(:api_secret)
end
def access_token do
KiteConnect.State.get(:access_token)
end
def set_access_token(request_token) do
{:ok, at} = gen_access_token(request_token)
KiteConnect.State.set(:access_token, at)
end
def gen_url(module, a1 \\ "", a2 \\ "") do
api_endpoint = "https://api.kite.trade"
case module do
"quote.ltp" ->
case is_list(a1) do
true ->
"#{api_endpoint}/#{@module_map[module]}?i=" <> Enum.join(a1, "&i=")
false ->
"#{api_endpoint}/#{@module_map[module]}?i=#{a1}"
end
"quote.market" ->
case is_list(a1) do
true ->
"#{api_endpoint}/#{@module_map[module]}?i=" <> Enum.join(a1, "&i=")
false ->
"#{api_endpoint}/#{@module_map[module]}?i=#{a1}"
end
"instruments.historical" ->
"#{api_endpoint}/#{@module_map[module]}/#{a1}/#{a2}"
"orders.history" ->
"#{api_endpoint}/#{@module_map[module]}/#{a1}"
_ ->
"#{api_endpoint}/#{@module_map[module]}"
end
end
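# Illustrative results, derived from @module_map above:
#
#   gen_url("quote.ltp", "NSE:INFY")
#   #=> "https://api.kite.trade/quote/ltp?i=NSE:INFY"
#
#   gen_url("quote.ltp", ["NSE:INFY", "NSE:TCS"])
#   #=> "https://api.kite.trade/quote/ltp?i=NSE:INFY&i=NSE:TCS"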
def gen_headers do
[
"X-Kite-Version": @api_version,
Authorization: "token #{KiteConnect.api_key}:#{KiteConnect.access_token}"
]
end
def gen_headers_post do
[
"Content-Type": "application/x-www-form-urlencoded",
"X-Kite-Version": @api_version,
Authorization: "token #{KiteConnect.api_key}:#{KiteConnect.access_token}"
]
end
defp gen_access_token(request_token) do
checksum =
Base.encode16(
:crypto.hash(
:sha256,
"#{KiteConnect.api_key}#{request_token}#{KiteConnect.api_secret}"
)
)
|> String.downcase()
url = KiteConnect.gen_url("session.token")
headers = ["X-Kite-Version": 3, "Content-Type": "application/x-www-form-urlencoded"]
body =
%{"api_key" => KiteConnect.api_key, "request_token" => request_token, "checksum" => checksum}
|> URI.encode_query()
res_body = KiteConnect.HTTP.post(url, body, headers)
at = res_body["data"]["access_token"]
if at != nil, do: {:ok, at}, else: {:error, nil}
end
end
|
lib/kite_connect.ex
|
defmodule Pokerap.Ez do
@moduledoc """
Helper functions to make pokemon searching more intuitve
"get what I mean" instead of the almost firehose-esk data from the API
"""
alias Pokerap.Env, as: Env
# cleaner to move this out to its own function
defp parse_evo(chain) do
[chain["species"]["name"]|Enum.map(chain["evolves_to"], fn(x) -> parse_evo(x) end)]
end
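# Example shape (matching the doctest below): a linear chain comes back as
# nested lists, e.g. ["trubbish", ["garbodor"]]; a branching chain yields one
# nested list per possible evolution.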
@doc ~S"""
Returns a tuple containing status and a "simple" evolution chain in list form.
This is a simplified combination of `/pokemon-species` and `/evolution-chain`.
`/evolution-chain` only takes a numerical evolution chain id, which you can only get
from `/pokemon-species` (What? You don't know that `Garbodor's` evolution chain ID is 290?), so being able to just pass in
a name string is much easier.
## Example
iex> Pokerap.Ez.evolution("garbodor")
{:ok, ["trubbish", ["garbodor"]]}
"""
def evolution(name) do
with {:ok, species} <- Pokerap.pokemon_species(name),
{:ok, evo} <- Pokerap.Url.get_url(species["evolution_chain"]["url"]),
do: {:ok, parse_evo(evo["chain"])}
end
# Parses flavor_texts, filters by language, returns map of "game version" => "text"
defp parse_flavor_text(flavor_texts) do
# this does not feel "Elixir-y"
filter_lang = fn(x) -> x["language"]["name"] == Env.language end
Enum.filter(flavor_texts["flavor_text_entries"], filter_lang)
|> Enum.reduce(%{}, fn(entry, acc) ->
Map.merge(acc, %{entry["version"]["name"] => entry["flavor_text"]})
end)
end
@doc ~S"""
Returns a tuple containing request status, and map of flavor texts.
This data is found inside of the `/pokemon` endpoint (`Pokerap.pokemon/1`)
but is nested somewhat deeply.
Will return in the language set in config under `config :pokerap, language:`. Defaults to "en".
See [Readme](readme.html#env-settings) for more details on this function.
## Example
iex> Pokerap.Ez.flavor_text("bidoof")
{:ok,
%{"alpha-sapphire" => "It constantly gnaws on logs and rocks to whittle\ndown its front teeth. It nests alongside water.",
"black" => "A comparison revealed that\nBidoof’s front teeth grow at\nthe same rate as Rattata’s.",
"black-2" => "A comparison revealed that\nBidoof’s front teeth grow at\nthe same rate as Rattata’s.",
"diamond" => "With nerves of steel, nothing can\nperturb it. It is more agile and\nactive than it appears.",
"heartgold" => "It lives in groups by the water. \nIt chews up boulders and trees\naround its nest with its incisors.",
"omega-ruby" => "With nerves of steel, nothing can perturb it. It is\nmore agile and active than it appears.",
"pearl" => "It constantly gnaws on logs and\nrocks to whittle down its front\nteeth. It nests alongside water.",
"platinum" => "A comparison revealed that\nBIDOOF’s front teeth grow at\nthe same rate as RATTATA’s.",
"soulsilver" => "It lives in groups by the water. \nIt chews up boulders and trees\naround its nest with its incisors.",
"white" => "A comparison revealed that\nBidoof’s front teeth grow at\nthe same rate as Rattata’s.",
"white-2" => "A comparison revealed that\nBidoof’s front teeth grow at\nthe same rate as Rattata’s.",
"x" => "With nerves of steel, nothing can perturb it. It is\nmore agile and active than it appears.",
"y" => "It constantly gnaws on logs and rocks to whittle\ndown its front teeth. It nests alongside water."}}
"""
def flavor_text(name) do
with {:ok, species} <- Pokerap.pokemon_species(name),
do: {:ok, parse_flavor_text(species)}
end
@doc ~S"""
Gets map of pokedex style descriptions.
Returns map of flavor texts. Raises exceptions upon error. `!` version of `Pokerap.Ez.flavor_text/1`
## Example
iex> Pokerap.Ez.flavor_text!("bidoof")
%{"alpha-sapphire" => "It constantly gnaws on logs and rocks to whittle\ndown its front teeth. It nests alongside water.",
"black" => "A comparison revealed that\nBidoof’s front teeth grow at\nthe same rate as Rattata’s.",
"black-2" => "A comparison revealed that\nBidoof’s front teeth grow at\nthe same rate as Rattata’s.",
"diamond" => "With nerves of steel, nothing can\nperturb it. It is more agile and\nactive than it appears.",
"heartgold" => "It lives in groups by the water. \nIt chews up boulders and trees\naround its nest with its incisors.",
"omega-ruby" => "With nerves of steel, nothing can perturb it. It is\nmore agile and active than it appears.",
"pearl" => "It constantly gnaws on logs and\nrocks to whittle down its front\nteeth. It nests alongside water.",
"platinum" => "A comparison revealed that\nBIDOOF’s front teeth grow at\nthe same rate as RATTATA’s.",
"soulsilver" => "It lives in groups by the water. \nIt chews up boulders and trees\naround its nest with its incisors.",
"white" => "A comparison revealed that\nBidoof’s front teeth grow at\nthe same rate as Rattata’s.",
"white-2" => "A comparison revealed that\nBidoof’s front teeth grow at\nthe same rate as Rattata’s.",
"x" => "With nerves of steel, nothing can perturb it. It is\nmore agile and active than it appears.",
"y" => "It constantly gnaws on logs and rocks to whittle\ndown its front teeth. It nests alongside water."}
"""
def flavor_text!(name) do
parse_flavor_text(Pokerap.pokemon_species!(name))
end
@doc ~S"""
Returns a tuple containing request status, and map of images.
This data is found inside of the `/pokemon` endpoint (`Pokerap.pokemon/1`)
but is nested somewhat deeply.
## Example
iex> Pokerap.Ez.images("poliwhirl")
{:ok,
%{"back_default" => "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/back/61.png",
"back_female" => nil,
"back_shiny" => "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/back/shiny/61.png",
"back_shiny_female" => nil,
"front_default" => "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/61.png",
"front_female" => nil,
"front_shiny" => "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/shiny/61.png",
"front_shiny_female" => nil}}
"""
def images(name) do
with {:ok, pokemon} <- Pokerap.pokemon(name),
do: {:ok, pokemon["sprites"]}
end
@doc ~S"""
Gets map of images.
Returns map of images. Raises exceptions upon error.
`!` version of `Pokerap.Ez.images/1`
## Example
iex> Pokerap.Ez.images!("poliwhirl")
%{"back_default" => "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/back/61.png",
"back_female" => nil,
"back_shiny" => "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/back/shiny/61.png",
"back_shiny_female" => nil,
"front_default" => "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/61.png",
"front_female" => nil,
"front_shiny" => "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/shiny/61.png",
"front_shiny_female" => nil}
"""
def images!(name) do
Pokerap.pokemon!(name)["sprites"]
end
@doc ~S"""
Returns a tuple containing request status, and list of Pokemon types.
This data is found inside of the `/pokemon` endpoint (`Pokerap.pokemon/1`)
but is nested somewhat deeply.
## Example
iex(1)> Pokerap.Ez.types("rotom")
{:ok, ["ghost", "electric"]}
"""
def types(name) do
with {:ok, pokemon} <- Pokerap.pokemon(name),
types <- Enum.map(pokemon["types"], &(&1["type"]["name"])),
do: {:ok, types}
end
@doc ~S"""
Gets a list of types per pokemon
Returns list. Raises exceptions upon error.
`!` version of `Pokerap.Ez.types/1`
## Example
iex> Pokerap.Ez.types!("rotom")
["ghost", "electric"]
"""
def types!(name) do
Pokerap.pokemon!(name)["types"]
|> Enum.map(&(&1["type"]["name"]))
end
@doc ~S"""
Returns a tuple containing request status, and list of possible moves a Pokemon **can** have/learn.
Returns "simple" list of ALL moves a Pokemon **can** have/learn
## Example
iex> Pokerap.Ez.moves("caterpie")
{:ok, ["tackle", "string-shot", "snore", "bug-bite", "electroweb"]}
Be advised: some pokemon have very long move sets.
"""
def moves(name) do
with {:ok, pokemon} <- Pokerap.pokemon(name),
moves <- Enum.map(pokemon["moves"], &(&1["move"]["name"])),
do: {:ok, moves}
end
@doc ~S"""
Returns a list of possible moves a Pokemon can have/learn.
Returns "simple" list of ALL moves a Pokemon **can** have/learn.
Raises exceptions upon error. `!` version of `Pokerap.Ez.moves/1`
## Example
iex> Pokerap.Ez.moves!("caterpie")
["tackle", "string-shot", "snore", "bug-bite", "electroweb"]
Be advised: some pokemon have very long move sets.
"""
def moves!(name) do
Enum.map(Pokerap.pokemon!(name)["moves"], fn(x)-> x["move"]["name"] end)
end
end
|
lib/Pokerap/Ez.ex
|
if Appsignal.live_view?() do
defmodule Appsignal.Phoenix.LiveView do
alias Appsignal.{ErrorHandler, Stacktrace, Transaction, TransactionRegistry, Utils.MapFilter}
import Appsignal.Utils
require Appsignal.Stacktrace
@transaction Application.get_env(:appsignal, :appsignal_transaction, Transaction)
@moduledoc """
Instrumentation for LiveView actions
## Instrumenting a LiveView action
A LiveView action is instrumented by wrapping its contents in a
`Appsignal.Phoenix.LiveView.live_view_action/4` block.
defmodule AppsignalPhoenixExampleWeb.ClockLive do
use Phoenix.LiveView
def render(assigns) do
AppsignalPhoenixExampleWeb.ClockView.render("index.html", assigns)
end
def mount(_session, socket) do
:timer.send_interval(1000, self(), :tick)
{:ok, assign(socket, state: Time.utc_now())}
end
def handle_info(:tick, socket) do
{:noreply, assign(socket, state: Time.utc_now())}
end
end
Given a live view that updates its own state every second, we can add
AppSignal instrumentation by wrapping both the mount/2 and handle_info/2
functions with a `Appsignal.Phoenix.LiveView.live_view_action`/4 call:
defmodule AppsignalPhoenixExampleWeb.ClockLive do
use Phoenix.LiveView
import Appsignal.Phoenix.LiveView, only: [live_view_action: 4]
def render(assigns) do
AppsignalPhoenixExampleWeb.ClockView.render("index.html", assigns)
end
def mount(_session, socket) do
live_view_action(__MODULE__, :mount, socket, fn ->
:timer.send_interval(1000, self(), :tick)
{:ok, assign(socket, state: Time.utc_now())}
end)
end
def handle_info(:tick, socket) do
live_view_action(__MODULE__, :handle_info, socket, fn ->
{:noreply, assign(socket, state: Time.utc_now())}
end)
end
end
Calling one of these functions in your app will now automatically create a
sample that's sent to AppSignal. These are displayed under the `:live_view`
namespace.
For more fine-grained performance instrumentation, use the instrumentation
helper functions in `Appsignal.Instrumentation.Helpers`.
"""
@doc """
Record a live_view action.
"""
@spec live_view_action(atom, atom | String.t(), Phoenix.LiveView.Socket.t(), fun) :: any
def live_view_action(module, name, %Phoenix.LiveView.Socket{} = socket, function) do
live_view_action(module, name, socket, %{}, function)
end
@spec live_view_action(atom, atom | String.t(), Phoenix.LiveView.Socket.t(), map, fun) :: any
def live_view_action(module, name, %Phoenix.LiveView.Socket{} = socket, params, function) do
if Appsignal.Config.active?() do
transaction =
@transaction.generate_id()
|> @transaction.start(:live_view)
|> @transaction.set_action("#{module_name(module)}##{name}")
try do
function.()
catch
kind, reason ->
stacktrace = Stacktrace.get()
ErrorHandler.set_error(transaction, reason, stacktrace)
finish_with_socket(transaction, socket, params)
TransactionRegistry.ignore(self())
:erlang.raise(kind, reason, stacktrace)
else
result ->
finish_with_socket(transaction, socket, params)
result
end
else
function.()
end
end
@spec finish_with_socket(Transaction.t() | nil, Phoenix.LiveView.Socket.t(), map()) ::
:ok | nil
defp finish_with_socket(transaction, socket, params) do
if @transaction.finish(transaction) == :sample do
transaction
|> @transaction.set_sample_data("params", MapFilter.filter_parameters(params))
|> @transaction.set_sample_data("environment", request_environment(socket))
end
@transaction.complete(transaction)
end
@socket_fields ~w(id root_view view endpoint router)a
@spec request_environment(Phoenix.LiveView.Socket.t()) :: map
defp request_environment(socket) do
@socket_fields
|> Enum.into(%{}, fn k -> {k, Map.get(socket, k)} end)
end
end
end
|
lib/appsignal/phoenix/live_view.ex
|
defmodule TeslaMate.Vehicles.Vehicle.Summary do
import TeslaMate.Convert, only: [miles_to_km: 2, mph_to_kmh: 1]
alias TeslaApi.Vehicle.State.{Drive, Charge}
alias TeslaApi.Vehicle
defstruct [
:display_name,
:state,
:since,
:healthy,
:latitude,
:longitude,
:battery_level,
:ideal_battery_range_km,
:est_battery_range_km,
:rated_battery_range_km,
:charge_energy_added,
:speed,
:outside_temp,
:inside_temp,
:locked,
:sentry_mode,
:plugged_in,
:scheduled_charging_start_time,
:charge_limit_soc,
:charger_power,
:windows_open,
:odometer,
:shift_state,
:charge_port_door_open,
:time_to_full_charge,
:charger_phases,
:charger_actual_current,
:charger_voltage
]
def into(nil, %{state: :start, healthy?: healthy?}) do
%__MODULE__{state: :unavailable, healthy: healthy?}
end
def into(vehicle, %{state: state, since: since, healthy?: healthy?}) do
%__MODULE__{
format_vehicle(vehicle)
| state: format_state(state),
since: since,
healthy: healthy?
}
end
defp format_state({:charging, "Complete", _process_id}), do: :charging_complete
defp format_state({:charging, _state, _process_id}), do: :charging
defp format_state({:driving, {:offline, _}, _id}), do: :offline
defp format_state({:driving, _state, _id}), do: :driving
defp format_state({state, _}) when is_atom(state), do: state
defp format_state(state) when is_atom(state), do: state
defp format_vehicle(%Vehicle{} = vehicle) do
%__MODULE__{
# General
display_name: vehicle.display_name,
# Drive State
latitude: get_in_struct(vehicle, [:drive_state, :latitude]),
longitude: get_in_struct(vehicle, [:drive_state, :longitude]),
speed: speed(vehicle),
shift_state: get_in_struct(vehicle, [:drive_state, :shift_state]),
# Charge State
plugged_in: plugged_in(vehicle),
battery_level: charge(vehicle, :battery_level),
charge_energy_added: charge(vehicle, :charge_energy_added),
charge_limit_soc: charge(vehicle, :charge_limit_soc),
charge_port_door_open: charge(vehicle, :charge_port_door_open),
charger_actual_current: charge(vehicle, :charger_actual_current),
charger_phases: charge(vehicle, :charger_phases),
charger_power: charge(vehicle, :charger_power),
charger_voltage: charge(vehicle, :charger_voltage),
est_battery_range_km: charge(vehicle, :est_battery_range) |> miles_to_km(2),
ideal_battery_range_km: charge(vehicle, :ideal_battery_range) |> miles_to_km(2),
rated_battery_range_km: charge(vehicle, :battery_range) |> miles_to_km(2),
time_to_full_charge: charge(vehicle, :time_to_full_charge),
scheduled_charging_start_time:
charge(vehicle, :scheduled_charging_start_time) |> to_datetime(),
# Climate State
outside_temp: get_in_struct(vehicle, [:climate_state, :outside_temp]),
inside_temp: get_in_struct(vehicle, [:climate_state, :inside_temp]),
# Vehicle State
odometer: get_in_struct(vehicle, [:vehicle_state, :odometer]) |> miles_to_km(2),
locked: get_in_struct(vehicle, [:vehicle_state, :locked]),
sentry_mode: get_in_struct(vehicle, [:vehicle_state, :sentry_mode]),
windows_open: window_open?(vehicle)
}
end
defp charge(vehicle, key), do: get_in_struct(vehicle, [:charge_state, key])
defp speed(%Vehicle{drive_state: %Drive{speed: s}}) when not is_nil(s), do: mph_to_kmh(s)
defp speed(_vehicle), do: nil
defp plugged_in(%Vehicle{charge_state: nil}), do: nil
defp plugged_in(%Vehicle{vehicle_state: nil}), do: nil
defp plugged_in(%Vehicle{
charge_state: %Charge{charge_port_latch: "Engaged", charge_port_door_open: true}
}) do
true
end
defp plugged_in(_vehicle), do: false
defp window_open?(vehicle) do
get_in_struct(vehicle, [:vehicle_state, :fd_window]) == 1 or
get_in_struct(vehicle, [:vehicle_state, :fp_window]) == 1 or
get_in_struct(vehicle, [:vehicle_state, :rd_window]) == 1 or
get_in_struct(vehicle, [:vehicle_state, :rp_window]) == 1
end
defp to_datetime(nil), do: nil
defp to_datetime(ts), do: DateTime.from_unix!(ts)
defp get_in_struct(struct, keys) do
Enum.reduce(keys, struct, fn key, acc -> if acc, do: Map.get(acc, key) end)
end
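# get_in_struct/2 is a nil-safe nested lookup, e.g. (illustrative):
#
#   get_in_struct(%Vehicle{drive_state: nil}, [:drive_state, :latitude])
#   #=> nil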
end
|
lib/teslamate/vehicles/vehicle/summary.ex
|
defmodule Contentful.Delivery.Assets do
@moduledoc """
Deals with the loading of assets from a given `Contentful.Space`
See https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/assets.end
## Simple asset calls
A `Contentful.Asset` can be retrieved by its `asset_id`:
import Contentful.Query
alias Contentful.Asset
alias Contentful.Delivery.Assets
{:ok, %Asset{id: "my_asset_id"}} = Assets |> fetch_one("my_asset_id")
or just as a collection:
import Contentful.Query
alias Contentful.Asset
alias Contentful.Delivery.Assets
{:ok, [%Asset{id: "my_asset_id"} | _ ]} = Assets |> fetch_all
## Resolving assets from entries
In the case of inclusion of assets with entries, see the docs for `Contentful.Entries` to see how to resolve assets
from entries.
## Accessing common resource attributes
A `Contentful.Asset` embeds `Contentful.SysData` with extra information about the entry:
import Contentful.Query
alias Contentful.Asset
alias Contentful.Delivery.Assets
{:ok, asset} = Assets |> fetch_one("my_asset_id")
"my_asset_id" = asset.id
"<a timestamp for updated_at>" = asset.sys.updated_at
"<a timestamp for created_at>" = asset.sys.created_at
"<a locale string>" = asset.sys.locale
"""
alias Contentful.{Asset, Queryable, SysData}
@behaviour Queryable
@endpoint "/assets"
@doc """
Returns the endpoint for assets
"""
@impl Queryable
def endpoint do
@endpoint
end
@impl Queryable
def resolve_entity_response(%{
"fields" =>
%{
"file" => %{
"contentType" => content_type,
"details" => details,
"fileName" => file_name,
"url" => url
}
} = fields,
"sys" => %{
"id" => id,
"revision" => rev,
"createdAt" => created_at,
"updatedAt" => updated_at,
"locale" => locale
}
}) do
# title and description are optional fields for assets
title = Map.get(fields, "title")
desc = Map.get(fields, "description")
asset = %Asset{
sys: %SysData{
id: id,
revision: rev,
created_at: created_at,
updated_at: updated_at,
locale: locale
},
fields: %Asset.Fields{
title: title,
description: desc,
file: %{
content_type: content_type,
url: URI.parse(url),
file_name: file_name,
details: details
}
}
}
{:ok, asset}
end
@impl Queryable
def resolve_collection_response(%{"items" => items, "total" => total}) do
assets =
items
|> Enum.map(&resolve_entity_response/1)
|> Enum.map(fn {:ok, asset} -> asset end)
{:ok, assets, total: total}
end
end
|
lib/contentful_delivery/assets.ex
|
defmodule Explorer.Series do
@moduledoc """
The Series struct and API.
A series can be of the following data types:
* `:float` - 64-bit floating point number
* `:integer` - 64-bit signed integer
* `:boolean` - Boolean
* `:string` - UTF-8 encoded binary
* `:date` - Date type that unwraps to `Elixir.Date`
* `:datetime` - DateTime type that unwraps to `Elixir.NaiveDateTime`
A series must consist of a single data type only. Series are nullable, but may not consist only of
nils.
Many functions only apply to certain dtypes. Where that is the case, you'll find a `Supported
dtypes` section in the function documentation and the function will raise an `ArgumentError` if
a series with an invalid dtype is used.
"""
alias __MODULE__, as: Series
alias Kernel, as: K
import Explorer.Shared, only: [impl!: 1, check_types!: 1, cast_numerics: 2]
import Nx.Defn.Kernel, only: [keyword!: 2]
import Kernel, except: [length: 1, and: 2]
@type data :: Explorer.Backend.Series.t()
@type dtype :: :float | :integer | :boolean | :string | :date | :datetime
@type t :: %Series{data: data, dtype: dtype}
@enforce_keys [:data, :dtype]
defstruct [:data, :dtype]
@behaviour Access
@impl true
def fetch(series, idx) when is_integer(idx), do: {:ok, get(series, idx)}
def fetch(series, indices) when is_list(indices), do: {:ok, take(series, indices)}
def fetch(series, %Range{} = range), do: {:ok, take(series, Enum.to_list(range))}
@impl true
def pop(series, idx) when is_integer(idx) do
mask = 0..(length(series) - 1) |> Enum.map(&(&1 != idx)) |> from_list()
value = get(series, idx)
series = filter(series, mask)
{value, series}
end
def pop(series, indices) when is_list(indices) do
mask = 0..(length(series) - 1) |> Enum.map(&(&1 not in indices)) |> from_list()
value = take(series, indices)
series = filter(series, mask)
{value, series}
end
def pop(series, %Range{} = range) do
mask = 0..(length(series) - 1) |> Enum.map(&(&1 not in range)) |> from_list()
value = take(series, Enum.to_list(range))
series = filter(series, mask)
{value, series}
end
@impl true
def get_and_update(series, idx, fun) when is_integer(idx) do
value = get(series, idx)
{current_value, new_value} = fun.(value)
new_data = series |> to_list() |> List.replace_at(idx, new_value) |> from_list()
{current_value, new_data}
end
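# The Access callbacks above enable bracket syntax on series (illustrative):
#
#   s = Explorer.Series.from_list([10, 20, 30])
#   s[0]       #=> 10
#   s[[0, 2]]  #=> a series of [10, 30]
#   s[0..1]    #=> a series of [10, 20]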
# Conversion
@doc """
Creates a new series from a list.
The list must consist of a single data type and nils only; however, the list may not only
consist of nils.
## Options
* `:backend` - The backend to allocate the series on.
## Examples
Explorer will infer the type from the values in the list.
iex> Explorer.Series.from_list([1, 2, 3])
#Explorer.Series<
integer[3]
[1, 2, 3]
>
Series are nullable, so you may also include nils.
iex> Explorer.Series.from_list([1.0, nil, 2.5, 3.1])
#Explorer.Series<
float[4]
[1.0, nil, 2.5, 3.1]
>
A mix of integers and floats will be downcasted to a float.
iex> Explorer.Series.from_list([1, 2.0])
#Explorer.Series<
float[2]
[1.0, 2.0]
>
Mixing non-numeric data types will raise an ArgumentError.
iex> Explorer.Series.from_list([1, "a"])
** (ArgumentError) cannot make a series from mismatched types - the value "a" does not match inferred dtype integer
Trying to create a "nil" series will result in an ArgumentError exception.
iex> Explorer.Series.from_list([nil, nil])
** (ArgumentError) cannot make a series from a list of all nils
"""
@spec from_list(list :: list(), opts :: Keyword.t()) :: Series.t()
def from_list(list, opts \\ []) do
backend = backend_from_options!(opts)
type = check_types!(list)
{list, type} = cast_numerics(list, type)
backend.from_list(list, type)
end
@doc """
Converts a series to a list.
## Examples
iex> series = Explorer.Series.from_list([1, 2, 3])
iex> Explorer.Series.to_list(series)
[1, 2, 3]
"""
@spec to_list(series :: Series.t()) :: list()
def to_list(series), do: apply_impl(series, :to_list)
@doc """
Converts a `t:Nx.Tensor.t/0` to a series.
## Examples
iex> tensor = Nx.tensor([1, 2, 3])
iex> Explorer.Series.from_tensor(tensor)
#Explorer.Series<
integer[3]
[1, 2, 3]
>
iex> tensor = Nx.tensor([1.0, 2.0, 3.0])
iex> Explorer.Series.from_tensor(tensor)
#Explorer.Series<
float[3]
[1.0, 2.0, 3.0]
>
"""
@spec from_tensor(tensor :: Nx.Tensor.t(), opts :: Keyword.t()) :: Series.t()
def from_tensor(tensor, opts \\ []) do
backend = backend_from_options!(opts)
type =
case Nx.type(tensor) do
{t, _} when t in [:s, :u] -> :integer
{t, _} when t in [:f, :bf] -> :float
end
tensor |> Nx.to_flat_list() |> backend.from_list(type)
end
@doc """
Converts a series to a `t:Nx.Tensor.t/0`.
Options are passed directly to `Nx.tensor/2`.
## Supported dtypes
* `:float`
* `:integer`
## Examples
iex> s = Explorer.Series.from_list([1, 2, 3])
iex> Explorer.Series.to_tensor(s)
#Nx.Tensor<
s64[3]
[1, 2, 3]
>
Tensor options can be passed directly to `to_tensor/2`.
iex> s = Explorer.Series.from_list([1, 2, 3])
iex> Explorer.Series.to_tensor(s, names: [:y], type: {:f, 64})
#Nx.Tensor<
f64[y: 3]
[1.0, 2.0, 3.0]
>
"""
@spec to_tensor(series :: Series.t(), tensor_opts :: Keyword.t()) :: Nx.Tensor.t()
def to_tensor(series, tensor_opts \\ []), do: series |> to_list() |> Nx.tensor(tensor_opts)
@doc """
Cast the series to another type.
## Examples
iex> s = Explorer.Series.from_list([1, 2, 3])
iex> Explorer.Series.cast(s, :string)
#Explorer.Series<
string[3]
["1", "2", "3"]
>
`cast/2` will return the series as a no-op if you try to cast to the same dtype.
iex> s = Explorer.Series.from_list([1, 2, 3])
iex> Explorer.Series.cast(s, :integer)
#Explorer.Series<
integer[3]
[1, 2, 3]
>
"""
@spec cast(series :: Series.t(), dtype :: dtype()) :: Series.t()
def cast(%Series{dtype: dtype} = series, dtype), do: series
def cast(series, dtype), do: apply_impl(series, :cast, [dtype])
# Introspection
@doc """
Returns the data type of the series.
A series can be of the following data types:
* `:float` - 64-bit floating point number
* `:integer` - 64-bit signed integer
* `:boolean` - Boolean
* `:string` - UTF-8 encoded binary
* `:date` - Date type that unwraps to `Elixir.Date`
* `:datetime` - DateTime type that unwraps to `Elixir.NaiveDateTime`
## Examples
iex> s = Explorer.Series.from_list([1, 2, 3])
iex> Explorer.Series.dtype(s)
:integer
iex> s = Explorer.Series.from_list(["a", nil, "b", "c"])
iex> Explorer.Series.dtype(s)
:string
"""
@spec dtype(series :: Series.t()) :: dtype()
def dtype(%Series{dtype: dtype}), do: dtype
@doc """
Returns the length of the series.
## Examples
iex> s = Explorer.Series.from_list([~D[1999-12-31], ~D[1989-01-01]])
iex> Explorer.Series.length(s)
2
"""
@spec length(series :: Series.t()) :: integer()
def length(series), do: apply_impl(series, :length)
# Slice and dice
@doc """
Returns the first N elements of the series.
## Examples
iex> s = 1..100 |> Enum.to_list() |> Explorer.Series.from_list()
iex> Explorer.Series.head(s)
#Explorer.Series<
integer[10]
[1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
>
"""
@spec head(series :: Series.t(), n_elements :: integer()) :: Series.t()
def head(series, n_elements \\ 10), do: apply_impl(series, :head, [n_elements])
@doc """
Returns the last N elements of the series.
## Examples
iex> s = 1..100 |> Enum.to_list() |> Explorer.Series.from_list()
iex> Explorer.Series.tail(s)
#Explorer.Series<
integer[10]
[91, 92, 93, 94, 95, 96, 97, 98, 99, 100]
>
"""
@spec tail(series :: Series.t(), n_elements :: integer()) :: Series.t()
def tail(series, n_elements \\ 10), do: apply_impl(series, :tail, [n_elements])
@doc """
Returns the first element of the series.
## Examples
iex> s = 1..100 |> Enum.to_list() |> Explorer.Series.from_list()
iex> Explorer.Series.first(s)
1
"""
@spec first(series :: Series.t()) :: any()
def first(series), do: series[0]
@doc """
Returns the last element of the series.
## Examples
iex> s = 1..100 |> Enum.to_list() |> Explorer.Series.from_list()
iex> Explorer.Series.last(s)
100
"""
@spec last(series :: Series.t()) :: any()
def last(series), do: series[-1]
@doc """
Returns a random sample of the series.
If given an integer as the second argument, it will return N samples. If given a float, it will
return that proportion of the series.
Can sample with or without replacement.
## Examples
iex> s = 1..100 |> Enum.to_list() |> Explorer.Series.from_list()
iex> Explorer.Series.sample(s, 10, seed: 100)
#Explorer.Series<
integer[10]
[72, 33, 15, 4, 16, 49, 23, 96, 45, 47]
>
iex> s = 1..100 |> Enum.to_list() |> Explorer.Series.from_list()
iex> Explorer.Series.sample(s, 0.05, seed: 100)
#Explorer.Series<
integer[5]
[68, 24, 6, 8, 36]
>
"""
@spec sample(series :: Series.t(), n_or_frac :: number(), opts :: Keyword.t()) ::
Series.t()
def sample(series, n_or_frac, opts \\ [])
def sample(series, n, opts) when is_integer(n) do
opts = keyword!(opts, with_replacement?: false, seed: Enum.random(1..1_000_000_000_000))
length = length(series)
case {n > length, opts[:with_replacement?]} do
{true, false} ->
raise ArgumentError,
"in order to sample more elements than are in the series (#{length}), sampling " <>
"`with_replacement?` must be true"
_ ->
:ok
end
apply_impl(series, :sample, [n, opts[:with_replacement?], opts[:seed]])
end
def sample(series, frac, opts) when is_float(frac) do
length = length(series)
n = round(frac * length)
sample(series, n, opts)
end
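# Oversampling requires replacement (illustrative):
#
#   s = Explorer.Series.from_list([1, 2, 3])
#   Explorer.Series.sample(s, 5, with_replacement?: true)  # ok: five draws
#   Explorer.Series.sample(s, 5)                            # raises ArgumentError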
@doc """
Takes every *n*th value in this series, returned as a new series.
## Examples
iex> s = 1..10 |> Enum.to_list() |> Explorer.Series.from_list()
iex> s |> Explorer.Series.take_every(2)
#Explorer.Series<
integer[5]
[1, 3, 5, 7, 9]
>
If *n* is bigger than the length of the series, the result is a new series with only the first value of the supplied series.
iex> s = 1..10 |> Enum.to_list() |> Explorer.Series.from_list()
iex> s |> Explorer.Series.take_every(20)
#Explorer.Series<
integer[1]
[1]
>
"""
@spec take_every(series :: Series.t(), every_n :: integer()) :: Series.t()
def take_every(series, every_n), do: apply_impl(series, :take_every, [every_n])
@doc """
Filters a series with a mask or callback.
## Examples
iex> s1 = Explorer.Series.from_list([1,2,3])
iex> s2 = Explorer.Series.from_list([true, false, true])
iex> Explorer.Series.filter(s1, s2)
#Explorer.Series<
integer[2]
[1, 3]
>
iex> s1 = Explorer.Series.from_list([1,2,1])
iex> s2 = Explorer.Series.from_list([1])
iex> Explorer.Series.filter(s1, &Explorer.Series.equal(&1, s2))
#Explorer.Series<
integer[2]
[1, 1]
>
"""
@spec filter(series :: Series.t(), mask :: Series.t()) :: Series.t()
def filter(series, %Series{} = mask), do: apply_impl(series, :filter, [mask])
@spec filter(series :: Series.t(), fun :: function()) :: Series.t()
def filter(series, fun) when is_function(fun), do: apply_impl(series, :filter, [fun])
@doc """
Returns a slice of the series, with `length` elements starting at `offset`.
## Examples
iex> s = Explorer.Series.from_list([1, 2, 3, 4, 5])
iex> Explorer.Series.slice(s, 1, 2)
#Explorer.Series<
integer[2]
[2, 3]
>
Negative offsets count from the end of the series.
iex> s = Explorer.Series.from_list([1, 2, 3, 4, 5])
iex> Explorer.Series.slice(s, -3, 2)
#Explorer.Series<
integer[2]
[3, 4]
>
If the length would run past the end of the series, the result may be shorter than the length.
iex> s = Explorer.Series.from_list([1, 2, 3, 4, 5])
iex> Explorer.Series.slice(s, -3, 4)
#Explorer.Series<
integer[3]
[3, 4, 5]
>
"""
@spec slice(series :: Series.t(), offset :: integer(), length :: integer()) :: Series.t()
def slice(series, offset, length), do: apply_impl(series, :slice, [offset, length])
@doc """
Returns the elements at the given indices as a new series.
## Examples
iex> s = Explorer.Series.from_list(["a", "b", "c"])
iex> Explorer.Series.take(s, [0, 2])
#Explorer.Series<
string[2]
["a", "c"]
>
"""
@spec take(series :: Series.t(), indices :: [integer()]) :: Series.t()
def take(series, indices), do: apply_impl(series, :take, [indices])
@doc """
Returns the value of the series at the given index.
## Examples
iex> s = Explorer.Series.from_list(["a", "b", "c"])
iex> Explorer.Series.get(s, 2)
"c"
iex> s = Explorer.Series.from_list(["a", "b", "c"])
iex> Explorer.Series.get(s, 4)
** (ArgumentError) index 4 out of bounds for series of length 3
"""
@spec get(series :: Series.t(), idx :: integer()) :: any()
def get(series, idx) do
s_len = length(series)
if idx > s_len - 1 || idx < -s_len,
do: raise(ArgumentError, "index #{idx} out of bounds for series of length #{s_len}")
apply_impl(series, :get, [idx])
end
@doc """
Concatenate one or more series.
The dtypes must match unless all are numeric, in which case all series will be downcast to float.
## Examples
iex> s1 = Explorer.Series.from_list([1, 2, 3])
iex> s2 = Explorer.Series.from_list([4, 5, 6])
iex> Explorer.Series.concat([s1, s2])
#Explorer.Series<
integer[6]
[1, 2, 3, 4, 5, 6]
>
iex> s1 = Explorer.Series.from_list([1, 2, 3])
iex> s2 = Explorer.Series.from_list([4.0, 5.0, 6.4])
iex> Explorer.Series.concat(s1, s2)
#Explorer.Series<
float[6]
[1.0, 2.0, 3.0, 4.0, 5.0, 6.4]
>
"""
def concat([%Series{} = h | t] = _series) do
Enum.reduce(t, h, &concat_reducer/2)
end
@doc """
Concatenate one or more series.
`concat(s1, s2)` is equivalent to `concat([s1, s2])`.
"""
def concat(%Series{} = s1, %Series{} = s2),
do: concat([s1, s2])
def concat(%Series{} = s1, [%Series{} | _] = series),
do: concat([s1 | series])
defp concat_reducer(%Series{dtype: dtype} = s, %Series{dtype: dtype} = acc),
do: apply_impl(acc, :concat, [s])
defp concat_reducer(%Series{dtype: s_dtype} = s, %Series{dtype: acc_dtype} = acc)
when K.and(s_dtype == :float, acc_dtype == :integer),
do: acc |> cast(:float) |> apply_impl(:concat, [s])
defp concat_reducer(%Series{dtype: s_dtype} = s, %Series{dtype: acc_dtype} = acc)
when K.and(s_dtype == :integer, acc_dtype == :float),
do: apply_impl(acc, :concat, [cast(s, :float)])
defp concat_reducer(%Series{dtype: dtype1}, %Series{dtype: dtype2}),
do: raise(ArgumentError, "dtypes must match, found #{dtype1} and #{dtype2}")
# Aggregation
@doc """
Gets the sum of the series.
## Supported dtypes
* `:integer`
* `:float`
* `:boolean`
## Examples
iex> s = Explorer.Series.from_list([1, 2, nil, 3])
iex> Explorer.Series.sum(s)
6
iex> s = Explorer.Series.from_list([1.0, 2.0, nil, 3.0])
iex> Explorer.Series.sum(s)
6.0
iex> s = Explorer.Series.from_list([true, false, true])
iex> Explorer.Series.sum(s)
2
iex> s = Explorer.Series.from_list([~D[2021-01-01], ~D[1999-12-31]])
iex> Explorer.Series.sum(s)
** (ArgumentError) Explorer.Series.sum/1 not implemented for dtype :date. Valid dtypes are [:integer, :float, :boolean].
"""
@spec sum(series :: Series.t()) :: number()
def sum(%Series{dtype: dtype} = series) when dtype in [:integer, :float, :boolean],
do: apply_impl(series, :sum)
def sum(%Series{dtype: dtype}), do: dtype_error("sum/1", dtype, [:integer, :float, :boolean])
@doc """
Gets the minimum value of the series.
## Supported dtypes
* `:integer`
* `:float`
* `:date`
* `:datetime`
## Examples
iex> s = Explorer.Series.from_list([1, 2, nil, 3])
iex> Explorer.Series.min(s)
1
iex> s = Explorer.Series.from_list([1.0, 2.0, nil, 3.0])
iex> Explorer.Series.min(s)
1.0
iex> s = Explorer.Series.from_list([~D[2021-01-01], ~D[1999-12-31]])
iex> Explorer.Series.min(s)
~D[1999-12-31]
iex> s = Explorer.Series.from_list([~N[2021-01-01 00:00:00], ~N[1999-12-31 00:00:00]])
iex> Explorer.Series.min(s)
~N[1999-12-31 00:00:00.000]
iex> s = Explorer.Series.from_list(["a", "b", "c"])
iex> Explorer.Series.min(s)
** (ArgumentError) Explorer.Series.min/1 not implemented for dtype :string. Valid dtypes are [:integer, :float, :date, :datetime].
"""
@spec min(series :: Series.t()) :: number() | Date.t() | NaiveDateTime.t()
def min(%Series{dtype: dtype} = series) when dtype in [:integer, :float, :date, :datetime],
do: apply_impl(series, :min)
def min(%Series{dtype: dtype}),
do: dtype_error("min/1", dtype, [:integer, :float, :date, :datetime])
@doc """
Gets the maximum value of the series.
## Supported dtypes
* `:integer`
* `:float`
* `:date`
* `:datetime`
## Examples
iex> s = Explorer.Series.from_list([1, 2, nil, 3])
iex> Explorer.Series.max(s)
3
iex> s = Explorer.Series.from_list([1.0, 2.0, nil, 3.0])
iex> Explorer.Series.max(s)
3.0
iex> s = Explorer.Series.from_list([~D[2021-01-01], ~D[1999-12-31]])
iex> Explorer.Series.max(s)
~D[2021-01-01]
iex> s = Explorer.Series.from_list([~N[2021-01-01 00:00:00], ~N[1999-12-31 00:00:00]])
iex> Explorer.Series.max(s)
~N[2021-01-01 00:00:00.000]
iex> s = Explorer.Series.from_list(["a", "b", "c"])
iex> Explorer.Series.max(s)
** (ArgumentError) Explorer.Series.max/1 not implemented for dtype :string. Valid dtypes are [:integer, :float, :date, :datetime].
"""
@spec max(series :: Series.t()) :: number() | Date.t() | NaiveDateTime.t()
def max(%Series{dtype: dtype} = series) when dtype in [:integer, :float, :date, :datetime],
do: apply_impl(series, :max)
def max(%Series{dtype: dtype}),
do: dtype_error("max/1", dtype, [:integer, :float, :date, :datetime])
@doc """
Gets the mean value of the series.
## Supported dtypes
* `:integer`
* `:float`
## Examples
iex> s = Explorer.Series.from_list([1, 2, nil, 3])
iex> Explorer.Series.mean(s)
2.0
iex> s = Explorer.Series.from_list([1.0, 2.0, nil, 3.0])
iex> Explorer.Series.mean(s)
2.0
iex> s = Explorer.Series.from_list([~D[2021-01-01], ~D[1999-12-31]])
iex> Explorer.Series.mean(s)
** (ArgumentError) Explorer.Series.mean/1 not implemented for dtype :date. Valid dtypes are [:integer, :float].
"""
@spec mean(series :: Series.t()) :: float()
def mean(%Series{dtype: dtype} = series) when dtype in [:integer, :float],
do: apply_impl(series, :mean)
def mean(%Series{dtype: dtype}), do: dtype_error("mean/1", dtype, [:integer, :float])
@doc """
Gets the median value of the series.
## Supported dtypes
* `:integer`
* `:float`
## Examples
iex> s = Explorer.Series.from_list([1, 2, nil, 3])
iex> Explorer.Series.median(s)
2.0
iex> s = Explorer.Series.from_list([1.0, 2.0, nil, 3.0])
iex> Explorer.Series.median(s)
2.0
iex> s = Explorer.Series.from_list([~D[2021-01-01], ~D[1999-12-31]])
iex> Explorer.Series.median(s)
** (ArgumentError) Explorer.Series.median/1 not implemented for dtype :date. Valid dtypes are [:integer, :float].
"""
@spec median(series :: Series.t()) :: float()
def median(%Series{dtype: dtype} = series) when dtype in [:integer, :float],
do: apply_impl(series, :median)
def median(%Series{dtype: dtype}), do: dtype_error("median/1", dtype, [:integer, :float])
@doc """
Gets the variance of the series.
## Supported dtypes
* `:integer`
* `:float`
## Examples
iex> s = Explorer.Series.from_list([1, 2, nil, 3])
iex> Explorer.Series.var(s)
1.0
iex> s = Explorer.Series.from_list([1.0, 2.0, nil, 3.0])
iex> Explorer.Series.var(s)
1.0
iex> s = Explorer.Series.from_list([~N[2021-01-01 00:00:00], ~N[1999-12-31 00:00:00]])
iex> Explorer.Series.var(s)
** (ArgumentError) Explorer.Series.var/1 not implemented for dtype :datetime. Valid dtypes are [:integer, :float].
"""
@spec var(series :: Series.t()) :: float()
def var(%Series{dtype: dtype} = series) when dtype in [:integer, :float],
do: apply_impl(series, :var)
def var(%Series{dtype: dtype}), do: dtype_error("var/1", dtype, [:integer, :float])
@doc """
Gets the standard deviation of the series.
## Supported dtypes
* `:integer`
* `:float`
## Examples
iex> s = Explorer.Series.from_list([1, 2, nil, 3])
iex> Explorer.Series.std(s)
1.0
iex> s = Explorer.Series.from_list([1.0, 2.0, nil, 3.0])
iex> Explorer.Series.std(s)
1.0
iex> s = Explorer.Series.from_list(["a", "b", "c"])
iex> Explorer.Series.std(s)
** (ArgumentError) Explorer.Series.std/1 not implemented for dtype :string. Valid dtypes are [:integer, :float].
"""
@spec std(series :: Series.t()) :: float()
def std(%Series{dtype: dtype} = series) when dtype in [:integer, :float],
do: apply_impl(series, :std)
def std(%Series{dtype: dtype}), do: dtype_error("std/1", dtype, [:integer, :float])
@doc """
Gets the given quantile of the series.
## Supported dtypes
* `:integer`
* `:float`
* `:date`
* `:datetime`
## Examples
iex> s = Explorer.Series.from_list([1, 2, nil, 3])
iex> Explorer.Series.quantile(s, 0.2)
1
iex> s = Explorer.Series.from_list([1.0, 2.0, nil, 3.0])
iex> Explorer.Series.quantile(s, 0.5)
2.0
iex> s = Explorer.Series.from_list([~D[2021-01-01], ~D[1999-12-31]])
iex> Explorer.Series.quantile(s, 0.5)
~D[2021-01-01]
iex> s = Explorer.Series.from_list([~N[2021-01-01 00:00:00], ~N[1999-12-31 00:00:00]])
iex> Explorer.Series.quantile(s, 0.5)
~N[2021-01-01 00:00:00.000]
iex> s = Explorer.Series.from_list([true, false, true])
iex> Explorer.Series.quantile(s, 0.5)
** (ArgumentError) Explorer.Series.quantile/2 not implemented for dtype :boolean. Valid dtypes are [:integer, :float, :date, :datetime].
"""
@spec quantile(series :: Series.t(), quantile :: float()) :: any()
def quantile(%Series{dtype: dtype} = series, quantile)
when dtype in [:integer, :float, :date, :datetime],
do: apply_impl(series, :quantile, [quantile])
def quantile(%Series{dtype: dtype}, _),
do: dtype_error("quantile/2", dtype, [:integer, :float, :date, :datetime])
# Cumulative
@doc """
Calculates the cumulative maximum of the series.
Optionally, can fill in reverse.
Does not fill nil values. See `fill_missing/2`.
## Supported dtypes
* `:integer`
* `:float`
* `:date`
* `:datetime`
## Examples
iex> s = [1, 2, 3, 4] |> Explorer.Series.from_list()
iex> Explorer.Series.cum_max(s)
#Explorer.Series<
integer[4]
[1, 2, 3, 4]
>
iex> s = [1, 2, nil, 4] |> Explorer.Series.from_list()
iex> Explorer.Series.cum_max(s)
#Explorer.Series<
integer[4]
[1, 2, nil, 4]
>
"""
@spec cum_max(series :: Series.t(), reverse? :: boolean()) :: Series.t()
def cum_max(series, reverse? \\ false)
def cum_max(%Series{dtype: dtype} = series, reverse?)
when dtype in [:integer, :float, :date, :datetime],
do: apply_impl(series, :cum_max, [reverse?])
def cum_max(%Series{dtype: dtype}, _),
do: dtype_error("cum_max/2", dtype, [:integer, :float, :date, :datetime])
@doc """
Calculates the cumulative minimum of the series.
Optionally, can fill in reverse.
Does not fill nil values. See `fill_missing/2`.
## Supported dtypes
* `:integer`
* `:float`
* `:date`
* `:datetime`
## Examples
iex> s = [1, 2, 3, 4] |> Explorer.Series.from_list()
iex> Explorer.Series.cum_min(s)
#Explorer.Series<
integer[4]
[1, 1, 1, 1]
>
iex> s = [1, 2, nil, 4] |> Explorer.Series.from_list()
iex> Explorer.Series.cum_min(s)
#Explorer.Series<
integer[4]
[1, 1, nil, 1]
>
"""
@spec cum_min(series :: Series.t(), reverse? :: boolean()) :: Series.t()
def cum_min(series, reverse? \\ false)
def cum_min(%Series{dtype: dtype} = series, reverse?)
when dtype in [:integer, :float, :date, :datetime],
do: apply_impl(series, :cum_min, [reverse?])
def cum_min(%Series{dtype: dtype}, _),
do: dtype_error("cum_min/2", dtype, [:integer, :float, :date, :datetime])
@doc """
Calculates the cumulative sum of the series.
Optionally, can fill in reverse.
Does not fill nil values. See `fill_missing/2`.
## Supported dtypes
* `:integer`
* `:float`
* `:boolean`
## Examples
iex> s = [1, 2, 3, 4] |> Explorer.Series.from_list()
iex> Explorer.Series.cum_sum(s)
#Explorer.Series<
integer[4]
[1, 3, 6, 10]
>
iex> s = [1, 2, nil, 4] |> Explorer.Series.from_list()
iex> Explorer.Series.cum_sum(s)
#Explorer.Series<
integer[4]
[1, 3, nil, 7]
>
"""
@spec cum_sum(series :: Series.t(), reverse? :: boolean()) :: Series.t()
def cum_sum(series, reverse? \\ false)
def cum_sum(%Series{dtype: dtype} = series, reverse?)
when dtype in [:integer, :float, :boolean],
do: apply_impl(series, :cum_sum, [reverse?])
def cum_sum(%Series{dtype: dtype}, _),
do: dtype_error("cum_sum/2", dtype, [:integer, :float])
# Local minima/maxima
@doc """
Returns a boolean mask with `true` where the 'peaks' (local maxima or minima, `:max` by default) are.
## Supported dtypes
* `:integer`
* `:float`
* `:date`
* `:datetime`
## Examples
iex> s = Explorer.Series.from_list([1, 2, 4, 1, 4])
iex> Explorer.Series.peaks(s)
#Explorer.Series<
boolean[5]
[false, false, true, false, true]
>
"""
@spec peaks(series :: Series.t(), max_or_min :: :max | :min) :: Series.t()
def peaks(series, max_or_min \\ :max)
def peaks(%Series{dtype: dtype} = series, max_or_min)
when dtype in [:integer, :float, :date, :datetime],
do: apply_impl(series, :peaks, [max_or_min])
def peaks(%Series{dtype: dtype}, _),
do: dtype_error("peaks/2", dtype, [:integer, :float, :date, :datetime])
# Arithmetic
@doc """
Adds right to left, element-wise.
When mixing floats and integers, the resulting series will have dtype `:float`.
## Supported dtypes
* `:integer`
* `:float`
## Examples
iex> s1 = Explorer.Series.from_list([1, 2, 3])
iex> s2 = Explorer.Series.from_list([4, 5, 6])
iex> Explorer.Series.add(s1, s2)
#Explorer.Series<
integer[3]
[5, 7, 9]
>
"""
@spec add(left :: Series.t(), right :: Series.t() | number()) :: Series.t()
def add(%Series{dtype: left_dtype} = left, %Series{dtype: right_dtype} = right)
when K.and(left_dtype in [:integer, :float], right_dtype in [:integer, :float]),
do: apply_impl(left, :add, [right])
def add(%Series{dtype: left_dtype}, %Series{dtype: right_dtype}),
do: dtype_mismatch_error("add/2", left_dtype, right_dtype)
def add(%Series{dtype: dtype} = left, right)
when K.and(dtype in [:integer, :float], is_number(right)),
do: apply_impl(left, :add, [right])
def add(%Series{dtype: dtype}, _), do: dtype_error("add/2", dtype, [:integer, :float])
@doc """
Subtracts right from left, element-wise.
When mixing floats and integers, the resulting series will have dtype `:float`.
## Supported dtypes
* `:integer`
* `:float`
## Examples
iex> s1 = Explorer.Series.from_list([1, 2, 3])
iex> s2 = Explorer.Series.from_list([4, 5, 6])
iex> Explorer.Series.subtract(s1, s2)
#Explorer.Series<
integer[3]
[-3, -3, -3]
>
"""
@spec subtract(left :: Series.t(), right :: Series.t() | number()) :: Series.t()
def subtract(%Series{dtype: left_dtype} = left, %Series{dtype: right_dtype} = right)
when K.and(left_dtype in [:integer, :float], right_dtype in [:integer, :float]),
do: apply_impl(left, :subtract, [right])
def subtract(%Series{dtype: left_dtype}, %Series{dtype: right_dtype}),
do: dtype_mismatch_error("subtract/2", left_dtype, right_dtype)
def subtract(%Series{dtype: dtype} = left, right)
when K.and(dtype in [:integer, :float], is_number(right)),
do: apply_impl(left, :subtract, [right])
def subtract(%Series{dtype: dtype}, _), do: dtype_error("subtract/2", dtype, [:integer, :float])
@doc """
Multiplies left and right, element-wise.
When mixing floats and integers, the resulting series will have dtype `:float`.
## Supported dtypes
* `:integer`
* `:float`
## Examples
iex> s1 = 1..10 |> Enum.to_list() |> Explorer.Series.from_list()
iex> s2 = 11..20 |> Enum.to_list() |> Explorer.Series.from_list()
iex> Explorer.Series.multiply(s1, s2)
#Explorer.Series<
integer[10]
[11, 24, 39, 56, 75, 96, 119, 144, 171, 200]
>
iex> s1 = 1..5 |> Enum.to_list() |> Explorer.Series.from_list()
iex> Explorer.Series.multiply(s1, 2)
#Explorer.Series<
integer[5]
[2, 4, 6, 8, 10]
>
"""
@spec multiply(left :: Series.t(), right :: Series.t() | number()) :: Series.t()
def multiply(%Series{dtype: left_dtype} = left, %Series{dtype: right_dtype} = right)
when K.and(left_dtype in [:integer, :float], right_dtype in [:integer, :float]),
do: apply_impl(left, :multiply, [right])
def multiply(%Series{dtype: left_dtype}, %Series{dtype: right_dtype}),
do: dtype_mismatch_error("multiply/2", left_dtype, right_dtype)
def multiply(%Series{dtype: dtype} = left, right)
when K.and(dtype in [:integer, :float], is_number(right)),
do: apply_impl(left, :multiply, [right])
def multiply(%Series{dtype: dtype}, _), do: dtype_error("multiply/2", dtype, [:integer, :float])
@doc """
Divides left by right, element-wise.
When mixing floats and integers, the resulting series will have dtype `:float`.
## Supported dtypes
* `:integer`
* `:float`
## Examples
iex> s1 = [10, 10, 10] |> Explorer.Series.from_list()
iex> s2 = [2, 2, 2] |> Explorer.Series.from_list()
iex> Explorer.Series.divide(s1, s2)
#Explorer.Series<
integer[3]
[5, 5, 5]
>
iex> s1 = [10, 10, 10] |> Explorer.Series.from_list()
iex> Explorer.Series.divide(s1, 2)
#Explorer.Series<
integer[3]
[5, 5, 5]
>
"""
@spec divide(left :: Series.t(), right :: Series.t() | number()) :: Series.t()
def divide(%Series{dtype: left_dtype} = left, %Series{dtype: right_dtype} = right)
when K.and(left_dtype in [:integer, :float], right_dtype in [:integer, :float]),
do: apply_impl(left, :divide, [right])
def divide(%Series{dtype: left_dtype}, %Series{dtype: right_dtype}),
do: dtype_mismatch_error("divide/2", left_dtype, right_dtype)
def divide(%Series{dtype: dtype} = left, right)
when K.and(dtype in [:integer, :float], is_number(right)),
do: apply_impl(left, :divide, [right])
def divide(%Series{dtype: dtype}, _), do: dtype_error("divide/2", dtype, [:integer, :float])
@doc """
Raises a numeric series to the power of the exponent.
## Supported dtypes
* `:integer`
* `:float`
## Examples
iex> s = [8, 16, 32] |> Explorer.Series.from_list()
iex> Explorer.Series.pow(s, 2.0)
#Explorer.Series<
float[3]
[64.0, 256.0, 1024.0]
>
iex> s = [2, 4, 6] |> Explorer.Series.from_list()
iex> Explorer.Series.pow(s, 3)
#Explorer.Series<
integer[3]
[8, 64, 216]
>
iex> s = [2, 4, 6] |> Explorer.Series.from_list()
iex> Explorer.Series.pow(s, -3.0)
#Explorer.Series<
float[3]
[0.125, 0.015625, 0.004629629629629629]
>
iex> s = [1.0, 2.0, 3.0] |> Explorer.Series.from_list()
iex> s |> Explorer.Series.pow(3.0)
#Explorer.Series<
float[3]
[1.0, 8.0, 27.0]
>
iex> s = [2.0, 4.0, 6.0] |> Explorer.Series.from_list()
iex> s |> Explorer.Series.pow(2)
#Explorer.Series<
float[3]
[4.0, 16.0, 36.0]
>
"""
@spec pow(series :: Series.t(), exponent :: number()) :: Series.t()
def pow(%Series{dtype: dtype} = series, exponent) when dtype in [:integer, :float],
do: apply_impl(series, :pow, [exponent])
def pow(%Series{dtype: dtype}, _), do: dtype_error("pow/2", dtype, [:integer, :float])
# Comparisons
@doc """
Returns boolean mask of `left == right`, element-wise.
## Examples
iex> s1 = Explorer.Series.from_list([1, 2, 3])
iex> s2 = Explorer.Series.from_list([1, 2, 4])
iex> Explorer.Series.equal(s1, s2)
#Explorer.Series<
boolean[3]
[true, true, false]
>
iex> s = Explorer.Series.from_list([1, 2, 3])
iex> Explorer.Series.equal(s, 1)
#Explorer.Series<
boolean[3]
[true, false, false]
>
iex> s = Explorer.Series.from_list([true, true, false])
iex> Explorer.Series.equal(s, true)
#Explorer.Series<
boolean[3]
[true, true, false]
>
iex> s = Explorer.Series.from_list(["a", "b", "c"])
iex> Explorer.Series.equal(s, "a")
#Explorer.Series<
boolean[3]
[true, false, false]
>
iex> s = Explorer.Series.from_list([~D[2021-01-01], ~D[1999-12-31]])
iex> Explorer.Series.equal(s, ~D[1999-12-31])
#Explorer.Series<
boolean[2]
[false, true]
>
iex> s = Explorer.Series.from_list([~N[2022-01-01 00:00:00], ~N[2022-01-01 23:00:00]])
iex> Explorer.Series.equal(s, ~N[2022-01-01 00:00:00])
#Explorer.Series<
boolean[2]
[true, false]
>
"""
@spec equal(
left :: Series.t(),
right :: Series.t() | number() | Date.t() | NaiveDateTime.t() | boolean() | String.t()
) :: Series.t()
def equal(%Series{dtype: dtype} = left, %Series{dtype: dtype} = right),
do: apply_impl(left, :eq, [right])
def equal(%Series{dtype: dtype} = left, right)
when K.and(dtype in [:integer, :float], is_number(right)),
do: apply_impl(left, :eq, [right])
def equal(%Series{dtype: :date} = left, %Date{} = right),
do: apply_impl(left, :eq, [right])
def equal(%Series{dtype: :datetime} = left, %NaiveDateTime{} = right),
do: apply_impl(left, :eq, [right])
def equal(%Series{dtype: :string} = left, right) when is_binary(right),
do: apply_impl(left, :eq, [right])
def equal(%Series{dtype: :boolean} = left, right) when is_boolean(right),
do: apply_impl(left, :eq, [right])
@doc """
Returns boolean mask of `left != right`, element-wise.
## Examples
iex> s1 = Explorer.Series.from_list([1, 2, 3])
iex> s2 = Explorer.Series.from_list([1, 2, 4])
iex> Explorer.Series.not_equal(s1, s2)
#Explorer.Series<
boolean[3]
[false, false, true]
>
iex> s = Explorer.Series.from_list([1, 2, 3])
iex> Explorer.Series.not_equal(s, 1)
#Explorer.Series<
boolean[3]
[false, true, true]
>
iex> s = Explorer.Series.from_list([true, true, false])
iex> Explorer.Series.not_equal(s, true)
#Explorer.Series<
boolean[3]
[false, false, true]
>
iex> s = Explorer.Series.from_list(["a", "b", "c"])
iex> Explorer.Series.not_equal(s, "a")
#Explorer.Series<
boolean[3]
[false, true, true]
>
iex> s = Explorer.Series.from_list([~D[2021-01-01], ~D[1999-12-31]])
iex> Explorer.Series.not_equal(s, ~D[1999-12-31])
#Explorer.Series<
boolean[2]
[true, false]
>
iex> s = Explorer.Series.from_list([~N[2022-01-01 00:00:00], ~N[2022-01-01 23:00:00]])
iex> Explorer.Series.not_equal(s, ~N[2022-01-01 00:00:00])
#Explorer.Series<
boolean[2]
[false, true]
>
"""
@spec not_equal(
left :: Series.t(),
right :: Series.t() | number() | Date.t() | NaiveDateTime.t() | boolean() | String.t()
) :: Series.t()
def not_equal(%Series{dtype: dtype} = left, %Series{dtype: dtype} = right),
do: apply_impl(left, :neq, [right])
def not_equal(%Series{dtype: dtype} = left, right)
when K.and(dtype in [:integer, :float], is_number(right)),
do: apply_impl(left, :neq, [right])
def not_equal(%Series{dtype: :date} = left, %Date{} = right),
do: apply_impl(left, :neq, [right])
def not_equal(%Series{dtype: :datetime} = left, %NaiveDateTime{} = right),
do: apply_impl(left, :neq, [right])
def not_equal(%Series{dtype: :string} = left, right) when is_binary(right),
do: apply_impl(left, :neq, [right])
def not_equal(%Series{dtype: :boolean} = left, right) when is_boolean(right),
do: apply_impl(left, :neq, [right])
@doc """
Returns boolean mask of `left > right`, element-wise.
## Supported dtypes
* `:integer`
* `:float`
* `:date`
* `:datetime`
## Examples
iex> s1 = Explorer.Series.from_list([1, 2, 3])
iex> s2 = Explorer.Series.from_list([1, 2, 4])
iex> Explorer.Series.greater(s1, s2)
#Explorer.Series<
boolean[3]
[false, false, false]
>
"""
@spec greater(
left :: Series.t(),
right :: Series.t() | number() | Date.t() | NaiveDateTime.t()
) :: Series.t()
def greater(%Series{dtype: dtype} = left, %Series{dtype: dtype} = right)
when dtype in [:integer, :float, :date, :datetime],
do: apply_impl(left, :gt, [right])
def greater(%Series{dtype: left_dtype} = left, %Series{dtype: right_dtype} = right)
when K.and(left_dtype in [:integer, :float], right_dtype in [:integer, :float]),
do: apply_impl(left, :gt, [right])
def greater(%Series{dtype: dtype} = left, right)
when K.and(dtype in [:integer, :float], is_number(right)),
do: apply_impl(left, :gt, [right])
def greater(%Series{dtype: :date} = left, %Date{} = right),
do: apply_impl(left, :gt, [right])
def greater(%Series{dtype: :datetime} = left, %NaiveDateTime{} = right),
do: apply_impl(left, :gt, [right])
def greater(%Series{dtype: dtype}, _),
do: dtype_error("greater/2", dtype, [:integer, :float, :date, :datetime])
@doc """
Returns boolean mask of `left >= right`, element-wise.
## Supported dtypes
* `:integer`
* `:float`
* `:date`
* `:datetime`
## Examples
iex> s1 = Explorer.Series.from_list([1, 2, 3])
iex> s2 = Explorer.Series.from_list([1, 2, 4])
iex> Explorer.Series.greater_equal(s1, s2)
#Explorer.Series<
boolean[3]
[true, true, false]
>
"""
@spec greater_equal(
left :: Series.t(),
right :: Series.t() | number() | Date.t() | NaiveDateTime.t()
) :: Series.t()
def greater_equal(%Series{dtype: dtype} = left, %Series{dtype: dtype} = right)
when dtype in [:integer, :float, :date, :datetime],
do: apply_impl(left, :gt_eq, [right])
def greater_equal(%Series{dtype: left_dtype} = left, %Series{dtype: right_dtype} = right)
when K.and(left_dtype in [:integer, :float], right_dtype in [:integer, :float]),
do: apply_impl(left, :gt_eq, [right])
def greater_equal(%Series{dtype: dtype} = left, right)
when K.and(dtype in [:integer, :float], is_number(right)),
do: apply_impl(left, :gt_eq, [right])
def greater_equal(%Series{dtype: :date} = left, %Date{} = right),
do: apply_impl(left, :gt_eq, [right])
def greater_equal(%Series{dtype: :datetime} = left, %NaiveDateTime{} = right),
do: apply_impl(left, :gt_eq, [right])
def greater_equal(%Series{dtype: dtype}, _),
do: dtype_error("greater_equal/2", dtype, [:integer, :float, :date, :datetime])
@doc """
Returns boolean mask of `left < right`, element-wise.
## Supported dtypes
* `:integer`
* `:float`
* `:date`
* `:datetime`
## Examples
iex> s1 = Explorer.Series.from_list([1, 2, 3])
iex> s2 = Explorer.Series.from_list([1, 2, 4])
iex> Explorer.Series.less(s1, s2)
#Explorer.Series<
boolean[3]
[false, false, true]
>
"""
@spec less(
left :: Series.t(),
right :: Series.t() | number() | Date.t() | NaiveDateTime.t()
) :: Series.t()
def less(%Series{dtype: dtype} = left, %Series{dtype: dtype} = right)
when dtype in [:integer, :float, :date, :datetime],
do: apply_impl(left, :lt, [right])
def less(%Series{dtype: left_dtype} = left, %Series{dtype: right_dtype} = right)
when K.and(left_dtype in [:integer, :float], right_dtype in [:integer, :float]),
do: apply_impl(left, :lt, [right])
def less(%Series{dtype: dtype} = left, right)
when K.and(dtype in [:integer, :float], is_number(right)),
do: apply_impl(left, :lt, [right])
def less(%Series{dtype: :date} = left, %Date{} = right),
do: apply_impl(left, :lt, [right])
def less(%Series{dtype: :datetime} = left, %NaiveDateTime{} = right),
do: apply_impl(left, :lt, [right])
def less(%Series{dtype: dtype}, _),
do: dtype_error("less/2", dtype, [:integer, :float, :date, :datetime])
@doc """
Returns boolean mask of `left <= right`, element-wise.
## Supported dtypes
* `:integer`
* `:float`
* `:date`
* `:datetime`
## Examples
iex> s1 = Explorer.Series.from_list([1, 2, 3])
iex> s2 = Explorer.Series.from_list([1, 2, 4])
iex> Explorer.Series.less_equal(s1, s2)
#Explorer.Series<
boolean[3]
[true, true, true]
>
"""
@spec less_equal(
left :: Series.t(),
right :: Series.t() | number() | Date.t() | NaiveDateTime.t()
) :: Series.t()
def less_equal(%Series{dtype: dtype} = left, %Series{dtype: dtype} = right)
when dtype in [:integer, :float, :date, :datetime],
do: apply_impl(left, :lt_eq, [right])
def less_equal(%Series{dtype: left_dtype} = left, %Series{dtype: right_dtype} = right)
when K.and(left_dtype in [:integer, :float], right_dtype in [:integer, :float]),
do: apply_impl(left, :lt_eq, [right])
def less_equal(%Series{dtype: dtype} = left, right)
when K.and(dtype in [:integer, :float], is_number(right)),
do: apply_impl(left, :lt_eq, [right])
def less_equal(%Series{dtype: :date} = left, %Date{} = right),
do: apply_impl(left, :lt_eq, [right])
def less_equal(%Series{dtype: :datetime} = left, %NaiveDateTime{} = right),
do: apply_impl(left, :lt_eq, [right])
def less_equal(%Series{dtype: dtype}, _),
do: dtype_error("less_equal/2", dtype, [:integer, :float, :date, :datetime])
@doc """
Returns a boolean mask of `left and right`, element-wise
## Examples
iex> s1 = Explorer.Series.from_list([1, 2, 3])
iex> mask1 = Explorer.Series.greater(s1, 1)
iex> mask2 = Explorer.Series.less(s1, 3)
iex> Explorer.Series.and(mask1, mask2)
#Explorer.Series<
boolean[3]
[false, true, false]
>
"""
def (%Series{} = left) and (%Series{} = right),
do: apply_impl(left, :binary_and, [right])
@doc """
Returns a boolean mask of `left or right`, element-wise
## Examples
iex> s1 = Explorer.Series.from_list([1, 2, 3])
iex> mask1 = Explorer.Series.less(s1, 2)
iex> mask2 = Explorer.Series.greater(s1, 2)
iex> Explorer.Series.or(mask1, mask2)
#Explorer.Series<
boolean[3]
[true, false, true]
>
"""
def (%Series{} = left) or (%Series{} = right),
do: apply_impl(left, :binary_or, [right])
@doc """
Checks equality between two entire series.
## Examples
iex> s1 = Explorer.Series.from_list(["a", "b"])
iex> s2 = Explorer.Series.from_list(["a", "b"])
iex> Explorer.Series.all_equal?(s1, s2)
true
iex> s1 = Explorer.Series.from_list(["a", "b"])
iex> s2 = Explorer.Series.from_list(["a", "c"])
iex> Explorer.Series.all_equal?(s1, s2)
false
iex> s1 = Explorer.Series.from_list(["a", "b"])
iex> s2 = Explorer.Series.from_list([1, 2])
iex> Explorer.Series.all_equal?(s1, s2)
false
"""
def all_equal?(%Series{dtype: dtype} = left, %Series{dtype: dtype} = right),
do: apply_impl(left, :all_equal?, [right])
def all_equal?(%Series{dtype: left_dtype}, %Series{dtype: right_dtype})
when left_dtype != right_dtype,
do: false
# Sort
@doc """
Sorts the series.
## Examples
iex> s = Explorer.Series.from_list([9, 3, 7, 1])
iex> s |> Explorer.Series.sort()
#Explorer.Series<
integer[4]
[1, 3, 7, 9]
>
"""
def sort(series, reverse? \\ false), do: apply_impl(series, :sort, [reverse?])
@doc """
Returns the indices that would sort the series.
"""
def argsort(series, reverse? \\ false), do: apply_impl(series, :argsort, [reverse?])
@doc """
Reverses the series order.
## Example
iex> s = [1, 2, 3] |> Explorer.Series.from_list()
iex> Explorer.Series.reverse(s)
#Explorer.Series<
integer[3]
[3, 2, 1]
>
"""
def reverse(series), do: apply_impl(series, :reverse)
# Distinct
@doc """
Returns the unique values of the series.
## Examples
iex> s = [1, 1, 2, 2, 3, 3] |> Explorer.Series.from_list()
iex> s |> Explorer.Series.distinct()
#Explorer.Series<
integer[3]
[1, 2, 3]
>
"""
def distinct(series), do: apply_impl(series, :distinct)
@doc """
Returns the unique values of the series, but does not maintain order.
Faster than `distinct/1`.
Since the ordering of the result is not guaranteed, the example below sorts it
to make the output deterministic.
## Examples
iex> s = [1, 1, 2, 2, 3, 3] |> Explorer.Series.from_list()
iex> s |> Explorer.Series.unordered_distinct() |> Explorer.Series.sort()
#Explorer.Series<
integer[3]
[1, 2, 3]
>
"""
def unordered_distinct(series), do: apply_impl(series, :unordered_distinct)
@doc """
Returns the number of unique values in the series.
## Examples
iex> s = Explorer.Series.from_list(["a", "b", "a", "b"])
iex> Explorer.Series.n_distinct(s)
2
"""
def n_distinct(series), do: apply_impl(series, :n_distinct)
@doc """
Creates a new dataframe with unique values and the count of each.
## Examples
iex> s = Explorer.Series.from_list(["a", "a", "b", "c", "c", "c"])
iex> Explorer.Series.count(s)
#Explorer.DataFrame<
[rows: 3, columns: 2]
values string ["c", "a", "b"]
counts integer [3, 2, 1]
>
"""
def count(series), do: apply_impl(series, :count)
# Rolling
@doc """
Calculate the rolling sum, given a window size and optional list of weights.
## Options
* `:weights` - An optional list of weights with the same length as the window
that will be multiplied elementwise with the values in the window. Defaults to `nil`.
* `:min_periods` - The number of values in the window that should be non-nil
before computing a result. If `nil`, it will be set equal to window size. Defaults to `1`.
* `:center` - Set the labels at the center of the window. Defaults to `false`.
## Examples
iex> s = 1..10 |> Enum.to_list() |> Explorer.Series.from_list()
iex> Explorer.Series.rolling_sum(s, 4)
#Explorer.Series<
integer[10]
[1, 3, 6, 10, 14, 18, 22, 26, 30, 34]
>
iex> s = 1..10 |> Enum.to_list() |> Explorer.Series.from_list()
iex> Explorer.Series.rolling_sum(s, 2, weights: [1.0, 2.0])
#Explorer.Series<
float[10]
[1.0, 5.0, 8.0, 11.0, 14.0, 17.0, 20.0, 23.0, 26.0, 29.0]
>
"""
def rolling_sum(series, window_size, opts \\ []),
do: apply_impl(series, :rolling_sum, [window_size, rolling_opts_with_defaults(opts)])
@doc """
Calculate the rolling mean, given a window size and optional list of weights.
## Options
* `:weights` - An optional list of weights with the same length as the window
that will be multiplied elementwise with the values in the window. Defaults to `nil`.
* `:min_periods` - The number of values in the window that should be non-nil
before computing a result. If `nil`, it will be set equal to window size. Defaults to `1`.
* `:center` - Set the labels at the center of the window. Defaults to `false`.
## Examples
iex> s = 1..10 |> Enum.to_list() |> Explorer.Series.from_list()
iex> Explorer.Series.rolling_mean(s, 4)
#Explorer.Series<
float[10]
[1.0, 1.5, 2.0, 2.5, 3.5, 4.5, 5.5, 6.5, 7.5, 8.5]
>
iex> s = 1..10 |> Enum.to_list() |> Explorer.Series.from_list()
iex> Explorer.Series.rolling_mean(s, 2, weights: [1.0, 2.0])
#Explorer.Series<
float[10]
[1.0, 2.5, 4.0, 5.5, 7.0, 8.5, 10.0, 11.5, 13.0, 14.5]
>
"""
def rolling_mean(series, window_size, opts \\ []),
do: apply_impl(series, :rolling_mean, [window_size, rolling_opts_with_defaults(opts)])
@doc """
Calculate the rolling min, given a window size and optional list of weights.
## Options
* `:weights` - An optional list of weights with the same length as the window
that will be multiplied elementwise with the values in the window. Defaults to `nil`.
* `:min_periods` - The number of values in the window that should be non-nil
before computing a result. If `nil`, it will be set equal to window size. Defaults to `1`.
* `:center` - Set the labels at the center of the window. Defaults to `false`.
## Examples
iex> s = 1..10 |> Enum.to_list() |> Explorer.Series.from_list()
iex> Explorer.Series.rolling_min(s, 4)
#Explorer.Series<
integer[10]
[1, 1, 1, 1, 2, 3, 4, 5, 6, 7]
>
iex> s = 1..10 |> Enum.to_list() |> Explorer.Series.from_list()
iex> Explorer.Series.rolling_min(s, 2, weights: [1.0, 2.0])
#Explorer.Series<
float[10]
[1.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0]
>
"""
def rolling_min(series, window_size, opts \\ []),
do: apply_impl(series, :rolling_min, [window_size, rolling_opts_with_defaults(opts)])
@doc """
Calculate the rolling max, given a window size and optional list of weights.
## Options
* `:weights` - An optional list of weights with the same length as the window
that will be multiplied elementwise with the values in the window. Defaults to `nil`.
* `:min_periods` - The number of values in the window that should be non-nil
before computing a result. If `nil`, it will be set equal to window size. Defaults to `1`.
* `:center` - Set the labels at the center of the window. Defaults to `false`.
## Examples
iex> s = 1..10 |> Enum.to_list() |> Explorer.Series.from_list()
iex> Explorer.Series.rolling_max(s, 4)
#Explorer.Series<
integer[10]
[1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
>
iex> s = 1..10 |> Enum.to_list() |> Explorer.Series.from_list()
iex> Explorer.Series.rolling_max(s, 2, weights: [1.0, 2.0])
#Explorer.Series<
float[10]
[1.0, 4.0, 6.0, 8.0, 10.0, 12.0, 14.0, 16.0, 18.0, 20.0]
>
"""
def rolling_max(series, window_size, opts \\ []),
do: apply_impl(series, :rolling_max, [window_size, rolling_opts_with_defaults(opts)])
defp rolling_opts_with_defaults(opts) do
defaults = [weights: nil, min_periods: 1, center: false]
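# user-supplied options take precedence over the defaults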
Keyword.merge(defaults, opts, fn _key, _left, right -> right end)
end
# Missing values
@doc """
Fill missing values with the given strategy. If a scalar value is provided instead of a strategy
atom, `nil` will be replaced with that value. It must be of the same `dtype` as the series.
## Strategies
* `:forward` - replace nil with the previous value
* `:backward` - replace nil with the next value
* `:max` - replace nil with the series maximum
* `:min` - replace nil with the series minimum
* `:mean` - replace nil with the series mean
## Examples
iex> s = Explorer.Series.from_list([1, 2, nil, 4])
iex> Explorer.Series.fill_missing(s, :forward)
#Explorer.Series<
integer[4]
[1, 2, 2, 4]
>
iex> s = Explorer.Series.from_list([1, 2, nil, 4])
iex> Explorer.Series.fill_missing(s, :backward)
#Explorer.Series<
integer[4]
[1, 2, 4, 4]
>
iex> s = Explorer.Series.from_list([1, 2, nil, 4])
iex> Explorer.Series.fill_missing(s, :max)
#Explorer.Series<
integer[4]
[1, 2, 4, 4]
>
iex> s = Explorer.Series.from_list([1, 2, nil, 4])
iex> Explorer.Series.fill_missing(s, :min)
#Explorer.Series<
integer[4]
[1, 2, 1, 4]
>
iex> s = Explorer.Series.from_list([1, 2, nil, 4])
iex> Explorer.Series.fill_missing(s, :mean)
#Explorer.Series<
integer[4]
[1, 2, 2, 4]
>
iex> s = Explorer.Series.from_list([1, 2, nil, 4])
iex> Explorer.Series.fill_missing(s, 3)
#Explorer.Series<
integer[4]
[1, 2, 3, 4]
>
iex> s = Explorer.Series.from_list([1.0, 2.0, nil, 4.0])
iex> Explorer.Series.fill_missing(s, 3.0)
#Explorer.Series<
float[4]
[1.0, 2.0, 3.0, 4.0]
>
iex> s = Explorer.Series.from_list(["a", "b", nil, "d"])
iex> Explorer.Series.fill_missing(s, "c")
#Explorer.Series<
string[4]
["a", "b", "c", "d"]
>
"""
@spec fill_missing(Series.t(), atom() | number() | String.t()) :: Series.t()
def fill_missing(series, strategy), do: apply_impl(series, :fill_missing, [strategy])
@doc """
Returns a mask of nil values.
## Examples
iex> s = Explorer.Series.from_list([1, 2, nil, 4])
iex> Explorer.Series.nil?(s)
#Explorer.Series<
boolean[4]
[false, false, true, false]
>
"""
@spec nil?(Series.t()) :: Series.t()
def nil?(series), do: apply_impl(series, :nil?)
@doc """
Returns a mask of not nil values.
## Examples
iex> s = Explorer.Series.from_list([1, 2, nil, 4])
iex> Explorer.Series.not_nil?(s)
#Explorer.Series<
boolean[4]
[true, true, false, true]
>
"""
@spec not_nil?(Series.t()) :: Series.t()
def not_nil?(series), do: apply_impl(series, :not_nil?)
# Escape hatch
@doc """
Returns an `Explorer.Series` where each element is the result of invoking `fun` on each
corresponding element of `series`.
This is an expensive operation meant to enable the use of arbitrary Elixir functions against
any backend. The implementation will vary by backend but in most (all?) cases will require
converting to an `Elixir.List`, applying `Enum.map/2`, and then converting back to an
`Explorer.Series`.
## Examples
iex> s = Explorer.Series.from_list(["this ", " is", "great "])
iex> Explorer.Series.transform(s, &String.trim/1)
#Explorer.Series<
string[3]
["this", "is", "great"]
>
iex> s = Explorer.Series.from_list(["this", "is", "great"])
iex> Explorer.Series.transform(s, &String.length/1)
#Explorer.Series<
integer[3]
[4, 2, 5]
>
"""
def transform(series, fun), do: apply_impl(series, :transform, [fun])
# Helpers
defp backend_from_options!(opts) do
backend = Explorer.Shared.backend_from_options!(opts) || Explorer.default_backend()
Module.concat(backend, "Series")
end
defp apply_impl(series, fun, args \\ []) do
impl = impl!(series)
apply(impl, fun, [series | args])
end
defp dtype_error(function, dtype, valid_dtypes),
do:
raise(
ArgumentError,
"Explorer.Series.#{function} not implemented for dtype #{inspect(dtype)}. Valid " <>
"dtypes are #{inspect(valid_dtypes)}."
)
defp dtype_mismatch_error(function, left_dtype, right_dtype),
do:
raise(
ArgumentError,
"cannot invoke Explorer.Series.#{function} with mismatched dtypes: #{left_dtype} and " <>
"#{right_dtype}."
)
end
|
lib/explorer/series.ex
| 0.939116 | 0.777785 |
series.ex
|
starcoder
|
defmodule BitcoinRpc.Transactions do
@moduledoc """
BitcoinRpc.Transactions allows you to stay connected to a Bitcoin wallet and be notified of incoming
or outgoing transactions that affect that wallet.
## Examples
Connect to the bitcoin node using the configured BitcoinRpc connection and listen for new transactions
that affect the wallet since the "last_block", checking for new transactions every 60 seconds.
The list of transactions will include all transactions with a number of confirmations lower than or equal to
the given confirmation number. This causes some transactions to be received multiple times until the
last_block is updated and their number of confirmations rises above the threshold.
See [ListSinceBlock RPC method](https://bitcoin.org/en/developer-reference#listsinceblock) for more details.
iex> BitcoinRpc.Transactions.start_link([timeout: 60, last_block: "", confirmations: 1, callback: {MyModule, :handle_transaction}])
{:ok, pid}
MyModule looks something like this:
defmodule MyModule do
def handle_transaction(transactions, last_block) do
# do something with the transaction
IO.inspect last_block
IO.inspect transactions
end
end
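The callback can also be configured globally (an illustrative sketch; `start_link/1` falls back to
`Application.get_env(:bitcoin_rpc, :callback)` when no `:callback` option is passed):
    config :bitcoin_rpc, callback: {MyModule, :handle_transaction}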
"""
use GenServer
alias BitcoinRpc
@doc """
Start listening for incoming transactions that affect the current wallet
iex> BitcoinRpc.Transactions.start_link([timeout: 60, last_block: "", confirmations: 1, callback: {MyModule, :handle_transaction}])
{:ok, pid}
"""
def start_link(opts \\ [timeout: 60, last_block: "", confirmations: 1]) do
opts = [
timeout: Keyword.get(opts, :timeout, 60),
last_block: Keyword.get(opts, :last_block, ""),
confirmations: Keyword.get(opts, :confirmations, 1),
callback: Keyword.get(opts, :callback, Application.get_env(:bitcoin_rpc, :callback, nil))
]
case opts[:callback] do
{_, _} -> GenServer.start_link(__MODULE__, opts, name: __MODULE__)
callback -> {:error, "[BitcoinRPC] Invalid callback method received #{inspect callback}"}
end
end
@doc """
Initiate the scheduler and configure the initial state of the listener
"""
def init(opts) do
ticker(opts[:timeout])
{:ok, [
last_block: opts[:last_block],
confirmations: opts[:confirmations],
callback: opts[:callback],
]}
end
@doc """
Handle ticker event and check for new transactions since the last scanned block
"""
def handle_info({:ticker, timeout}, state) do
ticker(timeout)
confirmations = state[:confirmations]
{module, method} = state[:callback]
resp = BitcoinRpc.list_since_block(state[:last_block], confirmations)
last_block = case resp do
{:ok, %{
"lastblock" => last_block,
"transactions" => transactions,
}} ->
apply(module, method, [transactions, last_block])
last_block
{:error, _} -> state[:last_block]
end
{:noreply, Keyword.put(state, :last_block, last_block)}
end
def handle_info(msg, state), do: super(msg, state)
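# schedule the next poll; `timeout` is in seconds, Process.send_after expects milliseconds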
defp ticker(timeout), do: Process.send_after(self(), {:ticker, timeout}, timeout * 1000)
end
|
lib/bitcoin_rpc/transactions.ex
| 0.84626 | 0.519399 |
transactions.ex
|
starcoder
|
defmodule Day21 do
use Tensor
def read_file(path) do
File.stream!(path)
|> parse_input
end
def parse_input(rows) do
rows
|> Enum.map(&parse_rule/1)
|> Enum.reduce([], fn {from, to}, acc -> acc ++ (variations(from) |> Enum.map(&({&1, to}))) end)
|> Map.new
end
def parse_rule(row) do
[from, to] = Regex.run(~r{([.#/]+) => ([.#/]+)}, row, capture: :all_but_first)
{parse_pattern(from), parse_pattern(to)}
end
def parse_pattern(input) do
pattern = input
|> String.split("/")
|> Enum.map(&(String.split(&1, "", trim: true)))
size = length(pattern |> hd)
Matrix.new(pattern, size, size)
end
def variations(pattern) do
[
pattern,
pattern |> Matrix.rotate_clockwise,
pattern |> Matrix.rotate_180,
pattern |> Matrix.rotate_counterclockwise,
pattern |> Matrix.flip_horizontal,
pattern |> Matrix.flip_horizontal |> Matrix.rotate_clockwise,
pattern |> Matrix.flip_horizontal |> Matrix.rotate_180,
pattern |> Matrix.flip_horizontal |> Matrix.rotate_counterclockwise
]
|> Enum.uniq
end
def squares(pattern) do
size = Matrix.width(pattern) * Matrix.height(pattern)
square = if rem(size, 2) == 0, do: 2, else: 3
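# split each row into strips of `square` cells, zip the rows so that strips
# from the same column band line up, then group `square` strips per sub-square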
pattern
|> Matrix.to_list
|> Enum.map(&(Enum.chunk_every(&1, square)))
|> Enum.zip
|> Enum.flat_map(&Tuple.to_list/1)
|> Enum.chunk_every(square)
|> Enum.map(&(Matrix.new(&1, square, square)))
# squares by column (1, 4, 7, 2, 5, 8, 3, 6, 9)
end
def recombine(squares) do
square_size = Matrix.width(squares |> hd)
row_size = length(squares) |> :math.sqrt |> trunc
squares
|> Enum.chunk_every(row_size)
|> List.zip
|> Enum.map(&Tuple.to_list/1)
|> List.flatten
|> Enum.map(&Matrix.to_sparse_map/1)
|> Enum.with_index
|> Enum.reduce(%{}, fn {row, index}, acc ->
  offset_y = square_size * div(index, row_size)
  offset_x = rem(square_size * index, row_size * square_size)
  shifted =
    row
    |> Map.to_list()
    |> Enum.map(fn {[y, x], v} -> {[y + offset_y, x + offset_x], v} end)
    |> Map.new()
  Map.merge(acc, shifted)
end)
|> Matrix.from_sparse_map(square_size * row_size, square_size * row_size)
end
def initial do
".#./..#/###" |> parse_pattern
end
def step(pattern, rules) do
pattern |> squares |> Enum.map(&(rules[&1])) |> recombine
end
def iterate(pattern, rules, times) do
Enum.reduce(1..times, pattern, fn _, acc -> step(acc, rules) end)
end
def pixels(file, times) do
iterate(initial(), read_file(file), times) |> Matrix.to_list |> List.flatten |> Enum.filter(&(&1 == "#")) |> Enum.count
end
end
|
lib/day21.ex
| 0.620392 | 0.610105 |
day21.ex
|
starcoder
|
defmodule Cldr.Calendar.Compiler.Week do
@moduledoc false
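# Invoked via `@before_compile` from week-based calendar definitions: it reads
# the defining module's `@options`, validates them for a `:week` calendar, and
# injects the `Calendar`/`Cldr.Calendar` callbacks quoted below.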
defmacro __before_compile__(env) do
config =
Module.get_attribute(env.module, :options)
|> Keyword.put(:calendar, env.module)
|> Cldr.Calendar.Config.extract_options()
|> Cldr.Calendar.Config.validate_config!(:week)
Module.put_attribute(env.module, :calendar_config, config)
quote location: :keep do
@moduledoc false
@behaviour Calendar
@behaviour Cldr.Calendar
@type year :: -9999..9999
@type month :: 1..12
@type week :: 1..53
@type day :: 1..7
import Cldr.Macros
alias Cldr.Calendar.Base.Week
def __config__ do
@calendar_config
end
@doc """
Identifies that the calendar is week based.
"""
@impl true
def calendar_base do
:week
end
@doc """
Defines the CLDR calendar type for this calendar.
This type is used in support of `Cldr.Calendar.localize/3`.
Currently only `:gregorian` is supported.
"""
@impl true
def cldr_calendar_type do
:gregorian
end
@doc """
Determines if the date given is valid according to the this calendar.
"""
@impl true
def valid_date?(year, week, day) do
Week.valid_date?(year, week, day, __config__())
end
@doc """
Calculates the year and era from the given `year`.
The ISO calendar has two eras: the current era, which
starts in year 1 and is defined as era "1", and a
second era for years less than 1, defined as
era "0".
"""
@spec year_of_era(year) :: {year, era :: non_neg_integer}
@impl true
def year_of_era(year) do
Week.year_of_era(year, __config__())
end
@doc """
Calculates the quarter of the year from the given `year`, `week`, and `day`.
It is an integer from 1 to 4.
"""
@spec quarter_of_year(year, Cldr.Calendar.week(), day) :: 1..4
@impl true
def quarter_of_year(year, week, day) do
Week.quarter_of_year(year, week, day, __config__())
end
@doc """
Calculates the month of the year from the given `year`, `week`, and `day`.
It is an integer from 1 to 12.
"""
@spec month_of_year(year, Cldr.Calendar.week(), day) :: month
@impl true
def month_of_year(year, week, day) do
Week.month_of_year(year, week, day, __config__())
end
@doc """
Calculates the week of the year from the given `year`, `week`, and `day`.
It is an integer from 1 to 53.
"""
@spec week_of_year(year, Cldr.Calendar.week(), day) :: {year, Cldr.Calendar.week()}
@impl true
def week_of_year(year, week, day) do
Week.week_of_year(year, week, day, __config__())
end
@doc """
Calculates the ISO week of the year from the given `year`, `week`, and `day`.
It is an integer from 1 to 53.
"""
@spec iso_week_of_year(year, Cldr.Calendar.week(), day) :: {year, Cldr.Calendar.week()}
@impl true
def iso_week_of_year(year, week, day) do
Week.iso_week_of_year(year, week, day, __config__())
end
@doc """
Calculates the week of the month from the given `year`, `week`, and `day`.
It is an integer from 1 to 5.
"""
@spec week_of_month(year, Cldr.Calendar.week(), day) :: {month, Cldr.Calendar.week()}
@impl true
def week_of_month(year, week, day) do
Week.week_of_month(year, week, day, __config__())
end
@doc """
Calculates the day and era from the given `year`, `week`, and `day`.
"""
@spec day_of_era(year, week, day) :: {day :: pos_integer(), era :: 0..1}
@impl true
def day_of_era(year, week, day) do
Week.day_of_era(year, week, day, __config__())
end
@doc """
Calculates the day of the year from the given `year`, `week`, and `day`.
It is an integer from 1 to 366.
"""
@spec day_of_year(year, week, day) :: 1..366
@impl true
def day_of_year(year, week, day) do
Week.day_of_year(year, week, day, __config__())
end
@doc """
Calculates the day of the week from the given `year`, `week`, and `day`.
It is an integer from 1 to 7, where 1 is Monday and 7 is Sunday.
"""
@spec day_of_week(year, week, day) :: 1..7
@impl true
def day_of_week(year, week, day) do
Week.day_of_week(year, week, day, __config__())
end
@doc """
Calculates the number of periods in a given `year`. A period
corresponds to a month in month-based calendars and
a week in week-based calendars.
"""
@spec periods_in_year(year) :: Calendar.week()
@impl true
def periods_in_year(year) do
weeks_in_year(year)
end
@doc """
Returns the number of weeks in a given year.
"""
@spec weeks_in_year(year) :: Calendar.week()
@impl true
def weeks_in_year(year) do
Week.weeks_in_year(year, __config__())
end
@doc """
Returns the number of days in a given year.
"""
@spec days_in_year(year) :: Calendar.day()
@impl true
def days_in_year(year) do
Week.days_in_year(year, __config__())
end
@doc """
Returns how many days there are in the given year-month.
"""
@spec days_in_month(year, month) :: Calendar.day()
@impl true
def days_in_month(year, month) do
Week.days_in_month(year, month, __config__())
end
@doc """
Returns the number of days in a week.
"""
def days_in_week do
Week.days_in_week()
end
@doc """
Returns a `Date.Range.t` representing
a given year.
"""
@impl true
def year(year) do
Week.year(year, __config__())
end
@doc """
Returns a `Date.Range.t` representing
a given quarter of a year.
"""
@impl true
def quarter(year, quarter) do
Week.quarter(year, quarter, __config__())
end
@doc """
Returns a `Date.Range.t` representing
a given month of a year.
"""
@impl true
def month(year, month) do
Week.month(year, month, __config__())
end
@doc """
Returns a `Date.Range.t` representing
a given week of a year.
"""
@impl true
def week(year, week) do
Week.week(year, week, __config__())
end
@doc """
Adds an `increment` number of `date_part`s
to a `year-month-day`.
`date_part` can be `:years`, `:quarters`,
or `:months`
"""
@impl true
def plus(year, month, day, date_part, increment, options \\ [])
def plus(year, month, day, :years, years, options) do
Week.plus(year, month, day, __config__(), :years, years, options)
end
def plus(year, week, day, :quarters, quarters, options) do
Week.plus(year, week, day, __config__(), :quarters, quarters, options)
end
def plus(year, week, day, :months, months, options) do
Week.plus(year, week, day, __config__(), :months, months, options)
end
@doc """
Adds a :year, :month, :day or time increments
These functions support CalendarInterval
"""
def add(year, month, day, hour, minute, second, microsecond, :year, step) do
{year, month, day} = plus(year, month, day, :years, step)
{year, month, day, hour, minute, second, microsecond}
end
def add(year, month, day, hour, minute, second, microsecond, :quarter, step) do
{year, month, day} = plus(year, month, day, :quarters, step)
{year, month, day, hour, minute, second, microsecond}
end
def add(year, month, day, hour, minute, second, microsecond, :month, step) do
{year, month, day} = plus(year, month, day, :months, step)
{year, month, day, hour, minute, second, microsecond}
end
@doc """
Returns whether the given year is a leap year. For a week-based calendar this is a 'long' year, one containing 53 weeks.
"""
@spec leap_year?(year) :: boolean()
@impl true
def leap_year?(year) do
Week.long_year?(year, __config__())
end
@doc """
Returns the number of days since the calendar
epoch for a given `year-month-day`
"""
def date_to_iso_days(year, week, day) do
Week.date_to_iso_days(year, week, day, __config__())
end
@doc """
Returns `{year, month, day}` calculated from
the number of `iso_days`.
"""
def date_from_iso_days(iso_days) do
Week.date_from_iso_days(iso_days, __config__())
end
@doc """
Returns the number of `iso_days` that is
the first day of the given
year for this calendar.
"""
def first_gregorian_day_of_year(year) do
Week.first_gregorian_day_of_year(year, __config__())
end
@doc """
Returns the number of `iso_days` that is
the last day of the given
year for this calendar.
"""
def last_gregorian_day_of_year(year) do
Week.last_gregorian_day_of_year(year, __config__())
end
@doc """
Returns the `t:Calendar.iso_days/0` format of the specified date.
"""
@impl true
@spec naive_datetime_to_iso_days(
Calendar.year(),
Calendar.month(),
Calendar.day(),
Calendar.hour(),
Calendar.minute(),
Calendar.second(),
Calendar.microsecond()
) :: Calendar.iso_days()
def naive_datetime_to_iso_days(year, week, day, hour, minute, second, microsecond) do
Week.naive_datetime_to_iso_days(
year,
week,
day,
hour,
minute,
second,
microsecond,
__config__()
)
end
@doc """
Converts the `t:Calendar.iso_days/0` format to the datetime format specified by this calendar.
"""
@spec naive_datetime_from_iso_days(Calendar.iso_days()) :: {
Calendar.year(),
Calendar.month(),
Calendar.day(),
Calendar.hour(),
Calendar.minute(),
Calendar.second(),
Calendar.microsecond()
}
@impl true
def naive_datetime_from_iso_days({days, day_fraction}) do
Week.naive_datetime_from_iso_days({days, day_fraction}, __config__())
end
@doc false
@impl true
def date_to_string(year, month, day) do
Week.date_to_string(year, month, day)
end
@doc false
@impl true
def datetime_to_string(
year,
month,
day,
hour,
minute,
second,
microsecond,
time_zone,
zone_abbr,
utc_offset,
std_offset
) do
Week.datetime_to_string(
year,
month,
day,
hour,
minute,
second,
microsecond,
time_zone,
zone_abbr,
utc_offset,
std_offset
)
end
@doc false
@impl true
def naive_datetime_to_string(year, month, day, hour, minute, second, microsecond) do
Week.naive_datetime_to_string(year, month, day, hour, minute, second, microsecond)
end
@doc false
calendar_impl()
def parse_date(string) do
Cldr.Calendar.Parse.parse_week_date(string, __MODULE__)
end
@doc false
calendar_impl()
def parse_utc_datetime(string) do
Cldr.Calendar.Parse.parse_utc_datetime(string, __MODULE__)
end
@doc false
calendar_impl()
def parse_naive_datetime(string) do
Cldr.Calendar.Parse.parse_naive_datetime(string, __MODULE__)
end
if Version.match?(System.version(), ">= 1.10.0-dev") do
@doc false
defdelegate parse_time(string), to: Calendar.ISO
end
@doc false
defdelegate day_rollover_relative_to_midnight_utc, to: Calendar.ISO
@doc false
defdelegate months_in_year(year), to: Calendar.ISO
@doc false
defdelegate time_from_day_fraction(day_fraction), to: Calendar.ISO
@doc false
defdelegate time_to_day_fraction(hour, minute, second, microsecond), to: Calendar.ISO
@doc false
defdelegate time_to_string(hour, minute, second, microsecond), to: Calendar.ISO
@doc false
defdelegate valid_time?(hour, minute, second, microsecond), to: Calendar.ISO
end
end
end
|
lib/cldr/calendar/backend/week_compiler.ex
| 0.868771 | 0.434641 |
week_compiler.ex
|
starcoder
|
defmodule Statifier.Schema do
@moduledoc """
A compiled and parsed state chart definition
The Schema Struct:
The fields of a Schema should not be adjusted manually but are publicly
available to read. They are as follows:
* `initial_configuration` - the initial configuration of the machine (root)
* `state_identifiers` - collection of all the known state identifiers
* `valid?` - Whether the parsed definition led to a valid schema
* `transitions` - All the states that transitions move to
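An illustrative sketch of building a schema (assumes a `%Root{}` struct and a
`%State{}` whose `transitions` field defaults to an empty list):
    schema =
      Statifier.Schema.new(%Root{})
      |> Statifier.Schema.add_substate(%State{id: "idle"})
    schema.valid?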
"""
alias Statifier.Schema.{Root, State, Transition, Tree}
alias Statifier.Zipper.Tree
@type t :: %__MODULE__{
initial_configuration: Tree.t(),
state_identifiers: MapSet.t(State.state_identifier()),
valid?: boolean(),
transitions: MapSet.t(State.state_identifier())
}
defstruct initial_configuration: nil,
state_identifiers: MapSet.new(),
valid?: false,
transitions: MapSet.new()
@doc """
Creates a new schema
"""
def new(%Root{} = root) do
%__MODULE__{
initial_configuration: Tree.root(root)
}
end
def current_state(%__MODULE__{initial_configuration: initial_configuration}) do
Tree.focus(initial_configuration)
end
@doc """
Moves up to the parent state of current focused stat node.
Also resets children list so that when going back into children the first
child would be visited.
"""
def rparent_state(%__MODULE__{initial_configuration: configuration} = schema) do
configuration =
case Tree.rparent(configuration) do
{:ok, configuration} -> configuration
_ -> configuration
end
%__MODULE__{
schema
| initial_configuration: configuration,
valid?: check_validity(schema)
}
end
@doc """
Adds a new substate to the current focused state node in configuration
"""
def add_substate(%__MODULE__{initial_configuration: configuration} = schema, %State{} = state) do
state_identifiers = MapSet.put(schema.state_identifiers, state.id)
# Add all transitions with targets to our transitions set
transitions =
Enum.reduce(state.transitions, schema.transitions, fn transition, acc ->
if transition.target != nil do
MapSet.put(acc, transition.target)
else
acc
end
end)
configuration =
case Tree.children(configuration) do
{:ok, configuration} ->
Tree.insert_right(configuration, state)
|> Tree.right!()
{:error, :cannot_make_move} ->
Tree.insert_child(configuration, state)
|> Tree.children!()
end
%__MODULE__{
schema
| initial_configuration: configuration,
transitions: transitions,
state_identifiers: state_identifiers,
valid?: check_validity(schema)
}
end
def add_transition(
%__MODULE__{transitions: transitions, initial_configuration: configuration} = schema,
%Transition{target: target} = transition
)
when not is_nil(target) do
state = Tree.focus(configuration)
%__MODULE__{
schema
| transitions: MapSet.put(transitions, target),
initial_configuration:
Tree.replace(configuration, State.add_transition(state, transition)),
valid?: check_validity(schema)
}
end
def add_transition(
%__MODULE__{initial_configuration: configuration} = schema,
%Transition{} = transition_without_target
) do
state = Tree.focus(configuration)
%__MODULE__{
schema
| initial_configuration:
Tree.replace(configuration, State.add_transition(state, transition_without_target)),
valid?: check_validity(schema)
}
end
defp check_validity(%__MODULE__{
transitions: transitions,
initial_configuration: configuration,
state_identifiers: states
}) do
# all the checks we have to do for validity
[
# Are we at the root of our tree
Tree.focus(configuration),
# Do all transitions move to known discovered states
MapSet.subset?(transitions, states)
]
|> Enum.all?()
end
end
|
impl/ex/lib/schema/schema.ex
| 0.884825 | 0.726547 |
schema.ex
|
starcoder
|
defmodule Penelope.ML.Vector do
@moduledoc """
This is a the vector library used by the ML modules. It provides an
interface to an efficient binary representation of 32-bit floating point
values. Math is done via the BLAS interface, wrapped in a NIF module.
"""
alias Penelope.NIF, as: NIF
@type t :: binary
@doc "the empty vector"
@spec empty() :: t
def empty do
<<>>
end
@doc "calculates the number of elements in a vector"
@spec size(vector :: t) :: non_neg_integer
def size(vector) do
div(byte_size(vector), 4)
end
@doc "retrieves a vector element by 0-based index"
@spec get(vector :: t, index :: non_neg_integer) :: float
def get(vector, index) do
vector
|> binary_part(index * 4, 4)
|> binary2float()
end
@doc "creates a vector of length n containing all zeros"
@spec zeros(n :: non_neg_integer) :: t
def zeros(0), do: empty()
def zeros(n) do
from_list(for _ <- 1..n, do: 0)
end
@doc "converts a list of floats to a vector"
@spec from_list(numbers :: [float]) :: t
def from_list(numbers) do
numbers
|> Enum.map(&float2binary/1)
|> Enum.reduce(empty(), &(&2 <> &1))
end
@doc "converts a vector to a list of floats"
@spec to_list(vector :: t) :: [float]
def to_list(<<>>), do: []
def to_list(vector) do
Enum.map(0..(size(vector) - 1), &get(vector, &1))
end
@doc "concatenates two vectors"
@spec concat(x :: t, y :: t) :: t
def concat(x, y) do
x <> y
end
@doc "computes y = ax"
@spec scale(x :: t, a :: float) :: t
def scale(x, a), do: NIF.blas_sscal(a / 1, x)
@doc "computes z = x + y"
@spec add(x :: t, y :: t) :: t
def add(x, y), do: NIF.blas_saxpy(1.0, x, y)
@doc "computes z = ax + y"
@spec scale_add(y :: t, a :: float, x :: t) :: t
def scale_add(y, a, x), do: NIF.blas_saxpy(a / 1, x, y)
defp binary2float(<<value::float()-native()-size(32)>>), do: value
defp binary2float(_value), do: :NaN
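# <<0, 0, 128, 127>> is 0x7F800000, a non-finite single-precision bit pattern;
# it fails the float match in binary2float/1, so it round-trips back to :NaN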
defp float2binary(:NaN), do: <<0, 0, 128, 127>>
defp float2binary(x), do: <<x::float()-native()-size(32)>>
end
|
lib/penelope/ml/vector.ex
| 0.82347 | 0.689384 |
vector.ex
|
starcoder
|
defmodule Numbers.Protocols do
@moduledoc """
A set of protocols that can be implemented for your data structure, to add `Numbers`-support to it.
In older versions of `Numbers`, structures were required to follow a single, very strict, behaviour.
But because there are many different kinds of structures that benefit from a numeric interface, including
those for which one or more of these operations cannot be (unambiguously) defined,
this has been split into these different protocols.
By using the different protocols, each data structure can 'pick and choose' what functionality
is supported. As protocol dispatching is used, the result should be a lot faster than in older
versions of Numbers, which performed behaviour-based runtime dispatch on the struct name.
## Coercion
Numbers does not automatically transform numbers from one type to another if one of the functions is called with two different types.
Frequently you do want to use other data types together with your custom data type. For this, a custom coercion can be specified,
using `Coerce.defcoercion/3` as exposed by the [`Coerce`](https://hex.pm/packages/coerce) library that `Numbers` depends on.
"""
end
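# A hypothetical coercion sketch for the `Coerce` library mentioned above.
# `MyWrapper` is invented for illustration (see the sketch after the Addition
# protocol below), and the exact `defcoercion` shape should be checked against
# the Coerce documentation:
#
#     Coerce.defcoercion(MyWrapper, Integer) do
#       def coerce(%MyWrapper{} = wrapper, int) do
#         {wrapper, %MyWrapper{value: int}}
#       end
#     end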
defprotocol Numbers.Protocols.Addition do
@moduledoc """
For supporting `Numbers.add/2`.
"""
@doc """
Adds two numbers together.
"""
@spec add(t, t) :: t
def add(a, b)
@doc """
Should return the 'additive identity' of the same type as the argument.
This is the value that can be added to another number,
to let the result remain equal to that number.
(For integers, this is `0`, for floats, `0.0`. Most other numeric types have their own 'zero' variant as well.)
This should be defined so that:
a = some_num
add(a, add_id(a)) == a
add(add_id(a), a) == a
If the numeric structure also implements `Numbers.Protocols.Subtraction`, the following should also be true:
a = some_num
sub(a, add_id(a)) == a
sub(a, a) == add_id(a)
(Note that it is fine if the result is not structurally identical, as long as it is logically equal.)
"""
@spec add_id(t) :: t
def add_id(_num)
end
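# A hypothetical implementation sketch showing how a simple wrapper struct
# opts in to `Numbers.add/2` support (`MyWrapper` is invented for
# illustration and is not part of the library):
defmodule MyWrapper do
  defstruct value: 0
end

defimpl Numbers.Protocols.Addition, for: MyWrapper do
  def add(%MyWrapper{value: a}, %MyWrapper{value: b}), do: %MyWrapper{value: a + b}
  def add_id(_num), do: %MyWrapper{value: 0}
end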
defprotocol Numbers.Protocols.Subtraction do
@moduledoc """
For supporting `Numbers.sub/2`.
"""
@doc """
Subtracts the rhs number from the lhs number.
"""
@spec sub(t, t) :: t
def sub(a, b)
end
defprotocol Numbers.Protocols.Minus do
@moduledoc """
For supporting `Numbers.minus/1`.
"""
@doc """
Unary minus. Should return the negation of the number.
"""
@spec minus(t) :: t
def minus(num)
end
defprotocol Numbers.Protocols.Absolute do
@moduledoc """
For supporting `Numbers.abs/1`.
"""
@doc """
The absolute value of a number.
"""
@spec abs(t) :: t
def abs(num)
end
defprotocol Numbers.Protocols.Multiplication do
@moduledoc """
For supporting `Numbers.mult/2`.
"""
@doc """
Multiplies the two numbers together.
"""
@spec mult(t, t) :: t
def mult(a, b)
@doc """
Should return the 'multiplicative identity' of the same type as the argument.
This is the value that can be multiplied with another number,
to let the result remain equal to that number.
(For integers, this is `1`, for floats, `1.0`. Most other numeric types have their own 'one' variant as well.)
This should be defined so that:
a = some_num
mult(a, mult_id(a)) == a
mult(mult_id(a), a) == a
If the numeric structure also implements `Numbers.Protocols.Division`, the following should also be true:
a = some_num
div(a, mult_id(a)) == a
div(a, a) == mult_id(a)
(Note that it is fine if the result is not structurally identical, as long as it is logically equal.)
"""
@spec mult_id(t) :: t
def mult_id(_num)
end
defprotocol Numbers.Protocols.Division do
@moduledoc """
For supporting `Numbers.div/2`.
"""
@doc """
Divides the lhs number by the rhs number.
To be clear, this division operation is supposed to be precise.
"""
@spec div(t, t) :: t
def div(a, b)
end
defprotocol Numbers.Protocols.Exponentiation do
@moduledoc """
For supporting `Numbers.pow/2`.
"""
@doc """
Power function, x^n.
Unless a dedicated fast power algorithm exists for your data structure,
you could use the 'Exponentiation by Squaring' algorithm, by calling
`Numbers.Helper.pow_by_sq(num, integer_power)` in the implementation,
which is a reasonably fast algorithm that uses `log(n)` multiplication steps.
"""
@spec pow(t, non_neg_integer) :: t
def pow(num, integer_power)
end
defprotocol Numbers.Protocols.ToFloat do
@moduledoc """
For supporting `Numbers.to_float/1`.
"""
@doc """
Convert the custom Numeric struct
to the built-in float datatype.
It is okay to lose precision during this conversion.
"""
@spec to_float(t) :: {:ok, t_as_float :: float} | :error
def to_float(num)
end
|
lib/numbers/protocols.ex
| 0.894775 | 0.789599 |
protocols.ex
|
starcoder
|
defmodule ExCmd do
@moduledoc """
ExCmd is an Elixir library to run and communicate with external programs with back-pressure.
"""
@doc """
Runs the given command with arguments and returns an Enumerable to read the command output.
The first parameter must be a list containing the command and its arguments, for example: `["cat", "file.txt"]`.
### Options
* `input` - Input can be either an `Enumerable` or a function which accepts `Collectable`.
* Enumerable:
```
# list
ExCmd.stream!(~w(base64), input: ["hello", "world"]) |> Enum.to_list()
# stream
ExCmd.stream!(~w(cat), input: File.stream!("log.txt", [], 65536)) |> Enum.to_list()
```
* Collectable:
If the input is a function with arity 1, ExCmd will call that function with a `Collectable` as the argument. The function must *push* input to this collectable. Return value of the function is ignored.
```
ExCmd.stream!(~w(cat), input: fn sink -> Enum.into(1..100, sink, &to_string/1) end)
|> Enum.to_list()
```
By default, no input is given.
* `exit_timeout` - Duration to wait for the external program to exit after completion before raising an error. Defaults to `:infinity`
* `chunk_size` - Size of each iodata chunk emitted by the Enumerable stream. When set to `nil` the output is unbuffered and chunk size will be variable. Defaults to 65536
All other options are passed to `ExCmd.Process.start_link/2`
### Example
```
ExCmd.stream!(~w(ffmpeg -i pipe:0 -f mp3 pipe:1), input: File.stream!("music_video.mkv", [], 65536))
|> Stream.into(File.stream!("music.mp3"))
|> Stream.run()
```
"""
@type collectable_func() :: (Collectable.t() -> any())
@spec stream!(nonempty_list(String.t()),
input: Enum.t() | collectable_func(),
exit_timeout: timeout(),
chunk_size: pos_integer() | nil
) :: ExCmd.Stream.t()
def stream!(cmd_with_args, opts \\ []) do
ExCmd.Stream.__build__(cmd_with_args, opts)
end
end
|
lib/ex_cmd.ex
| 0.868255 | 0.858955 |
ex_cmd.ex
|
starcoder
|
defmodule Grizzly.ZWave.Commands.NodeAddStatus do
@moduledoc """
Command for NODE_ADD_STATUS
This command is normally the report from adding a node to the Z-Wave network
Params:
* `:seq_number` - the sequence number of the inclusion command
* `:status` - the status of the inclusion
* `:node_id` - the new id of the new Z-Wave node
* `:listening?` - if the node is a listening node or not
* `:basic_device_class` - the Z-Wave basic device class
* `:generic_device_class` - the Z-Wave generic device class
* `:specific_device_class` - the Z-Wave specific device class
* `:command_classes` - a list of the command class the device supports, tagged by their security level
used only if the device was included securely
* `:granted_keys` - the security keys granted during S2 inclusion (optional)
* `:kex_fail_type` - the error that occurred in the S2 bootstrapping (optional)
* `:input_dsk` - the device DSK
"""
@behaviour Grizzly.ZWave.Command
alias Grizzly.ZWave.{Command, CommandClasses, DSK, Security}
alias Grizzly.ZWave.CommandClasses.NetworkManagementInclusion
@type tagged_command_classes() ::
{:non_secure_supported, [CommandClasses.command_class()]}
| {:non_secure_controlled, [CommandClasses.command_class()]}
| {:secure_supported, [CommandClasses.command_class()]}
| {:secure_controlled, [CommandClasses.command_class()]}
@type param() ::
{:node_id, Grizzly.node_id()}
| {:status, NetworkManagementInclusion.node_add_status()}
| {:seq_number, Grizzly.seq_number()}
| {:listening?, boolean()}
| {:basic_device_class, byte()}
| {:generic_device_class, byte()}
| {:specific_device_class, byte()}
| {:command_classes, [tagged_command_classes]}
| {:granted_keys, [Security.key()]}
| {:kex_fail_type, Security.key_exchange_fail_type()}
| {:input_dsk, DSK.t()}
@impl true
@spec new([param]) :: {:ok, Command.t()}
def new(params \\ []) do
# TODO: validate params
command = %Command{
name: :node_add_status,
command_byte: 0x02,
command_class: NetworkManagementInclusion,
params: params,
impl: __MODULE__
}
{:ok, command}
end
@impl true
def encode_params(command) do
node_id = Command.param!(command, :node_id)
status = Command.param!(command, :status)
seq_number = Command.param!(command, :seq_number)
if status == :failed do
<<seq_number, encode_status(status), 0x00, node_id, 0x01>>
else
listening? = Command.param!(command, :listening?)
basic_device_class = Command.param!(command, :basic_device_class)
generic_device_class = Command.param!(command, :generic_device_class)
specific_device_class = Command.param!(command, :specific_device_class)
command_classes = Command.param!(command, :command_classes)
# We add 6 to the command class count to account for the 3 device class
# bytes, the 2 Z-Wave protocol bytes, and the node info length byte.
# cc_count/1 returns the number of command classes plus up to 4 padding
# bytes for the security-level separators.
# See SDS13784 4.4.8.2 for more details
node_info_length = 6 + cc_count(command_classes)
# TODO: fix opt func bit (after the listening bit)
binary =
<<seq_number, encode_status(status), 0x00, node_id, node_info_length,
encode_listening_bit(listening?)::size(1), 0x00::size(7), 0x00, basic_device_class,
generic_device_class,
specific_device_class>> <>
CommandClasses.command_class_list_to_binary(command_classes)
maybe_add_version_2_fields(command, binary)
end
end
@impl true
def decode_params(<<seq_number, status_byte, _reserved, node_id, 0x01>>) do
{:ok,
[
status: NetworkManagementInclusion.parse_node_add_status(status_byte),
seq_number: seq_number,
node_id: node_id,
listening?: false,
basic_device_class: :unknown,
generic_device_class: :unknown,
specific_device_class: :unknown,
command_classes: []
]}
end
def decode_params(<<seq_number, status_byte, _reserved, node_id, node_info_bin::binary>>) do
node_info = NetworkManagementInclusion.parse_node_info(node_info_bin)
params =
%{
status: NetworkManagementInclusion.parse_node_add_status(status_byte),
seq_number: seq_number,
node_id: node_id
}
|> Map.merge(node_info)
|> Enum.into([])
{:ok, params}
end
@spec encode_status(NetworkManagementInclusion.node_add_status()) :: byte()
def encode_status(:done), do: 0x06
def encode_status(:failed), do: 0x07
def encode_status(:security_failed), do: 0x09
@spec encode_listening_bit(boolean()) :: byte()
def encode_listening_bit(true), do: 0x01
def encode_listening_bit(false), do: 0x00
defp maybe_add_version_2_fields(command, command_bin) do
case Command.param(command, :granted_keys) do
nil ->
command_bin
granted_keys ->
kex_fail_type = Command.param!(command, :kex_fail_type)
command_bin <>
<<Security.keys_to_byte(granted_keys), Security.failed_type_to_byte(kex_fail_type)>>
end
end
defp cc_count(tagged_command_classes) do
padding = get_padding(tagged_command_classes)
cc_length = tagged_command_classes |> Keyword.values() |> List.flatten() |> length()
cc_length + padding
end
defp get_padding(tagged_command_classes) do
Enum.reduce(tagged_command_classes, 0, fn
{_, []}, padding ->
padding
{:secure_supported, _}, padding ->
padding + 2
{other, _}, padding when other in [:non_secure_controlled, :secure_controlled] ->
padding + 1
_, padding ->
padding
end)
end
end
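# A hypothetical construction sketch (all values and command class atoms are
# invented for illustration):
#
#     {:ok, command} =
#       Grizzly.ZWave.Commands.NodeAddStatus.new(
#         seq_number: 0x01,
#         status: :done,
#         node_id: 0x04,
#         listening?: true,
#         basic_device_class: 0x04,
#         generic_device_class: 0x10,
#         specific_device_class: 0x01,
#         command_classes: [non_secure_supported: [:basic, :association]]
#       )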
|
lib/grizzly/zwave/commands/node_add_status.ex
| 0.732209 | 0.45302 |
node_add_status.ex
|
starcoder
|
defmodule GoogleMaps.MapData do
alias GoogleMaps.MapData.Path
alias GoogleMaps.MapData.Marker
alias GoogleMaps.MapData.Layers
defmodule Point do
defstruct x: 0, y: 0
@type t :: %__MODULE__{
x: integer,
y: integer
}
end
defmodule Padding do
defstruct left: 0, right: 0, top: 0, bottom: 0
@type t :: %__MODULE__{
left: integer,
right: integer,
top: integer,
bottom: integer
}
end
@moduledoc """
Represents the data required to build a Google map.
"""
@default_dynamic_options %{gestureHandling: "cooperative"}
@type lat_lng :: %{latitude: float, longitude: float}
defstruct default_center: %{latitude: 42.360718, longitude: -71.05891},
markers: [],
paths: [],
width: 0,
height: 0,
zoom: nil,
scale: 1,
dynamic_options: @default_dynamic_options,
layers: %Layers{},
auto_init: true,
reset_bounds_on_update: false,
bound_padding: nil
@type t :: %__MODULE__{
default_center: lat_lng,
markers: [Marker.t()],
paths: [Path.t()],
width: integer,
height: integer,
zoom: integer | nil,
scale: 1 | 2,
dynamic_options: %{atom => String.t() | boolean},
layers: Layers.t(),
auto_init: boolean,
reset_bounds_on_update: boolean,
bound_padding: Padding.t() | nil
}
@typep static_query_key :: :markers | :path | :zoom | :scale | :center | :size
@typep query_entry :: {static_query_key, String.t() | nil}
@doc """
Given a MapData struct, returns a Keyword list representing
a static query.
"""
@spec static_query(t) :: [query_entry]
def static_query(map_data) do
[
center: center_value(map_data),
size: size_value(map_data),
scale: map_data.scale,
zoom: map_data.zoom
]
|> format_static_markers(map_data.markers)
|> format_static_paths(map_data.paths)
end
@spec new({integer, integer}, integer | nil, 1 | 2) :: t
def new({width, height}, zoom \\ nil, scale \\ 1) do
%__MODULE__{
width: width,
height: height,
zoom: zoom,
scale: scale
}
end
@spec auto_init?(t, boolean) :: t
def auto_init?(map_data, auto_init) do
%{map_data | auto_init: auto_init}
end
@doc """
Controls whether the map should change its visible bounds
when a marker is added or removed. Defaults to false.
"""
@spec reset_bounds_on_update?(t, boolean) :: t
def reset_bounds_on_update?(map_data, reset) do
%{map_data | reset_bounds_on_update: reset}
end
@doc """
Controls the padding added to the map.fitBounds() call
when the map's bounds are reset. Defaults to nil.
See https://developers.google.com/maps/documentation/javascript/reference/map
for more info.
"""
@spec bound_padding(t, Padding.t()) :: t
def bound_padding(map_data, %Padding{} = padding) do
%{map_data | bound_padding: padding}
end
@doc """
Update the default center value for the map. Center defaults to
roughly Government Center area.
"""
@spec default_center(t, %{required(:latitude) => float, required(:longitude) => float} | nil) ::
t
def default_center(map_data, center) do
%{map_data | default_center: center}
end
@doc """
Returns a new MapData struct where the given marker is appended
to the current list of markers
"""
@spec add_marker(t, Marker.t()) :: t
def add_marker(map_data, marker) do
%{map_data | markers: [marker | map_data.markers]}
end
@doc """
Returns a new MapData struct where the given markers are appended
to the current list of markers
"""
@spec add_markers(t, [Marker.t()]) :: t
def add_markers(map_data, markers) do
%{map_data | markers: Enum.concat(map_data.markers, markers)}
end
@doc """
Returns a new MapData struct where the given path is appended
to the current list of paths
"""
@spec add_path(t, Path.t()) :: t
def add_path(map_data, path) do
%{map_data | paths: [path | map_data.paths]}
end
@doc """
Returns a new MapData struct where the given paths are appended
to the current list of paths
"""
@spec add_paths(t, [Path.t()]) :: t
def add_paths(map_data, paths) do
%{map_data | paths: Enum.concat(map_data.paths, paths)}
end
@doc """
Enable or disable layers on the map.
"""
@spec add_layers(t, Layers.t()) :: t
def add_layers(%__MODULE__{} = map_data, %Layers{} = layers) do
%{map_data | layers: layers}
end
@doc """
Adds params that will disable the streetViewControl
and MapTypeControl on a dynamic map.
"""
@spec disable_map_type_controls(t) :: t
def disable_map_type_controls(map_data) do
opts_map = %{streetViewControl: false, mapTypeControl: false}
%{map_data | dynamic_options: Map.merge(map_data.dynamic_options, opts_map)}
end
@spec center_value(t) :: String.t() | nil
defp center_value(map_data) do
do_center_value(map_data, Enum.any?(map_data.markers, & &1.visible?))
end
@spec do_center_value(t, boolean) :: String.t() | nil
defp do_center_value(%__MODULE__{markers: [marker | _]}, false) do
Marker.format_static_marker(marker)
end
defp do_center_value(_map_data, _any_visible), do: nil
@spec size_value(t) :: String.t()
defp size_value(%__MODULE__{width: width, height: height}), do: "#{width}x#{height}"
@doc """
Formats a list of Markers. Markers are grouped by icon.
"""
@spec format_static_markers(Keyword.t(), [Marker.t()]) :: Keyword.t()
def format_static_markers(params, markers) do
markers
|> Enum.filter(& &1.visible?)
|> Enum.group_by(& &1.icon)
|> Enum.map(&do_format_static_markers/1)
|> add_values_for_key(:markers, params)
end
@spec do_format_static_markers({String.t() | nil, [Marker.t()]}) :: String.t()
defp do_format_static_markers({nil, markers}) do
formatted_markers = Enum.map(markers, &Marker.format_static_marker/1)
"anchor:center|#{Enum.join(formatted_markers, "|")}"
end
defp do_format_static_markers({icon, markers}) do
formatted_markers = Enum.map(markers, &Marker.format_static_marker/1)
"anchor:center|icon:#{icon}|#{Enum.join(formatted_markers, "|")}"
end
@spec format_static_paths([query_entry], [Path.t()]) :: [query_entry]
defp format_static_paths(params, paths) do
paths
|> Enum.map(&Path.format_static_path/1)
|> add_values_for_key(:path, params)
end
defp add_values_for_key(values, key, params) do
Enum.reduce(values, params, fn value, key_list -> [{key, value} | key_list] end)
end
end
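# A minimal usage sketch; with no markers or paths added, the static query
# contains only the base parameters:
#
#     map =
#       GoogleMaps.MapData.new({600, 400}, 15)
#       |> GoogleMaps.MapData.disable_map_type_controls()
#
#     GoogleMaps.MapData.static_query(map)
#     # => [center: nil, size: "600x400", scale: 1, zoom: 15]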
|
apps/google_maps/lib/google_maps/map_data.ex
| 0.911839 | 0.501282 |
map_data.ex
|
starcoder
|
defmodule XQLite3 do
@moduledoc """
SQLite3 driver for Elixir.
"""
alias XQLite3.{Query, Result, Error}
@doc """
Start the connection process and connect to the database.
## Options
* `:path` - the path to the database. If no database exists at that
path, one will be created there. For an in-memory database, provide `:memory:`
* `:timeout` - configure the default statement timeout in ms.
Default is provided by `esqlite3` and is currently 5000 ms.
## Examples
iex> {:ok, pid} = XQLite3.start_link(":memory:")
{:ok, #PID<0.71.0>}
"""
def start_link(path, opts \\ []) do
opts = [path: path] ++ opts
DBConnection.start_link(XQLite3.Protocol, opts)
end
@doc """
Returns a supervisor child specification for a DBConnection pool.
"""
def child_spec(opts) do
DBConnection.child_spec(XQLite3.Protocol, opts)
end
@doc """
Runs a query and returns the result as `{:ok, %XQLite3.Result{}}` or
`{:error, %XQLite3.Error{}}` if there was a database error. Parameters
can be set in the query as `$1` embedded in the query string. Parameters
are given as a list of Elixir values.
"""
def query(conn, statement, params, opts \\ []) do
case DBConnection.prepare_execute(conn, %Query{statement: statement}, params, opts) do
{:ok, _, result} -> {:ok, result}
{:error, _} = error -> error
end
end
@spec prepare_execute(any, iodata, iodata, list, [any]) ::
{:ok, Query.t(), Result.t()} | {:error, Error.t()}
def prepare_execute(conn, name, statement, params, opts \\ []) do
query = %Query{name: name, statement: statement}
DBConnection.prepare_execute(conn, query, params, opts)
end
@doc """
Runs a query and returns the result or raises `XQLite3.Error` if there
was an error. See `query/3`.
"""
def query!(conn, statement, params, opts \\ []) do
case query(conn, statement, params, opts) do
{:ok, result} -> result
{:error, err} -> raise err
end
end
@spec execute(any, Query.t(), list, [any]) ::
{:ok, Query.t(), Result.t()} | {:error, Error.t()}
def execute(conn, query, params, opts \\ []) do
DBConnection.execute(conn, query, params, opts)
end
end
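# A hypothetical session sketch (table and values are invented):
#
#     {:ok, pid} = XQLite3.start_link(":memory:")
#     {:ok, _} = XQLite3.query(pid, "CREATE TABLE users (name TEXT)", [])
#     {:ok, _} = XQLite3.query(pid, "INSERT INTO users VALUES ($1)", ["ada"])
#     {:ok, result} = XQLite3.query(pid, "SELECT name FROM users", [])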
|
lib/xqlite3.ex
| 0.830697 | 0.405302 |
xqlite3.ex
|
starcoder
|
# <NAME>
# https://github.com/boone
# https://twitter.com/boonedocks
# Trying to learn Elixir by modeling an Enigma machine, inspired by the Ruby
# code written by @albert_still in:
# http://red-badger.com/blog/2015/02/23/understanding-the-enigma-machine-with-30-lines-of-ruby-star-of-the-2014-film-the-imitation-game
# Encrypt/decrypt a message with:
# Enigma.process_string("SECRETMESSAGE")
# To decrypt an encrypted message, supply the encrypted message and the
# matching plugboard, rotor, and reflector configurations:
# Enigma.process_string("LDINKZRVIDGPO", 'YXSDPFLHVQKGOUMEJRCTNIZBAW',
# 'FKDRHSXGVYNBLZIWMEJQOACUTP', 'XTQFWNBCKYVSZODMJIHPGERAUL',
# 'MZEVUBYCLKHOSIWQNADGFTRPXJ', 'OQFGUCDPZKJVXWAHBTYRELNMSI')
defmodule Enigma do
def rotor do
# Return a randomized rotor: a random list of chars from A-Z.
Enum.take_random(?A..?Z, 26)
end
def reflector do
# Get a random A-Z character list and break it into pairs.
# For each pair, the first letter will map to the second and vice versa.
# Create a character list similar to the rotors which represents this
# reflected pair relationship.
random_pairs = Enum.chunk_every(Enum.take_random(?A..?Z, 26), 2)
# Start with a blank list with 26 empty slots, which we need to fill with
# the pairs.
reflector = List.duplicate(nil, 26)
# Fill in the blank list with the pairs.
reflector_iterate(random_pairs, reflector)
end
def plugboard do
# The plugboard is like a reflector, but only 10 letters are swapped.
# The remaining letters map to themselves.
random_pairs = Enum.chunk_every(Enum.take_random(?A..?Z, 26), 2)
# Keep 10 pairs, throw away 6
random_pairs = Enum.take(random_pairs, 10)
# Start with an A-Z list.
plugboard = Enum.to_list(?A..?Z)
# Overwrite list with the pairs, leaving 6 letters unchanged.
reflector_iterate(random_pairs, plugboard)
end
def process_string(str, plugboard \\ plugboard(), rotor1 \\ rotor(),
rotor2 \\ rotor(), rotor3 \\ rotor(), reflector \\ reflector()) do
# We accept any string as input, but we really want a charlist of only
# A-Z characters, no spacing or punctuation.
str = str
|> String.upcase
|> to_charlist
|> Enum.reject(fn(x) -> not(x in ?A..?Z) end)
# Output the configuration of the Enigma machine.
IO.puts "Plugboard: #{plugboard}"
IO.puts "Rotor 1: #{rotor1}"
IO.puts "Rotor 2: #{rotor2}"
IO.puts "Rotor 3: #{rotor3}"
IO.puts "Reflector: #{reflector}"
# Process the message!
result = iterate(str, plugboard, rotor1, rotor2, rotor3, reflector, 0, [])
IO.puts "#{str} was translated to #{result}"
to_string(result)
end
defp iterate([head | tail], plugboard, rotor1, rotor2, rotor3, reflector, count, newlist) do
# Spin Rotor 1
rotor1 = tick_rotor(rotor1)
# Spin Rotor 2 if Rotor 1 has gone all the way around.
rotor2 = case rem(count, 25) do
0 -> tick_rotor(rotor2)
_ -> rotor2
end
# Spin Rotor 3 if Rotor 2 has gone all the way around.
rotor3 = case rem(count, 25 * 25) do
0 -> tick_rotor(rotor3)
_ -> rotor3
end
# Send the character through the plugboard.
head = list_value(plugboard, head)
# Send the character through each rotor.
head = list_value(rotor1, head)
head = list_value(rotor2, head)
head = list_value(rotor3, head)
# Send the character through the reflector.
head = list_value(reflector, head)
# Send the character back through the rotors in reverse.
head = inverted_list_value(rotor3, head)
head = inverted_list_value(rotor2, head)
head = inverted_list_value(rotor1, head)
# Send the character back through the plugboard in reverse.
head = inverted_list_value(plugboard, head)
# Append the character to our message.
newlist = List.insert_at(newlist, -1, head)
# Track the iteration count.
count = count + 1
# Recurse with the remaining message.
iterate(tail, plugboard, rotor1, rotor2, rotor3, reflector, count, newlist)
end
defp iterate([], _, _, _, _, _, _, newlist) do
# Recursion is complete, return the final character list.
newlist
end
# Character translations are used in both the rotors and the reflector.
# Here we store them as character lists, where A-Z map to the respective
# position in the character list. Hence we need functions that will find the
# translation for 'A' from the list, and vice versa.
# take the char and find the corresponding translated char in the list
defp list_value(list, char) do
Enum.at(list, char - 65)
end
# take the translated char and find the corresponding original char
defp inverted_list_value(list, char) do
(Enum.find_index list, fn(x) -> x == char end) + 65
end
defp reflector_iterate([head | tail], reflector) do
# head will be a character list with two elements.
# Add the first/last relationship to the reflector.
reflector = List.replace_at(reflector, List.first(head) - 65, List.last(head))
# Add the last/first "reflected" relationship to the reflector.
reflector = List.replace_at(reflector, List.last(head) - 65, List.first(head))
# Recurse until complete.
reflector_iterate(tail, reflector)
end
defp reflector_iterate([], reflector) do
# Recursion is complete, return the final reflector.
reflector
end
defp tick_rotor(rotor) do
# Spin the rotor to the next position.
# ABCDEFGHIJKLMNOPQRSTUVWXYZ shifts to BCDEFGHIJKLMNOPQRSTUVWXYZA
List.insert_at(List.delete_at(rotor, 0), 25, List.first(rotor))
end
end
|
lib/enigma.ex
| 0.685739 | 0.485478 |
enigma.ex
|
starcoder
|
defmodule Vaultag do
@moduledoc """
Vault agent.
A wrapper around `libvault` library.
## Configuration
* `vault` - `libvault` options;
* `ets_table_options` - options for ETS table;
* `token_renewal_time_shift` - a time in seconds;
* `:vault` - `libvault` configuration. See the options for `Vault.new/1`;
* `:cache_cleanup_interval` - an interval in seconds after which the cache has to be cleaned up
from the outdated entries. Defaults to `3600`;
* `:token_renew` - a boolean which indicates whether to use the token renewal functionality.
Defaults to `true`;
* `:token_renewal_time_shift` - Defaults to `60` seconds;
* `:lease_renewal_time_shift` - Defaults to `60` seconds;
"""
use GenServer
alias Vaultag.{Logger, Cache}
import Vaultag.Config
def start_link(opts) do
GenServer.start_link(__MODULE__, :ok, Keyword.put_new(opts, :name, __MODULE__))
end
def read(path, opts \\ []) do
maybe_call({:read, path, opts})
end
def list(path, opts \\ []) do
maybe_call({:list, path, opts})
end
def write(path, value, opts \\ []) do
maybe_call({:write, path, value, opts})
end
def delete(path, opts \\ []) do
maybe_call({:delete, path, opts})
end
def request(method, path, opts \\ []) do
maybe_call({:request, method, path, opts})
end
def get_vault do
maybe_call(:get_vault)
end
def set_vault(vault) do
maybe_call({:set_vault, vault})
end
@impl true
def init(:ok) do
if is_nil(config(:vault)) do
Logger.info("not configured")
:ignore
else
Process.flag(:trap_exit, true)
:timer.send_interval(config(:cache_cleanup_interval, 3600) * 1000, self(), :cleanup_cache)
send(self(), {:auth, 1})
{:ok, %{table: Cache.init(), vault: Vault.new([])}}
end
end
@impl true
def handle_call({:read, path, opts}, _, state) do
# We always put full responses into the cache.
key = Cache.key_for_request(path, Keyword.drop(opts, [:full_response]))
response =
with {:cache, nil} <- {:cache, Cache.get(state.table, key)},
{:ok, data} <- Vault.read(state.vault, path, Keyword.put(opts, :full_response, true)) do
Cache.put(state.table, key, data)
maybe_schedule_lease_renewal(data)
{:ok, data}
else
{:cache, data} -> {:ok, data}
resp -> resp
end
reply =
case {response, Keyword.get(opts, :full_response, false)} do
{{:ok, data}, false} -> {:ok, Map.fetch!(data, "data")}
{{:ok, data}, true} -> {:ok, data}
_ -> response
end
{:reply, reply, state}
end
@impl true
def handle_call({:list, path, opts}, _, state) do
{:reply, Vault.list(state.vault, path, opts), state}
end
@impl true
def handle_call({:write, path, value, opts}, _, state) do
{:reply, Vault.write(state.vault, path, value, opts), state}
end
@impl true
def handle_call({:delete, path, opts}, _, state) do
{:reply, Vault.delete(state.vault, path, opts), state}
end
@impl true
def handle_call({:request, method, path, opts}, _, state) do
{:reply, Vault.request(state.vault, method, path, opts), state}
end
@impl true
def handle_call(:get_vault, _, state) do
{:reply, state.vault, state}
end
@impl true
def handle_call({:set_vault, vault}, _, state) do
{:reply, vault, %{state | vault: vault}}
end
@impl true
def handle_info({:auth, attempt}, state) do
case config(:vault, []) |> Vault.new() |> Vault.auth() do
{:ok, vault} ->
Logger.info("authenticated")
maybe_schedule_token_renewal(vault)
Cache.reset(state.table)
{:noreply, %{state | vault: vault}}
{:error, reason} ->
Logger.error("authentication failed: #{inspect(reason)}, retrying in #{attempt}s")
Process.send_after(self(), {:auth, attempt + 1}, attempt * 1000)
{:noreply, state}
end
end
@impl true
def handle_info({:renew_token, attempt}, state) do
case Vault.request(state.vault, :post, "/auth/token/renew-self") do
{:ok, %{"auth" => %{"lease_duration" => lease_duration}, "warnings" => warnings}} ->
Logger.info("token renewed")
unless is_nil(warnings), do: Logger.warn("token renewal: #{inspect(warnings)}")
vault = put_token_expires_at(state.vault, lease_duration)
maybe_schedule_token_renewal(vault)
{:noreply, %{state | vault: vault}}
{:ok, %{"errors" => errors}} ->
Logger.warn("token renewal failed: #{inspect(errors)}")
{:noreply, state}
request_error ->
Logger.error("token renewal failed: #{inspect(request_error)}, retrying in #{attempt}s")
Process.send_after(self(), {:renew_token, attempt + 1}, attempt * 1000)
{:noreply, state}
end
end
@impl true
def handle_info({:renew_lease, lease_id, attempt}, state) do
case Vault.request(state.vault, :put, "/sys/leases/renew", body: %{lease_id: lease_id}) do
{:ok, %{"lease_id" => ^lease_id} = data} ->
Cache.update(state.table, data)
maybe_schedule_lease_renewal(data)
Logger.info("lease ID #{inspect(lease_id)} renewed")
{:ok, %{"errors" => errors}} ->
Logger.warn("lease ID #{inspect(lease_id)} failed to renew: #{inspect(errors)}")
request_error ->
Logger.error(
"lease ID #{inspect(lease_id)} failed to renew: " <>
"#{inspect(request_error)}, retrying in #{attempt}s"
)
Process.send_after(self(), {:renew_lease, lease_id, attempt + 1}, attempt * 1000)
end
{:noreply, state}
end
@impl true
def handle_info(:cleanup_cache, state) do
count = Cache.cleanup(state.table)
Logger.debug("cache cleanup: #{count} entries removed")
{:noreply, state}
end
@impl true
def terminate(_reason, %{vault: vault}) do
unless is_nil(vault), do: Vault.request(vault, :post, "/auth/token/revoke-self")
:ok
end
defp maybe_call(message) do
case GenServer.whereis(__MODULE__) do
nil -> {:error, :disabled}
pid -> GenServer.call(pid, message)
end
end
defp maybe_schedule_token_renewal(vault) do
if config(:token_renewal, true) do
ttl = NaiveDateTime.diff(vault.token_expires_at, NaiveDateTime.utc_now())
shift = config(:token_renewal_time_shift, 60)
delay = ttl - shift
# FIXME: the token with TTL less than 2 x :token_renewal_time_shift cannot be renewed
if delay > shift do
Logger.debug("token renewal scheduled in #{delay}s")
Process.send_after(self(), {:renew_token, 1}, delay * 1000)
else
Logger.debug("re-authentication scheduled in #{ttl}s")
Process.send_after(self(), {:auth, 1}, ttl * 1000)
end
else
Logger.debug("token renewal disabled")
end
end
defp maybe_schedule_lease_renewal(%{
"renewable" => true,
"lease_id" => lease_id,
"lease_duration" => lease_duration,
"warnings" => warnings
}) do
shift = config(:lease_renewal_time_shift, 60)
delay = lease_duration - shift
# FIXME: the lease with the duration less than 2 x :lease_renewal_time_shift cannot be renewed
if delay > shift do
Logger.debug("lease ID #{inspect(lease_id)} renewal scheduled in #{delay}s")
unless is_nil(warnings),
do: Logger.warn("lease ID #{inspect(lease_id)} renewal: #{inspect(warnings)}")
Process.send_after(self(), {:renew_lease, lease_id, 1}, delay * 1000)
end
end
defp maybe_schedule_lease_renewal(_), do: :ok
defp put_token_expires_at(vault, ttl) do
# https://github.com/matthewoden/libvault/blob/360eb7b2a19fda665c4e05a0aead1f52d3be80fd/lib/vault.ex#L368
%{vault | token_expires_at: NaiveDateTime.utc_now() |> NaiveDateTime.add(ttl, :second)}
end
end
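# A minimal usage sketch, assuming Vaultag is configured and started under a
# supervisor (the secret path and payload are invented):
#
#     {:ok, secret} = Vaultag.read("secret/data/myapp")
#     {:ok, _} = Vaultag.write("secret/data/myapp", %{"api_key" => "..."})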
|
lib/vaultag.ex
| 0.780035 | 0.47993 |
vaultag.ex
|
starcoder
|
defmodule BMP280.BME680Sensor do
@moduledoc false
alias BMP280.{BME680Calibration, BME680Comm, Calc, Comm, Measurement}
@behaviour BMP280.Sensor
@type raw_samples() :: %{
raw_pressure: non_neg_integer(),
raw_temperature: non_neg_integer(),
raw_humidity: non_neg_integer(),
raw_gas_resistance: non_neg_integer(),
raw_gas_range: non_neg_integer()
}
@type heater_duration_ms() :: 1..4032
@type heater_temperature_c() :: 200..400
@heater_temperature_c 300
@heater_duration_ms 100
@ambient_temperature_c 25
@impl true
def init(%{transport: transport} = initial_state) do
with :ok <- Comm.reset(transport),
{:ok, cal_binary} <- BME680Comm.read_calibration(transport),
calibration <- BME680Calibration.from_binary(cal_binary),
:ok <- BME680Comm.set_oversampling(transport),
:ok <- BME680Comm.set_filter(transport),
:ok <- BME680Comm.enable_gas_sensor(transport),
:ok <-
BME680Comm.set_gas_heater_temperature(
transport,
heater_resistance_code(calibration, @heater_temperature_c, @ambient_temperature_c)
),
:ok <-
BME680Comm.set_gas_heater_duration(
transport,
heater_duration_code(@heater_duration_ms)
),
:ok <- BME680Comm.set_gas_heater_profile(transport, 0),
do: %{initial_state | calibration: calibration}
end
@impl true
def read(%{transport: transport} = state) do
case BME680Comm.read_raw_samples(transport) do
{:ok, raw_samples} -> {:ok, measurement_from_raw_samples(raw_samples, state)}
error -> error
end
end
@spec measurement_from_raw_samples(<<_::80>>, BMP280.Sensor.t()) :: BMP280.Measurement.t()
def measurement_from_raw_samples(raw_samples, state) do
<<raw_pressure::20, _::4, raw_temperature::20, _::4, raw_humidity::16, _::16>> = raw_samples
<<_::64, raw_gas_resistance::10, _::2, raw_gas_range::4>> = raw_samples
%{calibration: calibration, sea_level_pa: sea_level_pa} = state
temperature_c = BME680Calibration.raw_to_temperature(calibration, raw_temperature)
pressure_pa = BME680Calibration.raw_to_pressure(calibration, temperature_c, raw_pressure)
humidity_rh = BME680Calibration.raw_to_humidity(calibration, temperature_c, raw_humidity)
gas_resistance_ohms =
BME680Calibration.raw_to_gas_resistance(
calibration,
raw_gas_resistance,
raw_gas_range
)
# Derived calculations
altitude_m = Calc.pressure_to_altitude(pressure_pa, sea_level_pa)
dew_point_c = Calc.dew_point(humidity_rh, temperature_c)
%Measurement{
temperature_c: temperature_c,
pressure_pa: pressure_pa,
altitude_m: altitude_m,
humidity_rh: humidity_rh,
dew_point_c: dew_point_c,
gas_resistance_ohms: gas_resistance_ohms,
timestamp_ms: System.monotonic_time(:millisecond)
}
end
@doc """
Convert the heater temperature into a register code.
## Examples
iex> cal = %{
...> par_gh1: -30,
...> par_gh2: -5969,
...> par_gh3: 18,
...> res_heat_val: 50,
...> res_heat_range: 1,
...> range_switching_error: 1
...> }
iex> BME680Sensor.heater_resistance_code(cal, 300, 28)
112
"""
@spec heater_resistance_code(BME680Calibration.t(), heater_temperature_c(), integer()) ::
integer()
def heater_resistance_code(cal, heater_temp_c, amb_temp_c) do
%{
par_gh1: par_gh1,
par_gh2: par_gh2,
par_gh3: par_gh3,
res_heat_range: res_heat_range,
res_heat_val: res_heat_val
} = cal
var1 = par_gh1 / 16.0 + 49.0
var2 = par_gh2 / 32_768.0 * 0.0005 + 0.00235
var3 = par_gh3 / 1024.0
var4 = var1 * (1.0 + var2 * heater_temp_c)
var5 = var4 + var3 * amb_temp_c
round(
3.4 * (var5 * (4.0 / (4.0 + res_heat_range)) * (1.0 / (1.0 + res_heat_val * 0.002)) - 25)
)
end
@doc """
Convert the heater duration milliseconds into a register code. Heating durations between 1 ms and
4032 ms can be configured. In practice, approximately 20–30 ms are necessary for the heater to
reach the intended target temperature.
## Examples
iex> BME680Sensor.heater_duration_code(63)
63
iex> BME680Sensor.heater_duration_code(64)
80
iex> BME680Sensor.heater_duration_code(100)
89
iex> BME680Sensor.heater_duration_code(4032)
255
iex> BME680Sensor.heater_duration_code(4033)
** (FunctionClauseError) no function clause matching in BMP280.BME680Sensor.heater_duration_code/2
"""
@spec heater_duration_code(heater_duration_ms(), non_neg_integer()) :: non_neg_integer()
def heater_duration_code(duration, factor \\ 0)
def heater_duration_code(duration, factor) when duration in 64..4032 do
duration |> div(4) |> heater_duration_code(factor + 1)
end
def heater_duration_code(duration, factor) when duration in 1..63 do
duration + factor * 64
end
end
|
lib/bmp280/sensor/bme680_sensor.ex
| 0.824427 | 0.547464 |
bme680_sensor.ex
|
starcoder
|
defmodule RlStudy.DP.PolicyIterationPlanner do
alias RlStudy.DP.PolicyIterationPlanner
alias RlStudy.DP.Planner
alias RlStudy.MDP.Environment
require Logger
@type t :: %RlStudy.DP.PolicyIterationPlanner{
env: Environment.t(),
log: [] | [binary()],
policy: [any()]
}
defstruct Planner.planner_data() ++ [policy: nil]
@spec initialize(RlStudy.DP.PolicyIterationPlanner.t()) :: RlStudy.DP.PolicyIterationPlanner.t()
def initialize(planner) do
planner_initializing = Planner.initialize(planner)
Logger.info("planner_initializing: #{inspect(planner_initializing, pretty: true)}")
policy =
Enum.map(Environment.states(planner_initializing.env), fn s ->
action_map =
Environment.actions()
|> Enum.map(fn a ->
{a, 1 / length(Environment.actions())}
end)
|> Map.new()
{s, action_map}
end)
|> Map.new()
Logger.info("policy: #{inspect(policy, pretty: true)}")
%PolicyIterationPlanner{
env: planner_initializing.env,
log: planner_initializing.log,
policy: policy
}
end
@spec estimate_by_policy(
%{:env => RlStudy.MDP.Environment.t(), optional(any) => any},
float(),
float()
) :: map()
def estimate_by_policy(planner, gamma, threshold) do
v =
Environment.states(planner.env)
|> Enum.map(fn s ->
{s, 0}
end)
|> Map.new()
{:ok, v_updated} = calc_v(planner, gamma, threshold, v, 0)
v_updated
end
def calc_v(planner, gamma, threshold, v, delta) do
%{v: v_updated, delta: delta_updated} =
Enum.reduce(v, %{v: v, delta: delta}, fn {v_state, _v_reward}, acc ->
expected_reward = expected_reward(planner, gamma, v, v_state)
delta_updating = Enum.max([acc.delta, Kernel.abs(expected_reward - acc.v[v_state])])
v_updating = Map.update(acc.v, v_state, expected_reward, fn _value -> expected_reward end)
%{v: v_updating, delta: delta_updating}
end)
if delta_updated >= threshold do
calc_v(planner, gamma, threshold, v_updated, 0)
else
{:ok, v_updated}
end
end
@spec expected_reward(
atom | %{:policy => nil | maybe_improper_list | map, optional(any) => any},
float(),
map(),
binary()
) :: float()
def expected_reward(planner, gamma, v, state) do
planner.policy[state]
|> Enum.map(fn {action, action_prob} ->
transitions = Planner.transitions_at(planner, state, action)
Enum.reduce(transitions, 0, fn %{prob: prob, next_state: next_state, reward: reward}, r ->
r + action_prob * prob * (reward + gamma * v[next_state])
end)
end)
# Policy evaluation takes the expectation over the policy's actions.
|> Enum.sum()
end
defimpl RlStudy.DP.Planner.Plan, for: RlStudy.DP.PolicyIterationPlanner do
@spec plan(RlStudy.DP.PolicyIterationPlanner.t(), float(), float()) :: any()
def plan(planner, gamma \\ 0.9, threshold \\ 0.0001) do
init_planner = PolicyIterationPlanner.initialize(planner)
Logger.info("planner: #{inspect(init_planner, pretty: true)}")
states = Environment.states(init_planner.env)
{:ok, updated_planner, updated_v} = calc(init_planner, gamma, threshold, states)
Planner.dict_to_grid(updated_planner, updated_v)
end
def take_max_action(action_value_dict) do
action_value_dict
|> Enum.max_by(fn {_k, v} -> v end)
|> elem(0)
end
@spec calc(
%{:env => RlStudy.MDP.Environment.t(), :log => list, optional(any) => any},
float,
float,
any
) :: {:ok, %{:env => map, :log => [...], optional(any) => any}, map}
def calc(planner, gamma, threshold, states) do
v = PolicyIterationPlanner.estimate_by_policy(planner, gamma, threshold)
planner_updated = %{planner | log: planner.log ++ [Planner.dict_to_grid(planner, v)]}
%{policy: updated_policy, update_stable: update_stable} =
Enum.reduce(states, %{policy: planner.policy, update_stable: true}, fn state, acc ->
policy_action = take_max_action(planner.policy[state])
best_action =
Environment.actions()
|> Enum.reduce(%{}, fn a, action_acc ->
r =
planner
|> Planner.transitions_at(state, a)
|> Enum.reduce(0, fn %{prob: prob, next_state: next_state, reward: reward}, r_acc ->
r_acc + prob * (reward + gamma * v[next_state])
end)
Map.put(action_acc, a, r)
end)
|> take_max_action()
# The policy is stable only if no state changes its greedy action.
update_stable = acc.update_stable and policy_action == best_action
# Make the policy deterministic: put all probability on the best action.
updated_policy_s =
planner.policy[state]
|> Enum.map(fn {a, _prob} -> {a, if(a == best_action, do: 1, else: 0)} end)
|> Map.new()
%{acc | policy: Map.put(acc.policy, state, updated_policy_s), update_stable: update_stable}
end)
planner_updated = %{planner_updated | policy: updated_policy}
if update_stable do
{:ok, planner_updated, v}
else
calc(planner_updated, gamma, threshold, states)
end
end
end
end
|
lib/dp/policy_iteration_planner.ex
| 0.778607 | 0.665963 |
policy_iteration_planner.ex
|
starcoder
|
defmodule Chisel.Renderer do
@moduledoc """
The renderer is capable of draw the text on any target using for that
a function that receives the x, y coordinates of the pixel to be painted.
"""
alias Chisel.Font
alias Chisel.Font.Glyph
@draw_default_opts [
size_x: 1,
size_y: 1
]
@type acc :: any()
@typedoc """
Use `size_x` and `size_y` options to scale up the font.
"""
@type draw_options :: list({:size_x, integer()} | {:size_y, integer()})
@typedoc """
The function used to paint the canvas.
Chisel will use this function to draw the text.
"""
@type pixel_fun :: (x :: integer(), y :: integer() -> term())
@typedoc """
The reducer function used to paint the canvas.
Chisel calls it with the pixel coordinates and the accumulated value, and it
must return the updated accumulator.
"""
@type reduce_pixel_fun :: (x :: integer(), y :: integer(), acc :: acc() -> acc())
@doc """
Draws a string.
The coordinates (`tlx`, `tly`) are for the top left corner.
"""
@spec draw_text(
text :: String.t(),
tlx :: integer(),
tly :: integer(),
font :: Font.t(),
put_pixel :: pixel_fun,
opts :: draw_options()
) ::
{x :: integer(), y :: integer()}
def draw_text(text, tlx, tly, %Font{} = font, put_pixel, opts \\ []) when is_binary(text) do
reduce_pixel = fn x, y, _ -> put_pixel.(x, y) end
{_acc, dx, dy} = reduce_draw_text(text, tlx, tly, font, nil, reduce_pixel, opts)
{dx, dy}
end
@doc """
Draws a character using the codepoint
The coordinates (`tlx`, `tly`) are for the top left corner.
"""
@spec draw_char(
codepoint :: integer(),
clx :: integer(),
cly :: integer(),
font :: Font.t(),
put_pixel :: pixel_fun,
opts :: draw_options()
) ::
{x :: integer(), y :: integer()}
def draw_char(codepoint, clx, cly, %Font{} = font, put_pixel, opts \\ [])
when is_integer(codepoint) do
reduce_pixel = fn x, y, _ -> put_pixel.(x, y) end
{_acc, dx, dy} = reduce_draw_char(codepoint, clx, cly, font, nil, reduce_pixel, opts)
{dx, dy}
end
@doc """
Draws a string, calling a reducer function.
The coordinates (`tlx`, `tly`) are for the top left corner.
"""
@spec reduce_draw_text(
text :: String.t(),
tlx :: integer(),
tly :: integer(),
font :: Font.t(),
acc :: acc(),
reduce_pixel :: reduce_pixel_fun,
opts :: draw_options()
) ::
{acc :: acc(), x :: integer(), y :: integer()}
def reduce_draw_text(text, tlx, tly, %Font{} = font, acc, reduce_pixel, opts \\ [])
when is_binary(text) do
opts = Keyword.merge(@draw_default_opts, opts)
text
|> to_charlist()
|> Enum.reduce({acc, tlx, tly}, fn
char, {acc1, x, y} ->
case char do
# Ignore carriage return
13 ->
{acc1, x, y}
# New line
10 ->
%{size: {_, font_h}} = font
{acc1, tlx, y + font_h * opts[:size_y]}
_ ->
reduce_draw_char(char, x, y, font, acc1, reduce_pixel, opts)
end
end)
end
@doc """
Draws a character using the codepoint, calling a reducer function.
The coordinates (`clx`, `cly`) are for the top left corner.
"""
@spec reduce_draw_char(
codepoint :: integer(),
clx :: integer(),
cly :: integer(),
font :: Font.t(),
acc :: acc(),
reduce_pixel :: reduce_pixel_fun,
opts :: draw_options()
) ::
{acc :: acc(), x :: integer(), y :: integer()}
def reduce_draw_char(codepoint, clx, cly, %Font{} = font, acc, reduce_pixel, opts \\ [])
when is_integer(codepoint) do
opts = Keyword.merge(@draw_default_opts, opts)
size_x = opts[:size_x]
%{size: {_, font_h}} = font
case lookup_glyph(codepoint, font) do
%Glyph{} = glyph ->
acc1 = draw_glyph(glyph, clx, cly + font_h, reduce_pixel, opts, acc)
glyph_dx = glyph.dwx
{acc1, clx + glyph_dx * size_x, cly}
_ ->
{acc, clx, cly}
end
end
@doc """
Gets the size of the rendered string using the font and options provided
"""
@spec get_text_width(
text :: String.t(),
font :: Font.t(),
opts :: draw_options()
) :: integer()
def get_text_width(text, %Font{} = font, opts \\ []) when is_binary(text) do
opts = Keyword.merge(@draw_default_opts, opts)
size_x = opts[:size_x]
to_charlist(text)
|> Enum.reduce(0, fn char, size ->
case lookup_glyph(char, font) do
%Glyph{} = glyph ->
glyph_dx = glyph.dwx
size + glyph_dx * size_x
_ ->
size
end
end)
end
defp draw_glyph(%Glyph{} = glyph, gx, gy, reduce_pixel, opts, acc) do
opts = Keyword.merge(@draw_default_opts, opts)
%{
data: data,
size: {bb_w, bb_h},
offset: {bb_xoff, bb_yoff}
} = glyph
x = gx - bb_xoff
y = gy - bb_yoff - bb_h
for(<<row::bitstring-size(bb_w) <- data>>, do: row)
|> Enum.reverse()
|> do_render_glyph({x, y}, reduce_pixel, opts, acc)
end
defp do_render_glyph(rows, pos, reduce_pixel, opts, acc, iy \\ 0)
defp do_render_glyph([], _pos, _put_pixel, _opts, acc, _iy),
do: acc
defp do_render_glyph([row | rows], pos, reduce_pixel, opts, acc, iy) do
acc = render_glyph_row(row, pos, iy, reduce_pixel, opts, acc)
do_render_glyph(rows, pos, reduce_pixel, opts, acc, iy + opts[:size_y])
end
defp render_glyph_row(row, pos, iy, reduce_pixel, opts, acc, ix \\ 0)
defp render_glyph_row(<<>>, _pos, _iy, _put_pixel, _opts, acc, _ix),
do: acc
defp render_glyph_row(<<1::1, rest::bitstring>>, pos, iy, reduce_pixel, opts, acc, ix) do
{x, y} = pos
acc =
for(ox <- 0..(opts[:size_x] - 1), oy <- 0..(opts[:size_y] - 1), do: {ox, oy})
|> Enum.reduce(acc, fn {ox, oy}, acc1 ->
reduce_pixel.(x + ix + ox, y + iy + oy, acc1)
end)
render_glyph_row(rest, pos, iy, reduce_pixel, opts, acc, ix + opts[:size_x])
end
defp render_glyph_row(<<_::1, rest::bitstring>>, pos, iy, reduce_pixel, opts, acc, ix),
do: render_glyph_row(rest, pos, iy, reduce_pixel, opts, acc, ix + opts[:size_x])
defp lookup_glyph(char, font),
do: Map.get(font.glyphs, char)
end
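# A minimal usage sketch (assumes a `font` loaded elsewhere, e.g. via
# `Chisel.Font.load/1`); collects the painted pixels into a MapSet through
# the reducer variant:
#
#     reduce_pixel = fn x, y, acc -> MapSet.put(acc, {x, y}) end
#
#     {pixels, _dx, _dy} =
#       Chisel.Renderer.reduce_draw_text("Hi!", 0, 0, font, MapSet.new(), reduce_pixel)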
|
lib/chisel/renderer.ex
| 0.870501 | 0.560253 |
renderer.ex
|
starcoder
|