code
stringlengths 114
1.05M
| path
stringlengths 3
312
| quality_prob
float64 0.5
0.99
| learning_prob
float64 0.2
1
| filename
stringlengths 3
168
| kind
stringclasses 1
value |
---|---|---|---|---|---|
defmodule ExJenga.SendMoney.ToMobileWallets do
  @moduledoc """
  This enables your application to send money to telco :iphone: wallets across Kenya, Uganda, Tanzania & Rwanda.
  """

  import ExJenga.JengaBase
  alias ExJenga.Signature

  @doc """
  Send Money To Mobile Wallets

  ## Parameters
  attrs: - a map containing:
  - `source` - a map containing; `countryCode`, `name` and `accountNumber`
  - `destination` - a map containing; `type`, `countryCode`, `name`, `mobileNumber` and `walletName`
  - `transfer` - a map containing; `type`, `amount`, `currencyCode`, `reference`, `date` and `description`
  and second optional parameter(boolean): `true` - if sending Equitel Number else `false` or ignore when sending to Safaricom/Airtel
  Read More about the parameters' descriptions here: https://developer.jengaapi.io/reference#remittance

  ## Example
      iex> ExJenga.SendMoney.ToMobileWallets.request(%{ source: %{ countryCode: "KE", name: "<NAME>", accountNumber: "0770194201783" }, destination: %{ type: "mobile", countryCode: "KE", name: "<NAME>", mobileNumber: "0722000000", walletName: "Mpesa" }, transfer: %{ type: "MobileWallet", amount: "1000", currencyCode: "KES", reference: "639434645738", date: "2020-11-25", description: "some remarks here" } })
      {:ok,
       %{
         "transactionId" => "1452854",
         "status" => "SUCCESS"
       }}
  """
  @spec request(map(), boolean()) :: {:error, any()} | {:ok, any()}
  def request(request_body, to_equitel \\ false)

  def request(
        %{
          source: %{
            countryCode: _source_country,
            name: _source_name,
            accountNumber: account_number
          },
          destination: %{
            type: _dest_type,
            countryCode: _dest_country,
            name: _dest_name,
            mobileNumber: _mobile_number,
            walletName: _wallet_name
          },
          transfer: %{
            type: _transfer_type,
            amount: amount,
            currencyCode: currency_code,
            reference: reference,
            date: _date,
            description: _description
          }
        } = request_body,
        to_equitel
      ) do
    # Equitel and Safaricom/Airtel wallets require the signature fields
    # concatenated in a different order.
    signature_payload =
      case to_equitel do
        true -> "#{account_number}#{amount}#{currency_code}#{reference}"
        false -> "#{amount}#{currency_code}#{reference}#{account_number}"
      end

    make_request("/transaction/v2/remittance#sendmobile", request_body, [
      {"signature", Signature.sign(signature_payload)}
    ])
  end

  # Fallback for request bodies missing any of the required nested keys.
  def request(_, _to_equitel), do: {:error, "Required Parameters missing, check your request body"}
end
|
lib/ex_jenga/send_money/to_mobile_wallets.ex
| 0.810141 | 0.479626 |
to_mobile_wallets.ex
|
starcoder
|
defmodule Lingua do
  @moduledoc """
  Lingua wraps [Peter M. Stahl](https://github.com/pemistahl)'s [lingua-rs](https://github.com/pemistahl/lingua-rs) language detection library.
  This wrapper follows the lingua-rs API closely, so consult the [documentation](https://docs.rs/lingua/1.0.3/lingua/index.html) for more information.
  """

  # Defaults mirror lingua-rs' own builder defaults.
  @default_builder_option :all_languages
  @default_languages []
  @default_minimum_relative_distance 0.0
  @default_compute_language_confidence_values false
  @default_preload_language_models false

  @doc """
  Initialize the detector. Calling this is optional but it may come in handy in cases where you want lingua-rs to load
  the language corpora so that subsequent calls to `detect` are fast. The first time the detector is run it can take some time to load (~12 seconds on my Macbook Pro). See
  also the `preload_language_models` option below.
  ## Example
      iex> Lingua.init()
      :ok
  """
  defdelegate init(), to: Lingua.Nif

  @doc """
  Detect the language of the given input text. By default, all supported languages will be considered and
  the minimum relative distance is `0.0`.
  Returns the detected language, or a list of languages and their confidence values, or `:no_match` if the given text
  doesn't match a language.
  Options:
  * `builder_option:` - can be one of the following (defaults to `:all_languages`):
      * `:all_languages` - consider every supported language
      * `:all_spoken_languages` - consider only currently spoken languages
      * `:all_languages_with_arabic_script` - consider only languages written in Arabic script
      * `:all_languages_with_cyrillic_script` - consider only languages written in Cyrillic script
      * `:all_languages_with_devanagari_script` - consider only languages written in Devanagari script
      * `:all_languages_with_latin_script` - consider only languages written in Latin script
      * `:with_languages` - consider only the languages supplied in the `languages` option.
      * `:without_languages` - consider all languages except those supplied in the `languages` option. Two or more are required. (see below)
  * `languages:` - specify two or more languages to consider or to not consider depending on the `builder_option:` (defaults to `[]`). Accepts
  any combination of languages and ISO 639-1 or 639-3 codes, for example: `[:english, :ru, :lit]` to consider English, Russian and Lithuanian.
  * `minimum_relative_distance:` - specify the minimum relative distance (0.0 - 0.99) required for a language to be considered a match for the input.
  See the lingua-rs [documentation](https://docs.rs/lingua/1.0.3/lingua/struct.LanguageDetectorBuilder.html#method.with_minimum_relative_distance) for details. (defaults to `0.0`)
  * `compute_language_confidence_values:` - returns the full list of language matches for the input and their confidence values. (defaults to `false`)
  * `preload_language_models:` - preload all language models instead of just those required for the match. (defaults to `false`)
  ## Examples
      iex> Lingua.detect("this is definitely English")
      {:ok, :english}
      iex> Lingua.detect("וזה בעברית")
      {:ok, :hebrew}
      iex> Lingua.detect("państwowych", builder_option: :with_languages, languages: [:english, :russian, :polish])
      {:ok, :polish}
      iex> Lingua.detect("ѕидови", builder_option: :all_languages_with_cyrillic_script)
      {:ok, :macedonian}
      iex> Lingua.detect("כלב", builder_option: :with_languages, languages: [:english, :russian, :polish])
      {:ok, :no_match}
      iex> Lingua.detect("what in the world is this", builder_option: :with_languages, languages: [:english, :russian, :hebrew], compute_language_confidence_values: true)
      {:ok, [english: 1.0]}
  """
  def detect(text, options \\ []) do
    builder_option = Keyword.get(options, :builder_option, @default_builder_option)
    languages = Keyword.get(options, :languages, @default_languages)

    minimum_relative_distance =
      Keyword.get(options, :minimum_relative_distance, @default_minimum_relative_distance)

    preload_language_models =
      Keyword.get(options, :preload_language_models, @default_preload_language_models)

    compute_language_confidence_values =
      Keyword.get(
        options,
        :compute_language_confidence_values,
        @default_compute_language_confidence_values
      )

    Lingua.Nif.run_detection(
      text,
      builder_option,
      languages,
      compute_language_confidence_values,
      minimum_relative_distance,
      preload_language_models
    )
  end

  @doc """
  Like `detect`, but returns the result value or raises an error.
  """
  def detect!(text, options \\ []) do
    case detect(text, options) do
      {:ok, value} -> value
      {:error, error} -> raise error
    end
  end

  @doc """
  Get the list of supported languages.
  ## Example
      iex> Lingua.all_languages()
      [:afrikaans, :albanian, :arabic, :armenian, :azerbaijani, :basque, :belarusian,
      :bengali, :bokmal, :bosnian, :bulgarian, :catalan, :chinese, :croatian, :czech,
      :danish, :dutch, :english, :esperanto, :estonian, :finnish, :french, :ganda,
      :georgian, :german, :greek, :gujarati, :hebrew, :hindi, :hungarian, :icelandic,
      :indonesian, :irish, :italian, :japanese, :kazakh, :korean, :latin, :latvian,
      :lithuanian, :macedonian, :malay, :maori, :marathi, :mongolian, :nynorsk,
      :persian, :polish, :portuguese, :punjabi, :romanian, :russian, :serbian,
      :shona, :slovak, :slovene, :somali, :sotho, :spanish, :swahili, :swedish,
      :tagalog, :tamil, :telugu, :thai, :tsonga, :tswana, :turkish, :ukrainian,
      :urdu, :vietnamese, :welsh, :xhosa, :yoruba, :zulu]
  """
  defdelegate all_languages(), to: Lingua.Nif

  @doc """
  Get the list of supported spoken languages.
  ## Example
      iex> Lingua.all_spoken_languages()
      [:afrikaans, :albanian, :arabic, :armenian, :azerbaijani, :basque, :belarusian,
      :bengali, :bokmal, :bosnian, :bulgarian, :catalan, :chinese, :croatian, :czech,
      :danish, :dutch, :english, :esperanto, :estonian, :finnish, :french, :ganda,
      :georgian, :german, :greek, :gujarati, :hebrew, :hindi, :hungarian, :icelandic,
      :indonesian, :irish, :italian, :japanese, :kazakh, :korean, :latvian,
      :lithuanian, :macedonian, :malay, :maori, :marathi, :mongolian, :nynorsk,
      :persian, :polish, :portuguese, :punjabi, :romanian, :russian, :serbian,
      :shona, :slovak, :slovene, :somali, :sotho, :spanish, :swahili, :swedish,
      :tagalog, :tamil, :telugu, :thai, :tsonga, :tswana, :turkish, :ukrainian,
      :urdu, :vietnamese, :welsh, :xhosa, :yoruba, :zulu]
  """
  defdelegate all_spoken_languages(), to: Lingua.Nif

  @doc """
  Get the list of supported languages using Arabic script.
  ## Example
      iex> Lingua.all_languages_with_arabic_script()
      [:arabic, :persian, :urdu]
  """
  defdelegate all_languages_with_arabic_script(), to: Lingua.Nif

  @doc """
  Get the list of supported languages using Cyrillic script.
  ## Example
      iex> Lingua.all_languages_with_cyrillic_script()
      [:belarusian, :bulgarian, :kazakh, :macedonian, :mongolian, :russian, :serbian, :ukrainian]
  """
  defdelegate all_languages_with_cyrillic_script(), to: Lingua.Nif

  @doc """
  Get the list of supported languages using Devanagari script.
  ## Example
      iex> Lingua.all_languages_with_devanagari_script()
      [:hindi, :marathi]
  """
  defdelegate all_languages_with_devanagari_script(), to: Lingua.Nif

  @doc """
  Get the list of supported languages using Latin script.
  ## Example
      iex> Lingua.all_languages_with_latin_script()
      [:afrikaans, :albanian, :azerbaijani, :basque, :bokmal, :bosnian, :catalan,
      :croatian, :czech, :danish, :dutch, :english, :esperanto, :estonian, :finnish,
      :french, :ganda, :german, :hungarian, :icelandic, :indonesian, :irish,
      :italian, :latin, :latvian, :lithuanian, :malay, :maori, :nynorsk, :polish,
      :portuguese, :romanian, :shona, :slovak, :slovene, :somali, :sotho, :spanish,
      :swahili, :swedish, :tagalog, :tsonga, :tswana, :turkish, :vietnamese, :welsh,
      :xhosa, :yoruba, :zulu]
  """
  defdelegate all_languages_with_latin_script(), to: Lingua.Nif

  @doc """
  Get the language for the given ISO 639-1 language code.
  ## Example
      iex> Lingua.language_for_iso_code_639_1(:en)
      {:ok, :english}
      iex> Lingua.language_for_iso_code_639_1(:er)
      {:error, :unrecognized_iso_code}
  """
  defdelegate language_for_iso_code_639_1(code), to: Lingua.Nif

  @doc """
  Get the language for the given ISO 639-3 language code.
  ## Example
      iex> Lingua.language_for_iso_code_639_3(:eng)
      {:ok, :english}
      iex> Lingua.language_for_iso_code_639_3(:enr)
      {:error, :unrecognized_iso_code}
  """
  defdelegate language_for_iso_code_639_3(code), to: Lingua.Nif

  @doc """
  Get the language for the given ISO 639-1 or 639-3 language code.
  ## Example
      iex> Lingua.language_for_iso_code(:en)
      {:ok, :english}
      iex> Lingua.language_for_iso_code(:eng)
      {:ok, :english}
      iex> Lingua.language_for_iso_code(:mop)
      {:error, :unrecognized_iso_code}
  """
  defdelegate language_for_iso_code(code), to: Lingua.Nif

  @doc """
  Get the ISO 639-1 language code for the given language.
  ## Example
      iex> Lingua.iso_code_639_1_for_language(:english)
      {:ok, :en}
      iex> Lingua.iso_code_639_1_for_language(:nope)
      {:error, :unrecognized_language}
  """
  defdelegate iso_code_639_1_for_language(language), to: Lingua.Nif

  @doc """
  Get the ISO 639-3 language code for the given language.
  ## Example
      iex> Lingua.iso_code_639_3_for_language(:english)
      {:ok, :eng}
      iex> Lingua.iso_code_639_3_for_language(:nope)
      {:error, :unrecognized_language}
  """
  defdelegate iso_code_639_3_for_language(language), to: Lingua.Nif
end
|
lib/lingua.ex
| 0.87639 | 0.723383 |
lingua.ex
|
starcoder
|
defmodule NYSETL.Engines.E2.Processor do
@moduledoc """
For a TestResult record, decide the following
* Find or create a Person record, based on :patient_key or unique (dob/last_name/first_name)
* Create or update one more IndexCase records for this county
* Create or update a LabResult record for the IndexCase
## Edge cases
* Pre-existing data in CommCare may not be well deduplicated for people. If we have deduped a person,
but there are multiple IndexCase records for that person in this county, creation of new LabResult
records is duplicated across all IndexCase records.
"""
require Logger
alias NYSETL.Commcare
alias NYSETL.ECLRS
alias NYSETL.Format
require Logger
@reinfection_fields ~w(first_name last_name full_name doh_mpi_id initials name_and_id preferred_name dob contact_phone_number phone_home phone_work address address_city address_county address_state address_street address_zip commcare_email_address gender gender_other race race_specify ethnicity ec_address_city ec_address_state ec_address_street ec_address_zip ec_email_address ec_first_name ec_last_name ec_phone_home ec_phone_work ec_relation provider_name provider_phone_number provider_email_address provider_affiliated_facility)
def process(test_result, ignore_before \\ Application.get_env(:nys_etl, :eclrs_ignore_before_timestamp)) do
with :ok <- processable?(test_result, ignore_before),
{:ok, county} <- get_county(test_result.county_id),
{:ok, person} <- find_or_create_person(test_result) do
person
|> find_or_create_index_cases(test_result, county)
|> Enum.map(&process_one(&1, test_result, county))
|> maybe_register_summary_event(test_result)
:ok
else
{:non_participating_county, county} ->
ECLRS.save_event(test_result,
type: "test_result_ignored",
data: %{reason: "test_result is for fips #{county.fips} (a non-participating county)"}
)
:ok
{:unprocessable, threshold} ->
Logger.warn(
"[#{__MODULE__}}] ignoring test_result_id=#{test_result.id} because its eclrs_create_date was older than eclrs_ignore_before_timestamp"
)
ECLRS.save_event(test_result, type: "test_result_ignored", data: %{reason: "test_result older than #{threshold}"})
:ok
{status, reason} ->
Sentry.capture_message("Mishandled case", extra: %{test_result_id: test_result.id, status: status, reason: reason})
{:error, "Mishandled case"}
_ ->
Sentry.capture_message("Unhandled case", extra: %{test_result_id: test_result.id})
{:error, "Unhandled case"}
end
end
defp processable?(%ECLRS.TestResult{eclrs_create_date: eclrs_create_date}, ignore_before) do
if :gt == DateTime.compare(eclrs_create_date, ignore_before),
do: :ok,
else: {:unprocessable, ignore_before}
end
defp process_one({ic_creation_status, index_case}, test_result, county) do
{:ok, lr_creation_status} = create_or_update_lab_result(index_case, test_result, county)
{ic_creation_status, lr_creation_status}
end
defp maybe_register_summary_event(list, test_result) do
if Enum.all?(list, &(&1 == {:untouched, :untouched})) do
{:ok, _} = ECLRS.save_event(test_result, "no_new_information")
end
end
@doc """
Returns a tuple where the first element is a map of additions and the second element is a map of updated values.
## Examples
iex> a = %{a: 1, b: 2, z: 26}
iex> b = %{a: 1, b: 3, y: 25}
iex> diff(a, b)
{%{y: 25}, %{b: 3}}
"""
@spec diff(map(), map(), keyword()) :: {map(), map()}
def diff(a, b, opts \\ [])
def diff(a, b, opts) do
prefer_right = opts |> Keyword.get(:prefer_right, [])
a = Euclid.Extra.Map.stringify_keys(a)
b = Euclid.Extra.Map.stringify_keys(b)
a_set = MapSet.new(a)
b_set = MapSet.new(b)
merged_set = a |> Map.merge(b, &pick_merge_value(&1, &2, &3, prefer_right)) |> MapSet.new()
updates = b_set |> MapSet.difference(merged_set)
additions = b_set |> MapSet.difference(a_set) |> MapSet.difference(updates)
{Enum.into(additions, %{}), Enum.into(updates, %{})}
end
defp pick_merge_value(_key, av, bv, _) when av == "", do: bv
defp pick_merge_value(key, av, bv, prefer_right) do
if key in prefer_right,
do: bv || av,
else: av || bv
end
defp find_or_create_person(test_result) do
ECLRS.fingerprint(test_result)
|> NYSETL.Engines.E1.Cache.transaction(fn _cache ->
find_person_by_patient_key(test_result) || reuse_person_by_name_and_dob(test_result) || create_person(test_result)
end)
end
defp find_person_by_patient_key(test_result) do
Commcare.get_person(patient_key: test_result.patient_key)
|> case do
{:ok, person} ->
ECLRS.save_event(test_result, type: "person_matched", data: %{person_id: person.id})
:telemetry.execute([:transformer, :person, :found], %{count: 1})
{:ok, person}
{:error, :not_found} ->
nil
end
end
defp reuse_person_by_name_and_dob(%{patient_dob: nil}), do: nil
defp reuse_person_by_name_and_dob(%{patient_name_first: nil}), do: nil
defp reuse_person_by_name_and_dob(%{patient_name_last: nil}), do: nil
defp reuse_person_by_name_and_dob(test_result) do
Commcare.get_person(
dob: test_result.patient_dob,
name_first: first_name(test_result),
name_last: test_result.patient_name_last |> String.upcase()
)
|> case do
{:ok, person} ->
ECLRS.save_event(test_result, type: "person_added_patient_key", data: %{person_id: person.id})
:telemetry.execute([:transformer, :person, :added_patient_key], %{count: 1})
Commcare.add_patient_key(person, test_result.patient_key)
{:error, :not_found} ->
nil
end
end
defp create_person(test_result) do
:telemetry.execute([:transformer, :person, :created], %{count: 1})
name_last = if test_result.patient_name_last, do: test_result.patient_name_last |> String.upcase()
{:ok, person} =
%{
data: %{},
patient_keys: [test_result.patient_key],
name_last: name_last,
name_first: first_name(test_result),
dob: test_result.patient_dob
}
|> Commcare.create_person()
ECLRS.save_event(test_result, type: "person_created", data: %{person_id: person.id})
Logger.info("[#{__MODULE__}] created person #{person.id} patient_key=#{test_result.patient_key} from test_result id=#{test_result.id}")
{:ok, person}
end
defp reinfected_case_created?({_modification_status, %{data: %{"reinfected_case_created" => "yes"}}}), do: true
defp reinfected_case_created?({_modification_status, _index_case}), do: false
defp find_or_create_index_cases(person, test_result, commcare_county) do
with {:ok, index_cases} <-
Commcare.get_index_cases(person, county_id: commcare_county.fips, accession_number: test_result.request_accession_number),
open_cases when open_cases != [] <- Enum.filter(index_cases, &is_open_case?/1),
updated_cases = Enum.map(open_cases, &update_index_case(&1, person, test_result, commcare_county)),
{_reinfection_cases, current_infection_cases} when current_infection_cases != [] <-
Enum.split_with(updated_cases, &reinfected_case_created?/1) do
current_infection_cases
else
{:error, :not_found} ->
[{:created, create_index_case(person, test_result, commcare_county)}]
[] ->
[{:created, create_index_case(person, test_result, commcare_county)}]
{updated_reinfection_cases, []} ->
reinfection_cases = Enum.map(updated_reinfection_cases, fn {_status, rc} -> rc end)
[{:created, create_index_case(person, test_result, commcare_county, reinfection_cases)}]
end
end
defp is_open_case?(%Commcare.IndexCase{closed: true}), do: false
defp is_open_case?(%Commcare.IndexCase{data: %{"final_disposition" => final_disposition}})
when final_disposition in ["registered_in_error", "duplicate", "not_a_case"],
do: false
defp is_open_case?(%Commcare.IndexCase{data: %{"stub" => "yes"}}), do: false
defp is_open_case?(%Commcare.IndexCase{data: %{"current_status" => "closed", "patient_type" => "pui"}}), do: false
defp is_open_case?(%Commcare.IndexCase{data: %{"transfer_status" => transfer_status}})
when transfer_status in ["pending", "sent"],
do: false
defp is_open_case?(_index_case), do: true
def repeat_type(%Commcare.IndexCase{data: %{"reinfected_case_created" => "yes"}}, _test_result), do: nil
def repeat_type(
%Commcare.IndexCase{
data: %{"current_status" => "closed", "date_opened" => date_opened, "all_activity_complete_date" => activity_date} = data
},
test_result
)
when is_binary(date_opened) and is_binary(activity_date) do
with {:ok, all_activity_complete_date} <- data["all_activity_complete_date"] |> String.trim() |> Date.from_iso8601(),
{:ok, datetime_opened, 0} <- data["date_opened"] |> String.trim() |> DateTime.from_iso8601() do
date_opened = DateTime.to_date(datetime_opened)
eclrs_date =
[test_result.request_collection_date, test_result.eclrs_create_date]
|> Enum.reject(&is_nil/1)
|> Enum.reject(&(DateTime.compare(&1, ~U[2020-01-01 00:00:00Z]) == :lt))
|> Enum.min(DateTime, fn -> test_result.eclrs_create_date end)
|> DateTime.to_date()
if Date.compare(eclrs_date, all_activity_complete_date) == :gt do
if Date.diff(eclrs_date, date_opened) >= 90 do
:reinfection
else
:reactivation
end
end
else
{:error, :invalid_format} -> nil
end
end
def repeat_type(_index_case, _test_result), do: nil
defp update_index_case(%Commcare.IndexCase{} = index_case, person, test_result, commcare_county) do
old_data = index_case.data
repeat_type = repeat_type(index_case, test_result)
new_data =
case repeat_type do
:reinfection ->
%{reinfected_case_created: "yes"}
:reactivation ->
test_result |> to_index_case_data(person, commcare_county) |> Map.put(:reactivated_case, "yes")
_ ->
to_index_case_data(test_result, person, commcare_county)
end
|> Euclid.Extra.Map.stringify_keys()
prefer_right_fields =
if repeat_type == :reactivation do
["has_phone_number", "new_lab_result_received", "owner_id"]
else
["has_phone_number", "new_lab_result_received"]
end
{additions, updates} = old_data |> diff(new_data, prefer_right: prefer_right_fields)
merged_data = old_data |> NYSETL.Extra.Map.merge_empty_fields(additions)
address_complete = address_complete?(merged_data)
{additions, updates} =
if address_complete != old_data["address_complete"] do
additions = additions |> Map.put("address_complete", address_complete)
updates = updates |> Map.drop(["address_complete"])
{additions, updates}
else
{additions, updates}
end
NYSETL.ChangeLog.changeset(%{
source_type: "test_result",
source_id: test_result.id,
destination_type: "index_case",
destination_id: index_case.id,
previous_state: old_data,
applied_changes: additions,
dropped_changes: updates
})
|> NYSETL.Repo.insert!()
change_metadata = %{
source_type: "test_result",
source_id: test_result.id,
dropped_changes: updates
}
{:ok, index_case} = index_case |> Commcare.update_index_case(%{data: Map.merge(old_data, additions)}, change_metadata)
modification_status = save_index_case_diff_event(index_case, additions, test_result)
{modification_status, index_case}
end
defp save_index_case_diff_event(index_case, changes_to_be_made, test_result) do
if Enum.any?(changes_to_be_made) do
ECLRS.save_event(test_result, type: "index_case_updated", data: %{index_case_id: index_case.id})
Commcare.save_event(index_case, type: "index_case_updated", data: %{test_result_id: test_result.id})
:updated
else
ECLRS.save_event(test_result, type: "index_case_untouched", data: %{index_case_id: index_case.id})
Commcare.save_event(index_case, type: "index_case_untouched", data: %{test_result_id: test_result.id})
:untouched
end
end
defp create_index_case(person, test_result, commcare_county, reinfection_cases \\ []) do
%{
data: to_index_case_data(test_result, person, commcare_county, reinfection_cases) |> Euclid.Extra.Map.stringify_keys(),
county_id: commcare_county.fips,
person_id: person.id
}
|> Commcare.create_index_case()
|> case do
{:ok, index_case} ->
ECLRS.save_event(test_result, type: "index_case_created", data: %{index_case_id: index_case.id})
Commcare.save_event(index_case, type: "index_case_created", data: %{test_result_id: test_result.id})
Logger.info("[#{__MODULE__}] test_result #{test_result.id} created index case case_id=#{index_case.case_id} for person id=#{person.id}")
index_case
other ->
Logger.info("[#{__MODULE__}] failed to create index case for person id=#{person.id}")
other
end
end
defp create_or_update_lab_result(index_case, test_result, commcare_county) do
Commcare.get_lab_results(index_case, accession_number: test_result.request_accession_number)
|> case do
{:error, :not_found} ->
{:ok, _lab_result, :created} = create_lab_result(index_case, test_result, commcare_county)
{:ok, :created}
{:ok, lab_results} ->
lab_results
|> Enum.map(fn lab_result ->
{:ok, _lab_result, status} = update_lab_result(lab_result, index_case, test_result, commcare_county)
status
end)
|> Enum.all?(&(&1 == :untouched))
|> case do
true -> {:ok, :untouched}
_ -> {:ok, :updated}
end
end
end
defp create_lab_result(index_case, test_result, commcare_county) do
%{
data: to_lab_result_data(index_case, test_result, commcare_county),
index_case_id: index_case.id,
accession_number: test_result.request_accession_number
}
|> Commcare.create_lab_result()
|> case do
{:ok, lab_result} ->
ECLRS.save_event(test_result, type: "lab_result_created", data: %{lab_result_id: lab_result.id, index_case_id: index_case.id})
Commcare.save_event(index_case, type: "lab_result_created", data: %{test_result_id: test_result.id})
{:ok, lab_result, :created}
error ->
error
end
end
defp update_lab_result(lab_result, index_case, test_result, commcare_county) do
new_data = to_lab_result_data(index_case, test_result, commcare_county) |> Euclid.Extra.Map.stringify_keys()
merged = new_data |> NYSETL.Extra.Map.merge_empty_fields(lab_result.data)
{additions, updates} = lab_result.data |> diff(merged)
data = lab_result.data |> Map.merge(additions)
change_metadata = %{
source_type: "test_result",
source_id: test_result.id,
dropped_changes: updates
}
lab_result
|> Commcare.update_lab_result(%{data: data}, change_metadata)
|> case do
{:ok, updated_lab_result} ->
NYSETL.ChangeLog.changeset(%{
source_type: "test_result",
source_id: test_result.id,
destination_type: "lab_result",
destination_id: updated_lab_result.id,
previous_state: updated_lab_result.data,
applied_changes: additions,
dropped_changes: updates
})
|> NYSETL.Repo.insert!()
result =
if data != lab_result.data do
ECLRS.save_event(test_result, type: "lab_result_updated", data: %{lab_result_id: lab_result.id, index_case_id: index_case.id})
Commcare.save_event(index_case, type: "lab_result_updated", data: %{test_result_id: test_result.id})
:updated
else
ECLRS.save_event(test_result, type: "lab_result_untouched", data: %{lab_result_id: lab_result.id, index_case_id: index_case.id})
Commcare.save_event(index_case, type: "lab_result_untouched", data: %{test_result_id: test_result.id})
:untouched
end
{:ok, updated_lab_result, result}
error ->
error
end
end
defp get_county(county_id) do
NYSETL.Commcare.County.get(fips: county_id)
|> case do
{:ok, county} -> {:ok, county}
{:error, _} -> NYSETL.Commcare.County.statewide_county()
{:non_participating, county} -> {:non_participating_county, county}
end
end
defp address(%{patient_address_1: patient_address_1, patient_city: patient_city, patient_zip: patient_zip}, state) do
[patient_address_1, patient_city, state, patient_zip]
|> Euclid.Extra.Enum.compact()
|> Enum.join(", ")
end
defp address_complete?(%{"address_street" => patient_address_1, "address_city" => patient_city, "address_zip" => patient_zip}),
do: address_complete?(%{address_street: patient_address_1, address_city: patient_city, address_zip: patient_zip})
defp address_complete?(%{address_street: patient_address_1, address_city: patient_city, address_zip: patient_zip})
when byte_size(patient_address_1) > 0 and byte_size(patient_city) > 0 and byte_size(patient_zip) > 0,
do: "yes"
defp address_complete?(_), do: "no"
defp dob_known?(%Date{}), do: "yes"
defp dob_known?(_), do: "no"
defp first_name(%{patient_name_first: name}) when is_binary(name) do
name |> String.split(" ") |> List.first() |> String.upcase()
end
defp first_name(_), do: nil
defp gender(nil), do: {"", ""}
defp gender(binary) when is_binary(binary) do
binary
|> String.upcase()
|> case do
"M" -> {"male", ""}
"F" -> {"female", ""}
_ -> {"other", binary}
end
end
defp has_phone_number?(%{contact_phone_number: number}) when byte_size(number) > 0, do: "yes"
defp has_phone_number?(_), do: "no"
defp full_name(%{patient_name_first: first, patient_name_last: last}) do
[first, last]
|> Euclid.Extra.Enum.compact()
|> Enum.join(" ")
end
defp state_for(<<zipcode::binary-size(5)>>), do: lookup_state(zipcode)
defp state_for(<<zipcode::binary-size(5), "-", _suffix::binary-size(4)>>), do: lookup_state(zipcode)
defp state_for(_), do: nil
defp lookup_state(zipcode) do
zipcode
|> Zipcode.to_state()
|> case do
{:ok, abbr} -> abbr
{:error, _} -> nil
end
end
def to_index_case_data(test_result, person, commcare_county, reinfection_cases \\ []) do
external_id = Commcare.external_id(person)
%{
doh_mpi_id: external_id,
external_id: external_id,
patient_type: "confirmed",
new_lab_result_received: "yes"
}
|> Map.merge(to_index_case_data_address_block(test_result))
|> Map.merge(to_index_case_data_county_block(commcare_county))
|> Map.merge(to_index_case_data_person_block(test_result, external_id))
|> Map.merge(to_index_case_data_rest(test_result))
|> with_reinfection_case_fields(reinfection_cases)
|> with_index_case_data_complete_fields()
end
defp with_reinfection_case_fields(data, []), do: data
defp with_reinfection_case_fields(data, current_cases) do
most_recent_case = List.last(current_cases)
fields_to_copy =
most_recent_case.data
|> Map.take(@reinfection_fields)
|> Euclid.Extra.Map.atomize_keys()
fields_to_copy
|> NYSETL.Extra.Map.merge_empty_fields(data)
|> Map.merge(%{
reinfected_case: "yes",
archived_case_id: most_recent_case.case_id
})
end
def with_index_case_data_complete_fields(data) do
data
|> Map.merge(%{
address_complete: address_complete?(data),
has_phone_number: has_phone_number?(data)
})
end
def to_index_case_data_address_block(test_result) do
state = state_for(test_result.patient_zip)
%{
address: address(test_result, state),
address_city: Format.format(test_result.patient_city),
address_state: Format.format(state),
address_street: Format.format(test_result.patient_address_1),
address_zip: Format.format(test_result.patient_zip)
}
end
def to_index_case_data_county_block(commcare_county) do
%{
address_county: Format.format(commcare_county.name),
county_commcare_domain: Format.format(commcare_county.domain),
county_display: Format.format(commcare_county.display),
fips: Format.format(commcare_county.fips),
gaz: Format.format(commcare_county.gaz),
owner_id: Format.format(commcare_county.location_id)
}
end
def to_index_case_data_person_block(test_result, external_id) do
{gender, gender_other} = gender(test_result.patient_gender)
%{
contact_phone_number: test_result.patient_phone_home_normalized |> Format.format() |> Format.us_phone_number(),
dob: Format.format(test_result.patient_dob),
dob_known: dob_known?(test_result.patient_dob),
first_name: test_result.patient_name_first,
full_name: full_name(test_result),
gender: gender,
gender_other: gender_other,
initials: initials(test_result.patient_name_first, test_result.patient_name_last),
last_name: test_result.patient_name_last,
name: full_name(test_result),
name_and_id: "#{full_name(test_result)} (#{external_id})",
phone_home: Format.format(test_result.patient_phone_home_normalized)
}
end
def to_index_case_data_rest(test_result) do
%{
analysis_date: Format.format(test_result.result_analysis_date),
case_import_date: Format.format(DateTime.utc_now()),
eclrs_create_date: Format.format(test_result.eclrs_create_date)
}
end
def to_lab_result_data(index_case, tr = %ECLRS.TestResult{}, commcare_county) do
doh_mpi_id = index_case.data |> Map.get("external_id")
%{
accession_number: tr.request_accession_number,
analysis_date: Format.format(tr.result_analysis_date),
aoe_date: Format.format(tr.aoe_date),
doh_mpi_id: doh_mpi_id,
eclrs_congregate_care_resident: tr.eclrs_congregate_care_resident,
eclrs_create_date: Format.format(tr.eclrs_create_date),
eclrs_hospitalized: tr.eclrs_hospitalized,
eclrs_icu: tr.eclrs_icu,
eclrs_loinc: tr.result_loinc_code,
eclrs_pregnant: tr.eclrs_pregnant,
eclrs_symptom_onset_date: Format.format(tr.eclrs_symptom_onset_date),
eclrs_symptomatic: tr.eclrs_symptomatic,
employee_job_title: tr.employee_job_title,
employee_number: tr.employee_number,
employer_address: tr.employer_address,
employer_name: tr.employer_name,
employer_phone_2: tr.employer_phone_alt,
employer_phone: tr.employer_phone,
external_id: "#{doh_mpi_id}##{tr.patient_key}##{tr.request_accession_number}",
first_test: tr.first_test,
healthcare_employee: tr.healthcare_employee,
lab_result: lab_result_text(tr.result),
laboratory: tr.lab_name,
name_employer_school: compact_join([tr.school_name, tr.employer_name], ", "),
name: "#{doh_mpi_id} lab_result",
ordering_facility_address: compact_join([tr.request_facility_address_1, tr.request_facility_address_2], "\n"),
ordering_facility_city: tr.request_facility_city,
ordering_facility_name: tr.request_facility_name,
ordering_facility_phone: tr.request_phone_facility_normalized,
ordering_provider_address: tr.request_provider_address_1,
ordering_provider_city: tr.request_provider_city,
ordering_provider_first_name: tr.request_provider_name_first,
ordering_provider_last_name: tr.request_provider_name_last,
ordering_provider_name: compact_join([tr.request_provider_name_first, tr.request_provider_name_last], " "),
owner_id: commcare_county.location_id,
parent_external_id: doh_mpi_id,
parent_type: "patient",
school_attended: tr.school_present,
school_code: tr.school_code,
school_district: tr.school_district,
school_name: tr.school_name,
school_visitor_type: tr.school_job_class,
specimen_collection_date: Format.format(tr.request_collection_date),
specimen_source: tr.request_specimen_source_name,
test_type: tr.result_loinc_desc
}
end
# Maps free-text lab result wording onto a known result category.
#
# Downcases the text and returns the first occurrence of one of
# "positive" | "negative" | "inconclusive" | "invalid" | "unknown";
# returns "other" for nil input or when no keyword is present.
def lab_result_text(nil), do: "other"

def lab_result_text(value) do
  case Regex.run(~r/positive|negative|inconclusive|invalid|unknown/, String.downcase(value)) do
    [first_match | _] -> first_match
    nil -> "other"
  end
end
# Joins the non-nil members of `list` with `joiner`.
#
# Euclid's `compact/1` drops nil entries before joining, and `presence/1`
# converts a blank/empty join result back into nil, so callers get either
# a meaningful string or nil — never "".
def compact_join(list, joiner) do
  joined =
    list
    |> Euclid.Extra.Enum.compact()
    |> Enum.join(joiner)

  Euclid.Exists.presence(joined)
end
# Builds a two-character string from the first byte of each argument.
#
# Returns nil unless both arguments are non-empty binaries. Note this is
# byte-based (first byte, not first grapheme), matching the original
# `binary-size(1)` pattern semantics.
def initials(first, last)
    when is_binary(first) and byte_size(first) >= 1 and
           is_binary(last) and byte_size(last) >= 1 do
  binary_part(first, 0, 1) <> binary_part(last, 0, 1)
end

def initials(_first, _last), do: nil
end
|
lib/nys_etl/engines/e2/processor.ex
| 0.743727 | 0.42185 |
processor.ex
|
starcoder
|
defmodule ShittyLinqEx do
  @moduledoc """
  Documentation for `ShittyLinqEx`.

  A loose port of .NET LINQ operators (`aggregate`, `all`, `first`, `take`,
  `where`, ...) over Elixir lists, ranges and maps.
  """

  @doc """
  Applies an accumulator function over a sequence. The specified seed value is used as the initial
  accumulator value, and the specified function is used to select the result value.

  ## Parameters

  - `source`: an enumerable to aggregate over (list, range or map).
  - `seed`: the initial accumulator value.
  - `func`: an accumulator function to be invoked on each element.
  - `result_selector`: a function to transform the final accumulator value into the result value.

  ## Returns

  The transformed final accumulator value.

  ## Examples

      iex> import ShittyLinqEx, only: [aggregate: 4]
      iex> fruits = ["apple", "mango", "orange", "passionfruit", "grape"]
      iex> aggregate(
      ...>   fruits,
      ...>   "banana",
      ...>   fn next, longest ->
      ...>     if String.length(next) > String.length(longest) do
      ...>       next
      ...>     else
      ...>       longest
      ...>     end
      ...>   end,
      ...>   &String.upcase/1)
      "PASSIONFRUIT"

  """
  def aggregate(source, seed, func, result_selector)
      when is_list(source) and is_function(func, 2) and is_function(result_selector, 1) do
    :lists.foldl(func, seed, source)
    |> result_selector.()
  end

  # Note: the previous revision contained byte-for-byte duplicates of the list
  # and range clauses after the map clause; they were unreachable and have been
  # removed.
  def aggregate(first..last, seed, func, result_selector)
      when is_function(func, 2) and is_function(result_selector, 1) do
    # Walk the range in the direction it was declared.
    if first <= last do
      aggregate_range_inc(first, last, seed, func)
    else
      aggregate_range_dec(first, last, seed, func)
    end
    |> result_selector.()
  end

  def aggregate(%{} = source, seed, func, result_selector)
      when is_function(func, 2) and is_function(result_selector, 1) do
    # Map entries are folded as `{key, value}` tuples.
    :maps.fold(
      fn key, value, accumulator -> func.({key, value}, accumulator) end,
      seed,
      source
    )
    |> result_selector.()
  end

  @doc """
  Applies an accumulator function over a sequence.
  The specified seed value is used as the initial accumulator value.

  ## Parameters

  - `source`: an enumerable to aggregate over (list, range or map).
  - `seed`: the initial accumulator value.
  - `func`: an accumulator function to be invoked on each element.

  ## Returns

  The final accumulator value.

  ## Examples

      iex> import ShittyLinqEx, only: [aggregate: 3]
      iex> ints = [4, 8, 8, 3, 9, 0, 7, 8, 2]
      iex> aggregate(
      ...>   ints,
      ...>   0,
      ...>   fn next, total ->
      ...>     if rem(next, 2) == 0 do
      ...>       total + 1
      ...>     else
      ...>       total
      ...>     end
      ...>   end)
      6

      iex> import ShittyLinqEx, only: [aggregate: 3]
      iex> aggregate(4..1, 1, &*/2)
      24

      iex> import ShittyLinqEx, only: [aggregate: 3]
      iex> aggregate(1..3, 1, &+/2)
      7

      iex> import ShittyLinqEx, only: [aggregate: 3]
      iex> letters_to_numbers = %{a: 1, b: 2, c: 3}
      iex> aggregate(
      ...>   letters_to_numbers,
      ...>   [],
      ...>   fn {key, _value}, keys -> [key | keys] end)
      [:c, :b, :a]

  """
  def aggregate(source, seed, func)
      when is_list(source) and is_function(func, 2) do
    :lists.foldl(func, seed, source)
  end

  def aggregate(first..last, seed, func)
      when is_function(func, 2) do
    if first <= last do
      aggregate_range_inc(first, last, seed, func)
    else
      aggregate_range_dec(first, last, seed, func)
    end
  end

  def aggregate(%{} = source, seed, func)
      when is_function(func, 2) do
    :maps.fold(
      fn key, value, acc -> func.({key, value}, acc) end,
      seed,
      source
    )
  end

  @doc """
  Applies an accumulator function over a sequence, using the first element
  as the initial accumulator value.

  ## Parameters

  - `source`: a non-empty list to aggregate over.
  - `func`: an accumulator function to be invoked on each element.

  ## Returns

  The final accumulator value.

  ## Examples

      iex> import ShittyLinqEx, only: [aggregate: 2]
      iex> sentence = "the quick brown fox jumps over the lazy dog"
      iex> words = String.split(sentence)
      iex> aggregate(
      ...>   words,
      ...>   fn word, workingSentence -> word <> " " <> workingSentence end)
      "dog lazy the over jumps fox brown quick the"

  """
  def aggregate([head | tail], func)
      when is_function(func, 2) do
    aggregate(tail, head, func)
  end

  @doc """
  Determines whether all elements of a sequence satisfy a condition.

  ## Parameters

  - `list`: a list that contains the elements to apply the predicate to.
  - `predicate`: a function to test each element for a condition.

  ## Returns

  `true` if every element of the source sequence passes the test in the
  specified predicate, or if the sequence is empty; otherwise `false`.

  ## Examples

      iex> import ShittyLinqEx, only: [all: 2]
      iex> all(
      ...>   ["Barley", "Boots", "Whiskers"],
      ...>   fn pet -> String.first(pet) == "B" end)
      false

      iex> import ShittyLinqEx, only: [all: 2]
      iex> all(
      ...>   [1, 3, 5, 7, 9],
      ...>   fn number -> rem(number, 2) == 1 end)
      true

  """
  @spec all(list, fun) :: boolean
  def all(list, predicate) when is_list(list) and is_function(predicate, 1),
    do: do_all(list, predicate)

  defp do_all([], _predicate), do: true

  # Using `if` (instead of `&&`) guarantees a strict boolean result even when
  # the predicate returns a falsy non-boolean such as nil; the previous
  # `predicate.(head) && do_all(...)` could leak nil to the caller.
  defp do_all([head | tail], predicate) do
    if predicate.(head), do: do_all(tail, predicate), else: false
  end

  @doc """
  Inverts the order of the elements in a sequence.

  ## Parameters

  - `list`: a sequence of values to reverse.

  ## Returns

  A sequence whose elements correspond to those of the input sequence in reverse order.

  ## Examples

      iex> import ShittyLinqEx, only: [reverse: 1]
      iex> reverse(["A", "B", "C"])
      ["C", "B", "A"]

      iex> import ShittyLinqEx, only: [reverse: 1]
      iex> reverse([42, "orange", ":atom"])
      [":atom", "orange", 42]

  """
  @spec reverse(list) :: list
  def reverse(list) when is_list(list), do: reverse(list, [])

  # Tail-recursive worker; kept public for backward compatibility with
  # existing callers of reverse/2.
  def reverse([head | tail], acc), do: reverse(tail, [head | acc])
  def reverse([], acc), do: acc

  @doc """
  Returns the first element of a sequence.

  ## Parameters

  - `list`: a sequence of values of which the first element should be returned.
  - `func`: a two-argument function used to check each element (first/3 only).
  - `value`: a value which will be checked in the predicate function (first/3 only).

  ## Returns

  The first value of the input sequence (or the first value for which
  `func.(element, value)` returns `true`); nil when there is none.

  ## Examples

      iex> import ShittyLinqEx, only: [first: 1]
      iex> first(["A", "B", "C"])
      "A"

      iex> import ShittyLinqEx, only: [first: 1]
      iex> first([42, "orange", ":atom"])
      42

      iex> import ShittyLinqEx, only: [first: 3]
      iex> first([4, 2, 3], &>/2, 1)
      4

  """
  def first(list) when is_list(list), do: List.first(list)
  # The `first([])` clause of the previous revision was unreachable (the
  # is_list/1 clause above already matches []) and has been removed.
  def first(nil), do: nil

  def first([head | tail], func, value) when is_list(tail) and is_function(func, 2) do
    case func.(head, value) do
      true -> head
      false -> first(tail, func, value)
    end
  end

  def first([], _func, _value), do: nil

  @doc """
  Returns a specified number of contiguous elements from the start of a sequence.

  ## Parameters

  - `source`: a sequence of values to take.
  - `count`: the number of elements to return.

  ## Returns

  A sequence that contains the specified number of elements from the start of the input sequence.

  ## Examples

      iex> import ShittyLinqEx, only: [take: 2]
      iex> take(["A", "B", "C"], 2)
      ["A", "B"]

      iex> import ShittyLinqEx, only: [take: 2]
      iex> take([42, "orange", ":atom"], 7)
      [42, "orange", ":atom"]

      iex> import ShittyLinqEx, only: [take: 2]
      iex> take([1, 2, 3], 0)
      []

      iex> import ShittyLinqEx, only: [take: 2]
      iex> take(nil, 5)
      nil

  """
  # Clause order preserved from the original: `take(nil, 0)` returns [].
  def take(_source, 0), do: []
  def take(_source, count) when is_integer(count) and count < 0, do: []
  def take(nil, _count), do: nil
  def take([], _count), do: []

  def take(source, count)
      when is_list(source) and is_integer(count) and count > 0 do
    take_list(source, count)
  end

  @doc """
  Returns a new enumerable collection that contains the last `count` elements from `source`.

  ## Parameters

  - `source`: a sequence of values to take.
  - `count`: the number of elements to take from the end of the collection.

  ## Returns

  A new enumerable collection that contains the last `count` elements from `source`.

  ## Examples

      iex> import ShittyLinqEx, only: [take_last: 2]
      iex> take_last(["A", "B", "C"], 2)
      ["B", "C"]

      iex> import ShittyLinqEx, only: [take_last: 2]
      iex> take_last([42, "orange", :atom], 7)
      [42, "orange", :atom]

      iex> import ShittyLinqEx, only: [take_last: 2]
      iex> take_last([1, 2, 3], 0)
      []

      iex> import ShittyLinqEx, only: [take_last: 2]
      iex> take_last(nil, 5)
      nil

  """
  def take_last(nil, _count), do: nil

  def take_last(source, count) do
    source
    |> reverse()
    |> take(count)
    |> reverse()
  end

  @doc """
  Filters a sequence of values based on a predicate.

  ## Parameters

  - `source`: an enumerable to filter.
  - `predicate`: a function to test each element for a condition. An arity-2
    predicate also receives the element's zero-based index.

  ## Returns

  An enumerable that contains elements from the input sequence that satisfy the condition.

  ## Examples

      iex> import ShittyLinqEx, only: [where: 2]
      iex> where(
      ...>   ["apple", "passionfruit", "banana", "mango", "orange", "blueberry", "grape", "strawberry"],
      ...>   fn fruit -> String.length(fruit) < 6 end)
      ["apple", "mango", "grape"]

      iex> import ShittyLinqEx, only: [where: 2]
      iex> where(
      ...>   [0, 30, 20, 15, 90, 85, 40, 75],
      ...>   fn number, index -> number <= index * 10 end)
      [0, 20, 15, 40]

  """
  def where(source, predicate) when is_list(source) and is_function(predicate, 1) do
    where_list(source, predicate)
  end

  def where(source, predicate) when is_list(source) and is_function(predicate, 2) do
    where_list(source, predicate, 0)
  end

  @doc """
  Returns the value repeated `count` times inside a list.

  ## Parameters

  - `value`: a value to repeat.
  - `count`: the number of times that the `value` will be repeated.

  ## Returns

  A list with `value` repeated `count` times. Raises if `count` is not a
  number or is zero or less.

  ## Examples

      iex> import ShittyLinqEx, only: [repeat: 2]
      iex> repeat("Hello there", 3)
      ["Hello there", "Hello there", "Hello there"]

      iex> import ShittyLinqEx, only: [repeat: 2]
      iex> repeat(%{"map" => "Example", "key" => "value"}, 2)
      [
        %{"key" => "value", "map" => "Example"},
        %{"key" => "value", "map" => "Example"}
      ]

  """
  def repeat(_value, count) when not is_number(count) do
    raise "Count must be a number"
  end

  def repeat(_value, count) when count <= 0 do
    raise "Count must be 1 or more"
  end

  # Fix: the previous revision special-cased `repeat(value, 1)` to return the
  # bare value, violating the documented "always a list" contract; now
  # `repeat(value, 1)` returns `[value]` like every other valid count.
  def repeat(value, count) do
    for _ <- 1..count do
      value
    end
  end

  @doc """
  Returns the value itself when no count is passed.

  ## Parameters

  - `value`: a value to repeat.

  ## Returns

  The value itself, because no count was passed.

  ## Examples

      iex> import ShittyLinqEx, only: [repeat: 1]
      iex> repeat("hi")
      "hi"

  """
  def repeat(value), do: value

  # Folds an ascending range, applying `func` to each integer in turn.
  defp aggregate_range_inc(first, first, seed, func) do
    func.(first, seed)
  end

  defp aggregate_range_inc(first, last, seed, func) do
    aggregate_range_inc(first + 1, last, func.(first, seed), func)
  end

  # Folds a descending range, applying `func` to each integer in turn.
  defp aggregate_range_dec(first, first, seed, func) do
    func.(first, seed)
  end

  defp aggregate_range_dec(first, last, seed, func) do
    aggregate_range_dec(first - 1, last, func.(first, seed), func)
  end

  defp take_list([head | _], 1), do: [head]
  defp take_list([head | tail], counter), do: [head | take_list(tail, counter - 1)]
  defp take_list([], _counter), do: []

  defp where_list([head | tail], fun) do
    case fun.(head) do
      true -> [head | where_list(tail, fun)]
      _ -> where_list(tail, fun)
    end
  end

  defp where_list([], _fun) do
    []
  end

  defp where_list([head | tail], fun, index) do
    case fun.(head, index) do
      true -> [head | where_list(tail, fun, index + 1)]
      _ -> where_list(tail, fun, index + 1)
    end
  end

  defp where_list([], _fun, _index) do
    []
  end
end
|
lib/shitty_linq_ex.ex
| 0.904824 | 0.672795 |
shitty_linq_ex.ex
|
starcoder
|
defmodule Entitiex.Entity do
  @moduledoc """
  Defines an entity module.

  ## Example

      defmodule UserEntity do
        use Entitiex.Entity

        expose [:first_name, :last_name], format: :to_s
        expose :contacts, using: ContactEntity, if: :owner_is_admin?
        expose [:registered_at, :updated_at], format: &DateTimeFormatter.format/1

        def owner_is_admin?(_struct, _value, %{owner: %{admin: admin}}),
          do: admin

        def owner_is_admin?(_struct, _value, _context),
          do: false
      end

  An entity module provides a `represent` function, which allows transforming a
  given struct into a new structure.

      iex> UserEntity.represent(struct)
      %{first_name: "...", ...}

      iex> UserEntity.represent([struct])
      [%{first_name: "...", ...}]

      iex> UserEntity.represent(struct, root: :users, extra: [meta: %{}])
      %{users: %{first_name: "...", ...}, meta: %{}}

      iex> UserEntity.represent(struct, root: :users, context: [owner: %User{admin: true}])
      %{users: %{first_name: "...", contacts: %{...}, ...}}
  """

  alias Entitiex.Exposure
  alias Entitiex.Types

  defmacro __using__(_opts \\ []) do
    quote location: :keep do
      # Defaults for the module attributes used during compilation; the three
      # `register_attribute` calls below switch the `__pre_*`/`__exposures__`
      # attributes into accumulating mode.
      @__exposures__ []
      @__pre_exposures__ []
      @__shared_options__ []
      @__key_formatters__ []
      @__pre_key_formatters__ []
      @__root__ [singular: nil, plural: nil]

      Module.register_attribute(__MODULE__, :__exposures__, accumulate: true)
      Module.register_attribute(__MODULE__, :__pre_exposures__, accumulate: true)
      Module.register_attribute(__MODULE__, :__pre_key_formatters__, accumulate: true)

      alias Entitiex.Exposure
      alias Entitiex.Utils

      import unquote(__MODULE__), only: [
        expose: 2, expose: 1, root: 2, root: 1, format_keys: 1,
        nesting: 2, nesting: 3, inline: 2, inline: 3, with_options: 2
      ]

      @before_compile unquote(__MODULE__)

      @doc """
      Transforms a struct into the structure defined in the entity module.

      It takes a struct or list of structs as the first argument and options as
      the second. The result can consist of two parts: inner and outer
      structure. The inner structure is described in the entity module. The
      outer structure exists only if the `:root` option is defined, and
      contains the inner structure under the root key plus some additional data
      if the `:extra` option is defined.

      Inner:

          %{name: "<NAME>"}

      Outer:

          %{root_key: %{name: "Jon Snow"}}

      Outer with extra:

          %{root_key: %{name: "Jon Snow"}, meta: %{data: %{...}}}

      Available options:

      - `:root` - allows us to define the root key for the final structure.
      - `:extra` - allows us to define an extra structure which will be merged
        into the outer map. This option is omitted if `:root` is not defined,
        because there is no outer structure without `:root`.
      - `:context` - allows us to define the context of the representation
        process, i.e. runtime options for a particular representation.
      """
      def represent(struct, opts \\ [])

      def represent(structs, opts) when is_list(structs) do
        context = Keyword.get(opts, :context, %{})
        extra = Keyword.get(opts, :extra, %{})
        # A list of structs uses the plural root key.
        root = get_root(opts, :plural)

        do_represent(structs, root, context, extra)
      end

      def represent(struct, opts) do
        context = Keyword.get(opts, :context, %{})
        extra = Keyword.get(opts, :extra, %{})
        # A single struct uses the singular root key.
        root = get_root(opts, :singular)

        do_represent(struct, root, context, extra)
      end

      # No root key: return the bare inner structure (`extra` is ignored here,
      # as documented on represent/2).
      defp do_represent(struct, :nil, context, extra),
        do: serializable_map(struct, context)

      # Root key present: format the extra map's keys, then nest the inner
      # structure under the formatted root key.
      defp do_represent(struct, root, context, extra) do
        extra
        |> Utils.transform_keys(&(format_key(&1)))
        |> Map.put(format_key(root), serializable_map(struct, context))
      end

      @doc """
      Transforms a struct (or list of structs) into the inner structure.
      """
      def serializable_map(structs, context \\ %{})

      def serializable_map(structs, context) when is_list(structs),
        do: Enum.map(structs, fn (struct) -> serializable_map(struct, context) end)

      # Builds the inner map by folding over this entity's exposures; each
      # exposure either merges a map, puts a single key, or is skipped.
      def serializable_map(struct, context) when is_map(struct) do
        Enum.reduce(exposures(), %{}, fn (exposure, acc) ->
          with key <- Exposure.key(exposure) do
            case Exposure.value(exposure, struct, context) do
              {:merge, value} -> Map.merge(acc, value)
              {:put, value} -> Map.put(acc, key, value)
              :skip -> acc
            end
          end
        end)
      end

      # Non-map values pass through untouched.
      def serializable_map(struct, _context),
        do: struct

      # NOTE(review): only the /1 arity is marked overridable even though the
      # default argument also defines serializable_map/2 — confirm intended.
      defoverridable [serializable_map: 1]
    end
  end

  defmacro __before_compile__(_env) do
    # Convert each queued `@__pre_exposures__` entry into a quoted expression
    # that appends a built Exposure to `@__exposures__` at compile time.
    exposures = Enum.map(Module.get_attribute(__CALLER__.module, :__pre_exposures__), fn {attribute, opts, block} ->
      nesting = Keyword.get(opts, :nesting, false)

      quote location: :keep do
        # For nested/inline exposures, generate a dedicated entity module from
        # the captured block and pass it along as the `:using` option.
        opts = if unquote(nesting) do
          using = Entitiex.Entity.generate_module(
            __ENV__.module,
            unquote(Macro.escape(block)),
            Macro.Env.location(__ENV__)
          )

          Keyword.merge(unquote(opts), [using: using])
        else
          unquote(opts)
        end

        @__exposures__ Entitiex.Exposure.new(
          __ENV__.module,
          unquote(attribute),
          opts
        )
      end
    end)

    # Normalize the queued key formatters and freeze them into
    # `@__key_formatters__` (reversed to restore declaration order).
    key_formatters = Module.get_attribute(__CALLER__.module, :__pre_key_formatters__)
    key_formatters = Entitiex.Formatter.normalize(key_formatters, __CALLER__.module) |> Enum.reverse()
    key_formatters = quote location: :keep do
      @__key_formatters__ unquote(key_formatters)
    end

    # The pre-* staging attributes are no longer needed once compilation ends.
    Module.delete_attribute(__CALLER__.module, :__pre_exposures__)
    Module.delete_attribute(__CALLER__.module, :__shared_options__)
    Module.delete_attribute(__CALLER__.module, :__pre_key_formatters__)

    quote location: :keep do
      unquote(key_formatters)
      unquote(exposures)

      # Accessors over the frozen compile-time configuration.
      def key_formatters do
        @__key_formatters__
      end

      def exposures do
        @__exposures__
      end

      def format_key(key) do
        Entitiex.Formatter.format(key_formatters(), key)
      end

      # The `:root` option overrides the module-level `root` declaration.
      def get_root(opts, type) do
        case Keyword.get(opts, :root) do
          nil -> Keyword.get(@__root__, type)
          any -> any
        end
      end
    end
  end

  # Declares one or more exposed attributes for this entity.
  defmacro expose(attributes, opts \\ []),
    do: expose_attributes(attributes, opts)

  # Declares a nested block exposed under `key` (no source attribute).
  defmacro nesting(key, [do: block]),
    do: expose_attributes([nil], [nesting: true, as: key], block)

  defmacro nesting(key, opts, [do: block]),
    do: expose_attributes([nil], Keyword.merge(opts, [nesting: true, as: key]), block)

  # Declares an inline entity for a single attribute.
  defmacro inline(attribute, [do: block]),
    do: expose_attributes([attribute], [nesting: true], block)

  defmacro inline(attribute, opts, [do: block]),
    do: expose_attributes([attribute], Keyword.merge(opts, [nesting: true]), block)

  defp expose_attributes(attribute, opts, block \\ nil)

  defp expose_attributes(attribute, opts, block) when is_binary(attribute) or is_atom(attribute),
    do: expose_attributes([attribute], opts, block)

  # Queues each attribute (with shared options merged in) for processing in
  # __before_compile__/1.
  defp expose_attributes(attributes, opts, block) when is_list(attributes) do
    Enum.map(attributes, fn (attribute) ->
      quote do
        @__pre_exposures__ {unquote(attribute), Entitiex.Entity.reduce_options(__ENV__.module, unquote(opts)), unquote(Macro.escape(block))}
      end
    end)
  end

  # Sets the root key(s); a single argument is used for both plural and
  # singular forms.
  defmacro root(plural, singular \\ nil),
    do: set_root(plural, singular)

  defp set_root(plural, nil),
    do: set_root(plural, plural)

  defp set_root(plural, singular) do
    quote location: :keep do
      @__root__ [singular: unquote(singular), plural: unquote(plural)]
    end
  end

  # Registers a key formatter applied to all produced keys.
  defmacro format_keys(func) do
    quote location: :keep do
      @__pre_key_formatters__ unquote(func)
    end
  end

  # Applies `opts` to every `expose` inside the block; the try/after ensures
  # the shared options are popped even if the block raises.
  defmacro with_options(opts, [do: block]) do
    quote location: :keep do
      @__shared_options__ [unquote(opts) | @__shared_options__]

      try do
        unquote(block)
      after
        @__shared_options__ tl(@__shared_options__)
      end
    end
  end

  @doc false
  @spec reduce_options(module(), Types.exp_opts()) :: Types.exp_opts()
  def reduce_options(base, opts) do
    # Merge the exposure's own options over any active with_options stacks.
    shared_opts = Module.get_attribute(base, :__shared_options__, [])
    Enum.reduce([opts|shared_opts], &Entitiex.Options.merge/2)
  end

  @doc false
  @spec generate_module(module(), any(), any()) :: module()
  def generate_module(base, content, env) do
    # Unique suffix per nested entity, derived from how many exposures the
    # base module has accumulated so far.
    index = base
    |> exposures()
    |> length()

    # Propagate the base module's key formatters into the generated module.
    key_formatters = base
    |> key_formatters()
    |> Enum.map(fn key -> quote(do: @__key_formatters__ unquote(key)) end)

    content = content
    |> inject_code(key_formatters)
    |> inject_code(quote do: use Entitiex.Entity)

    {:module, nesting, _, _} = Module.create(:"#{base}.CodeGen.Nesting#{index}", content, env)
    nesting
  end

  # Reads exposures from the attribute while the module is still being
  # compiled, or from the compiled function afterwards.
  defp exposures(base) do
    if Module.open?(base) do
      Module.get_attribute(base, :__exposures__)
    else
      base.exposures()
    end
  end

  defp key_formatters(base) do
    if Module.open?(base) do
      Module.get_attribute(base, :__key_formatters__)
    else
      base.key_formatters()
    end
  end

  defp inject_code(content, injection) when is_tuple(injection),
    do: inject_code(content, [injection])

  defp inject_code(content, []),
    do: content

  # Prepends each injection onto the AST block, normalizing a single
  # expression into a __block__ first.
  defp inject_code(content, [injection|injections]) do
    nodes = case content do
      {:__block__, [], all} -> all
      any -> [any]
    end

    inject_code({:__block__, [], [injection|nodes]}, injections)
  end
end
|
lib/entitiex/entity.ex
| 0.869271 | 0.449997 |
entity.ex
|
starcoder
|
defmodule Furlong.Solver do
@moduledoc """
Elixir-port of the Kiwisolver; usage example below adapted from [Kiwisolver docs](https://kiwisolver.readthedocs.io/en/latest/basis/basic_systems.html).
Variables are represented by refs.
```
# create variables
x1 = make_ref()
x2 = make_ref()
xm = make_ref()
import Furlong.Solver
import Furlong.Constraint
# create system of in-/equalities
system =
new()
|> add_constraint(constraint(x1 >= 0))
|> add_constraint(constraint(x2 <= 100))
|> add_constraint(constraint(x2 >= x1 + 10))
|> add_constraint(constraint(xm == (x1 + x2) / 2))
|> add_constraint(constraint(x1 == 40), :weak)
|> add_edit_variable(xm, :strong)
|> suggest_value(xm, 60)
# query sytem for values assigned to variables
value?(system, x1)
value?(system, x2)
value?(system, xm)
# update edit variable value
system =
system |>
suggest_value(xm, 90)
# query values -- as above
value?(system, x1)
value?(system, x2)
value?(system, xm)
```
For further information, see [overconstrained.io](http://overconstrained.io/).
"""
alias Furlong.Row
alias Furlong.Solver
alias Furlong.Symbol
alias Furlong.Strength
import Furlong.Util, only: [near_zero?: 1]
# constraint -> tag
defstruct cns: %{},
# symbol -> row
rows: %{},
# variable (ref) -> symbol
vars: %{},
# variable -> edit_info
edits: %{},
# [symbol]
infeasible_rows: [],
objective: Row.new(),
# Row
artificial: nil,
strengths: %{}
@doc """
Creates a new, empty, solver instance, i.e. a system of in-/equalities.
"""
def new(), do: %Solver{}
@doc """
Creates a new solver instance by adding a constraint to the given solver instance with `:required` strength.
"""
def add_constraint(%Solver{} = solver, {:constraint, _, _} = constraint),
do: add_constraint(solver, constraint, Strength.required())
@doc """
Creates a new solver instance by adding a constraint to the given solver instance. Default constraint strength is `:required`.
`:required`, `:strong`, `:medium`, `:weak` can be passed as the second argument.
"""
def add_constraint(%Solver{} = solver, {:constraint, _, _} = constraint, :required),
do: add_constraint(solver, constraint, Strength.required())
def add_constraint(%Solver{} = solver, {:constraint, _, _} = constraint, :strong),
do: add_constraint(solver, constraint, Strength.strong())
def add_constraint(%Solver{} = solver, {:constraint, _, _} = constraint, :medium),
do: add_constraint(solver, constraint, Strength.medium())
def add_constraint(%Solver{} = solver, {:constraint, _, _} = constraint, :weak),
do: add_constraint(solver, constraint, Strength.weak())
def add_constraint(
%Solver{} = solver,
{:constraint, _, _} = constraint,
{:strength, _} = strength
) do
if Map.has_key?(solver.cns, constraint) do
raise "Duplicate constraint."
end
solver = %Solver{solver | strengths: Map.put(solver.strengths, constraint, strength)}
{solver, row, tag} = create_row(solver, constraint, strength)
{:symbol, type, _} = subject = choose_subject(solver, row, tag)
subject =
if type == :invalid and Row.all_dummies?(row) do
if !near_zero?(row.constant) do
raise "Unsatisfiable constraint."
else
{:tag, marker, _other} = tag
marker
end
else
subject
end
{:symbol, type, _} = subject
solver =
if type == :invalid do
{solver, success} = add_with_artificial_variable(solver, row)
if !success do
raise "Unsatisfiable constraint."
end
solver
else
row = Row.solve_for(row, subject)
solver = substitute(solver, subject, row)
%Solver{solver | rows: Map.put(solver.rows, subject, row)}
end
solver = %Solver{solver | cns: Map.put(solver.cns, constraint, tag)}
solver = optimize(solver, solver.objective)
solver
end
defp create_row(
%Solver{} = solver,
{:constraint, {:expression, terms, constant}, op} = _constraint,
{:strength, str_coeff} = strength
) do
{solver, row} =
terms
|> Enum.reject(fn {:term, _var, coefficient} -> near_zero?(coefficient) end)
|> Enum.reduce({solver, Row.new(constant)}, fn {:term, var, coefficient}, {solver, row} ->
{solver, symbol} = var_symbol?(solver, var)
row =
case row?(solver, symbol) do
nil ->
Row.insert(row, symbol, coefficient)
other_row ->
Row.insert(row, other_row, coefficient)
end
{solver, row}
end)
{solver, row, tag} =
if op == :eq do
if Strength.weaker_than?(strength, Strength.required()) do
err_plus = Symbol.error()
err_minus = Symbol.error()
row =
row
|> Row.insert(err_plus, -1)
|> Row.insert(err_minus, 1)
solver =
solver
|> insert_into_objective(err_plus, str_coeff)
|> insert_into_objective(err_minus, str_coeff)
{solver, row, {:tag, err_plus, err_minus}}
else
dummy = Symbol.dummy()
row = Row.insert(row, dummy)
{solver, row, {:tag, dummy, Symbol.invalid()}}
end
else
coeff =
if op == :lte do
1.0
else
-1.0
end
slack = Symbol.slack()
row = Row.insert(row, slack, coeff)
if Strength.weaker_than?(strength, Strength.required()) do
error = Symbol.error()
row = Row.insert(row, error, -coeff)
solver = insert_into_objective(solver, error, str_coeff)
{solver, row, {:tag, slack, error}}
else
{solver, row, {:tag, slack, Symbol.invalid()}}
end
end
row =
if row.constant < 0 do
Row.reverse_sign(row)
else
row
end
{solver, row, tag}
end
defp choose_subject(
%Solver{} = _solver,
%Row{} = row,
{:tag, {:symbol, marker_type, _} = marker, {:symbol, other_type, _} = other} = _tag
) do
external = Row.get_external_var(row)
cond do
external != nil ->
external
(marker_type == :slack or marker_type == :error) and Row.coefficient_for(row, marker) < 0 ->
marker
(other_type == :slack or other_type == :error) and Row.coefficient_for(row, other) < 0 ->
other
true ->
Symbol.invalid()
end
end
defp get_leaving_row(%Solver{rows: rows} = _solver, {:symbol, _type, _ref} = entering) do
rows
|> Enum.reduce({nil, nil}, fn {{:symbol, type, _} = _key, candidate_row},
{ratio, _row} = best_so_far ->
coefficient = Row.coefficient_for(candidate_row, entering)
cond do
type == :external ->
best_so_far
coefficient >= 0 ->
best_so_far
ratio != nil and -candidate_row.constant / coefficient >= ratio ->
best_so_far
true ->
{-candidate_row.constant / coefficient, candidate_row}
end
end)
|> elem(1)
end
defp optimize(%Solver{} = solver, %Row{} = objective) do
{:symbol, type, _} = entering = Row.get_entering_symbol(objective)
if type == :invalid do
solver
else
entry = get_leaving_row(solver, entering)
if entry == nil do
raise "Objective function is unbounded -- internal solver error."
end
leaving =
solver.rows
|> Enum.find(fn {_key, row} -> row == entry end)
|> elem(0)
solver = %Solver{solver | rows: Map.delete(solver.rows, leaving)}
entry = Row.solve_for(entry, leaving, entering)
solver = substitute(solver, entering, entry)
solver = %Solver{solver | rows: Map.put(solver.rows, entering, entry)}
# The objective function can be the same as either solver.objective or
# solver.artificial; in languages with mutability, there is a pointer alias
# at play, but not so in Elixir. The || below leads to the same effect.
solver = optimize(solver, solver.artificial || solver.objective)
solver
end
end
defp substitute(%Solver{} = solver, {:symbol, _, _} = symbol, %Row{} = row) do
rows =
solver.rows
|> Enum.map(fn {s, r} -> {s, Row.substitute(r, symbol, row)} end)
infeasible_rows =
rows
|> Enum.filter(fn {{:symbol, type, _}, r} -> type != :external and r.constant < 0 end)
|> Enum.map(fn {s, _r} -> s end)
objective = Row.substitute(solver.objective, symbol, row)
artificial =
if solver.artificial != nil do
Row.substitute(solver.artificial, symbol, row)
else
nil
end
%Solver{
solver
| rows: Enum.into(rows, %{}),
infeasible_rows: solver.infeasible_rows ++ infeasible_rows,
artificial: artificial,
objective: objective
}
end
defp add_with_artificial_variable(%Solver{} = solver, %Row{} = row) do
art = Symbol.slack()
solver = %Solver{solver | rows: Map.put(solver.rows, art, row), artificial: row}
solver = optimize(solver, solver.artificial)
success = near_zero?(solver.artificial.constant)
solver = %Solver{solver | artificial: nil}
{solver, return, val} =
case Map.get(solver.rows, art) do
%Row{} = rowptr ->
delete_queue =
solver.rows
|> Enum.filter(fn {_s, r} -> r == rowptr end)
|> Enum.map(fn {s, _r} -> s end)
solver =
delete_queue
|> Enum.reduce(solver, fn sym, sol ->
%Solver{sol | rows: Map.delete(sol.rows, sym)}
end)
if map_size(rowptr.cells) == 0 do
{solver, true, success}
else
{:symbol, type, _} = entering = Row.any_pivotable_symbol(rowptr)
if type == :invalid do
{solver, true, false}
else
rowptr = Row.solve_for(rowptr, art, entering)
solver = substitute(solver, entering, rowptr)
solver = %Solver{solver | rows: Map.put(solver.rows, entering, rowptr)}
{solver, false, nil}
end
end
nil ->
{solver, false, nil}
end
if return == true do
{solver, val}
else
solver = %Solver{
solver
| rows:
solver.rows
|> Enum.map(fn {s, r} -> {s, Row.remove(r, art)} end)
|> Enum.into(%{}),
objective: Row.remove(solver.objective, art)
}
{solver, success}
end
end
defp insert_into_objective(
%Solver{objective: objective} = solver,
{:symbol, _, _} = symbol,
coefficient
) do
%Solver{solver | objective: Row.insert(objective, symbol, coefficient)}
end
@doc """
Creates a new solver instance by removing the given constraint from the given solver instance.
"""
def remove_constraint(%Solver{} = solver, {:constraint, _, _} = constraint) do
case Map.get(solver.cns, constraint) do
nil ->
raise "Unknown constraint."
{:tag, marker, _} = tag ->
solver = %Solver{solver | cns: Map.delete(solver.cns, constraint)}
strength = Map.get(solver.strengths, constraint)
solver = %Solver{solver | strengths: Map.delete(solver.strengths, constraint)}
solver = remove_constraint_effects(solver, tag, strength)
solver =
case Map.get(solver.rows, marker) do
%Row{} ->
%Solver{solver | rows: Map.delete(solver.rows, marker)}
nil ->
row = get_marker_leaving_row(solver, marker)
if row == nil do
raise "Internal solver error."
end
leaving =
solver.rows
|> Enum.find(fn {_key, r} -> r == row end)
|> elem(0)
if leaving == nil do
raise "Internal solver error."
end
solver = %Solver{solver | rows: Map.delete(solver.rows, leaving)}
row = Row.solve_for(row, leaving, marker)
solver = substitute(solver, marker, row)
solver
end
optimize(solver, solver.objective)
end
end
defp remove_constraint_effects(
%Solver{} = solver,
{:tag, {:symbol, :error, _} = marker, _} = _tag,
strength
),
do: remove_marker_effects(solver, marker, strength)
defp remove_constraint_effects(
%Solver{} = solver,
{:tag, _, {:symbol, :error, _} = other} = _tag,
strength
),
do: remove_marker_effects(solver, other, strength)
defp remove_constraint_effects(%Solver{} = solver, {:tag, _, _} = _tag, _strength), do: solver
defp remove_marker_effects(%Solver{rows: rows} = solver, {:symbol, _, _} = marker, strength)
when is_number(strength) do
case Map.get(rows, marker) do
%Row{} = row ->
insert_into_objective(solver, row, -strength)
nil ->
insert_into_objective(solver, marker, -strength)
end
end
defp get_marker_leaving_row(%Solver{rows: rows}, {:symbol, _, _} = marker) do
{_r1, _r2, first, second, third} =
Enum.reduce(rows, {nil, nil, nil, nil, nil}, fn {{:symbol, type, _} = _key,
%Row{} = candidate_row},
{r1, r2, first, second, third} = acc ->
c = Row.coefficient_for(candidate_row, marker)
if c == 0 do
acc
else
if type == :external do
{r1, r2, first, second, candidate_row}
else
if c < 0 do
r = -candidate_row.constant / c
if r < r1 do
{r, r2, candidate_row, second, third}
else
acc
end
else
r = candidate_row.constant / c
if r < r2 do
{r1, r, first, candidate_row, third}
else
acc
end
end
end
end
end)
first || second || third
end
@doc """
Returns a new solver instance, where the given variable is editable. Strength can be `:strong`, `:medium`, `:weak`.
"""
def add_edit_variable(%Solver{} = solver, var, :strong) when is_reference(var),
  do: add_edit_variable(solver, var, Strength.strong())

def add_edit_variable(%Solver{} = solver, var, :medium) when is_reference(var),
  do: add_edit_variable(solver, var, Strength.medium())

def add_edit_variable(%Solver{} = solver, var, :weak) when is_reference(var),
  do: add_edit_variable(solver, var, Strength.weak())

def add_edit_variable(%Solver{} = solver, var, {:strength, _} = strength)
    when is_reference(var) do
  # A variable may be registered as editable only once.
  if Map.has_key?(solver.edits, var) do
    raise "Duplicate edit variable."
  end

  clipped = Strength.clip(strength)

  # Edits must stay below :required so suggested values can be retracted.
  if !Strength.weaker_than?(clipped, Strength.required()) do
    raise "Edit variable must be weaker than :required."
  end

  # The edit is modelled as the equality constraint `1 * var == 0`
  # added at the requested strength.
  constraint = {:constraint, {:expression, [{:term, var, 1}], 0}, :eq}
  updated = add_constraint(solver, constraint, clipped)
  edit_info = {:edit_info, constraint, Map.get(updated.cns, constraint), 0}

  %Solver{updated | edits: Map.put(updated.edits, var, edit_info)}
end
@doc """
Returns a new solver instance where the given variable is no longer editable.
"""
def remove_edit_variable(%Solver{} = solver, var) when is_reference(var) do
  case Map.get(solver.edits, var) do
    {:edit_info, constraint, _tag, _constant} ->
      # Drop the underlying constraint first, then forget the edit entry.
      cleaned = remove_constraint(solver, constraint)
      %Solver{cleaned | edits: Map.delete(cleaned.edits, var)}

    nil ->
      raise "Unknown edit variable."
  end
end
@doc """
Returns a new solver instance in which the given value is suggested for the given edit variable.
"""
def suggest_value(%Solver{} = solver, var, value) when is_reference(var) and is_number(value) do
  case Map.get(solver.edits, var) do
    nil ->
      raise "Unknown edit variable."

    {:edit_info, constraint, {:tag, marker, other} = tag, constant} ->
      # `delta` is how far the new suggestion moves from the previous one.
      delta = value - constant

      # Remember the newly suggested value for the next call.
      solver = %Solver{
        solver
        | edits: Map.put(solver.edits, var, {:edit_info, constraint, tag, value})
      }

      # Prefer the row in which the marker (or its companion symbol) is
      # basic; fall back to touching every row otherwise.
      {row, sym} =
        if row?(solver, marker) != nil do
          {row?(solver, marker), marker}
        else
          if row?(solver, other) != nil do
            {row?(solver, other), other}
          else
            {nil, nil}
          end
        end

      if row != nil do
        # Case 1: the symbol is basic - adjust its row constant directly.
        row = Row.add(row, -delta)
        solver = %Solver{solver | rows: Map.put(solver.rows, sym, row)}

        # A negative constant makes the row infeasible; queue it for the
        # dual optimization pass below.
        solver =
          if row.constant < 0 do
            %Solver{solver | infeasible_rows: [sym | solver.infeasible_rows]}
          else
            solver
          end

        dual_optimize(solver)
      else
        # Case 2: the symbol is non-basic - propagate delta through every
        # row in which it appears, scaled by its coefficient in that row.
        solver =
          solver.rows
          |> Enum.reduce(solver, fn {{:symbol, type, _} = sym, r}, solver ->
            coefficient = Row.coefficient_for(r, marker)
            r = Row.add(r, delta * coefficient)
            solver = %Solver{solver | rows: Map.put(solver.rows, sym, r)}

            # Only restricted (non-external) rows can become infeasible.
            if coefficient != 0 and r.constant < 0 and type != :external do
              %Solver{solver | infeasible_rows: [sym | solver.infeasible_rows]}
            else
              solver
            end
          end)

        dual_optimize(solver)
      end
  end
end
# Restores feasibility after a suggested value change by running the dual
# simplex: rows queued in `infeasible_rows` (constant < 0) are pivoted out
# of the basis one at a time until none remain.
#
# FIX: the emptiness test was `length(solver.infeasible_rows) == 0`, which
# walks the whole list (O(n)) on every recursion; `== []` is O(1) and the
# idiomatic form.
defp dual_optimize(%Solver{} = solver) do
  if solver.infeasible_rows == [] do
    solver
  else
    [leaving | infeasible] = solver.infeasible_rows

    case row?(solver, leaving) do
      %Row{constant: constant} = row when constant < 0 ->
        solver = %Solver{solver | infeasible_rows: infeasible}

        # Dual ratio test picks the entering symbol; :invalid means no
        # candidate exists and the tableau is in a bad state.
        {:symbol, type, _} = entering = get_dual_entering_symbol(solver, row)

        if type == :invalid do
          raise "Internal solver error."
        end

        # Pivot: `leaving` exits the basis and `entering` replaces it.
        solver = %Solver{solver | rows: Map.delete(solver.rows, leaving)}
        row = Row.solve_for(row, leaving, entering)
        solver = substitute(solver, entering, row)
        solver = %Solver{solver | rows: Map.put(solver.rows, entering, row)}
        dual_optimize(solver)

      _ ->
        # The queued row was already repaired (or removed); just drop it.
        %Solver{solver | infeasible_rows: infeasible}
    end
  end
end
# Dual ratio test: among the row's cells with a positive coefficient
# (ignoring dummy symbols), pick the symbol minimising
# objective_coefficient / cell_coefficient. Returns Symbol.invalid() when
# no candidate exists.
defp get_dual_entering_symbol(%Solver{} = solver, %Row{} = row) do
  {entering, _ratio} =
    Enum.reduce(row.cells, {Symbol.invalid(), nil}, fn
      {{:symbol, :dummy, _}, _coefficient}, best ->
        best

      {{:symbol, _, _} = symbol, coefficient}, {_current, best_ratio} = best
      when coefficient > 0 ->
        ratio = Row.coefficient_for(solver.objective, symbol) / coefficient

        if best_ratio == nil or ratio < best_ratio do
          {symbol, ratio}
        else
          best
        end

      _cell, best ->
        best
    end)

  entering
end
defp row?(%Solver{rows: rows}, {:symbol, _, _} = symbol), do: Map.get(rows, symbol)
# Resolves the symbol for an external variable, allocating (and recording)
# a fresh external symbol on first sight. Returns {solver, symbol}.
defp var_symbol?(%Solver{vars: vars} = solver, var) when is_reference(var) do
  case Map.get(vars, var) do
    nil ->
      fresh = Symbol.external()
      {%Solver{solver | vars: Map.put(vars, var, fresh)}, fresh}

    {:symbol, _, _} = existing ->
      {solver, existing}
  end
end
@doc """
Looks up the value of the given variable in the given solver instance. Default value is 0.
"""
def value?(%Solver{} = solver, var) when is_reference(var) do
  # The variable's value is the constant of the row in which its symbol is
  # basic; unknown variables and non-basic symbols read as 0.
  with {:symbol, _, _} = symbol <- Map.get(solver.vars, var),
       %Row{} = row <- Map.get(solver.rows, symbol) do
    row.constant
  else
    _ -> 0
  end
end
end
|
lib/furlong/solver.ex
| 0.853654 | 0.825976 |
solver.ex
|
starcoder
|
defmodule NervesLivebook.PatternLED do
  @moduledoc """
  Functions for sending patterns to LEDs.

  Linux's sysclass LED "pattern" trigger lets the kernel blink LEDs on its
  own, without spending any time in the Erlang VM. The functions in this
  module build pattern strings and hand them to the kernel via
  `set_led_pattern/2`.

  The pattern trigger requires these Linux kernel options:

  ```text
  CONFIG_LEDS_CLASS=y
  CONFIG_LEDS_TRIGGER_PATTERN=y
  ```

  LEDs must also be declared in the device tree so that they appear under
  the `"/sys/class/leds"` directory.

  Pattern strings have the form `<on/off> <milliseconds>...`, where `on/off`
  is `0` or `1` and `milliseconds` is the fade duration to the next state
  (Linux interpolates the LED value over that duration). Most LED drivers
  only support fully on/off, so the generators in this module always emit
  `0`-duration fades to avoid fade transitions.
  """

  @typedoc false
  @type blink_option() :: {:duty_cycle, number()} | {:off_first, boolean()}

  @doc """
  Initialize an LED so that patterns can be written to it.

  Call this whenever the code isn't sure what state the LED is in; it forces
  the "pattern" trigger. For this to succeed, the LED must exist under
  `/sys/class/leds` and the kernel must have pattern triggers enabled.
  Pattern triggers are enabled in Nerves systems, but they're not as
  commonly enabled elsewhere.
  """
  @spec initialize_led(String.t()) :: :ok | {:error, atom()}
  def initialize_led(led) when is_binary(led) do
    File.write(led_file(led, "trigger"), "pattern")
  end

  @doc """
  Set the LED pattern.

  Accepts any of the pattern strings produced by the other functions in this
  module.
  """
  @spec set_led_pattern(String.t(), String.t()) :: :ok | {:error, atom()}
  def set_led_pattern(led, pattern) when is_binary(led) do
    File.write(led_file(led, "pattern"), pattern)
  end

  @doc """
  Get the maximum brightness for an LED.
  """
  @spec get_max_brightness(String.t()) :: {:ok, pos_integer()} | {:error, atom()}
  def get_max_brightness(led) when is_binary(led) do
    # Read errors pass through unchanged from File.read/1.
    with {:ok, raw} <- File.read(led_file(led, "max_brightness")) do
      {:ok, raw |> String.trim() |> String.to_integer()}
    end
  end

  @doc """
  Turn the LED on.
  """
  @spec on(pos_integer()) :: String.t()
  def on(brightness) do
    # Hold the on state for an hour per segment; the kernel loops the pattern.
    "#{brightness} 3600000 #{brightness} 3600000"
  end

  @doc """
  Turn the LED off.
  """
  @spec off() :: String.t()
  def off() do
    "0 3600000 0 3600000"
  end

  @doc """
  Return a simple blinking pattern.

  Options:

  * `:duty_cycle` - a number between 0 and 1.0 that's the fraction of time
    the LED is on (default is 0.5)
  * `:off_first` - set to `true` to start in the "off" state, then switch to
    "on"
  """
  @spec blink(pos_integer(), number(), [blink_option()]) :: String.t()
  def blink(brightness, frequency, opts \\ []) when frequency > 0 do
    duty_cycle = clamp(opts[:duty_cycle] || 0.5, 0, 1)

    # Split one period (ms) into an on segment and an off segment.
    period_ms = round(1000 / frequency)
    on_ms = round(period_ms * duty_cycle)
    off_ms = period_ms - on_ms

    on_segment = "#{brightness} #{on_ms} #{brightness} 0"
    off_segment = "0 #{off_ms} 0 0"

    if opts[:off_first] do
      off_segment <> " " <> on_segment
    else
      on_segment <> " " <> off_segment
    end
  end

  # sysfs file path (as iodata) for the given LED attribute.
  defp led_file(led, attribute), do: ["/sys/class/leds/", led, "/", attribute]

  # Restricts `value` to the inclusive range [lower, upper].
  defp clamp(value, lower, upper) do
    value
    |> max(lower)
    |> min(upper)
  end
end
|
lib/nerves_livebook/pattern_led.ex
| 0.837188 | 0.841761 |
pattern_led.ex
|
starcoder
|
defmodule Socket.TCP do
  @moduledoc """
  This module wraps a passive TCP socket using `gen_tcp`.
  ## Options
  When creating a socket you can pass a series of options to use for it.
  * `:as` sets the kind of value returned by recv, either `:binary` or `:list`,
    the default is `:binary`
  * `:mode` can be either `:passive` or `:active`, default is `:passive`
  * `:local` must be a keyword list
    - `:address` the local address to use
    - `:port` the local port to use
    - `:fd` an already opened file descriptor to use
  * `:backlog` sets the listen backlog
  * `:watermark` must be a keyword list
    - `:low` defines the `:low_watermark`, see `inet:setopts`
    - `:high` defines the `:high_watermark`, see `inet:setopts`
  * `:version` sets the IP version to use
  * `:options` must be a list of atoms
    - `:keepalive` sets `SO_KEEPALIVE`
    - `:nodelay` sets `TCP_NODELAY`
  * `:packet` see `inet:setopts`
  * `:size` sets the max length of the packet body, see `inet:setopts`
  ## Examples
      server = Socket.TCP.listen!(1337, packet: :line)
      client = server |> Socket.accept!
      client |> Socket.Stream.send!(client |> Socket.Stream.recv!)
      client |> Socket.Stream.close
  """
  use Socket.Helpers
  require Record

  @opaque t :: port

  @doc """
  Return a proper error string for the given code or nil if it can't be
  converted.
  """
  @spec error(term) :: String.t
  def error(code) do
    case :inet.format_error(code) do
      'unknown POSIX error' ->
        nil

      message ->
        message |> to_string
    end
  end

  @doc """
  Create a TCP socket connecting to the given host and port tuple.
  """
  @spec connect({ Socket.Address.t, :inet.port_number }) :: { :ok, t } | { :error, Socket.Error.t }
  def connect({ address, port }) do
    connect(address, port)
  end

  @doc """
  Create a TCP socket connecting to the given host and port tuple, raising if
  an error occurs.
  """
  @spec connect({ Socket.Address.t, :inet.port_number }) :: t | no_return
  defbang connect(descriptor)

  @doc """
  Create a TCP socket connecting to the given host and port tuple and options,
  or to the given host and port.
  """
  @spec connect({ Socket.Address.t, :inet.port_number } | Socket.Address.t, Keyword.t | :inet.port_number) :: { :ok, t } | { :error, Socket.Error.t }
  def connect({ address, port }, options) when options |> is_list do
    connect(address, port, options)
  end

  def connect(address, port) when port |> is_integer do
    connect(address, port, [])
  end

  @doc """
  Create a TCP socket connecting to the given host and port tuple and options,
  or to the given host and port, raising if an error occurs.
  """
  @spec connect({ Socket.Address.t, :inet.port_number } | Socket.Address.t, Keyword.t | :inet.port_number) :: t | no_return
  defbang connect(address, port)

  @doc """
  Create a TCP socket connecting to the given host and port.
  """
  @spec connect(String.t | :inet.ip_address, :inet.port_number, Keyword.t) :: { :ok, t } | { :error, Socket.Error.t }
  def connect(address, port, options) when address |> is_binary do
    # :timeout is consumed here; everything else is translated for gen_tcp.
    timeout = options[:timeout] || :infinity
    options = Keyword.delete(options, :timeout)

    # FIX: String.to_char_list/1 is long-deprecated; to_charlist/1 is the
    # supported spelling (available since Elixir 1.3) with identical output.
    :gen_tcp.connect(String.to_charlist(address), port, arguments(options), timeout)
  end

  @doc """
  Create a TCP socket connecting to the given host and port, raising in case of
  error.
  """
  @spec connect!(String.t | :inet.ip_address, :inet.port_number, Keyword.t) :: t | no_return
  defbang connect(address, port, options)

  @doc """
  Create a TCP socket listening on an OS chosen port, use `local` to know the
  port it was bound on.
  """
  @spec listen :: { :ok, t } | { :error, Socket.Error.t }
  def listen do
    listen(0, [])
  end

  @doc """
  Create a TCP socket listening on an OS chosen port, use `local` to know the
  port it was bound on, raising in case of error.
  """
  @spec listen! :: t | no_return
  defbang listen

  @doc """
  Create a TCP socket listening on an OS chosen port using the given options or
  listening on the given port.
  """
  @spec listen(:inet.port_number | Keyword.t) :: { :ok, t } | { :error, Socket.Error.t }
  def listen(port) when port |> is_integer do
    listen(port, [])
  end

  def listen(options) when options |> is_list do
    listen(0, options)
  end

  @doc """
  Create a TCP socket listening on an OS chosen port using the given options or
  listening on the given port, raising in case of error.
  """
  @spec listen!(:inet.port_number | Keyword.t) :: t | no_return
  defbang listen(port_or_options)

  @doc """
  Create a TCP socket listening on the given port and using the given options.
  """
  @spec listen(:inet.port_number, Keyword.t) :: { :ok, t } | { :error, Socket.Error.t }
  def listen(port, options) when options |> is_list do
    # Listening sockets are always passive; accept/2 applies the mode.
    options = options
      |> Keyword.put(:mode, :passive)
      |> Keyword.put_new(:reuse, true)

    :gen_tcp.listen(port, arguments(options))
  end

  @doc """
  Create a TCP socket listening on the given port and using the given options,
  raising in case of error.
  """
  @spec listen!(:inet.port_number, Keyword.t) :: t | no_return
  defbang listen(port, options)

  @doc """
  Accept a new client from a listening socket, optionally passing options.
  """
  @spec accept(t | port) :: { :ok, t } | { :error, Error.t }
  @spec accept(t | port, Keyword.t) :: { :ok, t } | { :error, Error.t }
  def accept(sock, options \\ []) do
    case :gen_tcp.accept(sock, options[:timeout] || :infinity) do
      { :ok, sock } ->
        # Apply the requested delivery mode to the freshly accepted socket.
        case options[:mode] do
          :active ->
            :inet.setopts(sock, active: true)

          :once ->
            :inet.setopts(sock, active: :once)

          :passive ->
            :inet.setopts(sock, active: false)

          nil ->
            :ok
        end

        { :ok, sock }

      error ->
        error
    end
  end

  @doc """
  Accept a new client from a listening socket, optionally passing options,
  raising if an error occurs.
  """
  @spec accept!(t) :: t | no_return
  @spec accept!(t, Keyword.t) :: t | no_return
  defbang accept(self)
  defbang accept(self, options)

  @doc """
  Set the process which will receive the messages.
  """
  @spec process(t, pid) :: :ok | { :error, :closed | :not_owner | Error.t }
  def process(sock, pid) do
    :gen_tcp.controlling_process(sock, pid)
  end

  @doc """
  Set the process which will receive the messages, raising if an error occurs.
  """
  @spec process!(t | port, pid) :: :ok | no_return
  def process!(sock, pid) do
    case process(sock, pid) do
      :ok ->
        :ok

      :closed ->
        raise RuntimeError, message: "the socket is closed"

      :not_owner ->
        raise RuntimeError, message: "the current process isn't the owner"

      code ->
        raise Socket.Error, reason: code
    end
  end

  @doc """
  Set options of the socket.
  """
  @spec options(t | Socket.SSL.t | port, Keyword.t) :: :ok | { :error, Socket.Error.t }
  def options(socket, options) when socket |> Record.is_record(:sslsocket) do
    Socket.SSL.options(socket, options)
  end

  def options(socket, options) when socket |> is_port do
    :inet.setopts(socket, arguments(options))
  end

  @doc """
  Set options of the socket, raising if an error occurs.
  """
  @spec options!(t | Socket.SSL.t | port, Keyword.t) :: :ok | no_return
  defbang options(socket, options)

  @doc """
  Convert TCP options to `:inet.setopts` compatible arguments.
  """
  @spec arguments(Keyword.t) :: list
  def arguments(options) do
    options = options
      |> Keyword.put_new(:as, :binary)

    # Split options into TCP-specific ones (true) handled below, and generic
    # socket options (false) handled by Socket.arguments/1.
    #
    # FIX: the `:version` key was misspelled `:verion` here, so `version: 4`
    # and `version: 6` were grouped with the generic options and never
    # reached the `{ :version, _ }` translation clauses below.
    options = Enum.group_by(options, fn
      { :as, _ } -> true
      { :size, _ } -> true
      { :packet, _ } -> true
      { :backlog, _ } -> true
      { :watermark, _ } -> true
      { :local, _ } -> true
      { :version, _ } -> true
      { :options, _ } -> true
      _ -> false
    end)

    { local, global } = {
      Map.get(options, true, []),
      Map.get(options, false, [])
    }

    Socket.arguments(global) ++ Enum.flat_map(local, fn
      { :as, :binary } ->
        [:binary]

      { :as, :list } ->
        [:list]

      { :size, size } ->
        [{ :packet_size, size }]

      { :packet, packet } ->
        [{ :packet, packet }]

      { :backlog, backlog } ->
        [{ :backlog, backlog }]

      { :watermark, options } ->
        Enum.flat_map(options, fn
          { :low, low } ->
            [{ :low_watermark, low }]

          { :high, high } ->
            [{ :high_watermark, high }]
        end)

      { :local, options } ->
        Enum.flat_map(options, fn
          { :address, address } ->
            [{ :ip, Socket.Address.parse(address) }]

          { :port, port } ->
            [{ :port, port }]

          { :fd, fd } ->
            [{ :fd, fd }]
        end)

      { :version, 4 } ->
        [:inet]

      { :version, 6 } ->
        [:inet6]

      { :options, options } ->
        Enum.flat_map(options, fn
          :keepalive ->
            [{ :keepalive, true }]

          :nodelay ->
            [{ :nodelay, true }]
        end)
    end)
  end
end
|
deps/socket/lib/socket/tcp.ex
| 0.916147 | 0.629945 |
tcp.ex
|
starcoder
|
defprotocol Chess.Piece do
  @doc "Returns the piece's position on the board."
  def position(piece)
  @doc "Returns the piece's color (`:white` or `:black`)."
  def color(piece)
  @doc """
  Returns the piece's reachable positions on `board` (a list or MapSet,
  depending on the implementation).
  """
  def moves(piece, board)
  @doc "Returns the piece's Unicode chess-glyph string."
  def to_string(piece)
end
defmodule Chess.Pieces do
  @moduledoc """
  Constructors and struct definitions for the six chess piece kinds.
  """

  @doc """
  Builds a piece struct of the given kind with the given color and position.
  """
  def new(piece_kind, color, position) do
    struct(piece_kind, %{color: color, position: position})
  end

  defmodule Pawn do
    defstruct color: nil, position: nil
  end

  defmodule Rook do
    defstruct color: nil, position: nil
  end

  defmodule Knight do
    defstruct color: nil, position: nil
  end

  defmodule Bishop do
    defstruct color: nil, position: nil
  end

  defmodule Queen do
    defstruct color: nil, position: nil
  end

  defmodule King do
    defstruct color: nil, position: nil
  end
end
defimpl Chess.Piece, for: Chess.Pieces.Pawn do
  alias Chess.Position

  def color(this) do
    this.color
  end

  def position(this) do
    this.position
  end

  # Legal pawn moves as a MapSet: one square forward when unblocked, a
  # two-square advance from the home row (when both squares are free), and
  # diagonal captures onto enemy-occupied squares. Black advances "down",
  # white advances "up".
  # TODO: add check for en passant
  def moves(this, board) do
    # Squares held by friendly pieces (never legal destinations).
    same_color_piece_positions =
      board.pieces
      |> Enum.filter(fn piece ->
        piece.color == color(this)
      end)
      |> Enum.map(&Chess.Piece.position(&1))
      |> Enum.into(MapSet.new())

    # Squares held by enemy pieces (the only legal capture targets).
    opposite_color_piece_positions =
      board.pieces
      |> Enum.reject(fn piece ->
        piece.color == color(this)
      end)
      |> Enum.map(&Chess.Piece.position(&1))
      |> Enum.into(MapSet.new())

    all_piece_positions = MapSet.union(same_color_piece_positions, opposite_color_piece_positions)

    # Home rows (0-indexed y): rank 1 for white, rank 6 for black - the only
    # ranks from which the two-square advance is allowed.
    in_home_row? =
      case {color(this), Position.to_xy(this.position)} do
        {:white, {_, 1}} -> true
        {:black, {_, 6}} -> true
        _ -> false
      end

    case color(this) do
      :black ->
        # Forward step is blocked by any piece (no captures straight ahead).
        straight =
          [
            Position.down(this.position)
          ]
          |> Enum.reject(fn position ->
            MapSet.member?(all_piece_positions, position)
          end)

        # Diagonal steps only count when an enemy piece sits there.
        takes =
          [
            Position.down_left(this.position),
            Position.down_right(this.position)
          ]
          |> Enum.filter(fn position ->
            MapSet.member?(opposite_color_piece_positions, position)
          end)

        # Two-square advance: both the intermediate and the target square
        # must be empty.
        special =
          if in_home_row? do
            [Position.compose([:down, :down]).(this.position)]
            |> Enum.reject(fn move ->
              MapSet.member?(all_piece_positions, move) ||
                MapSet.member?(all_piece_positions, Position.down(this.position))
            end)
          else
            []
          end

        straight ++ takes ++ special

      :white ->
        # Mirror of the :black branch, moving "up" instead of "down".
        straight =
          [
            Position.up(this.position)
          ]
          |> Enum.reject(fn position ->
            MapSet.member?(all_piece_positions, position)
          end)

        takes =
          [
            Position.up_left(this.position),
            Position.up_right(this.position)
          ]
          |> Enum.filter(fn position ->
            MapSet.member?(opposite_color_piece_positions, position)
          end)

        special =
          if in_home_row? do
            [Position.compose([:up, :up]).(this.position)]
            |> Enum.reject(fn move ->
              MapSet.member?(all_piece_positions, move) ||
                MapSet.member?(all_piece_positions, Position.up(this.position))
            end)
          else
            []
          end

        straight ++ takes ++ special
    end
    |> Enum.reject(fn move ->
      MapSet.member?(same_color_piece_positions, move)
    end)
    |> Enum.into(MapSet.new())
  end

  def to_string(this) do
    case this.color do
      :black -> "\u265F"
      :white -> "\u2659"
    end
  end
end
defimpl Chess.Piece, for: Chess.Pieces.Rook do
  alias Chess.Position

  def color(this), do: this.color

  def position(this), do: this.position

  # Slides along ranks and files until blocked: a friendly piece stops the
  # ray before its square, an enemy piece stops it on its square (capture).
  # Returns a list of positions.
  def moves(this, board) do
    occupancy = Map.new(board.pieces, fn piece -> {Chess.Piece.position(piece), piece} end)

    Enum.flat_map([:up, :right, :down, :left], fn direction ->
      this.position
      |> Position.stream(direction)
      |> Enum.take_while(fn square -> Position.to_xy(square) != :off_board end)
      |> Enum.map(fn square -> {square, Map.get(occupancy, square)} end)
      |> Enum.reduce_while([], fn {square, blocker}, reachable ->
        cond do
          blocker == nil -> {:cont, [square | reachable]}
          Chess.Piece.color(blocker) == this.color -> {:halt, reachable}
          true -> {:halt, [square | reachable]}
        end
      end)
    end)
  end

  def to_string(this) do
    case this.color do
      :black -> "\u265C"
      :white -> "\u2656"
    end
  end
end
defimpl Chess.Piece, for: Chess.Pieces.Knight do
  alias Chess.Position

  def color(this), do: this.color

  def position(this), do: this.position

  # All eight L-shaped jumps, minus off-board squares and squares held by
  # friendly pieces. Knights jump over pieces, so no blocking checks apply.
  # Returns a list of positions.
  def moves(this, board) do
    friendly_squares =
      board.pieces
      |> Enum.filter(fn piece -> Chess.Piece.color(piece) == color(this) end)
      |> Enum.map(&Chess.Piece.position/1)
      |> MapSet.new()

    [
      [:up, :up, :right],
      [:up, :up, :left],
      [:right, :right, :up],
      [:right, :right, :down],
      [:down, :down, :right],
      [:down, :down, :left],
      [:left, :left, :down],
      [:left, :left, :up]
    ]
    |> Enum.map(fn steps -> Position.compose(steps).(this.position) end)
    |> Enum.reject(fn square -> Position.to_xy(square) == :off_board end)
    |> Enum.reject(fn square -> MapSet.member?(friendly_squares, square) end)
  end

  def to_string(this) do
    case color(this) do
      :black -> "\u265E"
      :white -> "\u2658"
    end
  end
end
defimpl Chess.Piece, for: Chess.Pieces.Bishop do
  alias Chess.Position

  def color(this), do: this.color

  def position(this), do: this.position

  # Slides along the four diagonals until blocked: a friendly piece stops
  # the ray before its square, an enemy piece stops it on its square
  # (capture). Returns a list of positions.
  def moves(this, board) do
    occupancy = Map.new(board.pieces, fn piece -> {Chess.Piece.position(piece), piece} end)

    Enum.flat_map([:up_left, :up_right, :down_right, :down_left], fn direction ->
      this.position
      |> Position.stream(direction)
      |> Enum.take_while(fn square -> Position.to_xy(square) != :off_board end)
      |> Enum.map(fn square -> {square, Map.get(occupancy, square)} end)
      |> Enum.reduce_while([], fn {square, blocker}, reachable ->
        cond do
          blocker == nil -> {:cont, [square | reachable]}
          Chess.Piece.color(blocker) == this.color -> {:halt, reachable}
          true -> {:halt, [square | reachable]}
        end
      end)
    end)
  end

  def to_string(this) do
    case this.color do
      :black -> "\u265D"
      :white -> "\u2657"
    end
  end
end
defimpl Chess.Piece, for: Chess.Pieces.Queen do
  alias Chess.Position

  def color(this), do: this.color

  def position(this), do: this.position

  # Combines rook and bishop movement: slides along all eight directions
  # until blocked, capturing on enemy squares. Returns a list of positions.
  def moves(this, board) do
    occupancy = Map.new(board.pieces, fn piece -> {Chess.Piece.position(piece), piece} end)

    Enum.flat_map(
      [:up, :right, :down, :left, :up_left, :up_right, :down_right, :down_left],
      fn direction ->
        this.position
        |> Position.stream(direction)
        |> Enum.take_while(fn square -> Position.to_xy(square) != :off_board end)
        |> Enum.map(fn square -> {square, Map.get(occupancy, square)} end)
        |> Enum.reduce_while([], fn {square, blocker}, reachable ->
          cond do
            blocker == nil -> {:cont, [square | reachable]}
            Chess.Piece.color(blocker) == this.color -> {:halt, reachable}
            true -> {:halt, [square | reachable]}
          end
        end)
      end
    )
  end

  def to_string(this) do
    case this.color do
      :black -> "\u265B"
      :white -> "\u2655"
    end
  end
end
defimpl Chess.Piece, for: Chess.Pieces.King do
  alias Chess.Position

  def color(this) do
    this.color
  end

  def position(this) do
    this.position
  end

  # Returns a MapSet of the king's one-square moves in all eight directions,
  # excluding off-board squares and squares held by friendly pieces.
  #
  # CONSISTENCY FIX: friendly pieces are now identified via the Chess.Piece
  # protocol (as the Knight implementation does) instead of reading
  # `piece.color` directly; behavior is unchanged for the structs defined in
  # Chess.Pieces, but the protocol is the documented access path.
  # TODO: add check for moving into check
  # TODO: add castling
  def moves(this, board) do
    same_color_piece_positions =
      board.pieces
      |> Enum.filter(fn piece ->
        Chess.Piece.color(piece) == color(this)
      end)
      |> Enum.map(&Chess.Piece.position(&1))
      |> Enum.into(MapSet.new())

    [
      Position.up(this.position),
      Position.up_right(this.position),
      Position.right(this.position),
      Position.down_right(this.position),
      Position.down(this.position),
      Position.down_left(this.position),
      Position.left(this.position),
      Position.up_left(this.position)
    ]
    |> Enum.reject(fn position -> Position.to_xy(position) == :off_board end)
    |> Enum.reject(fn move ->
      MapSet.member?(same_color_piece_positions, move)
    end)
    |> Enum.into(MapSet.new())
  end

  def to_string(this) do
    case this.color do
      :black -> "\u265A"
      :white -> "\u2654"
    end
  end
end
|
lib/pieces.ex
| 0.504639 | 0.655639 |
pieces.ex
|
starcoder
|
defmodule Bricks.Guards do
  @moduledoc false

  # An integer in 0..255 (e.g. an IPv4 octet).
  defguard is_byte(x) when is_integer(x) and x in 0..255
  # An integer in 0..0x10FFFF (also fits IPv6 group values).
  defguard is_char(x) when is_integer(x) and x in 0..0x10FFFF
  defguard is_opt_atom(x) when is_nil(x) or is_atom(x)
  defguard is_opt_bool(x) when is_nil(x) or is_boolean(x)
  defguard is_opt_int(x) when is_nil(x) or is_integer(x)
  defguard is_int_gte(x, min) when is_integer(x) and x >= min
  defguard is_int_in_range(x, min, max) when is_integer(x) and x >= min and x <= max
  # inet-style {:active, _} value: boolean, :once, or a bounded integer.
  defguard is_active(x) when is_boolean(x) or x == :once or is_int_in_range(x, -32767, 32766)
  defguard is_window(x) when x == :once or is_int_gte(x, 1)
  defguard is_timeout(x) when x == :infinity or is_int_gte(x, 0)
  defguard is_opt_active(x) when is_nil(x) or is_active(x)
  defguard is_opt_timeout(x) when is_nil(x) or is_timeout(x)
  defguard is_non_neg_int(x) when is_int_gte(x, 0)
  defguard is_pos_int(x) when is_int_gte(x, 1)
  defguard is_port_num(x) when is_non_neg_int(x)
  defguard is_deliver(x) when x in [:port, :term]

  # Plain-function twins of the guards above, usable outside guard position.
  def byte?(x), do: is_byte(x)
  def char?(x), do: is_char(x)
  def opt_atom?(x), do: is_opt_atom(x)
  def opt_bool?(x), do: is_opt_bool(x)
  def opt_int?(x), do: is_opt_int(x)
  def int_gte?(x, min), do: is_int_gte(x, min)
  def int_in_range?(x, min, max), do: is_int_in_range(x, min, max)
  def active?(x), do: is_active(x)
  def window?(x), do: is_window(x)
  def timeout?(x), do: is_timeout(x)
  def opt_active?(x), do: is_opt_active(x)
  def opt_timeout?(x), do: is_opt_timeout(x)
  def non_neg_int?(x), do: is_non_neg_int(x)
  def pos_int?(x), do: is_pos_int(x)
  def port?(x), do: is_port_num(x)
  def deliver?(x), do: is_deliver(x)

  # A valid {:linger, {on_off, seconds}} tuple value.
  def linger?(value) do
    case value do
      {on_off, seconds} -> is_boolean(on_off) and is_pos_int(seconds)
      _ -> false
    end
  end

  # Values accepted by inet's {:packet, _} option.
  @packet_types [:raw, 1, 2, 4, :asn1, :cdr, :sunrm, :fcgi, :tpkt, :line] ++
                  [:http, :http_bin, :httph, :httph_bin]

  def packet_type?(x), do: x in @packet_types

  @doc false
  # True if the provided value is a binary hostname, an IPv4 tuple of four
  # bytes, or an IPv6 tuple of eight 16-bit-capable values.
  def host?(host) when is_binary(host), do: true

  def host?({o1, o2, o3, o4})
      when is_byte(o1) and is_byte(o2) and is_byte(o3) and is_byte(o4),
      do: true

  def host?({g1, g2, g3, g4, g5, g6, g7, g8})
      when is_char(g1) and is_char(g2) and is_char(g3) and is_char(g4) and is_char(g5) and
             is_char(g6) and is_char(g7) and is_char(g8),
      do: true

  def host?(_), do: false
end
|
bricks/lib/guards.ex
| 0.578329 | 0.613844 |
guards.ex
|
starcoder
|
defmodule Utils do
@moduledoc """
Documentation for `Utils`.
"""
alias Kino.ValidatedForm
# Renders one square as inline-styled HTML with the given background and
# border colors (used by the evaluation animations below).
defp box(text, style) do
  "<div style=\"margin: 0 10px; font-size: 24px; font-weight: bold; height: 50px; width: 50px; background-color: #{style.background}; border: #{style.border} solid 1px; display: flex; align-items: center; justify-content: center;\">#{text}</div>"
end

# An invisible square, used for spacing/indentation between boxes.
defp space, do: box("", %{background: "none", border: "none"})

# Green square: a produced/kept element.
defp green_box(text), do: box(text, %{background: "#D5E8D4", border: "#82B366"})

# Grey square: an element that is filtered out or not yet evaluated.
defp grey_box(text), do: box(text, %{background: "#F5F5F5", border: "#666666"})

# Renders one labelled row of boxes (e.g. the "map" or "filter" stage).
defp row(title, items) do
  "<div style=\"height: 80px; display: flex; width: 100%; align-items: center;\">
<p style=\"font-weight: bold; font-size: 24px; margin: 0; width: 10%;\">#{title}</p>
<div style=\"display: flex; width: 90%;\">#{items}</div>
</div>"
end

# Returns `integer` invisible spacer boxes (none for 0).
defp spaces(0), do: []
defp spaces(integer), do: Enum.map(1..integer, fn _ -> space() end)

# Maps every value in a range to a green/grey box.
defp green_boxes(range), do: Enum.map(range, &green_box/1)
defp grey_boxes(range), do: Enum.map(range, &grey_box/1)
@doc """
iex> Utils.animate(:eager_evaluation)
iex> Utils.animate(:lazy_evaluation)
iex> Utils.animate(:remainder)
"""
# Cycles through four pre-rendered frames showing how an eager (Enum)
# pipeline materializes every intermediate collection before the next stage.
def animate(:eager_evaluation) do
  sequence = [
    "
#{row("1..10", Enum.map(1..10, &green_box/1))}
#{row("map", [])}
#{row("filter", [])}
#{row("take", [])}
",
    "
#{row("1..10", Enum.map(1..10, &green_box/1))}
#{row("map", Enum.map(2..20//2, &green_box/1))}
#{row("filter", [])}
#{row("take", [])}
",
    "
#{row("1..10", Enum.map(1..10, &green_box/1))}
#{row("map", Enum.map(2..20//2, &green_box/1))}
#{row("filter", Enum.map(2..10//2, &green_box/1) ++ Enum.map(12..20//2, &grey_box/1))}
#{row("take", [])}
",
    "
#{row("1..10", Enum.map(1..10, &green_box/1))}
#{row("map", Enum.map(2..20//2, &green_box/1))}
#{row("filter", Enum.map(2..10//2, &green_box/1) ++ Enum.map(12..20//2, &grey_box/1))}
#{row("take", Enum.map(2..8//2, &green_box/1) ++ Enum.map(10..20//2, fn _ -> space() end))}
"
  ]

  # Advance one frame every 2s, wrapping back to the first frame.
  Kino.animate(2000, 0, fn i ->
    md = Kino.Markdown.new(Enum.at(sequence, i))
    {:cont, md, rem(i + 1, length(sequence))}
  end)
end

# Animates a lazy (Stream) pipeline: one element at a time flows through
# each stage (row), so no intermediate collection is built. The animation
# state is {current_row, current_column}: which stage is active and which
# element is flowing through.
def animate(:lazy_evaluation) do
  Kino.animate(500, {0, 0}, fn {current_row, current_column} ->
    current_element = current_column + 1
    range = green_boxes(1..current_element) ++ grey_boxes((current_element + 1)..10)

    # Only render `display` on the stage that is currently active.
    maybe_display = fn expected_row, display ->
      if current_row === expected_row, do: display, else: []
    end

    indent = spaces(current_column)

    md = Kino.Markdown.new("
#{row("1..10", range)}
#{row("map", maybe_display.(1, indent ++ [green_box(current_element * 2)]))}
#{row("filter", maybe_display.(2, indent ++ [green_box(current_element * 2)]))}
#{row("take", green_boxes(2..(current_element * 2 - 2)//2) ++ maybe_display.(3, [green_box(current_element * 2)]))}
")

    # Step to the next stage; after the last stage (row 3), move on to the
    # next element. Both counters wrap at 4.
    next_row = rem(current_row + 1, 4)
    next_column = rem((current_row === 3 && current_column + 1) || current_column, 4)
    {:cont, md, {next_row, next_column}}
  end)
end

# Shows `rem/2` ticking upward: a counter and its remainder mod 10.
def animate(:remainder) do
  Kino.animate(500, 0, fn i ->
    md = Kino.Markdown.new("
```elixir
rem(#{i}, 10) = #{rem(i, 10)}
```
")
    {:cont, md, i + 1}
  end)
end
@doc ~S"""
iex> %Kino.JS{} = Utils.form(:comparison_operators)
iex> %Kino.JS{} = Utils.form(:boolean_fill_in_the_blank)
iex> %Kino.JS{} = Utils.form(:lists_vs_tuples)
"""
# Fill-in-the-blank quiz on Elixir comparison operators.
def form(:comparison_operators) do
  ValidatedForm.new([
    %{label: "7 __ 8", answers: ["<"]},
    %{label: "8 __ 7", answers: [">"]},
    %{label: "8 __ 8 and 9 __ 9 and not 10 __ 10.0", answers: ["==="]},
    # NOTE(review): "0 __ 9.0" reads oddly (0 == 9.0 is false) - possibly a
    # typo for "9 __ 9.0"; left unchanged pending confirmation.
    %{label: "8 __ 8.0 and 0 __ 9.0", answers: ["=="]},
    %{label: "8 __ 7 and 7 __ 7", answers: [">="]},
    %{label: "7 __ 8 and 7 __ 7", answers: ["<="]}
  ])
end

# Fill-in-the-blank quiz on boolean operators.
def form(:boolean_fill_in_the_blank) do
  ValidatedForm.new([
    %{label: "not ____ and true === false", answers: ["true"]},
    %{label: "true ____ false === true", answers: ["or"]},
    %{label: "not true ____ true === true", answers: ["or"]},
    %{label: "not false ____ true === true", answers: ["and"]},
    %{label: "not (true and false) ____ false === true", answers: ["or"]},
    %{label: "not (true or false) or not (not ____ and true) === true", answers: ["false"]},
    %{label: "____ false and true === true", answers: ["not"]},
    %{label: "false or ____ true === false", answers: ["not"]}
  ])
end

# Multiple-choice quiz matching list/tuple operations to time complexities.
def form(:lists_vs_tuples) do
  options = [
    "",
    "O(n)",
    "O(n*)",
    "O(1)",
    "O(n1)",
    "O(n1 + n2)"
  ]

  ValidatedForm.new([
    %{label: "concatenating two lists", answers: ["O(n1)"], options: options},
    %{label: "inserting an element in tuple", answers: ["O(n)"], options: options},
    %{label: "deleting an element in a list", answers: ["O(n*)"], options: options},
    %{label: "prepending an element in a list", answers: ["O(1)"], options: options},
    %{label: "updating an element in a list", answers: ["O(n*)"], options: options},
    # FIX: the answer was "O(n1 + n1)", which is not one of `options`, so
    # this question could never be answered correctly. Tuple concatenation
    # is O(n1 + n2).
    %{label: "concatenating two tuples", answers: ["O(n1 + n2)"], options: options},
    %{label: "inserting an element in list", answers: ["O(n*)"], options: options},
    %{label: "updating an element in tuple", answers: ["O(n)"], options: options},
    %{label: "deleting an element in a tuple", answers: ["O(n)"], options: options},
    %{label: "finding the length of a tuple", answers: ["O(1)"], options: options},
    # NOTE(review): duplicate of the earlier "deleting an element in a list"
    # entry, and "finding the length of a list" appears twice below (the
    # second time without options) - possibly intentional drilling; confirm.
    %{label: "deleting an element in a list", answers: ["O(n*)"], options: options},
    %{label: "finding the length of a list", answers: ["O(n)"], options: options},
    %{label: "finding the length of a list", answers: ["O(n)"]}
  ])
end
@doc ~S"""
iex> %{} = Utils.get(:string_maze)
iex> %{} = Utils.get(:atom_maze)
"""
# Builds a nested-map "maze" whose keys are string directions; following the
# listed path with get_in/2 reaches the "Exit!" leaf.
def get(:string_maze) do
  build_maze(~w(west south east north east south east south east south west north))
end

# Same maze idea with atom keys and a longer path.
def get(:atom_maze) do
  build_maze(
    ~w(south east north east south west north east south east north west south west south west north west south west south west south east north east south east south)a
  )
end

# Nests one map per path element, placing "Exit!" at the deepest level.
defp build_maze(path) do
  put_in(%{}, Enum.map(path, &Access.key(&1, %{})), "Exit!")
end
@doc ~S"""
iex> %Kino.JS.Live{} = Utils.graph(:big_o_notation)
iex> %Kino.JS.Live{} = Utils.graph(:binary_search)
iex> %Kino.JS.Live{} = Utils.graph(:comprehension_big_o)
iex> %Kino.JS.Live{} = Utils.graph(:exponential_complexity)
iex> %Kino.JS.Live{} = Utils.graph(:factorial_complexity)
iex> %Kino.JS.Live{} = Utils.graph(:linear_complexity)
iex> %Kino.JS.Live{} = Utils.graph(:pigeon_beats_internet)
iex> %Kino.JS.Live{} = Utils.graph(:polynomial_complexity)
"""
# Line chart comparing common Big O growth curves over a small x range.
def graph(:big_o_notation) do
  chart_size = 600

  chart =
    VegaLite.new(width: chart_size, height: chart_size)
    |> VegaLite.mark(:line)
    |> VegaLite.encode_field(:x, "x", type: :quantitative)
    |> VegaLite.encode_field(:y, "y", type: :quantitative)
    |> VegaLite.transform(groupby: ["color"], extent: [2500, 6500])
    |> VegaLite.encode_field(:color, "type", title: "Big O Notation", type: :nominal)
    |> Kino.VegaLite.new()

  # All curves are sampled over the same x values for direct comparison.
  x_range = 2..5

  # Series listed in the same push order as the original implementation.
  series = [
    Enum.map(x_range, &%{x: &1, y: Math.factorial(&1), type: "O(n!)"}),
    Enum.map(x_range, &%{x: &1, y: 2 ** &1, type: "O(2^n)"}),
    Enum.map(x_range, &%{x: &1, y: &1 ** 2, type: "O(n^2)"}),
    Enum.map(x_range, &%{x: &1, y: &1, type: "O(n)"}),
    Enum.map(x_range, &%{x: &1, y: :math.log2(&1), type: "O(log n)"}),
    Enum.map(x_range, &%{x: &1, y: 1, type: "O(1)"})
  ]

  Enum.each(series, &Kino.VegaLite.push_many(chart, &1))
  chart
end
# Plots the O(log n) curve that characterizes binary search.
def graph(:binary_search) do
  chart =
    VegaLite.new(width: 600, height: 600)
    |> VegaLite.mark(:line)
    |> VegaLite.encode_field(:x, "number of elements", type: :quantitative)
    |> VegaLite.encode_field(:y, "time", type: :quantitative)
    |> VegaLite.transform(groupby: ["color"], extent: [2500, 6500])
    |> VegaLite.encode_field(:color, "type", title: "Big O Notation", type: :nominal)
    |> Kino.VegaLite.new()

  points =
    for elements <- 1..500 do
      %{"number of elements": elements, time: :math.log2(elements), type: "log(n)"}
    end

  Kino.VegaLite.push_many(chart, points)
  chart
end
# Shows how comprehension cost grows with the number of generators:
# one generator is O(n), two is O(n^2), etc.
def graph(:comprehension_big_o) do
  chart =
    VegaLite.new(width: 600, height: 600)
    |> VegaLite.mark(:line)
    |> VegaLite.encode_field(:x, "number of elements", type: :quantitative)
    |> VegaLite.encode_field(:y, "time", type: :quantitative)
    |> VegaLite.transform(groupby: ["color"], extent: [2500, 6500])
    |> VegaLite.encode_field(:color, "type", title: "Number Of Generators", type: :nominal)
    |> Kino.VegaLite.new()

  # One series per generator count (1..4), pushed in ascending order as before.
  for generators <- 1..4 do
    points =
      for n <- 1..5 do
        %{"number of elements": n, time: n ** generators, type: "#{generators}"}
      end

    Kino.VegaLite.push_many(chart, points)
  end

  chart
end
# Plots an O(2^n) exponential growth curve.
def graph(:exponential_complexity) do
  size = 600

  widget =
    VegaLite.new(width: size, height: size)
    |> VegaLite.mark(:line)
    |> VegaLite.encode_field(:x, "x", type: :quantitative)
    |> VegaLite.encode_field(:y, "y", type: :quantitative)
    |> VegaLite.transform(groupby: [\"color\"], extent: [2500, 6500])
    |> VegaLite.encode_field(:color, "type", title: "Exponential Growth", type: :nominal)
    |> Kino.VegaLite.new()

  init = 1
  max = 10

  # The series is labeled "2^n" (matching graph(:big_o_notation)); the
  # original computed `10 ** n`, which contradicted its own legend.
  exponential = for n <- init..max, do: %{x: n, y: 2 ** n, type: "2^n"}
  Kino.VegaLite.push_many(widget, exponential)
  widget
end
# Plots an O(n!) factorial growth curve over a deliberately tiny range.
def graph(:factorial_complexity) do
  chart =
    VegaLite.new(width: 600, height: 600)
    |> VegaLite.mark(:line)
    |> VegaLite.encode_field(:x, "x", type: :quantitative)
    |> VegaLite.encode_field(:y, "y", type: :quantitative)
    |> VegaLite.transform(groupby: ["color"], extent: [2500, 6500])
    |> VegaLite.encode_field(:color, "type", title: "Factorial Growth", type: :nominal)
    |> Kino.VegaLite.new()

  points = for n <- 1..5, do: %{x: n, y: Math.factorial(n), type: "n!"}
  Kino.VegaLite.push_many(chart, points)
  chart
end
# Plots a straight O(n) line: time grows in direct proportion to input size.
def graph(:linear_complexity) do
  chart =
    VegaLite.new(width: 600, height: 600)
    |> VegaLite.mark(:line)
    |> VegaLite.encode_field(:x, "number of elements", type: :quantitative)
    |> VegaLite.encode_field(:y, "time", type: :quantitative)
    |> VegaLite.encode_field(:color, "type", title: "Linear Growth", type: :nominal)
    |> Kino.VegaLite.new()

  points =
    for elements <- 1..100 do
      %{"number of elements": elements, time: elements, type: "O(n)"}
    end

  Kino.VegaLite.push_many(chart, points)
  chart
end
# Visualizes the classic "pigeon vs internet" anecdote: internet transfer
# time grows linearly with data size while the pigeon's flight time is constant.
# NOTE(review): the color legend title "Linear Growth" appears to be copied
# from graph(:linear_complexity) — kept as-is to preserve behavior.
def graph(:pigeon_beats_internet) do
  chart =
    VegaLite.new(width: 600, height: 600)
    |> VegaLite.mark(:line)
    |> VegaLite.encode_field(:x, "data", type: :quantitative)
    |> VegaLite.encode_field(:y, "time", type: :quantitative)
    |> VegaLite.encode_field(:color, "type", title: "Linear Growth", type: :nominal)
    |> Kino.VegaLite.new()

  data_range = 1..200

  internet_series = for n <- data_range, do: %{data: n, time: n, type: "Internet"}
  pigeon_series = for n <- data_range, do: %{data: n, time: 100, type: "Pigeon"}

  Kino.VegaLite.push_many(chart, internet_series)
  Kino.VegaLite.push_many(chart, pigeon_series)
  chart
end
# Plots n^2, n^3 and n^4 polynomial growth curves.
def graph(:polynomial_complexity) do
  size = 600

  widget =
    VegaLite.new(width: size, height: size)
    |> VegaLite.mark(:line)
    |> VegaLite.encode_field(:x, "number of elements", type: :quantitative)
    |> VegaLite.encode_field(:y, "time", type: :quantitative)
    |> VegaLite.transform(groupby: ["color"], extent: [2500, 6500])
    # Legend title typo fixed: was "Polonomial Growth".
    |> VegaLite.encode_field(:color, "type", title: "Polynomial Growth", type: :nominal)
    |> Kino.VegaLite.new()

  init = 1
  max = 5

  n2 = for n <- init..max, do: %{"number of elements": n, time: n ** 2, type: "n^2"}
  n3 = for n <- init..max, do: %{"number of elements": n, time: n ** 3, type: "n^3"}
  n4 = for n <- init..max, do: %{"number of elements": n, time: n ** 4, type: "n^4"}

  Kino.VegaLite.push_many(widget, n2)
  Kino.VegaLite.push_many(widget, n3)
  Kino.VegaLite.push_many(widget, n4)
  widget
end
@doc """
Display a list of slides.
iex> %Kino.JS.Live{} = Utils.slide(:case)
iex> %Kino.JS.Live{} = Utils.slide(:cond)
iex> %Kino.JS.Live{} = Utils.slide(:functions)
iex> %Kino.JS.Live{} = Utils.slide(:recursion)
iex> %Kino.JS.Live{} = Utils.slide(:reduce)
"""
# Step-by-step slideshow showing how a `case` expression checks each clause
# in order until one matches. Slides 4 and 5 previously had a stray
# "```elixir" fused onto the sentence line, breaking the markdown fences.
def slide(:case) do
  [
    "
We create a case statement.
```elixir
case \"snowy\" do
\"sunny\" -> \"wear a t-shirt\"
\"rainy\" -> \"wear a rain jacket\"
\"cold\" -> \"wear a sweater\"
\"snowy\" -> \"wear a thick coat\"
end
```
",
    "
Check if snowy equals sunny.
```elixir
case \"snowy\" do
\"snowy\" === \"sunny\" -> \"wear a t-shirt\"
\"rainy\" -> \"wear a rain jacket\"
\"cold\" -> \"wear a sweater\"
\"snowy\" -> \"wear a thick coat\"
end
```
",
    "
It's false, so check if snowy equals rainy.
```elixir
case \"snowy\" do
false -> \"wear a t-shirt\"
\"snowy\" === \"rainy\" -> \"wear a rain jacket\"
\"cold\" -> \"wear a sweater\"
\"snowy\" -> \"wear a thick coat\"
end
```
",
    "
It's false, so check if snowy equals cold.
```elixir
case \"snowy\" do
false -> \"wear a t-shirt\"
false -> \"wear a rain jacket\"
\"snowy\" === \"cold\" -> \"wear a sweater\"
\"snowy\" -> \"wear a thick coat\"
end
```
",
    "
It's false, so check if snowy equals snowy.
```elixir
case \"snowy\" do
false -> \"wear a t-shirt\"
false -> \"wear a rain jacket\"
false -> \"wear a sweater\"
\"snowy\" === \"snowy\" -> \"wear a thick coat\"
end
```
",
    "
snowy equals snowy.
```elixir
case \"snowy\" do
false -> \"wear a t-shirt\"
false -> \"wear a rain jacket\"
false -> \"wear a sweater\"
true -> \"wear a thick coat\"
end
```
",
    "
Return wear a thick coat.
```elixir
\"wear a thick coat\"
```
"
  ]
  |> Enum.map(&Kino.Markdown.new/1)
  |> Kino.Slide.new()
end
# Step-by-step slideshow showing how a `cond` expression evaluates each
# clause in order until one is truthy. Each slide repeats the whole
# expression, replacing already-evaluated clauses with their `false` result.
def slide(:cond) do
[
"
Check if plant is dead.
```elixir
daylight = true
days_since_watered = 14
plant = \"healthy\"
cond do
plant === \"dead\" -> \"get a new plant\"
plant === \"wilting\" && !daylight -> \"use a UV light\"
plant === \"wilting\" && daylight -> \"put the plant in sunlight\"
days_since_watered >= 14 -> \"water the plant\"
end
```
",
"
`false`, so check if plant is wilting and it's dark.
```elixir
daylight = true
days_since_watered = 14
plant = \"healthy\"
cond do
false -> \"get a new plant\"
plant === \"wilting\" && !daylight -> \"use a UV light\"
plant === \"wilting\" && daylight -> \"put the plant in sunlight\"
days_since_watered >= 14 -> \"water the plant\"
end
```
",
"
`false`, so check if plant is wilting and it's sunny.
```elixir
daylight = true
days_since_watered = 14
plant = \"healthy\"
cond do
false -> \"get a new plant\"
false -> \"use a UV light\"
plant === \"wilting\" && daylight -> \"put the plant in sunlight\"
days_since_watered >= 14 -> \"water the plant\"
end
```
",
"
`false`, so check if days_since_watered is >= 14.
```elixir
daylight = true
days_since_watered = 14
plant = \"healthy\"
cond do
false -> \"get a new plant\"
false -> \"use a UV light\"
false -> \"put the plant in sunlight\"
days_since_watered >= 14 -> \"water the plant\"
end
```
",
"
`true`! days_since_watered is >= 14.
```elixir
daylight = true
days_since_watered = 14
plant = \"healthy\"
cond do
false -> \"get a new plant\"
false -> \"use a UV light\"
false -> \"put the plant in sunlight\"
true -> \"water the plant\"
end
```
",
"
Water the plant.
```elixir
\"water the plant\"
```
"
]
# Each markdown string becomes one slide in the Kino slideshow.
|> Enum.map(&Kino.Markdown.new/1)
|> Kino.Slide.new()
end
# Step-by-step slideshow showing how calling an anonymous function
# substitutes the argument into the function body and evaluates it.
def slide(:functions) do
[
"
First, we define the `double` function and call it.
```elixir
double = fn number -> number * 2 end
double.(3)
```
",
"
The `double` function executes in place of the `double.(call)` with `number` bound to `3`.
```elixir
double = fn number -> number * 2 end
fn 3 -> 3 * 2 end
```
",
"
The function evaluates the function body between the `->` and the `end`
```elixir
double = fn number -> number * 2 end
3 * 2
```
",
"
`3` * `2` is `6`, so the function call returns `6`.
```elixir
double = fn number -> number * 2 end
6
```
"
]
# Each markdown string becomes one slide in the Kino slideshow.
|> Enum.map(&Kino.Markdown.new/1)
|> Kino.Slide.new()
end
# Step-by-step slideshow tracing a recursive sum over a list down to the
# base case. The second slide previously showed
# `def sum([head | tail], accumulator do` — missing the closing paren —
# which would not compile if a learner copied it; now fixed.
def slide(:recursion) do
  [
    "
The base case would return the accumulator when the list is empty.
```elixir
defmodule Recursion do
def sum([], accumulator), do: accumulator
end
```
",
    "
Otherwise, we'll add the head to an accumulator and recurse on the tail of the list.
```elixir
defmodule Recursion do
def sum([], accumulator), do: accumulator
def sum([head | tail], accumulator) do
sum(tail, accumulator + head)
end
end
Recursion.sum([4, 5, 6], 0)
```
",
    "
The `sum/2` function is called with the list `[4, 5, 6]`.
```elixir
defmodule Recursion do
def sum([], accumulator), do: accumulator
def sum([4 | [5, 6]], 0) do
sum([5, 6], 4 + 0)
end
end
Recursion.sum([4, 5, 6], 0)
```
",
    "
The `sum/2` function is called again on the tail of the list `[5, 6]`.
```elixir
defmodule Recursion do
def sum([], accumulator), do: accumulator
def sum([5 | [6]], 4) do
sum([6], 5 + 4)
end
end
Recursion.sum([4, 5, 6], 0)
```
",
    "
The `sum/2` function is called again on the tail of the list `[6]`.
```elixir
defmodule Recursion do
def sum([], accumulator), do: accumulator
def sum([6 | []], 9) do
sum([], 6 + 9)
end
end
Recursion.sum([4, 5, 6], 0)
```
",
    "
The `sum/2` function is called again on the tail of the list `[]`. This triggers the base case to return the accumulator.
```elixir
defmodule Recursion do
def sum([], 15), do: 15
def sum([head | tail], accumulator) do
sum(tail, accumulator + head)
end
end
Recursion.sum([4, 5, 6], 0)
```
",
    "
And our function returns `15`.
```elixir
defmodule Recursion do
def sum([], accumulator), do: accumulator
def sum([head | tail], accumulator) do
sum(tail, accumulator + head)
end
end
15
```
"
  ]
  |> Enum.map(&Kino.Markdown.new/1)
  |> Kino.Slide.new()
end
# Step-by-step slideshow tracing `Enum.reduce/2` over a list: the first
# element seeds the accumulator, then each remaining element is folded in.
def slide(:reduce) do
[
"
First, we define the call the `Enum.reduce/2` function with a list, and a function.
```elixir
Enum.reduce([1, 2, 3, 4], fn integer, accumulator -> integer + accumulator end)
```
",
"
The first element in the list `1` is the initial accumulator value.
```elixir
Enum.reduce([2, 3, 4], fn integer, 1 -> integer + 1 end)
```
",
"
The function is called on the next element `2`. The next accumulator is 2 + 1
```elixir
Enum.reduce([3, 4], fn 2, 1 -> 2 + 1 end)
```
",
"
The function is called on the next element `3`. The next accumulator is 3 + 3
```elixir
Enum.reduce([4], fn 3, 3 -> 3 + 3 end)
```
",
"
The function is called on the next element `4`. The next accumulator is 4 + 6
```elixir
Enum.reduce([], fn 4, 6 -> 4 + 6 end)
```
",
"
4 + 6 equals 10.
```elixir
Enum.reduce([], fn 4, 6 -> 10 end)
```
",
"
`10` is the last accumulator value, so `Enum.reduce/2` returns `10`.
```elixir
10
```
"
]
# Each markdown string becomes one slide in the Kino slideshow.
|> Enum.map(&Kino.Markdown.new/1)
|> Kino.Slide.new()
end
@doc """
Create a Data Table
iex> %Kino.JS.Live{} = Utils.table(:exponential_growth)
iex> %Kino.JS.Live{} = Utils.table(:factorial_complexity)
iex> %Kino.JS.Live{} = Utils.table(:fib_cache)
iex> %Kino.JS.Live{} = Utils.table(:lists_vs_tuples)
iex> %Kino.JS.Live{} = Utils.table(:n2)
iex> %Kino.JS.Live{} = Utils.table(:n3)
iex> %Kino.JS.Live{} = Utils.table(:unicode)
"""
# Table of base-10 exponential growth for n = 1..100.
def table(:exponential_growth) do
  Enum.map(1..100, fn each ->
    %{
      "# of elements": each,
      result: 10 ** each,
      # The displayed equation now matches the computed result; the
      # original showed "100 ** n" while computing `10 ** each`.
      equation: "10 ** #{each}"
    }
  end)
  |> Kino.DataTable.new()
end
# Table of factorial growth for n = 1..10, with a human-readable
# "n * (n-1) * ... * 1" equation column.
def table(:factorial_complexity) do
  Enum.map(1..10, fn each ->
    # e.g. for each = 3: "3 * 2 * 1".
    equation =
      Enum.map_join(each..1, fn
        ^each -> "#{each}"
        n -> " * #{n}"
      end)

    # n! — must agree with the equation column. The original computed
    # `each ** each` (n^n), contradicting the factorial equation shown.
    factorial = Enum.reduce(1..each, 1, &*/2)

    %{"# of elements": each, result: factorial, equation: equation}
  end)
  |> Kino.DataTable.new()
end
# Table of the first 151 Fibonacci numbers (index 0 -> 0, 1 -> 1, ...).
# The original defined a nested `defmodule Fib` inside this function, which
# is redefined (with a compiler warning) on every call; generating the
# sequence inline avoids that side effect.
def table(:fib_cache) do
  sequence =
    {1, 1}
    |> Stream.unfold(fn {a, b} -> {a, {b, a + b}} end)
    |> Enum.take(150)

  data =
    [0 | sequence]
    |> Enum.with_index()
    |> Enum.map(fn {value, index} -> %{input: index, output: value} end)

  Kino.DataTable.new(data)
end
# Reference table comparing the time complexity of common operations on
# tuples vs lists. "O(n*)" marks list operations whose cost depends on
# where in the list the target element sits.
def table(:lists_vs_tuples) do
Kino.DataTable.new(
[
%{operation: "length", tuple: "O(1)", list: "O(n)"},
%{operation: "prepend", tuple: "O(n)", list: "O(1)"},
%{operation: "insert", tuple: "O(n)", list: "O(n*)"},
%{operation: "access", tuple: "O(1)", list: "O(n*)"},
%{operation: "update/replace", tuple: "O(n)", list: "O(n*)"},
%{operation: "delete", tuple: "O(n)", list: "O(n*)"},
%{operation: "concatenation", tuple: "O(n1 + n2)", list: "O(n1)"}
],
# Explicit key order so the rendered columns read operation | tuple | list.
keys: [:operation, :tuple, :list]
)
end
# Table of quadratic (n^2) growth for n = 1..1000.
def table(:n2) do
  for n <- 1..1000 do
    %{
      "# of elements": n,
      result: n * n,
      notation: "#{n}**2",
      equation: "#{n} * #{n}"
    }
  end
  |> Kino.DataTable.new()
end
# Table of cubic (n^3) growth for n = 1..1000.
def table(:n3) do
  for n <- 1..1000 do
    %{
      "# of elements": n,
      result: n * n * n,
      notation: "#{n}**3",
      equation: "#{n} * #{n} * #{n}"
    }
  end
  |> Kino.DataTable.new()
end
# Table mapping each ASCII letter to its Unicode code point. Since every
# character here is single-byte ASCII, iterating utf8 segments of the
# binary yields the same rows as `String.codepoints/1` did.
def table(:unicode) do
  for <<code_point::utf8 <- "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz">> do
    %{character: <<code_point::utf8>>, code_point: code_point}
  end
  |> Kino.DataTable.new()
end
@doc ~S"""
iex> Utils.test(:naming_numbers, fn int -> Enum.at(["zero", "one", "two", "three", "four", "five", "six", "seven", "eight", "nine"], int) end)
"""
# Runs an ExUnit suite against a learner-supplied function that should map
# the integers 0..9 to their English names.
def test(:naming_numbers, convert_to_named_integer) do
ExUnit.start(auto_run: false)
# The learner's function is smuggled into the test module via
# :persistent_term because test bodies cannot close over this scope.
:persistent_term.put(:convert_to_named_integer, convert_to_named_integer)
# NOTE(review): calling this twice redefines NamingNumbers, which emits a
# module-redefinition warning — presumably acceptable in a notebook context.
defmodule NamingNumbers do
use ExUnit.Case
test "convert_to_named_integer" do
answer_key = [
{0, "zero"},
{1, "one"},
{2, "two"},
{3, "three"},
{4, "four"},
{5, "five"},
{6, "six"},
{7, "seven"},
{8, "eight"},
{9, "nine"}
]
convert_to_named_integer = :persistent_term.get(:convert_to_named_integer)
Enum.each(answer_key, fn {key, value} ->
assert convert_to_named_integer.(key) === value
end)
end
end
ExUnit.run()
end
# Runs an ExUnit suite asserting the learner copied the example file.
def test(:file_copy_challenge) do
  ExUnit.start(auto_run: false)

  defmodule CopyExample do
    use ExUnit.Case

    test "file was copied" do
      # File.read/1 returns {:ok, contents} on success. The original
      # compared the tagged tuple to a bare string, so the assertion
      # could never pass even when the file was copied correctly.
      assert File.read("data/copied_example") === {:ok, "Copy me!"}
    end
  end

  ExUnit.run()
end
@doc ~S"""
iex> %Kino.Markdown{} = Utils.visual(:loading_bar, 50)
iex> %Kino.Image{} = Utils.visual(:light_control, true)
"""
# Renders a horizontal progress bar filled to `percentage` percent.
# Assumes `percentage` is in 0..100 — TODO confirm callers stay in range.
def visual(:loading_bar, percentage) do
Kino.Markdown.new("
<div style=\"height: 20px; width: 100%; background-color: grey\">
<div style=\"height: 20px; width: #{percentage}%; background-color: green\"></div>
</div>
")
end
# Shows a lightbulb image: "on" when `power` is truthy, "off" otherwise.
# Images are loaded relative to this source file's directory.
def visual(:light_control, power) do
  image_file = if power, do: "/images/on.png", else: "/images/off.png"

  (__DIR__ <> image_file)
  |> File.read!()
  |> Kino.Image.new(:png)
end
# Reference table of comparison-operator examples (strict `===` vs lax `==`,
# and the ordering operators).
# NOTE(review): this clause is not listed in the `@doc` doctests for
# `visual/1,2` above — presumably intentional; confirm before adding one.
def visual(:comparison_examples) do
Kino.DataTable.new([
%{comparison: "5 === 5", result: true},
%{comparison: "5 === 5.0", result: false},
%{comparison: "5 == 5.0", result: true},
%{comparison: "5 === 4", result: false},
%{comparison: "5 > 4", result: true},
%{comparison: "4 > 5", result: false},
%{comparison: "5 < 4", result: false},
%{comparison: "4 < 5", result: true},
%{comparison: "5 >= 5", result: true},
%{comparison: "5 >= 4", result: true},
%{comparison: "4 >= 5", result: false},
%{comparison: "5 <= 5", result: true},
%{comparison: "4 <= 5", result: true},
%{comparison: "5 <= 4", result: false}
])
end
end
|
utils/lib/utils.ex
| 0.593491 | 0.45048 |
utils.ex
|
starcoder
|
defmodule AWS.WAFV2 do
@moduledoc """
<note> This is the latest version of the **AWS WAF** API, released in
November, 2019. The names of the entities that you use to access this API,
like endpoints and namespaces, all have the versioning information added,
like "V2" or "v2", to distinguish from the prior version. We recommend
migrating your resources to this version, because it has a number of
significant improvements.
If you used AWS WAF prior to this release, you can't use this AWS WAFV2 API
to access any AWS WAF resources that you created before. You can access
your old rules, web ACLs, and other AWS WAF resources only through the AWS
WAF Classic APIs. The AWS WAF Classic APIs have retained the prior names,
endpoints, and namespaces.
For information, including how to migrate your AWS WAF resources to this
version, see the [AWS WAF Developer
Guide](https://docs.aws.amazon.com/waf/latest/developerguide/waf-chapter.html).
</note> AWS WAF is a web application firewall that lets you monitor the
HTTP and HTTPS requests that are forwarded to Amazon CloudFront, an Amazon
API Gateway API, or an Application Load Balancer. AWS WAF also lets you
control access to your content. Based on conditions that you specify, such
as the IP addresses that requests originate from or the values of query
strings, API Gateway, CloudFront, or the Application Load Balancer responds
to requests either with the requested content or with an HTTP 403 status
code (Forbidden). You also can configure CloudFront to return a custom
error page when a request is blocked.
This API guide is for developers who need detailed information about AWS
WAF API actions, data types, and errors. For detailed information about AWS
WAF features and an overview of how to use AWS WAF, see the [AWS WAF
Developer Guide](https://docs.aws.amazon.com/waf/latest/developerguide/).
You can make calls using the endpoints listed in [AWS Service Endpoints for
AWS
WAF](https://docs.aws.amazon.com/general/latest/gr/rande.html#waf_region).
<ul> <li> For regional applications, you can use any of the endpoints in
the list. A regional application can be an Application Load Balancer (ALB)
or an API Gateway stage.
</li> <li> For AWS CloudFront applications, you must use the API endpoint
listed for US East (N. Virginia): us-east-1.
</li> </ul> Alternatively, you can use one of the AWS SDKs to access an API
that's tailored to the programming language or platform that you're using.
For more information, see [AWS SDKs](http://aws.amazon.com/tools/#SDKs).
We currently provide two versions of the AWS WAF API: this API and the
prior versions, the classic AWS WAF APIs. This new API provides the same
functionality as the older versions, with the following major improvements:
<ul> <li> You use one API for both global and regional applications. Where
you need to distinguish the scope, you specify a `Scope` parameter and set
it to `CLOUDFRONT` or `REGIONAL`.
</li> <li> You can define a Web ACL or rule group with a single call, and
update it with a single call. You define all rule specifications in JSON
format, and pass them to your rule group or Web ACL calls.
</li> <li> The limits AWS WAF places on the use of rules more closely
reflects the cost of running each type of rule. Rule groups include
capacity settings, so you know the maximum cost of a rule group when you
use it.
</li> </ul>
"""
@doc """
Associates a Web ACL with a regional application resource (an Application
Load Balancer or an API Gateway stage) to protect that resource.

Not for CloudFront: to associate a Web ACL with a CloudFront distribution,
set the web ACL ARN in the distribution configuration via the CloudFront
`UpdateDistribution` call instead.
"""
def associate_web_a_c_l(client, input, options \\ []),
  do: request(client, "AssociateWebACL", input, options)
@doc """
Returns the web ACL capacity unit (WCU) requirements for a specified scope
and set of rules.

Use this to check the capacity a `RuleGroup` or `WebACL` would need before
creating it. WCUs reflect the relative processing cost of each rule type;
the WCU limit for web ACLs is 1,500.
"""
def check_capacity(client, input, options \\ []),
  do: request(client, "CheckCapacity", input, options)
@doc """
Creates an `IPSet`, used to identify web requests originating from specific
IP addresses or ranges — for example, to block a set of addresses that are
sending excessive requests.
"""
def create_i_p_set(client, input, options \\ []),
  do: request(client, "CreateIPSet", input, options)
@doc """
Creates a `RegexPatternSet`, referenced from a
`RegexPatternSetReferenceStatement` to have AWS WAF inspect a web request
component for the specified patterns.
"""
def create_regex_pattern_set(client, input, options \\ []),
  do: request(client, "CreateRegexPatternSet", input, options)
@doc """
Creates a `RuleGroup` per the specifications provided.

A rule group is a reusable collection of rules for a `WebACL`, with an
immutable capacity limit set at creation; updates must stay within that
capacity so consumers can rely on its cost.
"""
def create_rule_group(client, input, options \\ []),
  do: request(client, "CreateRuleGroup", input, options)
@doc """
Creates a `WebACL` per the specifications provided.

A Web ACL is a collection of rules (of types `Rule`, `RuleGroup`, and
managed rule group), each with an allow/block/count action, plus a default
action for unmatched requests. It can be associated with CloudFront
distributions, API Gateway APIs, or Application Load Balancers.
"""
def create_web_a_c_l(client, input, options \\ []),
  do: request(client, "CreateWebACL", input, options)
@doc """
Deletes all rule groups managed by AWS Firewall Manager for the specified
web ACL.

Only usable when `ManagedByFirewallManager` is false in the specified
`WebACL`.
"""
def delete_firewall_manager_rule_groups(client, input, options \\ []),
  do: request(client, "DeleteFirewallManagerRuleGroups", input, options)
@doc """
Deletes the specified `IPSet`.
"""
def delete_i_p_set(client, input, options \\ []),
  do: request(client, "DeleteIPSet", input, options)
@doc """
Deletes the `LoggingConfiguration` from the specified web ACL.
"""
def delete_logging_configuration(client, input, options \\ []),
  do: request(client, "DeleteLoggingConfiguration", input, options)
@doc """
Permanently deletes an IAM policy from the specified rule group.

Only the owner of the rule group may perform this operation.
"""
def delete_permission_policy(client, input, options \\ []),
  do: request(client, "DeletePermissionPolicy", input, options)
@doc """
Deletes the specified `RegexPatternSet`.
"""
def delete_regex_pattern_set(client, input, options \\ []),
  do: request(client, "DeleteRegexPatternSet", input, options)
@doc """
Deletes the specified `RuleGroup`.
"""
def delete_rule_group(client, input, options \\ []),
  do: request(client, "DeleteRuleGroup", input, options)
@doc """
Deletes the specified `WebACL`.

Only usable when `ManagedByFirewallManager` is false in the specified
`WebACL`.
"""
def delete_web_a_c_l(client, input, options \\ []),
  do: request(client, "DeleteWebACL", input, options)
@doc """
Provides high-level information for a managed rule group, including
descriptions of its rules.
"""
def describe_managed_rule_group(client, input, options \\ []),
  do: request(client, "DescribeManagedRuleGroup", input, options)
@doc """
Disassociates a Web ACL from a regional application resource (an
Application Load Balancer or an API Gateway stage).

Not for CloudFront: to disassociate a Web ACL from a distribution, provide
an empty web ACL ID in the CloudFront `UpdateDistribution` call instead.
"""
def disassociate_web_a_c_l(client, input, options \\ []),
  do: request(client, "DisassociateWebACL", input, options)
@doc """
Retrieves the specified `IPSet`.
"""
def get_i_p_set(client, input, options \\ []),
  do: request(client, "GetIPSet", input, options)
@doc """
Returns the `LoggingConfiguration` for the specified web ACL.
"""
def get_logging_configuration(client, input, options \\ []),
  do: request(client, "GetLoggingConfiguration", input, options)
@doc """
Returns the IAM policy attached to the specified rule group.

Only the owner of the rule group may perform this operation.
"""
def get_permission_policy(client, input, options \\ []),
  do: request(client, "GetPermissionPolicy", input, options)
@doc """
Retrieves the keys currently blocked by a rate-based rule.

At most 10,000 managed keys can be blocked per rate-based rule; if more
addresses exceed the rate limit, those with the highest rates are blocked.
"""
def get_rate_based_statement_managed_keys(client, input, options \\ []),
  do: request(client, "GetRateBasedStatementManagedKeys", input, options)
@doc """
<note> This is the latest version of **AWS WAF**, named AWS WAFV2, released
in November, 2019. For information, including how to migrate your AWS WAF
resources from the prior release, see the [AWS WAF Developer
Guide](https://docs.aws.amazon.com/waf/latest/developerguide/waf-chapter.html).
</note> Retrieves the specified `RegexPatternSet`.
"""
def get_regex_pattern_set(client, input, options \\ []) do
request(client, "GetRegexPatternSet", input, options)
end
@doc """
<note> This is the latest version of **AWS WAF**, named AWS WAFV2, released
in November, 2019. For information, including how to migrate your AWS WAF
resources from the prior release, see the [AWS WAF Developer
Guide](https://docs.aws.amazon.com/waf/latest/developerguide/waf-chapter.html).
</note> Retrieves the specified `RuleGroup`.
"""
def get_rule_group(client, input, options \\ []) do
request(client, "GetRuleGroup", input, options)
end
@doc """
<note> This is the latest version of **AWS WAF**, named AWS WAFV2, released
in November, 2019. For information, including how to migrate your AWS WAF
resources from the prior release, see the [AWS WAF Developer
Guide](https://docs.aws.amazon.com/waf/latest/developerguide/waf-chapter.html).
</note> Gets detailed information about a specified number of requests--a
sample--that AWS WAF randomly selects from among the first 5,000 requests
that your AWS resource received during a time range that you choose. You
can specify a sample size of up to 500 requests, and you can specify any
time range in the previous three hours.
`GetSampledRequests` returns a time range, which is usually the time range
that you specified. However, if your resource (such as a CloudFront
distribution) received 5,000 requests before the specified time range
elapsed, `GetSampledRequests` returns an updated time range. This new time
range indicates the actual period during which AWS WAF selected the
requests in the sample.
"""
def get_sampled_requests(client, input, options \\ []) do
request(client, "GetSampledRequests", input, options)
end
@doc """
<note> This is the latest version of **AWS WAF**, named AWS WAFV2, released
in November, 2019. For information, including how to migrate your AWS WAF
resources from the prior release, see the [AWS WAF Developer
Guide](https://docs.aws.amazon.com/waf/latest/developerguide/waf-chapter.html).
</note> Retrieves the specified `WebACL`.
"""
def get_web_a_c_l(client, input, options \\ []) do
request(client, "GetWebACL", input, options)
end
@doc """
<note> This is the latest version of **AWS WAF**, named AWS WAFV2, released
in November, 2019. For information, including how to migrate your AWS WAF
resources from the prior release, see the [AWS WAF Developer
Guide](https://docs.aws.amazon.com/waf/latest/developerguide/waf-chapter.html).
</note> Retrieves the `WebACL` for the specified resource.
"""
def get_web_a_c_l_for_resource(client, input, options \\ []) do
request(client, "GetWebACLForResource", input, options)
end
@doc """
Retrieves an array of managed rule groups available for you to use,
including all AWS Managed Rules rule groups and the AWS Marketplace managed
rule groups you're subscribed to.

Part of AWS WAFV2 (released November 2019). For migrating resources from
the prior AWS WAF release, see the [AWS WAF Developer
Guide](https://docs.aws.amazon.com/waf/latest/developerguide/waf-chapter.html).
"""
def list_available_managed_rule_groups(client, input, options \\ []),
  do: request(client, "ListAvailableManagedRuleGroups", input, options)

@doc """
Retrieves an array of `IPSetSummary` objects for the IP sets you manage.

Part of AWS WAFV2 (released November 2019). For migrating resources from
the prior AWS WAF release, see the [AWS WAF Developer
Guide](https://docs.aws.amazon.com/waf/latest/developerguide/waf-chapter.html).
"""
def list_i_p_sets(client, input, options \\ []),
  do: request(client, "ListIPSets", input, options)

@doc """
Retrieves an array of your `LoggingConfiguration` objects.

Part of AWS WAFV2 (released November 2019). For migrating resources from
the prior AWS WAF release, see the [AWS WAF Developer
Guide](https://docs.aws.amazon.com/waf/latest/developerguide/waf-chapter.html).
"""
def list_logging_configurations(client, input, options \\ []),
  do: request(client, "ListLoggingConfigurations", input, options)

@doc """
Retrieves an array of `RegexPatternSetSummary` objects for the regex
pattern sets you manage.

Part of AWS WAFV2 (released November 2019). For migrating resources from
the prior AWS WAF release, see the [AWS WAF Developer
Guide](https://docs.aws.amazon.com/waf/latest/developerguide/waf-chapter.html).
"""
def list_regex_pattern_sets(client, input, options \\ []),
  do: request(client, "ListRegexPatternSets", input, options)

@doc """
Retrieves an array of the Amazon Resource Names (ARNs) of the regional
resources associated with the specified web ACL. For AWS CloudFront
resources, use the CloudFront call `ListDistributionsByWebACLId` instead.

Part of AWS WAFV2 (released November 2019). For migrating resources from
the prior AWS WAF release, see the [AWS WAF Developer
Guide](https://docs.aws.amazon.com/waf/latest/developerguide/waf-chapter.html).
"""
def list_resources_for_web_a_c_l(client, input, options \\ []),
  do: request(client, "ListResourcesForWebACL", input, options)

@doc """
Retrieves an array of `RuleGroupSummary` objects for the rule groups you
manage.

Part of AWS WAFV2 (released November 2019). For migrating resources from
the prior AWS WAF release, see the [AWS WAF Developer
Guide](https://docs.aws.amazon.com/waf/latest/developerguide/waf-chapter.html).
"""
def list_rule_groups(client, input, options \\ []),
  do: request(client, "ListRuleGroups", input, options)

@doc """
Retrieves the `TagInfoForResource` for the specified resource.

Tags are key:value pairs used to categorize and manage resources (for
example, a "customer" key holding a customer name or ID), with up to 50
tags per resource. You can tag the AWS WAF resources you manage — web ACLs,
rule groups, IP sets and regex pattern sets — but tags cannot be managed or
viewed through the AWS WAF console.

Part of AWS WAFV2 (released November 2019). For migrating resources from
the prior AWS WAF release, see the [AWS WAF Developer
Guide](https://docs.aws.amazon.com/waf/latest/developerguide/waf-chapter.html).
"""
def list_tags_for_resource(client, input, options \\ []),
  do: request(client, "ListTagsForResource", input, options)

@doc """
Retrieves an array of `WebACLSummary` objects for the web ACLs you manage.

Part of AWS WAFV2 (released November 2019). For migrating resources from
the prior AWS WAF release, see the [AWS WAF Developer
Guide](https://docs.aws.amazon.com/waf/latest/developerguide/waf-chapter.html).
"""
def list_web_a_c_ls(client, input, options \\ []),
  do: request(client, "ListWebACLs", input, options)
@doc """
Enables the specified `LoggingConfiguration`, to start logging from a web
ACL according to the configuration provided.

To capture information about all traffic AWS WAF inspects:

  1. Create an Amazon Kinesis Data Firehose with a PUT source, in the
     Region where you operate (always US East (N. Virginia) when logging
     Amazon CloudFront). Name it with the `aws-waf-logs-` prefix, e.g.
     `aws-waf-logs-us-east-2-analytics`. Do not create the firehose from a
     Kinesis stream source.
  2. Associate that firehose with your web ACL via a
     `PutLoggingConfiguration` request.

On success, AWS WAF creates a service-linked role with the permissions
needed to write logs to the firehose. See [Logging Web ACL Traffic
Information](https://docs.aws.amazon.com/waf/latest/developerguide/logging.html)
in the *AWS WAF Developer Guide*.

Part of AWS WAFV2 (released November 2019). For migrating resources from
the prior AWS WAF release, see the [AWS WAF Developer
Guide](https://docs.aws.amazon.com/waf/latest/developerguide/waf-chapter.html).
"""
def put_logging_configuration(client, input, options \\ []),
  do: request(client, "PutLoggingConfiguration", input, options)

@doc """
Attaches an IAM policy to the specified resource, to share a rule group
across accounts.

You must be the owner of the rule group. Restrictions:

  * Only one policy may be attached per `PutPermissionPolicy` request.
  * The ARN must be a valid WAF `RuleGroup` ARN and the rule group must
    exist in the same region.
  * The requesting user must own the rule group.
"""
def put_permission_policy(client, input, options \\ []),
  do: request(client, "PutPermissionPolicy", input, options)

@doc """
Associates tags with the specified AWS resource.

Tags are key:value pairs used to categorize and manage resources (for
example, a "customer" key holding a customer name or ID), with up to 50
tags per resource. You can tag the AWS WAF resources you manage — web ACLs,
rule groups, IP sets and regex pattern sets — but tags cannot be managed or
viewed through the AWS WAF console.

Part of AWS WAFV2 (released November 2019). For migrating resources from
the prior AWS WAF release, see the [AWS WAF Developer
Guide](https://docs.aws.amazon.com/waf/latest/developerguide/waf-chapter.html).
"""
def tag_resource(client, input, options \\ []),
  do: request(client, "TagResource", input, options)

@doc """
Disassociates tags from an AWS resource.

Tags are key:value pairs you can associate with AWS resources — for
example, key "customer" with value "companyA" — up to 50 tags per resource.

Part of AWS WAFV2 (released November 2019). For migrating resources from
the prior AWS WAF release, see the [AWS WAF Developer
Guide](https://docs.aws.amazon.com/waf/latest/developerguide/waf-chapter.html).
"""
def untag_resource(client, input, options \\ []),
  do: request(client, "UntagResource", input, options)
@doc """
Updates the specified `IPSet`.

Part of AWS WAFV2 (released November 2019). For migrating resources from
the prior AWS WAF release, see the [AWS WAF Developer
Guide](https://docs.aws.amazon.com/waf/latest/developerguide/waf-chapter.html).
"""
def update_i_p_set(client, input, options \\ []),
  do: request(client, "UpdateIPSet", input, options)

@doc """
Updates the specified `RegexPatternSet`.

Part of AWS WAFV2 (released November 2019). For migrating resources from
the prior AWS WAF release, see the [AWS WAF Developer
Guide](https://docs.aws.amazon.com/waf/latest/developerguide/waf-chapter.html).
"""
def update_regex_pattern_set(client, input, options \\ []),
  do: request(client, "UpdateRegexPatternSet", input, options)

@doc """
Updates the specified `RuleGroup`.

A rule group is a collection of rules for inspecting and controlling web
requests, usable from a `WebACL`. Its capacity limit is set immutably at
creation time; updates must stay within that capacity, which lets others
reuse the rule group with confidence in its capacity requirements.

Part of AWS WAFV2 (released November 2019). For migrating resources from
the prior AWS WAF release, see the [AWS WAF Developer
Guide](https://docs.aws.amazon.com/waf/latest/developerguide/waf-chapter.html).
"""
def update_rule_group(client, input, options \\ []),
  do: request(client, "UpdateRuleGroup", input, options)

@doc """
Updates the specified `WebACL`.

A web ACL is a collection of rules — of type `Rule`, `RuleGroup` or managed
rule group — each with an action (allow, block or count) for matching
requests, plus a default action (allow or block) for requests matching no
rule. A web ACL can protect Amazon CloudFront distributions, Amazon API
Gateway APIs, or Application Load Balancers.

Part of AWS WAFV2 (released November 2019). For migrating resources from
the prior AWS WAF release, see the [AWS WAF Developer
Guide](https://docs.aws.amazon.com/waf/latest/developerguide/waf-chapter.html).
"""
def update_web_a_c_l(client, input, options \\ []),
  do: request(client, "UpdateWebACL", input, options)
@spec request(AWS.Client.t(), binary(), map(), list()) ::
        {:ok, Poison.Parser.t() | nil, Poison.Response.t()}
        | {:error, Poison.Parser.t()}
        | {:error, HTTPoison.Error.t()}
# Signs and dispatches a WAFV2 JSON API call.
#
# `action` is the X-Amz-Target operation name (e.g. "GetIPSet"), `input` is
# the request payload (JSON-encoded with Poison), and `options` are passed
# through to `HTTPoison.post/4`. A 200 with an empty body yields
# `{:ok, nil, response}`; a 200 with a body yields the parsed JSON; any
# other HTTP status has its body parsed and returned as `{:error, parsed}`.
defp request(client, action, input, options) do
  client = %{client | service: "wafv2"}
  host = build_host("wafv2", client)
  url = build_url(host, client)

  headers = [
    {"Host", host},
    {"Content-Type", "application/x-amz-json-1.1"},
    {"X-Amz-Target", "AWSWAF_20190729.#{action}"}
  ]

  payload = Poison.Encoder.encode(input, %{})
  # AWS Signature Version 4 over the final URL, headers and body.
  headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)

  case HTTPoison.post(url, payload, headers, options) do
    {:ok, %HTTPoison.Response{status_code: 200, body: ""} = response} ->
      {:ok, nil, response}

    {:ok, %HTTPoison.Response{status_code: 200, body: body} = response} ->
      {:ok, Poison.Parser.parse!(body, %{}), response}

    {:ok, %HTTPoison.Response{body: body}} ->
      error = Poison.Parser.parse!(body, %{})
      {:error, error}

    {:error, %HTTPoison.Error{reason: reason}} ->
      {:error, %HTTPoison.Error{reason: reason}}
  end
end
# The "local" region targets a development endpoint on localhost.
defp build_host(_endpoint_prefix, %{region: "local"}), do: "localhost"

defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}),
  do: "#{endpoint_prefix}.#{region}.#{endpoint}"

# Assembles the full request URL from the client's protocol and port.
defp build_url(host, %{proto: proto, port: port}), do: "#{proto}://#{host}:#{port}/"
end
|
lib/aws/waf_v2.ex
| 0.887247 | 0.560192 |
waf_v2.ex
|
starcoder
|
defmodule RationalNumbers do
  @moduledoc false

  @type rational :: {integer, integer}

  @doc """
  Add two rational numbers.
  """
  @spec add(a :: rational, b :: rational) :: rational
  def add({a1, b1}, {a2, b2}) do
    # a/b + c/d = (a*d + c*b) / (b*d). `reduce/1` normalizes a zero sum to
    # {0, 1}, so no additive-inverse special case is needed.
    reduce({a1 * b2 + a2 * b1, b1 * b2})
  end

  @doc """
  Subtract two rational numbers.
  """
  @spec subtract(a :: rational, b :: rational) :: rational
  def subtract({a1, b1}, {a2, b2}) do
    reduce({a1 * b2 - a2 * b1, b1 * b2})
  end

  @doc """
  Multiply two rational numbers.
  """
  @spec multiply(a :: rational, b :: rational) :: rational
  def multiply({a1, b1}, {a2, b2}) do
    reduce({a1 * a2, b1 * b2})
  end

  @doc """
  Divide two rational numbers.
  """
  @spec divide_by(num :: rational, den :: rational) :: rational
  def divide_by({a1, b1}, {a2, b2}) do
    # (a/b) / (c/d) = (a*d) / (b*c). BUG FIX: the old version computed the
    # result sign from the numerators only, so negative *denominators* were
    # ignored (e.g. {1,-2} / {1,2} came back {1,1} instead of {-1,1}).
    # Going through reduce/1 handles all sign combinations.
    reduce({a1 * b2, a2 * b1})
  end

  @doc """
  Absolute value of a rational number.
  """
  @spec abs(a :: rational) :: rational
  def abs({a, b}) do
    {Kernel.abs(a), Kernel.abs(b)}
  end

  @doc """
  Exponentiation of a rational number by an integer.
  """
  @spec pow_rational(a :: rational, n :: integer) :: rational
  def pow_rational({a, b}, n) when n >= 0 do
    reduce({Integer.pow(a, n), Integer.pow(b, n)})
  end

  # A negative exponent inverts the fraction: (a/b)^-n = (b/a)^n.
  def pow_rational({a, b}, n) when n < 0 do
    m = Kernel.abs(n)
    reduce({Integer.pow(b, m), Integer.pow(a, m)})
  end

  @doc """
  Exponentiation of a real number by a rational number.
  """
  @spec pow_real(x :: integer, n :: rational) :: float
  def pow_real(x, {a, b}) do
    # x^(a/b) = (x^a)^(1/b)
    Float.pow(Float.pow(1.0 * x, 1.0 * a), 1 / b)
  end

  @doc """
  Reduce a rational number to its lowest terms. The sign is carried by the
  numerator and the denominator is always positive; zero reduces to {0, 1}.
  """
  @spec reduce(a :: rational) :: rational
  def reduce({a, b}) do
    # BUG FIX: the sign of a fraction is the *product* of the component
    # signs, not `min(sign(a), sign(b))` — the old rule reduced {-1, -2}
    # to {-1, 2} instead of {1, 2}.
    sign = sign(a) * sign(b)
    {abs_a, abs_b} = RationalNumbers.abs({a, b})
    gcd = Integer.gcd(abs_a, abs_b)
    # Integer `div/2` keeps arbitrary-precision exactness; the old
    # `trunc(x / gcd)` went through floats and could lose precision on
    # large integers.
    {sign * div(abs_a, gcd), div(abs_b, gcd)}
  end

  # +1 for non-negative numbers, -1 otherwise (0 maps to +1, which is
  # harmless because a zero numerator forces a zero product anyway).
  defp sign(number) do
    (0 <= number && 1) || -1
  end
end
|
rational-numbers/lib/rational_numbers.ex
| 0.83104 | 0.482673 |
rational_numbers.ex
|
starcoder
|
defmodule Ash.Helpers do
  @moduledoc false

  # Best-effort load of a module so later reflection works; always :ok.
  @spec try_compile(term) :: :ok
  def try_compile(module) when is_atom(module) do
    Code.ensure_loaded(module)
    :ok
  end

  def try_compile(_), do: :ok

  # Inspects the module's compile-time attributes for `behaviour`. The
  # rescue covers modules for which module_info/1 is unavailable (e.g. not
  # compiled/loaded), in which case we conservatively answer false.
  def implements_behaviour?(module, behaviour) do
    :attributes
    |> module.module_info()
    |> Enum.flat_map(fn
      {:behaviour, value} -> List.wrap(value)
      _ -> []
    end)
    |> Enum.any?(&(&1 == behaviour))
  rescue
    _ ->
      false
  end

  # Deserializes `binary`, then walks the resulting term and raises
  # ArgumentError if it (recursively) contains anything executable, such as
  # a function. NOTE(review): the traversal itself does not prevent atom
  # creation by binary_to_term — callers handling untrusted input should
  # pass [:safe] in `opts`; confirm against call sites.
  # sobelow_skip ["Misc.BinToTerm"]
  def non_executable_binary_to_term(binary, opts \\ []) when is_binary(binary) do
    term = :erlang.binary_to_term(binary, opts)
    non_executable_terms(term)
    term
  end

  # Recursive walk over the decoded term; only ever returns (a value that is
  # ignored) or raises. Lists and tuples get dedicated traversals below.
  defp non_executable_terms(list) when is_list(list) do
    non_executable_list(list)
  end

  defp non_executable_terms(tuple) when is_tuple(tuple) do
    non_executable_tuple(tuple, tuple_size(tuple))
  end

  # Maps (including structs) are folded so both keys and values are checked.
  defp non_executable_terms(map) when is_map(map) do
    folder = fn key, value, acc ->
      non_executable_terms(key)
      non_executable_terms(value)
      acc
    end

    :maps.fold(folder, map, map)
  end

  # Plain, non-executable leaf types pass through untouched.
  defp non_executable_terms(other)
       when is_atom(other) or is_number(other) or is_bitstring(other) or is_pid(other) or
              is_reference(other) do
    other
  end

  # Anything else (functions, ports, ...) is rejected.
  defp non_executable_terms(other) do
    raise ArgumentError,
          "cannot deserialize #{inspect(other)}, the term is not safe for deserialization"
  end

  # List traversal; the final clause handles improper lists, whose tail is
  # an arbitrary term rather than a list.
  defp non_executable_list([]), do: :ok

  defp non_executable_list([h | t]) when is_list(t) do
    non_executable_terms(h)
    non_executable_list(t)
  end

  defp non_executable_list([h | t]) do
    non_executable_terms(h)
    non_executable_terms(t)
  end

  # Tuple traversal by index, from the last element down to 1.
  defp non_executable_tuple(_tuple, 0), do: :ok

  defp non_executable_tuple(tuple, n) do
    non_executable_terms(:erlang.element(n, tuple))
    non_executable_tuple(tuple, n - 1)
  end

  @doc false
  # Recursively merges `right` into `left` while both sides are maps;
  # otherwise `right` wins.
  def deep_merge_maps(left, right) when is_map(left) and is_map(right) do
    Map.merge(left, right, fn _, left, right ->
      deep_merge_maps(left, right)
    end)
  end

  def deep_merge_maps(_left, right), do: right
end
|
lib/ash/helpers.ex
| 0.694095 | 0.521654 |
helpers.ex
|
starcoder
|
defmodule Viztube.Youtube do
  @moduledoc """
  This module handles Youtube API calls
  """

  @doc """
  Get a video using YT API.

  Parameters
    - Part: String with get options "snippet", "recordingDetails"....
    - Id: String with the video id

  Returns `{:ok, [results], [metadata]}`
  """
  def video(id, part) do
    TubEx.Video.get(id, part)
  end

  @doc """
  Get channel info using YT API.

  Parameters
    - id: String that represents channel id

  Returns `{:ok, [results]}`
  """
  def channel(id) do
    TubEx.Channel.get(id)
  end

  @doc """
  Get channel videos using YT API.

  Parameters
    - id: String. Channel id
    - from: Date, date to search from
    - to: Date, date to search to

  Returns `{:ok, [results], [metadata]}`
  """
  def channel_videos(id, from, to) do
    TubEx.Video.search("",
      maxResults: 50,
      safeSearch: "none",
      order: "date",
      publishedAfter: date_to_rfc3339(from),
      publishedBefore: date_to_rfc3339(to),
      channelId: id
    )
  end

  @doc """
  Get channel videos using YT API.

  Parameters
    - id: String. Channel id
    - from: Date, date to search from
    - to: Date, date to search to
    - page_token: String, page token to search (provided by a previous API
      response)

  Returns `{:ok, [results], [metadata]}`
  """
  def channel_videos(id, from, to, page_token) do
    TubEx.Video.search("",
      maxResults: 50,
      safeSearch: "none",
      order: "date",
      publishedAfter: date_to_rfc3339(from),
      publishedBefore: date_to_rfc3339(to),
      pageToken: page_token,
      channelId: id
    )
  end

  @doc """
  Do a search using YT API.

  - Dates must be in RFC 3339z format
  - Pagination uses a token provided by the API response

  Returns `{:ok, [%Youtube.video{}, ...], [metadata]}`
  """
  def search(%{
        query: query,
        pub_after: pub_after,
        pub_before: pub_before,
        order: order,
        duration: duration,
        license: license,
        definition: definition,
        page: page,
        channel: channel
      }) do
    TubEx.Video.search(query,
      maxResults: 50,
      safeSearch: "none",
      publishedAfter: pub_after,
      publishedBefore: pub_before,
      order: order,
      pageToken: page,
      videoLicense: license,
      videoDefinition: definition,
      videoDuration: duration,
      channel: channel
    )
  end

  @doc """
  Like the map-based `search/1` clause above, but consumes Phoenix.Form data
  (string-keyed params), filling in default date bounds when the form left
  them blank.
  """
  def search(form_data) do
    form =
      form_data
      |> check_after()
      |> check_before()

    search(%{
      query: form["query"],
      pub_after: date_to_rfc3339(form["pub_after"]),
      pub_before: date_to_rfc3339(form["pub_before"]),
      order: form["order"],
      duration: form["duration"],
      license: form["license"],
      definition: form["definition"],
      page: form["page"] || nil,
      channel: form["channel"] || nil
    })
  end

  # Parses an ISO-8601 naive datetime string and renders it as RFC 3339z.
  # Note: `@doc` must not be attached to defp functions (compiler warning),
  # so the private helpers below are documented with plain comments.
  defp date_to_rfc3339(date) do
    {:ok, ndt} = NaiveDateTime.from_iso8601(date)
    ndt |> Timex.to_datetime() |> Timex.format!("{RFC3339z}")
  end

  # Defaults "pub_after" to the earliest supported date when left blank.
  defp check_after(date = %{"pub_after" => ""}) do
    Map.put(date, "pub_after", "2005-01-01 00:00:00")
  end

  defp check_after(date = %{"pub_after" => _pub_after}) do
    date
  end

  # Defaults "pub_before" to "now" (UTC) when left blank.
  defp check_before(date = %{"pub_before" => ""}) do
    Map.put(date, "pub_before", NaiveDateTime.to_iso8601(NaiveDateTime.utc_now()))
  end

  defp check_before(date = %{"pub_before" => _pub_before}) do
    date
  end
end
|
apps/viztube/lib/viztube/youtube.ex
| 0.849269 | 0.484624 |
youtube.ex
|
starcoder
|
import Kernel, except: [to_binary: 1]
defmodule Macro do
@moduledoc """
This module provides conveniences for working with macros.
"""
@doc """
Returns a list of binary operators. This is available
as a macro so it can be used in guard clauses.
"""
defmacro binary_ops do
[
:===, :!==,
:==, :!=, :<=, :>=,
:&&, :||, :<>, :++, :--, :**, ://, :::, :<-, :.., :/>, :=~,
:<, :>,
:+, :-, :*, :/, :=, :|, :.,
:and, :or, :xor, :when, :in, :inlist, :inbits,
:<<<, :>>>, :|||, :&&&, :^^^, :~~~
]
end
@doc """
Returns a list of unary operators. This is available
as a macro so it can be used in guard clauses.
"""
defmacro unary_ops do
[:!, :@, :^, :not, :+, :-]
end
@doc """
Recursively escapes the given value so it can be inserted
into a syntax tree. Structures that are valid syntax nodes
(like atoms, integers, binaries) are represented by themselves.
## Examples
Macro.escape(:foo)
#=> :foo
Macro.escape({ :a, :b, :c })
#=> { :{}, 0, [:a, :b, :c] }
"""
# Two-element tuples are themselves valid quoted forms, so the halves are
# escaped in place.
def escape({ left, right }) do
  { escape(left), escape(right) }
end

# Any other tuple must be represented as a `:{}` call node in the AST.
def escape(tuple) when is_tuple(tuple) do
  { :{}, 0, escape(tuple_to_list(tuple)) }
end

# Escape each list element (`lc ... inlist` is the old-style comprehension).
def escape(list) when is_list(list) do
  lc item inlist list, do: escape(item)
end

# Atoms, numbers and binaries are already literals in the syntax tree.
def escape(other), do: other
@doc """
Converts the given expression to a binary.
## Examples
Macro.to_binary(quote do: foo.bar(1, 2, 3))
#=> "foo.bar(1, 2, 3)"
"""
def to_binary(tree)
# Variables
def to_binary({ var, _, atom }) when is_atom(atom) do
atom_to_binary(var, :utf8)
end
# Aliases
def to_binary({ :__aliases__, _, refs }) do
Enum.map_join(refs, ".", call_to_binary(&1))
end
# Blocks
def to_binary({ :__block__, _, [expr] }) do
to_binary(expr)
end
def to_binary({ :__block__, _, _ } = expr) do
block = adjust_new_lines block_to_binary(expr), "\n "
"(\n " <> block <> "\n)"
end
# Bits containers
def to_binary({ :<<>>, _, args }) do
"<<" <> Enum.map_join(args, ", ", to_binary(&1)) <> ">>"
end
# Tuple containers
def to_binary({ :{}, _, args }) do
"{" <> Enum.map_join(args, ", ", to_binary(&1)) <> "}"
end
# List containers
def to_binary({ :[], _, args }) do
"[" <> Enum.map_join(args, ", ", to_binary(&1)) <> "]"
end
# Fn keyword
def to_binary({ :fn, _, [[do: block]] }) do
"fn " <> block_to_binary(block) <> "\nend"
end
# Partial call
def to_binary({ :&, _, [num] }) do
"&#{num}"
end
# Binary ops
def to_binary({ op, _, [left, right] }) when op in binary_ops do
op_to_binary(left) <> " #{op} " <> op_to_binary(right)
end
# Unary ops
def to_binary({ op, _, [arg] }) when op in unary_ops do
atom_to_binary(op, :utf8) <> to_binary(arg)
end
# All other calls
def to_binary({ target, _, args }) when is_list(args) do
{ list, last } = Erlang.elixir_tree_helpers.split_last(args)
case is_kw_blocks?(last) do
true -> call_to_binary_with_args(target, list) <> kw_blocks_to_binary(last)
false -> call_to_binary_with_args(target, args)
end
end
# Two-item tuples
def to_binary({ left, right }) do
to_binary({ :{}, 0, [left, right] })
end
# Lists
def to_binary(list) when is_list(list) do
to_binary({ :[], 0, list })
end
# All other structures
def to_binary(other), do: Binary.Inspect.inspect(other)
# Block keywords
defmacrop kw_keywords, do: [:do, :catch, :rescue, :after, :else]

# A keyword list counts as "keyword blocks" only when it is non-empty and
# every key is one of the block keywords above.
defp is_kw_blocks?([_|_] = kw), do: Enum.all?(kw, fn({x,_}) -> x in kw_keywords end)
defp is_kw_blocks?(_), do: false

# Renders a call target: a bare atom, a dotted access chain, or any other
# node (delegated back to to_binary/1).
defp call_to_binary(atom) when is_atom(atom), do: atom_to_binary(atom, :utf8)
defp call_to_binary({ :., _, [arg] }), do: call_to_binary(arg) <> "."
defp call_to_binary({ :., _, [left, right] }), do: call_to_binary(left) <> "." <> call_to_binary(right)
defp call_to_binary(other), do: to_binary(other)

# Renders `target(arg1, arg2, ...)` with comma-joined arguments.
defp call_to_binary_with_args(target, args) do
  args = Enum.map_join(args, ", ", to_binary(&1))
  call_to_binary(target) <> "(" <> args <> ")"
end

# Renders do/catch/rescue/after/else blocks in that fixed order, ending
# with "end"; keywords absent from `kw` contribute nothing.
defp kw_blocks_to_binary(kw) do
  Enum.reduce(kw_keywords, " ", fn(x, acc) ->
    case Keyword.key?(kw, x) do
      true -> acc <> kw_block_to_binary(x, Keyword.get(kw, x))
      false -> acc
    end
  end) <> "end"
end
defp kw_block_to_binary(key, value) do
block = adjust_new_lines block_to_binary(value), "\n "
atom_to_binary(key, :utf8) <> "\n " <> block <> "\n"
end
defp block_to_binary({ :->, _, exprs }) do
Enum.map_join(exprs, "\n", fn({ left, right }) ->
left = Enum.map_join(left, ", ", to_binary(&1))
left <> " ->\n " <> adjust_new_lines block_to_binary(right), "\n "
end)
end
defp block_to_binary({ :__block__, _, exprs }) do
Enum.map_join(exprs, "\n", to_binary(&1))
end
defp block_to_binary(other), do: to_binary(other)
defp op_to_binary({ op, _, [_, _] } = expr) when op in binary_ops do
"(" <> to_binary(expr) <> ")"
end
defp op_to_binary(expr), do: to_binary(expr)
defp adjust_new_lines(block, replacement) do
bc <<x>> inbits block do
<< case x == ?\n do
true -> replacement
false -> <<x>>
end | :binary >>
end
end
@doc """
Receives an expression representation and expands it. The following
contents are expanded:
* Macros (local or remote);
* Aliases are expanded (if possible) and return atoms;
* All pseudo-variables (__FILE__, __MODULE__, etc);
In case the expression cannot be expanded, it returns the expression itself.
Notice that `Macro.expand` is not recursive and it does not
expand child expressions. For example, `!some_macro` will expand as:
iex> IO.puts Macro.to_binary Macro.expand(quote(do: !some_macro), __ENV__)
case some_macro do
false -> true
nil -> true
_ -> false
end
Notice that the `!` operator is a macro that expands to a case.
Even though `some_macro` is also a macro, it is not expanded
because it is a child expression given to `!` as argument.
## Examples
In the example below, we have a macro that generates a module
with a function named `name_length` that returns the length
of the module name. The value of this function will be calculated
at compilation time and not at runtime.
Consider the implementation below:
defmacro defmodule_with_length(name, do: block) do
length = length(atom_to_list(name))
quote do
defmodule unquote(name) do
def name_length, do: unquote(length)
unquote(block)
end
end
end
When invoked like this:
defmodule_with_length My.Module do
def other_function, do: ...
end
The compilation will fail because `My.Module` when quoted
is not an atom, but a syntax tree as follow:
{:__aliases__, 0, [:My, :Module] }
That said, we need to expand the aliases node above to an
atom, so we can retrieve its length. Expanding the node is
not straight-forward because we also need to expand the
caller aliases. For example:
alias MyHelpers, as: My
defmodule_with_length My.Module do
def other_function, do: ...
end
The final module name will be `MyHelpers.Module` and not
`My.Module`. With `Macro.expand`, such aliases are taken
into consideration. Local and remote macros are also
expanded. We could rewrite our macro above to use this
function as:
defmacro defmodule_with_length(name, do: block) do
expanded = Macro.expand(name, __CALLER__)
length = length(atom_to_list(expanded))
quote do
defmodule unquote(name) do
def name_length, do: unquote(length)
unquote(block)
end
end
end
"""
# Expands an AST node against the given macro environment. The clauses
# below handle, in order: alias nodes, `Erlang.foo` shortcuts,
# pseudo-variables, local/imported macro calls, remote (required) macro
# calls, and a passthrough for everything else.
# NOTE: this is pre-1.0 Elixir (`lc ... inlist` comprehensions, `&1`
# implicit partials, the `Erlang.` module prefix).
def expand(aliases, env)
# The first case we handle is __aliases__. In case
# aliases just contain one item, we are sure it is
# an atom, so we just expand it based on the aliases
# dict.
def expand({ :__aliases__, _, [h] }, env) when h != Elixir do
  expand_alias(h, env)
end
# In case aliases contains more than one item, we need
# to loop them checking if they are all atoms or not.
# Macros and pseudo-variables are then expanded.
def expand({ :__aliases__, _, [h|t] } = original, env) do
  # Only the head segment is looked up in the alias table; the tail is
  # kept as-is and expanded recursively below.
  aliases = case h do
    x when is_atom(x) and x != Elixir -> [expand_alias(x, env)|t]
    _ -> [h|t]
  end
  aliases = lc alias inlist aliases, do: expand(alias, env)
  # Concatenate only when every segment expanded to an atom; otherwise
  # return the original AST untouched so the caller can deal with it.
  case :lists.all(is_atom(&1), aliases) do
    true -> Erlang.elixir_aliases.concat(aliases)
    false -> original
  end
end
# Expand Erlang.foo calls
def expand({ { :., _, [{ :__aliases__, _, [:Erlang] }, atom] }, _, args }, _env) when
    is_atom(atom) and (is_atom(args) or args == []), do: atom
# Expand pseudo-variables
def expand({ :__MODULE__, _, atom }, env) when is_atom(atom), do: env.module
def expand({ :__FILE__, _, atom }, env) when is_atom(atom), do: env.file
def expand({ :__ENV__, _, atom }, env) when is_atom(atom), do: env
# Expand possible macro import invocation
def expand({ atom, line, args } = original, env) when is_atom(atom) do
  # A bare variable reference carries its context atom instead of an
  # argument list; normalize that to an empty list before dispatching.
  args = case is_atom(args) do
    true -> []
    false -> args
  end
  # Partial applications (&1 placeholders) are never expanded.
  case not is_partial?(args) do
    false -> original
    true ->
      expand = Erlang.elixir_dispatch.expand_import(line, { atom, length(args) }, args,
        env.module, env.function, env.requires, env.macros, env)
      case expand do
        { :ok, _, expanded } -> expanded
        { :error, _ } -> original
      end
  end
end
# Expand possible macro require invocation
def expand({ { :., _, [left, right] }, line, args } = original, env) when is_atom(right) do
  # Expand the receiver first; only atom receivers (modules) qualify for
  # require-based macro dispatch.
  receiver = expand(left, env)
  case is_atom(receiver) and not is_partial?(args) do
    false -> original
    true ->
      expand = Erlang.elixir_dispatch.expand_require(line, receiver, { right, length(args) },
        args, env.module, env.function, env.requires, env)
      case expand do
        { :ok, expanded } -> expanded
        { :error, _ } -> original
      end
  end
end
# Anything else is just returned
def expand(other, _env), do: other
## Helpers
# Returns true when any argument is a partial-application placeholder
# (an `{ :&, _, [_] }` node); such calls must not be macro-expanded.
defp is_partial?(args) do
  :lists.any(match?({ :&, _, [_] }, &1), args)
end
# Prefixes the alias atom with 'Elixir-' (the pre-1.0 module-name
# mangling) and resolves it against the environment's alias table.
defp expand_alias(h, env) do
  atom = list_to_atom('Elixir-' ++ atom_to_list(h))
  Erlang.elixir_aliases.lookup(atom, env.aliases)
end
end
|
lib/elixir/lib/macro.ex
| 0.620277 | 0.529932 |
macro.ex
|
starcoder
|
defmodule Shippex.Package do
  @moduledoc """
  Defines the struct for storing a `Package`, which is then passed along with
  an origin and destination address for shipping estimates. A `description` is
  optional, as it may or may not be used with various carriers.

  For USPS, a package has a `container` string which can be one of the
  pre-defined USPS containers.

  Do not pass a `weight` parameter. Instead, pass in a list of `:items` with a
  weight parameter on each of these. The weight on the package will be the sum
  of the weights of each of these. Same for `:monetary_value`.

  `:description` can optionally be passed in. Otherwise, it will be generated
  by joining the descriptions of each of the items.

      Shippex.Package.new(%{length: 8,
                            width: 8,
                            height: 8,
                            items: [
                              %{weight: 1, monetary_value: 100, description: "A"},
                              %{weight: 2, monetary_value: 200, description: "B"}
                            ]})
      # => %Package{weight: 3, monetary_value: 300, description: "A, B", ...}
  """

  alias Shippex.Item

  @enforce_keys [:length, :width, :height, :weight, :items, :monetary_value, :description]
  @fields ~w(length width height weight girth container insurance monetary_value description items)a
  defstruct @fields

  # Shape of each entry returned by `usps_flat_rate_containers/0`;
  # `rate` is the flat shipping rate in cents.
  @typep flat_rate_container() :: %{
           name: String.t(),
           rate: integer(),
           length: number(),
           width: number(),
           height: number()
         }

  @type t() :: %__MODULE__{
          length: number(),
          width: number(),
          height: number(),
          weight: number(),
          monetary_value: number(),
          girth: nil | number(),
          container: nil | String.t(),
          insurance: nil | number(),
          description: nil | String.t(),
          items: [Item.t()]
        }

  @doc """
  Builds and returns a `Package`. Use this instead of directly initializing
  the struct.

  `weight` and `monetary_value` are derived by summing the corresponding
  numeric fields of `:items`; `:description` falls back to joining the item
  descriptions when not supplied as a non-empty string.
  """
  @spec new(map()) :: t()
  def new(attrs) do
    # Normalize items into `Shippex.Item` structs; any other shape
    # (missing key, empty list) yields no items.
    items =
      case attrs do
        %{items: [_ | _] = items} -> Enum.map(items, &Item.new/1)
        _ -> []
      end

    # Sum only numeric weights, ignoring items without one.
    weight =
      items
      |> Enum.filter(&is_number(&1.weight))
      |> Enum.reduce(0, &(&1.weight + &2))

    # Same treatment for monetary value.
    monetary_value =
      items
      |> Enum.filter(&is_number(&1.monetary_value))
      |> Enum.reduce(0, &(&1.monetary_value + &2))

    # A caller-supplied non-empty description wins; otherwise join the
    # NFC-normalized item descriptions with ", ".
    description =
      case attrs do
        %{description: d} when is_binary(d) and d != "" ->
          d

        _ ->
          items
          |> Enum.filter(&is_binary(&1.description))
          |> Enum.map(&String.normalize(&1.description, :nfc))
          |> Enum.join(", ")
      end

    # Merge derived fields over the caller's map, then drop any keys
    # that are not struct fields before building the struct.
    attrs =
      attrs
      |> Map.merge(%{
        items: items,
        weight: weight,
        monetary_value: monetary_value,
        description: description
      })
      |> Map.take(@fields)

    struct(__MODULE__, attrs)
  end

  @doc """
  Returns a map of predefined containers for use with USPS. These can be
  passed to `package.container` for fetching rates.
  """
  @spec usps_containers() :: %{atom() => String.t()}
  def usps_containers() do
    %{
      box_large: "Lg Flat Rate Box",
      box_medium: "Md Flat Rate Box",
      box_small: "Sm Flat Rate Box",
      envelope: "Flat Rate Envelope",
      envelope_gift_card: "Gift Card Flat Rate Envelope",
      envelope_legal: "Legal Flat Rate Envelope",
      envelope_padded: "Padded Flat Rate Envelope",
      envelope_small: "Sm Flat Rate Envelope",
      envelope_window: "Window Flat Rate Envelope",
      nonrectangular: "Nonrectangular",
      rectangular: "Rectangular",
      variable: "Variable"
    }
  end

  @doc """
  Returns a map of flat rate USPS containers, along with their string description
  and flat shipping rate (in cents).
  """
  @spec usps_flat_rate_containers() :: %{atom() => flat_rate_container()}
  def usps_flat_rate_containers() do
    %{
      envelope: %{name: "Flat Rate Envelope", rate: 665, length: 12.5, height: 9.5, width: 0},
      envelope_gift_card: %{
        name: "Gift Card Flat Rate Envelope",
        rate: 665,
        length: 10,
        height: 7,
        width: 0
      },
      envelope_window: %{
        name: "Window Flat Rate Envelope",
        rate: 665,
        length: 10,
        height: 5,
        width: 0
      },
      envelope_small: %{name: "Sm Flat Rate Envelope", rate: 665, length: 10, height: 6, width: 0},
      envelope_legal: %{
        name: "Legal Flat Rate Envelope",
        rate: 695,
        length: 15,
        height: 9.5,
        width: 0
      },
      envelope_padded: %{
        name: "Padded Flat Rate Envelope",
        rate: 720,
        length: 12.5,
        height: 9.5,
        width: 0
      },
      box_small: %{
        name: "Sm Flat Rate Box",
        rate: 715,
        length: 8.6875,
        height: 5.4375,
        width: 1.75
      },
      box_medium: %{name: "Md Flat Rate Box", rate: 1360, length: 11.25, height: 8.75, width: 6},
      box_large: %{name: "Lg Flat Rate Box", rate: 1885, length: 12.25, height: 12.25, width: 6}
    }
  end
end
|
lib/shippex/package.ex
| 0.894107 | 0.767951 |
package.ex
|
starcoder
|
defmodule Multiverses do
  @moduledoc """
  Elixir introduces into the world of programming, the "multiverse testing"
  pattern. This is a pattern where integration tests are run concurrently
  and each test sees a shard of global state.

  ## Pre-Existing Examples:

  - `Mox`: each test has access to the global module mock, sharded by the
    pid of the running test.
  - `Ecto`: each test has access to a "database sandbox", which is a
    checked out transaction on the global database that acts as its own
    database shard.
  - `Hound`,`Wallaby`: each test generates an ID that is passed outside of the
    BEAM that is reintercepted on ingress, this ID is then used to connect
    ecto sandboxes to the parent test PID

  This library implements Multiverses-aware versions of several constructs
  in the Elixir Standard Library which aren't natively Multiversable.

  For plugins that are provided for other systems, see the libraries:

  - `:multiverses_finch` - which extends this to HTTP requests that exit the BEAM.
  - `:multiverses_pubsub` - which extends this to Phoenix.PubSub

  ## Usage

  In `mix.exs`, you should add the following directive:

  ```
  {:multiverses, "~> #{Multiverses.MixProject.version}", runtime: (Mix.env() == :test)}
  ```

  In your module where you'll be using at least one multiverse module, use the
  following header:

  ```elixir
  use Multiverses, with: Registry
  ```

  this aliases `Multiverses.Registry` to `Registry`. As an escape hatch, if
  you must use the underlying module, you may use the macro alias
  `Elixir.Registry`

  If you need more complex choices for when to activate Multiverses (such as system
  environment variables), you should encode those choices directly using logic around
  the `use Multiverses` statement.

  ### Options

  - `:with` the names of multiverse modules you'd like to use. May be a single module
    or a list of modules. Is identical to `require Multiverses.<module>; alias Multiverses.<module>`.
  - `:otp_app` the otp_app must have its :use_multiverses application environment
    variable set in order to be used. Defaults to autodetecting via Mix.
  """

  import Kernel, except: [self: 0]

  @typedoc """
  a token that allows one to link with a universe
  """
  @opaque link :: [pid]

  defmacro __using__(options!) do
    # Resolve the otp_app lazily from the current Mix project when the
    # caller did not pass one explicitly.
    otp_app = Keyword.get_lazy(options!, :otp_app, fn ->
      Mix.Project.get
      |> apply(:project, [])
      |> Keyword.get(:app)
    end)

    # Only emit the multiverse machinery when the app opted in via its
    # :use_multiverses application environment; otherwise alias the
    # native modules so the caller's code compiles unchanged.
    if in_multiverse?(otp_app) do
      using_multiverses(otp_app, __CALLER__, options!)
    else
      empty_aliases(__CALLER__, options!)
    end
  end

  # True when the given app has :use_multiverses enabled in its
  # application environment.
  defp in_multiverse?(otp_app) do
    Application.get_env(otp_app, :use_multiverses, false)
  end

  # Emits the AST used when multiverses are active: records each wrapped
  # native module in the caller's @active_modules attribute (consumed by
  # overrides?/2) and aliases its Multiverses.* counterpart in its place.
  defp using_multiverses(otp_app, caller, options) do
    Module.register_attribute(
      caller.module,
      :active_modules,
      accumulate: true)

    [quote do
      @multiverse_otp_app unquote(otp_app)
      require Multiverses
    end | options
          |> Keyword.get(:with, [])
          |> List.wrap
          |> Enum.map(fn module_ast ->
            native_module = Macro.expand(module_ast, caller)
            multiverses_module = Module.concat(Multiverses, native_module)

            Module.put_attribute(
              caller.module,
              :active_modules,
              native_module)

            quote do
              alias unquote(multiverses_module)
            end
          end)]
  end

  # Emits plain aliases to the native modules when multiverses are
  # disabled, so `use Multiverses, with: Mod` is a no-op rename.
  defp empty_aliases(caller, options) do
    options
    |> Keyword.get(:with, [])
    |> List.wrap
    |> Enum.map(fn module_ast ->
      native_module = Macro.expand(module_ast, caller)
      quote do
        alias unquote(native_module)
      end
    end)
  end

  @spec link() :: link
  @doc """
  generates a "link" to current universe. If you pass the result of "link"
  to `port/1`, then it will bring the ported process into the universe of
  the process that called `link/0`
  """
  def link do
    # The link is the current pid prepended to the $callers chain.
    [Kernel.self() | Process.get(:"$callers", [])]
  end

  @spec port(link) :: link
  @doc """
  causes the current process to adopt the universe referred to by the result
  of a `link/0` call.
  """
  def port(callers) do
    Process.put(:"$callers", callers)
  end

  @spec self() :: pid
  @doc """
  identifies the universe of the current process.
  """
  def self do
    # The universe is the root of the $callers chain; fall back to the
    # current pid when no chain exists.
    :"$callers" |> Process.get([Kernel.self()]) |> List.last
  end

  @spec drop() :: link
  @doc """
  purges the callers list from the active list
  """
  def drop do
    Process.delete(:"$callers")
  end

  @spec overrides?(module, module) :: boolean
  @doc """
  this function can identify if a parent module has been overridden
  with its Multiverse equivalent in this module.

  **Important**: the parent_module parameter is interpreted in the
  global aliasing context, and not in the context of the local
  alias.

  useful for making compile-time guarantees, for example in ExUnit
  Case modules.
  """
  defmacro overrides?(module_ast, parent_module_ast) do
    module = Macro.expand(module_ast, __CALLER__)
    # the parent module should be expanded without local aliasing.
    parent_module = Macro.expand(parent_module_ast, __ENV__)
    active_modules = Module.get_attribute(module, :active_modules)
    if active_modules, do: parent_module in active_modules, else: false
  end

  @doc """
  lets you know if the current otp_app has multiverses active.

  Only available at compile time, and only available when compiling
  with Mix.
  """
  defmacro active? do
    quote do
      Application.compile_env(unquote(app()), :use_multiverses, false)
    end
  end

  @doc false
  # used internally to determine which app this belongs to
  @spec app() :: atom
  def app do
    Mix.Project.get
    |> apply(:project, [])
    |> Keyword.get(:app)
  end
end
|
lib/multiverses.ex
| 0.868688 | 0.864024 |
multiverses.ex
|
starcoder
|
defmodule Still.Preprocessor do
  @moduledoc """
  Defines functions to be used by the several preprocessors as well as the
  behaviour they should have.

  Preprocessors are the cornerstone of Still. A preprocessor chain can take a
  markdown file, execute its embedded Elixir, extract metadata from its front
  matter, transform it into HTML and wrap it in a layout.

  There are a few defined chains by default, but you can extend Still with your
  own.

  **A custom preprocessor is simply a module that calls `use Still.Preprocessor`
  and implements the `render/1` function.**

  Take the following example:

      defmodule YourSite.JPEG do
        use Still.Preprocessor

        @impl true
        def render(file) do
          file
        end
      end

  In this example, the `render/1` function is used to transform the content and
  the metadata of a #{Still.SourceFile}.

  See the [preprocessor guide](preprocessors.html) for more details.
  """

  alias Still.Compiler.PreprocessorError
  alias Still.SourceFile

  require Logger

  import Still.Utils, only: [config: 2]

  # FIX: `OutputPath` was listed twice in this multi-alias; the duplicate
  # entry was redundant and has been removed.
  alias __MODULE__.{
    CSSMinify,
    EEx,
    Frontmatter,
    JS,
    Markdown,
    OutputPath,
    Slime,
    URLFingerprinting,
    Save,
    AddLayout,
    AddContent,
    Image,
    Profiler
  }

  # Default preprocessor chain per file extension; user-defined chains
  # (see `preprocessors/0`) take precedence over these.
  @default_preprocessors %{
    ".slim" => [AddContent, EEx, Frontmatter, Slime, OutputPath, AddLayout, Save],
    ".slime" => [AddContent, EEx, Frontmatter, Slime, OutputPath, AddLayout, Save],
    ".eex" => [AddContent, EEx, Frontmatter, OutputPath, AddLayout, Save],
    ".css" => [AddContent, EEx, CSSMinify, OutputPath, URLFingerprinting, AddLayout, Save],
    ".js" => [AddContent, EEx, JS, OutputPath, URLFingerprinting, AddLayout, Save],
    ".md" => [AddContent, EEx, Frontmatter, Markdown, OutputPath, AddLayout, Save],
    ".jpg" => [OutputPath, Image],
    ".png" => [OutputPath, Image]
  }

  @doc """
  Runs the preprocessor pipeline for the given file.
  """
  @spec run(SourceFile.t()) :: SourceFile.t() | {:error, any()}
  def run(file) do
    file
    |> run(__MODULE__.for(file))
  end

  @doc """
  Runs the given preprocessor chain for the given file. An empty chain
  returns the file unchanged.
  """
  @spec run(SourceFile.t(), list(module())) :: SourceFile.t() | {:error, any()}
  def run(file, []) do
    file
  end

  def run(file, [preprocessor | next_preprocessors]) do
    # Each preprocessor is responsible for invoking the rest of the chain
    # (see run/2 in __using__ below), so we hand over the tail here.
    preprocessor.run(file, next_preprocessors)
  end

  @doc """
  Retrieves the preprocessor pipeline for the given file.

  Returns an empty list (and logs a warning) when no chain matches the
  file's extension; otherwise the matching chain prefixed with `Profiler`.
  """
  def for(%{input_file: file}) do
    preprocessors()
    |> find_preprocessor_for_file(file)
    |> case do
      nil ->
        Logger.warn("Preprocessors not found for file: #{file}")
        []

      {_, preprocessors} ->
        [Profiler | preprocessors]
    end
  end

  # User-defined preprocessors are checked before the defaults.
  defp preprocessors do
    Enum.concat(user_defined_preprocessors(), @default_preprocessors)
    |> Enum.to_list()
  end

  defp user_defined_preprocessors do
    config(:preprocessors, %{})
  end

  # Keys may be extension strings (exact match) or regexes (matched
  # against the whole file path).
  defp find_preprocessor_for_file(preprocessors, file) do
    Enum.find(preprocessors, fn {key, _value} ->
      if is_binary(key) do
        Path.extname(file) == key
      else
        Regex.match?(key, file)
      end
    end)
  end

  @callback render(SourceFile.t()) :: SourceFile.t()
  @callback after_render(SourceFile.t()) :: SourceFile.t()
  @optional_callbacks render: 1, after_render: 1

  defmacro __using__(_opts) do
    quote do
      @behaviour Still.Preprocessor

      @doc """
      Runs the #{Still.SourceFile} through the current preprocessor and the next.
      """
      @spec run(SourceFile.t()) :: SourceFile.t()
      def run(source_file) do
        run(source_file, [])
      end

      @spec run(SourceFile.t(), any()) :: SourceFile.t()
      def run(source_file, next_preprocessors) do
        source_file
        |> render()
        |> case do
          {:cont, source_file} ->
            source_file
            |> run_next_preprocessors(next_preprocessors)

          {:halt, source_file} ->
            source_file

          %SourceFile{} = source_file ->
            source_file
            |> run_next_preprocessors(next_preprocessors)
        end
        |> after_render()
      catch
        # Preprocessor errors are re-raised untouched; anything else is
        # wrapped with enough context to resume or report the pipeline.
        _, %PreprocessorError{} = error ->
          raise error

        kind, payload ->
          raise PreprocessorError,
            payload: payload,
            kind: kind,
            preprocessor: __MODULE__,
            remaining_preprocessors: next_preprocessors,
            source_file: source_file,
            stacktrace: __STACKTRACE__
      end

      defp run_next_preprocessors(source_file, []), do: source_file

      defp run_next_preprocessors(source_file, [next_preprocessor | remaining_preprocesors]) do
        cond do
          not Still.Utils.module_exists?(next_preprocessor) ->
            raise "Module #{next_preprocessor} does not exist"

          not function_exported?(next_preprocessor, :run, 2) ->
            raise "Function run/2 in module #{next_preprocessor} does not exist"

          true ->
            next_preprocessor.run(source_file, remaining_preprocesors)
        end
      end

      @doc """
      Runs after the next preprocessors finish running.

      Returns the resulting #{Still.SourceFile}.
      """
      @spec after_render(SourceFile.t()) :: SourceFile.t()
      def after_render(source_file), do: source_file

      @doc """
      Runs the current preprocessor and invokes the next one.

      Returns the resulting #{Still.SourceFile}.
      """
      @spec render(SourceFile.t()) ::
              {:cont, SourceFile.t()} | {:halt, SourceFile.t()} | SourceFile.t()
      def render(source_file), do: source_file

      defoverridable render: 1, after_render: 1
    end
  end
end
|
lib/still/preprocessor.ex
| 0.782953 | 0.403097 |
preprocessor.ex
|
starcoder
|
defmodule Quetzal do
  @moduledoc """
  Quetzal - Analytical web apps, fast, easy and real-time using Elixir. No Javascript required.

  Quetzal provides easy and fast tools to make analytical web apps with real-time updates.

  Quetzal provides the next features:

  * Allows create components from Elixir code and render into views such as: graphs (plotlyjs),
    inputs and more.
  * It uses a single function to allow update the components via server so instead of
    pulling data it is pushing data whenever you want.
  * It tracks events from components and receives in the live view to
    update live view components.

  ## Using components

  First, define a module and use `Quetzal.LiveView`, you don't need `mount/2` or `render/1`,
  when using the Quetzal Live View all is done:

      defmodule AppWeb.PieLive do
        use Quetzal.LiveView
      end

  With this minimal configuration Quetzal is able to render any component into the view, let's
  generate a pie graph to render:

      defmodule AppWeb.PieLive do
        use Quetzal.LiveView

        @impl Quetzal.LiveView
        def components(_session) do
          {"MyApp", [{Quetzal.Graph, [id: "mypie"], [type: "pie", labels: ["Red", "Blue"], values: [10, 20]]}]}
        end
      end

  The callback returns a new graph component and put into the view the necessary items
  to work with it.

  ## Live updates

  Now, we are going to the real-time cases, let's say we want update our pie graph when an
  event occurs in the server, so let's define a trigger to make it:

      defmodule AppWeb.PieLive do
        use Quetzal.LiveView

        @impl Quetzal.LiveView
        def components(_session) do
          {"MyApp", [{Quetzal.Graph, [id: "mypie"], [type: "pie", labels: ["Red", "Blue"], values: [10, 20]]}]}
        end

        def trigger_update() do
          :timer.sleep(5000)
          newvalues = for _n <- 1..3, do: :rand.uniform(100)
          components = [mypie: [labels: ["Black", "White", "Gray"], values: newvalues]]
          update_components("MyApp", components)
          trigger_update()
        end
      end

  Let's explain the code, first to all, the `trigger_update/0` can be called from iex:

      iex(1)> AppWeb.PieLive.trigger_update

  Then every 5 seconds a random numbers will be generated and put into values of the pie graph, and the
  pie graph will be updated, nice eh?.

  To achieve this, Quetzal uses the `update_components/2` function to render the new content, also
  you need configure the javascript hooks, only pass the hooks into the live socket connection:

      ...
      import Quetzal from "quetzal_hooks"
      let quetzal = new Quetzal();
      ...
      let liveSocket = new LiveSocket("/live", Socket, {hooks: quetzal.Hooks})

  With this minimal configuration, we are able to make a real-time app that updates the graph from the
  live view server.

  ## Live updates with callbacks

  Quetzal supports callbacks to deliver when a data has changed in a form, and then performs some update
  in the components, let's inspect the next code:

      defmodule AppWeb.LiveView do
        use Quetzal.LiveView,
          handler: __MODULE__,
          callbacks: [:update_output_div]

        @impl Quetzal.LiveView
        def components() do
          [{Quetzal.Form, [id: "myform", name: "myform",
            children: [{Quetzal.InputText, [id: "mytext", value: "", type: "text", name: "mytext"]}]
          ]},
          {Quetzal.Div, [id: "mydiv", style: "", children: ""]}]
        end

        def update_output_div("myform", "mytext", [value]) do
          [mydiv: [children: "You've entered \#\{value\} value in the first input"]]
        end
      end

  First define the handler and the callbacks, the handler is a module that will process the events, and the
  callbacks are a list of functions in that module, so when an events occurs then that callbacks will be called.

  In the components we are defining a single form with an input and a div, so when something changes in the
  input the live view server will send an update to the view and render the new children for the div.

  The callbacks receive always 3 arguments, the first is the name of the form containing the components firing the event,
  the second is the match against the component changed and the third is the value of all components in the form.

  ## Notes

  Some notes that you should be take:

  * All setup should be similar to Phoenix Live View setup except for the first step and use `Quetzal.LiveView`.
  * The hooks should be configured into your app.js file.
  * Layouts should include `plotly.js` if you plan to use graphs (can be included from CDN).
  * Ensure that quetzal hooks are included in the package.json:

        ...
        "dependencies": {
          "phoenix": "file:../deps/phoenix",
          "phoenix_html": "file:../deps/phoenix_html",
          "phoenix_live_view": "file:../deps/phoenix_live_view",
          "quetzal_hooks": "file:../deps/quetzal"
        },
        ...

  That's all, we are working to add more examples of components, inputs etc. Enjoy!.
  """

  @doc """
  Generates a child spec for Quetzal registry, it requires the name into the keywords.

  ## Example

  Include into your supervision tree:

  `{Quetzal, name: Quetzal.Registry}`
  """
  @spec child_spec(keyword) :: Supervisor.child_spec()
  defdelegate child_spec(options), to: Quetzal.Supervisor
end
|
lib/quetzal.ex
| 0.821582 | 0.702211 |
quetzal.ex
|
starcoder
|
defmodule Dispatch do
  @moduledoc """
  Main module, exposes a way to fetch random reviewers for a repo
  and a way to request reviewers to a repo.
  """

  alias Dispatch.Absences
  alias Dispatch.Repositories
  alias Dispatch.Settings
  alias Dispatch.Utils.Normalization

  defmodule BlocklistedUser do
    @enforce_keys [:username]
    @derive Jason.Encoder
    defstruct username: nil
  end

  defmodule Reviewer do
    @enforce_keys [:username]
    @derive Jason.Encoder
    defstruct username: nil, type: nil, metadata: nil
  end

  defmodule Learner do
    @enforce_keys [:username, :exposure]
    @derive Jason.Encoder
    defstruct username: nil, exposure: nil, metadata: nil
  end

  defmodule SelectedUser do
    @enforce_keys [:username, :type]
    @derive Jason.Encoder
    defstruct username: nil, type: nil, metadata: nil
  end

  @doc """
  Returns a list of usernames that should be requested to review the pull request
  """
  def fetch_selected_users(repo, stacks, author_username, disable_learners \\ false) do
    # 1. Refresh settings.
    # FIX: refresh used to happen *after* reading the blocklist, so the
    # exclusion list could be built from stale settings. Refresh first.
    Settings.refresh()

    # The PR author and all blocklisted users are never requestable.
    excluded_usernames = [author_username | Enum.map(Settings.blocklisted_users(), & &1.username)]

    # 2. Build a pool of requestable users
    requestable_usernames =
      repo
      |> Repositories.requestable_users()
      |> remove_absents()
      |> Enum.map(& &1.username)
      |> Kernel.--(excluded_usernames)

    # 3. Select relevant contributors from it
    contributors = Repositories.contributors(repo, requestable_usernames)
    requestable_usernames = update_requestable_usernames(requestable_usernames, Enum.map(contributors, & &1.username))

    # 4. Select a random stack-skilled reviewer for each stack from the updated pool
    stack_reviewers = Settings.reviewers(requestable_usernames, stacks)

    # 5. Update the pool and then randomly add learners as reviewers for each stack.
    # FIX: the pool previously re-subtracted the contributors (a no-op repeat
    # of step 3) instead of the freshly selected stack reviewers, allowing a
    # learner pick to duplicate a reviewer pick.
    requestable_usernames = update_requestable_usernames(requestable_usernames, Enum.map(stack_reviewers, & &1.username))
    stack_learners = if disable_learners, do: [], else: Settings.learners(requestable_usernames, stacks)

    # 6. Map all selected users to SelectedUser struct
    Enum.map(contributors ++ stack_reviewers ++ stack_learners, &struct(SelectedUser, Map.from_struct(&1)))
  end

  @doc """
  Request reviews from the specified reviewers on the pull request
  """
  def request_reviewers(repo, pull_request_number, reviewers) do
    # Only "contributor"/"reviewer" types get a formal review request; the
    # rest are merely mentioned in the comment.
    requested_reviewers = Enum.filter(reviewers, &(request_or_mention_reviewer?(&1) == :request))

    with :ok <- Repositories.request_reviewers(repo, pull_request_number, requested_reviewers),
         :ok <- Repositories.create_request_comment(repo, pull_request_number, reviewers) do
      :ok
    else
      _ ->
        :error
    end
  end

  @doc """
  Extracts stacks from a Webhook payload received from GitHub
  """
  def extract_from_params(%{"pull_request" => %{"body" => body}} = params) do
    default_stacks = Map.get(params, "stacks", "")

    # Stacks come from "#dispatch/<stack>" markers in the PR body; when the
    # body has none, fall back to the comma-separated "stacks" param.
    ~r/#dispatch\/([\w.]+)/i
    |> Regex.scan(body, capture: :all_but_first)
    |> (fn
          [] -> String.split(default_stacks, ",")
          stacks -> List.flatten(stacks)
        end).()
    |> Enum.map(&String.trim/1)
    |> Enum.map(&String.downcase/1)
    |> Enum.reject(&(&1 === ""))
  end

  def extract_from_params(_), do: []

  def request_or_mention_reviewer?(%SelectedUser{type: type}) when type in ["contributor", "reviewer"], do: :request
  def request_or_mention_reviewer?(_), do: :mention

  # Drops the given reviewer usernames from the requestable pool.
  defp update_requestable_usernames(requestable_usernames, reviewer_usernames) do
    Enum.filter(requestable_usernames, &(&1 not in reviewer_usernames))
  end

  # Filters out users whose (normalized) full name appears in the current
  # absence list; users without a full name are kept.
  defp remove_absents(requestable_users), do: remove_absents(Absences.absent_fullnames(), requestable_users)

  defp remove_absents([], requestable_users), do: requestable_users

  defp remove_absents(absent_fullnames, requestable_users) do
    Enum.filter(requestable_users, fn
      %{fullname: nil} -> true
      %{fullname: fullname} -> Normalization.normalize(fullname) not in absent_fullnames
    end)
  end
end
|
lib/dispatch/dispatch.ex
| 0.770119 | 0.435121 |
dispatch.ex
|
starcoder
|
defmodule AWS.ResourceGroups do
  @moduledoc """
  AWS Resource Groups

  AWS Resource Groups lets you organize AWS resources such as Amazon EC2
  instances, Amazon Relational Database Service databases, and Amazon S3 buckets
  into groups using criteria that you define as tags.

  A resource group is a collection of resources that match the resource types
  specified in a query, and share one or more tags or portions of tags. You can
  create a group of resources based on their roles in your cloud infrastructure,
  lifecycle stages, regions, application layers, or virtually any criteria.
  Resource Groups enable you to automate management tasks, such as those in AWS
  Systems Manager Automation documents, on tag-related resources in AWS Systems
  Manager. Groups of tagged resources also let you quickly view a custom console
  in AWS Systems Manager that shows AWS Config compliance and other monitoring
  data about member resources.

  To create a resource group, build a resource query, and specify tags that
  identify the criteria that members of the group have in common. Tags are
  key-value pairs.

  For more information about Resource Groups, see the [AWS Resource Groups User Guide](https://docs.aws.amazon.com/ARG/latest/userguide/welcome.html).

  AWS Resource Groups uses a REST-compliant API that you can use to perform the
  following types of operations.

    * Create, Read, Update, and Delete (CRUD) operations on resource
      groups and resource query entities

    * Applying, editing, and removing tags from resource groups

    * Resolving resource group member ARNs so they can be returned as
      search results

    * Getting data about resources that are members of a group

    * Searching AWS resources based on a resource query
  """

  @doc """
  Creates a resource group with the specified name and description.

  You can optionally include a resource query, or a service configuration.
  """
  def create_group(client, input, options \\ []) do
    path_ = "/groups"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Deletes the specified resource group.

  Deleting a resource group does not delete any resources that are members of the
  group; it only deletes the group structure.
  """
  def delete_group(client, input, options \\ []) do
    path_ = "/delete-group"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Returns information about a specified resource group.
  """
  def get_group(client, input, options \\ []) do
    path_ = "/get-group"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Returns the service configuration associated with the specified resource group.

  AWS Resource Groups supports configurations for the following resource group
  types:

    * `AWS::EC2::CapacityReservationPool` - Amazon EC2 capacity
      reservation pools. For more information, see [Working with capacity reservation groups](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/capacity-reservations-using.html#create-cr-group)
      in the *EC2 Users Guide*.
  """
  def get_group_configuration(client, input, options \\ []) do
    path_ = "/get-group-configuration"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Retrieves the resource query associated with the specified resource group.
  """
  def get_group_query(client, input, options \\ []) do
    path_ = "/get-group-query"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Returns a list of tags that are associated with a resource group, specified by
  an ARN.
  """
  def get_tags(client, arn, options \\ []) do
    path_ = "/resources/#{URI.encode(arn)}/tags"
    headers = []
    query_ = []
    request(client, :get, path_, query_, headers, nil, options, nil)
  end

  @doc """
  Adds the specified resources to the specified group.
  """
  def group_resources(client, input, options \\ []) do
    path_ = "/group-resources"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Returns a list of ARNs of the resources that are members of a specified resource
  group.
  """
  def list_group_resources(client, input, options \\ []) do
    path_ = "/list-group-resources"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Returns a list of existing resource groups in your account.
  """
  def list_groups(client, input, options \\ []) do
    path_ = "/groups-list"
    headers = []

    # Pagination parameters are sent in the query string, not the body.
    {query_, input} =
      [
        {"MaxResults", "maxResults"},
        {"NextToken", "nextToken"},
      ]
      |> AWS.Request.build_params(input)

    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Returns a list of AWS resource identifiers that matches the specified query.

  The query uses the same format as a resource query in a CreateGroup or
  UpdateGroupQuery operation.
  """
  def search_resources(client, input, options \\ []) do
    path_ = "/resources/search"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Adds tags to a resource group with the specified ARN.

  Existing tags on a resource group are not changed if they are not specified in
  the request parameters.

  Do not store personally identifiable information (PII) or other confidential or
  sensitive information in tags. We use tags to provide you with billing and
  administration services. Tags are not intended to be used for private or
  sensitive data.
  """
  def tag(client, arn, input, options \\ []) do
    path_ = "/resources/#{URI.encode(arn)}/tags"
    headers = []
    query_ = []
    request(client, :put, path_, query_, headers, input, options, nil)
  end

  @doc """
  Removes the specified resources from the specified group.
  """
  def ungroup_resources(client, input, options \\ []) do
    path_ = "/ungroup-resources"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Deletes tags from a specified resource group.
  """
  def untag(client, arn, input, options \\ []) do
    path_ = "/resources/#{URI.encode(arn)}/tags"
    headers = []
    query_ = []
    request(client, :patch, path_, query_, headers, input, options, nil)
  end

  @doc """
  Updates the description for an existing group.

  You cannot update the name of a resource group.
  """
  def update_group(client, input, options \\ []) do
    path_ = "/update-group"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Updates the resource query of a group.
  """
  def update_group_query(client, input, options \\ []) do
    path_ = "/update-group-query"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  # FIX: the spec previously declared `input :: map()` and
  # `success_status_code :: pos_integer()`, but `get_tags/3` passes a nil
  # input and every caller in this module passes nil for the status code.
  @spec request(AWS.Client.t(), binary(), binary(), list(), list(), map() | nil, list(), pos_integer() | nil) ::
          {:ok, map() | nil, map()}
          | {:error, term()}
  defp request(client, method, path, query, headers, input, options, success_status_code) do
    client = %{client | service: "resource-groups"}
    host = build_host("resource-groups", client)

    url = host
          |> build_url(path, client)
          |> add_query(query, client)

    additional_headers = [{"Host", host}, {"Content-Type", "application/x-amz-json-1.1"}]
    headers = AWS.Request.add_headers(additional_headers, headers)

    payload = encode!(client, input)
    headers = AWS.Request.sign_v4(client, method, url, headers, payload)
    perform_request(client, method, url, payload, headers, options, success_status_code)
  end

  defp perform_request(client, method, url, payload, headers, options, success_status_code) do
    case AWS.Client.request(client, method, url, payload, headers, options) do
      # With no explicit expected status code, any of 200/202/204 counts
      # as success; otherwise only the exact expected code does.
      {:ok, %{status_code: status_code, body: body} = response}
      when is_nil(success_status_code) and status_code in [200, 202, 204]
      when status_code == success_status_code ->
        body = if(body != "", do: decode!(client, body))
        {:ok, body, response}

      {:ok, response} ->
        {:error, {:unexpected_response, response}}

      error = {:error, _reason} -> error
    end
  end

  # "local" region short-circuits to a custom endpoint (or localhost)
  # for development/testing.
  defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
    endpoint
  end

  defp build_host(_endpoint_prefix, %{region: "local"}) do
    "localhost"
  end

  defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
    "#{endpoint_prefix}.#{region}.#{endpoint}"
  end

  defp build_url(host, path, %{:proto => proto, :port => port}) do
    "#{proto}://#{host}:#{port}#{path}"
  end

  defp add_query(url, [], _client) do
    url
  end

  defp add_query(url, query, client) do
    querystring = encode!(client, query, :query)
    "#{url}?#{querystring}"
  end

  defp encode!(client, payload, format \\ :json) do
    AWS.Client.encode!(client, payload, format)
  end

  defp decode!(client, payload) do
    AWS.Client.decode!(client, payload, :json)
  end
end
|
lib/aws/generated/resource_groups.ex
| 0.864268 | 0.505005 |
resource_groups.ex
|
starcoder
|
defmodule Advent20.SeatingSystem do
  @moduledoc """
  Day 11: Seating System
  """

  @doc """
  Parses the puzzle input into a map of `{x, y} => "L" | "#"`.
  Floor cells (".") are omitted entirely, so only seats appear as keys.
  """
  def parse(input) do
    for {line, y} <- input |> String.split("\n", trim: true) |> Enum.with_index(),
        {char, x} <- line |> String.codepoints() |> Enum.with_index(),
        char != ".",
        into: %{} do
      {{x, y}, char}
    end
  end

  @doc """
  Part 1: Simulate your seating area by applying the seating rules repeatedly
  until no seats change state. How many seats end up occupied?
  """
  def part_1(input) do
    seats = parse(input)
    simulate_seating_area(seats, adjacent_seats(seats), 4)
  end

  @doc """
  Part 2: Given the new visibility method and the rule change for occupied
  seats becoming empty, once equilibrium is reached, how many seats end up
  occupied?
  """
  def part_2(input) do
    seats = parse(input)
    simulate_seating_area(seats, adjacent_seats_by_line_of_sight(seats), 5)
  end

  # Precomputes, for every seat, the list of directly adjacent seats
  # (8-neighbourhood, floor cells excluded).
  defp adjacent_seats(seats) do
    coords = seats |> Map.keys() |> MapSet.new()

    Map.new(coords, fn origin ->
      neighbours =
        eight_direction_fns()
        |> Enum.map(fn step -> step.(origin) end)
        |> Enum.filter(&MapSet.member?(coords, &1))

      {origin, neighbours}
    end)
  end

  # Precomputes, for every seat, the first seat visible in each of the 8
  # directions, walking until a seat is found or the board edge is reached.
  defp adjacent_seats_by_line_of_sight(seats) do
    coords = seats |> Map.keys() |> MapSet.new()
    max_x = coords |> Enum.map(&elem(&1, 0)) |> Enum.max()
    max_y = coords |> Enum.map(&elem(&1, 1)) |> Enum.max()

    Map.new(coords, fn origin ->
      visible =
        Enum.flat_map(eight_direction_fns(), fn step ->
          origin
          |> Stream.iterate(step)
          # Drop the origin itself, then walk while still on the board.
          |> Stream.drop(1)
          |> Stream.take_while(fn {x, y} -> x in 0..max_x and y in 0..max_y end)
          |> Enum.find(&MapSet.member?(coords, &1))
          # nil (no seat in this direction) becomes [], a hit becomes [seat].
          |> List.wrap()
        end)

      {origin, visible}
    end)
  end

  @doc """
  Applies the seating rules until a fixed point is reached, then counts the
  occupied ("#") seats.
  """
  def simulate_seating_area(seat_data, connected_seats, occupied_limit) do
    case apply_seating_rules(seat_data, connected_seats, occupied_limit) do
      ^seat_data -> Enum.count(seat_data, fn {_coord, state} -> state == "#" end)
      changed -> simulate_seating_area(changed, connected_seats, occupied_limit)
    end
  end

  # One round of rules: an empty seat with no occupied neighbours fills up;
  # an occupied seat with at least `occupied_limit` occupied neighbours
  # empties; everything else is unchanged.
  defp apply_seating_rules(seats, connected_seats, occupied_limit) do
    Map.new(seats, fn {coord, state} ->
      occupied =
        connected_seats
        |> Map.get(coord, [])
        |> Enum.count(fn neighbour -> Map.fetch!(seats, neighbour) == "#" end)

      next =
        cond do
          state == "L" and occupied == 0 -> "#"
          state == "#" and occupied >= occupied_limit -> "L"
          true -> state
        end

      {coord, next}
    end)
  end

  # The 8 unit steps for travelling in all directions on the seat map.
  defp eight_direction_fns() do
    for dx <- [-1, 0, 1], dy <- [-1, 0, 1], {dx, dy} != {0, 0} do
      fn {x, y} -> {x + dx, y + dy} end
    end
  end
end
|
lib/advent20/11_seating_system.ex
| 0.809314 | 0.594463 |
11_seating_system.ex
|
starcoder
|
defmodule OAuthXYZ.Model.TransactionRequest do
  @moduledoc """
  Parses raw transaction (and transaction-continue) request maps into a
  `%TransactionRequest{}` struct.
  ```
  # Transaction request
  {
    "resources": [
      {
        "actions": [
          "read",
          "write",
          "dolphin"
        ],
        "locations": [
          "https://server.example.net/",
          "https://resource.local/other"
        ],
        "datatypes": [
          "metadata",
          "images"
        ]
      },
      "dolphin-metadata"
    ],
    "keys": {
      "proof": "jwsd",
      "jwks": {
        "keys": [
          {
            "kty": "RSA",
            "e": "AQAB",
            "kid": "xyz-1",
            "alg": "RS256",
            "n": "kOB5rR4Jv0GMeL...."
          }
        ]
      }
    },
    "interact": {
      "redirect": true,
      "callback": {
        "uri": "https://client.example.net/return/123455",
        "nonce": "LKLTI25DK82FX4T4QFZC"
      }
    },
    "display": {
      "name": "My Client Display Name",
      "uri": "https://example.net/client"
    }
  }
  # Transaction continue request
  {
    "handle": "tghji76ytghj9876tghjko987yh"
  }
  ```
  """

  alias OAuthXYZ.Model.{ResourceRequest, KeyRequest, Interact, DisplayRequest, UserRequest}

  @type t :: %__MODULE__{}

  # TODO: additional params
  defstruct [
    #! %OAuthXYZ.Model.ResourceRequest{}
    :resources,
    #! %OAuthXYZ.Model.KeyRequest{}
    :keys,
    #! %OAuthXYZ.Model.Interact{}
    :interact,
    #! %OAuthXYZ.Model.DisplayRequest{}
    :display,
    #! %OAuthXYZ.Model.UserRequest{}
    :user,
    #! :string
    :handle,
    #! :string
    :interaction_ref
  ]

  @doc """
  Builds a `%TransactionRequest{}` from a string-keyed request map.
  Every absent field becomes `nil`; present fields are run through their
  dedicated sub-parser (plain strings for `handle`/`interaction_ref`).
  """
  def parse(request) when is_map(request) do
    %__MODULE__{
      resources: parse_field(request, "resources", &ResourceRequest.parse/1),
      keys: parse_field(request, "keys", &KeyRequest.parse/1),
      interact: parse_field(request, "interact", &Interact.parse/1),
      display: parse_field(request, "display", &DisplayRequest.parse/1),
      user: parse_field(request, "user", &UserRequest.parse/1),
      handle: Map.get(request, "handle"),
      interaction_ref: Map.get(request, "interaction_ref")
    }
  end

  # private

  # Applies `parser` to the value stored under `key`, or returns nil when
  # the key is absent. Note: a present key with a nil value is still handed
  # to the parser, matching the original clause-per-key behavior.
  defp parse_field(request, key, parser) do
    case Map.fetch(request, key) do
      {:ok, value} -> parser.(value)
      :error -> nil
    end
  end
end
|
lib/oauth_xyz/model/transaction_request.ex
| 0.577138 | 0.705886 |
transaction_request.ex
|
starcoder
|
defmodule Expanse.Length do
  @moduledoc """
  Length conversion functions.

  Most conversions multiply (or divide) the input by a fixed factor and
  round the result to the nearest integer. The `in_to_*` and `nm_to_*`
  functions at the bottom return unrounded floats, mirroring the original
  implementation.

  NOTE(review): the original code attempted to clamp rounded results to a
  minimum of 1 via `if y < 1 do y = 1 end`. In Elixir that rebinding is
  scoped to the `if` block, so the clamp never took effect and every
  function simply returned `round(x * factor)`. The dead clamp — and a
  number of unreachable duplicate clauses (second `km_to_nm`, `cm_to_m`,
  `cm_to_nm`, `mm_to_nm`, `mi_to_y`, two extra `mi_to_ft`, `y_to_m`,
  `y_to_ft`, `y_to_nm`, `ft_to_y` clauses whose first clause always
  matched) — have been removed WITHOUT changing observable behavior.

  NOTE(review): several factors look suspect and are preserved as-is to
  keep behavior stable; confirm before relying on them: `cm_to_km`
  multiplies by 100_000 (likely should divide), `in_to_km` uses 2.5444444,
  `mm_to_mi` multiplies by 6.2137, and `mi_to_um`/`mi_to_nm` both use
  1.609.
  """

  # name => multiplicative factor; each generated function returns
  # round(x * factor). Where the source defined a function twice, only the
  # first (reachable) clause's factor is kept.
  @multiplied [
    km_to_m: 1000,
    km_to_cm: 100_000,
    km_to_mm: 1_000_000,
    km_to_um: 1_000_000_000,
    km_to_nm: 1_000_000_000_000,
    km_to_mi: 0.621371,
    km_to_yd: 1093.612,
    km_to_ft: 3280.8388,
    km_to_in: 39370.066,
    m_to_km: 0.001,
    m_to_cm: 100,
    m_to_mm: 1000,
    m_to_um: 1_000_000,
    m_to_nm: 1_000_000_000,
    m_to_mi: 0.000621371,
    m_to_y: 1.09361,
    m_to_ft: 3.28084,
    m_to_in: 39.37008,
    m_to_nmi: 0.00053995682073434,
    cm_to_km: 100_000,
    cm_to_m: 0.01,
    cm_to_um: 10_000,
    cm_to_nm: 10_000_000,
    cm_to_y: 0.0109361,
    cm_to_ft: 0.0328084,
    cm_to_in: 0.393701,
    mm_to_m: 0.001,
    mm_to_um: 1000,
    mm_to_nm: 1_000_000,
    mm_to_mi: 6.2137,
    mm_to_y: 0.00109361,
    mm_to_ft: 0.00328083,
    mm_to_in: 0.03936996,
    mi_to_km: 1.60934,
    mi_to_m: 1609.34,
    mi_to_cm: 160_934,
    mi_to_mm: 1609340.0007802,
    mi_to_um: 1.609,
    mi_to_nm: 1.609,
    mi_to_y: 1760,
    mi_to_ft: 5280,
    y_to_km: 0.0009144,
    y_to_m: 0.9144,
    y_to_cm: 91.44,
    y_to_mm: 914.4,
    y_to_um: 914_400,
    y_to_nm: 9.14444444444,
    y_to_mi: 0.000568182,
    y_to_ft: 3,
    ft_to_km: 0.0003048,
    ft_to_m: 0.3048,
    ft_to_cm: 30.48,
    ft_to_mm: 304.8,
    ft_to_um: 304_800,
    ft_to_pm: 3.04888888888,
    ft_to_y: 0.333333,
    ft_to_nm: 0.000164579,
    in_to_km: 2.5444444,
    in_to_m: 0.0254,
    in_to_cm: 2.54,
    in_to_mm: 25.4,
    in_to_um: 25_400,
    in_to_pm: 25_400
  ]

  # name => divisor; each generated function returns round(x / divisor).
  # Kept as division (not reciprocal multiplication) to preserve the exact
  # floating-point results of the original code.
  @divided [
    cm_to_mi: 6.2137,
    mm_to_km: 1_000_000,
    mm_to_cm: 0.1,
    ft_to_mi: 5280
  ]

  for {name, factor} <- @multiplied do
    @doc "Multiplies the input by #{factor} and rounds to the nearest integer."
    def unquote(name)(x), do: round(x * unquote(factor))
  end

  for {name, divisor} <- @divided do
    @doc "Divides the input by #{divisor} and rounds to the nearest integer."
    def unquote(name)(x), do: round(x / unquote(divisor))
  end

  # Unrounded conversions (float results), byte-for-byte the original math.

  @doc "Convert inches to miles (unrounded float)."
  def in_to_mi(x), do: x / 63360

  @doc "Convert inches to yards (unrounded float)."
  def in_to_yd(x), do: x / 36

  @doc "Convert inches to feet (unrounded float)."
  def in_to_ft(x), do: x / 12

  @doc "Convert inches to nautical miles (unrounded float)."
  def in_to_nm(x), do: x / 72913.4

  @doc "Convert nautical miles to kilometers (unrounded float)."
  def nm_to_km(x), do: x * 1.852

  @doc "Convert nautical miles to meters (unrounded float)."
  def nm_to_m(x), do: x * 1852

  @doc "Convert nautical miles to centimeters (unrounded float)."
  def nm_to_cm(x), do: x * 185200
end
|
lib/expanse.length.ex
| 0.553505 | 0.591251 |
expanse.length.ex
|
starcoder
|
defmodule Pow.Ecto.Schema.Password.Pbkdf2 do
  @moduledoc """
  The Pbkdf2 hash generation code is pulled from
  [Plug.Crypto.KeyGenerator](https://github.com/elixir-plug/plug_crypto/blob/v1.2.1/lib/plug/crypto/key_generator.ex)
  and is under Apache 2 license.
  """
  # Provides the bitwise operators/functions used below (|||, bxor/2, bsl/2).
  use Bitwise

  @doc """
  Compares the two binaries in constant-time to avoid timing attacks.
  """
  @spec compare(binary(), binary()) :: boolean()
  def compare(left, right) when is_binary(left) and is_binary(right) do
    byte_size(left) == byte_size(right) and compare(left, right, 0)
  end

  # Walks both binaries byte by byte, OR-ing the XOR of each byte pair into
  # `acc`. Every byte is always visited (no early exit on mismatch), which is
  # what keeps the comparison constant-time for equal-length inputs.
  defp compare(<<x, left::binary>>, <<y, right::binary>>, acc) do
    xorred = bxor(x, y)
    compare(left, right, acc ||| xorred)
  end

  # `acc` is 0 only if every byte pair was identical.
  defp compare(<<>>, <<>>, acc) do
    acc === 0
  end

  # The PBKDF2 block index is a 32-bit integer, capping the derived key length.
  @max_length bsl(1, 32) - 1

  @doc """
  Returns a derived key suitable for use.
  """
  @spec generate(binary(), binary(), integer(), integer(), atom()) :: binary()
  def generate(secret, salt, iterations, length, digest) do
    cond do
      not is_integer(iterations) or iterations < 1 ->
        raise ArgumentError, "iterations must be an integer >= 1"

      length > @max_length ->
        raise ArgumentError, "length must be less than or equal to #{@max_length}"

      true ->
        generate(mac_fun(digest, secret), salt, iterations, length, 1, [], 0)
    end
  rescue
    e ->
      # Replaces the argument list of the top stack frame with its arity —
      # presumably so the secret/salt are not retained in the stacktrace.
      # NOTE(review): confirm this intent against upstream plug_crypto.
      stacktrace =
        case __STACKTRACE__ do
          [{mod, fun, [_ | _] = args, info} | rest] ->
            [{mod, fun, length(args), info} | rest]

          stacktrace ->
            stacktrace
        end

      reraise e, stacktrace
  end

  # Enough bytes produced: flatten the accumulated iodata and trim to the
  # requested length.
  defp generate(_fun, _salt, _iterations, max_length, _block_index, acc, length)
       when length >= max_length do
    acc
    |> IO.iodata_to_binary()
    |> binary_part(0, max_length)
  end

  # Produces one PBKDF2 block: U1 = PRF(salt || block_index), then XOR-folds
  # `iterations - 1` further PRF applications, appends the block, and recurses
  # for the next block index.
  defp generate(fun, salt, iterations, max_length, block_index, acc, length) do
    initial = fun.(<<salt::binary, block_index::integer-size(32)>>)
    block = iterate(fun, iterations - 1, initial, initial)
    length = byte_size(block) + length

    generate(
      fun,
      salt,
      iterations,
      max_length,
      block_index + 1,
      [acc | block],
      length
    )
  end

  defp iterate(_fun, 0, _prev, acc), do: acc

  # U_{i+1} = PRF(U_i); the finished block is the XOR of all U_i.
  defp iterate(fun, iteration, prev, acc) do
    next = fun.(prev)
    iterate(fun, iteration - 1, next, :crypto.exor(next, acc))
  end

  # TODO: Remove when OTP 22.1 is required
  if Code.ensure_loaded?(:crypto) and function_exported?(:crypto, :mac, 4) do
    defp mac_fun(digest, secret), do: &:crypto.mac(:hmac, digest, secret, &1)
  else
    defp mac_fun(digest, secret), do: &:crypto.hmac(digest, secret, &1)
  end
end
|
lib/pow/ecto/schema/password/pbkdf2.ex
| 0.861013 | 0.50293 |
pbkdf2.ex
|
starcoder
|
# NOTE(review): machine-generated protobuf bindings (protobuf-elixir style)
# for tensorflow's saved_object_graph proto definitions. Field numbers and
# types mirror the .proto schema — do not edit by hand; regenerate instead.

# proto3 enum used by Tensorflow.FunctionSpec.experimental_compile (field 6).
defmodule Tensorflow.FunctionSpec.ExperimentalCompile do
  @moduledoc false
  use Protobuf, enum: true, syntax: :proto3

  @type t :: integer | :DEFAULT | :ON | :OFF

  field(:DEFAULT, 0)
  field(:ON, 1)
  field(:OFF, 2)
end

# map<string, SavedConcreteFunction> entry message for SavedObjectGraph.
defmodule Tensorflow.SavedObjectGraph.ConcreteFunctionsEntry do
  @moduledoc false
  use Protobuf, map: true, syntax: :proto3

  @type t :: %__MODULE__{
          key: String.t(),
          value: Tensorflow.SavedConcreteFunction.t() | nil
        }

  defstruct [:key, :value]

  field(:key, 1, type: :string)
  field(:value, 2, type: Tensorflow.SavedConcreteFunction)
end

defmodule Tensorflow.SavedObjectGraph do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          nodes: [Tensorflow.SavedObject.t()],
          concrete_functions: %{
            String.t() => Tensorflow.SavedConcreteFunction.t() | nil
          }
        }

  defstruct [:nodes, :concrete_functions]

  field(:nodes, 1, repeated: true, type: Tensorflow.SavedObject)

  field(:concrete_functions, 2,
    repeated: true,
    type: Tensorflow.SavedObjectGraph.ConcreteFunctionsEntry,
    map: true
  )
end

# map<string, SaveableObject> entry message for SavedObject.
defmodule Tensorflow.SavedObject.SaveableObjectsEntry do
  @moduledoc false
  use Protobuf, map: true, syntax: :proto3

  @type t :: %__MODULE__{
          key: String.t(),
          value: Tensorflow.SaveableObject.t() | nil
        }

  defstruct [:key, :value]

  field(:key, 1, type: :string)
  field(:value, 2, type: Tensorflow.SaveableObject)
end

defmodule Tensorflow.SavedObject do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          kind: {atom, any},
          children: [
            Tensorflow.TrackableObjectGraph.TrackableObject.ObjectReference.t()
          ],
          slot_variables: [
            Tensorflow.TrackableObjectGraph.TrackableObject.SlotVariableReference.t()
          ],
          saveable_objects: %{
            String.t() => Tensorflow.SaveableObject.t() | nil
          }
        }

  defstruct [:kind, :children, :slot_variables, :saveable_objects]

  # `kind` is a oneof group covering fields 4..10 below (oneof index 0).
  oneof(:kind, 0)

  field(:children, 1,
    repeated: true,
    type: Tensorflow.TrackableObjectGraph.TrackableObject.ObjectReference
  )

  field(:slot_variables, 3,
    repeated: true,
    type:
      Tensorflow.TrackableObjectGraph.TrackableObject.SlotVariableReference
  )

  field(:user_object, 4, type: Tensorflow.SavedUserObject, oneof: 0)
  field(:asset, 5, type: Tensorflow.SavedAsset, oneof: 0)
  field(:function, 6, type: Tensorflow.SavedFunction, oneof: 0)
  field(:variable, 7, type: Tensorflow.SavedVariable, oneof: 0)

  field(:bare_concrete_function, 8,
    type: Tensorflow.SavedBareConcreteFunction,
    oneof: 0
  )

  field(:constant, 9, type: Tensorflow.SavedConstant, oneof: 0)
  field(:resource, 10, type: Tensorflow.SavedResource, oneof: 0)

  field(:saveable_objects, 11,
    repeated: true,
    type: Tensorflow.SavedObject.SaveableObjectsEntry,
    map: true
  )
end

defmodule Tensorflow.SavedUserObject do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          identifier: String.t(),
          version: Tensorflow.VersionDef.t() | nil,
          metadata: String.t()
        }

  defstruct [:identifier, :version, :metadata]

  field(:identifier, 1, type: :string)
  field(:version, 2, type: Tensorflow.VersionDef)
  field(:metadata, 3, type: :string)
end

defmodule Tensorflow.SavedAsset do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          asset_file_def_index: integer
        }

  defstruct [:asset_file_def_index]

  field(:asset_file_def_index, 1, type: :int32)
end

defmodule Tensorflow.SavedFunction do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          concrete_functions: [String.t()],
          function_spec: Tensorflow.FunctionSpec.t() | nil
        }

  defstruct [:concrete_functions, :function_spec]

  field(:concrete_functions, 1, repeated: true, type: :string)
  field(:function_spec, 2, type: Tensorflow.FunctionSpec)
end

defmodule Tensorflow.SavedConcreteFunction do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          bound_inputs: [integer],
          canonicalized_input_signature: Tensorflow.StructuredValue.t() | nil,
          output_signature: Tensorflow.StructuredValue.t() | nil
        }

  defstruct [:bound_inputs, :canonicalized_input_signature, :output_signature]

  field(:bound_inputs, 2, repeated: true, type: :int32)
  field(:canonicalized_input_signature, 3, type: Tensorflow.StructuredValue)
  field(:output_signature, 4, type: Tensorflow.StructuredValue)
end

defmodule Tensorflow.SavedBareConcreteFunction do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          concrete_function_name: String.t(),
          argument_keywords: [String.t()],
          allowed_positional_arguments: integer
        }

  defstruct [
    :concrete_function_name,
    :argument_keywords,
    :allowed_positional_arguments
  ]

  field(:concrete_function_name, 1, type: :string)
  field(:argument_keywords, 2, repeated: true, type: :string)
  field(:allowed_positional_arguments, 3, type: :int64)
end

defmodule Tensorflow.SavedConstant do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          operation: String.t()
        }

  defstruct [:operation]

  field(:operation, 1, type: :string)
end

defmodule Tensorflow.SavedVariable do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          dtype: Tensorflow.DataType.t(),
          shape: Tensorflow.TensorShapeProto.t() | nil,
          trainable: boolean,
          synchronization: Tensorflow.VariableSynchronization.t(),
          aggregation: Tensorflow.VariableAggregation.t(),
          name: String.t(),
          device: String.t(),
          experimental_distributed_variable_components: [
            Tensorflow.SavedVariable.t()
          ]
        }

  defstruct [
    :dtype,
    :shape,
    :trainable,
    :synchronization,
    :aggregation,
    :name,
    :device,
    :experimental_distributed_variable_components
  ]

  field(:dtype, 1, type: Tensorflow.DataType, enum: true)
  field(:shape, 2, type: Tensorflow.TensorShapeProto)
  field(:trainable, 3, type: :bool)

  field(:synchronization, 4,
    type: Tensorflow.VariableSynchronization,
    enum: true
  )

  field(:aggregation, 5, type: Tensorflow.VariableAggregation, enum: true)
  field(:name, 6, type: :string)
  field(:device, 7, type: :string)

  field(:experimental_distributed_variable_components, 8,
    repeated: true,
    type: Tensorflow.SavedVariable
  )
end

defmodule Tensorflow.FunctionSpec do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          fullargspec: Tensorflow.StructuredValue.t() | nil,
          is_method: boolean,
          input_signature: Tensorflow.StructuredValue.t() | nil,
          experimental_compile:
            Tensorflow.FunctionSpec.ExperimentalCompile.t()
        }

  defstruct [
    :fullargspec,
    :is_method,
    :input_signature,
    :experimental_compile
  ]

  field(:fullargspec, 1, type: Tensorflow.StructuredValue)
  field(:is_method, 2, type: :bool)
  field(:input_signature, 5, type: Tensorflow.StructuredValue)

  field(:experimental_compile, 6,
    type: Tensorflow.FunctionSpec.ExperimentalCompile,
    enum: true
  )
end

defmodule Tensorflow.SavedResource do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          device: String.t()
        }

  defstruct [:device]

  field(:device, 1, type: :string)
end

defmodule Tensorflow.SaveableObject do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          save_function: integer,
          restore_function: integer
        }

  defstruct [:save_function, :restore_function]

  field(:save_function, 2, type: :int32)
  field(:restore_function, 3, type: :int32)
end
|
lib/tensorflow/core/protobuf/saved_object_graph.pb.ex
| 0.848219 | 0.585486 |
saved_object_graph.pb.ex
|
starcoder
|
defmodule Day22 do
  @moduledoc """
  Counts the cells left "on" after applying a sequence of on/off cuboid
  instructions (Advent of Code day 22 style). State is kept as a list of
  non-overlapping "on" cuboids, each a `{x_range, y_range, z_range}` tuple
  of integer ranges. Requires Elixir >= 1.12 (stepped ranges, Range.size/1).
  """

  # Part 1: only the -50..50 initialization region counts, so instructions
  # whose cuboid extends outside it on any axis are dropped before solving.
  def part1(input) do
    parse(input)
    |> Enum.reject(fn {_, {xr, yr, zr}} ->
      Enum.any?([xr, yr, zr], fn r ->
        r.first < -50 || r.last > 50
      end)
    end)
    |> solve
  end

  # Part 2: the full, unbounded instruction list.
  def part2(input) do
    solve(parse(input))
  end

  # Folds the instructions into a list of disjoint "on" cuboids, then sums
  # their volumes.
  def solve(operations) do
    cuboids = []

    operations
    |> Enum.reduce(cuboids, &execute/2)
    |> Enum.reduce(0, fn {xr, yr, zr}, count ->
      count + Range.size(xr) * Range.size(yr) * Range.size(zr)
    end)
  end

  # Applies one instruction: every existing cuboid overlapping the new one is
  # split into pieces, and the pieces covered by the new cuboid are discarded.
  # For :on the new cuboid is then added whole; for :off it is not.
  defp execute({action, new_cuboid}, cuboids) do
    cuboids =
      Enum.flat_map(cuboids, fn old_cuboid ->
        if overlap?(old_cuboid, new_cuboid) do
          split_cuboid(old_cuboid, new_cuboid)
          |> Enum.reject(fn cuboid ->
            overlap?(cuboid, new_cuboid)
          end)
        else
          [old_cuboid]
        end
      end)

    case action do
      :on ->
        [new_cuboid | cuboids]

      :off ->
        cuboids
    end
  end

  # Splits `cuboid` against `reference` along all three axes (up to 27
  # pieces), exactly one of which lies inside `reference` when they overlap.
  defp split_cuboid(cuboid, reference) do
    split_cuboids([cuboid], reference, 0)
  end

  # Axis recursion terminates after x (0), y (1) and z (2) have been handled.
  defp split_cuboids(cuboids, _reference, 3), do: cuboids

  defp split_cuboids(cuboids, reference, axis) do
    new_cuboids = Enum.flat_map(cuboids, &split_one(&1, reference, axis))
    split_cuboids(new_cuboids, reference, axis + 1)
  end

  # Splits along one axis into: the part below the reference range, the
  # intersection, and the part above. The explicit `//1` step makes a
  # "backwards" range (e.g. 5..3//1) an empty range, so degenerate pieces
  # get Range.size 0 and are rejected.
  defp split_one(cuboid, reference, axis) do
    cr = elem(cuboid, axis)
    rr = elem(reference, axis)

    [cr.first..rr.first-1//1,
     max(cr.first, rr.first)..min(cr.last, rr.last)//1,
     min(cr.last, rr.last)+1..cr.last//1]
    |> Enum.reject(&Range.size(&1) === 0)
    |> Enum.map(fn r ->
      put_elem(cuboid, axis, r)
    end)
  end

  # True unless the cuboids are separated on at least one axis.
  defp overlap?({xr1, yr1, zr1}, {xr2, yr2, zr2}) do
    not (Range.disjoint?(xr1, xr2) or
           Range.disjoint?(yr1, yr2) or
           Range.disjoint?(zr1, zr2))
  end

  # Parses lines like "on x=10..12,y=10..12,z=10..12" into
  # {:on | :off, {x_range, y_range, z_range}}.
  defp parse(input) do
    Enum.map(input, fn line ->
      {on_off, rest} =
        case line do
          "on " <> rest ->
            {:on, rest}

          "off " <> rest ->
            {:off, rest}
        end

      {on_off,
       String.split(rest, ",")
       |> Enum.map(fn <<_, "=", rest::binary>> ->
         [from, to] =
           String.split(rest, "..")
           |> Enum.map(&String.to_integer/1)

         from..to
       end)
       |> List.to_tuple}
    end)
  end
end
|
day22/lib/day22.ex
| 0.50708 | 0.553867 |
day22.ex
|
starcoder
|
defmodule Thundermoon.CounterRoot do
  @moduledoc """
  This module provides functions to read and change the counter.
  It takes care that only one operation is executed and that
  after a change a read operation will reflect this changes:
  ```
  assert %{digit_1: 0, digit_10: 0, digit_100: 0} = Counter.get_digits()
  Counter.inc(10)
  assert %{digit_1: 0, digit_10: 1, digit_100: 0} = Counter.get_digits()
  ```
  """
  # GenServer is temporary as it will be restarted by the counter realm
  use GenServer, restart: :temporary

  alias Thundermoon.Digit
  alias Thundermoon.DigitSupervisor

  def start_link(opts) do
    GenServer.start_link(__MODULE__, :ok, opts)
  end

  @impl true
  def init(:ok) do
    {:ok, create()}
  end

  @impl true
  def handle_call(:get_digits, _from, state) do
    {:reply, digits_to_map(state), state}
  end

  # need to invoke call (not cast), otherwise the counter has not enough time
  # to terminate and be properly restarted
  def handle_call(:reset, _from, state) do
    {:stop, :normal, nil, state}
  end

  @impl true
  def handle_cast({:inc, number}, state) do
    execute_action(state, number, &Digit.inc/1)
    {:noreply, state}
  end

  def handle_cast({:dec, number}, state) do
    execute_action(state, number, &Digit.dec/1)
    {:noreply, state}
  end

  @impl true
  def terminate(_reason, state) do
    Enum.each(state, fn {_key, digit} ->
      DynamicSupervisor.terminate_child(DigitSupervisor, digit.pid)
    end)
  end

  # A monitored digit process died: replace it with a fresh one at 0.
  @impl true
  def handle_info({:DOWN, ref, :process, _pid, _reason}, state) do
    {crashed_digit, _pid_ref} = find_digit(state, ref)
    {:noreply, create_digit(state, crashed_digit, 0)}
  end

  # A digit wrapped around: carry the operation into the next power of ten,
  # or stop (and thereby reset via the supervisor) when the hundreds digit
  # overflows. The two original clauses differed only in the delegated
  # operation, so they are merged here.
  def handle_info({:overflow, [digit, action]}, state) when action in [:inc, :dec] do
    case bigger_digit(digit) do
      {:error, "counter overflow"} ->
        {:stop, :normal, state}

      sibling ->
        apply_carry(action, get_digit_pid(state, sibling))
        {:noreply, state}
    end
  end

  def handle_info(_msg, state) do
    {:noreply, state}
  end

  # Delegates a carry to the digit process for the given operation.
  defp apply_carry(:inc, pid), do: Digit.inc(pid)
  defp apply_carry(:dec, pid), do: Digit.dec(pid)

  # Runs `func` on the digit process for the given decimal position
  # (1, 10 or 100); unknown positions are silently ignored.
  defp execute_action(state, number, func) do
    case get_digit(state, number) do
      {:ok, digit} -> func.(digit.pid)
      :error -> nil
    end
  end

  defp bigger_digit(:digit_1), do: :digit_10
  defp bigger_digit(:digit_10), do: :digit_100
  defp bigger_digit(:digit_100), do: {:error, "counter overflow"}

  defp get_digit_pid(state, digit) do
    state
    |> Map.get(digit)
    |> Map.get(:pid)
  end

  defp find_digit(state, ref) do
    Enum.find(state, fn {_key, digit} -> digit.ref == ref end)
  end

  # Maps a decimal position (1/10/100) to its state entry. Previously built
  # the key with String.to_atom/1, which creates atoms from input (an
  # atom-exhaustion anti-pattern); explicit clauses preserve the behavior,
  # including :error for unknown positions.
  defp get_digit(state, 1), do: Map.fetch(state, :digit_1)
  defp get_digit(state, 10), do: Map.fetch(state, :digit_10)
  defp get_digit(state, 100), do: Map.fetch(state, :digit_100)
  defp get_digit(_state, _number), do: :error

  defp digits_to_map(state) do
    %{
      digit_1: Digit.get(state.digit_1.pid),
      digit_10: Digit.get(state.digit_10.pid),
      digit_100: Digit.get(state.digit_100.pid)
    }
  end

  defp create() do
    %{}
    |> create_digit(:digit_1, 0)
    |> create_digit(:digit_10, 0)
    |> create_digit(:digit_100, 0)
  end

  # Starts a monitored digit process under the DigitSupervisor and records
  # its pid/monitor ref in the state.
  defp create_digit(state, key, value) do
    child = %{id: Digit, start: {Digit, :start, [self(), key, value]}}
    {:ok, pid} = DynamicSupervisor.start_child(DigitSupervisor, child)
    ref = Process.monitor(pid)
    Map.put(state, key, %{pid: pid, ref: ref})
  end
end
|
apps/thundermoon/lib/thundermoon/counter/counter_root.ex
| 0.708818 | 0.917635 |
counter_root.ex
|
starcoder
|
defmodule AdventOfCode do
@moduledoc """
Helper module for dealing with text input from the AOC puzzles.
Originally created for the 2021 competition.
To use from LiveBook:
IEx.Helpers.c("lib/advent_of_code.ex")
alias AdventOfCode, as: AOC
"""
alias Kino
# Grid-based helpers
def as_grid(multiline_text) do
[line0 | _] = lines = as_single_lines(multiline_text)
grid_width = String.length(line0)
grid_height = Enum.count(lines)
lines
|> Enum.join("")
|> String.split("", trim: true)
|> Enum.with_index()
|> Map.new(fn {character, index} -> {index, character} end)
|> Map.merge(%{
grid_width: grid_width,
grid_height: grid_height,
last_cell: grid_height * grid_width - 1
})
end
def as_grid_of_digits(multiline_text) do
as_grid(multiline_text)
|> Enum.reduce(%{}, fn {key, value}, acc ->
Map.put(acc, key, is_integer(value) && value || String.to_integer(value))
end)
end
# We only want 4 neighbors, not 8
def neighbors4(grid, index) do
[
index - grid.grid_width,
index - 1,
index + 1,
index + grid.grid_width,
]
|> Enum.filter(fn neighbor -> grid[neighbor] end) #off-board
|> Enum.filter(fn neighbor ->
# must be on the same row or column to ensure we don't go side-to-side
div(neighbor, grid.grid_width) == div(index, grid.grid_width) ||
rem(neighbor, grid.grid_width) == rem(index, grid.grid_width)
end)
end
# We only want all 8 neighbors
def neighbors8(grid, index) do
x = rem(index, grid.grid_width)
# only worry about going off the sides - the top and bottom
# excursions will be off-board and removed when they return nil.
positions =
[index - grid.grid_width, index + grid.grid_width] ++
if x > 0 do
[index - grid.grid_width - 1, index - 1, index + grid.grid_width - 1]
else
[]
end ++
if x == (grid.grid_width - 1) do
[]
else
[index - grid.grid_width + 1, index + 1, index + grid.grid_width + 1]
end
positions
|> Enum.filter(fn neighbor -> grid[neighbor] end) #off-board
end
@ascii_zero 48
@max_display 40
def display_grid(grid, text) do
IO.puts("--- #{text}")
(0..grid.last_cell)
|> Enum.chunk_every(grid.grid_width)
|> Enum.map(fn indexes ->
indexes
|> Enum.map(fn index ->
(grid[index] >= @max_display) && "." || (@ascii_zero + grid[index])
end)
|> IO.puts()
end)
grid
end
# Paragraph-based helpers
def as_single_lines(multiline_text) do
multiline_text
|> String.split("\n", trim: true)
end
def as_doublespaced_paragraphs(multiline_text) do
multiline_text
|> String.split("\n\n")
end
# Readability alias for `as_single_lines/1` when the input is one paragraph.
def as_paragraph_lines(paragraph), do: as_single_lines(paragraph)
# Line-based helpers

# Parses a line such as " 1, 2,3 " into a list of integers, ignoring
# surrounding whitespace on the line and around each number.
def as_comma_separated_integers(text) do
  text
  |> String.trim()
  |> String.split(",", trim: true)
  |> Enum.map(&(&1 |> String.trim() |> String.to_integer()))
end
# Splits `text` on runs of whitespace.
def delimited_by_spaces(text), do: String.split(text, ~r/\s+/)
# Splits `text` on ":" separators.
#
# A plain binary pattern replaces the previous ~r/\:/ regex: the backslash
# escape was redundant (":" is not a regex metacharacter) and binary
# splitting avoids regex machinery entirely while producing identical output.
def delimited_by_colons(text) do
  String.split(text, ":")
end
# -- startup and kino-related functions
end
|
lib/advent_of_code.ex
| 0.689724 | 0.441793 |
advent_of_code.ex
|
starcoder
|
defmodule RTypes do
  @moduledoc """
  RTypes is an Elixir library which helps automatically create a validation function for
  a given user type. The function can be used to check the shape of the data after
  de-serialisation or in unit-tests.
  Let's suppose we have a type
  ```elixir
  @type t :: 0..255
  ```
  and we have a value `x`. To ensure that our value corresponds to the type `t` we
  can use the function
  ```elixir
  def t?(x) when is_integer(x) and x >= 0 and x <= 255, do: true
  def t?(_), do: false
  ```
  Now, if we have a compound type
  ```elixir
  @type list_of_ts :: [t]
  ```
  and a value `xs`, we can use `is_list/1` guard on `xs` and then ensure that all
  elements of the list conform to `t`. And if we have a more complex structure
  ```elixir
  @type state(a, b) :: %{key1: {a, b}, key2: list_of_ts()}
  ```
  and a value `s`, we can check that `s` is a map which has keys `key1` and
  `key2`, apply the logic above for the value of `key2` and for any concrete types
  `a` and `b` we can check that the value of `key1` is a tuple of length 2 and its
  elements conform to `a` and `b` respectively. So we just recursively apply those
  checks.
  ## Usage
  The library defines `make_validator/1` and `make_predicate/1` macros, and
  `make_validator/3` and `make_predicate/3` functions which can be used at run
  time. The difference between the two is that a `validator` returns `:ok` or
  `{:error, reason}` where `reason` explains what went wrong, while a
  `predicate` returns only `true` or `false` and is somewhat faster.
  ```elixir
  iex> require RTypes
  iex> port_number? = RTypes.make_predicate(:inet.port_number())
  iex> port_number?.(8080)
  true
  iex> port_number?.(80000)
  false
  iex> validate_is_kwlist = RTypes.make_validator(Keyword, :t, [{:type, 0, :pos_integer, []}])
  iex> validate_is_kwlist.(key1: 4, key2: 5)
  :ok
  iex> match?({:error, _reason}, validate_is_kwlist.([1, 2, 3]))
  true
  ```
  """

  import RTypes.Internal, only: [decompose_and_expand: 2, expand_type_args: 1]

  # NOTE: fixed two typos in the original: the @typedoc said
  # "error_decription/1" (misspelt, wrong arity) and the :types entry
  # referenced "RType.Extractor" instead of "RTypes.Extractor".
  @typedoc "`t:error_description/0` is a keyword list which details the validation error."
  @type error_description :: [
          {:message, String.t()}
          | {:term, term()}
          | {:ctx, [term()]}
          | {:types, [RTypes.Extractor.unfolded_type()]}
        ]

  @doc "Renders an `error_description/0` as a human-readable sentence."
  @spec format_error_description(error_description()) :: String.t()
  def format_error_description(desc) do
    "fail to validate term #{inspect(desc[:term])}, reason #{desc[:message]}" <>
      case desc[:types] do
        nil -> ""
        types -> ", types #{inspect(types)}"
      end <>
      case desc[:ctx] do
        nil -> ""
        ctx -> ", in context #{inspect(ctx)}"
      end
  end

  @doc """
  Derive a validation function for the given type expression.
  ## Usage
  ```
  iex> require RTypes
  iex> validate_port_number = RTypes.make_validator(:inet.port_number())
  iex> validate_port_number.(8080)
  :ok
  iex> match?({:error, _}, validate_port_number.(70000))
  true
  iex> validate_kw_list = RTypes.make_validator(Keyword.t(pos_integer()))
  iex> validate_kw_list.([a: 1, b: 2])
  :ok
  ```
  Note that the macro expects its argument provided as in
  ```
  MyModule.my_type(arg1, arg2)
  ```
  The returned function either returns `:ok` or `{:error, reason}` where
  `reason` details what went wrong.
  """
  defmacro make_validator(code) do
    typ = code |> decompose_and_expand(__CALLER__) |> extract_typ()

    quote bind_quoted: [typ: Macro.escape(typ)] do
      fn term ->
        RTypes.Checker.check(term, typ)
      end
    end
  end

  @doc """
  Derive a validation function given a module name, type name, and type parameters.
  Type parameters must be of some concrete type.
  ## Example
  ```
  iex> validate_kw_list = RTypes.make_validator(Keyword, :t, [{:type, 0, :pos_integer, []}])
  iex> validate_kw_list.(key1: 4, key2: 5)
  :ok
  ```
  The function returns either `:ok` or `{:error, error_description}` where
  `error_description` details what went wrong.
  """
  @spec make_validator(module(), atom(), [RTypes.Extractor.type()]) ::
          (term -> :ok | {:error, error_description()})
  def make_validator(mod, type_name, type_args) do
    typ = RTypes.Extractor.extract_type(mod, type_name, type_args)

    fn term ->
      RTypes.Checker.check(term, typ)
    end
  end

  @doc """
  Derive a predicate for the given type expression.
  ```
  iex> require RTypes
  iex> non_neg_integer? = RTypes.make_predicate(non_neg_integer())
  iex> non_neg_integer?.(10)
  true
  iex> non_neg_integer?.(0)
  true
  iex> non_neg_integer?.(-3)
  false
  iex> non_neg_integer?.(:ok)
  false
  ```
  """
  defmacro make_predicate(code) do
    typ = code |> decompose_and_expand(__CALLER__) |> extract_typ()

    quote bind_quoted: [typ: Macro.escape(typ)] do
      RTypes.Lambda.build(typ)
    end
  end

  @doc """
  Return a predicate given a module name, type name, and type parameters.
  The predicate behaves the same way as the one produced by `make_predicate/1` macro.
  """
  @spec make_predicate(module(), atom(), [RTypes.Extractor.type()]) :: (any() -> boolean())
  def make_predicate(mod, type_name, type_args) do
    typ = RTypes.Extractor.extract_type(mod, type_name, type_args)
    RTypes.Lambda.build(typ)
  end

  @deprecated "use make_validator/1 instead"
  defmacro derive!(code) do
    typ = code |> decompose_and_expand(__CALLER__) |> extract_typ()

    quote bind_quoted: [typ: Macro.escape(typ)] do
      fn term ->
        case RTypes.Checker.check(term, typ) do
          :ok -> true
          {:error, reason} -> raise RTypes.format_error_description(reason)
        end
      end
    end
  end

  @deprecated "use make_validator/3 instead"
  def derive!(mod, type_name, type_args) do
    typ = RTypes.Extractor.extract_type(mod, type_name, type_args)

    fn term ->
      case RTypes.Checker.check(term, typ) do
        :ok -> true
        {:error, reason} -> raise RTypes.format_error_description(reason)
      end
    end
  end

  @deprecated "use make_predicate/1 instead"
  defmacro derive(code) do
    typ = code |> decompose_and_expand(__CALLER__) |> extract_typ()

    quote bind_quoted: [typ: Macro.escape(typ)] do
      RTypes.Lambda.build(typ)
    end
  end

  @deprecated "use make_predicate/3 instead"
  def derive(mod, type_name, type_args) do
    typ = RTypes.Extractor.extract_type(mod, type_name, type_args)
    RTypes.Lambda.build(typ)
  end

  # Shared helper for the four macros above: turns a decomposed type
  # expression (remote `{mod, name, args}` or built-in `{name, args}`) into
  # the extracted/expanded type AST. Runs at macro-expansion time.
  defp extract_typ({mod, type_name, args}) do
    RTypes.Extractor.extract_type(mod, type_name, expand_type_args(args))
  end

  defp extract_typ({type_name, args}) do
    {:type, 0, type_name, expand_type_args(args)}
  end
end
|
lib/rtypes.ex
| 0.896585 | 0.91804 |
rtypes.ex
|
starcoder
|
defmodule SystemRegistry do
  @moduledoc """
  A transactional nested term storage and dispatch system.
  `SystemRegistry` takes a different approach to a typical publish-subscribe pattern by
  focusing on data instead of events. It is local
  (as opposed to distributed) and transactional (as opposed to asynchronous)
  to eliminate race conditions. It also supports eventual consistency with
  rate-limiting consumers that control how often they receive state updates.
  Data in `SystemRegistry` is stored as a tree of nodes, represented by a
  nested map. In order to perform operations on the registry data, you specify
  the scope of the operation as a list of keys to walk to the desired tree node.
  """

  # A scope is a key path into the nested registry map, e.g. [:state, :a].
  @type scope :: [term]

  alias SystemRegistry.{Transaction, Registration, Processor}
  alias SystemRegistry.Storage.State, as: S
  import SystemRegistry.Utils

  @doc """
  Returns a transaction struct to pass to `update/3` and `delete/2` to chain
  modifications to to group. Prevents notifying registrants for each action.
  ## Example
      iex> SystemRegistry.transaction |> SystemRegistry.update([:a], 1) |> SystemRegistry.commit
      {:ok, {%{a: 1}, %{}}}
  """
  @spec transaction(opts :: Keyword.t()) :: Transaction.t()
  def transaction(opts \\ []) do
    Transaction.begin(opts)
  end

  @doc """
  Commit a transaction. Attempts to apply all changes. If successful, will notify all.
  """
  @spec commit(Transaction.t()) :: {:ok, {new :: map, old :: map}} | {:error, term}
  def commit(transaction) do
    # NOTE(review): Processor.Server.apply/1 appears to run registered
    # processors over the prepared transaction before it is committed —
    # confirm against SystemRegistry.Processor.Server.
    t =
      Transaction.prepare(transaction)
      |> Processor.Server.apply()

    # Only notify registrants when the commit itself succeeded; any error
    # from commit or notify is passed through unchanged.
    with {:ok, {new, _old} = delta} <- Transaction.commit(t),
         :ok <- Registration.notify(t.pid, new) do
      {:ok, delta}
    else
      error ->
        error
    end
  end

  @doc """
  Execute a transaction to insert or modify data.
  ## Examples
  `update/3` can be called on its own:
      iex> SystemRegistry.update([:a], 1)
      {:ok, {%{a: 1}, %{}}}
  or it can be included as part of a transaction pipeline:
      iex> SystemRegistry.transaction |> SystemRegistry.update([:a], 1) |> SystemRegistry.commit
      {:ok, {%{a: 1}, %{}}}
  Passing a map to update will recursively expand into a transaction
  for example this:
      iex> SystemRegistry.update([:a], %{b: 1})
      {:ok, {%{a: %{b: 1}}, %{}}}
  is equivalent to this:
      iex> SystemRegistry.update([:a, :b], 1)
      {:ok, {%{a: %{b: 1}}, %{}}}
  """
  # Bodiless head: the third argument doubles as `value` (transaction form)
  # or `opts` (standalone form), dispatched by the clauses below.
  @spec update(one, scope, value :: any) :: Transaction.t() when one: Transaction.t()
  def update(_, _, _ \\ nil)

  # Transaction-pipeline form: accumulate the update, commit later.
  def update(%Transaction{} = t, scope, value) when not is_nil(scope) do
    Transaction.update(t, scope, value)
  end

  @spec update(one, value :: any, opts :: nil | Keyword.t()) ::
          {:ok, {new :: map, old :: map}} | {:error, term}
        when one: scope
  # Standalone form: wrap the single update in its own transaction and commit.
  def update(scope, value, opts) do
    opts = opts || []

    transaction(opts)
    |> update(scope, value)
    |> commit()
  end

  @doc """
  Execute a transaction to modify data in place by passing a modifier function.
  Allows for the manipulation of the value at the scope. Useful for when the
  value needs to be modified in place.
  ## Example
      iex> SystemRegistry.update([:a], [1])
      {:ok, {%{a: [1]}, %{}}}
      iex> SystemRegistry.update_in([:a], fn(value) -> [2 | value] end)
      {:ok, {%{a: [2, 1]}, %{a: [1]}}}
  """
  @spec update_in(scope, (term -> term), opts :: keyword()) ::
          {:ok, {new :: map, old :: map}} | {:error, term}
  def update_in(scope, fun, opts \\ []) do
    t = Transaction.begin(opts)

    # Read the current value at `scope` for this transaction's key, apply
    # `fun`, then write the result back through a normal update.
    value =
      Registry.lookup(S, t.key)
      |> strip()
      |> get_in(scope)

    value = fun.(value)

    Transaction.update(t, scope, value)
    |> commit()
  end

  @doc """
  Move a node from one scope to another.
  ## Examples
  `move/3` can be called on its own:
      iex> SystemRegistry.update([:a], 1)
      {:ok, {%{a: 1}, %{}}}
      iex> SystemRegistry.move([:a], [:b])
      {:ok, {%{b: 1}, %{a: 1}}}
  or it can be included as part of a transaction pipeline:
      iex> SystemRegistry.update([:a], 1)
      {:ok, {%{a: 1}, %{}}}
      iex> SystemRegistry.transaction |> SystemRegistry.move([:a], [:b]) |> SystemRegistry.commit
      {:ok, {%{b: 1}, %{a: 1}}}
  """
  # Same dual-dispatch shape as update/3 above.
  @spec move(transaction, scope, scope) :: Transaction.t() when transaction: Transaction.t()
  def move(_, _, _ \\ nil)

  def move(%Transaction{} = t, old_scope, new_scope) when not is_nil(new_scope) do
    Transaction.move(t, old_scope, new_scope)
  end

  @spec move(scope_arg, scope, opts :: nil | Keyword.t()) ::
          {:ok, {new :: map, old :: map}} | {:error, term}
        when scope_arg: scope
  def move(old_scope, new_scope, opts) do
    opts = opts || []

    transaction(opts)
    |> move(old_scope, new_scope)
    |> commit()
  end

  @doc """
  Execute a transaction to delete keys and their values.
  ## Examples
  `delete/2` can be called on its own:
      iex> SystemRegistry.update([:a], 1)
      {:ok, {%{a: 1}, %{}}}
      iex> SystemRegistry.delete([:a])
      {:ok, {%{}, %{a: 1}}}
  or it can be included as part of a transaction pipeline:
      iex> SystemRegistry.update([:a], 1)
      {:ok, {%{a: 1}, %{}}}
      iex> SystemRegistry.transaction |> SystemRegistry.delete([:a]) |> SystemRegistry.commit
      {:ok, {%{}, %{a: 1}}}
  If you pass an internal node to `delete/2`, it will delete all the keys the process
  owns under it:
      iex> SystemRegistry.update([:a, :b], 1)
      {:ok, {%{a: %{b: 1}}, %{}}}
      iex> SystemRegistry.delete([:a])
      {:ok, {%{}, %{a: %{b: 1}}}}
  """
  # Same dual-dispatch shape as update/3 and move/3.
  @spec delete(transaction, scope) :: Transaction.t() when transaction: Transaction.t()
  def delete(_, _ \\ nil)

  def delete(%Transaction{} = t, scope) when not is_nil(scope) do
    Transaction.delete(t, scope)
  end

  @spec delete(scope, Keyword.t() | nil) :: {:ok, {new :: map, old :: map}} | {:error, term}
  def delete(scope, opts) do
    opts = opts || []

    transaction(opts)
    |> delete(scope)
    |> commit()
  end

  @doc """
  Delete all keys owned by the calling process.
  ## Example
      iex> SystemRegistry.update([:a, :b], 1)
      {:ok, {%{a: %{b: 1}}, %{}}}
      iex> SystemRegistry.delete_all()
      {:ok, {%{}, %{a: %{b: 1}}}}
  """
  @spec delete_all(pid | nil) :: {:ok, {new :: map, old :: map}} | {:error, term}
  def delete_all(pid \\ nil) do
    # Defaults to the calling process when no pid is given.
    pid = pid || self()

    Transaction.begin()
    |> Transaction.delete_all(pid, pid)
    |> commit()
  end

  @doc """
  Query the `SystemRegistry` using a match spec.
  ## Examples
      iex> SystemRegistry.update([:a, :b], 1)
      {:ok, {%{a: %{b: 1}}, %{}}}
      iex> SystemRegistry.match(self(), :_)
      %{a: %{b: 1}}
      iex> SystemRegistry.match(self(), %{a: %{}})
      %{a: %{b: 1}}
      iex> SystemRegistry.match(self(), %{a: %{b: 2}})
      %{}
  """
  @spec match(key :: term, match_spec :: term) :: map
  def match(key \\ :global, match_spec) do
    value = Registry.match(S, key, match_spec) |> strip()

    # Registry.match returns a list; normalize "no match" to an empty map.
    case value do
      [] -> %{}
      value -> value
    end
  end

  @doc """
  Register process to receive notifications.
  Registrants are rate-limited and require that you pass an interval.
  Upon registration, the caller will receive the current state.
  ## Options
  * `:hysteresis` - The amount of time to wait before delivering the first
    change message. Default: `0`
  * `:min_interval` - The minimum amount of time to wait after hysteresis,
    but before the next message is to be delivered. Default: `0`
  With both options defaulting to `0`, you will receive every message.
  ## Examples
      iex> mailbox = fn ->
      ...>   receive do
      ...>     msg -> msg
      ...>   after
      ...>     5 -> nil
      ...>   end
      ...> end
      iex> SystemRegistry.register()
      :ok
      iex> mailbox.()
      {:system_registry, :global, %{}}
      iex> SystemRegistry.update([:state, :a], 1)
      {:ok, {%{state: %{a: 1}}, %{}}}
      iex> :timer.sleep(50)
      :ok
      iex> mailbox.()
      {:system_registry, :global, %{state: %{a: 1}}}
      iex> SystemRegistry.unregister()
      :ok
      iex> mailbox.()
      nil
      iex> SystemRegistry.delete_all()
      {:ok, {%{}, %{state: %{a: 1}}}}
      iex> SystemRegistry.register(hysteresis: 10, min_interval: 50)
      :ok
      iex> mailbox.()
      iex> SystemRegistry.update([:state, :a], 2)
      {:ok, {%{state: %{a: 2}}, %{}}}
      iex> :timer.sleep(1)
      iex> mailbox.()
      nil
      iex> :timer.sleep(15)
      iex> mailbox.()
      {:system_registry, :global, %{state: %{a: 2}}}
      iex> SystemRegistry.update([:state, :a], 3)
      {:ok, {%{state: %{a: 3}}, %{state: %{a: 2}}}}
      iex> mailbox.()
      nil
      iex> :timer.sleep(50)
      :ok
      iex> mailbox.()
      {:system_registry, :global, %{state: %{a: 3}}}
  """
  @spec register(opts :: keyword) :: :ok | {:error, term}
  def register(opts \\ []) do
    # `:key` selects which registry key to observe; defaults to :global.
    key = opts[:key] || :global

    case Registration.registered?(key) do
      true -> {:error, :already_registered}
      false -> Registration.register(opts)
    end
  end

  @doc """
  Unregister process from receiving notifications.
  """
  @spec unregister(key :: term) :: :ok | {:error, term}
  def unregister(key \\ :global) do
    Registration.unregister(key)
  end

  @doc """
  Unregister process from receiving notifications.
  """
  @spec unregister_all(pid | nil) :: :ok | {:error, term}
  def unregister_all(pid \\ nil) do
    Registration.unregister_all(pid || self())
  end
end
|
lib/system_registry.ex
| 0.90556 | 0.513973 |
system_registry.ex
|
starcoder
|
defmodule Kuddle.Path do
  @moduledoc """
  Utility module for looking up nodes in a document.
  Usage:
      nodes = Kuddle.select(document, path)
      [{:node, "node", attrs, children}] = Kuddle.select(document, ["node"])
  """
  alias Kuddle.Value
  alias Kuddle.Node

  @typedoc """
  A Kuddle document is a list of nodes, nothing fancy.
  """
  @type document :: Kuddle.Decoder.document()

  @typedoc """
  A single node in a document
  """
  @type document_node :: Kuddle.Decoder.document_node()

  @typedoc """
  Node names are strings
  """
  @type node_name :: String.t()

  @typedoc """
  An attribute key (i.e. %Value{}) can be anything, normally it will be an id or string though
  """
  @type attr_key :: any()

  @typedoc """
  An attribute value can be anything
  """
  @type attr_value :: any()

  @type attribute_path :: {:attr, attr_key()}
                        | {:attr, attr_key(), attr_value()}
                        | {:value, attr_value()}

  @typedoc """
  In addition to the attribute_path, node attributes can also use shorthands for
  `{:attr, key, value}` and `{:value, value}`, as `{key, value}` and `value` respectively.
  """
  @type node_attributes :: [attribute_path() | {any(), any()} | any()]

  @typedoc """
  Any single path selector
  """
  @type selector :: node_name()
                  | attribute_path()
                  | {:node, node_name()}
                  | {:node, node_name(), node_attributes()}

  @typedoc """
  A path is a list of selectors that should be used when matching against the document.
  It allows different fragments which can be used to match different properties of the node.
  Fragments:
  * `node_name` - the node name can be passed as a plain string in the path to select a node based on its name
    Example:
        [%Kuddle.Node{name: "node"}] = Kuddle.select(document, ["node"])
        [] = Kuddle.select(document, ["doesnt_exist"])
  * `{:attr, key}` - a node with an attribute key can be looked up as well, this will ignore the
    value and only look for key value pairs with the key
    Example:
        [%Kuddle.Node{attributes: [{%{value: "id"}, _value}]}] = Kuddle.select(document, [{:attr, "id"}])
        [] = Kuddle.select(document, [{:attr, "cid"}])
  * `{:attr, key, value}` - an attribute of key and value can be looked up as well
    Example:
        [%Kuddle.Node{attributes: [{%{value: "id"}, %{value: "egg"}}]}] = Kuddle.select(document, [{:attr, "id", "egg"}])
        [] = Kuddle.select(document, [{:attr, "cid", "8847"}])
  * `{:value, value}` - for nodes with normal values, the loose value can be looked up as well
    Example:
        [%Kuddle.Node{attributes: [%{value: 1}]}] = Kuddle.select(document, [{:value, 1}])
        [] = Kuddle.select(document, [{:value, 2}])
  * `{:node, node_name}` - equivalent to just providing the `node_name`
    Example:
        [%Kuddle.Node{name: "node"}] = Kuddle.select(document, [{:node, "node"}])
        [] = Kuddle.select(document, [{:node, "doesnt_exist"}])
  * `{:node, node_name, attrs}` - lookup a node with attributes
    Example:
        [%Kuddle.Node{name: "node", attributes: [1]}] = Kuddle.select(document, [{:node, "node", [1]}])
        [%Kuddle.Node{name: "node", attributes: [1]}] = Kuddle.select(document, [{:node, "node", [{:value, 1}]}])
        [%Kuddle.Node{name: "node2", attributes: [{%{value: "id"}, _value}]}] = Kuddle.select(document, [{:node, "node2", [{:attr, "id"}]}])
        [%Kuddle.Node{name: "node3", attributes: [{%{value: "id"}, %{value: "bacon"}}]}] = Kuddle.select(document, [{:node, "node3", [{:attr, "id", "bacon"}]}])
        [%Kuddle.Node{name: "node3", attributes: [{%{value: "id"}, %{value: "bacon"}}]}] = Kuddle.select(document, [{:node, "node3", [{"id", "bacon"}]}])
        [] = Kuddle.select(document, [{:node, "node3", [{"id", "fries"}]}])
  """
  @type path :: [selector()]

  @doc """
  Select nodes from the given kuddle document, see the path type for the supported selectors
  Args:
  * `document` - the document to lookup, or nil
  * `path` - the selectors to use when looking up the nodes
  * `acc` - the current accumulator, defaults to an empty list
  """
  @spec select(nil | document(), path(), list()) :: document()
  def select(document, path, acc \\ [])

  # nil document: nothing to scan, return what was accumulated (in order).
  def select(nil, _, acc) do
    Enum.reverse(acc)
  end

  # Document exhausted and no selectors left: done.
  def select([], [], acc) do
    Enum.reverse(acc)
  end

  # Path exhausted but items remain: every remaining item is a match.
  def select([item | rest], [] = path, acc) do
    select(rest, path, [item | acc])
  end

  # Main clause: test the current node against the head selector, then also
  # recurse into its children with the same selector (depth-first search),
  # before moving on to the node's siblings.
  def select([%Node{children: children} = node | rest], [expected | _path] = path, acc) do
    acc =
      if match_node?(node, expected) do
        [node | acc]
      else
        acc
      end

    acc = select(children, [expected], acc)
    select(rest, path, acc)
  end

  # One selector fully scanned: re-run the next selector over the nodes
  # matched so far (reversed back into document order).
  def select([], [_expected | path], acc) do
    select(Enum.reverse(acc), path, [])
  end

  @spec match_node?(document_node(), selector()) :: boolean()
  # {:attr, key}: node matches if any attribute pair has the key.
  defp match_node?(%Node{attributes: attrs}, {:attr, _key} = attr) do
    Enum.any?(attrs, &match_attr?(&1, attr))
  end

  # {:attr, key, value}: node matches if any attribute pair has both.
  defp match_node?(%Node{attributes: attrs}, {:attr, _key, _value} = attr) do
    Enum.any?(attrs, &match_attr?(&1, attr))
  end

  # {:node, name}: match on the node name alone.
  defp match_node?(%Node{name: name}, {:node, name}) do
    true
  end

  # {:node, name, attrs}: name already matched via the head pattern; every
  # entry in `expected_attrs` must match, with shorthands normalized here.
  defp match_node?(%Node{name: name} = node, {:node, name, expected_attrs}) do
    Enum.all?(expected_attrs, fn
      {:attr, _} = attr ->
        match_node?(node, attr)

      {:attr, _, _} = attr ->
        match_node?(node, attr)

      {:value, _} = attr ->
        match_node?(node, attr)

      {key, value} ->
        match_node?(node, {:attr, key, value})

      value ->
        match_node?(node, {:value, value})
    end)
  end

  # {:value, value}: node matches if any loose attribute value matches.
  defp match_node?(%Node{attributes: attrs}, {:value, _value} = attr) do
    Enum.any?(attrs, &match_attr?(&1, attr))
  end

  # Bare string selector: match on node name.
  defp match_node?(%Node{name: name}, name) do
    true
  end

  # Anything else does not match.
  defp match_node?(%Node{}, _) do
    false
  end

  # A bare %Value{} is a loose value, not a key-value pair, so it can never
  # satisfy an :attr selector.
  defp match_attr?(%Value{}, {:attr, _key}) do
    false
  end

  defp match_attr?(%Value{}, {:attr, _key, _value}) do
    false
  end

  defp match_attr?(%Value{value: value}, {:value, value}) do
    true
  end

  defp match_attr?(%Value{}, {:value, _}) do
    false
  end

  # Conversely, a key-value pair never satisfies a :value selector.
  defp match_attr?({_key, _value}, {:value, _}) do
    false
  end

  defp match_attr?({%Value{value: key}, _value}, {:attr, expected_key}) do
    key == expected_key
  end

  defp match_attr?({%Value{value: key}, %Value{value: value}}, {:attr, expected_key, expected_value}) do
    key == expected_key and
      value == expected_value
  end
end
|
lib/kuddle/path.ex
| 0.868708 | 0.583322 |
path.ex
|
starcoder
|
defmodule Dogma.Script do
  @moduledoc """
  This module provides the struct that we use to represent source files, their
  abstract syntax tree, etc, as well as a few convenient functions for working
  with them.
  """

  defmodule InvalidScriptError do
    @moduledoc "An exception that can raised when source has invalid syntax."
    defexception [:message]

    def exception(script) do
      %__MODULE__{ message: "Invalid syntax in #{script.path}" }
    end
  end

  alias Dogma.Script
  alias Dogma.Script.Metadata
  alias Dogma.Error
  alias Dogma.Util.ScriptSigils
  alias Dogma.Util.ScriptStrings
  alias Dogma.Util.Lines

  # processed_source/processed_lines hold the source with sigils and string
  # contents stripped (see add_processed_source/1); errors collects %Error{}s.
  defstruct path: nil,
            source: nil,
            lines: nil,
            processed_source: nil,
            processed_lines: nil,
            ast: nil,
            tokens: [],
            valid?: false,
            comments: [],
            ignore_index: %{},
            errors: []

  @doc """
  Builds a Script struct from the given source code and path
  """
  def parse(source, path) do
    script = %Script{
      path: path,
      source: source,
    } |> add_ast

    # Only valid scripts get the expensive enrichment steps; invalid ones
    # carry just the syntax error recorded by add_ast/1.
    if script.valid? do
      script
      |> add_processed_source
      |> add_tokens
      |> add_lines
      |> Metadata.add
    else
      script
    end
  end

  @doc """
  Builds a Script struct from the given source code and path.
  Raises an exception if the source is invalid.
  """
  def parse!(source, path) do
    script = parse( source, path )
    if script.valid? do
      script
    else
      raise InvalidScriptError, script
    end
  end

  # Parses the source into an AST; on failure marks the script invalid and
  # records the syntax error via error/1.
  defp add_ast(script) do
    case Code.string_to_quoted( script.source, line: 1 ) do
      {:ok, ast} ->
        %Script{ script | valid?: true, ast: ast }
      err ->
        %Script{ script | valid?: false, errors: [error( err )] }
    end
  end

  defp add_tokens(script) do
    if script.valid? do
      tokens = script.source |> tokenize
      %Script{ script | tokens: tokens }
    else
      script
    end
  end

  # Strips sigil and string literal contents so later passes don't trip on
  # code-like text inside them.
  defp add_processed_source(script) when is_map(script) do
    processed = script.source |> ScriptSigils.strip |> ScriptStrings.strip
    %{ script | processed_source: processed }
  end

  defp add_lines(script) when is_map(script) do
    lines = Lines.get( script.source )
    pro = Lines.get( script.processed_source )
    %{ script | lines: lines, processed_lines: pro }
  end

  defp tokenize(source) do
    result =
      source
      |> String.to_char_list
      |> :elixir_tokenizer.tokenize( 1, [] )

    # The tokenizer's success tuple changed shape between Elixir versions.
    case result do
      {:ok, _, tokens}    -> tokens # Elixir 1.0.x
      {:ok, _, _, tokens} -> tokens # Elixir 1.1.x
    end
  end

  # Extracts a line number from either a tokenizer position tuple or a bare
  # integer.
  def line({line, _, _}), do: line
  def line(line) when is_integer(line), do: line

  @doc """
  Walks the AST with `Macro.prewalk/3`, calling the given `fun/2` on each
  node.
  script.errors is used as an accumulator for `fun/2`, the script with the new
  errors is returned.
  `fun/2` should take a ast and the errors accumulator as arguments, and
  return {ast, errors}
  """
  def walk(script, fun) do
    {_, errors} = Macro.prewalk( script.ast, [], fun )
    errors
  end

  # Converts a Code.string_to_quoted error into a Dogma %Error{}.
  # NOTE(review): the reported line is shifted down by one — presumably to
  # compensate for the `line: 1` offset above; confirm before relying on it.
  defp error({:error, {pos, err, token}}) do
    %Error{
      rule: SyntaxError,
      message: error_message(err, token),
      line: line(pos) - 1,
    }
  end

  # Some tokenizer errors arrive as a {prefix, suffix} pair around the token.
  defp error_message({prefix, suffix}, token) do
    "#{prefix}#{token}#{suffix}"
  end

  defp error_message(err, token) do
    "#{err}#{token}"
  end
end
|
lib/dogma/script.ex
| 0.638046 | 0.431704 |
script.ex
|
starcoder
|
defmodule Membrane.FFmpeg.SWScale.Scaler do
  @moduledoc """
  This element performs video scaling, using SWScale module of FFmpeg library.
  There are two options that have to be specified when creating Scaler:
  - `output_width` - desired scaled video width.
  - `output_height` - desired scaled video height.
  Both need to be even numbers.
  Scaling consists of two operations:
  - scaling itself - resizing video frame with keeping original ratio. After that operation at least one of the dimensions of the input frame match the respective dimension of the desired output size. The second one (if does not match) is smaller than its respective dimension.
  - adding paddings - if one dimension does not match after scaling, paddings have to be added. They are put on both sides of the scaled frame equally. They are either above and below the frame or on the left and right sides of it. It depends on the dimension that did not match after scaling.
  Scaler needs input in the YUV420p format, processes one frame at a time and requires getting caps with input video
  width and height. To meet all requirements either `Membrane.Element.RawVideo.Parser` or some decoder
  (e.g. `Membrane.H264.FFmpeg.Decoder`) have to precede Scaler in the pipeline.
  The output of the element is also in the YUV420p format. It has the size as specified in the options. All
  caps except for width and height are passed unchanged to the next element in the pipeline.
  """
  use Membrane.Filter

  alias __MODULE__.Native
  alias Membrane.Buffer
  alias Membrane.Caps.Video.Raw

  def_options output_width: [
                type: :int,
                description: "Width of the scaled video."
              ],
              output_height: [
                type: :int,
                description: "Height of the scaled video."
              ],
              use_shm?: [
                type: :boolean,
                # Fixed: this key was previously misspelt `desciption:`,
                # so the option's description was silently ignored.
                description:
                  "If true, native scaler will use shared memory (via `t:Shmex.t/0`) for storing frames",
                default: false
              ]

  def_input_pad :input,
    demand_unit: :buffers,
    caps: {Raw, format: :I420, aligned: true}

  def_output_pad :output,
    caps: {Raw, format: :I420, aligned: true}

  @impl true
  def handle_init(options) do
    # native_state is created lazily in handle_caps/4 once the input
    # dimensions are known.
    state =
      options
      |> Map.from_struct()
      |> Map.put(:native_state, nil)

    {:ok, state}
  end

  @impl true
  def handle_demand(:output, size, :buffers, _context, state) do
    # One output buffer requires exactly one input buffer.
    {{:ok, demand: {:input, size}}, state}
  end

  @impl true
  def handle_process(:input, _buffer, _context, %{native_state: nil} = _state) do
    # A frame before caps means the native scaler was never initialized.
    raise(RuntimeError, "uninitialized state: Scaler did not receive caps")
  end

  def handle_process(
        :input,
        %Buffer{payload: payload} = buffer,
        _context,
        %{native_state: native_state, use_shm?: use_shm?} = state
      ) do
    # Scale a single frame, keeping all buffer metadata and replacing only
    # the payload.
    case Native.scale(payload, use_shm?, native_state) do
      {:ok, frame} ->
        buffer = [buffer: {:output, %{buffer | payload: frame}}]
        {{:ok, buffer}, state}

      {:error, reason} ->
        {{:error, reason}, state}
    end
  end

  @impl true
  def handle_caps(:input, %Raw{width: width, height: height} = caps, _context, state) do
    # (Re)create the native scaler for the incoming dimensions and forward
    # caps with the configured output size.
    case Native.create(width, height, state.output_width, state.output_height) do
      {:ok, native_state} ->
        caps = %{caps | width: state.output_width, height: state.output_height}
        state = %{state | native_state: native_state}
        {{:ok, caps: {:output, caps}}, state}

      {:error, reason} ->
        raise(RuntimeError, reason)
    end
  end

  @impl true
  def handle_end_of_stream(:input, _context, state) do
    {{:ok, end_of_stream: :output, notify: {:end_of_stream, :input}}, state}
  end

  @impl true
  def handle_prepared_to_stopped(_context, state) do
    # Drop the native state reference so its resources can be collected.
    {:ok, %{state | native_state: nil}}
  end
end
|
lib/membrane_ffmpeg_swscale/scaler.ex
| 0.92427 | 0.643665 |
scaler.ex
|
starcoder
|
defmodule Sketch.Graph do
@moduledoc """
Provides functions to model graph-like structures.
"""
defstruct nodes: %{}, in_edges: %{}, out_edges: %{}
@doc """
Creates an empty graph
## Examples
iex> Sketch.Graph.new
...> |> Sketch.Graph.nodes
[]
"""
def new, do: %__MODULE__{}
@doc """
Adds/Updates a node in the graph
## Examples
iex> g = Sketch.Graph.new
...> |> Sketch.Graph.put_node("a")
iex> g |> Sketch.Graph.nodes
["a"]
iex> g = Sketch.Graph.new
...> |> Sketch.Graph.put_node("a", :data)
iex> g |> Sketch.Graph.nodes(data: true)
%{"a" => :data}
iex> g
...> |> Sketch.Graph.put_node("a", :new_data)
...> |> Sketch.Graph.nodes(data: true)
%{"a" => :new_data}
"""
def put_node(%__MODULE__{nodes: nodes} = graph, node_id, node_data \\ nil) do
%{graph | nodes: Map.put(nodes, node_id, node_data)}
end
@doc """
Adds/Updates a list of nodes in the graph
## Examples
iex> g = Sketch.Graph.new
...> |> Sketch.Graph.put_nodes(["a", "b", "c"])
iex> g |> Sketch.Graph.nodes
["a", "b", "c"]
iex> g = Sketch.Graph.new
...> |> Sketch.Graph.put_nodes([{"a", :a_data}, {"b", :b_data}])
iex> g |> Sketch.Graph.nodes(data: true)
%{"a" => :a_data, "b" => :b_data}
iex> g
...> |> Sketch.Graph.put_nodes([{"a", :a_new_data}, {"b", :b_new_data}])
...> |> Sketch.Graph.nodes(data: true)
%{"a" => :a_new_data, "b" => :b_new_data}
"""
def put_nodes(%__MODULE__{} = graph, nodes) do
nodes
|> Enum.reduce(graph, fn
{node_id, node_data}, g -> put_node(g, node_id, node_data)
node_id, g -> put_node(g, node_id)
end)
end
@doc """
Adds a node in the graph only if it doesn't exist yet
## Examples
iex> g = Sketch.Graph.new
...> |> Sketch.Graph.add_node("a")
iex> g |> Sketch.Graph.nodes
["a"]
iex> g = Sketch.Graph.new
...> |> Sketch.Graph.add_node("a", :data)
iex> g |> Sketch.Graph.nodes(data: true)
%{"a" => :data}
iex> g
...> |> Sketch.Graph.add_node("a", :new_data)
...> |> Sketch.Graph.nodes(data: true)
%{"a" => :data}
"""
def add_node(%__MODULE__{} = graph, node_id, node_data \\ nil) do
if has_node?(graph, node_id), do: graph, else: put_node(graph, node_id, node_data)
end
@doc """
Adds a list of nodes in the graph only if they don't exist yet
## Examples
iex> g = Sketch.Graph.new
...> |> Sketch.Graph.add_nodes(["a", "b", "c"])
iex> g |> Sketch.Graph.nodes
["a", "b", "c"]
iex> g = Sketch.Graph.new
...> |> Sketch.Graph.add_nodes([{"a", :a_data}, {"b", :b_data}])
iex> g |> Sketch.Graph.nodes(data: true)
%{"a" => :a_data, "b" => :b_data}
iex> g
...> |> Sketch.Graph.add_nodes([{"a", :a_new_data}, {"b", :b_new_data}])
...> |> Sketch.Graph.nodes(data: true)
%{"a" => :a_data, "b" => :b_data}
"""
def add_nodes(%__MODULE__{} = graph, nodes) do
nodes
|> Enum.reduce(graph, fn
{node_id, node_data}, g -> add_node(g, node_id, node_data)
node_id, g -> add_node(g, node_id)
end)
end
@doc """
Checks if a node is part of the graph
## Examples
iex> g = Sketch.Graph.new
...> |> Sketch.Graph.add_node("a")
iex> g |> Sketch.Graph.has_node?("a")
true
iex> g |> Sketch.Graph.has_node?("b")
false
"""
def has_node?(%__MODULE__{nodes: nodes}, node_id) do
nodes
|> Map.has_key?(node_id)
end
@doc """
Get list of nodes of the graph
## Examples
iex> g = Sketch.Graph.new
...> |> Sketch.Graph.add_node("a")
iex> g |> Sketch.Graph.nodes
["a"]
iex> g = Sketch.Graph.new
...> |> Sketch.Graph.add_node("a", :data)
iex> g |> Sketch.Graph.nodes(data: true)
%{"a" => :data}
"""
def nodes(%__MODULE__{nodes: nodes}) do
nodes |> Map.keys
end
def nodes(%__MODULE__{nodes: nodes}, data: true) do
nodes
end
@doc """
Add an edge to the graph. If any of the nodes are not in the graph,
it adds them
## Examples
    iex> g = Sketch.Graph.new
    ...> |> Sketch.Graph.add_edge("a", "b")
    iex> g |> Sketch.Graph.nodes
    ["a", "b"]
    iex> g |> Sketch.Graph.edges
    [{"a", "b"}]
    iex> g = Sketch.Graph.new
    ...> |> Sketch.Graph.add_edge({"a", :a_data}, {"b", :b_data}, :edge_data)
    iex> g |> Sketch.Graph.nodes(data: true)
    %{"a" => :a_data, "b" => :b_data}
    iex> g |> Sketch.Graph.edges(data: true)
    %{{"a", "b"} => :edge_data}
"""
def add_edge(%__MODULE__{} = graph, node_a, node_b, edge_data \\ nil) do
  # Nodes may arrive as bare ids or {id, data} tuples.
  {a_id, a_data} = extract(node_a)
  {b_id, b_data} = extract(node_b)

  # Ensure both endpoints exist, then record the edge in both adjacency maps
  # (outgoing from a, incoming at b).
  graph_with_nodes =
    graph
    |> add_node(a_id, a_data)
    |> add_node(b_id, b_data)

  graph_with_nodes
  |> add_out_edge(a_id, b_id, edge_data)
  |> add_in_edge(b_id, a_id, edge_data)
end
@doc """
Add a list of edges to the graph. If any of the nodes are not in the graph,
it adds them
## Examples
    iex> g = Sketch.Graph.new
    ...> |> Sketch.Graph.add_edges([{"a", "b"}, {"b", "c"}])
    iex> g |> Sketch.Graph.nodes
    ["a", "b", "c"]
    iex> g |> Sketch.Graph.edges
    [{"a", "b"}, {"b", "c"}]
    iex> g = Sketch.Graph.new
    ...> |> Sketch.Graph.add_edges([
    ...>      {{"a", :a_data}, {"b", :b_data}, :a_b_data},
    ...>      {"b", {"c", :c_data}, :b_c_data}
    ...>    ])
    iex> g |> Sketch.Graph.nodes(data: true)
    %{"a" => :a_data, "b" => :b_data, "c" => :c_data}
    iex> g |> Sketch.Graph.edges(data: true)
    %{{"a", "b"} => :a_b_data, {"b", "c"} => :b_c_data}
"""
def add_edges(%__MODULE__{} = graph, edges) do
  # Edges come as either {from, to} or {from, to, edge_data}.
  Enum.reduce(edges, graph, fn
    {from, to}, acc -> add_edge(acc, from, to)
    {from, to, data}, acc -> add_edge(acc, from, to, data)
  end)
end
@doc """
Test if two nodes are connected.
## Examples
    iex> g = Sketch.Graph.new
    ...> |> Sketch.Graph.add_edges([{"a", "b"}, {"b", "c"}])
    iex> g |> Sketch.Graph.are_connected?("a", "b")
    true
    iex> g |> Sketch.Graph.are_connected?("b", "a")
    false
    iex> g |> Sketch.Graph.are_connected?("a", "c")
    false
"""
def are_connected?(%__MODULE__{out_edges: out_edges}, id_a, id_b) do
  # Directed check: only an edge a -> b counts, so we consult the
  # outgoing adjacency map of `id_a`.
  case Map.fetch(out_edges, id_a) do
    {:ok, adjacent} -> Map.has_key?(adjacent, id_b)
    :error -> false
  end
end
@doc """
Get list of edges of the graph.
## Examples
    iex> g = Sketch.Graph.new
    ...> |> Sketch.Graph.add_edge("a", "b")
    iex> g |> Sketch.Graph.nodes
    ["a", "b"]
    iex> g |> Sketch.Graph.edges
    [{"a", "b"}]
    iex> g = Sketch.Graph.new
    ...> |> Sketch.Graph.add_edge({"a", :a_data}, {"b", :b_data}, :edge_data)
    iex> g |> Sketch.Graph.nodes(data: true)
    %{"a" => :a_data, "b" => :b_data}
    iex> g |> Sketch.Graph.edges(data: true)
    %{{"a", "b"} => :edge_data}
"""
# With `data: true`, returns a %{{from, to} => edge_data} map.
# Fix: dropped the unused `graph` binding (compiler warning) and replaced the
# quadratic `list ++ comprehension` reduce with a single flat_map pass.
def edges(%__MODULE__{out_edges: out_edges}, data: true) do
  out_edges
  |> Enum.flat_map(fn {from_id, adjacent} ->
    for {to_id, data} <- adjacent, do: {{from_id, to_id}, data}
  end)
  |> Map.new()
end

# Without options, returns just the {from, to} pairs.
# Fix: dropped the unused `edges` binding from the pattern (compiler warning).
def edges(%__MODULE__{} = graph) do
  graph
  |> edges(data: true)
  |> Map.keys()
end
# Records the directed edge `from -> to` (with its data) in the outgoing
# adjacency map, creating the inner map on first use.
defp add_out_edge(%__MODULE__{out_edges: out_edges} = graph, from, to, edge_data) do
  updated = Map.update(out_edges, from, %{to => edge_data}, &Map.put(&1, to, edge_data))
  %{graph | out_edges: updated}
end
# Records the reverse direction (`at` is reached from `from`) in the incoming
# adjacency map, creating the inner map on first use.
defp add_in_edge(%__MODULE__{in_edges: in_edges} = graph, at, from, edge_data) do
  updated = Map.update(in_edges, at, %{from => edge_data}, &Map.put(&1, from, edge_data))
  %{graph | in_edges: updated}
end
# Normalizes a node argument to an {id, data} pair; bare ids get nil data.
defp extract(node) do
  case node do
    {id, data} -> {id, data}
    id -> {id, nil}
  end
end
end
|
lib/sketch/graph.ex
| 0.827236 | 0.548915 |
graph.ex
|
starcoder
|
defmodule KitchenCalculator do
  @moduledoc """
  Converts cooking volumes between units, using the millilitre as the
  common intermediate unit.
  """

  # Millilitres contained in one unit of each non-millilitre measure.
  @ml_per_unit %{cup: 240, fluid_ounce: 30, teaspoon: 5, tablespoon: 15}

  @spec get_volume(tuple) :: any
  def get_volume(volume_pair), do: elem(volume_pair, 1)

  @spec to_milliliter(
          {:cup, number}
          | {:fluid_ounce, number}
          | {:milliliter, number}
          | {:tablespoon, number}
          | {:teaspoon, number}
        ) :: {:milliliter, any}
  def to_milliliter(volume_pair), do: convert_to_milliliter(volume_pair)

  # Millilitre input passes through untouched (keeps integer volumes intact).
  defp convert_to_milliliter({:milliliter, volume}), do: {:milliliter, volume}

  defp convert_to_milliliter({unit, volume}) do
    {:milliliter, volume * Map.fetch!(@ml_per_unit, unit)}
  end

  @spec from_milliliter(any, :cup | :fluid_ounce | :milliliter | :tablespoon | :teaspoon) :: any
  def from_milliliter(volume_pair, unit), do: convert_from_milliliter(volume_pair, unit)

  # Millilitre target returns the pair unchanged (no float coercion).
  defp convert_from_milliliter(volume_pair, :milliliter), do: volume_pair

  defp convert_from_milliliter(volume_pair, unit) do
    {unit, get_volume(volume_pair) / Map.fetch!(@ml_per_unit, unit)}
  end

  @spec convert(
          {:cup, number}
          | {:fluid_ounce, number}
          | {:milliliter, any}
          | {:tablespoon, number}
          | {:teaspoon, number},
          :cup | :fluid_ounce | :milliliter | :tablespoon | :teaspoon
        ) :: any
  def convert(volume_pair, unit) do
    volume_pair
    |> convert_to_milliliter()
    |> convert_from_milliliter(unit)
  end
end
|
kitchen-calculator/lib/kitchen_calculator.ex
| 0.770637 | 0.623076 |
kitchen_calculator.ex
|
starcoder
|
defmodule Xgit.Ref do
  @moduledoc ~S"""
  A reference is a struct that describes a mutable pointer to a commit or similar object.
  A reference is a key-value pair where the key is a name in a specific format
  (see [`git check-ref-format`](https://git-scm.com/docs/git-check-ref-format))
  and the value (`:target`) is either a SHA-1 hash or a reference to another reference key
  (i.e. `ref: (name-of-valid-ref)`).
  This structure contains the key-value pair and functions to validate both values.
  """
  import Xgit.Util.ForceCoverage

  alias Xgit.ObjectId

  @typedoc ~S"""
  Name of a ref (typically `refs/heads/master` or similar).
  """
  @type name :: String.t()

  @typedoc ~S"""
  Target for a ref. Can be either an `Xgit.ObjectId` or a string of the form
  `"ref: refs/..."`.
  """
  @type target :: ObjectId.t() | String.t()

  @typedoc ~S"""
  This struct describes a single reference stored or about to be stored in a git
  repository.
  ## Struct Members
  * `:name`: the name of the reference (typically `refs/heads/master` or similar)
  * `:target`: the object ID currently marked by this reference or a symbolic link
    (`ref: refs/heads/master` or similar) to another reference
  * `:link_target`: the name of another reference which is targeted by this ref
  """
  @type t :: %__MODULE__{
          name: name(),
          target: target(),
          link_target: name() | nil
        }

  @enforce_keys [:name, :target]
  defstruct [:name, :target, :link_target]

  @doc ~S"""
  Return `true` if the string describes a valid reference name.
  """
  @spec valid_name?(name :: any) :: boolean
  def valid_name?(name) when is_binary(name), do: valid_name?(name, false, false)
  def valid_name?(_), do: cover(false)

  @doc ~S"""
  Return `true` if the struct describes a valid reference.
  ## Options
  `allow_one_level?`: Set to `true` to disregard the rule requiring at least one `/`
  in name. (Similar to `--allow-onelevel` option.)
  `refspec?`: Set to `true` to allow a single `*` in the pattern. (Similar to
  `--refspec-pattern` option.)
  """
  @spec valid?(ref :: any, allow_one_level?: boolean, refspec?: boolean) :: boolean
  def valid?(ref, opts \\ [])

  # Fix: the three conditions must live in ONE guard joined by `and`. The
  # original used two separate `when` clauses, which Elixir combines with OR —
  # so any struct paired with a (default, always-list) opts matched this
  # clause even when `name`/`target` were not binaries, and the String calls
  # in `valid_name?/3` raised instead of falling through to the
  # `valid?(_, _opts) -> false` catch-all below.
  def valid?(%__MODULE__{name: name, target: target} = ref, opts)
      when is_binary(name) and is_binary(target) and is_list(opts) do
    valid_name?(
      name,
      Keyword.get(opts, :allow_one_level?, false),
      Keyword.get(opts, :refspec?, false)
    ) && valid_target?(target) &&
      valid_name_or_nil?(Map.get(ref, :link_target))
  end

  def valid?(_, _opts), do: cover(false)

  # A symbolic link target is optional, but when present it must itself be a
  # valid name under `refs/`.
  defp valid_name_or_nil?(nil), do: cover(true)
  defp valid_name_or_nil?("refs/" <> _ = target_name), do: cover(valid_name?(target_name))
  defp valid_name_or_nil?(_), do: cover(false)

  defp valid_name?("@", _, _), do: cover(false)
  defp valid_name?("HEAD", _, _), do: cover(true)

  # One-level names: every `/`-separated component must be valid, no control
  # or forbidden characters, no trailing ".", no "@{" sequence.
  defp valid_name?(name, true, false) do
    all_components_valid?(name) && not Regex.match?(~r/[\x00-\x20\\\?\[:^\x7E\x7F]/, name) &&
      not String.ends_with?(name, ".") && not String.contains?(name, "@{")
  end

  # Default mode additionally requires at least one `/` and bans `*`.
  defp valid_name?(name, false, false) do
    String.contains?(name, "/") && valid_name?(name, true, false) &&
      not String.contains?(name, "*")
  end

  # Refspec mode permits a single `*` wildcard.
  defp valid_name?(name, false, true) do
    String.contains?(name, "/") && valid_name?(name, true, false) && at_most_one_asterisk?(name)
  end

  defp all_components_valid?(name) do
    name
    |> String.split("/")
    |> Enum.all?(&name_component_valid?/1)
  end

  defp name_component_valid?(component), do: not name_component_invalid?(component)

  defp name_component_invalid?(""), do: cover(true)

  defp name_component_invalid?(component) do
    String.starts_with?(component, ".") ||
      String.ends_with?(component, ".lock") ||
      String.contains?(component, "..")
  end

  @asterisk_re ~r/\*/

  defp at_most_one_asterisk?(name) do
    @asterisk_re
    |> Regex.scan(name)
    |> Enum.count()
    |> Kernel.<=(1)
  end

  # A target is either a literal object ID or a symbolic "ref: refs/..." link.
  defp valid_target?(target), do: ObjectId.valid?(target) || valid_ref_target?(target)

  defp valid_ref_target?("ref: " <> target_name),
    do: valid_name?(target_name, false, false) && String.starts_with?(target_name, "refs/")

  defp valid_ref_target?(_), do: cover(false)
end
|
lib/xgit/ref.ex
| 0.909551 | 0.764672 |
ref.ex
|
starcoder
|
defmodule PipeTo do
  @moduledoc """
  A pipe operator (`~>`) that pipes the left-hand value into an explicit
  placeholder position (`_`) on the right-hand side, falling back to
  first-argument piping (like `|>`) when no placeholder is given.
  """

  @doc """
  PipeTo operator.
  This operator will replace the placeholder argument `_` in the right-hand
  side function call with left-hand side expression.
  ### Examples
      iex> 1 ~> Enum.at(1..3, _)
      2
  It can mix with `|>` operation
  ### Examples
      iex> 1 ~> Enum.at(1..3, _) |> Kernel.*(5)
      10
  When using ~> withou placeholder `_`, it act just like `|>` pipe operator.
  ### Examples
      iex> [1, 2, 3] ~> Enum.take(2)
      [1, 2]
  You can also specify multiple target positions.
  ### Examples
      iex> 10 ~> Kernel.+(_, _)
      20
  """
  defmacro left ~> right do
    # `h` is the initial value of the chain; `t` is the list of
    # {call_ast, insertion_positions} steps that follow it.
    [{h, _} | t] = __MODULE__.unpipe({:~>, [], [left, right]})
    # Basically follows `lib/elixir/lib/kernel` left |> right
    # https://github.com/elixir-lang/elixir/blob/master/lib/elixir/lib/kernel.ex#L3134
    fun = fn {x, positions}, acc ->
      # Mirror Kernel's deprecation warning when piping into unary +/-.
      case x do
        {op, _, [_]} when op == :+ or op == :- ->
          message =
            <<"piping into a unary operator is deprecated, please use the ",
              "qualified name. For example, Kernel.+(5), instead of +5">>
          :elixir_errors.warn(__CALLER__.line, __CALLER__.file, message)
        _ ->
          :ok
      end
      # Insert the accumulated expression at each recorded position.
      # Positions are reversed so earlier indices are inserted first,
      # keeping later indices valid as the argument list grows.
      positions
      |> Enum.reverse()
      |> Enum.reduce(x, &Macro.pipe(acc, &2, &1))
    end
    :lists.foldl(fun, h, t)
  end

  @doc """
  Breaks a pipeline expression into a list. This is where the target position being calculated.
      PipeTo.unpipe(quote do: 5 ~> div(100, _) ~> div(2))
      # => [{5, 0},
      #     {{:div, [context: Elixir, import: Kernel], 'd'}, 1},
      #     {{:div, [], [2]}, 0}]
  """
  @spec unpipe(Macro.t) :: [Macro.t]
  def unpipe(expr) do
    :lists.reverse(unpipe(expr, []))
  end

  # Recursively flatten nested ~> operators, left side first.
  defp unpipe({:~>, _, [left, right]}, acc) do
    unpipe(right, unpipe(left, acc))
  end

  # A call node: find every `_` placeholder among its arguments, strip the
  # placeholders out, and record the indices where the piped value goes.
  defp unpipe(ast = {_, _, args}, acc) when is_list(args) do
    positions =
      args
      |> Enum.with_index()
      |> Enum.reduce([], fn {arg, index}, positions ->
        cond do
          placeholder?(arg) -> [index | positions]
          true -> positions
        end
      end)
    fixed_ast = remove_placeholders(ast, positions)
    [{fixed_ast, pipe_positions(positions)} | acc]
  end

  # Any other term (literal, variable) is a chain start: position 0.
  defp unpipe(other, acc) do
    [{other, 0} | acc]
  end

  defp placeholder?({:_, _, _}), do: true
  defp placeholder?(_), do: false

  # Delete placeholder arguments; `positions` is descending, so each
  # deletion leaves the remaining (smaller) indices intact.
  defp remove_placeholders(ast, []), do: ast
  defp remove_placeholders({fun, meta, args}, [index | rest]) do
    {fun, meta, List.delete_at(args, index)}
    |> remove_placeholders(rest)
  end

  # No explicit placeholder means plain |> behaviour: insert at position 0.
  defp pipe_positions([]), do: [0]
  defp pipe_positions(positions), do: positions
end
|
lib/pipe_to.ex
| 0.834306 | 0.609728 |
pipe_to.ex
|
starcoder
|
defmodule Desktop do
  @moduledoc """
  This is the documentation for the Desktop project.
  By default, Desktop applications depend on the following packages:
  * [Phoenix](https://hexdocs.pm/phoenix) - the Phoenix web framework
  * [Phoenix LiveView](https://hexdocs.pm/phoenix_live_view) - real-time
    user experience
  * [Sqlite3 Ecto](https://github.com/elixir-sqlite/ecto_sqlite3) - local
    database
  To get started, see the [installation guide](installation.html).
  This library is still worked on heaviliy and APIs are going to change in v2. If you are curious
  to play around or contribute it's to best clone the current sample app at:
  https://github.com/elixir-desktop/desktop-example-app and start modifying it to your wishes.
  ## Contributing and raising Issues
  Please checkout the github repo at https://github.com/elixir-desktop/desktop and raise any issues.
  Suggestions about the API and usage are welcome!
  Cheers!
  """
  use Application

  @doc false
  # Application callback: starts the single Desktop.Env worker under a
  # one-for-one supervisor registered as Desktop.Sup.
  def start(:normal, []) do
    child = %{
      id: Desktop.Env,
      start: {Desktop.Env, :start_link, []}
    }
    Supervisor.start_link([child], strategy: :one_for_one, name: Desktop.Sup)
  end

  @doc false
  # NOTE(review): registers a GenServer under this module's name, but no
  # GenServer callbacks (init/1 etc.) are defined here — confirm whether this
  # is vestigial or the callbacks live elsewhere.
  @spec start_link() :: :ignore | {:error, any} | {:ok, pid}
  def start_link() do
    GenServer.start_link(__MODULE__, [], name: __MODULE__)
  end

  @doc """
  Call on application to use the system locale by default for
  translations. The function will scan your gettext backend for available
  languages and compare to the system language to select the best match.
  ```
  Desktop.identify_default_locale(MyApp.Gettext)
  ```
  """
  def identify_default_locale(backend) do
    # this is in the form "xx_XX"
    language_code = language_code()
    # All locales with translations:
    known_locales = Gettext.known_locales(backend)
    # Looking for a best fit
    # Preferring a full match 'xx_xx' == 'yy_yy'
    best_match = Enum.find(known_locales, fn l -> String.downcase(l) == language_code end)
    if best_match != nil do
      put_default_locale(best_match)
    else
      # Looking for a prefix match 'xx' == 'yy'
      prefix = binary_part(language_code, 0, 2)
      prefix_match =
        Enum.find(known_locales, fn l -> String.starts_with?(String.downcase(l), prefix) end)
      if prefix_match != nil do
        put_default_locale(prefix_match)
      else
        # we're giving up, not updating the default locale
      end
    end
  end

  # Returns the current locale as a lowercase "xx_xx" string.
  # The `with` matches the bare alias `MacOS` (the atom :"Elixir.MacOS")
  # against Desktop.OS.type(); on macOS the locale is read via `defaults`
  # because wx's getSystemLanguage() is unreliable there (see links below).
  def language_code() do
    with MacOS <- Desktop.OS.type(),
         {locale, 0} <- System.cmd("defaults", ~w(read -g AppleLocale)),
         [code] <- Regex.run(~r/[a-z]+_[A-Z]+/, locale) do
      # On MacOS getSystemLanguage() is calculated based on the the number
      # format (so mostly English) and not based on the actual used language
      # https://trac.wxwidgets.org/ticket/11594
      # So we are using this workaround instead to get the current locale as "xx_XX"
      # https://stackoverflow.com/questions/661935/how-to-detect-current-locale-in-mac-os-x-from-the-shell
      code
    else
      # Any other OS (or a failed macOS lookup) falls back to wx.
      _ ->
        :wx.set_env(Desktop.Env.wx_env())
        locale = :wxLocale.new(:wxLocale.getSystemLanguage())
        # this is in the form "xx_XX"
        :wxLocale.getCanonicalName(locale) |> List.to_string() |> String.downcase()
    end
  end

  @doc """
  Allows setting the default locale that will be used for translations in this
  Desktop application.
  """
  def put_default_locale(locale) when is_binary(locale) do
    # :persistent_term.put(@key, locale)
    Application.put_env(:gettext, :default_locale, locale)
  end
end
|
lib/desktop.ex
| 0.610802 | 0.757705 |
desktop.ex
|
starcoder
|
defmodule ExploringElixir.Time do
  # Invokes `fun` once for every date from today (UTC) through seven days
  # out, inclusive — eight dates in total. Always returns :ok.
  def for_next_week(fun) when is_function(fun, 1) do
    today = Date.utc_today()

    today
    |> Date.range(Date.add(today, 7))
    |> Enum.each(fun)

    :ok
  end

  # Number of seconds in one day (60 * 60 * 24).
  def seconds_per_day, do: 86_400
end
defmodule ExploringElixir.Time.Local do
  # Demonstrations of the BEAM's time APIs plus simple date arithmetic.

  # VM view of system time, in native units.
  def beam_timestamp do
    System.system_time
  end

  # Operating-system wall-clock time, in native units.
  def os_timestamp do
    System.os_time
  end

  # Monotonically increasing time; only meaningful as a difference.
  def monotonic_time do
    System.monotonic_time
  end

  # Offset between monotonic time and system time.
  def monotonic_time_offset do
    System.time_offset
  end

  # Monotonic time shifted back onto the system-time scale.
  def adjusted_monotonic_time do
    System.monotonic_time + System.time_offset
  end

  # Current UTC time of day.
  def current_time do
    NaiveDateTime.to_time current_date()
  end

  # Current UTC date and time.
  def current_date do
    NaiveDateTime.utc_now
  end

  # Exactly one day before now.
  # Fix: the time unit must be the singular `:second` — the plural `:seconds`
  # form is a deprecated System.time_unit and is rejected by current Elixir.
  def yesterday do
    NaiveDateTime.add current_date(), -(ExploringElixir.Time.seconds_per_day()), :second
  end

  # Whole-day difference between now and `yesterday/0` (expected to be 1).
  def just_a_day_away do
    Date.diff current_date(), yesterday()
  end
end
defmodule ExploringElixir.Time.Calendars do
  # Today's date in the default ISO 8601 calendar.
  def today_iso, do: Date.utc_today()

  # Today's date expressed in the Jalaali (Persian) calendar.
  def today_jalaali, do: Date.utc_today(Jalaali.Calendar)

  # Converts `date` into the Jalaali calendar; raises MatchError if the
  # conversion is not possible.
  def convert_to_jalaali(date) do
    {:ok, converted} = Date.convert(date, Jalaali.Calendar)
    converted
  end
end
defmodule ExploringElixir.Time.Stored do
  # `require ... as:` both aliases the repo and makes its macros available.
  require ExploringElixir.Repo.Tenants, as: Repo
  import Ecto.Query

  @spec put(date_time :: DateTime.t(), data :: String.t()) :: id :: integer()
  # Normalizes `date_time` to UTC and stores its date part, time part, and
  # the full zoned timestamp alongside `data` in the schemaless
  # "dates_and_times" table.
  # NOTE(review): the @spec promises an integer id, but `Repo.insert_all/2`
  # returns `{count, returning}` — confirm what callers actually rely on.
  def put(date_time, data) when is_bitstring(data) do
    utc = Timex.to_datetime date_time, "Etc/UTC"
    Repo.insert_all "dates_and_times", [%{
      a_date: DateTime.to_date(utc),
      a_time: DateTime.to_time(utc),
      with_tz: utc,
      data: data
    }]
  end

  @spec get(date :: DateTime.t()) :: [{id :: integer, data :: String.t()}]
  # Fetches all {id, data} rows whose stored date matches the calendar date
  # of `date_time` (time-of-day is ignored).
  def get(date_time) do
    date = DateTime.to_date(date_time)
    query = from d in "dates_and_times",
      where: d.a_date == ^date,
      select: {d.id, d.data}
    Repo.all query
  end
end
|
lib/exploring_elixir/e008/time.ex
| 0.563138 | 0.442938 |
time.ex
|
starcoder
|
defmodule Rheostat do
  @moduledoc """
  A configurable stats provider. Rheostat provides a common interface to
  stats provider.
  Configure the provider with:
  ```
  config :rheostat, adapter: Rheostat.Adapter.Statix
  ```
  """

  @doc """
  Opens the connection to the stats server. configuration is read from
  the configuration for the `:statix` application (both globally and per
  connection).
  """
  def connect(opts \\ []), do: adapter().connect(opts)

  # Counter helpers: metadata defaults to an empty map, amount defaults to 1.
  # Clause order matters — the map-guarded count/2 must precede the plain one.
  def count(metadata, metric, amount), do: adapter().count(metadata, metric, amount)

  def count(metadata, metric) when is_map(metadata), do: adapter().count(metadata, metric, 1)

  def count(metric, amount), do: adapter().count(%{}, metric, amount)

  def count(metric), do: adapter().count(%{}, metric, 1)

  # Record a sampled value, with or without metadata.
  def sample(metadata, metric, value), do: adapter().sample(metadata, metric, value)

  def sample(metric, value), do: adapter().sample(%{}, metric, value)

  def measure(metric, target), do: measure(metric, [], target)

  def measure(metadata, metric, target) when is_map(metadata) do
    adapter().measure(metadata, metric, target)
  end

  @doc """
  Measures the execution time of the given `function` and writes that to the
  timing identified by `key`.
  This function returns the value returned by `function`, making it suitable for
  easily wrapping existing code.
  ## Examples
      iex> Rheostat.measure("integer_to_string", [], fn -> Integer.to_string(123) end)
      "123"
  """
  def measure(key, options, target) when is_binary(key) do
    adapter().measure(key, options, target)
  end

  @doc """
  Increments the StatsD counter identified by `key` by the given `value`.
  `value` is supposed to be zero or positive and `c:decrement/3` should be
  used for negative values.
  ## Examples
      iex> Rheostat.increment("hits", 1, [])
      :ok
  """
  def increment(key, value, options), do: adapter().increment(key, value, options)

  @doc """
  Same as `increment(key, 1, [])`.
  """
  def increment(key), do: increment(key, 1, [])

  @doc """
  Same as `increment(key, value, [])`.
  """
  def increment(key, value), do: increment(key, value, [])

  @doc """
  Decrements the StatsD counter identified by `key` by the given `value`.
  Works same as `c:increment/3` but subtracts `value` instead of adding it. For
  this reason `value` should be zero or negative.
  ## Examples
      iex> Rheostat.decrement("open_connections", 1, [])
      :ok
  """
  def decrement(key, value, options), do: adapter().decrement(key, value, options)

  @doc """
  Same as `decrement(key, 1, [])`.
  """
  def decrement(key), do: decrement(key, 1, [])

  @doc """
  Same as `decrement(key, value, [])`.
  """
  def decrement(key, value), do: decrement(key, value, [])

  @doc """
  Writes to the StatsD gauge identified by `key`.
  ## Examples
      iex> Rheostat.gauge("cpu_usage", 0.83, [])
      :ok
  """
  def gauge(key, value, options) do
    adapter().gauge(key, value, options)
  end

  @doc """
  Same as `gauge(key, value, [])`.
  """
  def gauge(key, value), do: gauge(key, value, [])

  @doc """
  Writes `value` to the histogram identified by `key`. Not all
  StatsD-compatible servers support histograms. An example of a such
  server [statsite](https://github.com/statsite/statsite).
  ## Examples
      iex> Rheostat.histogram("online_users", 123, [])
      :ok
  """
  def histogram(key, value, options), do: adapter().histogram(key, value, options)

  @doc """
  Same as `histogram(key, value, [])`.
  """
  def histogram(key, value), do: histogram(key, value, [])

  @doc """
  Writes the given `value` to the timing identified by `key`. `value` is
  expected in milliseconds.
  ## Examples
      iex> Rheostat.timing("rendering", 12, [])
      :ok
  """
  def timing(key, value, options), do: adapter().timing(key, value, options)

  @doc """
  Same as `timing(key, value, [])`.
  """
  def timing(key, value), do: timing(key, value, [])

  @doc """
  Writes the given `value` to the set identified by `key`.
  ## Examples
      iex> Rheostat.set("unique_visitors", "user1", [])
      :ok
  """
  def set(key, value, options) do
    adapter().set(key, value, options)
  end

  @doc """
  Same as `set(key, value, [])`.
  """
  def set(key, value), do: adapter().set(key, value, [])

  # Resolves the configured adapter at call time so it can be swapped
  # without recompiling.
  @spec adapter() :: Rheostat.Adapter
  defp adapter, do: Application.get_env(:rheostat, :adapter, Rheostat.Adapter.Metrix)
end
|
lib/rheostat.ex
| 0.951718 | 0.930962 |
rheostat.ex
|
starcoder
|
defmodule Appsignal.Utils.MapFilter do
  require Logger

  @moduledoc """
  Helper functions for filtering parameters to prevent sensitive data
  to be submitted to AppSignal.
  """

  @doc """
  Filter parameters based on Appsignal and Phoenix configuration.
  """
  def filter_parameters(values), do: filter_values(values, get_filter_parameters())

  @doc """
  Filter session data based Appsignal configuration.
  """
  def filter_session_data(values), do: filter_values(values, get_filter_session_data())

  @doc false
  # A bare list of keys means discard-mode filtering.
  def filter_values(values, {:discard, params}), do: discard_values(values, params)
  def filter_values(values, {:keep, params}), do: keep_values(values, params)
  def filter_values(values, params), do: discard_values(values, params)

  def get_filter_parameters do
    appsignal = Application.get_env(:appsignal, :config)[:filter_parameters]
    phoenix = Application.get_env(:phoenix, :filter_parameters, [])
    merge_filters(appsignal, phoenix)
  end

  def get_filter_session_data do
    Application.get_env(:appsignal, :config)[:filter_session_data] || []
  end

  # Structs are passed through untouched in discard mode.
  defp discard_values(%{__struct__: mod} = struct, _params) when is_atom(mod) do
    struct
  end

  # Replace matching keys with "[FILTERED]"; recurse into other values.
  defp discard_values(%{} = map, params) do
    Map.new(map, fn {key, value} ->
      if filterable_key?(key) and String.contains?(to_string(key), params) do
        {key, "[FILTERED]"}
      else
        {key, discard_values(value, params)}
      end
    end)
  end

  defp discard_values([_ | _] = list, params) do
    Enum.map(list, &discard_values(&1, params))
  end

  defp discard_values(other, _params), do: other

  # In keep mode a struct is considered unsafe and fully redacted.
  defp keep_values(%{__struct__: mod}, _params) when is_atom(mod), do: "[FILTERED]"

  # Keys on the keep-list survive (and are kept in full); everything else is
  # recursed into and ultimately redacted.
  defp keep_values(%{} = map, params) do
    Map.new(map, fn {key, value} ->
      if filterable_key?(key) and to_string(key) in params do
        {key, discard_values(value, [])}
      else
        {key, keep_values(value, params)}
      end
    end)
  end

  defp keep_values([_ | _] = list, params) do
    Enum.map(list, &keep_values(&1, params))
  end

  defp keep_values(_other, _params), do: "[FILTERED]"

  # Only string and atom keys can be compared against the filter lists.
  defp filterable_key?(key), do: is_binary(key) or is_atom(key)

  # Combine AppSignal's and Phoenix's filter settings, honoring :keep tuples.
  defp merge_filters(appsignal, phoenix) when is_list(appsignal) and is_list(phoenix) do
    appsignal ++ phoenix
  end

  defp merge_filters({:keep, appsignal}, {:keep, phoenix}), do: {:keep, appsignal ++ phoenix}

  defp merge_filters(appsignal, {:keep, phoenix}) when is_list(appsignal) and is_list(phoenix) do
    {:keep, phoenix -- appsignal}
  end

  defp merge_filters({:keep, appsignal}, phoenix) when is_list(appsignal) and is_list(phoenix) do
    {:keep, appsignal -- phoenix}
  end

  defp merge_filters(appsignal, phoenix) do
    Logger.error("""
    An error occured while merging parameter filters.
    AppSignal expects all parameter_filter values to be either a list of
    strings (`["email"]`), or a :keep-tuple with a list of strings as its
    second element (`{:keep, ["email"]}`).
    From the AppSignal configuration:
    #{inspect(appsignal)}
    From the Phoenix configuration:
    #{inspect(phoenix)}
    To ensure no sensitive parameters are sent, all parameters are filtered out
    for this transaction.
    """)

    {:keep, []}
  end
end
|
lib/appsignal/utils/map_filter.ex
| 0.744006 | 0.634982 |
map_filter.ex
|
starcoder
|
defmodule BioMonitor.RoutineCalculations do
@moduledoc """
Module in charge of processing the readings to infer new data
"""
require Math
defmodule Result do
  @moduledoc """
  Struct that represents a computed result as a point on a 2-axis chart
  (`x` is elapsed time in seconds, `y` the derived value).
  """
  defstruct x: 0, y: 0
end
defmodule PartialResult do
  @moduledoc """
  Struct used to store partial results during calculations; carries the
  originating `reading` so the next step can compute deltas against it.
  """
  defstruct x: 0, y: 0, reading: nil
end
@doc """
Generates all calculations for a set of readings at once.
Returns a map with the following values:
    %{
      biomass_performance,
      product_performance,
      product_biomass_performance,
      product_volumetric_performance,
      biomass_volumetric_performance,
      max_product_volumetric_performance,
      max_biomass_volumetric_performance,
      specific_ph_velocity,
      specific_biomass_velocity,
      specific_product_velocity,
      max_ph_velocity,
      max_biomass_velocity,
      max_product_velocity
    }
All values are Arrays of type %Result{x, y}
"""
def build_calculations(readings, started_timestamp) do
  # Series that are also needed for their maxima are computed once and reused.
  product_volumetric = product_volumetric_performance(readings, started_timestamp)
  biomass_volumetric = biomass_volumetric_performance(readings, started_timestamp)
  ph_velocity = specific_ph_velocity(readings, started_timestamp)
  biomass_velocity = specific_biomass_velocity(readings, started_timestamp)
  product_velocity = specific_product_velocity(readings, started_timestamp)
  max_ph = calculate_max_point(ph_velocity)
  max_biomass = calculate_max_point(biomass_velocity)
  max_product = calculate_max_point(product_velocity)

  %{
    biomass_performance: biomass_performance(readings, started_timestamp),
    product_performance: product_performance(readings, started_timestamp),
    product_biomass_performance: product_biomass_performance(readings, started_timestamp),
    product_volumetric_performance: product_volumetric,
    biomass_volumetric_performance: biomass_volumetric,
    max_product_volumetric_performance: calculate_max_point(product_volumetric),
    max_biomass_volumetric_performance: calculate_max_point(biomass_volumetric),
    specific_ph_velocity: ph_velocity,
    specific_biomass_velocity: biomass_velocity,
    specific_product_velocity: product_velocity,
    max_ph_velocity: max_ph,
    max_biomass_velocity: max_biomass,
    max_product_velocity: max_product
  }
end
@doc """
Builds the calculations with a format suitable for table rendering.
"""
def build_csv_calculations(readings, started_timestamp) do
  # Zip the eight series together so each row carries every metric for one
  # point in time; the shared x (seconds elapsed) is taken from the first.
  [
    biomass_performance(readings, started_timestamp),
    product_performance(readings, started_timestamp),
    product_biomass_performance(readings, started_timestamp),
    product_volumetric_performance(readings, started_timestamp),
    biomass_volumetric_performance(readings, started_timestamp),
    specific_ph_velocity(readings, started_timestamp),
    specific_biomass_velocity(readings, started_timestamp),
    specific_product_velocity(readings, started_timestamp)
  ]
  |> Enum.zip()
  |> Enum.map(fn {bio_perf, prod_perf, prod_bio, prod_vol, bio_vol, ph_vel, bio_vel, prod_vel} ->
    %{
      time_in_seconds: bio_perf.x,
      biomass_performance: bio_perf.y,
      product_performance: prod_perf.y,
      product_biomass_performance: prod_bio.y,
      product_volumetric_performance: prod_vol.y,
      biomass_volumetric_performance: bio_vol.y,
      specific_ph_velocity: ph_vel.y,
      specific_biomass_velocity: bio_vel.y,
      specific_product_velocity: prod_vel.y
    }
  end)
end
@doc """
Calculates all biomass performance for each reading.
returns: [{ x: seconds elapsed, y: dBiomass/dSubstratum}]
"""
# Fix: the original called `List.last/1` and `List.insert_at(acc, -1, ...)`
# inside the reduce, which is O(n^2); we prepend and reverse once instead.
def biomass_performance(readings, started_timestamp) do
  readings
  |> Enum.filter(fn reading ->
    reading.biomass != nil && reading.biomass != 0 && reading.substratum != nil &&
      reading.substratum != 0
  end)
  |> Enum.reduce([], fn reading, acc ->
    time = NaiveDateTime.diff(reading.inserted_at, started_timestamp)

    y =
      case acc do
        # First point: plain biomass/substratum ratio.
        [] ->
          reading.biomass / reading.substratum

        # Subsequent points: delta against the previous reading, 0 when the
        # substratum did not change (avoids division by zero).
        [prev | _] ->
          d_substratum = reading.substratum - prev.reading.substratum

          if d_substratum == 0,
            do: 0,
            else: (reading.biomass - prev.reading.biomass) / d_substratum
      end

    [%PartialResult{x: time, y: y, reading: reading} | acc]
  end)
  |> Enum.reverse()
  |> Enum.map(fn partial -> %Result{x: partial.x, y: partial.y} end)
end
@doc """
Calculates all product performance for each reading.
returns: [{ x: seconds elapsed, y: dBiomass/dProduct}]
"""
# Fix: replaced the O(n^2) `List.last` + `List.insert_at(acc, -1, ...)`
# accumulation with prepend + single reverse.
def product_performance(readings, started_timestamp) do
  readings
  |> Enum.filter(fn reading ->
    reading.biomass != nil && reading.biomass != 0 && reading.product != nil &&
      reading.product != 0
  end)
  |> Enum.reduce([], fn reading, acc ->
    time = NaiveDateTime.diff(reading.inserted_at, started_timestamp)

    y =
      case acc do
        # First point: plain biomass/product ratio.
        [] ->
          reading.biomass / reading.product

        # Subsequent points: dBiomass/dProduct, 0 when product is unchanged.
        [prev | _] ->
          d_product = reading.product - prev.reading.product

          if d_product == 0,
            do: 0,
            else: (reading.biomass - prev.reading.biomass) / d_product
      end

    [%PartialResult{x: time, y: y, reading: reading} | acc]
  end)
  |> Enum.reverse()
  |> Enum.map(fn partial -> %Result{x: partial.x, y: partial.y} end)
end
@doc """
Calculates all product biomass performance for each reading.
returns: [{ x: seconds elapsed, y: dProduct/dBiomass}]
"""
# Fix: replaced the O(n^2) `List.last` + `List.insert_at(acc, -1, ...)`
# accumulation with prepend + single reverse.
def product_biomass_performance(readings, started_timestamp) do
  readings
  |> Enum.filter(fn reading ->
    reading.biomass != nil && reading.biomass != 0 && reading.product != nil &&
      reading.product != 0
  end)
  |> Enum.reduce([], fn reading, acc ->
    time = NaiveDateTime.diff(reading.inserted_at, started_timestamp)

    y =
      case acc do
        # First point: plain product/biomass ratio.
        [] ->
          reading.product / reading.biomass

        # Subsequent points: dProduct/dBiomass, 0 when biomass is unchanged.
        [prev | _] ->
          d_biomass = reading.biomass - prev.reading.biomass

          if d_biomass == 0,
            do: 0,
            else: (reading.product - prev.reading.product) / d_biomass
      end

    [%PartialResult{x: time, y: y, reading: reading} | acc]
  end)
  |> Enum.reverse()
  |> Enum.map(fn partial -> %Result{x: partial.x, y: partial.y} end)
end
@doc """
Calculates de Product volumetric performance for every point.
"""
# Fix: replaced the O(n^2) `List.last` + `List.insert_at(acc, -1, ...)`
# accumulation with prepend + single reverse.
def product_volumetric_performance(readings, started_timestamp) do
  readings
  |> Enum.filter(fn reading ->
    reading.product != nil && reading.product != 0
  end)
  |> Enum.reduce([], fn reading, acc ->
    time = NaiveDateTime.diff(reading.inserted_at, started_timestamp)

    y =
      case acc do
        # First point: product per elapsed second; at t=0 the raw product is
        # used. NOTE(review): the biomass variant yields 0 at t=0 instead —
        # confirm this asymmetry is intentional.
        [] ->
          if time == 0, do: reading.product, else: reading.product / time

        # Subsequent points: dProduct/dTime, 0 when no time has elapsed.
        [prev | _] ->
          d_time = time - prev.x

          if d_time == 0,
            do: 0,
            else: (reading.product - prev.reading.product) / d_time
      end

    [%PartialResult{x: time, y: y, reading: reading} | acc]
  end)
  |> Enum.reverse()
  |> Enum.map(fn partial -> %Result{x: partial.x, y: partial.y} end)
end
@doc """
Calculates de Biomass volumetric performance for every point.
"""
# Fix: replaced the O(n^2) `List.last` + `List.insert_at(acc, -1, ...)`
# accumulation with prepend + single reverse.
def biomass_volumetric_performance(readings, started_timestamp) do
  readings
  |> Enum.filter(fn reading ->
    reading.biomass != nil && reading.biomass != 0
  end)
  |> Enum.reduce([], fn reading, acc ->
    time = NaiveDateTime.diff(reading.inserted_at, started_timestamp)

    y =
      case acc do
        # First point: biomass per elapsed second, 0 at t=0.
        [] ->
          if time == 0, do: 0, else: reading.biomass / time

        # Subsequent points: dBiomass/dTime, 0 when no time has elapsed.
        [prev | _] ->
          d_time = time - prev.x

          if d_time == 0,
            do: 0,
            else: (reading.biomass - prev.reading.biomass) / d_time
      end

    [%PartialResult{x: time, y: y, reading: reading} | acc]
  end)
  |> Enum.reverse()
  |> Enum.map(fn partial -> %Result{x: partial.x, y: partial.y} end)
end
@doc """
Calculates de specific ph velocity for each reading.
"""
# Fix: replaced the O(n^2) `List.last` + `List.insert_at(acc, -1, ...)`
# accumulation with prepend + single reverse. Note this series applies no
# nil/zero filter to `ph`, matching the original behavior.
def specific_ph_velocity(readings, started_timestamp) do
  readings
  |> Enum.reduce([], fn reading, acc ->
    time = NaiveDateTime.diff(reading.inserted_at, started_timestamp)

    y =
      case acc do
        # First point: reciprocal of the absolute pH value.
        [] ->
          1 / reading.ph

        # Subsequent points: reciprocal of the pH delta, 0 when unchanged.
        [prev | _] ->
          diff = reading.ph - prev.reading.ph
          if diff == 0, do: 0, else: 1 / diff
      end

    [%PartialResult{x: time, y: y, reading: reading} | acc]
  end)
  |> Enum.reverse()
  |> Enum.map(fn partial -> %Result{x: partial.x, y: partial.y} end)
end
@doc """
Calculates de specific biomass velocity for each reading.
"""
def specific_biomass_velocity(readings, started_timestamp) do
readings
|> Enum.filter(fn reading ->
reading.biomass != nil && reading.biomass != 0
end)
|> Enum.reduce([],
fn reading, acc ->
last_value = acc |> List.last
time = NaiveDateTime.diff(reading.inserted_at, started_timestamp)
case last_value do
nil ->
result = %PartialResult{
y: (1/reading.biomass),
x: time,
reading: reading
}
acc |> List.insert_at(-1, result)
last_val ->
diff = reading.biomass - last_val.reading.biomass
delta = if diff == 0, do: 0, else: 1/diff
result = %PartialResult{
y: delta,
x: time,
reading: reading
}
acc |> List.insert_at(-1, result)
end
end
)
|> Enum.map(fn partial_result ->
%Result{x: partial_result.x, y: partial_result.y}
end)
end
@doc """
Calculates de specific product velocity for each reading.
"""
def specific_product_velocity(readings, started_timestamp) do
readings
|> Enum.filter(fn reading ->
reading.product != nil && reading.product != 0
end)
|> Enum.reduce([],
fn reading, acc ->
last_value = acc |> List.last
time = NaiveDateTime.diff(reading.inserted_at, started_timestamp)
case last_value do
nil ->
result = %PartialResult{
y: (1/reading.product),
x: time,
reading: reading
}
acc |> List.insert_at(-1, result)
last_val ->
diff = reading.product - last_val.reading.product
delta = if diff == 0, do: 0, else: 1/diff
result = %PartialResult{
y: delta,
x: time,
reading: reading
}
acc |> List.insert_at(-1, result)
end
end
)
|> Enum.map(fn partial_result ->
%Result{x: partial_result.x, y: partial_result.y}
end)
end
@doc """
Calculates de maximium point for a list of Results by comparing it's y values.
Returns a %Result with the value and the time it happened.
"""
def calculate_max_point(results) do
first_val = results |> List.first
results
|> Enum.reduce(
first_val,
fn result, acc ->
case result.y > acc.y do
true -> result
false -> acc
end
end
)
end
end
|
lib/bio_monitor/routine_calculations.ex
| 0.858748 | 0.589716 |
routine_calculations.ex
|
starcoder
|
defmodule SanbaseWeb.Graphql.Resolvers.SignalResolver do
  @moduledoc """
  Absinthe resolvers for signal GraphQL queries: signal metadata and
  availability, raw and (aggregated) timeseries data, with plan/product
  based access restrictions applied to what callers may see.
  """

  import Sanbase.Utils.Transform, only: [maybe_apply_function: 2]
  import SanbaseWeb.Graphql.Helpers.{Utils, CalibrateInterval}
  import Absinthe.Resolution.Helpers, only: [on_load: 2]
  import Sanbase.Model.Project.Selector, only: [args_to_selector: 1, args_to_raw_selector: 1]

  import Sanbase.Utils.ErrorHandling,
    only: [handle_graphql_error: 3, maybe_handle_graphql_error: 2]

  alias Sanbase.Signal
  alias SanbaseWeb.Graphql.SanbaseDataloader
  alias Sanbase.Billing.Plan.Restrictions

  require Logger

  # Target number of datapoints per timeseries response; used to calibrate
  # the query interval in timeseries_data/3.
  @datapoints 300

  # Resolves the project for `slug` via Dataloader so that many slug lookups
  # within one GraphQL request are batched into a single fetch.
  def project(%{slug: slug}, _args, %{context: %{loader: loader}}) do
    loader
    |> Dataloader.load(SanbaseDataloader, :project_by_slug, slug)
    |> on_load(fn loader ->
      {:ok, Dataloader.get(loader, SanbaseDataloader, :project_by_slug, slug)}
    end)
  end

  # NOTE(review): only `true` and `{:error, error}` are matched — presumably
  # `Signal.has_signal?/1` never returns `false`; confirm, otherwise a `false`
  # return raises CaseClauseError here.
  def get_signal(_root, %{signal: signal}, _resolution) do
    case Signal.has_signal?(signal) do
      true -> {:ok, %{signal: signal}}
      {:error, error} -> {:error, error}
    end
  end

  # Fetches raw signal data in the [from, to] window; `signals` defaults to
  # :all, and the selector is optional. Signals the caller's plan may not
  # access are overwritten with hidden placeholders, not removed.
  def get_raw_signals(_root, %{from: from, to: to} = args, resolution) do
    signals = Map.get(args, :signals, :all)

    selector =
      case Map.has_key?(args, :selector) do
        false ->
          :all

        true ->
          # Assertive match: an invalid selector raises instead of returning
          # an error tuple.
          {:ok, selector} = args_to_selector(args)
          selector
      end

    Signal.raw_data(signals, selector, from, to)
    |> maybe_apply_function(&overwrite_not_accessible_signals(&1, resolution))
  end

  def get_available_signals(_root, _args, _resolution), do: {:ok, Signal.available_signals()}

  def get_available_slugs(_root, _args, %{source: %{signal: signal}}),
    do: Signal.available_slugs(signal)

  # Returns signal metadata merged with the caller's plan restrictions
  # (restriction keys take lower precedence than metadata on conflict,
  # since `metadata` is the second argument to Map.merge/2).
  def get_metadata(_root, _args, resolution) do
    %{source: %{signal: signal}} = resolution

    case Signal.metadata(signal) do
      {:ok, metadata} ->
        restrictions = resolution_to_signal_restrictions(resolution)
        {:ok, Map.merge(restrictions, metadata)}

      {:error, error} ->
        {:error, handle_graphql_error("metadata", %{signal: signal}, error)}
    end
  end

  # First datetime for which the signal has data for the given selector.
  def available_since(_root, args, %{source: %{signal: signal}}) do
    with {:ok, selector} <- args_to_selector(args),
         {:ok, first_datetime} <- Signal.first_datetime(signal, selector) do
      {:ok, first_datetime}
    end
    |> maybe_handle_graphql_error(fn error ->
      handle_graphql_error(
        "Available Since",
        %{signal: signal, selector: args_to_raw_selector(args)},
        error
      )
    end)
  end

  # Timeseries datapoints for a signal; the interval is calibrated so the
  # response stays around @datapoints points. nil datapoints are dropped.
  def timeseries_data(
        _root,
        %{from: from, to: to, interval: interval} = args,
        %{source: %{signal: signal}}
      ) do
    # NOTE(review): the second clause uses `=` rather than `<-`, so an error
    # from selector_args_to_opts/1 raises MatchError instead of flowing to the
    # `else` branch — confirm this assertive behavior is intended.
    with {:ok, selector} <- args_to_selector(args),
         {:ok, opts} = selector_args_to_opts(args),
         {:ok, from, to, interval} <-
           calibrate(Signal, signal, selector, from, to, interval, 86_400, @datapoints),
         {:ok, result} <- Signal.timeseries_data(signal, selector, from, to, interval, opts) do
      {:ok, result |> Enum.reject(&is_nil/1)}
    else
      {:error, error} ->
        {:error, handle_graphql_error(signal, args_to_raw_selector(args), error)}
    end
  end

  # Single aggregated value over [from, to]; only the first value of the
  # per-slug result map is returned.
  def aggregated_timeseries_data(
        _root,
        %{from: from, to: to} = args,
        %{source: %{signal: signal}}
      ) do
    # NOTE(review): same `=` vs `<-` caveat as in timeseries_data/3 above.
    with {:ok, selector} <- args_to_selector(args),
         {:ok, opts} = selector_args_to_opts(args),
         {:ok, result} <- Signal.aggregated_timeseries_data(signal, selector, from, to, opts) do
      {:ok, Map.values(result) |> List.first()}
    end
    |> maybe_handle_graphql_error(fn error ->
      handle_graphql_error(signal, args_to_raw_selector(args), error)
    end)
  end

  # Replaces details of signals the caller's plan cannot access with hidden
  # placeholders; accessible signals are tagged is_hidden: false.
  defp overwrite_not_accessible_signals(list, resolution) do
    restrictions_map =
      resolution_to_all_signals_restrictions(resolution) |> Map.new(&{&1.name, &1})

    list
    |> Enum.map(fn signal ->
      case should_hide_signal?(signal, restrictions_map) do
        true -> hide_signal_details(signal)
        false -> Map.put(signal, :is_hidden, false)
      end
    end)
  end

  # Decides whether a datapoint falls outside the caller's accessible window.
  # NOTE(review): no clause handles a signal missing from restrictions_map
  # (Map.get -> nil) — presumably every signal has a restriction entry; confirm.
  defp should_hide_signal?(signal_map, restrictions_map) do
    case Map.get(restrictions_map, signal_map.signal) do
      %{is_accessible: false} ->
        true

      %{is_accessible: true, is_restricted: false} ->
        false

      %{restricted_from: restricted_from, restricted_to: restricted_to} ->
        before_from? =
          match?(%DateTime{}, restricted_from) and
            DateTime.compare(signal_map.datetime, restricted_from) == :lt

        after_to? =
          match?(%DateTime{}, restricted_to) and
            DateTime.compare(signal_map.datetime, restricted_to) == :gt

        before_from? or after_to?
    end
  end

  # Blanks out all data fields of a hidden signal while keeping the record.
  defp hide_signal_details(signal) do
    signal
    |> Map.merge(%{
      is_hidden: true,
      datetime: nil,
      value: nil,
      slug: nil,
      metadata: nil
    })
  end

  # Restrictions for the single signal in the resolution source.
  defp resolution_to_signal_restrictions(resolution) do
    %{context: %{product_id: product_id, auth: %{plan: plan}}} = resolution
    %{source: %{signal: signal}} = resolution
    Restrictions.get({:signal, signal}, plan, product_id)
  end

  # Restrictions for every signal, given the caller's plan and product.
  defp resolution_to_all_signals_restrictions(resolution) do
    %{context: %{product_id: product_id, auth: %{plan: plan}}} = resolution

    Restrictions.get_all(plan, product_id)
    |> Enum.filter(&(&1.type == "signal"))
  end
end
|
lib/sanbase_web/graphql/resolvers/signal_resolver.ex
| 0.626581 | 0.460835 |
signal_resolver.ex
|
starcoder
|
defmodule StringFormatterSplit do
  @moduledoc """
  A module used to evaluate {placeholders} in strings given a list of params.

  Implemented as a small state machine: the input is repeatedly split at the
  next `{` or `}` and each fragment is processed according to whether we are
  currently inside a placeholder (`:reading_placeholder`) or not (`:normal`).
  Doubled braces (`{{`, `}}`) are treated as escaped literal braces.
  """

  import StringFormatterUtils, only: [normalize_params: 1, eval_holder: 2]

  # State machine statuses: outside vs. inside a `{...}` placeholder.
  @status_normal :normal
  @status_reading_placeholder :reading_placeholder

  @doc """
  Format a string with placeholders. Missing placeholders will be printed back
  in the formatted text.

  Options:
    * `:splitter` - the split function to use (defaults to `split_1/1`)
    * `:io_lists` - when true, return iodata instead of a binary
  """
  def format(string, params, opts \\ []) when is_binary(string) do
    normalized_params = normalize_params(params)
    split_func = opts[:splitter] || &__MODULE__.split_1/1

    string
    |> split_func.()
    |> do_format(normalized_params, split_func, @status_normal, [], [])
    |> flush(opts)
  end

  # End of input in normal state: emit what is left.
  defp do_format([left, "", _], _, _, @status_normal, formatted, _), do: [formatted, left]
  # End of input while still inside an unterminated placeholder: print the
  # opening brace and the partial placeholder back verbatim.
  defp do_format([left, "", _], _, _, @status_reading_placeholder, formatted, placeholder),
    do: [formatted, "{", placeholder, left]

  # Escaped "{{" inside a placeholder: keep one literal "{" in the placeholder.
  defp do_format([left, "{", "{" <> right], params, split_func, @status_reading_placeholder = status, formatted, placeholder) do
    right
    |> split_func.()
    |> do_format(params, split_func, status, formatted, [placeholder, left, "{"])
  end

  # Escaped "{{" in normal text: emit one literal "{".
  defp do_format([left, "{", "{" <> right], params, split_func, status, formatted, placeholder) do
    right
    |> split_func.()
    |> do_format(params, split_func, status, [formatted, left, "{"], placeholder)
  end

  # Escaped "}}" inside a placeholder: keep one literal "}" in the placeholder.
  defp do_format([left, "}", "}" <> right], params, split_func, @status_reading_placeholder = status, formatted, placeholder) do
    right
    |> split_func.()
    |> do_format(params, split_func, status, formatted, [placeholder, left, "}"])
  end

  # Escaped "}}" in normal text: emit one literal "}".
  defp do_format([left, "}", "}" <> right], params, split_func, status, formatted, placeholder) do
    right
    |> split_func.()
    |> do_format(params, split_func, status, [formatted, left, "}"], placeholder)
  end

  # Single "{" in normal text: start reading a placeholder.
  # Fix: the accumulator previously contained a trailing comma
  # (`[formatted, left,]`), which is a syntax error in Elixir.
  defp do_format([left, "{", right], params, split_func, @status_normal, formatted, placeholder) do
    right
    |> split_func.()
    |> do_format(params, split_func, @status_reading_placeholder, [formatted, left], placeholder)
  end

  # Single "}" while reading a placeholder: evaluate it against the params
  # and return to normal state.
  # NOTE(review): there is no clause for a stray "}" in :normal state — such
  # input raises FunctionClauseError; confirm that is intended.
  defp do_format([left, "}", right], params, split_func, @status_reading_placeholder, formatted, placeholder) do
    evaled =
      [placeholder, left]
      |> IO.iodata_to_binary()
      |> eval_holder(params)

    right
    |> split_func.()
    |> do_format(params, split_func, @status_normal, [formatted, evaled], [])
  end

  @doc """
  Splits `string` into `[before, brace, after]` at the first `{` or `}`,
  scanning byte by byte. Returns `[string, "", ""]` when no brace is found.
  """
  def split_1(string) do
    do_split_1(string, string, 0)
  end

  defp do_split_1("", string, _), do: [string, "", ""]

  defp do_split_1(<<x::binary-size(1), rest::binary>>, orig, idx) when x == "{" or x == "}" do
    # safe to match ascii chars {,}, see https://en.wikipedia.org/wiki/UTF-8
    # Backward compatibility: One-byte codes are used for the ASCII values 0 through 127,...
    # Bytes in this range are not used anywhere else... as it will not accidentally see those ASCII characters in the middle of a multi-byte character.
    [binary_part(orig, 0, idx), x, rest]
  end

  defp do_split_1(<<_x::binary-size(1), rest::binary>>, orig, idx) do
    do_split_1(rest, orig, idx + 1)
  end

  # Alternative splitter using :binary.match/2.
  # https://stackoverflow.com/a/44120981/289992
  def split_2(binary) do
    case :binary.match(binary, ["{", "}"]) do
      {start, length} ->
        before = :binary.part(binary, 0, start)
        match = :binary.part(binary, start, length)
        after_ = :binary.part(binary, start + length, byte_size(binary) - (start + length))
        [before, match, after_]

      :nomatch -> [binary, "", ""]
    end
  end

  # Alternative splitter using binary pattern matching on the match offsets.
  def split_3(string) do
    case :binary.match(string, ["{", "}"]) do
      {start, length} ->
        <<a::binary-size(start), b::binary-size(length), c::binary>> = string
        [a, b, c]

      :nomatch -> [string, "", ""]
    end
  end

  # Materialize the iodata accumulator, unless the caller asked for iodata.
  defp flush(io_data, opts) do
    case opts[:io_lists] do
      true -> io_data
      _ -> IO.iodata_to_binary(io_data)
    end
  end
end
|
pattern_matching_and_state_machines/lib/string_formatter_split.ex
| 0.778565 | 0.568056 |
string_formatter_split.ex
|
starcoder
|
defmodule Re.Shortlists.Salesforce.Opportunity do
  @moduledoc """
  Module for validating and parse salesforce opportunity entity on shortlist context
  """
  alias Re.Slugs

  use Ecto.Schema

  import Ecto.Changeset
  import EctoEnum

  @primary_key {:id, :string, []}

  # Maps internal field names to the Salesforce custom-field API names
  # found in the incoming payload.
  defenum(Schema,
    infrastructure: "Infraestrutura__c",
    type: "Tipo_do_Imovel__c",
    min_rooms: "Quantidade_Minima_de_Quartos__c",
    min_suites: "Quantidade_MInima_de_SuItes__c",
    min_bathrooms: "Quantidade_Minima_de_Banheiros__c",
    min_garage_spots: "Numero_Minimo_de_Vagas__c",
    min_area: "Area_Desejada__c",
    preference_floor: "Andar_de_Preferencia__c",
    elevators: "Necessita_Elevador__c",
    nearby_subway: "Proximidade_de_Metr__c",
    neighborhoods: "Bairros_de_Interesse__c",
    price_range: "Valor_M_ximo_para_Compra_2__c",
    maintenance_fee_range: "Valor_M_ximo_de_Condom_nio__c",
    lobby: "Portaria_2__c",
    user_name: "AccountName",
    owner_name: "OwnerName"
  )

  # "Alto" (high) / "Baixo" (low) floor preference values from Salesforce.
  defenum(PreferenceFloor,
    high: "Alto",
    low: "Baixo"
  )

  schema "salesforce_opportunity" do
    field :infrastructure, {:array, :string}
    field :type, :string
    field :min_rooms, :integer
    field :min_suites, :integer
    field :min_bathrooms, :integer
    field :min_garage_spots, :integer
    field :min_area, :integer
    field :preference_floor, PreferenceFloor
    field :elevators, :boolean
    field :nearby_subway, :boolean
    field :neighborhoods, {:array, :string}
    field :price_range, {:array, :integer}
    field :maintenance_fee_range, {:array, :integer}
    field :lobby, :string
    field :user_name, :string
    field :owner_name, :string
  end

  @params ~w(infrastructure type min_rooms min_suites min_bathrooms min_garage_spots min_area
  preference_floor elevators nearby_subway neighborhoods price_range maintenance_fee_range lobby
  user_name owner_name)a

  # Salesforce sentinel meaning "no preference"; such fields become nil.
  @ignorable "Indiferente"

  @doc """
  Validates a map of already-parsed attributes, returning `{:ok, struct}` or
  `{:error, :invalid_input, params, changeset}`.
  """
  def validate(params) do
    %__MODULE__{}
    |> changeset(params)
    |> case do
      %{valid?: true} = changeset -> {:ok, apply_changes(changeset)}
      changeset -> {:error, :invalid_input, params, changeset}
    end
  end

  @doc """
  Builds and validates an opportunity from a raw Salesforce payload: keeps
  only known fields, converts each to its internal representation, then
  validates the result.
  """
  def build(payload) do
    payload
    |> Map.take(Schema.__valid_values__())
    |> Enum.into(%{}, &build_field/1)
    |> validate()
  end

  # "Indiferente" (no preference) on any field becomes nil.
  # Clause order matters: this must come before the per-field clauses below.
  defp build_field({field, @ignorable}) do
    with({:ok, key} <- Schema.cast(field), do: {key, nil})
  end

  # Purchase price bucket label -> [min, max] range in BRL.
  defp build_field({"Valor_M_ximo_para_Compra_2__c" = field, value}) do
    price_range = build_price_range(value)
    with({:ok, key} <- Schema.cast(field), do: {key, price_range})
  end

  # Maintenance fee bucket label -> [min, max] range in BRL.
  defp build_field({"Valor_M_ximo_de_Condom_nio__c" = field, value}) do
    maintenance_fee_range = build_maintenance_fee_range(value)
    with({:ok, key} <- Schema.cast(field), do: {key, maintenance_fee_range})
  end

  # Multi-pick lists arrive semicolon-separated; drop "Indiferente" entries
  # and sluggify the rest.
  @multipick_field ~w(Infraestrutura__c Bairros_de_Interesse__c)
  defp build_field({field, value}) when field in @multipick_field do
    features =
      value
      |> String.split(";")
      |> Enum.reject(&(&1 == @ignorable))
      |> Enum.map(&Slugs.sluggify(&1))

    with({:ok, key} <- Schema.cast(field), do: {key, features})
  end

  # "A partir de Nm²" label -> minimum area integer.
  defp build_field({"Area_Desejada__c" = field, value}) do
    min_area = build_min_area(value)
    with({:ok, key} <- Schema.cast(field), do: {key, min_area})
  end

  # Free-text labels normalized to slugs.
  @sluggify_fields ~w(Portaria_2__c Tipo_do_Imovel__c)
  defp build_field({field, value}) when field in @sluggify_fields do
    new_value = Slugs.sluggify(value)
    with({:ok, key} <- Schema.cast(field), do: {key, new_value})
  end

  # "Sim"/"Não" answers mapped to booleans.
  @boolean_field ~w(Necessita_Elevador__c Proximidade_de_Metr__c)
  @allowed_boolean_values ~w(Sim Não)
  @boolean_map %{
    "Sim" => true,
    "Não" => false
  }
  defp build_field({field, value})
       when field in @boolean_field and value in @allowed_boolean_values do
    boolean_value = Map.get(@boolean_map, value)
    with({:ok, key} <- Schema.cast(field), do: {key, boolean_value})
  end

  # Fallback: pass the value through unchanged under the internal key.
  defp build_field({field, value}) do
    with({:ok, key} <- Schema.cast(field), do: {key, value})
  end

  # Known maintenance-fee bucket labels; unknown labels yield nil.
  @maintenance_fee %{
    "Até R$500" => [0, 500],
    "R$500 a R$800" => [500, 800],
    "R$800 a R$1.000" => [800, 1000],
    "R$1.000 a R$1.200" => [1_000, 1_200],
    "R$1.200 a R$1.500" => [1_200, 1_500],
    "R$1.500 a R$2.000" => [1_500, 2_000],
    "Acima de R$2.000" => [2_000, 20_000]
  }
  defp build_maintenance_fee_range(key), do: Map.get(@maintenance_fee, key)

  # Known minimum-area labels; unknown labels yield nil.
  @min_area %{
    "A partir de 20m²" => 20,
    "A partir de 60m²" => 60,
    "A partir de 80m²" => 80,
    "A partir de 100m²" => 100,
    "A partir de 150m²" => 150
  }
  defp build_min_area(key), do: Map.get(@min_area, key)

  # Known purchase-price bucket labels; unknown labels yield nil.
  @price_range %{
    "Até R$400.000" => [0, 400_000],
    "Até R$500.000" => [0, 500_000],
    "Até R$600.000" => [0, 600_000],
    "Até R$700.000" => [0, 700_000],
    "Até R$800.000" => [0, 800_000],
    "Até R$900.000" => [0, 900_000],
    "Até R$1.000.000" => [0, 1_000_000],
    "Até R$1.500.000" => [0, 1_500_000],
    "De R$500.000 a R$750.000" => [500_000, 750_000],
    "De R$750.000 a R$1.000.000" => [750_000, 1_000_000],
    "De R$1.000.000 a R$1.500.000" => [1_000_000, 1_500_000],
    "Acima de R$2.000.000" => [2_000_000, 20_000_000]
  }
  defp build_price_range(key), do: Map.get(@price_range, key)

  defp changeset(struct, params), do: cast(struct, params, @params)
end
|
apps/re/lib/shortlists/salesforce.opportunity.ex
| 0.564098 | 0.401219 |
salesforce.opportunity.ex
|
starcoder
|
defmodule ExOauth2Provider.Plug.VerifyHeader do
  @moduledoc """
  Use this plug to authenticate a token contained in the header.
  You should set the value of the Authorization header to:
      Authorization: <token>

  ## Example

      plug ExOauth2Provider.Plug.VerifyHeader

  A "realm" can be specified when using the plug.
  Realms are like the name of the token and allow many tokens
  to be sent with a single request.

      plug ExOauth2Provider.Plug.VerifyHeader, realm: "Bearer"

  When a realm is not specified, the first authorization header
  found is used, and assumed to be a raw token

  #### example

      plug ExOauth2Provider.Plug.VerifyHeader
      # will take the first auth header
      # Authorization: <token>
  """

  @doc false
  def init(opts \\ %{}) do
    opts
    |> Enum.into(%{})
    |> set_realm_option
  end

  # Precompiles the realm regex once at plug-init time.
  # Fix: the pattern string previously used single backslashes
  # (`"#{realm}\:?\s+(.*)$"`), so Elixir's string escapes turned `\s` into a
  # literal space before the regex was compiled — tabs after the realm were
  # never matched. Escape the backslash so the regex engine receives `\s`.
  defp set_realm_option(%{realm: nil} = opts), do: opts

  defp set_realm_option(%{realm: realm} = opts) do
    {:ok, realm_regex} = Regex.compile("#{realm}:?\\s+(.*)$", "i")
    Map.put(opts, :realm_regex, realm_regex)
  end

  defp set_realm_option(opts), do: opts

  @doc false
  def call(conn, opts) do
    key = Map.get(opts, :key, :default)

    conn
    |> fetch_token(opts)
    |> verify_token(conn, key)
  end

  # Note: `@doc false` was removed from the private functions below — docs on
  # defp are always discarded and only produce compiler warnings.

  # Nothing to verify: pass the conn through untouched.
  defp verify_token(nil, conn, _), do: conn
  defp verify_token("", conn, _), do: conn

  # Authenticate the token and stash the result under `key` for later plugs.
  defp verify_token(token, conn, key) do
    access_token = ExOauth2Provider.authenticate_token(token)
    ExOauth2Provider.Plug.set_current_access_token(conn, access_token, key)
  end

  defp fetch_token(conn, opts) do
    fetch_token(conn, opts, Plug.Conn.get_req_header(conn, "authorization"))
  end

  # No authorization headers present.
  defp fetch_token(_, _, []), do: nil

  # With a realm: scan headers until one matches "<realm>[:] <token>".
  defp fetch_token(conn, %{realm_regex: realm_regex} = opts, [token | tail]) do
    trimmed_token = String.trim(token)

    case Regex.run(realm_regex, trimmed_token) do
      [_, match] -> String.trim(match)
      _ -> fetch_token(conn, opts, tail)
    end
  end

  # Without a realm: treat the first header value as the raw token.
  defp fetch_token(_, _, [token | _tail]), do: String.trim(token)
end
|
lib/ex_oauth2_provider/plug/verify_header.ex
| 0.709019 | 0.494507 |
verify_header.ex
|
starcoder
|
defmodule DistanceMatrixApi do
  @moduledoc """
  Provides functions to interact with Google Distance Matrix API.
  """

  @base_url "https://maps.googleapis.com/maps/api/distancematrix/json?"
  @separator "|"

  @doc """
  Expected usage :
      travels = DistanceMatrixApi.TravelList.new |>
      DistanceMatrixApi.TravelList.add_entry(%{origin: "Caen", destination: "Paris"}) |>
      DistanceMatrixApi.TravelList.add_entry(%{origin: "Lyon", destination: "Nice"}) |>
      DistanceMatrixApi.TravelList.add_entry(%{origin: %{lat: 45.764043, long: 4.835658999999964}, destination: %{lat: 48.856614, long: 2.3522219000000177}})
      travels |> DistanceMatrixApi.distances
  """
  def distances(travel_list, options \\ %{}) do
    %{origins: convert(:origin, travel_list), destinations: convert(:destination, travel_list)}
    |> make_request(options)
  end

  # Joins every origin (or destination) with the API's "|" separator.
  defp convert(key, travels) do
    Enum.map_join(travels, @separator, fn travel -> to_param(travel[key]) end)
  end

  # Locations may be free-text addresses or %{lat:, long:} coordinates.
  defp to_param(travel) when is_binary(travel), do: travel
  defp to_param(travel) when is_map(travel), do: "#{travel.lat},#{travel.long}"

  defp make_request(params, options) do
    # Fix: `if key, do: params = Map.put(params, :key, key)` never rebound
    # `params` outside the `if`, so the API key was silently dropped from
    # every request. Rebind explicitly instead.
    params =
      case key() do
        nil -> params
        api_key -> Map.put(params, :key, api_key)
      end

    params
    |> Map.merge(options)
    |> URI.encode_query()
    |> build_url()
    |> get!()
  end

  defp build_url(params), do: @base_url <> params

  # Performs the HTTP GET and decodes the JSON body; raises on non-200.
  defp get!(url) do
    HTTPoison.start()
    {:ok, %HTTPoison.Response{status_code: 200, body: body}} = HTTPoison.get(url, [], [])
    body |> Jason.decode!()
  end

  defp key() do
    Application.get_env(:distance_matrix_api, :api_key)
  end

  @doc """
  Pairs each origin with the destination at the same index and returns the
  corresponding matrix element (`%{origin: ..., destination: ..., rows: element}`).
  Returns the raw response unchanged when `status` is not "OK".
  """
  def each(x) do
    origin_addresses = x["origin_addresses"]
    destination_addresses = x["destination_addresses"]
    rows = x["rows"]

    case x["status"] do
      "OK" ->
        origin_addresses
        |> Enum.with_index()
        |> Enum.map(fn {origin, i} ->
          row = Enum.at(rows, i)
          # rows[i]["elements"][j] pairs origin i with destination j, so the
          # i-th destination's element lives at index i. (Fix: the index was
          # previously hard-coded to 1, always returning the second element.)
          element = Enum.at(row["elements"], i)
          %{origin: origin, destination: Enum.at(destination_addresses, i), rows: element}
        end)

      _ ->
        x
    end
  end

  @doc """
  Sums distance (meters) and duration (seconds) over all index-paired travels.
  """
  def total(x) do
    all = each(x)

    # Fix: previously read `x.distance` / `x.time`, keys that `each/1` never
    # produces (guaranteed crash). Each entry's `rows` is a Distance Matrix
    # element whose "distance"/"duration" maps carry a numeric "value".
    total_distance = Enum.reduce(all, 0, fn entry, acc -> entry.rows["distance"]["value"] + acc end)
    total_time = Enum.reduce(all, 0, fn entry, acc -> entry.rows["duration"]["value"] + acc end)
    %{distance: total_distance, time: total_time}
  end

  @doc """
  Returns the element for the first origin and the last destination
  (i.e. the first row's last element), or the raw response when not "OK".
  """
  def computed(x) do
    origin_addresses = x["origin_addresses"]
    destination_addresses = x["destination_addresses"]
    rows = x["rows"]

    case x["status"] do
      "OK" ->
        origin = List.first(origin_addresses)
        destination = List.last(destination_addresses)
        row = List.first(rows)
        element = List.last(row["elements"])
        %{origin: origin, destination: destination, rows: element}

      _ ->
        x
    end
  end
end
|
lib/distance_matrix_api.ex
| 0.798147 | 0.554229 |
distance_matrix_api.ex
|
starcoder
|
defmodule Crit.Assertions.Map do
  @moduledoc """
  Chainable ExUnit assertions over maps and structs.

  Every public assertion is defined with `defchain`, so it returns its first
  argument on success and can be piped into further assertions.
  """

  import Crit.Assertions.Defchain
  import ExUnit.Assertions

  @doc """
  Test the existence and value of multiple fields with a single assertion:
      assert_fields(some_map, key1: 12, key2: "hello")
  Alternately, you can test just for existence:
      assert_fields(some_map, [:key1, :key2]
  The second argument needn't contain all of the fields in the value under
  test.
  In case of success, the first argument is returned so that making multiple
  assertions about the same value can be done without verbosity:
      some_map
      |> assert_fields([:key1, :key2])
      |> assert_something_else
  """
  # Credit: <NAME> inspired this.
  defchain assert_fields(kvs, list) do
    # Shared presence check: catches struct-key typos before asserting.
    assert_present = fn key ->
      assert_no_typo_in_struct_key(kvs, key)
      assert Map.has_key?(kvs, key), "Field `#{inspect key}` is missing"
    end

    # Entries may be `{key, expected}` (check value) or a bare `key`
    # (check presence only).
    list
    |> Enum.map(fn
      {key, expected} ->
        assert_present.(key)
        assert_extended_equality(Map.get(kvs, key), expected, key)

      key ->
        assert_present.(key)
    end)
  end

  @doc """
  Same as `assert_fields` but more pleasingly grammatical
  when testing only one field:
      assert_field(some_map, key: "value")
  When checking existence, you don't have to use a list:
      assert_field(some_map, :key)
  """
  defchain assert_field(kvs, list) when is_list(list) do
    assert_fields(kvs, list)
  end

  defchain assert_field(kvs, singleton) do
    assert_fields(kvs, [singleton])
  end

  @doc """
  An equality comparison of two maps that gives control over
  which fields should not be compared, or should be compared differently.
  To exclude some fields from the comparison:
      assert_copy(new, old, ignoring: [:lock_version, :updated_at])
  To assert different values for particular fields (as in `assert_fields`):
      assert_copy(new, old,
        except: [lock_version: old.lock_version + 1,
                 people: &Enum.empty/1])
  Combine both for concise assertions:
      AnimalT.update_for_success(original_animal.id, params)
      |> assert_copy(original_animal,
           except:[
             in_service_datestring: dates.iso_next_in_service,
             span: Datespan.inclusive_up(dates.next_in_service),
             lock_version: 2]
           ignoring: [:updated_at])
  """
  defchain assert_copy(new, old, opts \\ []) do
    except = Keyword.get(opts, :except, [])

    # Fields with `except:` overrides are excluded from the equality check —
    # they are asserted separately below.
    ignoring_keys =
      Keyword.get(opts, :ignoring, []) ++ Keyword.keys(except)

    Enum.map(ignoring_keys, &(assert_no_typo_in_struct_key(new, &1)))

    assert_fields(new, except)
    assert Map.drop(new, ignoring_keys) == Map.drop(old, ignoring_keys)
  end

  # Value check: when `predicate` is a function it is applied to the actual
  # value; otherwise plain equality is asserted.
  defp assert_extended_equality(actual, predicate, key) when is_function(predicate) do
    msg = "#{inspect key} => #{inspect actual} fails predicate #{inspect predicate}"
    assert(predicate.(actual), msg)
  end

  defp assert_extended_equality(actual, expected, key) do
    msg =
      """
      `#{inspect key}` has the wrong value.
      actual:   #{inspect actual}
      expected: #{inspect expected}
      """
    assert(actual == expected, msg)
  end

  @doc """
  Complain if given a key that doesn't exist in the argument (if it's a struct).
  """
  defchain assert_no_typo_in_struct_key(map, key) do
    # Plain maps may legitimately lack keys, so the check only applies
    # to structs.
    if Map.has_key?(map, :__struct__) do
      assert Map.has_key?(map, key),
        "Test error: there is no key `#{inspect key}` in #{inspect map.__struct__}"
    end
  end

  # ----------------------------------------------------------------------------
  @doc """
  `assert_nothing` assumes a convention of initializing keys in a map to
  the sentinal value `:nothing`, with the expectation is that it will later
  be given a real value. This is useful in multi-step construction of, for
  example, CritWeb.Reservations.AfterTheFactStructs.
  `assert_nothing` requres that, for each key, the map's value for that key be
  `:nothing`.
  """
  defchain assert_nothing(map, keys) when is_list(keys) do
    Enum.map(keys, fn key ->
      refute(MapX.just?(map, key), "Expected key `#{inspect key}` to be `:nothing`")
    end)
  end

  def assert_nothing(map, key), do: assert_nothing(map, [key])

  @doc """
  `refute_nothing` assumes a convention of initializing keys in a map to
  the sentinal value `:nothing`, with the expectation is that it will later
  be given a real value. This is useful in multi-step construction of, for
  example, CritWeb.Reservations.AfterTheFactStructs.
  `refute_nothing` requres that, for each key, the map's value for that key *not*
  be `:nothing`.
  """
  defchain refute_nothing(map, keys) when is_list(keys) do
    Enum.map(keys, fn key ->
      assert(MapX.just?(map, key),
        "Key `#{inspect key}` unexpectedly has value `:nothing`")
    end)
  end

  def refute_nothing(map, key), do: refute_nothing(map, [key])
end
|
test/support/assertions/map.ex
| 0.774029 | 0.884987 |
map.ex
|
starcoder
|
defmodule Day13 do
  @moduledoc """
  Advent of Code 2021, day 13: fold a grid of dots along horizontal and
  vertical lines, then count or print the visible dots.
  """

  def part1(file_name \\ "test.txt") do
    file_name
    |> parse()
    |> grid()
    |> grab_first_fold()
    |> fold()
    |> count_visible()
  end

  def part2(file_name \\ "test.txt") do
    file_name
    |> parse()
    |> grid()
    |> fold()
    |> prep_for_print()
    |> IO.puts()
  end

  # Render the grid row by row; empty cells print as a space.
  def prep_for_print(%{grid: grid, max: {max_x, max_y}}) do
    for y <- 0..max_y do
      for x <- 0..max_x, into: "", do: Map.get(grid, {x, y}, " ")
    end
    |> Enum.join("\n")
  end

  def count_visible(%{grid: grid}) do
    grid |> Map.values() |> Enum.count(&(&1 == "#"))
  end

  # No folds left: the state is final.
  def fold(%{folds: []} = state), do: state

  # Horizontal fold: keep dots above the line, mirror those below upward.
  def fold(%{folds: [{"y", fold_y} | remaining], grid: grid, max: {max_x, max_y}}) do
    kept = top(grid, fold_y)
    mirrored = grid |> bottom(fold_y) |> flip_bottom(max_y)
    fold(%{folds: remaining, grid: Map.merge(kept, mirrored), max: {max_x, max_y - fold_y - 1}})
  end

  # Vertical fold: keep dots left of the line, mirror those right of it leftward.
  def fold(%{folds: [{"x", fold_x} | remaining], grid: grid, max: {max_x, max_y}}) do
    kept = left(grid, fold_x)
    mirrored = grid |> right(fold_x) |> flip_right(max_x)
    fold(%{folds: remaining, grid: Map.merge(kept, mirrored), max: {max_x - fold_x - 1, max_y}})
  end

  def flip_bottom(bottom, max_y) do
    for {{x, y}, dot} <- bottom, into: %{}, do: {{x, max_y - y}, dot}
  end

  def flip_right(right, max_x) do
    for {{x, y}, dot} <- right, into: %{}, do: {{max_x - x, y}, dot}
  end

  def top(grid, fold_y), do: Map.filter(grid, fn {{_x, y}, _dot} -> y < fold_y end)

  def bottom(grid, fold_y), do: Map.filter(grid, fn {{_x, y}, _dot} -> y > fold_y end)

  def left(grid, fold_x), do: Map.filter(grid, fn {{x, _y}, _dot} -> x < fold_x end)

  def right(grid, fold_x), do: Map.filter(grid, fn {{x, _y}, _dot} -> x > fold_x end)

  # Part 1 only performs the first fold instruction.
  def grab_first_fold(%{folds: [first | _rest]} = state), do: %{state | folds: [first]}

  # Build the initial state: each coordinate becomes a "#" cell.
  def grid(%{coords: coords, folds: folds}) do
    %{grid: Map.new(coords, &{&1, "#"}), folds: folds, max: max(coords)}
  end

  def max(coords), do: {max_x(coords), max_y(coords)}

  def max_x(coords) do
    coords |> Enum.map(fn {x, _y} -> x end) |> Enum.max()
  end

  def max_y(coords) do
    coords |> Enum.map(fn {_x, y} -> y end) |> Enum.max()
  end

  # Input file has a coordinates section and a folds section split by a
  # blank line.
  def parse(file_name) do
    [coords_section, folds_section] =
      File.read!("priv/" <> file_name)
      |> String.split("\n\n")

    %{coords: to_coords(coords_section), folds: to_folds(folds_section)}
  end

  # "fold along y=7" -> {"y", 7}
  def to_folds(section) do
    section
    |> String.split("\n", trim: true)
    |> Enum.map(fn <<"fold along ", axis::binary-size(1), _::binary-size(1), value::binary>> ->
      {axis, String.to_integer(value)}
    end)
  end

  # "6,10" -> {6, 10}
  def to_coords(section) do
    section
    |> String.split("\n", trim: true)
    |> Enum.map(fn pair ->
      pair
      |> String.split(",")
      |> Enum.map(&String.to_integer/1)
      |> List.to_tuple()
    end)
  end
end
|
jpcarver+elixir/day13/lib/day13.ex
| 0.604866 | 0.509276 |
day13.ex
|
starcoder
|
defmodule Day18.SnailfishMath do
  @moduledoc """
  Operate on Snailfish numbers.
  Snailfish numbers are represented as recursively nested lists
  (pairs).
  """

  @doc """
  Reduce number until no actions can be taken
  """
  def reduce([_left, _right] = number) do
    # Keep applying step/1 until it reports {:halt, result}.
    Stream.iterate({:cont, number}, fn {_flag, num} -> step(num) end)
    |> Enum.find_value(fn
      {:halt, result} -> result
      _ -> false
    end)
  end

  def reduce(lst) do
    # If the list is not a pair, reduce the list in pairs and accumulate
    Enum.reduce(lst, fn a, b -> reduce([b, a]) end)
  end

  @doc """
  Magnitude of a shellfish number
  """
  def magnitude([left, right]) do
    3 * magnitude(left) + 2 * magnitude(right)
  end

  def magnitude(number), do: number

  @doc """
  Execute a reducing step
  """
  def step(number) do
    # Explode takes priority over split; if neither applies we are done.
    with {number, nil} <- try_explode(number, 0),
         {number, false} <- try_split(number) do
      {:halt, number}
    else
      {number, _} -> {:cont, number}
    end
  end

  # try_explode returns {new_number, explosion} where explosion is nil (no
  # explosion happened) or a {left_value, right_value} tuple of values still
  # looking for a neighbor to absorb them; a side already absorbed is nil.
  #
  # Exploding happens when we have
  # - at least 4 levels of nesting
  # - both members of the pair are regular numbers
  defp try_explode([left, right], depth)
       when depth >= 4 and is_number(left) and is_number(right) do
    # The exploding pair is replaced by 0; its halves travel outward.
    {0, {left, right}}
  end

  defp try_explode([left, right], depth) do
    with {:left, {_, nil}} <- {:left, try_explode(left, depth + 1)},
         {:right, {_, nil}} <- {:right, try_explode(right, depth + 1)} do
      # No explosion, return unchanged
      {[left, right], nil}
    else
      # First 2 cases: explosion happened, but can't propagate from here
      {:left, {new_left, {_, nil} = explosion}} ->
        {[new_left, right], explosion}

      {:right, {new_right, {nil, _} = explosion}} ->
        {[left, new_right], explosion}

      # Next 2 cases: An explosion happened that can be propagated downward from here
      {:left, {new_left, {left_explosion, right_explosion}}} ->
        # The right half is absorbed by the leftmost leaf of our right branch.
        new_right = propagate_explosion(right, right_explosion, :left)
        {[new_left, new_right], {left_explosion, nil}}

      {:right, {new_right, {left_explosion, right_explosion}}} ->
        # The left half is absorbed by the rightmost leaf of our left branch.
        new_left = propagate_explosion(left, left_explosion, :right)
        {[new_left, new_right], {nil, right_explosion}}
    end
  end

  defp try_explode(number, _depth) when is_number(number) do
    {number, nil}
  end

  # Walk down the given side of the tree until a regular number is reached,
  # then add the exploded value to it.
  defp propagate_explosion([left, right], explode_number, :left) do
    [propagate_explosion(left, explode_number, :left), right]
  end

  defp propagate_explosion([left, right], explode_number, :right) do
    [left, propagate_explosion(right, explode_number, :right)]
  end

  defp propagate_explosion(number, explode_number, _) do
    number + explode_number
  end

  # try_split returns {new_number, split?}; only the leftmost number >= 10
  # is split per step.
  defp try_split([left, right]) do
    with {:left, {_, false}} <- {:left, try_split(left)},
         {:right, {_, false}} <- {:right, try_split(right)} do
      {[left, right], false}
    else
      {:left, {new_left, true}} -> {[new_left, right], true}
      {:right, {new_right, true}} -> {[left, new_right], true}
    end
  end

  # A regular number >= 10 splits into [floor(n/2), ceil(n/2)].
  defp try_split(number) when is_number(number) and number >= 10 do
    half = number / 2
    {[floor(half), ceil(half)], true}
  end

  defp try_split(number) when is_number(number) do
    {number, false}
  end
end
defmodule Day18 do
  @moduledoc """
  Advent of Code 2021, day 18: entry points for parsing the input and
  solving both parts using Day18.SnailfishMath.
  """

  alias Day18.SnailfishMath

  # Parses lines like "[[1,2],3]" into nested lists by evaluating them as
  # Elixir terms.
  # SECURITY NOTE(review): Code.eval_string/1 executes arbitrary code — this
  # is only acceptable for trusted puzzle input, never for untrusted data.
  def parse_input(input) do
    String.split(input, "\n", trim: true)
    |> Enum.map(&Code.eval_string/1)
    |> Enum.map(fn {result, _} -> result end)
  end

  # Sum (reduce) all numbers and return {magnitude, final_number}.
  def solve_part1(numbers) do
    final_number = SnailfishMath.reduce(numbers)
    {SnailfishMath.magnitude(final_number), final_number}
  end

  def solve_part2(numbers) do
    # Compute the magnitude of adding all possible combinations
    # (in both orders, because snailfish addition is not commutative)
    # NOTE(review): `a != b` compares values, so duplicate numbers in the
    # input are never paired with each other — confirm that is acceptable.
    for a <- numbers, b <- numbers, a != b do
      SnailfishMath.reduce([a, b]) |> SnailfishMath.magnitude()
    end
    |> Enum.max()
  end
end
|
day18/solver.ex
| 0.839076 | 0.755502 |
solver.ex
|
starcoder
|
defmodule Edeliver.Relup.Instructions.StartSection do
  @moduledoc """
  This upgrade instruction starts a new section
  and logs that info on the node which runs the upgrade and
  in the upgrade script started by the
  `$APP/bin/$APP upgrade $RELEASE` command. Usage:
  ```
  Edeliver.Relup.Instructions.StartSection.modify_relup(config, _section = :check)
  ```
  Available sections are:
  * `:check` -> Checks whether upgrade is possible. Before "point of no return"
  * `:suspend` -> Suspends processes before the upgrade. Right after the "point of no return"
  * `:upgrade` -> Runs the upgrade by (un-)loading new(/old) code and updating processes and applications
  * `:resume` -> Resumes processes after the upgrade that were suspended in the `:suspend` section.
  * `:finished` -> The upgrade finished successfully
  It uses the `Edeliver.Relup.Instructions.Info` instruction to
  display the section information.
  """
  use Edeliver.Relup.Instruction
  alias Edeliver.Relup.Instructions.Info

  # The closed set of valid section tags; any other atom hits the catch-all
  # clause of `modify_relup/3` and throws.
  @type section :: :check | :suspend | :upgrade | :resume | :finished

  # NOTE(review): the default argument `:default` is NOT a member of the
  # `section` type and is not handled below, so calling `modify_relup/2`
  # without an explicit section always throws — confirm whether the default
  # should be `:check` instead.
  @spec modify_relup(instructions::Instructions.t, config::Edeliver.Relup.Config.t, section_or_message::section|String.t) :: Instructions.t
  def modify_relup(instructions = %Instructions{}, config = %{}, section \\ :default) do
    case section do
      # Insert the info instruction at the position appropriate for each
      # section: checks run after object code is loaded, suspension after
      # the "point of no return", etc.
      :check -> Info.modify_relup(instructions, config,
                  _up_message = "==> Checking whether upgrade to version #{instructions.up_version} is possible...",
                  _down_message = "==> Checking whether downgrade to version #{instructions.down_version} is possible...",
                  _insert_where = &insert_after_load_object_code/2)
      :suspend -> Info.modify_relup(instructions, config,
                  _up_message = "==> Preparing upgrade to version #{instructions.up_version}...",
                  _down_message = "==> Preparing downgrade to version #{instructions.down_version}...",
                  _insert_where = &insert_after_point_of_no_return/2)
      :upgrade -> Info.modify_relup(instructions, config,
                  _up_message = "==> Upgrading release to version #{instructions.up_version}...",
                  _down_message = "==> Downgrading release to version #{instructions.down_version}...",
                  _insert_where = &append_after_point_of_no_return/2)
      # `:resume` appends two messages: the success notice and the resume
      # notice that follows it.
      :resume -> Info.modify_relup(instructions, config,
                  _up_message = "---> Upgrade to version #{instructions.up_version} succeeded.",
                  _down_message = "---> Downgrade to version #{instructions.down_version} succeeded.",
                  _insert_where = &append/2) |>
                 Info.modify_relup(config,
                  _up_message = "==> Resuming node after upgrade to version #{instructions.up_version}...",
                  _down_message = "==> Resuming node after downgrade to version #{instructions.down_version}...",
                  _insert_where = &append/2)
      :finished -> Info.modify_relup(instructions, config,
                  _up_message = "==> Finished upgrade to version #{instructions.up_version}...",
                  _down_message = "==> Finished downgrade to version #{instructions.down_version}...",
                  _insert_where = &append/2)
      unknown -> throw "Unknown section #{inspect unknown} for #{inspect __MODULE__} instruction."
    end
  end
end
|
lib/edeliver/relup/instructions/start_section.ex
| 0.595845 | 0.652996 |
start_section.ex
|
starcoder
|
defmodule Honeydew do
  @moduledoc """
  Top-level helpers for Honeydew worker pools: supervision spec creation,
  process-name derivation, and a `__using__` macro that injects the pool
  client API (`cast/2`, `call/3`, `status/1`, `suspend/1`, `resume/1`)
  into a worker module.
  """

  @doc """
  Creates a supervision spec for a pool.
  `pool_name` is how you'll refer to the queue to add a task.
  `worker_module` is the module that the workers in your queue will run.
  `worker_opts` are arguments handed to your module's `init/1`
  You can provide any of the following `pool_opts`:
  - `workers`: the number of workers in the pool
  - `max_failures`: the maximum number of times a job is allowed to fail before it's abandoned
  - `init_retry_secs`: the amount of time, in seconds, to wait before respawning a worker who's `init/1` function failed
  """
  def child_spec(pool_name, worker_module, worker_opts, pool_opts \\ []) do
    # Unique supervisor id per {worker_module, pool_name} pair so several
    # pools can coexist under one supervision tree.
    # NOTE(review): `Supervisor.Spec` is deprecated in modern Elixir —
    # confirm the supported Elixir version before modernizing.
    id = Module.concat([Honeydew, Supervisor, worker_module, pool_name])
    Supervisor.Spec.supervisor(Honeydew.Supervisor, [pool_name, worker_module, worker_opts, pool_opts], id: id)
  end

  @doc false
  # Registered name of the pool's work-queue GenServer.
  def work_queue_name(worker_module, pool_name) do
    Module.concat([Honeydew, WorkQueue, worker_module, pool_name])
  end

  @doc false
  # Registered name of the pool's worker supervisor.
  def worker_supervisor_name(worker_module, pool_name) do
    Module.concat([Honeydew, WorkerSupervisor, worker_module, pool_name])
  end

  # Injects the pool client API into the worker module; each function
  # resolves the work-queue process name from `__MODULE__` + pool name.
  defmacro __using__(_env) do
    quote do
      @doc """
      Enqueue a job, and don't wait for the result, similar to GenServer's `cast/2`
      The task can be:
      - a function that takes the worker's state as an argument. `fn(state) -> IO.inspect(state) end`
      - the name of a function implemented in your worker module, with optional arguments:
      `cast(:your_function)`
      `cast({:your_function, [:an_arg, :another_arg]})`
      """
      def cast(pool_name, task) do
        __MODULE__
        |> Honeydew.work_queue_name(pool_name)
        |> GenServer.cast({:add_task, task})
      end

      @doc """
      Enqueue a job, and wait for the result, similar to GenServer's `call/3`
      Supports the same argument as `cast/1` above, and an additional `timeout` argument.
      """
      def call(pool_name, task, timeout \\ 5000) do
        __MODULE__
        |> Honeydew.work_queue_name(pool_name)
        |> GenServer.call({:add_task, task}, timeout)
      end

      @doc """
      Gets the current status of the worker's queue (work queue, backlog, waiting/working workers)
      """
      def status(pool_name) do
        __MODULE__
        |> Honeydew.work_queue_name(pool_name)
        |> GenServer.call(:status)
      end

      # Pauses job dispatch for the pool.
      def suspend(pool_name) do
        __MODULE__
        |> Honeydew.work_queue_name(pool_name)
        |> GenServer.call(:suspend)
      end

      # Resumes job dispatch for the pool.
      def resume(pool_name) do
        __MODULE__
        |> Honeydew.work_queue_name(pool_name)
        |> GenServer.call(:resume)
      end
    end
  end
end
|
lib/honeydew.ex
| 0.701713 | 0.471041 |
honeydew.ex
|
starcoder
|
defmodule Breadboard.GPIO.SunxiPin do
  @moduledoc false
  # Pin definitions for Allwinner (sunxi) SoCs. The `use`d helper derives
  # the full pinout map from `pinout_definition/0` and `pin_to_sysfs_pin/2`.
  use Breadboard.GPIO.BaseGPIOHelper

  # Physical header pin number -> SoC pad name, e.g. pin 3 becomes
  # [sysfs: 12, pin_key: :pin3, pin_label: :pa12, pin_name: "PA12", pin: 3]
  @pinout_map %{
    3 => [pin_name: "PA12", mux2_label: "TWIO_SDA"],
    5 => [pin_name: "PA11"],
    7 => [pin_name: "PA6"],
    8 => [pin_name: "PA13"],
    10 => [pin_name: "PA14"],
    11 => [pin_name: "PA1"],
    12 => [pin_name: "PD14"],
    13 => [pin_name: "PA0"],
    15 => [pin_name: "PA3"],
    16 => [pin_name: "PC4"],
    18 => [pin_name: "PC7"],
    19 => [pin_name: "PC0"],
    21 => [pin_name: "PC1"],
    22 => [pin_name: "PA2"],
    23 => [pin_name: "PC2"],
    24 => [pin_name: "PC3"],
    26 => [pin_name: "PA21"],
    28 => [pin_name: "PA18"],
    29 => [pin_name: "PA7"],
    31 => [pin_name: "PA8"],
    32 => [pin_name: "PG8"],
    33 => [pin_name: "PA9"],
    35 => [pin_name: "PA10"],
    36 => [pin_name: "PG9"],
    37 => [pin_name: "PA20"],
    38 => [pin_name: "PG6"],
    40 => [pin_name: "PG7"]
  }

  # Raw pin table consumed by the BaseGPIOHelper callbacks.
  def pinout_definition(), do: @pinout_map

  # The sysfs number is derived solely from the pad name; the physical
  # pin number is not needed.
  def pin_to_sysfs_pin(_pin_number, info) do
    label_to_sysfs_pin(info[:pin_name])
  end

  # https://linux-sunxi.org/GPIO
  # To obtain the correct number you have to calculate it from the pin name (like PH18):
  # (position of letter in alphabet - 1) * 32 + pin number
  # E.g for PH18 this would be ( 8 - 1) * 32 + 18 = 224 + 18 = 242 (since 'h' is the 8th letter).
  # `base - ?A` yields the zero-based letter index ("A" -> 0, "G" -> 6).
  defp label_to_sysfs_pin(_label = <<"P", base::utf8, num::binary>>) do
    (base - ?A) * 32 + String.to_integer(num)
  end
end
defmodule Breadboard.GPIO.SunxiGPIO do
  @moduledoc """
  Manage the pinout of GPIOs in **sunxi** hardware layer for platforms **ARM SoCs family from Allwinner Technology**.
  For this platform there isn't a simple mapping (one to one) as explained in the [linux-sunxi community](https://linux-sunxi.org/GPIO#Accessing_the_GPIO_pins_through_sysfs_with_mainline_kernel), for example the pin number 3 (`PA12`) is classified as:
  ```
  [pin: 3, sysfs: 12, pin_key: :pin3, pin_label: :pa12, pin_name: "PA12"]
  ```
  so the complete pinout map is in the form:
  ```
  %{
    {:pin, 3} => [pin: 3, sysfs: 12, pin_key: :pin3, pin_label: :pa12, pin_name: "PA12"],
    {:sysfs, 3} => [pin: 3, sysfs: 12, pin_key: :pin3, pin_label: :pa12, pin_name: "PA12"],
    {:pin_key, :pin3} => [pin: 3, sysfs: 12, pin_key: :pin3, pin_label: :pa12, pin_name: "PA12"],
    {:pin_label, :pa12} => [pin: 3, sysfs: 12, pin_key: :pin3, pin_label: :pa12, pin_name: "PA12"],
    {:pin_name, "PA12"} => [pin: 3, sysfs: 12, pin_key: :pin3, pin_label: :pa12, pin_name: "PA12"],
    ...
    {:pin, 32} => [pin: 32, sysfs: 200, pin_key: :pin32, pin_label: :pg8, pin_name: "PG8"],
    {...} => ...
    ...
    {:pin, 40} => [pin: 40, sysfs: 199, pin_key: :pin40, pin_label: :pg7, pin_name: "PG7"],
    {...} => ...
  }
  ```
  """

  # Built once at compile time from the sunxi pin table; SunxiPin must be
  # compiled before this module.
  @pinout_map Breadboard.GPIO.SunxiPin.build_pinout_map()

  use Breadboard.GPIO.BaseGPIO

  # BaseGPIO callback: full multi-keyed lookup table (see @moduledoc).
  def pinout_map(), do: @pinout_map
end

# SPDX-License-Identifier: Apache-2.0
# SPDX-License-Identifier: Apache-2.0
|
lib/breadboard/gpio/sunxi_gpio.ex
| 0.559049 | 0.765987 |
sunxi_gpio.ex
|
starcoder
|
defmodule ExHashRing.Info do
  @moduledoc """
  Provides an interface for querying information about Rings.
  Each Ring has some associated information that is available at all times to aid in performing
  client-context queries into the underlying ETS table.
  """
  use GenServer

  alias ExHashRing.Ring

  @typedoc """
  For any ring information can be looked up that will provide an entry containing specifics about
  the table holding the ring data, the configured history depth, sizes for each generation in the
  history, the current generation, and any overrides that should be applied during lookup.
  """
  @type entry :: {
          table :: :ets.tid(),
          depth :: Ring.depth(),
          sizes :: [Ring.size()],
          generation :: Ring.generation(),
          overrides :: Ring.overrides()
        }

  @type t :: %__MODULE__{
          monitored_pids: %{pid() => reference()}
        }

  defstruct monitored_pids: %{}

  ## Client

  @spec start_link() :: GenServer.on_start()
  def start_link() do
    GenServer.start_link(__MODULE__, :ok, name: __MODULE__)
  end

  @doc """
  Retrieves the info entry for the specified ring.

  Accepts either a registered ring name or a ring pid; returns
  `{:error, :no_ring}` when the name is not registered or no entry exists.
  """
  @spec get(name :: Ring.name()) :: {:ok, entry()} | {:error, :no_ring}
  def get(name) when is_atom(name) do
    case Process.whereis(name) do
      nil ->
        {:error, :no_ring}

      pid ->
        get(pid)
    end
  end

  @spec get(pid()) :: {:ok, entry()} | {:error, :no_ring}
  def get(pid) when is_pid(pid) do
    # Reads go straight to the protected ETS table; no GenServer round trip.
    case :ets.lookup(__MODULE__, pid) do
      [{^pid, entry}] ->
        {:ok, entry}

      _ ->
        {:error, :no_ring}
    end
  end

  @doc """
  Sets the info entry for the specified ring.
  """
  @spec set(name :: Ring.name(), entry()) :: :ok | {:error, :no_ring}
  def set(name, entry) when is_atom(name) do
    case Process.whereis(name) do
      nil ->
        {:error, :no_ring}

      pid ->
        set(pid, entry)
    end
  end

  @spec set(pid(), entry()) :: :ok
  def set(pid, entry) when is_pid(pid) do
    # Writes are serialized through the GenServer, which owns the table.
    GenServer.call(__MODULE__, {:set, pid, entry})
  end

  ## Server

  @impl GenServer
  @spec init(:ok) :: {:ok, t}
  def init(:ok) do
    :ets.new(__MODULE__, [
      :protected,
      :set,
      :named_table,
      {:read_concurrency, true}
    ])

    {:ok, %__MODULE__{}}
  end

  @impl GenServer
  def handle_call({:set, pid, entry}, _from, state) do
    # Monitor the ring so its entry is cleaned up when it dies.
    state = monitor_ring(state, pid)
    true = :ets.insert(__MODULE__, {pid, entry})
    {:reply, :ok, state}
  end

  @impl GenServer
  def handle_info({:DOWN, monitor_ref, :process, pid, _reason}, %__MODULE__{} = state) do
    monitored_pids =
      case Map.pop(state.monitored_pids, pid) do
        {nil, monitored_pids} ->
          # Unknown pid: nothing to clean up.
          monitored_pids

        {^monitor_ref, monitored_pids} ->
          # Assertive match on the stored ref; a mismatch would be a bug.
          :ets.delete(__MODULE__, pid)
          monitored_pids
      end

    {:noreply, %__MODULE__{state | monitored_pids: monitored_pids}}
  end

  ## Private

  # Monitors `pid` exactly once, remembering the monitor ref for cleanup.
  @spec monitor_ring(state :: t(), pid()) :: t()
  defp monitor_ring(%__MODULE__{} = state, pid) do
    monitored_pids =
      Map.put_new_lazy(state.monitored_pids, pid, fn ->
        Process.monitor(pid)
      end)

    %__MODULE__{state | monitored_pids: monitored_pids}
  end
end
|
lib/ex_hash_ring/info.ex
| 0.820865 | 0.415996 |
info.ex
|
starcoder
|
defmodule Sentry.Client do
  @moduledoc ~S"""
  This module interfaces directly with Sentry via HTTP.
  The client itself can be configured via the :client
  configuration. It must implement the `Sentry.HTTPClient`
  behaviour and it defaults to `Sentry.HackneyClient`.
  It makes use of `Task.Supervisor` to allow sending tasks
  synchronously or asynchronously, and defaulting to asynchronous.
  See `send_event/2` for more information.
  ### Configuration
  * `:before_send_event` - allows performing operations on the event before
    it is sent. Accepts an anonymous function or a `{module, function}` tuple,
    and the event will be passed as the only argument.
  * `:after_send_event` - callback that is called after attempting to send an event.
    Accepts an anonymous function or a `{module, function}` tuple. The result of the
    HTTP call as well as the event will be passed as arguments. The return value of
    the callback is not returned.
  Example configuration of putting Logger metadata in the extra context:
      config :sentry,
        before_send_event: {MyModule, :before_send},
        after_send_event: {MyModule, :after_send_event}
  where:
      defmodule MyModule do
        def before_send(event) do
          metadata = Map.new(Logger.metadata)
          %{event | extra: Map.merge(event.extra, metadata)}
        end
        def after_send_event(event, result) do
          case result do
            {:ok, id} ->
              Logger.info("Successfully sent event!")
            _ ->
              Logger.info(fn -> "Did not successfully send event! #{inspect(event)}" end)
          end
        end
      end
  """
  alias Sentry.{Config, Event, Util}
  require Logger

  @type send_event_result ::
          {:ok, Task.t() | String.t() | pid()} | {:error, any()} | :unsampled | :excluded
  @type dsn :: {String.t(), String.t(), String.t()}
  @type result :: :sync | :none | :async

  @sentry_version 5
  @sentry_client "sentry-elixir/#{Mix.Project.config()[:version]}"

  # Max message length per https://github.com/getsentry/sentry/blob/0fcec33ac94ad81a205f86f208072b0f57b39ff4/src/sentry/conf/server.py#L1021
  @max_message_length 8_192

  @doc """
  Attempts to send the event to the Sentry API up to 4 times with exponential backoff.
  The event is dropped if it all retries fail.
  Errors will be logged unless the source is the Sentry.LoggerBackend, which can
  deadlock by logging within a logger.
  ### Options
  * `:result` - Allows specifying how the result should be returned. Options include
    `:sync`, `:none`, and `:async`. `:sync` will make the API call synchronously, and
    return `{:ok, event_id}` if successful. `:none` sends the event from an unlinked
    child process under `Sentry.TaskSupervisor` and will return `{:ok, ""}` regardless
    of the result. `:async` will start an unlinked task and return a tuple of `{:ok, Task.t}`
    on success where the Task should be awaited upon to receive the result asynchronously.
    If you do not call `Task.await/2`, messages will be leaked to the inbox of the current
    process. See `Task.Supervisor.async_nolink/2` for more information. `:none` is the default.
  * `:sample_rate` - The sampling factor to apply to events. A value of 0.0 will deny sending
    any events, and a value of 1.0 will send 100% of events.
  * Other options, such as `:stacktrace` or `:extra` will be passed to `Sentry.Event.create_event/1`
    downstream. See `Sentry.Event.create_event/1` for available options.
  """
  @spec send_event(Event.t()) :: send_event_result
  def send_event(%Event{} = event, opts \\ []) do
    result = Keyword.get(opts, :result, Config.send_result())
    sample_rate = Keyword.get(opts, :sample_rate) || Config.sample_rate()
    # Avoid logging from within the logger backend to prevent deadlock.
    should_log = event.event_source != :logger

    event = maybe_call_before_send_event(event)

    case {event, sample_event?(sample_rate)} do
      {false, _} ->
        # before_send_event callback vetoed the event.
        :excluded

      {%Event{}, false} ->
        :unsampled

      {%Event{}, true} ->
        encode_and_send(event, result, should_log)
    end
  end

  # Encodes the event into an envelope and dispatches it per `result` mode.
  @spec encode_and_send(Event.t(), result(), boolean()) :: send_event_result()
  defp encode_and_send(event, result, should_log) do
    result =
      Sentry.Envelope.new()
      |> Sentry.Envelope.add_event(event)
      |> Sentry.Envelope.to_binary()
      |> case do
        {:ok, body} ->
          do_send_event(event, body, result)

        {:error, error} ->
          {:error, {:invalid_json, error}}
      end

    if match?({:ok, _}, result) do
      Sentry.put_last_event_id_and_source(event.event_id, event.event_source)
    end

    if should_log do
      maybe_log_result(result)
    end

    result
  end

  @spec do_send_event(Event.t(), binary(), :async) :: {:ok, Task.t()} | {:error, any()}
  defp do_send_event(event, body, :async) do
    case get_headers_and_endpoint() do
      {endpoint, auth_headers} when is_binary(endpoint) ->
        {:ok,
         Task.Supervisor.async_nolink(Sentry.TaskSupervisor, fn ->
           try_request(endpoint, auth_headers, {event, body}, Config.send_max_attempts())
           |> maybe_call_after_send_event(event)
         end)}

      {:error, :invalid_dsn} ->
        {:error, :invalid_dsn}
    end
  end

  @spec do_send_event(Event.t(), binary(), :sync) :: {:ok, String.t()} | {:error, any()}
  defp do_send_event(event, body, :sync) do
    case get_headers_and_endpoint() do
      {endpoint, auth_headers} when is_binary(endpoint) ->
        try_request(endpoint, auth_headers, {event, body}, Config.send_max_attempts())
        |> maybe_call_after_send_event(event)

      {:error, :invalid_dsn} ->
        {:error, :invalid_dsn}
    end
  end

  # Fire-and-forget: the request runs in an unlinked child and the caller
  # always receives `{:ok, ""}` (previous spec wrongly claimed
  # `{:ok, DynamicSupervisor.on_start_child()}`).
  @spec do_send_event(Event.t(), binary(), :none) :: {:ok, String.t()} | {:error, any()}
  defp do_send_event(event, body, :none) do
    case get_headers_and_endpoint() do
      {endpoint, auth_headers} when is_binary(endpoint) ->
        Task.Supervisor.start_child(Sentry.TaskSupervisor, fn ->
          try_request(endpoint, auth_headers, {event, body}, Config.send_max_attempts())
          |> maybe_call_after_send_event(event)
        end)

        {:ok, ""}

      {:error, :invalid_dsn} ->
        {:error, :invalid_dsn}
    end
  end

  # Retries with exponential backoff (see `sleep/1`); gives up after
  # `max_attempts`, returning the last transport error.
  @spec try_request(
          String.t(),
          list({String.t(), String.t()}),
          {Event.t(), String.t()},
          pos_integer(),
          {pos_integer(), any()}
        ) :: {:ok, String.t()} | {:error, {:request_failure, any()}}
  defp try_request(url, headers, event_body_tuple, max_attempts, current \\ {1, nil})

  defp try_request(_url, _headers, {_event, _body}, max_attempts, {current_attempt, last_error})
       when current_attempt > max_attempts,
       do: {:error, {:request_failure, last_error}}

  defp try_request(url, headers, {event, body}, max_attempts, {current_attempt, _last_error}) do
    case request(url, headers, body) do
      {:ok, id} ->
        {:ok, id}

      {:error, error} ->
        if current_attempt < max_attempts, do: sleep(current_attempt)
        try_request(url, headers, {event, body}, max_attempts, {current_attempt + 1, error})
    end
  end

  @doc """
  Makes the HTTP request to Sentry using the configured HTTP client.
  """
  @spec request(String.t(), list({String.t(), String.t()}), String.t()) ::
          {:ok, String.t()} | {:error, term()}
  def request(url, headers, body) do
    json_library = Config.json_library()

    with {:ok, 200, _, body} <- Config.client().post(url, headers, body),
         {:ok, json} <- json_library.decode(body) do
      {:ok, Map.get(json, "id")}
    else
      {:ok, status, headers, _body} ->
        error_header = :proplists.get_value("X-Sentry-Error", headers, "")
        error = "Received #{status} from Sentry server: #{error_header}"
        {:error, error}

      e ->
        {:error, e}
    end
  end

  @doc """
  Generates a Sentry API authorization header.
  """
  @spec authorization_header(String.t(), String.t()) :: String.t()
  def authorization_header(public_key, secret_key) do
    timestamp = Util.unix_timestamp()

    data = [
      sentry_version: @sentry_version,
      sentry_client: @sentry_client,
      sentry_timestamp: timestamp,
      sentry_key: public_key,
      sentry_secret: secret_key
    ]

    query =
      data
      |> Enum.filter(fn {_, value} -> value != nil end)
      |> Enum.map(fn {name, value} -> "#{name}=#{value}" end)
      |> Enum.join(", ")

    "Sentry " <> query
  end

  @doc """
  Get a Sentry DSN which is simply a URI.
  {PROTOCOL}://{PUBLIC_KEY}[:{SECRET_KEY}]@{HOST}/{PATH}{PROJECT_ID}
  """
  @spec get_dsn :: dsn | {:error, :invalid_dsn}
  def get_dsn do
    dsn = Config.dsn()

    with dsn when is_binary(dsn) <- dsn,
         %URI{userinfo: userinfo, host: host, port: port, path: path, scheme: protocol}
         when is_binary(path) and is_binary(userinfo) <- URI.parse(dsn),
         [public_key, secret_key] <- keys_from_userinfo(userinfo),
         [_, binary_project_id] <- String.split(path, "/"),
         {project_id, ""} <- Integer.parse(binary_project_id),
         endpoint <- "#{protocol}://#{host}:#{port}/api/#{project_id}/envelope/" do
      {endpoint, public_key, secret_key}
    else
      _ ->
        {:error, :invalid_dsn}
    end
  end

  # Invokes the configured :after_send_event callback (if any) and returns
  # the original result unchanged. Previous spec wrongly declared Event.t()
  # as the return type.
  @spec maybe_call_after_send_event(send_event_result, Event.t()) :: send_event_result
  def maybe_call_after_send_event(result, event) do
    case Config.after_send_event() do
      function when is_function(function, 2) ->
        function.(event, result)

      {module, function} ->
        apply(module, function, [event, result])

      nil ->
        nil

      _ ->
        raise ArgumentError,
          message: ":after_send_event must be an anonymous function or a {Module, Function} tuple"
    end

    result
  end

  # Invokes the configured :before_send_event callback; a falsy callback
  # return excludes the event (normalized to `false`).
  @spec maybe_call_before_send_event(Event.t()) :: Event.t() | false
  def maybe_call_before_send_event(event) do
    case Config.before_send_event() do
      function when is_function(function, 1) ->
        function.(event) || false

      {module, function} ->
        apply(module, function, [event]) || false

      nil ->
        event

      _ ->
        raise ArgumentError,
          message:
            ":before_send_event must be an anonymous function or a {Module, Function} tuple"
    end
  end

  @doc """
  Transform the Event struct into JSON map.
  Most Event attributes map directly to JSON map, with stacktrace being the
  exception. If the event does not have stacktrace frames, it should not
  be included in the JSON body.
  """
  @spec render_event(Event.t()) :: map()
  def render_event(%Event{} = event) do
    # :stacktrace is deliberately absent from the base map; it is added
    # below only when frames exist. (Previously it was always included,
    # contradicting the documented contract.)
    map = %{
      event_id: event.event_id,
      culprit: event.culprit,
      timestamp: event.timestamp,
      message: String.slice(event.message, 0, @max_message_length),
      tags: event.tags,
      level: event.level,
      platform: event.platform,
      server_name: event.server_name,
      environment: event.environment,
      exception: event.exception,
      release: event.release,
      request: event.request,
      extra: event.extra,
      user: event.user,
      breadcrumbs: event.breadcrumbs,
      fingerprint: event.fingerprint,
      contexts: event.contexts,
      modules: event.modules
    }

    case event.stacktrace do
      %{frames: [_ | _]} ->
        Map.put(map, :stacktrace, event.stacktrace)

      _ ->
        map
    end
  end

  # Logs a human-readable description of a failed send; successes and
  # sampling decisions are silent.
  def maybe_log_result(result) do
    message =
      case result do
        {:error, :invalid_dsn} ->
          "Cannot send Sentry event because of invalid DSN"

        {:error, {:invalid_json, error}} ->
          "Unable to encode JSON Sentry error - #{inspect(error)}"

        {:error, {:request_failure, last_error}} ->
          "Error in HTTP Request to Sentry - #{inspect(last_error)}"

        {:error, error} ->
          inspect(error)

        _ ->
          nil
      end

    if message != nil do
      Logger.log(
        Config.log_level(),
        fn ->
          ["Failed to send Sentry event. ", message]
        end,
        domain: [:sentry]
      )
    end
  end

  @spec authorization_headers(String.t(), String.t()) :: list({String.t(), String.t()})
  defp authorization_headers(public_key, secret_key) do
    [
      {"User-Agent", @sentry_client},
      {"X-Sentry-Auth", authorization_header(public_key, secret_key)}
    ]
  end

  # Splits "public:secret" or "public" DSN userinfo into a key pair.
  defp keys_from_userinfo(userinfo) do
    case String.split(userinfo, ":", parts: 2) do
      [public, secret] -> [public, secret]
      [public] -> [public, nil]
      _ -> :error
    end
  end

  @spec get_headers_and_endpoint ::
          {String.t(), list({String.t(), String.t()})} | {:error, :invalid_dsn}
  defp get_headers_and_endpoint do
    case get_dsn() do
      {endpoint, public_key, secret_key} ->
        {endpoint, authorization_headers(public_key, secret_key)}

      {:error, :invalid_dsn} ->
        {:error, :invalid_dsn}
    end
  end

  # Exponential backoff: 2s, 4s, then 8s for every further attempt.
  @spec sleep(pos_integer()) :: :ok
  defp sleep(1), do: :timer.sleep(2000)
  defp sleep(2), do: :timer.sleep(4000)
  defp sleep(3), do: :timer.sleep(8000)
  defp sleep(_), do: :timer.sleep(8000)

  # Fast paths for the degenerate rates; otherwise Bernoulli sampling.
  @spec sample_event?(number()) :: boolean()
  defp sample_event?(1), do: true
  defp sample_event?(1.0), do: true
  defp sample_event?(0), do: false
  defp sample_event?(0.0), do: false

  defp sample_event?(sample_rate) do
    :rand.uniform() < sample_rate
  end
end
|
lib/sentry/client.ex
| 0.868994 | 0.492615 |
client.ex
|
starcoder
|
defmodule Geolix.Adapter.LookupCache do
  @moduledoc """
  Lookup cache adapter for Geolix.
  ## Adapter Configuration
  To start using the adapter in front of a regular adapter you need to modify
  the database entry of your `:geolix` configuration:
      config :geolix,
        databases: [
          %{
            id: :my_lookup_id,
            adapter: Geolix.Adapter.LookupCache,
            cache: %{
              id: :my_cache_id,
              adapter: MyCustomCacheAdapter
            },
            lookup: %{
              adapter: Geolix.Adapter.Fake,
              data: %{
                {1, 1, 1, 1} => %{country: %{iso_code: "US"}},
                {2, 2, 2, 2} => %{country: %{iso_code: "GB"}}
              }
            }
          }
        ]
  ## Lookup Adapter Configuration
  The configuration for your lookup adapter should contain at least the
  `:adapter` key to select the proper adapter. The `:id` value will
  automatically be set to the main database id and is not configurable.
  Please consult the used adapter's documentation for additional requirements
  and options.
  ## Cache Adapter Configuration
  A map with at least an `:id` and an `:adapter` key is required to define
  the cache to use.
  Please consult the used adapter's documentation for additional requirements
  and options.
  ### Cache Adapter Implementation
  Adapters for the following cache libraries are pre-packaged:
  - `Geolix.Adapter.LookupCache.CacheAdapter.Cachex`
  - `Geolix.Adapter.LookupCache.CacheAdapter.ConCache`
  - `Geolix.Adapter.LookupCache.CacheAdapter.Fake`
  To use any of these you also need to add the library itself as a dependency
  to your application. The compatible versions used for testing are configured
  as optional dependencies of `:geolix_adapter_lookup_cache`.
  If you intend to use a custom cache adapter you should adhere to the
  `Geolix.Adapter.LookupCache.CacheAdapter` behaviour.
  """

  @typedoc """
  Cache base type.
  """
  @type cache :: %{
          required(:id) => atom,
          required(:adapter) => module
        }

  @typedoc """
  Extended base database type.
  """
  @type database :: %{
          required(:id) => atom,
          required(:adapter) => module,
          required(:cache) => cache,
          required(:lookup) => Geolix.database()
        }

  @behaviour Geolix.Adapter

  # Collects worker specs from both the cache adapter and the wrapped
  # lookup adapter; either side may omit the optional callback.
  @impl Geolix.Adapter
  def database_workers(%{
        id: database_id,
        cache: %{adapter: cache_adapter} = cache,
        lookup: %{adapter: database_adapter} = database
      }) do
    # The inner database inherits the outer id so both adapters agree.
    database = Map.put(database, :id, database_id)

    cache_workers =
      if Code.ensure_loaded?(cache_adapter) and
           function_exported?(cache_adapter, :cache_workers, 2) do
        cache_adapter.cache_workers(database, cache)
      else
        []
      end

    database_workers =
      if Code.ensure_loaded?(database_adapter) and
           function_exported?(database_adapter, :database_workers, 1) do
        database_adapter.database_workers(database)
      else
        []
      end

    cache_workers ++ database_workers
  end

  # Loads the cache (optional callback) and then the wrapped database.
  @impl Geolix.Adapter
  def load_database(%{
        id: database_id,
        cache: %{adapter: cache_adapter} = cache,
        lookup: %{adapter: database_adapter} = database
      }) do
    database = Map.put(database, :id, database_id)

    if Code.ensure_loaded?(cache_adapter) and function_exported?(cache_adapter, :load_cache, 2) do
      :ok = cache_adapter.load_cache(database, cache)
    end

    if Code.ensure_loaded?(database_adapter) do
      if function_exported?(database_adapter, :load_database, 1) do
        database_adapter.load_database(database)
      else
        :ok
      end
    else
      {:error, {:config, :unknown_adapter}}
    end
  end

  # Cache-aside lookup: serve from cache on hit, populate on miss, and
  # fall through to the wrapped adapter if the cache errors.
  @impl Geolix.Adapter
  def lookup(ip, opts, %{
        id: database_id,
        cache: %{adapter: cache_adapter} = cache,
        lookup: %{adapter: database_adapter} = database
      }) do
    database = Map.put(database, :id, database_id)

    case cache_adapter.get(ip, opts, database, cache) do
      {:ok, result} when is_map(result) ->
        result

      {:ok, nil} ->
        result = database_adapter.lookup(ip, opts, database)
        :ok = cache_adapter.put(ip, opts, database, cache, result)
        result

      {:error, _} ->
        database_adapter.lookup(ip, opts, database)
    end
  end

  # Delegates to the wrapped adapter's optional metadata callback.
  @impl Geolix.Adapter
  def metadata(%{
        id: database_id,
        lookup: %{adapter: database_adapter} = database
      }) do
    if function_exported?(database_adapter, :metadata, 1) do
      database
      |> Map.put(:id, database_id)
      |> database_adapter.metadata()
    else
      nil
    end
  end

  # Unloads the cache (optional callback) and then the wrapped database.
  @impl Geolix.Adapter
  def unload_database(%{
        id: database_id,
        cache: %{adapter: cache_adapter} = cache,
        lookup: %{adapter: database_adapter} = database
      }) do
    database = Map.put(database, :id, database_id)

    if function_exported?(cache_adapter, :unload_cache, 2) do
      :ok = cache_adapter.unload_cache(database, cache)
    end

    if function_exported?(database_adapter, :unload_database, 1) do
      database_adapter.unload_database(database)
    else
      :ok
    end
  end
end
|
lib/lookup_cache.ex
| 0.721645 | 0.465205 |
lookup_cache.ex
|
starcoder
|
defmodule AshPostgres.Reference do
  @moduledoc """
  Contains configuration for a database reference
  """

  defstruct [:relationship, :on_delete, :on_update, :name]

  @doc """
  Option schema for a reference entry: the relationship it configures, the
  foreign-key `ON DELETE`/`ON UPDATE` behaviors, and an optional constraint
  name.
  """
  def schema do
    [
      relationship: [
        type: :atom,
        required: true,
        doc: "The relationship to be configured"
      ],
      on_delete: [
        type: {:one_of, [:delete, :nilify, :nothing, :restrict]},
        doc: """
        What should happen to records of this resource when the referenced record of the *destination* resource is deleted.
        The difference between `:nothing` and `:restrict` is subtle and, if you are unsure, choose `:nothing` (the default behavior).
        `:restrict` will prevent the deletion from happening *before* the end of the database transaction, whereas `:nothing` allows the
        transaction to complete before doing so. This allows for things like deleting the destination row and *then* deleting the source
        row.
        ## Important!
        No resource logic is applied with this operation! No authorization rules or validations take place, and no notifications are issued.
        This operation happens *directly* in the database.
        This option is called `on_delete`, instead of `on_destroy`, because it is hooking into the database level deletion, *not*
        a `destroy` action in your resource.
        """
      ],
      on_update: [
        type: {:one_of, [:update, :nilify, :nothing, :restrict]},
        # Doc text below fixed: it previously said "is update" and talked
        # about preventing "the deletion" — copy-paste from `on_delete`.
        doc: """
        What should happen to records of this resource when the referenced destination_field of the *destination* record is updated.
        The difference between `:nothing` and `:restrict` is subtle and, if you are unsure, choose `:nothing` (the default behavior).
        `:restrict` will prevent the update from happening *before* the end of the database transaction, whereas `:nothing` allows the
        transaction to complete before doing so. This allows for things like updating the destination row and *then* updating the reference
        as long as you are in a transaction.
        ## Important!
        No resource logic is applied with this operation! No authorization rules or validations take place, and no notifications are issued.
        This operation happens *directly* in the database.
        """
      ],
      name: [
        type: :string,
        doc:
          "The name of the foreign key to generate in the database. Defaults to <table>_<source_field>_fkey"
      ]
    ]
  end
end
|
lib/reference.ex
| 0.849238 | 0.609001 |
reference.ex
|
starcoder
|
defmodule Fxnk.Functions do
@moduledoc """
`Fxnk.Functions` are functions for computation or helpers.
"""
@doc """
`always/1` builds a constant function: whatever argument the returned
function receives, it yields the original value.
## Examples
    iex> fourtyTwo = Fxnk.Functions.always(42)
    iex> fourtyTwo.("hello")
    42
"""
@spec always(any()) :: (any() -> any())
def always(val) do
  fn _ignored -> val end
end
@doc """
`always/2` ignores its first argument and returns the second, always.
## Examples
    iex> Fxnk.Functions.always("hello", 42)
    42
"""
@spec always(any(), any()) :: any()
def always(_ignored, val) do
  val
end
@doc """
`curry/1` turns a function of any arity into a chain of single-argument
functions.
## Examples
    iex> add = Fxnk.Functions.curry(fn (a, b) -> a + b end)
    iex> add.(6).(7)
    13
    iex> addOne = Fxnk.Functions.curry(add.(1))
    iex> addOne.(1336)
    1337
"""
@spec curry(function()) :: function()
def curry(fun) do
  # :erlang.fun_info/2 reports the arity as {:arity, n}.
  {:arity, arity} = :erlang.fun_info(fun, :arity)
  curry(fun, arity, [])
end

# All arguments collected: apply them in the order they were supplied.
defp curry(fun, 0, collected) do
  apply(fun, Enum.reverse(collected))
end

# Still missing arguments: hand back a one-argument closure that prepends
# the next argument and recurses.
defp curry(fun, remaining, collected) do
  fn arg -> curry(fun, remaining - 1, [arg | collected]) end
end
@doc """
Curried `converge/3`: returns a one-argument function that fans the
argument out over `fns` and feeds the results to `to_fn`.
## Example
    iex> reverseUpcaseConcat = Fxnk.Functions.converge(&Fxnk.String.concat/2, [&String.reverse/1, &String.upcase/1])
    iex> reverseUpcaseConcat.("hello")
    "ollehHELLO"
"""
@spec converge(function(), [function(), ...]) :: (any() -> any())
def converge(to_fn, fns) do
  curry(fn input -> converge(input, to_fn, fns) end)
end
@doc """
`converge/3` applies `args` to every function in `fns`, then calls `to_fn`
with the collected results as its arguments.
The end function must have the same arity as the length of the list of functions.
## Example
    iex> Fxnk.Functions.converge("hello", &Fxnk.String.concat/2, [&String.reverse/1, &String.upcase/1])
    "ollehHELLO"
"""
@spec converge(any(), function(), [function(), ...]) :: any()
def converge(args, to_fn, fns) do
  results = Enum.map(fns, & &1.(args))
  apply(to_fn, results)
end
@doc """
Returns the empty version of whatever is passed in.
## Example
    iex> Fxnk.Functions.empty("hello!")
    ""
    iex> Fxnk.Functions.empty(%{foo: "bar"})
    %{}
    iex> Fxnk.Functions.empty([1,2,3,4])
    []
    iex> Fxnk.Functions.empty({:ok, "x"})
    {}
"""
# Spec now includes the tuple clause, which was handled at runtime but
# missing from the previous typespec.
@spec empty(list() | map() | binary() | tuple()) :: [] | %{} | <<>> | {}
def empty(x) when is_list(x), do: []
def empty(x) when is_map(x), do: %{}
def empty(x) when is_binary(x), do: ""
def empty(x) when is_tuple(x), do: {}
@doc """
A function that always returns false.
"""
@spec falsy :: false
def falsy, do: false
@doc """
Takes a two-argument function and returns an equivalent function that
accepts its arguments in the opposite order.
## Example
    iex> flippedConcatString = Fxnk.Functions.flip(&Fxnk.String.concat/2)
    iex> Fxnk.String.concat("hello", "world")
    "helloworld"
    iex> flippedConcatString.("hello", "world")
    "worldhello"
"""
@spec flip(function()) :: (any(), any() -> any())
def flip(func) do
  fn first, second -> func.(second, first) end
end
@doc """
Same as `flip/1`, but takes the two arguments together with the function
and applies them immediately in swapped order.
## Example
    iex> Fxnk.Functions.flip("hello", "world", &Fxnk.String.concat/2)
    "worldhello"
"""
@spec flip(any(), any(), function()) :: any()
def flip(arg1, arg2, func) do
  func.(arg2, arg1)
end
@doc """
`identity/1` returns its argument unchanged.
## Example
    iex> Fxnk.Functions.identity(42)
    42
"""
@spec identity(any()) :: any()
def identity(value), do: value
@doc """
`juxt/1` takes list of functions and returns a curried juxt.
## Example
iex> minmax = Fxnk.Functions.juxt([&Fxnk.Math.min/1, &Fxnk.Math.max/1])
iex> minmax.([1,3,5,7])
[1, 7]
"""
@spec juxt([function(), ...]) :: (any() -> any())
def juxt(fns) when is_list(fns) do
curry(fn arg -> juxt(arg, fns) end)
end
@doc """
`juxt/2` takes an initial argument and list of functions and applies the functions to the argument.
## Example
iex> Fxnk.Functions.juxt(%{foo: "foo", bar: "bar", baz: "baz"}, [Fxnk.Map.prop(:foo), Fxnk.Map.prop(:bar)])
["foo", "bar"]
"""
@spec juxt(any, [function(), ...]) :: any()
def juxt(arg, fns) do
for func <- fns, do: func.(arg)
end
@doc """
Task based `juxt/2`. Useful for multiple async functions. While `juxt/2` will run in sequence, juxt_async will run in parallel.
For instance, if you've got two functions `sleepTwoSeconds` and `sleepThreeSeconds`, `juxt/2` will respond in five seconds. `juxt_async/2`
will respond in 3.
## Example
iex> addTwo = fn x ->
...> :timer.sleep(1000)
...> x + 2
...> end
iex> addThree = fn x ->
...> :timer.sleep(2000)
...> x + 3
...> end
iex> Fxnk.Functions.juxt_async(4, [addTwo, addThree])
[6, 7]
"""
@spec juxt_async(any(), [function(), ...], non_neg_integer()) :: [any(), ...]
def juxt_async(arg, fns, timeout \\ 5000) do
tasks = for func <- fns, do: Task.async(fn -> func.(arg) end)
Enum.map(tasks, fn t -> Task.await(t, timeout) end)
end
@doc """
Task based `Enum.map`. Allows you to concurrently fire off many async functions at the same time, rather than waiting for each to resolve before
starting the next one. Returns when the slowest returns.
## Example
iex> addTwo = fn x ->
...> :timer.sleep(1000)
...> x + 2
...> end
iex> Fxnk.Functions.map_async([1,2,3,4,5], [addTwo, addThree])
[3,4,5,6,7]
"""
@spec map_async([any(), ...], function(), non_neg_integer()) :: [any(), ...]
def map_async(args, function, timeout \\ 5000) do
args
|> Task.async_stream(function, timeout: timeout)
|> Enum.into([], fn {:ok, res} -> res end)
end
@doc """
Function that always returns true.
## Example
iex> Fxnk.Functions.truthy()
true
"""
@spec truthy :: true
def truthy do
true
end
@doc """
`tap/1` takes a function and returns a function that takes a value. Applies the value to the function and then returns the value.
## Example
iex> function = Fxnk.Functions.tap(&Fxnk.Math.inc/1)
iex> function.(42)
42
"""
@spec tap(function()) :: (any() -> any())
def tap(func) do
curry(fn val -> tap(val, func) end)
end
@doc """
`tap/2` takes a value and a function, applies the value to the function and returns the value.
## Example
iex> Fxnk.Functions.tap(42, &Fxnk.Math.inc/1)
42
"""
@spec tap(any(), function()) :: (any() -> any())
def tap(val, func) do
func.(val)
val
end
@doc """
Returns the `result` of an `{:ok, result}` response from a function.
## Example
iex> addTwo = fn x -> {:ok, x + 2} end
iex> Fxnk.Functions.ok(4, addTwo)
6
"""
@spec ok(any(), (any() -> {:ok | :error, any()})) :: any()
def ok(value, function) do
with {:ok, result} <- function.(value) do
result
end
end
@doc """
Allows you to partially apply a function. Useful with `ok/2`
Returns an anonymous function that takes a single argument.
## Examples
iex> %{hello: "world"} |> Fxnk.Functions.ok(Fxnk.Functions.partial(Map, :fetch, [:hello]))
"world"
"""
@spec partial(module(), atom(), any()) :: (any() -> any())
def partial(module, function, args) do
fn arg -> apply(module, function, [arg | args]) end
end
end
|
lib/fxnk/functions.ex
| 0.907453 | 0.464051 |
functions.ex
|
starcoder
|
defmodule FunWithFlags.Store.Persistent do
  @moduledoc """
  A behaviour module for implementing persistence adapters.
  The package ships with persistence adapters for Redis and Ecto, but you
  can provide your own adapters by adopting this behaviour.
  """

  @doc """
  A persistent adapter should return either
  [a child specification](https://hexdocs.pm/elixir/Supervisor.html#module-child-specification)
  if it needs any process to be started and supervised, or `nil` if it does not.
  For example, the builtin Redis persistence adapter implements this function by delegating to
  `Redix.child_spec/1` because it needs the Redix processes to work. On the other hand, the
  builtin Ecto adapter implements this function by returning `nil`, because the Ecto repo is
  provided to this package by the host application, and it's assumed that the Ecto process tree
  is started and supervised somewhere else.
  This custom `worker_spec/0` function is used instead of the typical `child_spec/1` function
  because this function can return `nil` if the adapter doesn't need to be supervised, whereas
  `child_spec/1` _must_ return a valid child spec map.
  """
  @callback worker_spec() ::
              Supervisor.child_spec
              | nil

  @doc """
  Retrieves a flag by name.
  """
  # NOTE(review): this spec has no error return — adapters are apparently
  # expected to always produce a flag; confirm against callers before relying
  # on it.
  @callback get(flag_name :: atom) ::
              {:ok, FunWithFlags.Flag.t}

  @doc """
  Persists a gate for a flag, identified by name.
  """
  @callback put(flag_name :: atom, gate :: FunWithFlags.Gate.t) ::
              {:ok, FunWithFlags.Flag.t}
              | {:error, any()}

  @doc """
  Deletes a gate from a flag, identified by name.
  """
  @callback delete(flag_name :: atom, gate :: FunWithFlags.Gate.t) ::
              {:ok, FunWithFlags.Flag.t}
              | {:error, any()}

  @doc """
  Deletes an entire flag, identified by name.
  """
  @callback delete(flag_name :: atom) ::
              {:ok, FunWithFlags.Flag.t}
              | {:error, any()}

  @doc """
  Retrieves all the persisted flags.
  """
  @callback all_flags() ::
              {:ok, [FunWithFlags.Flag.t]}

  @doc """
  Retrieves all the names of the persisted flags.
  """
  @callback all_flag_names() ::
              {:ok, [atom]}
end
|
lib/fun_with_flags/store/persistent.ex
| 0.707101 | 0.50769 |
persistent.ex
|
starcoder
|
defmodule OpenGraph do
  @moduledoc """
  Fetch and parse websites to extract Open Graph meta tags.
  The example above shows how to fetch the GitHub Open Graph rich objects.
      iex> OpenGraph.fetch("https://github.com")
      {:ok, %OpenGraph{description: "GitHub is where people build software. More than 15 million...",
       image: "https://assets-cdn.github.com/images/modules/open_graph/github-octocat.png",
       site_name: "GitHub", title: "Build software better, together", type: nil,
       url: "https://github.com"}}
  """

  # Matches <meta property="..." content="..."> in any attribute order/quoting.
  @metatag_regex ~r/<\s*meta\s(?=[^>]*?\bproperty\s*=\s*(?|"\s*([^"]*?)\s*"|'\s*([^']*?)\s*'|([^"'>]*?)(?=\s*\/?\s*>|\s\w+\s*=)))[^>]*?\bcontent\s*=\s*(?|"\s*([^"]*?)\s*"|'\s*([^']*?)\s*'|([^"'>]*?)(?=\s*\/?\s*>|\s\w+\s*=))[^>]*>/

  defstruct [:title, :type, :image, :url, :description, :site_name]

  # Known og:* properties. Parsing is restricted to this closed set so we can
  # use String.to_existing_atom/1 instead of String.to_atom/1 — creating atoms
  # from attacker-controlled HTML is a memory-exhaustion vector (atoms are
  # never garbage collected). Unknown og:* keys were ignored by struct/2
  # before, so filtering them out here preserves the output.
  @og_properties ~w(title type image url description site_name)

  @doc """
  Fetches the raw HTML for the given website URL.
  Args:
    * `url` - target URL as a binary string or char list
  This functions returns `{:ok, %OpenGraph{...}}` if the request is successful,
  `{:error, reason}` otherwise.
  """
  def fetch(url) do
    case HTTPoison.get(url, [],
           ssl: [{:versions, [:"tlsv1.2"]}],
           follow_redirect: true,
           hackney: [{:force_redirect, true}]
         ) do
      {:ok, %HTTPoison.Response{status_code: 200, body: body}} ->
        {:ok, OpenGraph.parse(body)}

      {:ok, %HTTPoison.Response{status_code: 502}} ->
        {:error, "Bad Gateway"}

      {:ok, %HTTPoison.Response{status_code: 404}} ->
        {:error, "Not found :("}

      # The previous dedicated 505 clause produced exactly the same message as
      # this generic clause, so it has been removed.
      {:ok, %HTTPoison.Response{status_code: status_code}} ->
        {:error, "Error from HTTPoison, status code: #{status_code}"}

      {:error, %HTTPoison.Error{reason: reason}} ->
        {:error, reason}
    end
  end

  @doc """
  Parses the given HTML to extract the Open Graph objects.
  Args:
    * `html` - raw HTML as a binary string or char list
  This functions returns an OpenGraph struct.
  """
  def parse(html) do
    fields =
      @metatag_regex
      |> Regex.scan(html, capture: :all_but_first)
      |> Enum.filter(&filter_og_metatags/1)
      |> Enum.map(&drop_og_prefix/1)
      |> Enum.filter(fn [property, _content] -> property in @og_properties end)
      # Later occurrences of the same property win, as with the original
      # Enum.into/3 accumulation.
      |> Enum.into(%{}, fn [property, content] -> {String.to_existing_atom(property), content} end)

    struct(OpenGraph, fields)
  end

  defp filter_og_metatags(["og:" <> _property, _content]), do: true
  defp filter_og_metatags(_), do: false

  defp drop_og_prefix(["og:" <> property, content]) do
    [property, content]
  end
end
|
lib/open_graph.ex
| 0.76533 | 0.596962 |
open_graph.ex
|
starcoder
|
defmodule ExPixBRCode.JWS.JWKSStorage do
  @moduledoc """
  A JWKS storage of validated keys and certificates.
  """

  alias ExPixBRCode.JWS.Models.JWKS.Key
  alias ExPixBRCode.JWS.Models.JWSHeaders

  defstruct [:jwk, :certificate, :key]

  @typedoc """
  Storage item.
  It has a parsed JWK, the certificate of the key and the key parsed from the JWKS.
  We should always check the certificate validity before using the signer.
  """
  @type t() :: %__MODULE__{
          jwk: JOSE.JWK.t(),
          certificate: X509.Certificate.t(),
          key: Key.t()
        }

  @doc """
  Get the signer associated with the given JWS headers, or `nil` when the
  `jku` has not been processed yet (see `process_keys/3`).
  """
  @spec jwks_storage_by_jws_headers(JWSHeaders.t()) :: nil | __MODULE__.t()
  def jwks_storage_by_jws_headers(headers) do
    # Validated keys are cached per-jku in :persistent_term by process_keys/3.
    case :persistent_term.get(headers.jku, nil) do
      nil -> nil
      values -> get_key(values, headers)
    end
  end

  # Storage entries are keyed by {certificate thumbprint, kid}; support both
  # the SHA-1 (x5t) and SHA-256 (x5t#S256) thumbprint headers.
  defp get_key(values, %{x5t: thumb, kid: kid}) when is_binary(thumb),
    do: Map.get(values, {thumb, kid})

  defp get_key(values, %{:"x5t#S256" => thumb, :kid => kid}) when is_binary(thumb),
    do: Map.get(values, {thumb, kid})

  @doc """
  Process validation and storage of keys.
  Keys in JWKS endpoints must pass the following validations:
  - Must be of either EC or RSA types
  - Must have the x5c claim
  - The first certificate in the x5c claim MUST have the same key parameters as the key in the
    root
  - The certificate thumbprint must match that of the first certificate in the chain
  After successful validation, keys are inserted in a `:persistent_term`.
  """
  # Fix: the previous spec listed :key_thumbprint_and_first_certificate_differ
  # and :key_from_first_certificate_differ, which this module never returns.
  # The atoms below are the ones actually produced by the code; the trailing
  # term() covers reasons bubbled up from :public_key.pkix_path_validation/3.
  @spec process_keys([Key.t()], jku :: String.t(), opts :: Keyword.t()) ::
          :ok
          | {:error,
             :key_thumbprint_and_leaf_certificate_differ
             | :key_from_leaf_certificate_differ
             | :invalid_certificate_encoding
             | :invalid_cert_encoding
             | :x5c_must_have_more_than_one_cert
             | :certificate_subject_and_jku_uri_authority_differs
             | term()}
  def process_keys(keys, jku, opts) when is_list(keys) do
    case Enum.reduce_while(keys, {:ok, []}, &validate_and_persist_key(&1, jku, &2, opts)) do
      {:ok, keys} -> :persistent_term.put(jku, Map.new(keys))
      {:error, _} = err -> err
    end
  end

  # Validates one JWKS key; accumulates {storage_key, storage_item} pairs or
  # halts the enclosing reduce_while with the first error.
  defp validate_and_persist_key(%Key{x5c: [b64_cert | _] = chain} = key, jku, {:ok, acc}, opts) do
    key_from_params = key |> build_key_map() |> JOSE.JWK.from_map()

    with {:ok, jwk} <- validate_certificate_chain(chain, key_from_params, opts),
         {:ok, certificate, raw_der} <- get_certificate(b64_cert),
         {:ok, certificate} <-
           validate_leaf_certificate(certificate, raw_der, jku, key, opts),
         # The key advertised in the JWKS must match the one extracted from
         # the validated certificate chain.
         {:key_from_cert, true} <- {:key_from_cert, key_from_params == jwk} do
      storage_item = %__MODULE__{jwk: key_from_params, certificate: certificate, key: key}

      # Index the same item under every thumbprint variant the key carries.
      keys =
        [Map.get(key, :x5t), Map.get(key, :"x5t#S256")]
        |> Enum.reject(&is_nil/1)
        |> Enum.map(&{{&1, key.kid}, storage_item})

      {:cont, {:ok, keys ++ acc}}
    else
      {:key_from_cert, false} -> {:halt, {:error, :key_from_leaf_certificate_differ}}
      {:error, _} = err -> {:halt, err}
      :error -> {:halt, {:error, :invalid_certificate_encoding}}
    end
  end

  defp get_certificate(b64_cert) do
    with {:ok, raw_der} <- Base.decode64(b64_cert),
         {:ok, certificate} <- X509.Certificate.from_der(raw_der) do
      {:ok, certificate, raw_der}
    end
  end

  @doc false
  def validate_leaf_certificate(certificate, raw_der, jku, key, opts) do
    # The :leaf_certificate_should_fail option allows skipping this check
    # entirely (first `false ->` branch below).
    with true <- Keyword.get(opts, :leaf_certificate_should_fail, true),
         :ok <- validate_cert_subject(certificate, jku),
         {:x5t, true} <- validate_thumbprint(raw_der, key) do
      {:ok, certificate}
    else
      false -> {:ok, certificate}
      {:x5t, false} -> {:error, :key_thumbprint_and_leaf_certificate_differ}
      :error -> :error
      {:error, _} = err -> err
    end
  end

  defp validate_thumbprint(raw_der, %{x5t: thumb}) when is_binary(thumb),
    do: {:x5t, thumbprint(raw_der) == thumb}

  defp validate_thumbprint(raw_der, %{:"x5t#S256" => thumb}) when is_binary(thumb),
    do: {:x5t, thumbprint(raw_der, :sha256) == thumb}

  defp validate_certificate_chain(chain, key_from_params, opts) do
    # The :x5c_should_fail option allows trusting the JWKS key as-is.
    with true <- Keyword.get(opts, :x5c_should_fail, true),
         {:ok, [root | certificate_chain]} <- decode_chain(chain),
         {:ok, {{_, pkey, _}, _}} <-
           :public_key.pkix_path_validation(root, certificate_chain, []) do
      {:ok, JOSE.JWK.from_key(pkey)}
    else
      false -> {:ok, key_from_params}
      :error -> {:error, :invalid_cert_encoding}
      {:error, _} = err -> err
    end
  end

  defp decode_chain(chain) when length(chain) > 1 do
    # This reverses the chain automatically
    Enum.reduce_while(chain, {:ok, []}, fn cert, {:ok, acc} ->
      case Base.decode64(cert) do
        {:ok, decoded_cert} -> {:cont, {:ok, [decoded_cert | acc]}}
        :error -> {:halt, :error}
      end
    end)
  end

  defp decode_chain(_), do: {:error, :x5c_must_have_more_than_one_cert}

  # The certificate is accepted when the jku URI authority matches either the
  # subject commonName or the subjectAltName dNSName entry.
  defp validate_cert_subject(certificate, jku) do
    jku = URI.parse(jku)

    [authority | _] =
      certificate
      |> X509.Certificate.subject()
      |> X509.RDNSequence.get_attr("commonName")

    # {2, 5, 29, 17} is the subjectAltName extension OID.
    {:Extension, {2, 5, 29, 17}, _, values} =
      X509.Certificate.extension(certificate, {2, 5, 29, 17})

    dns = Keyword.get(values, :dNSName, nil) |> to_string()

    if jku.authority == authority or jku.authority == dns do
      :ok
    else
      {:error, :certificate_subject_and_jku_uri_authority_differs}
    end
  end

  defp build_key_map(%{kty: "EC"} = key),
    do: %{"kty" => "EC", "crv" => key.crv, "x" => key.x, "y" => key.y}

  defp build_key_map(%{kty: "RSA"} = key),
    do: %{"kty" => "RSA", "n" => key.n, "e" => key.e}

  defp thumbprint(raw_cert, alg \\ :sha) do
    alg
    |> :crypto.hash(raw_cert)
    |> Base.url_encode64(padding: false)
  end
end
|
lib/ex_pix_brcode/jws/jwks_storage.ex
| 0.814238 | 0.557845 |
jwks_storage.ex
|
starcoder
|
defmodule AdventOfCode.Solutions.Day10 do
  @moduledoc """
  Solution for day 10 exercise.
  ### Exercise
  https://adventofcode.com/2021/day/10
  """
  require Logger

  # Points awarded for the first illegal closing character on a corrupted line.
  @syntax_error_points %{")" => 3, "]" => 57, "}" => 1197, ">" => 25137}

  # Per-character points used when scoring the completion of an incomplete line.
  @completion_points %{")" => 1, "]" => 2, "}" => 3, ">" => 4}

  @doc """
  Reads the puzzle input from `filename`, prints both the syntax-error score
  and the autocompletion score, and returns `:ok`.
  """
  def score(filename) do
    {syntax_errors_score, autocompletion_score} =
      filename
      |> File.read!()
      |> parse_input()
      |> calculate_score()

    IO.puts("Syntaxt error score is #{syntax_errors_score}")
    IO.puts("Autocompletion score is #{autocompletion_score}")
  end

  # Normalizes line endings and turns each line into a list of graphemes.
  defp parse_input(raw) do
    raw
    |> String.replace("\r\n", "\n")
    |> String.split("\n", trim: true)
    |> Enum.map(&String.graphemes/1)
  end

  defp calculate_score(lines) do
    compiled = Enum.map(lines, &compile_line/1)

    syntax_errors_score =
      compiled
      |> Enum.map(&syntax_error_to_score/1)
      |> Enum.sum()

    completion_scores =
      compiled
      |> Enum.map(&autocompletion_to_score/1)
      |> Enum.reject(&is_nil/1)

    # The puzzle asks for the median of the per-line completion scores.
    autocompletion_score =
      completion_scores
      |> Enum.sort()
      |> Enum.at(div(length(completion_scores), 2))

    {syntax_errors_score, autocompletion_score}
  end

  # Scans a line left to right while tracking a stack of the closing
  # characters still expected. Yields :ok for a balanced line,
  # {:syntax_error, line, char} for a corrupted one, and
  # {:incomplete, line, pending} when closers are still outstanding.
  defp compile_line(line) do
    line
    |> Enum.reduce_while([], &consume_char/2)
    |> case do
      [] -> :ok
      {:syntax_error, char} -> {:syntax_error, line, char}
      pending -> {:incomplete, line, pending}
    end
  end

  # Openers push their matching closer; a matching closer pops the stack;
  # anything else is a syntax error that halts the scan.
  defp consume_char("(", stack), do: {:cont, [")" | stack]}
  defp consume_char("[", stack), do: {:cont, ["]" | stack]}
  defp consume_char("{", stack), do: {:cont, ["}" | stack]}
  defp consume_char("<", stack), do: {:cont, [">" | stack]}
  defp consume_char(char, [char | stack]), do: {:cont, stack}
  defp consume_char(char, _stack), do: {:halt, {:syntax_error, char}}

  defp syntax_error_to_score({:syntax_error, _line, char}),
    do: Map.fetch!(@syntax_error_points, char)

  defp syntax_error_to_score(_), do: 0

  defp autocompletion_to_score({:incomplete, _line, pending}) do
    # Each pending closer multiplies the running total by 5 before adding
    # its own point value.
    Enum.reduce(pending, 0, fn char, acc -> acc * 5 + Map.fetch!(@completion_points, char) end)
  end

  defp autocompletion_to_score(_), do: nil
end
|
lib/advent_of_code/solutions/day10.ex
| 0.74826 | 0.456046 |
day10.ex
|
starcoder
|
defmodule Blockchain.Chain do
  @moduledoc """
  A structure that holds the necessary ETS tables to store blocks
  """
  alias Blockchain.{Block, Transaction}
  require Logger

  @opaque t() :: %__MODULE__{blocks: term(), transactions: term()}
  defstruct [:blocks, :transactions]

  @doc """
  Find a block in the chain from its hash
  """
  @spec lookup(chain :: t(), hash :: Block.h()) :: Block.t() | nil
  # The empty hash denotes "no parent" and never resolves to a block.
  def lookup(_chain, <<>>), do: nil

  def lookup(chain, hash) do
    case :ets.lookup(chain.blocks, hash) do
      [{_hash, block}] -> block
      _ -> nil
    end
  end

  @doc """
  Find the block holding the transaction in the chain
  """
  @spec find_tx(chain :: t(), hash :: binary()) :: {Transaction.t(), Block.h()} | nil
  def find_tx(chain, hash) do
    case :ets.lookup(chain.transactions, hash) do
      [{_hash, tx, block_hash}] -> {tx, block_hash}
      _ -> nil
    end
  end

  @doc """
  Insert a block in the chain tables.
  Returns `:ok` on success, or `:error` when the block itself is invalid.
  """
  @spec insert(chain :: t(), block :: Block.t()) :: :ok | :error
  def insert(chain, block) do
    if Block.valid?(block) do
      hash = Block.hash(block)
      :ets.insert(chain.blocks, {hash, block})
      # Also index every transaction by its own hash so find_tx/2 is a
      # single ETS lookup.
      txs = Enum.map(block.transactions, fn tx -> {Transaction.hash(tx), tx, hash} end)
      :ets.insert(chain.transactions, txs)
      :ok
    else
      :error
    end
  end

  @doc """
  Creates a new chain backed by fresh ETS tables owned by the calling process.
  """
  @spec new :: t()
  def new do
    %__MODULE__{
      blocks: :ets.new(nil, [:set, :protected]),
      transactions: :ets.new(nil, [:set, :protected])
    }
  end

  @doc """
  Checks if the whole chain of blocks is valid
  """
  @spec valid?(chain :: t(), block :: Block.t() | nil) :: boolean
  def valid?(_chain, nil), do: false

  def valid?(chain, block) do
    cond do
      # The block itself should be valid
      not Block.valid?(block) ->
        false

      # The first block (without parent) should have a zero index
      block.parent == <<>> ->
        block.index == 0

      # The block parent should exist, be valid, and directly precede this
      # block's index. (Replaces an `unless ... else` construct, which is an
      # Elixir anti-pattern.)
      true ->
        case lookup(chain, block.parent) do
          nil -> false
          parent -> parent.index + 1 == block.index && valid?(chain, parent)
        end
    end
  end
end
|
apps/blockchain/lib/blockchain/chain.ex
| 0.806052 | 0.660564 |
chain.ex
|
starcoder
|
defmodule Bamboo.MailgunHelper do
  @moduledoc """
  Functions for using features specific to Mailgun
  (e.g. tagging, templates).
  """

  alias Bamboo.Email

  @doc """
  Add a tag to outgoing email to help categorize traffic based on some
  criteria, perhaps separate signup emails from password recovery emails
  or from user comments.
  More details can be found in the
  [Mailgun documentation](https://documentation.mailgun.com/en/latest/user_manual.html#tagging)

  ## Example
      email
      |> MailgunHelper.tag("welcome-email")
  """
  def tag(email, tag), do: Email.put_private(email, :"o:tag", tag)

  @doc """
  Schedule an email to be delivered in the future.
  More details can be found in the
  [Mailgun documentation](https://documentation.mailgun.com/en/latest/user_manual.html#scheduling-delivery)

  ## Example
      one_hour_from_now =
        DateTime.utc_now()
        |> DateTime.add(3600)
      email
      |> MailgunHelper.deliverytime(one_hour_from_now)
  """
  def deliverytime(email, %DateTime{} = deliverytime) do
    # Mailgun expects a Unix timestamp for o:deliverytime.
    Email.put_private(email, :"o:deliverytime", DateTime.to_unix(deliverytime))
  end

  @doc """
  Send an email using a template stored in Mailgun, rather than supplying
  a `Bamboo.Email.text_body/2` or a `Bamboo.Email.html_body/2`.
  More details about templates can be found in the
  [Templates section of the Mailgun documentation](https://documentation.mailgun.com/en/latest/user_manual.html#templates).
  """
  def template(email, template_name), do: Email.put_private(email, :template, template_name)

  @doc """
  Use it to send a message to specific version of a template.
  More details can be found in the
  [Mailgun documentation](https://documentation.mailgun.com/en/latest/api-sending.html#sending)

  ## Example
      email
      |> MailgunHelper.template("my-template")
      |> MailgunHelper.template_version("v2")
  """
  def template_version(email, version) do
    Email.put_private(email, :"t:version", version)
  end

  @doc """
  Use it if you want to have rendered template in the text part of the
  message in case of template sending.
  More details can be found in the
  [Mailgun documentation](https://documentation.mailgun.com/en/latest/api-sending.html#sending)

  ## Example
      email
      |> MailgunHelper.template_text(true)
  """
  # Anything other than a literal `true` disables the text rendering flag.
  def template_text(email, true), do: Email.put_private(email, :"t:text", true)
  def template_text(email, _), do: Email.put_private(email, :"t:text", false)

  @doc """
  When sending an email with `Bamboo.MailgunHelper.template/2` you can
  replace a handlebars variables using this function.
  More details about templates can be found in the
  [Templates section of the Mailgun documentation](https://documentation.mailgun.com/en/latest/user_manual.html#templates).

  ## Example
      email
      |> MailgunHelper.template("password-reset-email")
      |> MailgunHelper.substitute_variables("password_reset_link", "https://example.com/123")
  """
  def substitute_variables(email, key, value), do: substitute_variables(email, %{key => value})

  @doc """
  This behaves like `Bamboo.MailgunHelper.substitute_variables/3`, but
  accepts a `Map` rather than a key, value pair.

  ## Example
      email
      |> MailgunHelper.template("password-reset-email")
      |> MailgunHelper.substitute_variables(%{ "greeting" => "Hello!", "password_reset_link" => "https://example.com/123" })
  """
  def substitute_variables(email, variables = %{}) do
    # New variables win over any previously stored ones with the same key.
    merged =
      email.private
      |> Map.get(:mailgun_custom_vars, %{})
      |> Map.merge(variables)

    Email.put_private(email, :mailgun_custom_vars, merged)
  end
end
|
lib/bamboo/adapters/mailgun_helper.ex
| 0.69233 | 0.428742 |
mailgun_helper.ex
|
starcoder
|
defmodule Contex.TimeScale do
  @moduledoc """
  A time scale to map date and time data to a plotting coordinate system.
  Almost identical `Contex.ContinuousLinearScale` in terms of concepts and
  usage, except it applies to `DateTime` and `NaiveDateTime` domain data
  types.
  `TimeScale` handles the complexities of calculating nice tick intervals etc
  for almost any time range between a few seconds and a few years.
  """
  alias __MODULE__
  alias Contex.Utils

  @type datetimes() :: NaiveDateTime.t() | DateTime.t()

  # Approximate durations in ms for calculating ideal tick intervals
  # Modelled from https://github.com/d3/d3-scale/blob/v2.2.2/src/time.js
  @duration_sec 1000
  @duration_min @duration_sec * 60
  @duration_hour @duration_min * 60
  @duration_day @duration_hour * 24
  # @duration_week @duration_day * 7
  @duration_month @duration_day * 30
  @duration_year @duration_day * 365

  # Tuple defines: 1&2 - actual time intervals to calculate tick offsets & 3,
  # approximate time interval to determine if this is the best option
  @default_tick_intervals [
    {:seconds, 1, @duration_sec},
    {:seconds, 5, @duration_sec * 5},
    {:seconds, 15, @duration_sec * 15},
    {:seconds, 30, @duration_sec * 30},
    {:minutes, 1, @duration_min},
    {:minutes, 5, @duration_min * 5},
    {:minutes, 15, @duration_min * 15},
    {:minutes, 30, @duration_min * 30},
    {:hours, 1, @duration_hour},
    {:hours, 3, @duration_hour * 3},
    {:hours, 6, @duration_hour * 6},
    {:hours, 12, @duration_hour * 12},
    {:days, 1, @duration_day},
    {:days, 2, @duration_day * 2},
    {:days, 5, @duration_day * 5},
    # {:week, 1, @duration_week }, #TODO: Need to work on tick_interval lookup function & related to make this work
    {:days, 10, @duration_day * 10},
    {:months, 1, @duration_month},
    {:months, 3, @duration_month * 3},
    {:years, 1, @duration_year}
  ]

  defstruct [
    :domain,
    :nice_domain,
    :range,
    :interval_count,
    :tick_interval,
    :custom_tick_formatter,
    :display_format
  ]

  @type t() :: %__MODULE__{}

  @doc """
  Creates a new TimeScale struct with basic defaults set
  """
  @spec new :: Contex.TimeScale.t()
  def new() do
    %TimeScale{range: {0.0, 1.0}, interval_count: 11}
  end

  @doc """
  Specifies the number of intervals the scale should display.
  Default is 10.
  """
  @spec interval_count(Contex.TimeScale.t(), integer()) :: Contex.TimeScale.t()
  def interval_count(%TimeScale{} = scale, interval_count)
      when is_integer(interval_count) and interval_count > 1 do
    scale
    |> struct(interval_count: interval_count)
    |> nice()
  end

  # Non-integer or too-small counts are silently ignored.
  def interval_count(%TimeScale{} = scale, _), do: scale

  @doc """
  Define the data domain for the scale
  """
  @spec domain(Contex.TimeScale.t(), datetimes(), datetimes()) :: Contex.TimeScale.t()
  def domain(%TimeScale{} = scale, min, max) do
    # We can be flexible with the range start > end, but the domain needs to start from the min
    {d_min, d_max} =
      case Utils.date_compare(min, max) do
        :lt -> {min, max}
        _ -> {max, min}
      end

    scale
    |> struct(domain: {d_min, d_max})
    |> nice()
  end

  @doc """
  Define the data domain for the scale from a list of data.
  Extents will be calculated by the scale.
  """
  @spec domain(Contex.TimeScale.t(), list(datetimes())) :: Contex.TimeScale.t()
  def domain(%TimeScale{} = scale, data) when is_list(data) do
    {min, max} = extents(data)
    domain(scale, min, max)
  end

  # Rounds the domain out to "nice" tick boundaries: picks the best-fitting
  # predefined tick interval, rounds the start down to it, then walks forward
  # to find the end boundary and the adjusted interval count.
  # NOTE: interval count will likely get adjusted down here to keep things looking nice
  # TODO: no type checks on the domain
  defp nice(%TimeScale{domain: {min_d, max_d}, interval_count: interval_count} = scale)
       when is_number(interval_count) and interval_count > 1 do
    width = Utils.date_diff(max_d, min_d, :millisecond)
    unrounded_interval_size = width / (interval_count - 1)
    tick_interval = lookup_tick_interval(unrounded_interval_size)
    min_nice = round_down_to(min_d, tick_interval)

    {max_nice, adjusted_interval_count} =
      calculate_end_interval(min_nice, max_d, tick_interval, interval_count)

    display_format = guess_display_format(tick_interval)

    %{
      scale
      | nice_domain: {min_nice, max_nice},
        tick_interval: tick_interval,
        interval_count: adjusted_interval_count,
        display_format: display_format
    }
  end

  # Without a usable domain/interval count there is nothing to "nice up".
  defp nice(%TimeScale{} = scale), do: scale

  # Picks the smallest predefined interval whose approximate duration covers
  # the raw interval size; falls back to the coarsest ({:years, 1, ...}).
  defp lookup_tick_interval(raw_interval) when is_number(raw_interval) do
    default = List.last(@default_tick_intervals)
    Enum.find(@default_tick_intervals, default, &(elem(&1, 2) >= raw_interval))
  end

  # Walks forward from the rounded-down start one tick at a time until the
  # target datetime is covered; returns the last interval end and the number
  # of steps taken.
  defp calculate_end_interval(start, target, tick_interval, max_steps) do
    Enum.reduce_while(1..max_steps, {start, 0}, fn step, {_current_end, _index} ->
      new_end = add_interval(start, tick_interval, step)

      if Utils.date_compare(new_end, target) == :lt,
        do: {:cont, {new_end, step}},
        else: {:halt, {new_end, step}}
    end)
  end

  # Advances a datetime by `count` tick intervals. Sub-month intervals use a
  # fixed millisecond duration; month/year intervals use calendar arithmetic.
  @doc false
  def add_interval(dt, {:seconds, _, duration_msec}, count),
    do: Utils.date_add(dt, duration_msec * count, :millisecond)

  def add_interval(dt, {:minutes, _, duration_msec}, count),
    do: Utils.date_add(dt, duration_msec * count, :millisecond)

  def add_interval(dt, {:hours, _, duration_msec}, count),
    do: Utils.date_add(dt, duration_msec * count, :millisecond)

  def add_interval(dt, {:days, _, duration_msec}, count),
    do: Utils.date_add(dt, duration_msec * count, :millisecond)

  def add_interval(dt, {:months, interval_size, _}, count),
    do: Utils.date_add(dt, interval_size * count, :months)

  def add_interval(dt, {:years, interval_size, _}, count),
    do: Utils.date_add(dt, interval_size * count, :years)

  # NOTE: Don't try this at home kiddies. Relies on internal representations of DateTime and NaiveDateTime
  # Truncates a datetime down to the nearest multiple of the tick interval by
  # directly rewriting the struct fields.
  defp round_down_to(dt, {:seconds, n, _}),
    do: %{dt | microsecond: {0, 0}, second: round_down_multiple(dt.second, n)}

  defp round_down_to(dt, {:minutes, n, _}),
    do: %{dt | microsecond: {0, 0}, second: 0, minute: round_down_multiple(dt.minute, n)}

  defp round_down_to(dt, {:hours, n, _}),
    do: %{dt | microsecond: {0, 0}, second: 0, minute: 0, hour: round_down_multiple(dt.hour, n)}

  defp round_down_to(dt, {:days, 1, _}),
    do: %{dt | microsecond: {0, 0}, second: 0, minute: 0, hour: 0}

  defp round_down_to(dt, {:days, n, _}),
    do: %{
      dt
      | microsecond: {0, 0},
        second: 0,
        minute: 0,
        hour: 0,
        # max(1) guards against day 0 when dt.day < n
        day: round_down_multiple(dt.day, n) |> max(1)
    }

  defp round_down_to(dt, {:months, 1, _}),
    do: %{dt | microsecond: {0, 0}, second: 0, minute: 0, hour: 0, day: 1}

  defp round_down_to(dt, {:months, n, _}), do: round_down_month(dt, n)

  defp round_down_to(dt, {:years, 1, _}),
    do: %{dt | microsecond: {0, 0}, second: 0, minute: 0, hour: 0, day: 1, month: 1}

  # Rounds down to a multiple-of-n month boundary, borrowing from the previous
  # year when the rounded month lands at zero.
  # NOTE(review): `day` is set to the LAST day of the rounded-down month
  # (:calendar.last_day_of_the_month/2) rather than 1 — confirm this is the
  # intended boundary for multi-month ticks.
  defp round_down_month(dt, n) do
    month = round_down_multiple(dt.month, n)
    year = dt.year

    {month, year} =
      case month > 0 do
        true -> {month, year}
        _ -> {month + 12, year - 1}
      end

    day = :calendar.last_day_of_the_month(year, month)
    %{dt | microsecond: {0, 0}, second: 0, minute: 0, hour: 0, day: day, month: month, year: year}
  end

  # Chooses a strftime-style label format appropriate to the tick resolution.
  defp guess_display_format({:seconds, _, _}), do: "%M:%S"
  defp guess_display_format({:minutes, _, _}), do: "%H:%M:%S"
  defp guess_display_format({:hours, 1, _}), do: "%H:%M:%S"
  defp guess_display_format({:hours, _, _}), do: "%d %b %H:%M"
  defp guess_display_format({:days, _, _}), do: "%d %b"
  defp guess_display_format({:months, _, _}), do: "%b %Y"
  defp guess_display_format({:years, _, _}), do: "%Y"

  # Builds a function that linearly maps a datetime in the nice domain onto
  # the numeric plotting range. nil domain values map to nil.
  @doc false
  def get_domain_to_range_function(%TimeScale{nice_domain: {min_d, max_d}, range: {min_r, max_r}})
      when is_number(min_r) and is_number(max_r) do
    domain_width = Utils.date_diff(max_d, min_d, :microsecond)
    domain_min = 0
    range_width = max_r - min_r

    case domain_width do
      0 ->
        fn x -> x end

      _ ->
        fn domain_val ->
          case domain_val do
            nil ->
              nil

            _ ->
              # NOTE(review): despite the name, this value is in microseconds
              # (date_diff/3 is called with :microsecond); domain_width uses
              # the same unit, so the ratio is still correct.
              milliseconds_val = Utils.date_diff(domain_val, min_d, :microsecond)
              ratio = (milliseconds_val - domain_min) / domain_width
              min_r + ratio * range_width
          end
        end
    end
  end

  def get_domain_to_range_function(_), do: fn x -> x end

  # Inverse of get_domain_to_range_function/1: maps a range coordinate back to
  # a datetime within the nice domain.
  @doc false
  def get_range_to_domain_function(%TimeScale{nice_domain: {min_d, max_d}, range: {min_r, max_r}})
      when is_number(min_r) and is_number(max_r) do
    domain_width = Utils.date_diff(max_d, min_d, :microsecond)
    range_width = max_r - min_r

    case range_width do
      0 ->
        fn x -> x end

      _ ->
        fn range_val ->
          ratio = (range_val - min_r) / range_width
          Utils.date_add(min_d, trunc(ratio * domain_width), :microsecond)
        end
    end
  end

  def get_range_to_domain_function(_), do: fn x -> x end

  # Min/max of a list of datetimes using nil-safe comparisons.
  defp extents(data) do
    Enum.reduce(data, {nil, nil}, fn x, {min, max} ->
      {Utils.safe_min(x, min), Utils.safe_max(x, max)}
    end)
  end

  defp round_down_multiple(value, multiple), do: div(value, multiple) * multiple

  # Implementation of the generic Contex.Scale protocol in terms of the
  # helpers above.
  defimpl Contex.Scale do
    def domain_to_range_fn(%TimeScale{} = scale),
      do: TimeScale.get_domain_to_range_function(scale)

    def ticks_domain(%TimeScale{
          nice_domain: {min_d, _},
          interval_count: interval_count,
          tick_interval: tick_interval
        })
        when is_number(interval_count) do
      0..interval_count
      |> Enum.map(fn i -> TimeScale.add_interval(min_d, tick_interval, i) end)
    end

    def ticks_domain(_), do: []

    def ticks_range(%TimeScale{} = scale) do
      transform_func = TimeScale.get_domain_to_range_function(scale)

      ticks_domain(scale)
      |> Enum.map(transform_func)
    end

    def domain_to_range(%TimeScale{} = scale, range_val) do
      transform_func = TimeScale.get_domain_to_range_function(scale)
      transform_func.(range_val)
    end

    def get_range(%TimeScale{range: {min_r, max_r}}), do: {min_r, max_r}

    def set_range(%TimeScale{} = scale, start, finish)
        when is_number(start) and is_number(finish) do
      %{scale | range: {start, finish}}
    end

    def set_range(%TimeScale{} = scale, {start, finish})
        when is_number(start) and is_number(finish),
        do: set_range(scale, start, finish)

    def get_formatted_tick(
          %TimeScale{
            display_format: display_format,
            custom_tick_formatter: custom_tick_formatter
          },
          tick_val
        ) do
      format_tick_text(tick_val, display_format, custom_tick_formatter)
    end

    # A custom formatter, when supplied, takes precedence over the guessed
    # strftime display format.
    defp format_tick_text(tick, _, custom_tick_formatter) when is_function(custom_tick_formatter),
      do: custom_tick_formatter.(tick)

    defp format_tick_text(tick, display_format, _),
      do: NimbleStrftime.format(tick, display_format)
  end
end
|
lib/chart/scale/time_scale.ex
| 0.882915 | 0.670502 |
time_scale.ex
|
starcoder
|
defmodule ExOneroster.Demographics do
  @moduledoc """
  The boundary for the Demographics system.
  """

  import Ecto.Query, warn: false
  alias ExOneroster.Repo
  alias ExOneroster.Demographics.Demographic

  @doc """
  Returns the list of demographics.

  ## Examples
      iex> list_demographics()
      [%Demographic{}, ...]
  """
  def list_demographics, do: Repo.all(Demographic)

  @doc """
  Gets a single demographic.
  Raises `Ecto.NoResultsError` if the Demographic does not exist.

  ## Examples
      iex> get_demographic!(123)
      %Demographic{}

      iex> get_demographic!(456)
      ** (Ecto.NoResultsError)
  """
  def get_demographic!(id) do
    Repo.get!(Demographic, id)
  end

  @doc """
  Creates a demographic.

  ## Examples
      iex> create_demographic(%{field: value})
      {:ok, %Demographic{}}

      iex> create_demographic(%{field: bad_value})
      {:error, %Ecto.Changeset{}}
  """
  def create_demographic(attrs \\ %{}) do
    changeset = Demographic.changeset(%Demographic{}, attrs)
    Repo.insert(changeset)
  end

  @doc """
  Updates a demographic.

  ## Examples
      iex> update_demographic(demographic, %{field: new_value})
      {:ok, %Demographic{}}

      iex> update_demographic(demographic, %{field: bad_value})
      {:error, %Ecto.Changeset{}}
  """
  def update_demographic(%Demographic{} = demographic, attrs) do
    changeset = Demographic.changeset(demographic, attrs)
    Repo.update(changeset)
  end

  @doc """
  Deletes a Demographic.

  ## Examples
      iex> delete_demographic(demographic)
      {:ok, %Demographic{}}

      iex> delete_demographic(demographic)
      {:error, %Ecto.Changeset{}}
  """
  def delete_demographic(%Demographic{} = demographic), do: Repo.delete(demographic)

  @doc """
  Returns an `%Ecto.Changeset{}` for tracking demographic changes.

  ## Examples
      iex> change_demographic(demographic)
      %Ecto.Changeset{source: %Demographic{}}
  """
  def change_demographic(%Demographic{} = demographic),
    do: Demographic.changeset(demographic, %{})
end
|
lib/ex_oneroster/demographics/demographics.ex
| 0.884464 | 0.57529 |
demographics.ex
|
starcoder
|
defmodule Clex.CL.ImageFormat do
  @moduledoc ~S"""
  This module defines a `Record` type that represents the `cl_image_format` as specified in the Open CL specification:

  ```c
  typedef struct _cl_image_format {
  cl_channel_order image_channel_order;
  cl_channel_type image_channel_data_type;
  } cl_image_format;
  ```

  ## Members

  `:order` \
  Specifies the number of channels and the channel layout i.e. the memory layout in which channels are stored in the image. Valid values are described in the table below.

  Format | Description
  :--------------------- | :---------
  `:r`, `:rx`, `:a` | No notes
  `:intensity` | This format can only be used if channel data type = `:unorm_int8`, `:unorm_int16`, `:snorm_int8`, `:snorm_int16`, `:half_float`, or `:float`.
  `:luminance` | This format can only be used if channel data type = `:unorm_int8`, `:unorm_int16`, `:snorm_int8`, `:snorm_int16`, `:half_float`, or `:float`.
  `:rg`, `:rgx`, `:ra` | No notes
  `:rgb`, `:rgbx` | This format can only be used if channel data type = `:unorm_short_565`, `:unorm_short_555` or `:unorm_int_101010`.
  `:rgba` | No notes
  `:argb`, `:bgra` | This format can only be used if channel data type = `:unorm_int8`, `:snorm_int8`, `:signed_int8` or `:unsigned_int8`.

  `:type` \
  Describes the size of the channel data type. The number of bits per element determined by the `type` and `order` must be a power of two. The list of supported values is described in the table below.

  Image Channel Data Type | Description
  :----------------------- | :---------
  `:snorm_int8` | Each channel component is a normalized signed 8-bit integer value.
  `:snorm_int16` | Each channel component is a normalized signed 16-bit integer value.
  `:unorm_int8` | Each channel component is a normalized unsigned 8-bit integer value.
  `:unorm_int16` | Each channel component is a normalized unsigned 16-bit integer value.
  `:unorm_short_565` | Represents a normalized 5-6-5 3-channel RGB image. The channel order must be `:rgb`.
  `:unorm_short_555` | Represents a normalized x-5-5-5 4-channel xRGB image. The channel order must be `:rgb`.
  `:unorm_int_101010` | Represents a normalized x-10-10-10 4-channel xRGB image. The channel order must be `:rgb`.
  `:signed_int8` | Each channel component is an unnormalized signed 8-bit integer value.
  `:signed_int16` | Each channel component is an unnormalized signed 16-bit integer value.
  `:signed_int32` | Each channel component is an unnormalized signed 32-bit integer value.
  `:unsigned_int8` | Each channel component is an unnormalized unsigned 8-bit integer value.
  `:unsigned_int16` | Each channel component is an unnormalized unsigned 16-bit integer value.
  `:unsigned_int32` | Each channel component is an unnormalized unsigned 32-bit integer value.
  `:half_float` | Each channel component is a 16-bit half-float value.
  `:float` | Each channel component is a single precision floating-point value.

  ## Description

  The following example shows how to specify a normalized unsigned 8-bit / channel RGBA image:

  ```
  order = :rgba
  type = :unorm_int8
  ```

  `type` values of `:unorm_short_565`, `:unorm_short_555` and `:unorm_int_101010` are special cases of packed image formats where the channels of each element are packed into a single unsigned short or unsigned int. For these special packed image formats, the channels are normally packed with the first channel in the most significant bits of the bitfield, and successive channels occupying progressively less significant locations. For `:unorm_short_565`, R is in bits 15:11, G is in bits 10:5 and B is in bits 4:0. For `:unorm_short_555`, bit 15 is undefined, R is in bits 14:10, G in bits 9:5 and B in bits 4:0. For `:unorm_int_101010`, bits 31:30 are undefined, R is in bits 29:20, G in bits 19:10 and B in bits 9:0.
  """

  require Record

  # `use Clex.CL.ImageFormat` imports this module, making the
  # `cl_image_format` record macros defined below available to the caller.
  defmacro __using__(_opts) do
    quote do
      import unquote(__MODULE__)
    end
  end

  # The record is the tuple {:cl_image_format, order, type}; both fields
  # default to nil. Field types are declared in Clex.CL.
  @type t :: record(:cl_image_format, order: Clex.CL.cl_channel_order, type: Clex.CL.cl_channel_type)
  Record.defrecord(:cl_image_format, order: nil, type: nil)
end
|
lib/clex/cl/image_format.ex
| 0.952541 | 0.970576 |
image_format.ex
|
starcoder
|
defmodule Absinthe.Type.Custom do
  use Absinthe.Schema.Notation

  @moduledoc """
  This module contains the following additional data types:
  - datetime (UTC)
  - naive_datetime
  - date
  - time
  - decimal (only if [Decimal](https://hex.pm/packages/decimal) is available)

  Further description of these types can be found in the source code.

  To use: `import_types Absinthe.Type.Custom`.
  """

  alias Absinthe.Blueprint.Input

  scalar :datetime, name: "DateTime" do
    description """
    The `DateTime` scalar type represents a date and time in the UTC
    timezone. The DateTime appears in a JSON response as an ISO8601 formatted
    string, including UTC timezone ("Z"). The parsed date and time string will
    be converted to UTC if there is an offset.
    """

    serialize &DateTime.to_iso8601/1
    parse &parse_datetime/1
  end

  scalar :naive_datetime, name: "NaiveDateTime" do
    description """
    The `Naive DateTime` scalar type represents a naive date and time without
    timezone. The DateTime appears in a JSON response as an ISO8601 formatted
    string.
    """

    serialize &NaiveDateTime.to_iso8601/1
    parse &parse_naive_datetime/1
  end

  scalar :date do
    description """
    The `Date` scalar type represents a date. The Date appears in a JSON
    response as an ISO8601 formatted string, without a time component.
    """

    serialize &Date.to_iso8601/1
    parse &parse_date/1
  end

  scalar :time do
    description """
    The `Time` scalar type represents a time. The Time appears in a JSON
    response as an ISO8601 formatted string, without a date component.
    """

    serialize &Time.to_iso8601/1
    parse &parse_time/1
  end

  if Code.ensure_loaded?(Decimal) do
    scalar :decimal do
      description """
      The `Decimal` scalar type represents signed double-precision fractional
      values parsed by the `Decimal` library. The Decimal appears in a JSON
      response as a string to preserve precision.
      """

      serialize &Absinthe.Type.Custom.Decimal.serialize/1
      parse &Absinthe.Type.Custom.Decimal.parse/1
    end
  end

  # Parses an ISO8601 string (with offset) into a UTC DateTime; null parses
  # to nil; anything else is an error.
  @spec parse_datetime(Input.String.t()) :: {:ok, DateTime.t()} | :error
  @spec parse_datetime(Input.Null.t()) :: {:ok, nil}
  defp parse_datetime(%Input.String{value: value}) do
    with {:ok, datetime, _offset} <- DateTime.from_iso8601(value) do
      {:ok, datetime}
    else
      _ -> :error
    end
  end

  defp parse_datetime(%Input.Null{}), do: {:ok, nil}
  defp parse_datetime(_), do: :error

  # Parses an ISO8601 string into a NaiveDateTime; null parses to nil.
  @spec parse_naive_datetime(Input.String.t()) :: {:ok, NaiveDateTime.t()} | :error
  @spec parse_naive_datetime(Input.Null.t()) :: {:ok, nil}
  defp parse_naive_datetime(%Input.String{value: value}) do
    case NaiveDateTime.from_iso8601(value) do
      {:ok, naive_datetime} -> {:ok, naive_datetime}
      _ -> :error
    end
  end

  defp parse_naive_datetime(%Input.Null{}), do: {:ok, nil}
  defp parse_naive_datetime(_), do: :error

  # Parses an ISO8601 date string into a Date; null parses to nil.
  @spec parse_date(Input.String.t()) :: {:ok, Date.t()} | :error
  @spec parse_date(Input.Null.t()) :: {:ok, nil}
  defp parse_date(%Input.String{value: value}) do
    case Date.from_iso8601(value) do
      {:ok, date} -> {:ok, date}
      _ -> :error
    end
  end

  defp parse_date(%Input.Null{}), do: {:ok, nil}
  defp parse_date(_), do: :error

  # Parses an ISO8601 time string into a Time; null parses to nil.
  @spec parse_time(Input.String.t()) :: {:ok, Time.t()} | :error
  @spec parse_time(Input.Null.t()) :: {:ok, nil}
  defp parse_time(%Input.String{value: value}) do
    case Time.from_iso8601(value) do
      {:ok, time} -> {:ok, time}
      _ -> :error
    end
  end

  defp parse_time(%Input.Null{}), do: {:ok, nil}
  defp parse_time(_), do: :error
end
|
lib/absinthe/type/custom.ex
| 0.882769 | 0.638032 |
custom.ex
|
starcoder
|
defmodule Mix.Tasks.Xref do
use Mix.Task
import Mix.Compilers.Elixir,
only: [read_manifest: 1, source: 0, source: 1, source: 2, module: 1]
@shortdoc "Prints cross reference information"
@recursive true
@manifest "compile.elixir"
@moduledoc """
Prints cross reference information between modules.
The `xref` task expects a mode as first argument:
$ mix xref MODE
All available modes are discussed below.
This task is automatically reenabled, so you can print
information multiple times in the same Mix invocation.
## mix xref callers MODULE
Prints all callers of the given `MODULE`. Example:
$ mix xref callers MyMod
## mix xref trace FILE
Compiles the given file listing all dependencies within the same app.
It includes the type and line for each one. Example:
$ mix xref trace lib/my_app/router.ex
The `--label` option may be given to keep only certain traces
(compile, runtime or export):
$ mix xref trace lib/my_app/router.ex --label compile
If you have an umbrella application, we also recommend using the
`--include-siblings` flag to see the dependencies on other
umbrella applications.
### Example
Imagine the given file lib/b.ex:
defmodule B do
import A
A.macro()
macro()
A.fun()
fun()
def calls_macro, do: A.macro()
def calls_fun, do: A.fun()
def calls_struct, do: %A{}
end
`mix xref trace` will print:
lib/b.ex:2: require A (export)
lib/b.ex:3: call A.macro/0 (compile)
lib/b.ex:4: import A.macro/0 (compile)
lib/b.ex:5: call A.fun/0 (compile)
lib/b.ex:6: call A.fun/0 (compile)
lib/b.ex:6: import A.fun/0 (compile)
lib/b.ex:7: call A.macro/0 (compile)
lib/b.ex:8: call A.fun/0 (runtime)
lib/b.ex:9: struct A (export)
## mix xref graph
Prints a file dependency graph where an edge from `A` to `B` indicates
that `A` (source) depends on `B` (sink).
$ mix xref graph --format stats
The following options are accepted:
* `--exclude` - paths to exclude
* `--label` - only shows relationships with the given label.
The labels are "compile", "export" and "runtime". By default,
the `--label` option simply filters the printed graph to show
only relationships with the given label. If you want to
effectively filter the graph, you can pass the `--only-direct`
flag. There is also a special label called "compile-connected"
that keeps only compile-time files with at least one transitive
dependency. See "Dependencies types" section below.
* `--only-direct` - keeps only files with the direct relationship
given by `--label`
* `--only-nodes` - only shows the node names (no edges).
Generally useful with the `--sink` flag
* `--source` - displays all files that the given source file
references (directly or indirectly)
* `--sink` - displays all files that reference the given file
(directly or indirectly)
* `--min-cycle-size` - controls the minimum cycle size on formats
like `stats` and `cycles`
* `--format` - can be set to one of:
* `pretty` - prints the graph to the terminal using Unicode characters.
Each prints each file followed by the files it depends on. This is the
default except on Windows;
* `plain` - the same as pretty except ASCII characters are used instead of
Unicode characters. This is the default on Windows;
* `stats` - prints general statistics about the graph;
* `cycles` - prints all cycles in the graph;
* `dot` - produces a DOT graph description in `xref_graph.dot` in the
current directory. Warning: this will override any previously generated file
The `--source` and `--sink` options are particularly useful when trying to understand
how the modules in a particular file interact with the whole system. You can combine
those options with `--label` and `--only-nodes` to get all files that exhibit a certain
property, for example:
# To show all compile-time relationships
mix xref graph --label compile
# To get the tree that depend on lib/foo.ex at compile time
mix xref graph --label compile --sink lib/foo.ex
# To get all files that depend on lib/foo.ex at compile time
mix xref graph --label compile --sink lib/foo.ex --only-nodes
# To get all paths between two files
mix xref graph --source lib/foo.ex --sink lib/bar.ex
# To show general statistics about the graph
mix xref graph --format stats
### Understanding the printed graph
When `mix xref graph` runs, it will print a tree of the following
format. Imagine the following code:
# lib/a.ex
defmodule A do
IO.puts B.hello()
end
# lib/b.ex
defmodule B do
def hello, do: C.world()
end
# lib/c.ex
defmodule C do
def world, do: "hello world"
end
It will print:
$ mix xref graph
lib/a.ex
└── lib/b.ex (compile)
lib/b.ex
└── lib/c.ex
lib/c.ex
This tree means that `lib/a.ex` depends on `lib/b.ex` at compile
time. And `lib/b.ex` depends on `lib/c.ex` at runtime. This is often
problematic because if `lib/c.ex` changes, `lib/a.ex` also has to
recompile due to this indirect compile time dependency. When you pass
`--label compile`, the graph shows only the compile-time dependencies:
$ mix xref graph --label compile
lib/a.ex
└── lib/b.ex (compile)
The `--label compile` flag removes all non-compile dependencies. However,
this can be misleading because having direct compile time dependencies is
not necessarily an issue. The biggest concern, as mentioned above, are the
transitive compile time dependencies. You can get all compile time
dependencies that cause transitive compile time dependencies by using
`--label compile-connected`:
$ mix xref graph --label compile-connected
lib/a.ex
└── lib/b.ex (compile)
The above says `lib/a.ex` depends on `lib/b.ex` and that causes transitive
compile time dependencies - as we know, `lib/a.ex` also depends on `lib/c.ex`.
We can retrieve those transitive dependencies by passing `lib/b.ex` as
`--source` to `mix xref graph`:
$ mix xref graph --source lib/b.ex
lib/b.ex
└── lib/c.ex
Similarly, you can use the `--label compile` and the `--sink` flag to find
all compile time dependencies that will recompile once the sink changes:
$ mix xref graph --label compile --sink lib/c.ex
lib/a.ex
└── lib/b.ex (compile)
### Dependencies types
Elixir tracks three types of dependencies between modules: compile,
exports, and runtime. If a module has a compile time dependency on
another module, the caller module has to be recompiled whenever the
callee changes. Compile-time dependencies are typically added when
using macros or when invoking functions in the module body (outside
of functions). You can list all dependencies in a file by running
`mix xref trace path/to/file.ex`.
Exports dependencies are compile time dependencies on the module API,
namely structs and its public definitions. For example, if you import
a module but only use its functions, it is an export dependency. If
you use a struct, it is an export dependency too. Export dependencies
are only recompiled if the module API changes. Note, however, that compile
time dependencies have higher precedence than exports. Therefore if
you import a module and use its macros, it is a compile time dependency.
Runtime dependencies are added whenever you invoke another module
inside a function. Modules with runtime dependencies do not have
to be compiled when the callee changes, unless there is a transitive
compile or an outdated export time dependency between them. The option
`--label compile-connected` can be used to find the first case.
## Shared options
Those options are shared across all modes:
* `--fail-above` - generates a failure if the relevant metric is above the
given threshold. This metric is the number of references, except for
`--format cycles` where it is the number of cycles, and `--format stats`
which has none.
* `--include-siblings` - includes dependencies that have `:in_umbrella` set
to true in the current project in the reports. This can be used to find
callers or to analyze graphs between projects
* `--no-compile` - does not compile even if files require compilation
* `--no-deps-check` - does not check dependencies
* `--no-archives-check` - does not check archives
* `--no-elixir-version-check` - does not check the Elixir version from mix.exs
"""
@switches [
archives_check: :boolean,
compile: :boolean,
deps_check: :boolean,
elixir_version_check: :boolean,
exclude: :keep,
fail_above: :integer,
format: :string,
include_siblings: :boolean,
label: :string,
only_nodes: :boolean,
only_direct: :boolean,
sink: :keep,
source: :keep,
min_cycle_size: :integer
]
@impl true
def run(args) do
  # Compile first so manifests and beam files reflect the current code.
  Mix.Task.run("compile", args)
  # Allow `xref` to be invoked multiple times in the same Mix run (see
  # "automatically reenabled" note in the moduledoc).
  Mix.Task.reenable("xref")

  {opts, args} = OptionParser.parse!(args, strict: @switches)

  # Dispatch on the mode given as the first positional argument.
  case args do
    ["callers", module] ->
      handle_callers(module, opts)

    ["trace", file] ->
      handle_trace(file, opts)

    ["graph"] ->
      handle_graph(opts)

    # TODO: Remove on v2.0
    ["deprecated"] ->
      Mix.shell().error(
        "The deprecated check has been moved to the compiler and has no effect now"
      )

    # TODO: Remove on v2.0
    ["unreachable"] ->
      Mix.shell().error(
        "The unreachable check has been moved to the compiler and has no effect now"
      )

    _ ->
      Mix.raise("xref doesn't support this command. For more information run \"mix help xref\"")
  end
end
@doc """
Returns a list of information of all the runtime function calls in the project.
Each item in the list is a map with the following keys:
* `:callee` - a tuple containing the module, function, and arity of the call
* `:line` - an integer representing the line where the function is called
* `:file` - a binary representing the file where the function is called
* `:caller_module` - the module where the function is called
This function returns an empty list when used at the root of an umbrella
project because there is no compile manifest to extract the function call
information from. To get the function calls of each child in an umbrella,
execute the function at the root of each individual application.
"""
# TODO: Deprecate me on v1.14
@doc deprecated: "Use compilation tracers described in the Code module"
@spec calls(keyword()) :: [
%{
callee: {module(), atom(), arity()},
line: integer(),
file: String.t()
}
]
def calls(opts \\ []) do
  # Walk each compile manifest; element 1 of the manifest tuple holds the
  # source records (see read_manifest usage in file_references/2). For each
  # module recorded in a source, extract its remote calls from debug info.
  for manifest <- manifests(opts),
      source(source: source, modules: modules) <- read_manifest(manifest) |> elem(1),
      module <- modules,
      call <- collect_calls(source, module),
      do: call
end
# Extracts the call sites of `module` by loading its .beam file's
# :debug_info chunk and walking the Elixir AST definitions. Any failure in
# the chain (module not loaded from disk, chunk missing, unknown backend)
# yields an empty list rather than raising.
defp collect_calls(source, module) do
  with [_ | _] = path <- :code.which(module),
       {:ok, {_, [debug_info: debug_info]}} <- :beam_lib.chunks(path, [:debug_info]),
       {:debug_info_v1, backend, data} <- debug_info,
       {:ok, %{definitions: defs}} <- backend.debug_info(:elixir_v1, module, data, []),
       do: walk_definitions(module, source, defs),
       else: (_ -> [])
end
# Folds every definition of a module through the AST walker, accumulating
# call entries into the state's :calls list.
defp walk_definitions(module, file, definitions) do
  initial = %{file: file, module: module, calls: []}

  definitions
  |> Enum.reduce(initial, &walk_definition/2)
  |> Map.fetch!(:calls)
end
# Walks all clauses of one definition, temporarily switching the state's
# file when the definition metadata carries a :file override.
defp walk_definition({_function, _kind, meta, clauses}, state) do
  with_file_meta(state, meta, fn acc ->
    Enum.reduce(clauses, acc, &walk_clause/2)
  end)
end
# Runs `fun` with the state's :file swapped to the metadata's file (if any),
# restoring the original file afterwards.
defp with_file_meta(%{file: original_file} = state, meta, fun) do
  case Keyword.fetch(meta, :file) do
    {:ok, {meta_file, _line}} ->
      %{fun.(%{state | file: meta_file}) | file: original_file}

    :error ->
      fun.(state)
  end
end
# Walks a clause's head arguments first, then its body.
defp walk_clause({_meta, args, _guards, body}, state) do
  walk_expr(body, walk_expr(args, state))
end
# Recursively walks quoted Elixir AST, recording remote calls into `state`.
# Clause order matters: the specific shapes must match before the generic
# 3-tuple / 2-tuple / list fallthroughs.

# &Mod.fun/arity
defp walk_expr({:&, meta, [{:/, _, [{{:., _, [module, fun]}, _, []}, arity]}]}, state)
     when is_atom(module) and is_atom(fun) do
  add_call(module, fun, arity, meta, state)
end

# Mod.fun(...)
defp walk_expr({{:., _, [module, fun]}, meta, args}, state)
     when is_atom(module) and is_atom(fun) do
  state = add_call(module, fun, length(args), meta, state)
  walk_expr(args, state)
end

# %Module{...} — recorded as a call to __struct__/0
defp walk_expr({:%, meta, [module, {:%{}, _meta, args}]}, state)
     when is_atom(module) and is_list(args) do
  state = add_call(module, :__struct__, 0, meta, state)
  walk_expr(args, state)
end

# Function call
defp walk_expr({left, _meta, right}, state) when is_list(right) do
  state = walk_expr(right, state)
  walk_expr(left, state)
end

# {x, y}
defp walk_expr({left, right}, state) do
  state = walk_expr(right, state)
  walk_expr(left, state)
end

# [...]
defp walk_expr(list, state) when is_list(list) do
  Enum.reduce(list, state, &walk_expr/2)
end

# Leaves (atoms, numbers, binaries, variables) contain no calls.
defp walk_expr(_other, state) do
  state
end
# Prepends one call entry (callee MFA plus caller location) to the
# accumulated :calls list.
defp add_call(module, fun, arity, meta, state) do
  entry = %{
    callee: {module, fun, arity},
    caller_module: state.module,
    file: state.file,
    line: meta[:line]
  }

  Map.update!(state, :calls, &[entry | &1])
end
## Modes
defp handle_callers(module, opts) do
  module = parse_module(module)

  # Pair each source file with the kind of reference ("compile"/"export"/
  # "runtime") it holds on `module`. `reference/2` returning nil acts as a
  # comprehension filter, dropping files with no reference at all.
  file_callers =
    for source <- sources(opts),
        reference = reference(module, source),
        do: {source(source, :source), reference}

  for {file, type} <- Enum.sort(file_callers) do
    Mix.shell().info([file, " (", type, ")"])
  end

  # --fail-above counts the number of referencing files.
  check_failure(:references, length(file_callers), opts[:fail_above])
end
defp handle_trace(file, opts) do
  # Modules belonging to the relevant app(s); only traces targeting these
  # modules are recorded (see add_trace/6).
  set =
    for app <- apps(opts),
        modules = Application.spec(app, :modules),
        module <- modules,
        into: MapSet.new(),
        do: module

  # Install this module as a compilation tracer (trace/2 callback) and
  # silence redefinition warnings for the file's modules.
  old = Code.compiler_options(ignore_module_conflict: true, tracers: [__MODULE__])
  ets = :ets.new(__MODULE__, [:named_table, :duplicate_bag, :public])
  :ets.insert(ets, [{:config, set, trace_label(opts[:label])}])

  try do
    Code.compile_file(file)
  else
    _ ->
      :ets.delete(ets, :modules)

      traces =
        try do
          print_traces(Enum.sort(:ets.lookup_element(__MODULE__, :entry, 2)))
        rescue
          _ -> []
        end

      check_failure(:traces, length(traces), opts[:fail_above])
  after
    # Always drop the ETS table and restore prior compiler options, even if
    # compilation raised.
    :ets.delete(ets)
    Code.compiler_options(old)
  end
end
# With --only-direct the label filters edges as they are collected;
# otherwise the full graph is built and filtered afterwards.
defp handle_graph(opts) do
  label = label_filter(opts[:label])

  {direct_filter, transitive_filter} =
    case opts[:only_direct] do
      true -> {label, :all}
      _ -> {:all, label}
    end

  direct_filter
  |> file_references(opts)
  |> write_graph(transitive_filter, opts)
end
## Callers
# Parses a module name given on the command line; raises a Mix error for
# anything that is not exactly one module (e.g. an MFA or garbage input).
defp parse_module(module) do
  with {:ok, [parsed]} <- Mix.Utils.parse_mfa(module) do
    parsed
  else
    _ -> Mix.raise("xref callers MODULE expects a MODULE, got: " <> module)
  end
end
# Classifies how `source` references `module`; checks compile first since
# it is the strongest dependency kind. Returns nil when unreferenced.
defp reference(module, source) do
  [
    {"compile", source(source, :compile_references)},
    {"export", source(source, :export_references)},
    {"runtime", source(source, :runtime_references)}
  ]
  |> Enum.find_value(fn {label, refs} -> if module in refs, do: label end)
end
## Trace
@doc false
# Compilation tracer callback installed by handle_trace/2. Each event is
# recorded via add_trace/6 with a mode (:compile/:export/:runtime) and a
# type (:require/:struct/:alias/:call/:import).
def trace({:require, meta, module, _opts}, env),
  do: add_trace(:export, :require, module, module, meta, env)

def trace({:struct_expansion, meta, module, _keys}, env),
  do: add_trace(:export, :struct, module, module, meta, env)

# Self-references (env.module == module) are not traced.
def trace({:alias_reference, meta, module}, env) when env.module != module,
  do: add_trace(mode(env), :alias, module, module, meta, env)

def trace({:remote_function, meta, module, function, arity}, env),
  do: add_trace(mode(env), :call, module, {module, function, arity}, meta, env)

# Macro usage is always a compile-time dependency.
def trace({:remote_macro, meta, module, function, arity}, env),
  do: add_trace(:compile, :call, module, {module, function, arity}, meta, env)

def trace({:imported_function, meta, module, function, arity}, env),
  do: add_trace(mode(env), :import, module, {module, function, arity}, meta, env)

def trace({:imported_macro, meta, module, function, arity}, env),
  do: add_trace(:compile, :import, module, {module, function, arity}, meta, env)

# All other compiler events are ignored.
def trace(_event, _env),
  do: :ok
# References made in the module body (no enclosing function) are
# compile-time; references inside a function body happen at runtime.
defp mode(env) do
  case env.function do
    nil -> :compile
    _ -> :runtime
  end
end
defp add_trace(mode, type, module, module_or_mfa, meta, env) do
  # The :config row carries the set of in-app modules and the optional
  # --label filter (nil = keep all modes), both set by handle_trace/2.
  [{:config, modules, label}] = :ets.lookup(__MODULE__, :config)

  # Record only in-app modules, and only the requested mode when filtering.
  if module in modules and (label == nil or mode == label) do
    line = meta[:line] || env.line
    :ets.insert(__MODULE__, {:entry, {env.file, line, module_or_mfa, mode, type}})
  end

  :ok
end
defp print_traces(entries) do
  # We don't want to show aliases if there is an entry of the same type;
  # collect {module, mode} keys from all non-alias entries so alias entries
  # covered by a stronger trace can be skipped below.
  non_aliases =
    for {_file, _line, module_or_mfa, mode, type} <- entries,
        type != :alias,
        into: %{},
        do: {{trace_module(module_or_mfa), mode}, []}

  shell = Mix.shell()

  # The comprehension result (one :ok per printed trace) is returned so the
  # caller (handle_trace/2) can count the traces for --fail-above.
  for {file, line, module_or_mfa, mode, type} <- entries,
      type != :alias or not Map.has_key?(non_aliases, {module_or_mfa, mode}) do
    shell.info([
      Exception.format_file_line(Path.relative_to_cwd(file), line),
      ?\s,
      Atom.to_string(type),
      ?\s,
      format_module_or_mfa(module_or_mfa),
      " (#{mode})"
    ])

    :ok
  end
end
# Converts the --label CLI string to its mode atom; nil means no filter.
defp trace_label(nil), do: nil

defp trace_label(label) do
  case label do
    "compile" -> :compile
    "export" -> :export
    "runtime" -> :runtime
    other -> Mix.raise("Unknown --label #{other} in mix xref trace")
  end
end
# Normalizes either an MFA tuple or a bare module down to the module.
defp trace_module(module_or_mfa) do
  case module_or_mfa do
    {module, _fun, _arity} -> module
    module -> module
  end
end
# Renders an MFA as "Mod.fun/arity" or a bare module via inspect.
defp format_module_or_mfa(module_or_mfa) do
  case module_or_mfa do
    {m, f, a} -> Exception.format_mfa(m, f, a)
    module -> inspect(module)
  end
end
## Graph
# Removes the --exclude'd files from the graph: both their own nodes and
# every edge pointing at them.
defp exclude(file_references, []), do: file_references

defp exclude(file_references, excluded) do
  excluded_set = MapSet.new(excluded)

  for {file, refs} <- Map.drop(file_references, excluded), into: %{} do
    {file, for({ref, kind} <- refs, ref not in excluded_set, do: {ref, kind})}
  end
end
# Maps the --label CLI string to the internal filter value. Note "runtime"
# maps to nil because runtime edges are stored unlabeled.
defp label_filter(label) do
  case label do
    nil -> :all
    "compile" -> :compile
    "export" -> :export
    "runtime" -> nil
    "compile-connected" -> :compile_connected
    other -> Mix.raise("Unknown --label #{other} in mix xref graph")
  end
end
# compile-connected needs the full graph to find transitive dependencies,
# so it cannot be combined with direct-edge filtering.
defp file_references(:compile_connected, _opts) do
  Mix.raise("Cannot use --only-direct with --label=compile-connected")
end

defp file_references(filter, opts) do
  # Pair every module with the source record it was compiled from, reading
  # all compile manifests.
  module_sources =
    for manifest_path <- manifests(opts),
        {manifest_modules, manifest_sources} = read_manifest(manifest_path),
        module(module: module, sources: sources) <- manifest_modules,
        source <- sources,
        source = Enum.find(manifest_sources, &match?(source(source: ^source), &1)),
        do: {module, source}

  all_modules = MapSet.new(module_sources, &elem(&1, 0))

  # For each file, build its outgoing edges as {referenced_file, label}
  # where label is :compile, :export or nil (runtime).
  Map.new(module_sources, fn {current, source} ->
    source(
      runtime_references: runtime,
      export_references: exports,
      compile_references: compile,
      source: file
    ) = source

    compile_references =
      modules_to_nodes(compile, :compile, current, source, module_sources, all_modules, filter)

    export_references =
      modules_to_nodes(exports, :export, current, source, module_sources, all_modules, filter)

    runtime_references =
      modules_to_nodes(runtime, nil, current, source, module_sources, all_modules, filter)

    # Merge order gives precedence: compile beats export beats runtime when
    # the same file is referenced in more than one way.
    references =
      runtime_references
      |> Map.merge(export_references)
      |> Map.merge(compile_references)
      |> Enum.to_list()

    {file, references}
  end)
end
# When a direct-edge filter is active, reference sets with a different
# label contribute nothing.
defp modules_to_nodes(_, label, _, _, _, _, filter) when filter != :all and label != filter do
  %{}
end

# Turns referenced modules into {file, label} edges, skipping
# self-references, modules outside the project, and modules defined in the
# same source file.
defp modules_to_nodes(modules, label, current, source, module_sources, all_modules, _filter) do
  for module <- modules,
      module != current,
      module in all_modules,
      module_sources[module] != source,
      do: {source(module_sources[module], :source), label},
      into: %{}
end
# Collects the --source/--sink values from opts, raising if any named file
# is not present in the graph; returns nil when none were given.
defp get_files(what, opts, file_references) do
  files = Keyword.get_values(opts, what)
  missing = files -- Map.keys(file_references)

  if missing != [] do
    Mix.raise(
      "#{Macro.camelize(to_string(what))}s could not be found: #{Enum.join(missing, ", ")}"
    )
  end

  if files == [], do: nil, else: files
end
defp write_graph(file_references, filter, opts) do
  file_references = exclude(file_references, Keyword.get_values(opts, :exclude))

  sources = get_files(:source, opts, file_references)
  sinks = get_files(:sink, opts, file_references)

  # Narrow the graph to the requested --source/--sink subtrees; sinks win
  # when both are given.
  file_references =
    cond do
      sinks -> sink_tree(file_references, sinks)
      sources -> source_tree(file_references, sources)
      true -> file_references
    end

  # Filter according to non direct label
  file_references = filter(file_references, filter)

  # If a label is given, remove empty root nodes
  file_references =
    if opts[:label] do
      for {_, [_ | _]} = pair <- file_references, into: %{}, do: pair
    else
      file_references
    end

  # Roots of the printed tree: the given sources, or every node that is not
  # itself a sink.
  roots =
    if sources do
      Enum.map(sources, &{&1, nil})
    else
      file_references
      |> Map.drop(sinks || [])
      |> Enum.map(&{elem(&1, 0), nil})
    end

  # Callback for the tree/dot printers: expands a node to its sorted
  # children (none when --only-nodes is set).
  callback = fn {file, type} ->
    children = if opts[:only_nodes], do: [], else: Map.get(file_references, file, [])
    type = type && "(#{type})"
    {{file, type}, Enum.sort(children)}
  end

  # Each format reports which metric --fail-above compares against.
  {found, count} =
    case opts[:format] do
      "dot" ->
        Mix.Utils.write_dot_graph!(
          "xref_graph.dot",
          "xref graph",
          Enum.sort(roots),
          callback,
          opts
        )

        """
        Generated "xref_graph.dot" in the current directory. To generate a PNG:
        dot -Tpng xref_graph.dot -o xref_graph.png
        For more options see http://www.graphviz.org/.
        """
        |> String.trim_trailing()
        |> Mix.shell().info()

        {:references, count_references(file_references)}

      "stats" ->
        print_stats(file_references, opts)
        {:stats, 0}

      "cycles" ->
        {:cycles, print_cycles(file_references, opts)}

      other when other in [nil, "plain", "pretty"] ->
        Mix.Utils.print_tree(Enum.sort(roots), callback, opts)
        {:references, count_references(file_references)}

      other ->
        Mix.raise("Unknown --format #{other} in mix xref graph")
    end

  check_failure(found, count, opts[:fail_above])
end
# Total number of edges across the whole graph.
defp count_references(file_references) do
  file_references
  |> Enum.map(fn {_file, refs} -> length(refs) end)
  |> Enum.sum()
end
# Edge predicate for compile-connected: keep compile edges whose target
# itself has at least one outgoing edge (i.e. transitive dependencies).
defp filter_fn(file_references, :compile_connected) do
  fn {key, type} ->
    type == :compile and match?([_ | _], Map.get(file_references, key, []))
  end
end

# Plain label filter: keep only edges carrying exactly this label.
defp filter_fn(_file_references, filter) do
  fn {_key, type} -> type == filter end
end
# Drops every edge that does not satisfy the --label filter; `:all`
# leaves the graph untouched. Nodes are always kept, even when the
# filter leaves them childless.
defp filter(file_references, :all), do: file_references

defp filter(file_references, label) do
  keep? = filter_fn(file_references, label)

  Map.new(file_references, fn {file, children} ->
    {file, Enum.filter(children, keep?)}
  end)
end
# Restricts the graph to everything transitively reachable from `keys`
# by following outgoing references depth-first.
defp source_tree(file_references, keys) do
  {tree, _visited} =
    Enum.reduce(keys, {%{}, %{}}, fn key, {tree, visited} ->
      source_tree(file_references, key, tree, visited)
    end)

  tree
end

# Depth-first walk from `key`: `acc` collects the reachable subgraph,
# `seen` guards against revisiting nodes (and therefore against cycles).
defp source_tree(file_references, key, acc, seen) do
  children = Map.get(file_references, key)

  if children == nil or Map.has_key?(seen, key) do
    {acc, seen}
  else
    initial = {Map.put(acc, key, children), Map.put(seen, key, true)}

    Enum.reduce(children, initial, fn {child, _type}, {acc, seen} ->
      source_tree(file_references, child, acc, seen)
    end)
  end
end
# Restricts the graph to everything that can transitively reach `keys`:
# invert every edge, take the source tree rooted at `keys`, then invert
# the result back into its original orientation.
defp sink_tree(file_references, keys) do
  inverted = invert_references(file_references)
  reachable = source_tree(inverted, keys)
  invert_references(reachable)
end
# Reverses the direction of every edge: `from -> {to, type}` becomes
# `to -> {from, type}`.
defp invert_references(file_references) do
  for {from, edges} <- file_references,
      {to, type} <- edges,
      reduce: %{} do
    acc -> Map.update(acc, to, [{from, type}], &[{from, type} | &1])
  end
end
# Prints summary statistics for the graph (used by --format stats):
# edge counts per dependency type, the number of cycles, and the top-10
# files by outgoing and incoming degree.
defp print_stats(references, opts) do
  with_digraph(references, fn graph ->
    shell = Mix.shell()

    # Tally edges by label; a nil label marks a runtime dependency.
    counters =
      Enum.reduce(references, %{compile: 0, export: 0, nil: 0}, fn {_, deps}, acc ->
        Enum.reduce(deps, acc, fn {_, value}, acc ->
          Map.update!(acc, value, &(&1 + 1))
        end)
      end)

    shell.info("Tracked files: #{map_size(references)} (nodes)")
    shell.info("Compile dependencies: #{counters.compile} (edges)")
    shell.info("Exports dependencies: #{counters.export} (edges)")
    shell.info("Runtime dependencies: #{counters.nil} (edges)")
    shell.info("Cycles: #{length(cycles(graph, opts))}")

    # Highest out-degree: files depending on the most other files.
    outgoing =
      references
      |> Enum.map(fn {file, _} -> {:digraph.out_degree(graph, file), file} end)
      |> Enum.sort(:desc)
      |> Enum.take(10)

    shell.info("\nTop #{length(outgoing)} files with most outgoing dependencies:")
    for {count, file} <- outgoing, do: shell.info(" * #{file} (#{count})")

    # Highest in-degree: files the most other files depend on.
    incoming =
      references
      |> Enum.map(fn {file, _} -> {:digraph.in_degree(graph, file), file} end)
      |> Enum.sort(:desc)
      |> Enum.take(10)

    shell.info("\nTop #{length(incoming)} files with most incoming dependencies:")
    for {count, file} <- incoming, do: shell.info(" * #{file} (#{count})")
  end)
end
# Builds a transient :digraph mirroring the reference map (vertices
# first, then labeled edges), hands it to `callback`, and always deletes
# it afterwards -- digraphs are ETS-backed and must be released
# explicitly. Returns whatever `callback` returns.
defp with_digraph(references, callback) do
  graph = :digraph.new()

  try do
    Enum.each(references, fn {file, _deps} ->
      :digraph.add_vertex(graph, file)
    end)

    Enum.each(references, fn {file, deps} ->
      Enum.each(deps, fn {dep, label} -> :digraph.add_edge(graph, file, dep, label) end)
    end)

    callback.(graph)
  after
    :digraph.delete(graph)
  end
end
# Returns every cycle in `graph` as `{length, vertices}` tuples,
# honoring --min-cycle-size (only cycles strictly longer than the given
# value are kept).
defp cycles(graph, opts) do
  sized_cycles =
    graph
    |> :digraph_utils.cyclic_strong_components()
    |> Enum.reduce([], fn component, acc -> inner_cycles(graph, component, acc) end)
    |> Enum.map(fn cycle -> {length(cycle), cycle} end)

  if min = opts[:min_cycle_size] do
    Enum.filter(sized_cycles, fn {size, _cycle} -> size > min end)
  else
    sized_cycles
  end
end

# Extracts one cycle per remaining vertex of a strongly connected
# component; the cycle's vertices are removed before recursing so each
# cycle is reported only once.
defp inner_cycles(_graph, [], acc), do: acc

defp inner_cycles(graph, [vertex | rest], acc) do
  cycle = :digraph.get_cycle(graph, vertex)
  inner_cycles(graph, rest -- cycle, [cycle | acc])
end
# Prints all cycles found in the graph, largest first, and returns how
# many there were (used by --format cycles and by --fail-above).
defp print_cycles(references, opts) do
  with_digraph(references, fn graph ->
    shell = Mix.shell()

    case graph |> cycles(opts) |> Enum.sort(:desc) do
      [] ->
        shell.info("No cycles found")
        0

      cycles ->
        shell.info("#{length(cycles)} cycles found. Showing them in decreasing size:\n")

        for {length, cycle} <- cycles do
          shell.info("Cycle of length #{length}:\n")

          for node <- cycle do
            shell.info(" " <> node)
          end

          shell.info("")
        end

        length(cycles)
    end
  end)
end
## Helpers
# All source entries from the compiler manifest(s) of the current
# project (plus umbrella siblings when --include-siblings is set).
# NOTE(review): `source()` appears to be a record macro imported
# elsewhere in this module, used here to keep only source records from
# the manifest tuple -- confirm against the compiler manifest format.
defp sources(opts) do
  for manifest <- manifests(opts),
      source() = source <- read_manifest(manifest) |> elem(1),
      do: source
end
# The app of the current project plus, when --include-siblings is
# given, the apps of every in-umbrella path dependency.
defp apps(opts) do
  sibling_apps =
    if opts[:include_siblings] do
      Enum.flat_map(Mix.Dep.cached(), fn
        %{scm: Mix.SCM.Path, opts: dep_opts, app: app} ->
          if dep_opts[:in_umbrella], do: [app], else: []

        _dep ->
          []
      end)
    else
      []
    end

  [Mix.Project.config()[:app] | sibling_apps]
end
# Paths to the compiler manifest of the current project plus, when
# --include-siblings is given, the manifests of every in-umbrella path
# dependency (located under each sibling's build directory).
defp manifests(opts) do
  sibling_manifests =
    if opts[:include_siblings] do
      Enum.flat_map(Mix.Dep.cached(), fn
        %{scm: Mix.SCM.Path, opts: dep_opts} ->
          if dep_opts[:in_umbrella],
            do: [Path.join([dep_opts[:build], ".mix", @manifest])],
            else: []

        _dep ->
          []
      end)
    else
      []
    end

  [Path.join(Mix.Project.manifest_path(), @manifest) | sibling_manifests]
end
# Enforces --fail-above: raises a Mix error when `count` exceeds the
# permitted maximum, and is a no-op when no maximum was given.
defp check_failure(found, count, max_count)
     when not is_nil(max_count) and count > max_count do
  Mix.raise("Too many #{found} (found: #{count}, permitted: #{max_count})")
end

defp check_failure(_found, _count, _max_count), do: :ok
end
|
lib/mix/lib/mix/tasks/xref.ex
| 0.876911 | 0.596727 |
xref.ex
|
starcoder
|
defmodule Xlsxir.ParseStyle do
  @moduledoc """
  Holds the SAX event instructions for parsing style data via `Xlsxir.SaxParser.parse/2`
  """

  # `numFmtId`s of the built-in spreadsheet number formats, split into
  # plain numbers and dates. Any other id must come from a custom
  # <numFmt> definition collected during parsing.
  @num [0, 1, 2, 3, 4, 9, 10, 11, 12, 13, 37, 38, 39, 40, 44, 48, 49] ++
         [56, 58, 59, 60, 61, 62, 67, 68, 69, 70]

  @date [14, 15, 16, 17, 18, 19, 20, 21, 22, 27, 30, 36, 45, 46, 47, 50, 57]

  # custom_style - numFmtId => formatCode for custom <numFmt> entries
  # cellxfs     - true while inside the <cellXfs> element
  # index       - next ETS key for emitted style types
  # tid         - ETS table id obtained from Xlsxir.StateManager
  # num_fmt_ids - numFmtId (charlist) of each <xf> inside <cellXfs>, in order
  defstruct custom_style: %{}, cellxfs: false, index: 0, tid: nil, num_fmt_ids: []

  @doc """
  Sax event utilized by `Xlsxir.SaxParser.parse/2`. Takes a pattern and the current state of a struct and recursively parses the
  styles XML file, ultimately saving each parsed style type to the ETS process. The style types generated are `nil` for numbers and `'d'` for dates.

  ## Parameters

  - pattern - the XML pattern of the event to match upon
  - state - the state of the `%Xlsxir.ParseStyle{}` struct which temporarily holds each `numFmtId` and its associated `formatCode` for custom format types

  ## Example

  Recursively sends style types generated from parsing the `xl/sharedStrings.xml` file to ETS process. The data can ultimately
  be retrieved from the ETS table (i.e. `:ets.lookup(tid, 0)` would return `nil` or `'d'` depending on each style type generated).
  """
  def sax_event_handler(:startDocument, _state) do
    %__MODULE__{tid: GenServer.call(Xlsxir.StateManager, :new_table)}
  end

  # Only <xf> elements nested inside <cellXfs> describe cell styles, so
  # track whether we are currently inside that element.
  def sax_event_handler({:startElement, _, 'cellXfs', _, _}, state) do
    %{state | cellxfs: true}
  end

  def sax_event_handler({:endElement, _, 'cellXfs', _}, state) do
    %{state | cellxfs: false}
  end

  def sax_event_handler(
        {:startElement, _, 'xf', _, xml_attr},
        %__MODULE__{num_fmt_ids: num_fmt_ids} = state
      ) do
    if state.cellxfs do
      # Record the xf's numFmtId in document order, defaulting to '0'
      # (the "General" format) when the attribute is absent.
      id =
        Enum.find_value(xml_attr, '0', fn
          {:attribute, 'numFmtId', _, _, id} -> id
          _ -> nil
        end)

      %{state | num_fmt_ids: num_fmt_ids ++ [id]}
    else
      state
    end
  end

  def sax_event_handler(
        {:startElement, _, 'numFmt', _, xml_attr},
        %__MODULE__{custom_style: custom_style} = state
      ) do
    # Collect the id/format-code pair of a custom <numFmt> definition.
    # BUGFIX: the catch-all clause previously returned `nil`, discarding
    # everything gathered so far whenever any unrelated attribute
    # appeared; it must pass the accumulator through unchanged.
    temp =
      Enum.reduce(xml_attr, %{}, fn attr, acc ->
        case attr do
          {:attribute, 'numFmtId', _, _, id} -> Map.put(acc, :id, id)
          {:attribute, 'formatCode', _, _, cd} -> Map.put(acc, :cd, cd)
          _ -> acc
        end
      end)

    %{state | custom_style: Map.put(custom_style, temp[:id], temp[:cd])}
  end

  def sax_event_handler(:endDocument, %__MODULE__{} = state) do
    %__MODULE__{custom_style: custom_style, num_fmt_ids: num_fmt_ids, index: index, tid: tid} =
      state

    custom_type = custom_style_handler(custom_style)

    # Emit one {index, type} row per recorded style: nil for numbers,
    # 'd' for dates, custom ids resolved via the collected formatCodes.
    inc =
      Enum.reduce(num_fmt_ids, 0, fn style_type, acc ->
        case List.to_integer(style_type) do
          i when i in @num -> :ets.insert(tid, {index + acc, nil})
          i when i in @date -> :ets.insert(tid, {index + acc, 'd'})
          _ -> add_custom_style(tid, style_type, custom_type, index + acc)
        end

        acc + 1
      end)

    %{state | index: index + inc}
  end

  def sax_event_handler(_, state), do: state

  # Classifies each custom format code: codes containing date tokens
  # (d/h/m/s/y) map to 'd'; everything else maps to nil. The "red" check
  # runs first so color-only codes such as "[Red]" are not mistaken for
  # dates (the letter "d" in "Red" would otherwise match).
  defp custom_style_handler(custom_style) do
    custom_style
    |> Enum.reduce(%{}, fn {k, v}, acc ->
      cond do
        String.match?(to_string(v), ~r/\bred\b/i) -> Map.put_new(acc, k, nil)
        String.match?(to_string(v), ~r/[dhmsy]/i) -> Map.put_new(acc, k, 'd')
        true -> Map.put_new(acc, k, nil)
      end
    end)
  end

  # Inserts the resolved type of a custom style id, raising when the id
  # was never defined by a <numFmt> element.
  defp add_custom_style(tid, style_type, custom_type, index) do
    if Map.has_key?(custom_type, style_type) do
      :ets.insert(tid, {index, custom_type[style_type]})
    else
      raise "Unsupported style type: #{style_type}. See doc page \"Number Styles\" for more info."
    end
  end
end
|
lib/xlsxir/parse_style.ex
| 0.798658 | 0.51251 |
parse_style.ex
|
starcoder
|
defmodule AutoApi.RaceState do
  @moduledoc """
  Keeps Race state
  """

  alias AutoApi.{CommonData, State, UnitType}

  # NOTE(review): `use AutoApi.State` presumably generates the struct,
  # parse_bin_properties/2 and parse_state_properties/1 from the race
  # capability spec -- confirm against AutoApi.State.
  use AutoApi.State, spec_file: "race.json"

  # Axis along which an acceleration property is measured.
  @type direction ::
          :longitudinal
          | :lateral
          | :front_lateral
          | :rear_lateral

  # One acceleration reading together with its measurement axis.
  @type acceleration :: %{direction: direction(), acceleration: UnitType.acceleration()}

  # Transmission mode.
  @type gear_mode :: :manual | :park | :reverse | :neutral | :drive | :low_gear | :sport

  # Brake torque vectoring activity for one axle.
  @type brake_torque_vectoring :: %{
          axle: CommonData.location_longitudinal(),
          vectoring: CommonData.activity()
        }

  @type vehicle_moving :: :moving | :not_moving

  @type t :: %__MODULE__{
          accelerations: State.multiple_property(acceleration()),
          understeering: State.property(float),
          oversteering: State.property(float),
          gas_pedal_position: State.property(float),
          steering_angle: State.property(UnitType.angle()),
          brake_pressure: State.property(UnitType.pressure()),
          yaw_rate: State.property(UnitType.angular_velocity()),
          rear_suspension_steering: State.property(UnitType.angle()),
          electronic_stability_program: State.property(CommonData.activity()),
          brake_torque_vectorings: State.multiple_property(brake_torque_vectoring),
          gear_mode: State.property(gear_mode),
          selected_gear: State.property(integer),
          brake_pedal_position: State.property(float),
          brake_pedal_switch: State.property(CommonData.activity()),
          clutch_pedal_switch: State.property(CommonData.activity()),
          accelerator_pedal_idle_switch: State.property(CommonData.activity()),
          accelerator_pedal_kickdown_switch: State.property(CommonData.activity()),
          vehicle_moving: State.property(vehicle_moving)
        }

  @doc """
  Build state based on binary value

      iex> bin = <<2, 0, 11, 1, 0, 8, 64, 54, 43, 133, 30, 184, 81, 236>>
      iex> AutoApi.RaceState.from_bin(bin)
      %AutoApi.RaceState{understeering: %AutoApi.PropertyComponent{data: 22.17}}
  """
  @spec from_bin(binary) :: __MODULE__.t()
  def from_bin(bin) do
    parse_bin_properties(bin, %__MODULE__{})
  end

  @spec to_bin(__MODULE__.t()) :: binary
  @doc """
  Parse state to bin

      iex> state = %AutoApi.RaceState{understeering: %AutoApi.PropertyComponent{data: 22.17}}
      iex> AutoApi.RaceState.to_bin(state)
      <<2, 0, 11, 1, 0, 8, 64, 54, 43, 133, 30, 184, 81, 236>>
  """
  def to_bin(%__MODULE__{} = state) do
    parse_state_properties(state)
  end
end
|
lib/auto_api/states/race_state.ex
| 0.814311 | 0.444203 |
race_state.ex
|
starcoder
|
defmodule AWS.SSM do
@moduledoc """
AWS Systems Manager
AWS Systems Manager is a collection of capabilities that helps you automate
management tasks such as collecting system inventory, applying operating system
(OS) patches, automating the creation of Amazon Machine Images (AMIs), and
configuring operating systems (OSs) and applications at scale.
Systems Manager lets you remotely and securely manage the configuration of your
managed instances. A *managed instance* is any Amazon Elastic Compute Cloud
instance (EC2 instance), or any on-premises server or virtual machine (VM) in
your hybrid environment that has been configured for Systems Manager.
This reference is intended to be used with the [AWS Systems Manager User Guide](https://docs.aws.amazon.com/systems-manager/latest/userguide/).
To get started, verify prerequisites and configure managed instances. For more
information, see [Setting up AWS Systems Manager](https://docs.aws.amazon.com/systems-manager/latest/userguide/systems-manager-setting-up.html)
in the *AWS Systems Manager User Guide*.
For information about other API actions you can perform on EC2 instances, see
the [Amazon EC2 API Reference](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/). For
information about how to use a Query API, see [Making API requests](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/making-api-requests.html).
"""
@doc """
Adds or overwrites one or more tags for the specified resource.
Tags are metadata that you can assign to your documents, managed instances,
maintenance windows, Parameter Store parameters, and patch baselines. Tags
enable you to categorize your resources in different ways, for example, by
purpose, owner, or environment. Each tag consists of a key and an optional
value, both of which you define. For example, you could define a set of tags for
your account's managed instances that helps you track each instance's owner and
stack level. For example: Key=Owner and Value=DbAdmin, SysAdmin, or Dev. Or
Key=Stack and Value=Production, Pre-Production, or Test.
Each resource can have a maximum of 50 tags.
We recommend that you devise a set of tag keys that meets your needs for each
resource type. Using a consistent set of tag keys makes it easier for you to
manage your resources. You can search and filter the resources based on the tags
you add. Tags don't have any semantic meaning to and are interpreted strictly as
a string of characters.
For more information about using tags with EC2 instances, see [Tagging your Amazon EC2
resources](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Using_Tags.html)
in the *Amazon EC2 User Guide*.
"""
def add_tags_to_resource(client, input, options \\ []) do
request(client, "AddTagsToResource", input, options)
end
@doc """
Attempts to cancel the command specified by the Command ID.
There is no guarantee that the command will be terminated and the underlying
process stopped.
"""
def cancel_command(client, input, options \\ []) do
request(client, "CancelCommand", input, options)
end
@doc """
Stops a maintenance window execution that is already in progress and cancels any
tasks in the window that have not already starting running.
(Tasks already in progress will continue to completion.)
"""
def cancel_maintenance_window_execution(client, input, options \\ []) do
request(client, "CancelMaintenanceWindowExecution", input, options)
end
@doc """
Generates an activation code and activation ID you can use to register your
on-premises server or virtual machine (VM) with Systems Manager.
Registering these machines with Systems Manager makes it possible to manage them
using Systems Manager capabilities. You use the activation code and ID when
installing SSM Agent on machines in your hybrid environment. For more
information about requirements for managing on-premises instances and VMs using
Systems Manager, see [Setting up AWS Systems Manager for hybrid environments](https://docs.aws.amazon.com/systems-manager/latest/userguide/systems-manager-managedinstances.html)
in the *AWS Systems Manager User Guide*.
On-premises servers or VMs that are registered with Systems Manager and EC2
instances that you manage with Systems Manager are all called *managed
instances*.
"""
def create_activation(client, input, options \\ []) do
request(client, "CreateActivation", input, options)
end
@doc """
A State Manager association defines the state that you want to maintain on your
instances.
For example, an association can specify that anti-virus software must be
installed and running on your instances, or that certain ports must be closed.
For static targets, the association specifies a schedule for when the
configuration is reapplied. For dynamic targets, such as an AWS Resource Group
or an AWS Autoscaling Group, State Manager applies the configuration when new
instances are added to the group. The association also specifies actions to take
when applying the configuration. For example, an association for anti-virus
software might run once a day. If the software is not installed, then State
Manager installs it. If the software is installed, but the service is not
running, then the association might instruct State Manager to start the service.
"""
def create_association(client, input, options \\ []) do
request(client, "CreateAssociation", input, options)
end
@doc """
Associates the specified Systems Manager document with the specified instances
or targets.
When you associate a document with one or more instances using instance IDs or
tags, SSM Agent running on the instance processes the document and configures
the instance as specified.
If you associate a document with an instance that already has an associated
document, the system returns the AssociationAlreadyExists exception.
"""
def create_association_batch(client, input, options \\ []) do
request(client, "CreateAssociationBatch", input, options)
end
@doc """
Creates a Systems Manager (SSM) document.
An SSM document defines the actions that Systems Manager performs on your
managed instances. For more information about SSM documents, including
information about supported schemas, features, and syntax, see [AWS Systems Manager
Documents](https://docs.aws.amazon.com/systems-manager/latest/userguide/sysman-ssm-docs.html)
in the *AWS Systems Manager User Guide*.
"""
def create_document(client, input, options \\ []) do
request(client, "CreateDocument", input, options)
end
@doc """
Creates a new maintenance window.
The value you specify for `Duration` determines the specific end time for the
maintenance window based on the time it begins. No maintenance window tasks are
permitted to start after the resulting endtime minus the number of hours you
specify for `Cutoff`. For example, if the maintenance window starts at 3 PM, the
duration is three hours, and the value you specify for `Cutoff` is one hour, no
maintenance window tasks can start after 5 PM.
"""
def create_maintenance_window(client, input, options \\ []) do
request(client, "CreateMaintenanceWindow", input, options)
end
@doc """
Creates a new OpsItem.
You must have permission in AWS Identity and Access Management (IAM) to create a
new OpsItem. For more information, see [Getting started with OpsCenter](https://docs.aws.amazon.com/systems-manager/latest/userguide/OpsCenter-getting-started.html)
in the *AWS Systems Manager User Guide*.
Operations engineers and IT professionals use OpsCenter to view, investigate,
and remediate operational issues impacting the performance and health of their
AWS resources. For more information, see [AWS Systems Manager OpsCenter](https://docs.aws.amazon.com/systems-manager/latest/userguide/OpsCenter.html)
in the *AWS Systems Manager User Guide*.
"""
def create_ops_item(client, input, options \\ []) do
request(client, "CreateOpsItem", input, options)
end
@doc """
Creates a patch baseline.
For information about valid key and value pairs in `PatchFilters` for each
supported operating system type, see
[PatchFilter](http://docs.aws.amazon.com/systems-manager/latest/APIReference/API_PatchFilter.html).
"""
def create_patch_baseline(client, input, options \\ []) do
request(client, "CreatePatchBaseline", input, options)
end
@doc """
A resource data sync helps you view data from multiple sources in a single
location.
Systems Manager offers two types of resource data sync: `SyncToDestination` and
`SyncFromSource`.
You can configure Systems Manager Inventory to use the `SyncToDestination` type
to synchronize Inventory data from multiple AWS Regions to a single S3 bucket.
For more information, see [Configuring Resource Data Sync for Inventory](https://docs.aws.amazon.com/systems-manager/latest/userguide/sysman-inventory-datasync.html)
in the *AWS Systems Manager User Guide*.
You can configure Systems Manager Explorer to use the `SyncFromSource` type to
synchronize operational work items (OpsItems) and operational data (OpsData)
from multiple AWS Regions to a single S3 bucket. This type can synchronize
OpsItems and OpsData from multiple AWS accounts and Regions or
`EntireOrganization` by using AWS Organizations. For more information, see
[Setting up Systems Manager Explorer to display data from multiple accounts and Regions](https://docs.aws.amazon.com/systems-manager/latest/userguide/Explorer-resource-data-sync.html)
in the *AWS Systems Manager User Guide*.
A resource data sync is an asynchronous operation that returns immediately.
After a successful initial sync is completed, the system continuously syncs
data. To check the status of a sync, use the `ListResourceDataSync`.
By default, data is not encrypted in Amazon S3. We strongly recommend that you
enable encryption in Amazon S3 to ensure secure data storage. We also recommend
that you secure access to the Amazon S3 bucket by creating a restrictive bucket
policy.
"""
def create_resource_data_sync(client, input, options \\ []) do
request(client, "CreateResourceDataSync", input, options)
end
@doc """
Deletes an activation.
You are not required to delete an activation. If you delete an activation, you
can no longer use it to register additional managed instances. Deleting an
activation does not de-register managed instances. You must manually de-register
managed instances.
"""
def delete_activation(client, input, options \\ []) do
request(client, "DeleteActivation", input, options)
end
@doc """
Disassociates the specified Systems Manager document from the specified
instance.
When you disassociate a document from an instance, it does not change the
configuration of the instance. To change the configuration state of an instance
after you disassociate a document, you must create a new document with the
desired configuration and associate it with the instance.
"""
def delete_association(client, input, options \\ []) do
request(client, "DeleteAssociation", input, options)
end
@doc """
Deletes the Systems Manager document and all instance associations to the
document.
Before you delete the document, we recommend that you use `DeleteAssociation` to
disassociate all instances that are associated with the document.
"""
def delete_document(client, input, options \\ []) do
request(client, "DeleteDocument", input, options)
end
@doc """
Delete a custom inventory type or the data associated with a custom Inventory
type.
Deleting a custom inventory type is also referred to as deleting a custom
inventory schema.
"""
def delete_inventory(client, input, options \\ []) do
request(client, "DeleteInventory", input, options)
end
@doc """
Deletes a maintenance window.
"""
def delete_maintenance_window(client, input, options \\ []) do
request(client, "DeleteMaintenanceWindow", input, options)
end
@doc """
Delete a parameter from the system.
"""
def delete_parameter(client, input, options \\ []) do
request(client, "DeleteParameter", input, options)
end
@doc """
Delete a list of parameters.
"""
def delete_parameters(client, input, options \\ []) do
request(client, "DeleteParameters", input, options)
end
@doc """
Deletes a patch baseline.
"""
def delete_patch_baseline(client, input, options \\ []) do
request(client, "DeletePatchBaseline", input, options)
end
@doc """
Deletes a Resource Data Sync configuration.
After the configuration is deleted, changes to data on managed instances are no
longer synced to or from the target. Deleting a sync configuration does not
delete data.
"""
def delete_resource_data_sync(client, input, options \\ []) do
request(client, "DeleteResourceDataSync", input, options)
end
@doc """
Removes the server or virtual machine from the list of registered servers.
You can reregister the instance again at any time. If you don't plan to use Run
Command on the server, we suggest uninstalling SSM Agent first.
"""
def deregister_managed_instance(client, input, options \\ []) do
request(client, "DeregisterManagedInstance", input, options)
end
@doc """
Removes a patch group from a patch baseline.
"""
def deregister_patch_baseline_for_patch_group(client, input, options \\ []) do
request(client, "DeregisterPatchBaselineForPatchGroup", input, options)
end
@doc """
Removes a target from a maintenance window.
"""
def deregister_target_from_maintenance_window(client, input, options \\ []) do
request(client, "DeregisterTargetFromMaintenanceWindow", input, options)
end
@doc """
Removes a task from a maintenance window.
"""
def deregister_task_from_maintenance_window(client, input, options \\ []) do
request(client, "DeregisterTaskFromMaintenanceWindow", input, options)
end
@doc """
Describes details about the activation, such as the date and time the activation
was created, its expiration date, the IAM role assigned to the instances in the
activation, and the number of instances registered by using this activation.
"""
def describe_activations(client, input, options \\ []) do
request(client, "DescribeActivations", input, options)
end
@doc """
Describes the association for the specified target or instance.
If you created the association by using the `Targets` parameter, then you must
retrieve the association by using the association ID. If you created the
association by specifying an instance ID and a Systems Manager document, then
you retrieve the association by specifying the document name and the instance
ID.
"""
def describe_association(client, input, options \\ []) do
request(client, "DescribeAssociation", input, options)
end
@doc """
Use this API action to view information about a specific execution of a specific
association.
"""
def describe_association_execution_targets(client, input, options \\ []) do
request(client, "DescribeAssociationExecutionTargets", input, options)
end
@doc """
Use this API action to view all executions for a specific association ID.
"""
def describe_association_executions(client, input, options \\ []) do
request(client, "DescribeAssociationExecutions", input, options)
end
@doc """
Provides details about all active and terminated Automation executions.
"""
def describe_automation_executions(client, input, options \\ []) do
request(client, "DescribeAutomationExecutions", input, options)
end
@doc """
Information about all active and terminated step executions in an Automation
workflow.
"""
def describe_automation_step_executions(client, input, options \\ []) do
request(client, "DescribeAutomationStepExecutions", input, options)
end
@doc """
Lists all patches eligible to be included in a patch baseline.
"""
def describe_available_patches(client, input, options \\ []) do
request(client, "DescribeAvailablePatches", input, options)
end
@doc """
Describes the specified Systems Manager document.
"""
def describe_document(client, input, options \\ []) do
request(client, "DescribeDocument", input, options)
end
@doc """
Describes the permissions for a Systems Manager document.
If you created the document, you are the owner. If a document is shared, it can
either be shared privately (by specifying a user's AWS account ID) or publicly
(*All*).
"""
def describe_document_permission(client, input, options \\ []) do
request(client, "DescribeDocumentPermission", input, options)
end
@doc """
All associations for the instance(s).
"""
def describe_effective_instance_associations(client, input, options \\ []) do
request(client, "DescribeEffectiveInstanceAssociations", input, options)
end
@doc """
Retrieves the current effective patches (the patch and the approval state) for
the specified patch baseline.
Note that this API applies only to Windows patch baselines.
"""
def describe_effective_patches_for_patch_baseline(client, input, options \\ []) do
request(client, "DescribeEffectivePatchesForPatchBaseline", input, options)
end
@doc """
The status of the associations for the instance(s).
"""
def describe_instance_associations_status(client, input, options \\ []) do
request(client, "DescribeInstanceAssociationsStatus", input, options)
end
@doc """
Describes one or more of your instances, including information about the
operating system platform, the version of SSM Agent installed on the instance,
instance status, and so on.
If you specify one or more instance IDs, it returns information for those
instances. If you do not specify instance IDs, it returns information for all
your instances. If you specify an instance ID that is not valid or an instance
that you do not own, you receive an error.
The IamRole field for this API action is the Amazon Identity and Access
Management (IAM) role assigned to on-premises instances. This call does not
return the IAM role for EC2 instances.
"""
def describe_instance_information(client, input, options \\ []) do
request(client, "DescribeInstanceInformation", input, options)
end
@doc """
Retrieves the high-level patch state of one or more instances.
"""
def describe_instance_patch_states(client, input, options \\ []) do
request(client, "DescribeInstancePatchStates", input, options)
end
@doc """
Retrieves the high-level patch state for the instances in the specified patch
group.
"""
def describe_instance_patch_states_for_patch_group(client, input, options \\ []) do
request(client, "DescribeInstancePatchStatesForPatchGroup", input, options)
end
@doc """
Retrieves information about the patches on the specified instance and their
state relative to the patch baseline being used for the instance.
"""
def describe_instance_patches(client, input, options \\ []) do
request(client, "DescribeInstancePatches", input, options)
end
@doc """
Describes a specific delete inventory operation.
"""
def describe_inventory_deletions(client, input, options \\ []) do
request(client, "DescribeInventoryDeletions", input, options)
end
@doc """
Retrieves the individual task executions (one per target) for a particular task
run as part of a maintenance window execution.
"""
def describe_maintenance_window_execution_task_invocations(client, input, options \\ []) do
request(client, "DescribeMaintenanceWindowExecutionTaskInvocations", input, options)
end
@doc """
For a given maintenance window execution, lists the tasks that were run.
"""
def describe_maintenance_window_execution_tasks(client, input, options \\ []) do
request(client, "DescribeMaintenanceWindowExecutionTasks", input, options)
end
@doc """
Lists the executions of a maintenance window.
This includes information about when the maintenance window was scheduled to be
active, and information about tasks registered and run with the maintenance
window.
"""
def describe_maintenance_window_executions(client, input, options \\ []) do
request(client, "DescribeMaintenanceWindowExecutions", input, options)
end
@doc """
Retrieves information about upcoming executions of a maintenance window.
"""
def describe_maintenance_window_schedule(client, input, options \\ []) do
request(client, "DescribeMaintenanceWindowSchedule", input, options)
end
@doc """
Lists the targets registered with the maintenance window.
"""
def describe_maintenance_window_targets(client, input, options \\ []) do
request(client, "DescribeMaintenanceWindowTargets", input, options)
end
@doc """
Lists the tasks in a maintenance window.
"""
def describe_maintenance_window_tasks(client, input, options \\ []) do
request(client, "DescribeMaintenanceWindowTasks", input, options)
end
@doc """
Retrieves the maintenance windows in an AWS account.
"""
def describe_maintenance_windows(client, input, options \\ []) do
request(client, "DescribeMaintenanceWindows", input, options)
end
@doc """
Retrieves information about the maintenance window targets or tasks that an
instance is associated with.
"""
def describe_maintenance_windows_for_target(client, input, options \\ []) do
request(client, "DescribeMaintenanceWindowsForTarget", input, options)
end
@doc """
Query a set of OpsItems.
You must have permission in AWS Identity and Access Management (IAM) to query a
list of OpsItems. For more information, see [Getting started with OpsCenter](https://docs.aws.amazon.com/systems-manager/latest/userguide/OpsCenter-getting-started.html)
in the *AWS Systems Manager User Guide*.
Operations engineers and IT professionals use OpsCenter to view, investigate,
and remediate operational issues impacting the performance and health of their
AWS resources. For more information, see [AWS Systems Manager OpsCenter](https://docs.aws.amazon.com/systems-manager/latest/userguide/OpsCenter.html)
in the *AWS Systems Manager User Guide*.
"""
def describe_ops_items(client, input, options \\ []) do
request(client, "DescribeOpsItems", input, options)
end
@doc """
Get information about a parameter.
Request results are returned on a best-effort basis. If you specify `MaxResults`
in the request, the response includes information up to the limit specified. The
number of items returned, however, can be between zero and the value of
`MaxResults`. If the service reaches an internal limit while processing the
results, it stops the operation and returns the matching values up to that point
and a `NextToken`. You can specify the `NextToken` in a subsequent call to get
the next set of results.
"""
def describe_parameters(client, input, options \\ []) do
request(client, "DescribeParameters", input, options)
end
@doc """
Lists the patch baselines in your AWS account.
"""
def describe_patch_baselines(client, input, options \\ []) do
request(client, "DescribePatchBaselines", input, options)
end
@doc """
Returns high-level aggregated patch compliance state for a patch group.
"""
def describe_patch_group_state(client, input, options \\ []) do
request(client, "DescribePatchGroupState", input, options)
end
@doc """
Lists all patch groups that have been registered with patch baselines.
"""
def describe_patch_groups(client, input, options \\ []) do
request(client, "DescribePatchGroups", input, options)
end
@doc """
Lists the properties of available patches organized by product, product family,
classification, severity, and other properties of available patches.
You can use the reported properties in the filters you specify in requests for
actions such as `CreatePatchBaseline`, `UpdatePatchBaseline`,
`DescribeAvailablePatches`, and `DescribePatchBaselines`.
The following section lists the properties that can be used in filters for each
major operating system type:
## Definitions
### AMAZON_LINUX
Valid properties: PRODUCT, CLASSIFICATION, SEVERITY
### AMAZON_LINUX_2
Valid properties: PRODUCT, CLASSIFICATION, SEVERITY
### CENTOS
Valid properties: PRODUCT, CLASSIFICATION, SEVERITY
### DEBIAN
Valid properties: PRODUCT, PRIORITY
### ORACLE_LINUX
Valid properties: PRODUCT, CLASSIFICATION, SEVERITY
### REDHAT_ENTERPRISE_LINUX
Valid properties: PRODUCT, CLASSIFICATION, SEVERITY
### SUSE
Valid properties: PRODUCT, CLASSIFICATION, SEVERITY
### UBUNTU
Valid properties: PRODUCT, PRIORITY
### WINDOWS
Valid properties: PRODUCT, PRODUCT_FAMILY, CLASSIFICATION, MSRC_SEVERITY
"""
def describe_patch_properties(client, input, options \\ []) do
request(client, "DescribePatchProperties", input, options)
end
@doc """
Retrieves a list of all active sessions (both connected and disconnected) or
terminated sessions from the past 30 days.
"""
def describe_sessions(client, input, options \\ []) do
request(client, "DescribeSessions", input, options)
end
@doc """
Get detailed information about a particular Automation execution.
"""
def get_automation_execution(client, input, options \\ []) do
request(client, "GetAutomationExecution", input, options)
end
@doc """
Gets the state of the AWS Systems Manager Change Calendar at an optional,
specified time.
If you specify a time, `GetCalendarState` returns the state of the calendar at a
specific time, and returns the next time that the Change Calendar state will
transition. If you do not specify a time, `GetCalendarState` assumes the current
time. Change Calendar entries have two possible states: `OPEN` or `CLOSED`.
If you specify more than one calendar in a request, the command returns the
status of `OPEN` only if all calendars in the request are open. If one or more
calendars in the request are closed, the status returned is `CLOSED`.
For more information about Systems Manager Change Calendar, see [AWS Systems Manager Change
Calendar](https://docs.aws.amazon.com/systems-manager/latest/userguide/systems-manager-change-calendar.html)
in the *AWS Systems Manager User Guide*.
"""
def get_calendar_state(client, input, options \\ []) do
request(client, "GetCalendarState", input, options)
end
@doc """
Returns detailed information about command execution for an invocation or
plugin.
"""
def get_command_invocation(client, input, options \\ []) do
request(client, "GetCommandInvocation", input, options)
end
@doc """
Retrieves the Session Manager connection status for an instance to determine
whether it is running and ready to receive Session Manager connections.
"""
def get_connection_status(client, input, options \\ []) do
request(client, "GetConnectionStatus", input, options)
end
@doc """
Retrieves the default patch baseline.
Note that Systems Manager supports creating multiple default patch baselines.
For example, you can create a default patch baseline for each operating system.
If you do not specify an operating system value, the default patch baseline for
Windows is returned.
"""
def get_default_patch_baseline(client, input, options \\ []) do
request(client, "GetDefaultPatchBaseline", input, options)
end
@doc """
Retrieves the current snapshot for the patch baseline the instance uses.
This API is primarily used by the AWS-RunPatchBaseline Systems Manager document.
"""
def get_deployable_patch_snapshot_for_instance(client, input, options \\ []) do
request(client, "GetDeployablePatchSnapshotForInstance", input, options)
end
@doc """
Gets the contents of the specified Systems Manager document.
"""
def get_document(client, input, options \\ []) do
request(client, "GetDocument", input, options)
end
@doc """
Query inventory information.
"""
def get_inventory(client, input, options \\ []) do
request(client, "GetInventory", input, options)
end
@doc """
Return a list of inventory type names for the account, or return a list of
attribute names for a specific Inventory item type.
"""
def get_inventory_schema(client, input, options \\ []) do
request(client, "GetInventorySchema", input, options)
end
@doc """
Retrieves a maintenance window.
"""
def get_maintenance_window(client, input, options \\ []) do
request(client, "GetMaintenanceWindow", input, options)
end
@doc """
Retrieves details about a specific a maintenance window execution.
"""
def get_maintenance_window_execution(client, input, options \\ []) do
request(client, "GetMaintenanceWindowExecution", input, options)
end
@doc """
Retrieves the details about a specific task run as part of a maintenance window
execution.
"""
def get_maintenance_window_execution_task(client, input, options \\ []) do
request(client, "GetMaintenanceWindowExecutionTask", input, options)
end
@doc """
Retrieves information about a specific task running on a specific target.
"""
def get_maintenance_window_execution_task_invocation(client, input, options \\ []) do
request(client, "GetMaintenanceWindowExecutionTaskInvocation", input, options)
end
@doc """
Lists the tasks in a maintenance window.
"""
def get_maintenance_window_task(client, input, options \\ []) do
request(client, "GetMaintenanceWindowTask", input, options)
end
@doc """
Get information about an OpsItem by using the ID.
You must have permission in AWS Identity and Access Management (IAM) to view
information about an OpsItem. For more information, see [Getting started with OpsCenter](https://docs.aws.amazon.com/systems-manager/latest/userguide/OpsCenter-getting-started.html)
in the *AWS Systems Manager User Guide*.
Operations engineers and IT professionals use OpsCenter to view, investigate,
and remediate operational issues impacting the performance and health of their
AWS resources. For more information, see [AWS Systems Manager OpsCenter](https://docs.aws.amazon.com/systems-manager/latest/userguide/OpsCenter.html)
in the *AWS Systems Manager User Guide*.
"""
def get_ops_item(client, input, options \\ []) do
request(client, "GetOpsItem", input, options)
end
@doc """
View a summary of OpsItems based on specified filters and aggregators.
"""
def get_ops_summary(client, input, options \\ []) do
request(client, "GetOpsSummary", input, options)
end
@doc """
Get information about a parameter by using the parameter name.
Don't confuse this API action with the `GetParameters` API action.
"""
def get_parameter(client, input, options \\ []) do
request(client, "GetParameter", input, options)
end
@doc """
Retrieves the history of all changes to a parameter.
"""
def get_parameter_history(client, input, options \\ []) do
request(client, "GetParameterHistory", input, options)
end
@doc """
Get information about one or more parameters by specifying multiple
parameter names.
Don't confuse this API action with the `GetParameter` API action.
"""
def get_parameters(client, input, options \\ []) do
request(client, "GetParameters", input, options)
end
@doc """
Retrieve information about one or more parameters in a specific hierarchy.
Request results are returned on a best-effort basis. If you specify `MaxResults`
in the request, the response includes information up to the limit specified. The
number of items returned, however, can be between zero and the value of
`MaxResults`. If the service reaches an internal limit while processing the
results, it stops the operation and returns the matching values up to that point
and a `NextToken`. You can specify the `NextToken` in a subsequent call to get
the next set of results.
"""
def get_parameters_by_path(client, input, options \\ []) do
request(client, "GetParametersByPath", input, options)
end
@doc """
Retrieves information about a patch baseline.
"""
def get_patch_baseline(client, input, options \\ []) do
request(client, "GetPatchBaseline", input, options)
end
@doc """
Retrieves the patch baseline that should be used for the specified patch group.
"""
def get_patch_baseline_for_patch_group(client, input, options \\ []) do
request(client, "GetPatchBaselineForPatchGroup", input, options)
end
@doc """
`ServiceSetting` is an account-level setting for an AWS service.
This setting defines how a user interacts with or uses a service or a feature of
a service. For example, if an AWS service charges money to the account based on
feature or service usage, then the AWS service team might create a default
setting of "false". This means the user can't use this feature unless they
change the setting to "true" and intentionally opt in for a paid feature.
Services map a `SettingId` object to a setting value. AWS services teams define
the default value for a `SettingId`. You can't create a new `SettingId`, but you
can overwrite the default value if you have the `ssm:UpdateServiceSetting`
permission for the setting. Use the `UpdateServiceSetting` API action to change
the default setting. Or use the `ResetServiceSetting` to change the value back
to the original value defined by the AWS service team.
Query the current service setting for the account.
"""
def get_service_setting(client, input, options \\ []) do
request(client, "GetServiceSetting", input, options)
end
@doc """
A parameter label is a user-defined alias to help you manage different versions
of a parameter.
When you modify a parameter, Systems Manager automatically saves a new version
and increments the version number by one. A label can help you remember the
purpose of a parameter when there are multiple versions.
Parameter labels have the following requirements and restrictions.
* A version of a parameter can have a maximum of 10 labels.
* You can't attach the same label to different versions of the same
parameter. For example, if version 1 has the label Production, then you can't
attach Production to version 2.
* You can move a label from one version of a parameter to another.
* You can't create a label when you create a new parameter. You must
attach a label to a specific version of a parameter.
* You can't delete a parameter label. If you no longer want to use a
parameter label, then you must move it to a different version of a parameter.
* A label can have a maximum of 100 characters.
* Labels can contain letters (case sensitive), numbers, periods (.),
hyphens (-), or underscores (_).
* Labels can't begin with a number, "aws," or "ssm" (not case
sensitive). If a label fails to meet these requirements, then the label is not
associated with a parameter and the system displays it in the list of
InvalidLabels.
"""
def label_parameter_version(client, input, options \\ []) do
request(client, "LabelParameterVersion", input, options)
end
@doc """
Retrieves all versions of an association for a specific association ID.
"""
def list_association_versions(client, input, options \\ []) do
request(client, "ListAssociationVersions", input, options)
end
@doc """
Returns all State Manager associations in the current AWS account and Region.
You can limit the results to a specific State Manager association document or
instance by specifying a filter.
"""
def list_associations(client, input, options \\ []) do
request(client, "ListAssociations", input, options)
end
@doc """
An invocation is copy of a command sent to a specific instance.
A command can apply to one or more instances. A command invocation applies to
one instance. For example, if a user runs SendCommand against three instances,
then a command invocation is created for each requested instance ID.
ListCommandInvocations provide status about command execution.
"""
def list_command_invocations(client, input, options \\ []) do
request(client, "ListCommandInvocations", input, options)
end
@doc """
Lists the commands requested by users of the AWS account.
"""
def list_commands(client, input, options \\ []) do
request(client, "ListCommands", input, options)
end
@doc """
For a specified resource ID, this API action returns a list of compliance
statuses for different resource types.
Currently, you can only specify one resource ID per call. List results depend on
the criteria specified in the filter.
"""
def list_compliance_items(client, input, options \\ []) do
request(client, "ListComplianceItems", input, options)
end
@doc """
Returns a summary count of compliant and non-compliant resources for a
compliance type.
For example, this call can return State Manager associations, patches, or custom
compliance types according to the filter criteria that you specify.
"""
def list_compliance_summaries(client, input, options \\ []) do
request(client, "ListComplianceSummaries", input, options)
end
@doc """
List all versions for a document.
"""
def list_document_versions(client, input, options \\ []) do
request(client, "ListDocumentVersions", input, options)
end
@doc """
Returns all Systems Manager (SSM) documents in the current AWS account and
Region.
You can limit the results of this request by using a filter.
"""
def list_documents(client, input, options \\ []) do
request(client, "ListDocuments", input, options)
end
@doc """
A list of inventory items returned by the request.
"""
def list_inventory_entries(client, input, options \\ []) do
request(client, "ListInventoryEntries", input, options)
end
@doc """
Returns a resource-level summary count.
The summary includes information about compliant and non-compliant statuses and
detailed compliance-item severity counts, according to the filter criteria you
specify.
"""
def list_resource_compliance_summaries(client, input, options \\ []) do
request(client, "ListResourceComplianceSummaries", input, options)
end
@doc """
Lists your resource data sync configurations.
Includes information about the last time a sync attempted to start, the last
sync status, and the last time a sync successfully completed.
The number of sync configurations might be too large to return using a single
call to `ListResourceDataSync`. You can limit the number of sync configurations
returned by using the `MaxResults` parameter. To determine whether there are
more sync configurations to list, check the value of `NextToken` in the output.
If there are more sync configurations to list, you can request them by
specifying the `NextToken` returned in the call to the parameter of a subsequent
call.
"""
def list_resource_data_sync(client, input, options \\ []) do
request(client, "ListResourceDataSync", input, options)
end
@doc """
Returns a list of the tags assigned to the specified resource.
"""
def list_tags_for_resource(client, input, options \\ []) do
request(client, "ListTagsForResource", input, options)
end
@doc """
Shares a Systems Manager document publicly or privately.
If you share a document privately, you must specify the AWS user account IDs for
those people who can use the document. If you share a document publicly, you
must specify *All* as the account ID.
"""
def modify_document_permission(client, input, options \\ []) do
request(client, "ModifyDocumentPermission", input, options)
end
@doc """
Registers a compliance type and other compliance details on a designated
resource.
This action lets you register custom compliance details with a resource. This
call overwrites existing compliance information on the resource, so you must
provide a full list of compliance items each time that you send the request.
ComplianceType can be one of the following:
* ExecutionId: The execution ID when the patch, association, or
custom compliance item was applied.
* ExecutionType: Specify patch, association, or Custom:`string`.
* ExecutionTime. The time the patch, association, or custom
compliance item was applied to the instance.
* Id: The patch, association, or custom compliance ID.
* Title: A title.
* Status: The status of the compliance item. For example, `approved`
for patches, or `Failed` for associations.
* Severity: A patch severity. For example, `critical`.
* DocumentName: A SSM document name. For example,
AWS-RunPatchBaseline.
* DocumentVersion: An SSM document version number. For example, 4.
* Classification: A patch classification. For example, `security
updates`.
* PatchBaselineId: A patch baseline ID.
* PatchSeverity: A patch severity. For example, `Critical`.
* PatchState: A patch state. For example,
`InstancesWithFailedPatches`.
* PatchGroup: The name of a patch group.
* InstalledTime: The time the association, patch, or custom
compliance item was applied to the resource. Specify the time by using the
following format: yyyy-MM-dd'T'HH:mm:ss'Z'
"""
def put_compliance_items(client, input, options \\ []) do
request(client, "PutComplianceItems", input, options)
end
@doc """
Bulk update custom inventory items on one more instance.
The request adds an inventory item, if it doesn't already exist, or updates an
inventory item, if it does exist.
"""
def put_inventory(client, input, options \\ []) do
request(client, "PutInventory", input, options)
end
@doc """
Add a parameter to the system.
"""
def put_parameter(client, input, options \\ []) do
request(client, "PutParameter", input, options)
end
@doc """
Defines the default patch baseline for the relevant operating system.
To reset the AWS predefined patch baseline as the default, specify the full
patch baseline ARN as the baseline ID value. For example, for CentOS, specify
`arn:aws:ssm:us-east-2:733109147000:patchbaseline/pb-0574b43a65ea646ed` instead
of `pb-0574b43a65ea646ed`.
"""
def register_default_patch_baseline(client, input, options \\ []) do
request(client, "RegisterDefaultPatchBaseline", input, options)
end
@doc """
Registers a patch baseline for a patch group.
"""
def register_patch_baseline_for_patch_group(client, input, options \\ []) do
request(client, "RegisterPatchBaselineForPatchGroup", input, options)
end
@doc """
Registers a target with a maintenance window.
"""
def register_target_with_maintenance_window(client, input, options \\ []) do
request(client, "RegisterTargetWithMaintenanceWindow", input, options)
end
@doc """
Adds a new task to a maintenance window.
"""
def register_task_with_maintenance_window(client, input, options \\ []) do
request(client, "RegisterTaskWithMaintenanceWindow", input, options)
end
@doc """
Removes tag keys from the specified resource.
"""
def remove_tags_from_resource(client, input, options \\ []) do
request(client, "RemoveTagsFromResource", input, options)
end
@doc """
`ServiceSetting` is an account-level setting for an AWS service.
This setting defines how a user interacts with or uses a service or a feature of
a service. For example, if an AWS service charges money to the account based on
feature or service usage, then the AWS service team might create a default
setting of "false". This means the user can't use this feature unless they
change the setting to "true" and intentionally opt in for a paid feature.
Services map a `SettingId` object to a setting value. AWS services teams define
the default value for a `SettingId`. You can't create a new `SettingId`, but you
can overwrite the default value if you have the `ssm:UpdateServiceSetting`
permission for the setting. Use the `GetServiceSetting` API action to view the
current value. Use the `UpdateServiceSetting` API action to change the default
setting.
Reset the service setting for the account to the default value as provisioned by
the AWS service team.
"""
def reset_service_setting(client, input, options \\ []) do
request(client, "ResetServiceSetting", input, options)
end
@doc """
Reconnects a session to an instance after it has been disconnected.
Connections can be resumed for disconnected sessions, but not terminated
sessions.
This command is primarily for use by client machines to automatically reconnect
during intermittent network issues. It is not intended for any other use.
"""
def resume_session(client, input, options \\ []) do
request(client, "ResumeSession", input, options)
end
@doc """
Sends a signal to an Automation execution to change the current behavior or
status of the execution.
"""
def send_automation_signal(client, input, options \\ []) do
request(client, "SendAutomationSignal", input, options)
end
@doc """
Runs commands on one or more managed instances.
"""
def send_command(client, input, options \\ []) do
request(client, "SendCommand", input, options)
end
@doc """
Use this API action to run an association immediately and only one time.
This action can be helpful when troubleshooting associations.
"""
def start_associations_once(client, input, options \\ []) do
request(client, "StartAssociationsOnce", input, options)
end
@doc """
Initiates execution of an Automation document.
"""
def start_automation_execution(client, input, options \\ []) do
request(client, "StartAutomationExecution", input, options)
end
@doc """
Initiates a connection to a target (for example, an instance) for a Session
Manager session.
Returns a URL and token that can be used to open a WebSocket connection for
sending input and receiving outputs.
AWS CLI usage: `start-session` is an interactive command that requires the
Session Manager plugin to be installed on the client machine making the call.
For information, see [Install the Session Manager plugin for the AWS CLI](https://docs.aws.amazon.com/systems-manager/latest/userguide/session-manager-working-with-install-plugin.html)
in the *AWS Systems Manager User Guide*.
AWS Tools for PowerShell usage: Start-SSMSession is not currently supported by
AWS Tools for PowerShell on Windows local machines.
"""
def start_session(client, input, options \\ []) do
request(client, "StartSession", input, options)
end
@doc """
Stop an Automation that is currently running.
"""
def stop_automation_execution(client, input, options \\ []) do
request(client, "StopAutomationExecution", input, options)
end
@doc """
Permanently ends a session and closes the data connection between the Session
Manager client and SSM Agent on the instance.
A terminated session cannot be resumed.
"""
def terminate_session(client, input, options \\ []) do
request(client, "TerminateSession", input, options)
end
@doc """
Updates an association.
You can update the association name and version, the document version, schedule,
parameters, and Amazon S3 output.
In order to call this API action, your IAM user account, group, or role must be
configured with permission to call the `DescribeAssociation` API action. If you
don't have permission to call DescribeAssociation, then you receive the
following error: `An error occurred (AccessDeniedException) when calling the
UpdateAssociation operation: User: <user_arn> is not authorized to perform:
ssm:DescribeAssociation on resource: <resource_arn>`
When you update an association, the association immediately runs against the
specified targets.
"""
def update_association(client, input, options \\ []) do
request(client, "UpdateAssociation", input, options)
end
@doc """
Updates the status of the Systems Manager document associated with the specified
instance.
"""
def update_association_status(client, input, options \\ []) do
request(client, "UpdateAssociationStatus", input, options)
end
@doc """
Updates one or more values for an SSM document.
"""
def update_document(client, input, options \\ []) do
request(client, "UpdateDocument", input, options)
end
@doc """
Set the default version of a document.
"""
def update_document_default_version(client, input, options \\ []) do
request(client, "UpdateDocumentDefaultVersion", input, options)
end
@doc """
Updates an existing maintenance window.
Only specified parameters are modified.
The value you specify for `Duration` determines the specific end time for the
maintenance window based on the time it begins. No maintenance window tasks are
permitted to start after the resulting endtime minus the number of hours you
specify for `Cutoff`. For example, if the maintenance window starts at 3 PM, the
duration is three hours, and the value you specify for `Cutoff` is one hour, no
maintenance window tasks can start after 5 PM.
"""
def update_maintenance_window(client, input, options \\ []) do
request(client, "UpdateMaintenanceWindow", input, options)
end
@doc """
Modifies the target of an existing maintenance window.
You can change the following:
* Name
* Description
* Owner
* IDs for an ID target
* Tags for a Tag target
* From any supported tag type to another. The three supported tag
types are ID target, Tag target, and resource group. For more information, see
`Target`.
If a parameter is null, then the corresponding field is not modified.
"""
def update_maintenance_window_target(client, input, options \\ []) do
request(client, "UpdateMaintenanceWindowTarget", input, options)
end
@doc """
Modifies a task assigned to a maintenance window.
You can't change the task type, but you can change the following values:
* TaskARN. For example, you can change a RUN_COMMAND task from
AWS-RunPowerShellScript to AWS-RunShellScript.
* ServiceRoleArn
* TaskInvocationParameters
* Priority
* MaxConcurrency
* MaxErrors
If the value for a parameter in `UpdateMaintenanceWindowTask` is null, then the
corresponding field is not modified. If you set `Replace` to true, then all
fields required by the `RegisterTaskWithMaintenanceWindow` action are required
for this request. Optional fields that aren't specified are set to null.
When you update a maintenance window task that has options specified in
`TaskInvocationParameters`, you must provide again all the
`TaskInvocationParameters` values that you want to retain. The values you do not
specify again are removed. For example, suppose that when you registered a Run
Command task, you specified `TaskInvocationParameters` values for `Comment`,
`NotificationConfig`, and `OutputS3BucketName`. If you update the maintenance
window task and specify only a different `OutputS3BucketName` value, the values
for `Comment` and `NotificationConfig` are removed.
"""
def update_maintenance_window_task(client, input, options \\ []) do
request(client, "UpdateMaintenanceWindowTask", input, options)
end
@doc """
Changes the Amazon Identity and Access Management (IAM) role that is assigned to
the on-premises instance or virtual machines (VM).
IAM roles are first assigned to these hybrid instances during the activation
process. For more information, see `CreateActivation`.
"""
def update_managed_instance_role(client, input, options \\ []) do
request(client, "UpdateManagedInstanceRole", input, options)
end
@doc """
Edit or change an OpsItem.
You must have permission in AWS Identity and Access Management (IAM) to update
an OpsItem. For more information, see [Getting started with OpsCenter](https://docs.aws.amazon.com/systems-manager/latest/userguide/OpsCenter-getting-started.html)
in the *AWS Systems Manager User Guide*.
Operations engineers and IT professionals use OpsCenter to view, investigate,
and remediate operational issues impacting the performance and health of their
AWS resources. For more information, see [AWS Systems Manager OpsCenter](https://docs.aws.amazon.com/systems-manager/latest/userguide/OpsCenter.html)
in the *AWS Systems Manager User Guide*.
"""
def update_ops_item(client, input, options \\ []) do
request(client, "UpdateOpsItem", input, options)
end
@doc """
Modifies an existing patch baseline.
Fields not specified in the request are left unchanged.
For information about valid key and value pairs in `PatchFilters` for each
supported operating system type, see
[PatchFilter](http://docs.aws.amazon.com/systems-manager/latest/APIReference/API_PatchFilter.html).
"""
def update_patch_baseline(client, input, options \\ []) do
request(client, "UpdatePatchBaseline", input, options)
end
@doc """
Update a resource data sync.
After you create a resource data sync for a Region, you can't change the account
options for that sync. For example, if you create a sync in the us-east-2 (Ohio)
Region and you choose the Include only the current account option, you can't
edit that sync later and choose the Include all accounts from my AWS
Organizations configuration option. Instead, you must delete the first resource
data sync, and create a new one.
This API action only supports a resource data sync that was created with a
SyncFromSource `SyncType`.
"""
def update_resource_data_sync(client, input, options \\ []) do
request(client, "UpdateResourceDataSync", input, options)
end
@doc """
`ServiceSetting` is an account-level setting for an AWS service.
This setting defines how a user interacts with or uses a service or a feature of
a service. For example, if an AWS service charges money to the account based on
feature or service usage, then the AWS service team might create a default
setting of "false". This means the user can't use this feature unless they
change the setting to "true" and intentionally opt in for a paid feature.
Services map a `SettingId` object to a setting value. AWS services teams define
the default value for a `SettingId`. You can't create a new `SettingId`, but you
can overwrite the default value if you have the `ssm:UpdateServiceSetting`
permission for the setting. Use the `GetServiceSetting` API action to view the
current value. Or, use the `ResetServiceSetting` to change the value back to the
original value defined by the AWS service team.
Update the service setting for the account.
"""
def update_service_setting(client, input, options \\ []) do
request(client, "UpdateServiceSetting", input, options)
end
# Signs and dispatches a single SSM API call.
#
# `action` is the AmazonSSM operation name (e.g. "GetParameter"), `input`
# is the request payload (JSON-encoded below), and `options` is passed
# through to the underlying HTTP client.
@spec request(AWS.Client.t(), binary(), map(), list()) ::
{:ok, map() | nil, map()}
| {:error, term()}
defp request(client, action, input, options) do
# Tag the client with this service before building the host and signing.
client = %{client | service: "ssm"}
host = build_host("ssm", client)
url = build_url(host, client)
headers = [
{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
# The operation is selected via the X-Amz-Target header, as required by
# the application/x-amz-json-1.1 protocol.
{"X-Amz-Target", "AmazonSSM.#{action}"}
]
# Encode before signing: SigV4 signs a hash of the request body, so the
# payload must be final at signing time.
payload = encode!(client, input)
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
post(client, url, payload, headers, options)
end
# Performs the signed POST and normalizes the response.
#
# Returns `{:ok, decoded_body_or_nil, raw_response}` on HTTP 200 (an empty
# body decodes to `nil`), `{:error, {:unexpected_response, response}}` for
# any other status, and passes transport-level `{:error, reason}` through
# unchanged.
defp post(client, url, payload, headers, options) do
case AWS.Client.request(client, :post, url, payload, headers, options) do
{:ok, %{status_code: 200, body: body} = response} ->
# `if` without `else` yields nil for an empty body, matching the
# `{:ok, map() | nil, map()}` spec on request/4.
body = if body != "", do: decode!(client, body)
{:ok, body, response}
{:ok, response} ->
{:error, {:unexpected_response, response}}
error = {:error, _reason} -> error
end
end
# Resolves the request host for an endpoint prefix and client.
#
# The special "local" region resolves to the client's configured endpoint,
# or to "localhost" when no endpoint is set; any other region produces
# `<prefix>.<region>.<endpoint>`.
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}), do: endpoint
defp build_host(_endpoint_prefix, %{region: "local"}), do: "localhost"

defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}),
  do: "#{endpoint_prefix}.#{region}.#{endpoint}"
# Assembles the full request URL from the host and the client's
# configured protocol and port.
defp build_url(host, %{proto: proto, port: port}), do: "#{proto}://#{host}:#{port}/"
# JSON-encodes `payload` via the client's configured encoder.
defp encode!(client, payload), do: AWS.Client.encode!(client, payload, :json)
# JSON-decodes `payload` via the client's configured decoder.
defp decode!(client, payload), do: AWS.Client.decode!(client, payload, :json)
end
|
lib/aws/generated/ssm.ex
| 0.894738 | 0.593138 |
ssm.ex
|
starcoder
|
defmodule Commanded.Registration.SwarmRegistry.Monitor do
  @moduledoc """
  A `GenServer` process that starts and monitors another process that is
  distributed using Swarm.

  This is used to ensure the process can be supervised by a `Supervisor`.
  """

  use GenServer

  require Logger

  alias Commanded.Registration.SwarmRegistry.Monitor

  # `pid`/`ref` are `nil` until the distributed process has been started and
  # monitored.
  defstruct [:name, :module, :args, :pid, :ref]

  @doc false
  def start_link(name, module, args) do
    state = %Monitor{name: name, module: module, args: args}

    GenServer.start_link(__MODULE__, state)
  end

  @doc false
  @impl GenServer
  def init(%Monitor{} = state) do
    # Defer Swarm registration until after `init/1` returns so the starting
    # supervisor is never blocked waiting on the cluster.
    send(self(), :start_distributed_process)

    {:ok, state}
  end

  # Forward casts to the monitored process, once started.
  @doc false
  @impl GenServer
  def handle_cast(request, %Monitor{pid: pid} = state) when is_pid(pid) do
    GenServer.cast(pid, request)

    {:noreply, state}
  end

  # Forward calls to the monitored process, once started.
  @doc false
  @impl GenServer
  def handle_call(request, _from, %Monitor{pid: pid} = state) when is_pid(pid) do
    reply = GenServer.call(pid, request)

    {:reply, reply, state}
  end

  # Start a process using Swarm to distribute it amongst the available nodes
  # in the cluster.
  #
  # NOTE: `@doc` is deliberately not attached to the individual
  # `handle_info/2` clauses below — repeating `@doc` per clause triggers
  # "redefining @doc attribute" compiler warnings.
  @impl GenServer
  def handle_info(:start_distributed_process, %Monitor{} = state) do
    %Monitor{name: name, module: module, args: args} = state

    debug(fn ->
      "[#{Node.self()}] Attempting to start distributed process: #{inspect(name)} (#{
        inspect(module)
      } with args #{inspect(args)})"
    end)

    case Swarm.register_name(name, GenServer, :start_link, [module, args]) do
      {:ok, pid} ->
        debug(fn ->
          "[#{inspect(Node.self())}] Started named process #{inspect(name)} on #{
            inspect(node(pid))
          } (#{inspect(pid)})"
        end)

        # Unlink so a crash of the distributed process cannot take this
        # monitor down with it; the monitor reference is used instead.
        Process.unlink(pid)

        monitor(pid, state)

      {:error, {:already_registered, pid}} ->
        debug(fn ->
          "[#{inspect(Node.self())}] Named process #{inspect(name)} already started on #{
            inspect(node(pid))
          } (#{inspect(pid)})"
        end)

        monitor(pid, state)

      {:error, :no_node_available} ->
        debug(fn ->
          "[#{inspect(Node.self())}] Failed to start distributed process #{inspect(name)} due to no node available, will attempt to restart in 1s"
        end)

        attempt_process_restart()

        {:noreply, state}

      {:error, reason} ->
        info(fn ->
          "[#{inspect(Node.self())}] Failed to start distributed process #{inspect(name)} due to: #{
            inspect(reason)
          }"
        end)

        {:stop, reason, state}
    end
  end

  # Attempt to restart the monitored process when it is shutdown, but requests
  # restart, or due to `:noconnection` or `:noproc`.
  def handle_info({:DOWN, ref, :process, _pid, reason}, %Monitor{name: name, ref: ref} = state)
      when reason in [:noconnection, :noproc, :shutdown, {:shutdown, :attempt_restart}] do
    debug(fn ->
      "[#{Node.self()}] Named process #{inspect(name)} down due to: #{inspect(reason)}"
    end)

    # `:flush` removes any `:DOWN` message already queued for this reference;
    # otherwise a late `:DOWN` arriving while `pid` is `nil` would match no
    # `handle_info/2` clause and crash the monitor.
    Process.demonitor(ref, [:flush])

    attempt_process_restart()

    {:noreply, %Monitor{state | pid: nil, ref: nil}}
  end

  # Stop the monitor when the monitored process is shutdown and requests not
  # to be restarted.
  def handle_info(
        {:DOWN, ref, :process, _pid, {:shutdown, :no_restart}},
        %Monitor{name: name, ref: ref} = state
      ) do
    debug(fn ->
      "[#{Node.self()}] Named process #{inspect(name)} down due to: {:shutdown, :no_restart}"
    end)

    stop(:shutdown, state)
  end

  # Stop the monitor when the monitored process goes down for any other reason.
  def handle_info({:DOWN, ref, :process, _pid, reason}, %Monitor{name: name, ref: ref} = state) do
    debug(fn ->
      "[#{Node.self()}] Named process #{inspect(name)} down due to: #{inspect(reason)}"
    end)

    stop(reason, state)
  end

  # Send any other messages to the monitored process, if available.
  def handle_info(message, %Monitor{pid: pid} = state) when is_pid(pid) do
    send(pid, message)

    {:noreply, state}
  end

  defp stop(reason, %Monitor{ref: ref} = state) do
    # Flush any already-delivered `:DOWN` message (see restart clause above).
    Process.demonitor(ref, [:flush])

    {:stop, reason, %Monitor{state | pid: nil, ref: nil}}
  end

  defp attempt_process_restart(delay \\ restart_delay()) do
    Process.send_after(self(), :start_distributed_process, delay)
  end

  defp monitor(pid, %Monitor{} = state) do
    ref = Process.monitor(pid)

    {:noreply, %Monitor{state | pid: pid, ref: ref}}
  end

  defp restart_delay do
    Application.get_env(:commanded_swarm_registry, :restart_delay, 1_000)
  end

  defdelegate debug(chardata_or_fun), to: Logger
  defdelegate info(chardata_or_fun), to: Logger
end
|
lib/commanded/registration/swarm_registry/monitor.ex
| 0.68763 | 0.455986 |
monitor.ex
|
starcoder
|
defmodule SpadesGame.GamePlayer do
  @moduledoc """
  Represents a player inside a game of spades.
  They will have a hand of cards, a bid etc.
  """
  alias SpadesGame.{Deck, Card, GamePlayer}

  @derive Jason.Encoder
  defstruct [:hand, :tricks_won, :bid]

  use Accessible

  @type t :: %GamePlayer{
          hand: Deck.t(),
          tricks_won: integer,
          bid: nil | integer
        }

  @doc """
  new/0: Create a new player with an empty hand.
  """
  @spec new() :: GamePlayer.t()
  def new do
    new(Deck.new_empty())
  end

  @doc """
  new/1: Create a new player with the hand passed in.
  """
  @spec new(Deck.t()) :: GamePlayer.t()
  def new(hand) do
    %GamePlayer{hand: hand, tricks_won: 0, bid: nil}
  end

  @doc """
  won_trick/1: Increment the number of tricks won by 1.
  """
  @spec won_trick(GamePlayer.t()) :: GamePlayer.t()
  def won_trick(%GamePlayer{tricks_won: tricks} = player) do
    %GamePlayer{player | tricks_won: tricks + 1}
  end

  # A bid is either `nil` (not yet made) or within 0..13.
  @spec set_bid(GamePlayer.t(), nil | integer) :: GamePlayer.t()
  def set_bid(player, bid) when is_nil(bid) or (bid >= 0 and bid <= 13) do
    %GamePlayer{player | bid: bid}
  end

  @doc """
  play/1: Have a player move a card from their hand to a trick.

  Returns `{:ok, player, card}` on success, `{:error, player}` when the
  card is not in the player's hand.
  """
  @spec play(GamePlayer.t(), Card.t()) ::
          {:ok, GamePlayer.t(), Card.t()} | {:error, GamePlayer.t()}
  def play(player, card) do
    if card in player.hand do
      {:ok, remove_from_hand(player, card), card}
    else
      {:error, player}
    end
  end

  # Remove the given card from the player's hand.
  # Private: callers have already verified the card is present.
  @spec remove_from_hand(GamePlayer.t(), Card.t()) :: GamePlayer.t()
  defp remove_from_hand(player, card) do
    %{player | hand: Enum.reject(player.hand, &(&1 == card))}
  end

  @spec has_suit?(GamePlayer.t(), :s | :h | :c | :d) :: boolean
  def has_suit?(player, suit) do
    Enum.any?(player.hand, &(&1.suit == suit))
  end

  # Number of cards currently held.
  def hand_length(player), do: length(player.hand)

  # Number of spades currently held.
  def spades_length(player) do
    Enum.count(player.hand, &(&1.suit == :s))
  end
end
|
backend/lib/spades_game/game_player.ex
| 0.798108 | 0.527682 |
game_player.ex
|
starcoder
|
defmodule Membrane.AudioMixer do
  @moduledoc """
  This element performs audio mixing.

  Audio format can be set as an element option or received through caps from input pads. All
  received caps have to be identical and match ones in element option (if that option is
  different from `nil`).

  Input pads can have offset - it tells how much silence should be added before first sample
  from that pad. Offset has to be positive.

  Mixer mixes only raw audio (PCM), so some parser may be needed to precede it in pipeline.
  """

  use Membrane.Filter
  use Bunch

  require Membrane.Logger

  alias Membrane.AudioMixer.{Adder, ClipPreventingAdder}
  alias Membrane.Buffer
  alias Membrane.Caps.Audio.Raw
  alias Membrane.Caps.Matcher
  alias Membrane.Time

  # Only signed-integer PCM sample formats are accepted on input pads.
  @supported_caps {Raw,
                   format: Matcher.one_of([:s8, :s16le, :s16be, :s24le, :s24be, :s32le, :s32be])}

  def_options caps: [
                type: :struct,
                spec: Raw.t(),
                description: """
                The value defines a raw audio format of pads connected to the
                element. It should be the same for all the pads.
                """,
                default: nil
              ],
              frames_per_buffer: [
                type: :integer,
                spec: pos_integer(),
                description: """
                Assumed number of raw audio frames in each buffer.
                Used when converting demand from buffers into bytes.
                """,
                default: 2048
              ],
              prevent_clipping: [
                type: :boolean,
                spec: boolean(),
                description: """
                Defines how the mixer should act in the case when an overflow happens.
                - If true, the wave will be scaled down, so a peak will become the maximal
                value of the sample in the format. See `Membrane.AudioMixer.ClipPreventingAdder`.
                - If false, overflow will be clipped to the maximal value of the sample in
                the format. See `Membrane.AudioMixer.Adder`.
                """,
                default: true
              ]

  def_output_pad :output,
    mode: :pull,
    availability: :always,
    caps: Raw

  def_input_pad :input,
    mode: :pull,
    availability: :on_request,
    demand_unit: :bytes,
    caps: @supported_caps,
    options: [
      offset: [
        spec: Time.t(),
        default: 0,
        description: "Offset of the input audio at the pad."
      ]
    ]

  @impl true
  def handle_init(%__MODULE__{caps: caps} = options) do
    # When caps are known up-front, the adder state can be initialized
    # immediately; otherwise it is deferred until caps arrive on a pad
    # (see `handle_caps/4`).
    state =
      options
      |> Map.from_struct()
      |> Map.put(:pads, %{})
      |> then(&if caps == nil, do: &1, else: initialize_mixer_state(caps, &1))

    {:ok, state}
  end

  @impl true
  def handle_pad_added(pad, _context, state) do
    # Each pad tracks its not-yet-mixed bytes (`queue`) and whether its
    # stream has ended.
    state =
      Bunch.Access.put_in(
        state,
        [:pads, pad],
        %{queue: <<>>, stream_ended: false}
      )

    {:ok, state}
  end

  @impl true
  def handle_pad_removed(pad, _context, state) do
    state = Bunch.Access.delete_in(state, [:pads, pad])

    {:ok, state}
  end

  @impl true
  def handle_prepared_to_playing(_context, %{caps: %Raw{} = caps} = state) do
    {{:ok, caps: {:output, caps}}, state}
  end

  def handle_prepared_to_playing(_context, %{caps: nil} = state) do
    {:ok, state}
  end

  @impl true
  def handle_demand(:output, size, :bytes, _context, state) do
    do_handle_demand(size, state)
  end

  @impl true
  def handle_demand(:output, _buffers_count, :buffers, _context, %{caps: nil} = state) do
    # Without caps the frame size is unknown, so a demand expressed in
    # buffers cannot be converted to bytes yet.
    {:ok, state}
  end

  @impl true
  def handle_demand(
        :output,
        buffers_count,
        :buffers,
        _context,
        %{frames_per_buffer: frames, caps: caps} = state
      ) do
    size = buffers_count * Raw.frames_to_bytes(frames, caps)

    do_handle_demand(size, state)
  end

  @impl true
  def handle_start_of_stream(pad, context, state) do
    # Pre-fill the pad's queue with silence matching its `offset` option, so
    # the stream effectively starts `offset` later in the mix.
    offset = context.pads[pad].options.offset
    silence = Raw.sound_of_silence(state.caps, offset)

    state =
      Bunch.Access.update_in(
        state,
        [:pads, pad],
        &%{&1 | queue: silence}
      )

    {actions, state} = mix_and_get_actions(state)

    {{:ok, actions ++ [{:redemand, :output}]}, state}
  end

  @impl true
  def handle_end_of_stream(pad, _context, state) do
    # A pad with an empty queue can be dropped right away; otherwise keep it
    # (flagged ended) until its remaining samples have been mixed.
    state =
      case Bunch.Access.get_in(state, [:pads, pad]) do
        %{queue: <<>>} ->
          Bunch.Access.delete_in(state, [:pads, pad])

        _state ->
          Bunch.Access.update_in(
            state,
            [:pads, pad],
            &%{&1 | stream_ended: true}
          )
      end

    {actions, state} = mix_and_get_actions(state)

    if all_streams_ended?(state) do
      {{:ok, actions ++ [{:end_of_stream, :output}]}, state}
    else
      {{:ok, actions}, state}
    end
  end

  @impl true
  def handle_event(pad, event, _context, state) do
    Membrane.Logger.debug("Received event #{inspect(event)} on pad #{inspect(pad)}")

    {:ok, state}
  end

  @impl true
  def handle_process(
        pad_ref,
        %Buffer{payload: payload},
        _context,
        %{caps: caps, pads: pads} = state
      ) do
    time_frame = Raw.frame_size(caps)

    # Append the payload to the pad's queue, then mix once at least one full
    # frame is available; otherwise just ask upstream for more data.
    {size, pads} =
      Map.get_and_update(
        pads,
        pad_ref,
        fn %{queue: queue} = pad ->
          {byte_size(queue) + byte_size(payload), %{pad | queue: queue <> payload}}
        end
      )

    if size >= time_frame do
      {actions, state} = mix_and_get_actions(%{state | pads: pads})

      actions = if actions == [], do: [redemand: :output], else: actions

      {{:ok, actions}, state}
    else
      {{:ok, redemand: :output}, %{state | pads: pads}}
    end
  end

  @impl true
  def handle_caps(_pad, caps, _context, %{caps: nil} = state) do
    # First caps seen: adopt them, initialize the adder, and forward them.
    state = state |> Map.put(:caps, caps) |> then(&initialize_mixer_state(caps, &1))

    {{:ok, caps: {:output, caps}, redemand: :output}, state}
  end

  @impl true
  def handle_caps(_pad, caps, _context, %{caps: caps} = state) do
    # Caps identical to the current ones (pin via pattern match): no-op.
    {:ok, state}
  end

  @impl true
  def handle_caps(pad, caps, _context, state) do
    # Caps differing from the configured/established format are an error.
    raise(
      RuntimeError,
      "received invalid caps on pad #{inspect(pad)}, expected: #{inspect(state.caps)}, got: #{inspect(caps)}"
    )
  end

  # Pick the adder implementation according to `prevent_clipping` and store
  # its initial state under `:mixer_state`.
  defp initialize_mixer_state(caps, state) do
    mixer_module = if state.prevent_clipping, do: ClipPreventingAdder, else: Adder

    mixer_state = mixer_module.init(caps)

    Map.put(state, :mixer_state, mixer_state)
  end

  # Demand from each pad only the bytes missing from its queue to reach `size`.
  defp do_handle_demand(size, %{pads: pads} = state) do
    pads
    |> Enum.map(fn {pad, %{queue: queue}} ->
      queue
      |> byte_size()
      |> then(&{:demand, {pad, max(0, size - &1)}})
    end)
    |> then(fn demands -> {{:ok, demands}, state} end)
  end

  # Mix as much as all queues allow, flush the adder once every stream has
  # ended, and translate the resulting payload into output buffer actions.
  defp mix_and_get_actions(%{caps: caps, pads: pads} = state) do
    time_frame = Raw.frame_size(caps)
    mix_size = get_mix_size(pads, time_frame)

    {payload, state} =
      if mix_size >= time_frame do
        {payload, pads, state} = mix(pads, mix_size, state)
        pads = remove_finished_pads(pads, time_frame)
        state = %{state | pads: pads}

        {payload, state}
      else
        {<<>>, state}
      end

    {payload, state} =
      if all_streams_ended?(state) do
        {flushed, state} = flush_mixer(state)

        {payload <> flushed, state}
      else
        {payload, state}
      end

    actions = if payload == <<>>, do: [], else: [buffer: {:output, %Buffer{payload: payload}}]

    {actions, state}
  end

  # The mixable span is bounded by the shortest queue, rounded down to a
  # whole number of frames.
  defp get_mix_size(pads, time_frame) do
    pads
    |> Enum.map(fn {_pad, %{queue: queue}} -> byte_size(queue) end)
    |> Enum.min(fn -> 0 end)
    |> int_part(time_frame)
  end

  # Returns the biggest multiple of `divisor` that is not bigger than `number`
  defp int_part(number, divisor) when is_integer(number) and is_integer(divisor) do
    rest = rem(number, divisor)

    number - rest
  end

  # Split `mix_size` bytes off the front of every queue and feed them to the
  # adder; returns the mixed payload, the updated pads map, and new state.
  defp mix(pads, mix_size, state) do
    {payloads, pads_list} =
      pads
      |> Enum.map(fn
        {pad, %{queue: <<payload::binary-size(mix_size)>> <> queue} = data} ->
          {payload, {pad, %{data | queue: queue}}}
      end)
      |> Enum.unzip()

    {payload, state} = mix_payloads(payloads, state)

    pads = Map.new(pads_list)

    {payload, pads, state}
  end

  defp all_streams_ended?(%{pads: pads}) do
    pads
    |> Enum.map(fn {_pad, %{stream_ended: stream_ended}} -> stream_ended end)
    |> Enum.all?()
  end

  # Drop ended pads whose queues no longer hold a full frame.
  defp remove_finished_pads(pads, time_frame) do
    pads
    |> Enum.flat_map(fn
      {_pad, %{queue: queue, stream_ended: true}} when byte_size(queue) < time_frame -> []
      pad_data -> [pad_data]
    end)
    |> Map.new()
  end

  # Delegate the actual sample addition to the configured adder module
  # (extracted from the `mixer_state` struct).
  defp mix_payloads(payloads, %{mixer_state: %module{} = mixer_state} = state) do
    {payload, mixer_state} = module.mix(payloads, mixer_state)

    state = %{state | mixer_state: mixer_state}

    {payload, state}
  end

  defp flush_mixer(%{mixer_state: %module{} = mixer_state} = state) do
    {payload, mixer_state} = module.flush(mixer_state)

    state = %{state | mixer_state: mixer_state}

    {payload, state}
  end
end
|
lib/membrane_audio_mixer.ex
| 0.886611 | 0.546859 |
membrane_audio_mixer.ex
|
starcoder
|
defmodule Wax.Metadata.Statement do
@moduledoc """
Structure representing a FIDO2 metadata statement
Reference: [FIDO Metadata Statements](https://fidoalliance.org/specs/fido-uaf-v1.2-rd-20171128/fido-metadata-statement-v1.2-rd-20171128.html#metadata-keys)
Note that the following keys are not included in this module because of irrelevance:
- legalHeader
- alternativeDescriptions
- tcDisplayPNGCharacteristics
- icon
- tc_display_content_type
"""
# `use Bitwise` is deprecated; `import Bitwise` brings the same bitwise
# operators (`&&&`, `|||`, ...) into scope.
import Bitwise
# Keys that must be present when building a statement struct; all remaining
# struct fields are optional.
@enforce_keys [
  :description,
  :authenticator_version,
  :upv,
  :assertion_scheme,
  :authentication_algorithm,
  :public_key_alg_and_encoding,
  :attestation_types,
  :user_verification_details,
  :key_protection,
  :matcher_protection,
  :attachment_hint,
  :is_second_factor_only,
  :tc_display,
  :attestation_root_certificates
]

defstruct [
  :aaid,
  :aaguid,
  :attestation_certificate_key_identifiers,
  :description,
  :authenticator_version,
  :protocol_family,
  :upv,
  :assertion_scheme,
  :authentication_algorithm,
  :authentication_algorithms,
  :public_key_alg_and_encoding,
  :public_key_alg_and_encodings,
  :attestation_types,
  :user_verification_details,
  :key_protection,
  :is_key_restricted,
  :is_fresh_user_verification_required,
  :matcher_protection,
  :crypto_strength,
  :operating_env,
  :attachment_hint,
  :is_second_factor_only,
  :tc_display,
  :tc_display_content_type,
  :attestation_root_certificates,
  :ecdaa_trust_anchors,
  :supported_extensions
]

@type t :: %__MODULE__{
        aaid: String.t(),
        aaguid: String.t(),
        attestation_certificate_key_identifiers: [String.t()],
        description: String.t(),
        authenticator_version: non_neg_integer(),
        protocol_family: String.t(),
        upv: [Wax.Metadata.Statement.UPV.t()],
        assertion_scheme: String.t(),
        authentication_algorithm: Wax.Metadata.Statement.authentication_algorithm(),
        authentication_algorithms: [Wax.Metadata.Statement.authentication_algorithm()],
        public_key_alg_and_encoding: Wax.Metadata.Statement.public_key_representation_format(),
        public_key_alg_and_encodings: [Wax.Metadata.Statement.public_key_representation_format()],
        attestation_types: [Wax.Metadata.Statement.attestation_type()],
        user_verification_details: [verification_method_and_combinations()],
        key_protection: [key_protection()],
        is_key_restricted: boolean(),
        is_fresh_user_verification_required: boolean(),
        matcher_protection: matcher_protection(), # for now all values exclude each other
        crypto_strength: non_neg_integer(),
        operating_env: String.t(),
        attachment_hint: [attachment_hint()],
        is_second_factor_only: boolean(),
        tc_display: [tc_display()],
        attestation_root_certificates: [:public_key.der_encoded()],
        ecdaa_trust_anchors: [Wax.Metadata.Statement.EcdaaTrustAnchor],
        supported_extensions: [Wax.Metadata.Statement.ExtensionDescriptor]
      }

# Signature algorithm identifiers (ALG_SIGN_*) from the FIDO registry.
@type authentication_algorithm ::
        :alg_sign_secp256r1_ecdsa_sha256_raw
        | :alg_sign_secp256r1_ecdsa_sha256_der
        | :alg_sign_rsassa_pss_sha256_raw
        | :alg_sign_rsassa_pss_sha256_der
        | :alg_sign_secp256k1_ecdsa_sha256_raw
        | :alg_sign_secp256k1_ecdsa_sha256_der
        | :alg_sign_sm2_sm3_raw
        | :alg_sign_rsa_emsa_pkcs1_sha256_raw
        | :alg_sign_rsa_emsa_pkcs1_sha256_der
        | :alg_sign_rsassa_pss_sha384_raw
        | :alg_sign_rsassa_pss_sha512_raw
        | :alg_sign_rsassa_pkcsv15_sha256_raw
        | :alg_sign_rsassa_pkcsv15_sha384_raw
        | :alg_sign_rsassa_pkcsv15_sha512_raw
        | :alg_sign_rsassa_pkcsv15_sha1_raw
        | :alg_sign_secp384r1_ecdsa_sha384_raw
        | :alg_sign_secp521r1_ecdsa_sha512_raw
        | :alg_sign_ed25519_eddsa_sha256_raw

# Public key representation formats (ALG_KEY_*) from the FIDO registry.
@type public_key_representation_format ::
        :alg_key_ecc_x962_raw
        | :alg_key_ecc_x962_der
        | :alg_key_rsa_2048_raw
        | :alg_key_rsa_2048_der
        | :alg_key_cose

# Attestation types (TAG_ATTESTATION_*) from the FIDO registry.
@type attestation_type ::
        :tag_attestation_basic_full
        | :tag_attestation_basic_surrogate
        | :tag_attestation_ecdaa
        | :tag_attestation_attca

# Each inner list is one alternative combination of user verification
# methods which must all be passed together.
@type verification_method_and_combinations ::
        [Wax.Metadata.Statement.VerificationMethodDescriptor.t()]
defmodule UPV do
  @moduledoc """
  Unified protocol version (`upv`) entry of a metadata statement.
  """

  @enforce_keys [:minor, :major]
  defstruct [:minor, :major]

  # Trailing comma after the last map field removed — it is a syntax error
  # in Elixir map literals.
  @type t :: %__MODULE__{
          minor: non_neg_integer(),
          major: non_neg_integer()
        }
end
defmodule VerificationMethodDescriptor do
  @moduledoc """
  A single user verification method with its optional accuracy descriptors.
  """

  @enforce_keys [:user_verification]

  defstruct [
    :user_verification,
    :code_accuracy_descriptor,
    :biometric_accuracy_descriptor,
    :pattern_accuracy_descriptor
  ]

  @type t :: %__MODULE__{
          user_verification: Wax.Metadata.Statement.user_verification_method(),
          code_accuracy_descriptor:
            Wax.Metadata.Statement.VerificationMethodDescriptor.CodeAccuracyDescriptor.t(),
          biometric_accuracy_descriptor:
            Wax.Metadata.Statement.VerificationMethodDescriptor.BiometricAccuracyDescriptor.t(),
          pattern_accuracy_descriptor:
            Wax.Metadata.Statement.VerificationMethodDescriptor.PatternAccuracyDescriptor.t()
        }

  defmodule CodeAccuracyDescriptor do
    @moduledoc """
    Accuracy/complexity of a code (e.g. PIN) verification method.
    """

    @enforce_keys [:base, :min_length]
    defstruct [:base, :min_length, :max_retries, :block_slowdown]

    @type t :: %__MODULE__{
            base: non_neg_integer(),
            min_length: non_neg_integer(),
            max_retries: non_neg_integer(),
            block_slowdown: non_neg_integer()
          }
  end

  defmodule BiometricAccuracyDescriptor do
    @moduledoc """
    Accuracy of a biometric verification method (error rates etc.).
    """

    defstruct [:far, :frr, :eer, :faar, :max_reference_data_sets, :max_retries, :block_slowdown]

    @type t :: %__MODULE__{
            far: float(),
            frr: float(),
            eer: float(),
            faar: float(),
            max_reference_data_sets: non_neg_integer(),
            max_retries: non_neg_integer(),
            block_slowdown: non_neg_integer()
          }
  end

  defmodule PatternAccuracyDescriptor do
    @moduledoc """
    Accuracy/complexity of a pattern verification method.
    """

    @enforce_keys [:min_complexity]
    defstruct [:min_complexity, :max_retries, :block_slowdown]

    @type t :: %__MODULE__{
            min_complexity: non_neg_integer(),
            max_retries: non_neg_integer(),
            block_slowdown: non_neg_integer()
          }
  end
end
# User verification methods (USER_VERIFY_*) from the FIDO registry.
@type user_verification_method ::
        :user_verify_presence_internal
        | :user_verify_fingerprint_internal
        | :user_verify_passcode_internal
        | :user_verify_voiceprint_internal
        | :user_verify_faceprint_internal
        | :user_verify_location_internal
        | :user_verify_eyeprint_internal
        | :user_verify_pattern_internal
        | :user_verify_handprint_internal
        | :user_verify_passcode_external
        | :user_verify_pattern_external
        | :user_verify_none
        | :user_verify_all

# Key protection flags (KEY_PROTECTION_*); decoded from a bit field.
@type key_protection ::
        :key_protection_software
        | :key_protection_hardware
        | :key_protection_tee
        | :key_protection_secure_element
        | :key_protection_remote_handle

# Matcher protection (MATCHER_PROTECTION_*); values are mutually exclusive.
@type matcher_protection ::
        :matcher_protection_software
        | :matcher_protection_tee
        | :matcher_protection_on_chip

# Attachment hints (ATTACHMENT_HINT_*); decoded from a bit field.
@type attachment_hint ::
        :attachment_hint_internal
        | :attachment_hint_external
        | :attachment_hint_wired
        | :attachment_hint_wireless
        | :attachment_hint_nfc
        | :attachment_hint_bluetooth
        | :attachment_hint_network
        | :attachment_hint_ready
        | :attachment_hint_wifi_direct

# Transaction confirmation display capabilities (TRANSACTION_CONFIRMATION_DISPLAY_*).
@type tc_display ::
        :transaction_confirmation_display_any
        | :transaction_confirmation_display_privileged_software
        | :transaction_confirmation_display_tee
        | :transaction_confirmation_display_hardware
        | :transaction_confirmation_display_remote
defmodule EcdaaTrustAnchor do
  @moduledoc """
  ECDAA issuer trust anchor of a metadata statement
  (`ecdaaTrustAnchors` entry).
  """

  @enforce_keys [
    :x,
    :y,
    :c,
    :sx,
    :sy,
    :g1_curve
  ]

  defstruct [
    :x,
    :y,
    :c,
    :sx,
    :sy,
    :g1_curve
  ]

  @type t :: %__MODULE__{
          x: String.t(),
          y: String.t(),
          c: String.t(),
          sx: String.t(),
          sy: String.t(),
          g1_curve: String.t()
        }
end
defmodule ExtensionDescriptor do
  @moduledoc """
  Extension supported by the authenticator (`supportedExtensions` entry).
  """

  @enforce_keys [:id, :fail_if_unknown]

  defstruct [
    :id,
    :tag,
    :data,
    :fail_if_unknown
  ]

  @type t :: %__MODULE__{
          id: String.t(),
          tag: non_neg_integer(),
          data: String.t(),
          fail_if_unknown: boolean()
        }
end
@doc false
# Safe wrapper around `from_json!/1`: any exception raised while decoding is
# captured and returned as `{:error, exception}`.
@spec from_json(map() | Keyword.t() | nil) :: {:ok, t()} | {:error, any()}
def from_json(json) do
  {:ok, from_json!(json)}
rescue
  e -> {:error, e}
end
# Builds a `%Wax.Metadata.Statement{}` from a decoded metadata statement
# JSON map. Raises (e.g. `FunctionClauseError`, `Base.decode64!` errors) on
# malformed input; `from_json/1` wraps this into an `{:error, _}` tuple.
# NOTE(review): the spec also allows `Keyword.t() | nil` but only a map
# clause exists — other inputs raise; presumably intentional.
@spec from_json!(map() | Keyword.t() | nil) :: t()

def from_json!(%{} = json) do
  %__MODULE__{
    aaid: json["aaid"],
    aaguid: json["aaguid"],
    attestation_certificate_key_identifiers: json["attestationCertificateKeyIdentifiers"],
    description: json["description"],
    authenticator_version: json["authenticatorVersion"],
    protocol_family: json["protocolFamily"],
    upv: Enum.map(
      json["upv"] || [],
      fn %{"minor" => minor, "major" => major} ->
        %Wax.Metadata.Statement.UPV{
          major: major,
          minor: minor
        }
      end
    ),
    assertion_scheme: json["assertionScheme"],
    authentication_algorithm: authentication_algorithm(json["authenticationAlgorithm"]),
    authentication_algorithms: Enum.map(
      json["authenticationAlgorithms"] || [],
      fn alg ->
        authentication_algorithm(alg)
      end
    ),
    public_key_alg_and_encoding:
      public_key_representation_format(json["publicKeyAlgAndEncoding"]),
    public_key_alg_and_encodings: Enum.map(
      json["publicKeyAlgAndEncodings"] || [],
      fn keyalg ->
        public_key_representation_format(keyalg)
      end
    ),
    # No `|| []` fallback below: these keys are mandatory in a metadata
    # statement, so a missing key raises instead of being silently empty.
    attestation_types: Enum.map(
      json["attestationTypes"],
      fn att_type ->
        attestation_type(att_type)
      end
    ),
    # Outer list: alternative combinations; inner list: methods that must
    # all be used together.
    user_verification_details: Enum.map(
      json["userVerificationDetails"],
      fn list ->
        Enum.map(
          list,
          fn uvd ->
            %Wax.Metadata.Statement.VerificationMethodDescriptor{
              user_verification: user_verification_method(uvd["userVerification"]),
              code_accuracy_descriptor: code_accuracy_descriptor(uvd["caDesc"]),
              biometric_accuracy_descriptor: biometric_accuracy_descriptor(uvd["baDesc"]),
              pattern_accuracy_descriptor: pattern_accuracy_descriptor(uvd["paDesc"])
            }
          end
        )
      end
    ),
    key_protection: key_protection(json["keyProtection"]),
    is_key_restricted: json["isKeyRestricted"],
    is_fresh_user_verification_required: json["isFreshUserVerificationRequired"],
    matcher_protection: matcher_protection(json["matcherProtection"]),
    crypto_strength: json["cryptoStrength"],
    operating_env: json["operatingEnv"],
    attachment_hint: attachment_hint(json["attachmentHint"]),
    is_second_factor_only: json["isSecondFactorOnly"],
    tc_display: tc_display(json["tcDisplay"]),
    # Certificates arrive base64-encoded; stored as DER binaries.
    attestation_root_certificates: Enum.map(
      json["attestationRootCertificates"],
      fn
        b64_cert -> Base.decode64!(b64_cert)
      end
    ),
    ecdaa_trust_anchors: Enum.map(
      json["ecdaaTrustAnchors"] || [],
      fn map ->
        %Wax.Metadata.Statement.EcdaaTrustAnchor{
          x: map["X"],
          y: map["Y"],
          c: map["c"],
          sx: map["sx"],
          sy: map["sy"],
          g1_curve: map["G1Curve"]
        }
      end
    ),
    supported_extensions: Enum.map(
      json["supportedExtensions"] || [],
      fn map ->
        %Wax.Metadata.Statement.ExtensionDescriptor{
          id: map["id"],
          tag: map["tag"],
          data: map["data"],
          fail_if_unknown: map["fail_if_unknown"]
        }
      end
    )
  }
end
# Maps ALG_SIGN_* numeric codes from the FIDO registry to atoms.
# Unknown codes raise `FunctionClauseError` (caught by `from_json/1`).
@spec authentication_algorithm(non_neg_integer()) :: authentication_algorithm()
defp authentication_algorithm(0x0001), do: :alg_sign_secp256r1_ecdsa_sha256_raw
defp authentication_algorithm(0x0002), do: :alg_sign_secp256r1_ecdsa_sha256_der
defp authentication_algorithm(0x0003), do: :alg_sign_rsassa_pss_sha256_raw
defp authentication_algorithm(0x0004), do: :alg_sign_rsassa_pss_sha256_der
defp authentication_algorithm(0x0005), do: :alg_sign_secp256k1_ecdsa_sha256_raw
defp authentication_algorithm(0x0006), do: :alg_sign_secp256k1_ecdsa_sha256_der
defp authentication_algorithm(0x0007), do: :alg_sign_sm2_sm3_raw
defp authentication_algorithm(0x0008), do: :alg_sign_rsa_emsa_pkcs1_sha256_raw
defp authentication_algorithm(0x0009), do: :alg_sign_rsa_emsa_pkcs1_sha256_der
defp authentication_algorithm(0x000A), do: :alg_sign_rsassa_pss_sha384_raw
defp authentication_algorithm(0x000B), do: :alg_sign_rsassa_pss_sha512_raw
defp authentication_algorithm(0x000C), do: :alg_sign_rsassa_pkcsv15_sha256_raw
defp authentication_algorithm(0x000D), do: :alg_sign_rsassa_pkcsv15_sha384_raw
defp authentication_algorithm(0x000E), do: :alg_sign_rsassa_pkcsv15_sha512_raw
defp authentication_algorithm(0x000F), do: :alg_sign_rsassa_pkcsv15_sha1_raw
defp authentication_algorithm(0x0010), do: :alg_sign_secp384r1_ecdsa_sha384_raw
defp authentication_algorithm(0x0011), do: :alg_sign_secp521r1_ecdsa_sha512_raw
defp authentication_algorithm(0x0012), do: :alg_sign_ed25519_eddsa_sha256_raw
# Maps ALG_KEY_* numeric codes from the FIDO registry to atoms.
@spec public_key_representation_format(non_neg_integer()) :: public_key_representation_format()
defp public_key_representation_format(0x0100), do: :alg_key_ecc_x962_raw
defp public_key_representation_format(0x0101), do: :alg_key_ecc_x962_der
defp public_key_representation_format(0x0102), do: :alg_key_rsa_2048_raw
defp public_key_representation_format(0x0103), do: :alg_key_rsa_2048_der
defp public_key_representation_format(0x0104), do: :alg_key_cose
# Maps TAG_ATTESTATION_* numeric codes from the FIDO registry to atoms.
@spec attestation_type(non_neg_integer()) :: attestation_type()
defp attestation_type(0x3E07), do: :tag_attestation_basic_full
defp attestation_type(0x3E08), do: :tag_attestation_basic_surrogate
defp attestation_type(0x3E09), do: :tag_attestation_ecdaa
defp attestation_type(0x3E0A), do: :tag_attestation_attca
# Maps USER_VERIFY_* numeric codes from the FIDO registry to atoms.
# NOTE(review): the input is a bit field in the registry, but this mapper
# handles single-bit values only — combined bits raise; presumably each
# verification detail carries exactly one method.
@spec user_verification_method(non_neg_integer()) :: user_verification_method()
defp user_verification_method(0x00000001), do: :user_verify_presence_internal
defp user_verification_method(0x00000002), do: :user_verify_fingerprint_internal
defp user_verification_method(0x00000004), do: :user_verify_passcode_internal
defp user_verification_method(0x00000008), do: :user_verify_voiceprint_internal
defp user_verification_method(0x00000010), do: :user_verify_faceprint_internal
defp user_verification_method(0x00000020), do: :user_verify_location_internal
defp user_verification_method(0x00000040), do: :user_verify_eyeprint_internal
defp user_verification_method(0x00000080), do: :user_verify_pattern_internal
defp user_verification_method(0x00000100), do: :user_verify_handprint_internal
defp user_verification_method(0x00000200), do: :user_verify_none
defp user_verification_method(0x00000400), do: :user_verify_all
defp user_verification_method(0x00000800), do: :user_verify_passcode_external
defp user_verification_method(0x00001000), do: :user_verify_pattern_external
# Builds a `CodeAccuracyDescriptor` from its JSON map; `nil` passes through.
# Spec widened to `map() | nil` / `t() | nil` to match the `nil` clause.
@spec code_accuracy_descriptor(map() | nil) ::
        Wax.Metadata.Statement.VerificationMethodDescriptor.CodeAccuracyDescriptor.t() | nil
defp code_accuracy_descriptor(nil), do: nil

defp code_accuracy_descriptor(map) do
  %Wax.Metadata.Statement.VerificationMethodDescriptor.CodeAccuracyDescriptor{
    base: map["base"],
    min_length: map["minLength"],
    max_retries: map["maxRetries"],
    block_slowdown: map["blockSlowdown"]
  }
end
# Builds a `BiometricAccuracyDescriptor` from its JSON map; `nil` passes
# through. Spec widened to `map() | nil` / `t() | nil` to match the `nil`
# clause.
@spec biometric_accuracy_descriptor(map() | nil) ::
        Wax.Metadata.Statement.VerificationMethodDescriptor.BiometricAccuracyDescriptor.t() | nil
defp biometric_accuracy_descriptor(nil), do: nil

defp biometric_accuracy_descriptor(map) do
  %Wax.Metadata.Statement.VerificationMethodDescriptor.BiometricAccuracyDescriptor{
    far: map["FAR"],
    frr: map["FRR"],
    eer: map["EER"],
    faar: map["FAAR"],
    max_reference_data_sets: map["maxReferenceDataSets"],
    max_retries: map["maxRetries"],
    block_slowdown: map["blockSlowdown"]
  }
end
# Builds a `PatternAccuracyDescriptor` from its JSON map; `nil` passes
# through. Spec widened to `map() | nil` / `t() | nil` to match the `nil`
# clause.
@spec pattern_accuracy_descriptor(map() | nil) ::
        Wax.Metadata.Statement.VerificationMethodDescriptor.PatternAccuracyDescriptor.t() | nil
defp pattern_accuracy_descriptor(nil), do: nil

defp pattern_accuracy_descriptor(map) do
  %Wax.Metadata.Statement.VerificationMethodDescriptor.PatternAccuracyDescriptor{
    min_complexity: map["minComplexity"],
    max_retries: map["maxRetries"],
    block_slowdown: map["blockSlowdown"]
  }
end
# Decodes the 16-bit KEY_PROTECTION bit field into a list of atoms by
# threading the accumulator through one helper per flag.
@spec key_protection(non_neg_integer()) :: [key_protection()]
defp key_protection(kp) do
  []
  |> key_protected_software(kp)
  |> key_protected_hardware(kp)
  |> key_protected_tee(kp)
  |> key_protected_secure_element(kp)
  |> key_protected_remote_handle(kp)
end
# Each helper prepends its flag atom when the corresponding KEY_PROTECTION
# bit is set, otherwise returns the accumulator unchanged.
@spec key_protected_software([key_protection()], non_neg_integer()) :: [key_protection()]
defp key_protected_software(kp_list, kp) do
  if (kp &&& 0x0001) > 0, do: [:key_protection_software | kp_list], else: kp_list
end

@spec key_protected_hardware([key_protection()], non_neg_integer()) :: [key_protection()]
defp key_protected_hardware(kp_list, kp) do
  if (kp &&& 0x0002) > 0, do: [:key_protection_hardware | kp_list], else: kp_list
end

@spec key_protected_tee([key_protection()], non_neg_integer()) :: [key_protection()]
defp key_protected_tee(kp_list, kp) do
  if (kp &&& 0x0004) > 0, do: [:key_protection_tee | kp_list], else: kp_list
end

@spec key_protected_secure_element([key_protection()], non_neg_integer()) :: [key_protection()]
defp key_protected_secure_element(kp_list, kp) do
  if (kp &&& 0x0008) > 0, do: [:key_protection_secure_element | kp_list], else: kp_list
end

@spec key_protected_remote_handle([key_protection()], non_neg_integer()) :: [key_protection()]
defp key_protected_remote_handle(kp_list, kp) do
  if (kp &&& 0x0010) > 0, do: [:key_protection_remote_handle | kp_list], else: kp_list
end
# Maps MATCHER_PROTECTION_* codes to atoms; values are mutually exclusive,
# so a single atom is returned (unknown codes raise).
@spec matcher_protection(non_neg_integer()) :: matcher_protection()
defp matcher_protection(0x0001), do: :matcher_protection_software
defp matcher_protection(0x0002), do: :matcher_protection_tee
defp matcher_protection(0x0004), do: :matcher_protection_on_chip
# Decodes the ATTACHMENT_HINT bit field into a list of atoms by threading
# the accumulator through one helper per flag (the final `ready` step also
# inspects the wifi-direct bit).
@spec attachment_hint(non_neg_integer()) :: [attachment_hint()]
defp attachment_hint(ah)
do
  []
  |> attachment_hint_internal(ah)
  |> attachment_hint_external(ah)
  |> attachment_hint_wired(ah)
  |> attachment_hint_wireless(ah)
  |> attachment_hint_nfc(ah)
  |> attachment_hint_bluetooth(ah)
  |> attachment_hint_network(ah)
  |> attachment_hint_ready(ah)
end
@spec attachment_hint_internal([attachment_hint()], non_neg_integer()) :: [attachment_hint()]
defp attachment_hint_internal(ah_list, ah) when (ah &&& 0x0001) > 0
do
[:attachment_hint_internal | ah_list]
end
defp attachment_hint_internal(ah_list, _), do: ah_list
@spec attachment_hint_external([attachment_hint()], non_neg_integer()) :: [attachment_hint()]
defp attachment_hint_external(ah_list, ah) when (ah &&& 0x0002) > 0
do
[:attachment_hint_external | ah_list]
end
defp attachment_hint_external(ah_list, _), do: ah_list
@spec attachment_hint_wired([attachment_hint()], non_neg_integer()) :: [attachment_hint()]
defp attachment_hint_wired(ah_list, ah) when (ah &&& 0x0004) > 0
do
[:attachment_hint_wired | ah_list]
end
defp attachment_hint_wired(ah_list, _), do: ah_list
@spec attachment_hint_wireless([attachment_hint()], non_neg_integer()) :: [attachment_hint()]
defp attachment_hint_wireless(ah_list, ah) when (ah &&& 0x0008) > 0
do
[:attachment_hint_wireless | ah_list]
end
defp attachment_hint_wireless(ah_list, _), do: ah_list
@spec attachment_hint_nfc([attachment_hint()], non_neg_integer()) :: [attachment_hint()]
defp attachment_hint_nfc(ah_list, ah) when (ah &&& 0x0010) > 0
do
[:attachment_hint_nfc | ah_list]
end
defp attachment_hint_nfc(ah_list, _), do: ah_list
@spec attachment_hint_bluetooth([attachment_hint()], non_neg_integer()) :: [attachment_hint()]
defp attachment_hint_bluetooth(ah_list, ah) when (ah &&& 0x0020) > 0
do
[:attachment_hint_bluetooth | ah_list]
end
defp attachment_hint_bluetooth(ah_list, _), do: ah_list
@spec attachment_hint_network([attachment_hint()], non_neg_integer()) :: [attachment_hint()]
# Prepends :attachment_hint_network to the accumulator when bit 0x0040 is set.
defp attachment_hint_network(acc, bits) do
  if (bits &&& 0x0040) > 0, do: [:attachment_hint_network | acc], else: acc
end
@spec attachment_hint_ready([attachment_hint()], non_neg_integer()) :: [attachment_hint()]
# Decodes the last two attachment-hint bits. Unlike the other helpers, this one
# covers two independent flags of the FIDO registry: READY (0x0080) and
# WIFI_DIRECT (0x0100).
#
# Bug fix: the previous implementation used two mutually exclusive guarded
# clauses, so when BOTH bits were set only :attachment_hint_ready was emitted
# and the wifi-direct flag was silently dropped. Each bit is now tested
# independently, matching the behavior of the other single-flag helpers.
defp attachment_hint_ready(ah_list, ah) do
  ah_list =
    if (ah &&& 0x0080) > 0,
      do: [:attachment_hint_ready | ah_list],
      else: ah_list

  if (ah &&& 0x0100) > 0,
    do: [:attachment_hint_wifi_direct | ah_list],
    else: ah_list
end
@spec tc_display(non_neg_integer()) :: [tc_display()]
# Decodes a transaction-confirmation-display bitfield into the matching atoms
# by folding the per-flag decoders over an empty accumulator (same decoder
# order as the original pipeline, so the result list is identical).
defp tc_display(tc) do
  [
    &tc_display_any/2,
    &tc_display_privileged_software/2,
    &tc_display_tee/2,
    &tc_display_hardware/2,
    &tc_display_remote/2
  ]
  |> Enum.reduce([], fn decode, acc -> decode.(acc, tc) end)
end
@spec tc_display_any([tc_display()], non_neg_integer()) :: [tc_display()]
# Prepends :transaction_confirmation_display_any when bit 0x0001 is set.
defp tc_display_any(acc, bits) do
  if (bits &&& 0x0001) > 0, do: [:transaction_confirmation_display_any | acc], else: acc
end
@spec tc_display_privileged_software([tc_display()], non_neg_integer()) :: [tc_display()]
# Prepends :transaction_confirmation_display_privileged_software when bit 0x0002 is set.
defp tc_display_privileged_software(acc, bits) do
  if (bits &&& 0x0002) > 0,
    do: [:transaction_confirmation_display_privileged_software | acc],
    else: acc
end
@spec tc_display_tee([tc_display()], non_neg_integer()) :: [tc_display()]
# Prepends :transaction_confirmation_display_tee when bit 0x0004 is set.
defp tc_display_tee(acc, bits) do
  if (bits &&& 0x0004) > 0, do: [:transaction_confirmation_display_tee | acc], else: acc
end
@spec tc_display_hardware([tc_display()], non_neg_integer()) :: [tc_display()]
# Prepends :transaction_confirmation_display_hardware when bit 0x0008 is set.
defp tc_display_hardware(acc, bits) do
  if (bits &&& 0x0008) > 0, do: [:transaction_confirmation_display_hardware | acc], else: acc
end
@spec tc_display_remote([tc_display()], non_neg_integer()) :: [tc_display()]
# Prepends :transaction_confirmation_display_remote when bit 0x0010 is set.
defp tc_display_remote(acc, bits) do
  if (bits &&& 0x0010) > 0, do: [:transaction_confirmation_display_remote | acc], else: acc
end
end
|
lib/wax/metadata/statement.ex
| 0.841142 | 0.597755 |
statement.ex
|
starcoder
|
defmodule Google.Bigtable.Admin.V2.CreateInstanceRequest.ClustersEntry do
@moduledoc false
# Synthesized map-entry message (map<string, Cluster>) backing
# CreateInstanceRequest.clusters.
# NOTE(review): this whole .pb.ex file appears to be protoc-generated —
# regenerate from the .proto instead of editing by hand; confirm.
use Protobuf, map: true, syntax: :proto3
@type t :: %__MODULE__{
key: String.t(),
value: Google.Bigtable.Admin.V2.Cluster.t() | nil
}
defstruct [:key, :value]
field :key, 1, type: :string
field :value, 2, type: Google.Bigtable.Admin.V2.Cluster
end
defmodule Google.Bigtable.Admin.V2.CreateInstanceRequest do
@moduledoc false
# Request message for the CreateInstance RPC: parent resource, the new
# instance's id and definition, and its initial clusters (keyed by cluster id).
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
parent: String.t(),
instance_id: String.t(),
instance: Google.Bigtable.Admin.V2.Instance.t() | nil,
clusters: %{String.t() => Google.Bigtable.Admin.V2.Cluster.t() | nil}
}
defstruct [:parent, :instance_id, :instance, :clusters]
field :parent, 1, type: :string
field :instance_id, 2, type: :string
field :instance, 3, type: Google.Bigtable.Admin.V2.Instance
field :clusters, 4,
repeated: true,
type: Google.Bigtable.Admin.V2.CreateInstanceRequest.ClustersEntry,
map: true
end
defmodule Google.Bigtable.Admin.V2.GetInstanceRequest do
@moduledoc false
# Request message for the GetInstance RPC; `name` is the instance resource name.
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
name: String.t()
}
defstruct [:name]
field :name, 1, type: :string
end
defmodule Google.Bigtable.Admin.V2.ListInstancesRequest do
@moduledoc false
# Request message for the ListInstances RPC; supports pagination via page_token.
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
parent: String.t(),
page_token: String.t()
}
defstruct [:parent, :page_token]
field :parent, 1, type: :string
field :page_token, 2, type: :string
end
defmodule Google.Bigtable.Admin.V2.ListInstancesResponse do
@moduledoc false
# Response message for the ListInstances RPC: instances found, locations that
# could not be reached, and the token for the next page.
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
instances: [Google.Bigtable.Admin.V2.Instance.t()],
failed_locations: [String.t()],
next_page_token: String.t()
}
defstruct [:instances, :failed_locations, :next_page_token]
field :instances, 1, repeated: true, type: Google.Bigtable.Admin.V2.Instance
field :failed_locations, 2, repeated: true, type: :string
field :next_page_token, 3, type: :string
end
defmodule Google.Bigtable.Admin.V2.PartialUpdateInstanceRequest do
@moduledoc false
# Request message for the PartialUpdateInstance RPC; update_mask selects which
# instance fields to change.
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
instance: Google.Bigtable.Admin.V2.Instance.t() | nil,
update_mask: Google.Protobuf.FieldMask.t() | nil
}
defstruct [:instance, :update_mask]
field :instance, 1, type: Google.Bigtable.Admin.V2.Instance
field :update_mask, 2, type: Google.Protobuf.FieldMask
end
defmodule Google.Bigtable.Admin.V2.DeleteInstanceRequest do
@moduledoc false
# Request message for the DeleteInstance RPC; `name` is the instance resource name.
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
name: String.t()
}
defstruct [:name]
field :name, 1, type: :string
end
defmodule Google.Bigtable.Admin.V2.CreateClusterRequest do
@moduledoc false
# Request message for the CreateCluster RPC: parent instance, the new cluster's
# id, and its definition.
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
parent: String.t(),
cluster_id: String.t(),
cluster: Google.Bigtable.Admin.V2.Cluster.t() | nil
}
defstruct [:parent, :cluster_id, :cluster]
field :parent, 1, type: :string
field :cluster_id, 2, type: :string
field :cluster, 3, type: Google.Bigtable.Admin.V2.Cluster
end
defmodule Google.Bigtable.Admin.V2.GetClusterRequest do
@moduledoc false
# Request message for the GetCluster RPC; `name` is the cluster resource name.
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
name: String.t()
}
defstruct [:name]
field :name, 1, type: :string
end
defmodule Google.Bigtable.Admin.V2.ListClustersRequest do
@moduledoc false
# Request message for the ListClusters RPC; supports pagination via page_token.
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
parent: String.t(),
page_token: String.t()
}
defstruct [:parent, :page_token]
field :parent, 1, type: :string
field :page_token, 2, type: :string
end
defmodule Google.Bigtable.Admin.V2.ListClustersResponse do
@moduledoc false
# Response message for the ListClusters RPC: clusters found, unreachable
# locations, and the token for the next page.
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
clusters: [Google.Bigtable.Admin.V2.Cluster.t()],
failed_locations: [String.t()],
next_page_token: String.t()
}
defstruct [:clusters, :failed_locations, :next_page_token]
field :clusters, 1, repeated: true, type: Google.Bigtable.Admin.V2.Cluster
field :failed_locations, 2, repeated: true, type: :string
field :next_page_token, 3, type: :string
end
defmodule Google.Bigtable.Admin.V2.DeleteClusterRequest do
@moduledoc false
# Request message for the DeleteCluster RPC; `name` is the cluster resource name.
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
name: String.t()
}
defstruct [:name]
field :name, 1, type: :string
end
defmodule Google.Bigtable.Admin.V2.CreateInstanceMetadata do
@moduledoc false
# Long-running-operation metadata for CreateInstance: the original request plus
# request/finish timestamps.
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
original_request: Google.Bigtable.Admin.V2.CreateInstanceRequest.t() | nil,
request_time: Google.Protobuf.Timestamp.t() | nil,
finish_time: Google.Protobuf.Timestamp.t() | nil
}
defstruct [:original_request, :request_time, :finish_time]
field :original_request, 1, type: Google.Bigtable.Admin.V2.CreateInstanceRequest
field :request_time, 2, type: Google.Protobuf.Timestamp
field :finish_time, 3, type: Google.Protobuf.Timestamp
end
defmodule Google.Bigtable.Admin.V2.UpdateInstanceMetadata do
@moduledoc false
# Long-running-operation metadata for PartialUpdateInstance: the original
# request plus request/finish timestamps.
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
original_request: Google.Bigtable.Admin.V2.PartialUpdateInstanceRequest.t() | nil,
request_time: Google.Protobuf.Timestamp.t() | nil,
finish_time: Google.Protobuf.Timestamp.t() | nil
}
defstruct [:original_request, :request_time, :finish_time]
field :original_request, 1, type: Google.Bigtable.Admin.V2.PartialUpdateInstanceRequest
field :request_time, 2, type: Google.Protobuf.Timestamp
field :finish_time, 3, type: Google.Protobuf.Timestamp
end
defmodule Google.Bigtable.Admin.V2.CreateClusterMetadata do
@moduledoc false
# Long-running-operation metadata for CreateCluster: the original request plus
# request/finish timestamps.
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
original_request: Google.Bigtable.Admin.V2.CreateClusterRequest.t() | nil,
request_time: Google.Protobuf.Timestamp.t() | nil,
finish_time: Google.Protobuf.Timestamp.t() | nil
}
defstruct [:original_request, :request_time, :finish_time]
field :original_request, 1, type: Google.Bigtable.Admin.V2.CreateClusterRequest
field :request_time, 2, type: Google.Protobuf.Timestamp
field :finish_time, 3, type: Google.Protobuf.Timestamp
end
defmodule Google.Bigtable.Admin.V2.UpdateClusterMetadata do
@moduledoc false
# Long-running-operation metadata for UpdateCluster; the original request here
# is the Cluster itself (UpdateCluster takes a Cluster directly — see the
# Service definition's rpc :UpdateCluster).
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
original_request: Google.Bigtable.Admin.V2.Cluster.t() | nil,
request_time: Google.Protobuf.Timestamp.t() | nil,
finish_time: Google.Protobuf.Timestamp.t() | nil
}
defstruct [:original_request, :request_time, :finish_time]
field :original_request, 1, type: Google.Bigtable.Admin.V2.Cluster
field :request_time, 2, type: Google.Protobuf.Timestamp
field :finish_time, 3, type: Google.Protobuf.Timestamp
end
defmodule Google.Bigtable.Admin.V2.CreateAppProfileRequest do
@moduledoc false
# Request message for the CreateAppProfile RPC; ignore_warnings forces creation
# despite validation warnings.
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
parent: String.t(),
app_profile_id: String.t(),
app_profile: Google.Bigtable.Admin.V2.AppProfile.t() | nil,
ignore_warnings: boolean
}
defstruct [:parent, :app_profile_id, :app_profile, :ignore_warnings]
field :parent, 1, type: :string
field :app_profile_id, 2, type: :string
field :app_profile, 3, type: Google.Bigtable.Admin.V2.AppProfile
field :ignore_warnings, 4, type: :bool
end
defmodule Google.Bigtable.Admin.V2.GetAppProfileRequest do
@moduledoc false
# Request message for the GetAppProfile RPC; `name` is the app-profile resource name.
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
name: String.t()
}
defstruct [:name]
field :name, 1, type: :string
end
defmodule Google.Bigtable.Admin.V2.ListAppProfilesRequest do
@moduledoc false
# Request message for the ListAppProfiles RPC. Note the field numbers:
# page_size is tag 3 and page_token is tag 2 (matching the .proto).
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
parent: String.t(),
page_size: integer,
page_token: String.t()
}
defstruct [:parent, :page_size, :page_token]
field :parent, 1, type: :string
field :page_size, 3, type: :int32
field :page_token, 2, type: :string
end
defmodule Google.Bigtable.Admin.V2.ListAppProfilesResponse do
@moduledoc false
# Response message for the ListAppProfiles RPC: profiles found, the next page
# token, and locations that could not be reached.
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
app_profiles: [Google.Bigtable.Admin.V2.AppProfile.t()],
next_page_token: String.t(),
failed_locations: [String.t()]
}
defstruct [:app_profiles, :next_page_token, :failed_locations]
field :app_profiles, 1, repeated: true, type: Google.Bigtable.Admin.V2.AppProfile
field :next_page_token, 2, type: :string
field :failed_locations, 3, repeated: true, type: :string
end
defmodule Google.Bigtable.Admin.V2.UpdateAppProfileRequest do
@moduledoc false
# Request message for the UpdateAppProfile RPC; update_mask selects the fields
# to change and ignore_warnings forces the update despite validation warnings.
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
app_profile: Google.Bigtable.Admin.V2.AppProfile.t() | nil,
update_mask: Google.Protobuf.FieldMask.t() | nil,
ignore_warnings: boolean
}
defstruct [:app_profile, :update_mask, :ignore_warnings]
field :app_profile, 1, type: Google.Bigtable.Admin.V2.AppProfile
field :update_mask, 2, type: Google.Protobuf.FieldMask
field :ignore_warnings, 3, type: :bool
end
defmodule Google.Bigtable.Admin.V2.DeleteAppProfileRequest do
@moduledoc false
# Request message for the DeleteAppProfile RPC; ignore_warnings forces deletion
# despite validation warnings.
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
name: String.t(),
ignore_warnings: boolean
}
defstruct [:name, :ignore_warnings]
field :name, 1, type: :string
field :ignore_warnings, 2, type: :bool
end
defmodule Google.Bigtable.Admin.V2.UpdateAppProfileMetadata do
@moduledoc false
# Empty long-running-operation metadata message for UpdateAppProfile.
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{}
defstruct []
end
defmodule Google.Bigtable.Admin.V2.BigtableInstanceAdmin.Service do
@moduledoc false
# gRPC service definition for google.bigtable.admin.v2.BigtableInstanceAdmin:
# instance/cluster/app-profile CRUD plus IAM policy RPCs. Create/update RPCs
# that are long-running return Google.Longrunning.Operation.
use GRPC.Service, name: "google.bigtable.admin.v2.BigtableInstanceAdmin"
rpc :CreateInstance,
Google.Bigtable.Admin.V2.CreateInstanceRequest,
Google.Longrunning.Operation
rpc :GetInstance, Google.Bigtable.Admin.V2.GetInstanceRequest, Google.Bigtable.Admin.V2.Instance
rpc :ListInstances,
Google.Bigtable.Admin.V2.ListInstancesRequest,
Google.Bigtable.Admin.V2.ListInstancesResponse
rpc :UpdateInstance, Google.Bigtable.Admin.V2.Instance, Google.Bigtable.Admin.V2.Instance
rpc :PartialUpdateInstance,
Google.Bigtable.Admin.V2.PartialUpdateInstanceRequest,
Google.Longrunning.Operation
rpc :DeleteInstance, Google.Bigtable.Admin.V2.DeleteInstanceRequest, Google.Protobuf.Empty
rpc :CreateCluster, Google.Bigtable.Admin.V2.CreateClusterRequest, Google.Longrunning.Operation
rpc :GetCluster, Google.Bigtable.Admin.V2.GetClusterRequest, Google.Bigtable.Admin.V2.Cluster
rpc :ListClusters,
Google.Bigtable.Admin.V2.ListClustersRequest,
Google.Bigtable.Admin.V2.ListClustersResponse
rpc :UpdateCluster, Google.Bigtable.Admin.V2.Cluster, Google.Longrunning.Operation
rpc :DeleteCluster, Google.Bigtable.Admin.V2.DeleteClusterRequest, Google.Protobuf.Empty
rpc :CreateAppProfile,
Google.Bigtable.Admin.V2.CreateAppProfileRequest,
Google.Bigtable.Admin.V2.AppProfile
rpc :GetAppProfile,
Google.Bigtable.Admin.V2.GetAppProfileRequest,
Google.Bigtable.Admin.V2.AppProfile
rpc :ListAppProfiles,
Google.Bigtable.Admin.V2.ListAppProfilesRequest,
Google.Bigtable.Admin.V2.ListAppProfilesResponse
rpc :UpdateAppProfile,
Google.Bigtable.Admin.V2.UpdateAppProfileRequest,
Google.Longrunning.Operation
rpc :DeleteAppProfile, Google.Bigtable.Admin.V2.DeleteAppProfileRequest, Google.Protobuf.Empty
rpc :GetIamPolicy, Google.Iam.V1.GetIamPolicyRequest, Google.Iam.V1.Policy
rpc :SetIamPolicy, Google.Iam.V1.SetIamPolicyRequest, Google.Iam.V1.Policy
rpc :TestIamPermissions,
Google.Iam.V1.TestIamPermissionsRequest,
Google.Iam.V1.TestIamPermissionsResponse
end
defmodule Google.Bigtable.Admin.V2.BigtableInstanceAdmin.Stub do
@moduledoc false
# Client stub for the BigtableInstanceAdmin service defined above in this file.
use GRPC.Stub, service: Google.Bigtable.Admin.V2.BigtableInstanceAdmin.Service
end
|
lib/google/bigtable/admin/v2/bigtable_instance_admin.pb.ex
| 0.784236 | 0.422624 |
bigtable_instance_admin.pb.ex
|
starcoder
|
% Allow Erlang records to be imported into Elixir. For example,
% we can retrieve the `file_info` record from Erlang as follow:
%
% Code.require "record"
%
% module FileInfo
% mixin Record
% record 'file_info, 'from_lib: "kernel/include/file.hrl"
% end
%
% % Manually access the Erlang file:read_file_info method
% % passing the current file as a char list.
% { 'ok, info } = Erlang.file.read_file_info(__FILE__.to_char_list)
%
% % Create a new FileInfo object based on the tuple returned above
% record = #FileInfo(info)
%
% % Profit by accessing the record info
% record.access % => 'read_write
%
% Besides defining an `attr_accessor` for each field in the record, that
% allows you to read and update each attribute, you also have the following
% methods available to manipulate the record:
%
% * __bound__() - creates an instance for this record using the default
% values given in the record declaration.
%
% * __bound__(tuple) - receives a tuple that matches the record. If the first
% element of the tuple does not match the record name, or the tuple size does
% not match the number of fields in the record, an error is raised.
%
% * __bound__(ordered_dict) - receives an ordered dict and creates a new record
% using the values for the given keys in the dict and using the default values
% for the keys that were not supplied.
%
% * update(ordered_dict) - receives an ordered dict that updates each element
% in the record.
%
% Besides the methods above, calling `record` adds the following methods for reflection:
%
% * record_name
% * record_keys
% * record_defaults
% * record_size
%
module Record
% Helper methods for the Record module. They are basically
% methods that handle retrieving record definitions from
% Erlang files.
module Helpers
% Retrieve a record definition from an Erlang file using
% the same lookup as the *include* attribute from Erlang modules.
def retrieve(name, 'from: string)
file = string.to_char_list
% If the runtime cannot resolve the file, fall back to the
% literal (relative) path as given.
case Erlang.code.where_is_file(file)
match 'non_existing
realfile = file
match realfile
end
retrieve_record(name, realfile)
end
% Retrieve a record definition from an Erlang file using
% the same lookup as the *include_lib* attribute from Erlang modules.
def retrieve(name, 'from_lib: file)
% Access the mixin directly because File depends on this logic.
[app|path] = File.split(file)
case Erlang.code.lib_dir(app.to_char_list)
match {'error, _}
error {'norecord, {name, file}}
match libpath
retrieve_record name, File.join([libpath|path])
end
end
private
% Retrieve the record with the given name from the given file
% Raises {'norecord, { name, file } } if the record does not exist.
def retrieve_record(name, file)
case retrieve_from_file(file).keyfind(name, 0)
match false
error {'norecord, {name, file}}
match record
parse_record(record)
end
end
% Parse the given file and retrieve all existent records.
def retrieve_from_file(file)
[record for {'attribute, _, 'record, record} in read_file(file)]
end
% Read a file and return its abstract syntax form that also
% includes record and other preprocessor modules. This is done
% by using Erlang's epp_dodger.
def read_file(file)
case Erlang.epp_dodger.quick_parse_file(file)
match {'ok, form}
form
match error
error.(error)
end
end
% Parse a tuple with name and fields and returns a list of second order tuples
% where the first element is the field and the second is its default value.
% The field/default pairs are rebuilt as an abstract 'cons chain and then
% evaluated with erl_eval so default-value expressions are resolved.
def parse_record({_name, fields})
tuples = fields.map -> (f) parse_field(f)
cons = tuples.foldr { 'nil, 0 }, -> (tuple, acc) { 'cons, 0, tuple, acc }
{ 'value, list, [] } = Erlang.erl_eval.expr(cons, [])
list
end
def parse_field({'typed_record_field, record_field, _type})
parse_field(record_field)
end
% A field with no default gets the atom 'nil as its default.
def parse_field({'record_field, _, key })
{'tuple, 0, [key, {'atom, 0, 'nil}]}
end
def parse_field({'record_field, _, key, value })
{'tuple, 0, [key, value]}
end
end
module Definition
% Method to be used in modules that adds Record as mixin.
% It accepts a name and 'from or 'from_lib as option as
% described in `Record#retrieve`.
%
% For each record field, this method defines an `attr_accessor`
% and also defines the following methods:
%
% * record_name
% * record_keys
% * record_defaults
% * record_size
%
def record(name, options)
pairs = Record::Helpers.retrieve(name, options)
{ keys, values } = pairs.unzip
self.attr_accessor keys
self.module_eval __FILE__, __LINE__ + 1, ~~ELIXIR
def record_name
'#{name}
end
def record_keys
#{keys.inspect}
end
def record_defaults
#{values.inspect}
end
def record_size
#{keys.size}
end
~~
end
end
def __mixed_in__(base)
base.using Record::Definition
end
% If it receives a tuple as argument, it checks if the tuple matches the record.
% If the first element of the tuple does not match the record name, or the tuple
% size does not match the number of fields in the record, a `'badrecord` error is raised.
%
% If the argument is an ordered dict, it creates a new record using the values for
% the given keys in the dict and using the default values for the keys that were
% not supplied.
%
% If the given argument is none of the above, a 'badarg error is raised.
def __bound__(object)
if object.__module_name__ == 'Tuple::Behavior
% size is keys + 1 because the tuple carries the record name as element 0.
if object[0] == self.record_name && object.size == self.record_keys.size + 1
[_|pairs] = object.to_list
@(OrderedDict.from_list self.record_keys.zip(pairs))
else
self.error { 'badrecord, object }
end
else
@(default_values.merge(object))
end
end
% Creates a new record using the default values as defaults.
def __bound__()
@(default_values)
end
% Behave like a dictionary.
def [](key)
self.get_ivar(key)
end
% Update the record using the given ordered dict *values*.
def update(values)
@(values)
end
private
def default_values
OrderedDict.from_list self.record_keys.zip(self.record_defaults)
end
end
|
lib/record.ex
| 0.637595 | 0.504578 |
record.ex
|
starcoder
|
defmodule Stripe.Customer do
  @moduledoc """
  Work with Stripe customer objects.

  Supports creating, retrieving, updating, deleting and listing customers.
  Does not yet render lists or take options.

  Stripe API reference: https://stripe.com/docs/api#customer
  """

  @type t :: %__MODULE__{}

  defstruct [
    :id, :object,
    :account_balance, :business_vat_id, :created, :currency,
    :default_source, :delinquent, :description, :discount, :email,
    :livemode, :metadata, :shipping, :sources, :subscriptions
  ]

  @plural_endpoint "customers"

  # Allowed operations per address sub-field (used by shipping.address below).
  @address_map %{
    city: [:create, :retrieve, :update], #required
    country: [:create, :retrieve, :update],
    line1: [:create, :retrieve, :update],
    line2: [:create, :retrieve, :update],
    postal_code: [:create, :retrieve, :update],
    state: [:create, :retrieve, :update]
  }

  # Which operations may read/write each customer attribute.
  @schema %{
    account_balance: [:retrieve, :update],
    business_vat_id: [:create, :retrieve, :update],
    created: [:retrieve],
    coupon: [:create, :retrieve, :update],
    currency: [:retrieve],
    default_source: [:retrieve, :update],
    delinquent: [:retrieve],
    description: [:create, :retrieve, :update],
    discount: [:retrieve],
    email: [:create, :retrieve, :update],
    livemode: [:retrieve],
    metadata: [:create, :retrieve, :update],
    plan: [:create, :update],
    quantity: [:create, :update],
    shipping: %{
      address: @address_map
    },
    source: [:create, :retrieve, :update],
    sources: [:retrieve],
    subscriptions: [:retrieve],
    tax_percent: [:create],
    trial_end: [:create]
  }

  # Keys that may be explicitly nulled out on update.
  @nullable_keys [
    :business_vat_id, :description, :email, :metadata
  ]

  @doc """
  Create a customer from a map of attributes.
  """
  @spec create(map, Keyword.t) :: {:ok, t} | {:error, Stripe.api_error_struct}
  def create(changes, opts \\ []) do
    Stripe.Request.create(@plural_endpoint, changes, @schema, opts)
  end

  @doc """
  Retrieve a single customer by `id`.
  """
  @spec retrieve(binary, Keyword.t) :: {:ok, t} | {:error, Stripe.api_error_struct}
  def retrieve(id, opts \\ []) do
    Stripe.Request.retrieve(customer_endpoint(id), opts)
  end

  @doc """
  Update the customer identified by `id` with a map of changes.
  """
  @spec update(binary, map, list) :: {:ok, t} | {:error, Stripe.api_error_struct}
  def update(id, changes, opts \\ []) do
    Stripe.Request.update(customer_endpoint(id), changes, @schema, @nullable_keys, opts)
  end

  @doc """
  Delete the customer identified by `id`.
  """
  @spec delete(binary, list) :: :ok | {:error, Stripe.api_error_struct}
  def delete(id, opts \\ []) do
    Stripe.Request.delete(customer_endpoint(id), %{}, opts)
  end

  @doc """
  List all customers.
  """
  @spec list(map, Keyword.t) :: {:ok, Stripe.List.t} | {:error, Stripe.api_error_struct}
  def list(params \\ %{}, opts \\ []) do
    # NOTE(review): argument order (params first) differs from retrieve/2 above —
    # presumably a distinct Stripe.Request.retrieve/3 clause; confirm against
    # Stripe.Request before changing.
    Stripe.Request.retrieve(params, @plural_endpoint, opts)
  end

  # Builds the "customers/<id>" resource path for a single customer.
  defp customer_endpoint(id), do: @plural_endpoint <> "/" <> id
end
|
lib/stripe/customer.ex
| 0.747616 | 0.627894 |
customer.ex
|
starcoder
|
defmodule Guardian do
@moduledoc """
A module that provides JWT based authentication for Elixir applications.
Guardian provides the framework for using JWT in any Elixir application,
web based or otherwise, where authentication is required.
The base unit of authentication currency is implemented using JWTs.
## Configuration
config :guardian, Guardian,
allowed_algos: ["HS512", "HS384"],
issuer: "MyApp",
ttl: { 30, :days },
serializer: MyApp.GuardianSerializer,
secret_key: "<KEY>"
"""
import Guardian.Utils
@default_algos ["HS512"]
@default_token_type "access"
@doc """
Returns the current default token type.
"""
def default_token_type, do: @default_token_type
@doc """
Encode and sign a JWT from a resource.
The resource will be run through the configured serializer
to obtain a value suitable for storage inside a JWT.
"""
@spec encode_and_sign(any) :: {:ok, String.t, map} |
{:error, any}
def encode_and_sign(object), do: encode_and_sign(object, @default_token_type, %{})
@doc """
Like encode_and_sign/1 but also accepts the type (encoded to the typ key)
for the JWT
The type can be anything but suggested is "access".
"""
@spec encode_and_sign(any, atom | String.t) :: {:ok, String.t, map} |
{:error, any}
def encode_and_sign(object, type), do: encode_and_sign(object, type, %{})
@doc false
def encode_and_sign(object, type, claims) when is_list(claims) do
encode_and_sign(object, type, Enum.into(claims, %{}))
end
@doc """
Like encode_and_sign/2 but also encode anything found
inside the claims map into the JWT.
To encode permissions into the token, use the `:perms` key
and pass it a map with the relevant permissions (must be configured)
### Example
Guardian.encode_and_sign(
user,
:access,
perms: %{ default: [:read, :write] }
)
"""
@spec encode_and_sign(any, atom | String.t, map) :: {:ok, String.t, map} |
{:error, any}
def encode_and_sign(object, type, claims) do
case build_claims(object, type, claims) do
{:ok, claims_for_token} ->
called_hook = call_before_encode_and_sign_hook(
object,
type,
claims_for_token
)
encode_from_hooked(called_hook)
{:error, reason} -> {:error, reason}
end
end
defp encode_from_hooked({:ok, {resource, type, claims_from_hook}}) do
{:ok, jwt} = encode_claims(claims_from_hook)
case call_after_encode_and_sign_hook(
resource,
type,
claims_from_hook, jwt
) do
{:ok, _} -> {:ok, jwt, claims_from_hook}
{:error, reason} -> {:error, reason}
end
end
defp encode_from_hooked({:error, _reason} = error), do: error
@doc false
def hooks_module, do: config(:hooks, Guardian.Hooks.Default)
@doc """
Revokes the current token.
This provides a hook to revoke.
The logic for revocation of belongs in a Guardian.Hook.on_revoke
This function is less efficient that revoke!/2.
If you have claims, you should use that.
"""
@spec revoke!(String.t, map) :: :ok | {:error, any}
def revoke!(jwt, params \\ %{}) do
case decode_and_verify(jwt, params) do
{:ok, claims} -> revoke!(jwt, claims, params)
_ -> :ok
end
end
@doc """
Revokes the current token.
This provides a hook to revoke.
The logic for revocation of belongs in a Guardian.Hook.on_revoke
"""
@spec revoke!(String.t, map, map) :: :ok | {:error, any}
def revoke!(jwt, claims, _params) do
case Guardian.hooks_module.on_revoke(claims, jwt) do
{:ok, _} -> :ok
{:error, reason} -> {:error, reason}
end
end
@doc """
Refresh the token. The token will be renewed and receive a new:
* `jti` - JWT id
* `iat` - Issued at
* `exp` - Expiry time.
* `nbf` - Not valid before time
The current token will be revoked when the new token is successfully created.
Note: A valid token must be used in order to be refreshed.
"""
@spec refresh!(String.t) :: {:ok, String.t, map} | {:error, any}
def refresh!(jwt), do: refresh!(jwt, %{}, %{})
@doc """
As refresh!/1 but allows the claims to be updated.
Specifically useful is the ability to set the ttl of the token.
Guardian.refresh(existing_jwt, existing_claims, %{ttl: { 5, :minutes}})
Once the new token is created, the old one will be revoked.
"""
@spec refresh!(String.t, map, map) :: {:ok, String.t, map} |
{:error, any}
def refresh!(jwt, claims, params \\ %{}) do
case decode_and_verify(jwt, params) do
{:ok, found_claims} ->
do_refresh!(jwt, Map.merge(found_claims, claims), params)
{:error, reason} -> {:error, reason}
end
end
defp do_refresh!(original_jwt, original_claims, params) do
params = Enum.into(params, %{})
new_claims = original_claims
|> Map.drop(["jti", "iat", "exp", "nbf"])
|> Map.merge(params)
|> Guardian.Claims.jti
|> Guardian.Claims.nbf
|> Guardian.Claims.iat
|> Guardian.Claims.ttl
type = Map.get(new_claims, "typ")
{:ok, resource} = Guardian.serializer.from_token(new_claims["sub"])
case encode_and_sign(resource, type, new_claims) do
{:ok, jwt, full_claims} ->
_ = revoke!(original_jwt, peek_claims(original_jwt), %{})
{:ok, jwt, full_claims}
{:error, reason} -> {:error, reason}
end
end
@doc """
Exchange a token with type 'from_type' for a token with type 'to_type', the
claims(apart from "jti", "iat", "exp", "nbf" and "typ) will persists though the
exchange
Can be used to get an access token from a refresh token
Guardian.exchange(existing_jwt, "refresh", "access")
The old token wont be revoked after the exchange
"""
@spec exchange(String.t, String.t, String.t) :: {:ok, String.t, Map} |
{:error, any}
def exchange(old_jwt, from_typ, to_typ) do
case decode_and_verify(old_jwt) do
{:ok, found_claims} -> do_exchange(from_typ, to_typ, found_claims)
{:error, reason} -> {:error, reason}
end
end
@doc false
defp do_exchange(from_typ, to_typ, original_claims) do
if correct_typ?(original_claims, from_typ) do
{:ok, resource} = Guardian.serializer.from_token(original_claims["sub"])
new_claims = original_claims
|> Map.drop(["jti", "iat", "exp", "nbf", "typ"])
case encode_and_sign(resource, to_typ, new_claims) do
{:ok, jwt, full_claims} -> {:ok, jwt, full_claims}
{:error, reason} -> {:error, reason}
end
else
{:error, :incorrect_token_type}
end
end
@doc false
defp correct_typ?(claims, typ) when is_binary(typ) do
Map.get(claims, "typ") === typ
end
@doc false
defp correct_typ?(claims, typ) when is_atom(typ) do
Map.get(claims, "typ") === to_string(typ)
end
@doc false
defp correct_typ?(claims, typ_list) when is_list(typ_list) do
typ = Map.get(claims, "typ")
typ_list |> Enum.any?(&(&1 === typ))
end
@doc false
defp correct_typ?(_claims, _typ) do
false
end
@doc """
Fetch the configured serializer module
"""
@spec serializer() :: atom
def serializer, do: config(:serializer)
@doc """
Verify the given JWT. This will decode_and_verify via decode_and_verify/2
"""
@spec decode_and_verify(String.t) :: {:ok, map} |
{:error, any}
def decode_and_verify(jwt), do: decode_and_verify(jwt, %{})
@doc """
Verify the given JWT.
"""
@spec decode_and_verify(String.t, map) :: {:ok, map} |
{:error, any}
def decode_and_verify(jwt, params) do
params = if verify_issuer?() do
params
|> stringify_keys
|> Map.put_new("iss", issuer())
else
params
end
params = stringify_keys(params)
{secret, params} = strip_value(params, "secret")
try do
with {:ok, claims} <- decode_token(jwt, secret),
{:ok, verified_claims} <- verify_claims(claims, params),
{:ok, {claims, _}} <- Guardian.hooks_module.on_verify(verified_claims, jwt),
do: {:ok, claims}
rescue
e ->
{:error, e}
end
end
@doc """
If successfully verified, returns the claims encoded into the JWT.
Raises otherwise
"""
@spec decode_and_verify!(String.t) :: map
def decode_and_verify!(jwt), do: decode_and_verify!(jwt, %{})
@doc """
If successfully verified, returns the claims encoded into the JWT.
Raises otherwise
"""
@spec decode_and_verify!(String.t, map) :: map
def decode_and_verify!(jwt, params) do
case decode_and_verify(jwt, params) do
{:ok, claims} -> claims
{:error, reason} -> raise to_string(reason)
end
end
@doc """
The configured issuer. If not configured, defaults to the node that issued.
"""
@spec issuer() :: String.t
def issuer, do: config(:issuer, to_string(node()))
defp verify_issuer?, do: config(:verify_issuer, false)
@doc false
def config do
:guardian
|> Application.get_env(Guardian)
|> check_config
end
@doc false
def check_config(nil), do: raise "Guardian is not configured"
def check_config(cfg) do
case Keyword.has_key?(cfg, :serializer) do
false -> raise "Guardian requires a serializer"
true -> cfg
end
end
@doc false
def config(key, default \\ nil),
do: config() |> Keyword.get(key, default) |> resolve_config(default)
defp resolve_config({:system, var_name}, default),
do: System.get_env(var_name) || default
defp resolve_config(value, _default),
do: value
@doc """
Read the header of the token.
This is not a verified read, it does not check the signature.
"""
def peek_header(token) do
JOSE.JWT.peek_protected(token).fields
end
@doc """
Read the claims of the token.
This is not a verified read, it does not check the signature.
"""
def peek_claims(token) do
JOSE.JWT.peek_payload(token).fields
end
defp jose_jws(headers) do
Map.merge(%{"alg" => hd(allowed_algos())}, headers)
end
defp jose_jwk(the_secret = %JOSE.JWK{}), do: the_secret
defp jose_jwk(the_secret) when is_binary(the_secret), do: JOSE.JWK.from_oct(the_secret)
defp jose_jwk(the_secret) when is_map(the_secret), do: JOSE.JWK.from_map(the_secret)
defp jose_jwk({mod, fun}), do: jose_jwk(:erlang.apply(mod, fun, []))
defp jose_jwk({mod, fun, args}), do: jose_jwk(:erlang.apply(mod, fun, args))
defp jose_jwk(nil), do: jose_jwk(config(:secret_key) || false)
defp encode_claims(claims) do
{headers, claims} = strip_value(claims, "headers", %{})
{secret, claims} = strip_value(claims, "secret")
{_, token} = secret
|> jose_jwk()
|> JOSE.JWT.sign(jose_jws(headers), claims)
|> JOSE.JWS.compact
{:ok, token}
end
defp decode_token(token, secret) do
secret = secret || config(:secret_key)
case JOSE.JWT.verify_strict(jose_jwk(secret), allowed_algos(), token) do
{true, jose_jwt, _} -> {:ok, jose_jwt.fields}
{false, _, _} -> {:error, :invalid_token}
end
end
defp allowed_algos, do: config(:allowed_algos, @default_algos)
# Validates every claim key via the configured verify module
# (Guardian.JWT by default).
def verify_claims(claims, params) do
  verify_module = config(:verify_module, Guardian.JWT)
  verify_claims(claims, Map.keys(claims), verify_module, params)
end
# Recursively validates each claim key; stops at the first error.
defp verify_claims(claims, [], _module, _params), do: {:ok, claims}

defp verify_claims(claims, [key | rest], module, params) do
  case module.validate_claim(key, claims, params) do
    :ok -> verify_claims(claims, rest, module, params)
    {:error, _reason} = error -> error
  end
end
# Builds the full claim set for `object`: serializes it to a subject, then
# layers app claims, type, subject, permissions, TTL and audience on top of
# the caller-supplied claims.
defp build_claims(object, type, claims) do
  case Guardian.serializer.for_token(object) do
    {:error, reason} ->
      {:error, reason}

    {:ok, sub} ->
      prepared =
        claims
        |> stringify_keys
        |> set_permissions
        |> Guardian.Claims.app_claims
        |> Guardian.Claims.typ(type)
        |> Guardian.Claims.sub(sub)
        |> set_ttl
        |> set_aud_if_nil(sub)

      {:ok, prepared}
  end
end
# Delegates to the configured hooks module before encoding/signing.
defp call_before_encode_and_sign_hook(object, type, claims) do
  hooks = Guardian.hooks_module
  hooks.before_encode_and_sign(object, type, claims)
end
# Delegates to the configured hooks module after encoding/signing.
defp call_after_encode_and_sign_hook(resource, type, claims, jwt) do
  hooks = Guardian.hooks_module
  hooks.after_encode_and_sign(resource, type, claims, jwt)
end
# Converts the transient "perms" entry into the proper permissions claim,
# then removes "perms" from the result.
defp set_permissions(claims) do
  perms = Map.get(claims, "perms", %{})
  with_permissions = Guardian.Claims.permissions(claims, perms)
  Map.delete(with_permissions, "perms")
end
# Applies the TTL claim transformation, then drops the transient "ttl" entry.
defp set_ttl(claims) do
  Map.delete(Guardian.Claims.ttl(claims), "ttl")
end
# Defaults the "aud" claim to `value` only when it is currently nil/absent.
def set_aud_if_nil(claims, value) do
  case Map.get(claims, "aud") do
    nil -> Guardian.Claims.aud(claims, value)
    _present -> claims
  end
end
# Pops `key` out of `map`: returns {value_or_default, map_without_key}.
defp strip_value(map, key, default \\ nil) do
  {Map.get(map, key, default), Map.delete(map, key)}
end
end
|
backend/deps/guardian/lib/guardian.ex
| 0.836254 | 0.462412 |
guardian.ex
|
starcoder
|
defmodule GimTest.Animal do
  @moduledoc false
  use Gim.Schema
  # alias GimTest.Movies.{Genre, Person, Performance}

  # Column order of AnimalData.csv; zipped positionally with each row in map/1.
  @keys [
    :impound_no,
    :intake_date,
    :intake_type,
    :animal_type,
    :neutered_status,
    :sex,
    :age_intake,
    :condition,
    :breed,
    :aggressive,
    :independent,
    :intelligent,
    :loyal,
    :social,
    :good_with_kids,
    :max_life_expectancy,
    :max_weight,
    :dog_group,
    :color,
    :weight,
    :lab_test,
    :outcome_date,
    :outcome_type,
    :days_shelter
  ]
  # Fields encoded as "Y"/"N" in the CSV.
  @boolean [
    :neutered_status,
    :aggressive,
    :independent,
    :intelligent,
    :loyal,
    :social,
    :good_with_kids
  ]
  # Fields parsed as integers.
  @ints [
    :age_intake,
    :max_life_expectancy,
    :max_weight,
    :days_shelter
  ]
  # Fields parsed as floats.
  @floats [:weight]
  schema do
    property(:impound_no, index: :primary)
    property(:intake_date)
    property(:intake_type, index: true)
    property(:animal_type, index: true)
    property(:neutered_status)
    property(:sex, index: true)
    property(:age_intake)
    property(:condition)
    property(:breed)
    property(:aggressive)
    property(:independent)
    property(:intelligent)
    property(:loyal)
    property(:social)
    property(:good_with_kids)
    property(:max_life_expectancy)
    property(:max_weight)
    property(:dog_group)
    property(:color)
    property(:weight)
    property(:lab_test)
    property(:outcome_date)
    property(:outcome_type)
    property(:days_shelter)
  end

  @doc """
  Returns `{local_path, download_url, md5_sum}` describing the data set file.
  """
  def data_info do
    path = Path.join(["etc", "AnimalData.csv"])

    # HTTPS, not HTTP: the plain-HTTP URL is redirected by GitHub and is
    # open to tampering in transit before the MD5 check ever happens.
    url =
      "https://raw.githubusercontent.com/KarenWest/FundamentalsOfDataAnalysisInLanguageR/master/AnimalData.csv"

    md5_sum = "78cc1365ee7f9798d6dfd02cb35aab74"
    {path, url, md5_sum}
  end

  @doc """
  Streams the CSV file and lazily maps each row to an `%GimTest.Animal{}`.
  """
  def data do
    # Reuse data_info/0 so the file location is defined in exactly one place.
    {path, _url, _md5_sum} = data_info()

    path
    |> File.stream!([])
    |> NimbleCSV.RFC4180.parse_stream()
    # |> Stream.filter(fn [head | _tail] -> IO.inspect(head) != "Impound.No" end)
    |> Stream.map(&map/1)
  end

  @doc """
  Builds a struct from one 24-column CSV row (a list of raw strings).
  """
  def map(animal) when is_list(animal) and length(animal) == 24 do
    data = Enum.map(List.zip([@keys, animal]), &cast/1)
    struct(__MODULE__, data)
  end

  # Casts a single {key, raw_string} pair to its typed value.
  def cast(pair)

  # "NA" means missing, regardless of the column's type.
  def cast({key, "NA"}) do
    {key, nil}
  end

  def cast({key, value}) when key in @ints do
    # Assert the whole string parses; crash loudly on malformed data.
    {value, ""} = Integer.parse(value)
    {key, value}
  end

  def cast({key, value}) when key in @floats do
    {value, ""} = Float.parse(value)
    {key, value}
  end

  def cast({key, "Y"}) when key in @boolean do
    {key, true}
  end

  def cast({key, "N"}) when key in @boolean do
    {key, false}
  end

  def cast({key, "Male"}) when key == :sex do
    {key, :male}
  end

  def cast({key, "Female"}) when key == :sex do
    {key, :female}
  end

  # Everything else is kept as the raw string.
  def cast({_, _} = pair) do
    pair
  end
end
|
test/support/animal.ex
| 0.733738 | 0.449634 |
animal.ex
|
starcoder
|
defmodule Serum.Plugins.TableOfContents do
  @moduledoc """
  A Serum plugin that inserts a table of contents.

  ## Using the Plugin

  First, add this plugin to your `serum.exs`:

      %{
        plugins: [
          #{__MODULE__ |> to_string() |> String.replace_prefix("Elixir.", "")}
        ]
      }

  This plugin works with both pages(`.md`, `.html`, and `.html.eex`) and blog
  posts(`.md`). Insert the `<serum-toc>` tag at the position you want to
  display a table of contents at.

      <serum-toc start="2" end="4"></serum-toc>

  The `start` and `end` attributes define a range of heading level this plugin
  recognizes. In the case of the above example, `<h1>`, `<h5>`, and `<h6>` tags
  are ignored when generating a table of contents.

  ## Notes

  You may use `<serum-toc>` tag more than once in a single page. However, all
  occurrences of this tag will be replaced with a table of contents generated
  using the attributes of the first one. That is, for example, all three tags
  in the code below expand to the same table of contents, showing a 2-level
  deep list.

      <serum-toc start="2" end="3"></serum-toc>
      ...
      <serum-toc></serum-toc>
      ...
      <serum-toc></serum-toc>

  It's recommended that you wrap a `<serum-toc>` tag with a `<div>` tag when
  using in a markdown file, to ensure a well-formed structure of HTML output.

      <div><serum-toc ...></serum-toc></div>

  And finally, make sure you close every `<serum-toc>` tag properly
  with `</serum-toc>`.
  """

  @behaviour Serum.Plugin

  # Plugin metadata callbacks required by the Serum.Plugin behaviour.
  def name, do: "Table of Contents"
  def version, do: "1.0.0"
  def elixir, do: ">= 1.6.0"
  def serum, do: ">= 0.13.0"
  def description, do: "Inserts a table of contents into pages or posts."

  def implements,
    do: [
      :rendered_fragment
    ]

  # Only page and post fragments are processed; all others pass through.
  def rendered_fragment(frag)

  def rendered_fragment(%{metadata: %{type: :page}, data: html} = frag) do
    new_html = insert_toc(html)
    {:ok, %{frag | data: new_html}}
  end

  def rendered_fragment(%{metadata: %{type: :post}, data: html} = frag) do
    new_html = insert_toc(html)
    {:ok, %{frag | data: new_html}}
  end

  def rendered_fragment(frag), do: {:ok, frag}

  # Builds a TOC from the heading tags and replaces every <serum-toc> tag
  # with it; the attributes of the FIRST <serum-toc> determine the range.
  @spec insert_toc(binary()) :: binary()
  defp insert_toc(html) do
    html_tree = Floki.parse(html)

    case Floki.find(html_tree, "serum-toc") do
      # No <serum-toc> tag at all: return the input HTML untouched.
      [] ->
        html

      [{"serum-toc", attr_list, _} | _] ->
        {start, end_} = get_range(attr_list)
        # State tuple: {start level, end level, previous level,
        #               section counters (reversed), TOC items (reversed)}
        state = {start, end_, start, [0], []}
        {new_tree, new_state} = traverse(html_tree, state, &tree_fun/2)
        items = new_state |> elem(4) |> Enum.reverse()
        toc = {"ul", [{"class", "serum-toc"}], items}

        # Second pass: swap every <serum-toc> node for the generated list.
        new_tree
        |> traverse(nil, fn
          {"serum-toc", _, _}, _ -> {toc, nil}
          x, _ -> {x, nil}
        end)
        |> elem(0)
        |> Floki.raw_html()
    end
  end

  # Reads the `start`/`end` attributes, defaulting to 1..6 and ensuring
  # start <= end.
  @spec get_range([{binary(), binary()}]) :: {integer(), integer()}
  defp get_range(attr_list) do
    attr_map = Map.new(attr_list)
    start = attr_map["start"]
    end_ = attr_map["end"]
    start = (start && parse_h_level(start, 1)) || 1
    end_ = (end_ && parse_h_level(end_, 6)) || 6
    end_ = max(start, end_)
    {start, end_}
  end

  # Parses a heading-level string, clamping to 1..6; falls back to `default`
  # on malformed input.
  @spec parse_h_level(binary(), integer()) :: integer()
  defp parse_h_level(str, default) do
    case Integer.parse(str) do
      {level, ""} -> max(1, min(level, 6))
      _ -> default
    end
  end

  # Depth-first traversal threading `state` through `fun`; children are
  # rewritten before the parent node is handed to `fun`.
  @spec traverse(
          Floki.html_tree(),
          term(),
          (Floki.html_tree(), term() -> {Floki.html_tree(), term()})
        ) :: {Floki.html_tree(), term()}
  defp traverse(tree, state, fun)

  defp traverse({tag, attrs, children}, state, fun) do
    {new_children, new_state} = traverse(children, state, fun)
    fun.({tag, attrs, new_children}, new_state)
  end

  defp traverse([_ | _] = tags, state, fun) do
    {new_tags, new_state} =
      Enum.reduce(tags, {[], state}, fn tag, {acc, st} ->
        {new_tag, new_st} = traverse(tag, st, fun)
        {[new_tag | acc], new_st}
      end)

    # Flatten because a callback may replace one node with a list of nodes
    # (tree_fun/2 returns [bookmark, heading]).
    {new_tags |> Enum.reverse() |> List.flatten(), new_state}
  end

  defp traverse(x, state, _fun), do: {x, state}

  # For each <h1>..<h6> inside the configured range: prepend a named anchor,
  # record a numbered TOC entry, and advance the section counters.
  @spec tree_fun(Floki.html_tree(), term()) :: {Floki.html_tree(), term()}
  defp tree_fun(tree, state)

  defp tree_fun({<<?h::8, ch::8, _::binary>>, _, children} = tree, state) when ch in ?1..?6 do
    {start, end_, prev_level, counts, items} = state
    level = ch - ?0

    if level >= start and level <= end_ do
      new_counts = update_counts(counts, level, prev_level)
      # Section number such as "1.2.3"; counters are stored reversed.
      num_dot = new_counts |> Enum.reverse() |> Enum.join(".")
      span = {"span", [{"class", "number"}], [num_dot]}
      # The heading contents get wrapped in a link below, so strip any <a>
      # tags already inside the heading first.
      {contents, _} = traverse(children, nil, &strip_a_tags/2)
      link = {"a", [{"href", "#s_#{num_dot}"}], [span | contents]}
      item = {"li", [{"class", "indent-#{level - start}"}], [link]}
      bookmark = {"a", [{"name", "s_#{num_dot}"}], []}
      new_state = {start, end_, level, new_counts, [item | items]}
      {[bookmark, tree], new_state}
    else
      {tree, state}
    end
  end

  defp tree_fun(x, state), do: {x, state}

  # Replaces each <a> element with its children, dropping the tag itself.
  @spec strip_a_tags(Floki.html_tree(), term()) :: {Floki.html_tree(), term()}
  defp strip_a_tags(tree, state)
  defp strip_a_tags({"a", _, children}, state), do: {children, state}
  defp strip_a_tags(x, state), do: {x, state}

  # Advances the hierarchical section counters when moving between heading
  # levels: same level increments, a shallower heading drops the deeper
  # counters and increments, a deeper heading pushes fresh 1s.
  @spec update_counts([integer()], integer(), integer()) :: [integer()]
  defp update_counts(counts, level, prev_level) do
    case level - prev_level do
      0 ->
        [x | xs] = counts
        [x + 1 | xs]

      diff when diff < 0 ->
        [x | xs] = Enum.drop(counts, -diff)
        [x + 1 | xs]

      diff when diff > 0 ->
        List.duplicate(1, diff) ++ counts
    end
  end
end
|
lib/serum/plugins/table_of_contents.ex
| 0.865281 | 0.608769 |
table_of_contents.ex
|
starcoder
|
defmodule Locale do
  @moduledoc """
  Utilities for working with locales (in the form of `en-US`).

  The main goal is to be able to display a list of languages in their own spelling (`en-US` is "American English", `fr-CA` is "Français canadien").

  ## The Locale struct

  The fields are:

  * `direction` - In what direction is that language written
  * `english_name` - The English name of this language
  * `locale_code` - The locale code of this Locale
  * `name` - The name of that language in its own spelling
  """

  @type locale_code() :: String.t()
  @type direction_type() :: :left_to_right | :right_to_left | :unknown_direction
  @type locale() :: %Locale{
          direction: direction_type(),
          english_name: String.t(),
          locale_code: locale_code(),
          name: String.t()
        }

  @enforce_keys [:direction, :english_name, :locale_code, :name]
  defstruct [:direction, :english_name, :locale_code, :name]

  @spec locale?(locale_code()) :: boolean()
  @spec locale(locale_code()) :: {:ok, locale()} | {:error, :locale_not_found}

  @doc """
  Returns true if the locale code is supported, false otherwise.

  ## Examples

      iex> Locale.locale?("en-US")
      true

      iex> Locale.locale?("not-a-locale")
      false
  """
  def locale?(locale_code)

  @doc """
  Returns a tuple {:ok, locale()} where `locale()` is a `Locale` struct detailing the locale from the locale code
  passed or a tuple {:error, :locale_not_found}

  ## Examples

      iex> Locale.locale("en-US")
      {:ok,
        %Locale{
          direction: :left_to_right,
          english_name: "American English",
          locale_code: "en-US",
          name: "American English"
        }
      }

      iex> Locale.locale("fr-FR")
      {:ok,
        %Locale{
          direction: :left_to_right,
          english_name: "French",
          locale_code: "fr-FR",
          name: "Français"
        }
      }
  """
  def locale(locale_code)

  # Compile-time clause generation: for every CLDR language — and every
  # language-script, language-territory, and language-script-territory
  # combination — emit literal `locale/1` and `locale?/1` clauses so that a
  # runtime lookup is a direct pattern match on the locale-code string.
  # LocaleBuilder.locale/1 runs at compile time; its result is embedded via
  # Macro.escape/1.
  for language <- CLDR.languages() do
    def locale(unquote(language)), do: unquote(Macro.escape(LocaleBuilder.locale(language)))
    def locale?(unquote(language)), do: true

    scripts = CLDR.script_for_language(language)

    Enum.each(scripts, fn script ->
      locale = language <> "-" <> script
      def locale(unquote(locale)), do: unquote(Macro.escape(LocaleBuilder.locale(locale)))
      def locale?(unquote(locale)), do: true
    end)

    territories = CLDR.territories_for_language(language)

    Enum.each(territories, fn territory ->
      locale = language <> "-" <> territory
      def locale(unquote(locale)), do: unquote(Macro.escape(LocaleBuilder.locale(locale)))
      def locale?(unquote(locale)), do: true
    end)

    Enum.each(scripts, fn script ->
      Enum.each(territories, fn territory ->
        locale = language <> "-" <> script <> "-" <> territory
        def locale(unquote(locale)), do: unquote(Macro.escape(LocaleBuilder.locale(locale)))
        def locale?(unquote(locale)), do: true
      end)
    end)
  end

  # Catch-all clauses for any binary that matched none of the generated
  # clauses above.
  def locale?(locale_code) when is_binary(locale_code), do: false
  def locale(locale_code) when is_binary(locale_code), do: {:error, :locale_not_found}
end
|
lib/locale.ex
| 0.920191 | 0.514522 |
locale.ex
|
starcoder
|
defmodule AWS.GlobalAccelerator do
@moduledoc """
AWS Global Accelerator
This is the *AWS Global Accelerator API Reference*.
This guide is for developers who need detailed information about AWS Global
Accelerator API actions, data types, and errors. For more information about
Global Accelerator features, see the [AWS Global Accelerator Developer Guide](https://docs.aws.amazon.com/global-accelerator/latest/dg/Welcome.html).
AWS Global Accelerator is a service in which you create *accelerators* to
improve the performance of your applications for local and global users.
Depending on the type of accelerator you choose, you can gain additional
benefits.
* By using a standard accelerator, you can improve availability of
your internet applications that are used by a global audience. With a standard
accelerator, Global Accelerator directs traffic to optimal endpoints over the
AWS global network.
* For other scenarios, you might choose a custom routing
accelerator. With a custom routing accelerator, you can use application logic to
directly map one or more users to a specific endpoint among many endpoints.
Global Accelerator is a global service that supports endpoints in multiple AWS
Regions but you must specify the US West (Oregon) Region to create or update
accelerators.
By default, Global Accelerator provides you with two static IP addresses that
you associate with your accelerator. With a standard accelerator, instead of
using the IP addresses that Global Accelerator provides, you can configure these
entry points to be IPv4 addresses from your own IP address ranges that you bring
to Global Accelerator. The static IP addresses are anycast from the AWS edge
network. For a standard accelerator, they distribute incoming application
traffic across multiple endpoint resources in multiple AWS Regions, which
increases the availability of your applications. Endpoints for standard
accelerators can be Network Load Balancers, Application Load Balancers, Amazon
EC2 instances, or Elastic IP addresses that are located in one AWS Region or
multiple Regions. For custom routing accelerators, you map traffic that arrives
to the static IP addresses to specific Amazon EC2 servers in endpoints that are
virtual private cloud (VPC) subnets.
The static IP addresses remain assigned to your accelerator for as long as it
exists, even if you disable the accelerator and it no longer accepts or routes
traffic. However, when you *delete* an accelerator, you lose the static IP
addresses that are assigned to it, so you can no longer route traffic by using
them. You can use IAM policies like tag-based permissions with Global
Accelerator to limit the users who have permissions to delete an accelerator.
For more information, see [Tag-based policies](https://docs.aws.amazon.com/global-accelerator/latest/dg/access-control-manage-access-tag-policies.html).
For standard accelerators, Global Accelerator uses the AWS global network to
route traffic to the optimal regional endpoint based on health, client location,
and policies that you configure. The service reacts instantly to changes in
health or configuration to ensure that internet traffic from clients is always
directed to healthy endpoints.
For a list of the AWS Regions where Global Accelerator and other services are
currently supported, see the [AWS Region Table](https://docs.aws.amazon.com/about-aws/global-infrastructure/regional-product-services/).
AWS Global Accelerator includes the following components:
## Definitions
### Static IP addresses
Global Accelerator provides you with a set of two static IP addresses that are
anycast from the AWS edge network. If you bring your own IP address range to AWS
(BYOIP) to use with a standard accelerator, you can instead assign IP addresses
from your own pool to use with your accelerator. For more information, see [
Bring your own IP addresses (BYOIP) in AWS Global
Accelerator](https://docs.aws.amazon.com/global-accelerator/latest/dg/using-byoip.html).
The IP addresses serve as single fixed entry points for your clients. If you
already have Elastic Load Balancing load balancers, Amazon EC2 instances, or
Elastic IP address resources set up for your applications, you can easily add
those to a standard accelerator in Global Accelerator. This allows Global
Accelerator to use static IP addresses to access the resources.
The static IP addresses remain assigned to your accelerator for as long as it
exists, even if you disable the accelerator and it no longer accepts or routes
traffic. However, when you *delete* an accelerator, you lose the static IP
addresses that are assigned to it, so you can no longer route traffic by using
them. You can use IAM policies like tag-based permissions with Global
Accelerator to delete an accelerator. For more information, see [Tag-based policies](https://docs.aws.amazon.com/global-accelerator/latest/dg/access-control-manage-access-tag-policies.html).
### Accelerator
An accelerator directs traffic to endpoints over the AWS global network to
improve the performance of your internet applications. Each accelerator includes
one or more listeners.
There are two types of accelerators:
A *standard* accelerator directs traffic to the optimal AWS
endpoint based on several factors, including the user’s location, the health of
the endpoint, and the endpoint weights that you configure. This improves the
availability and performance of your applications. Endpoints can be Network Load
Balancers, Application Load Balancers, Amazon EC2 instances, or Elastic IP
addresses.
A *custom routing* accelerator directs traffic to one of possibly
thousands of Amazon EC2 instances running in a single or multiple virtual
private clouds (VPCs). With custom routing, listener ports are mapped to
statically associate port ranges with VPC subnets, which allows Global
Accelerator to determine an EC2 instance IP address at the time of connection.
By default, all port mapping destinations in a VPC subnet can't receive traffic.
You can choose to configure all destinations in the subnet to receive traffic,
or to specify individual port mappings that can receive traffic.
For more information, see [Types of accelerators](https://docs.aws.amazon.com/global-accelerator/latest/dg/introduction-accelerator-types.html).
### DNS name
Global Accelerator assigns each accelerator a default Domain Name System (DNS)
name, similar to `a1234567890abcdef.awsglobalaccelerator.com`, that points to
the static IP addresses that Global Accelerator assigns to you or that you
choose from your own IP address range. Depending on the use case, you can use
your accelerator's static IP addresses or DNS name to route traffic to your
accelerator, or set up DNS records to route traffic using your own custom domain
name.
### Network zone
A network zone services the static IP addresses for your accelerator from a
unique IP subnet. Similar to an AWS Availability Zone, a network zone is an
isolated unit with its own set of physical infrastructure. When you configure an
accelerator, by default, Global Accelerator allocates two IPv4 addresses for it.
If one IP address from a network zone becomes unavailable due to IP address
blocking by certain client networks, or network disruptions, then client
applications can retry on the healthy static IP address from the other isolated
network zone.
### Listener
A listener processes inbound connections from clients to Global Accelerator,
based on the port (or port range) and protocol (or protocols) that you
configure. A listener can be configured for TCP, UDP, or both TCP and UDP
protocols. Each listener has one or more endpoint groups associated with it, and
traffic is forwarded to endpoints in one of the groups. You associate endpoint
groups with listeners by specifying the Regions that you want to distribute
traffic to. With a standard accelerator, traffic is distributed to optimal
endpoints within the endpoint groups associated with a listener.
### Endpoint group
Each endpoint group is associated with a specific AWS Region. Endpoint groups
include one or more endpoints in the Region. With a standard accelerator, you
can increase or reduce the percentage of traffic that would be otherwise
directed to an endpoint group by adjusting a setting called a *traffic dial*.
The traffic dial lets you easily do performance testing or blue/green deployment
testing, for example, for new releases across different AWS Regions.
### Endpoint
An endpoint is a resource that Global Accelerator directs traffic to.
Endpoints for standard accelerators can be Network Load Balancers, Application
Load Balancers, Amazon EC2 instances, or Elastic IP addresses. An Application
Load Balancer endpoint can be internet-facing or internal. Traffic for standard
accelerators is routed to endpoints based on the health of the endpoint along
with configuration options that you choose, such as endpoint weights. For each
endpoint, you can configure weights, which are numbers that you can use to
specify the proportion of traffic to route to each one. This can be useful, for
example, to do performance testing within a Region.
Endpoints for custom routing accelerators are virtual private cloud (VPC)
subnets with one or many EC2 instances.
"""
alias AWS.Client
alias AWS.Request
# Static service descriptor shared by every request in this module.
def metadata do
  struct(AWS.ServiceMetadata,
    abbreviation: nil,
    api_version: "2018-08-08",
    content_type: "application/x-amz-json-1.1",
    credential_scope: nil,
    endpoint_prefix: "globalaccelerator",
    global?: false,
    protocol: "json",
    service_id: "Global Accelerator",
    signature_version: "v4",
    signing_name: "globalaccelerator",
    target_prefix: "GlobalAccelerator_V20180706"
  )
end
@doc """
Associate a virtual private cloud (VPC) subnet endpoint with your custom routing
accelerator.
The listener port range must be large enough to support the number of IP
addresses that can be specified in your subnet. The number of ports required is:
subnet size times the number of ports per destination EC2 instances. For
example, a subnet defined as /24 requires a listener port range of at least 255
ports.
Note: You must have enough remaining listener ports available to map to the
subnet ports, or the call will fail with a LimitExceededException.
By default, all destinations in a subnet in a custom routing accelerator cannot
receive traffic. To enable all destinations to receive traffic, or to specify
individual port mappings that can receive traffic, see the [
AllowCustomRoutingTraffic](https://docs.aws.amazon.com/global-accelerator/latest/api/API_AllowCustomRoutingTraffic.html)
operation.
"""
def add_custom_routing_endpoints(%Client{} = client, input, options \\ []) do
  meta = metadata()
  Request.request_post(client, meta, "AddCustomRoutingEndpoints", input, options)
end
@doc """
Advertises an IPv4 address range that is provisioned for use with your AWS
resources through bring your own IP addresses (BYOIP).
It can take a few minutes before traffic to the specified addresses starts
routing to AWS because of propagation delays.
To stop advertising the BYOIP address range, use [
WithdrawByoipCidr](https://docs.aws.amazon.com/global-accelerator/latest/api/WithdrawByoipCidr.html).
For more information, see [Bring Your Own IP Addresses (BYOIP)](https://docs.aws.amazon.com/global-accelerator/latest/dg/using-byoip.html)
in the *AWS Global Accelerator Developer Guide*.
"""
def advertise_byoip_cidr(%Client{} = client, input, options \\ []) do
  meta = metadata()
  Request.request_post(client, meta, "AdvertiseByoipCidr", input, options)
end
@doc """
Specify the Amazon EC2 instance (destination) IP addresses and ports for a VPC
subnet endpoint that can receive traffic for a custom routing accelerator.
You can allow traffic to all destinations in the subnet endpoint, or allow
traffic to a specified list of destination IP addresses and ports in the subnet.
Note that you cannot specify IP addresses or ports outside of the range that you
configured for the endpoint group.
After you make changes, you can verify that the updates are complete by checking
the status of your accelerator: the status changes from IN_PROGRESS to DEPLOYED.
"""
def allow_custom_routing_traffic(%Client{} = client, input, options \\ []) do
  meta = metadata()
  Request.request_post(client, meta, "AllowCustomRoutingTraffic", input, options)
end
@doc """
Create an accelerator.
An accelerator includes one or more listeners that process inbound connections
and direct traffic to one or more endpoint groups, each of which includes
endpoints, such as Network Load Balancers.
Global Accelerator is a global service that supports endpoints in multiple AWS
Regions but you must specify the US West (Oregon) Region to create or update
accelerators.
"""
def create_accelerator(%Client{} = client, input, options \\ []) do
  meta = metadata()
  Request.request_post(client, meta, "CreateAccelerator", input, options)
end
@doc """
Create a custom routing accelerator.
A custom routing accelerator directs traffic to one of possibly thousands of
Amazon EC2 instance destinations running in a single or multiple virtual private
clouds (VPC) subnet endpoints.
Be aware that, by default, all destination EC2 instances in a VPC subnet
endpoint cannot receive traffic. To enable all destinations to receive traffic,
or to specify individual port mappings that can receive traffic, see the [
AllowCustomRoutingTraffic](https://docs.aws.amazon.com/global-accelerator/latest/api/API_AllowCustomRoutingTraffic.html)
operation.
Global Accelerator is a global service that supports endpoints in multiple AWS
Regions but you must specify the US West (Oregon) Region to create or update
accelerators.
"""
def create_custom_routing_accelerator(%Client{} = client, input, options \\ []) do
  meta = metadata()
  Request.request_post(client, meta, "CreateCustomRoutingAccelerator", input, options)
end
@doc """
Create an endpoint group for the specified listener for a custom routing
accelerator.
An endpoint group is a collection of endpoints in one AWS Region.
"""
def create_custom_routing_endpoint_group(%Client{} = client, input, options \\ []) do
  meta = metadata()
  Request.request_post(client, meta, "CreateCustomRoutingEndpointGroup", input, options)
end
@doc """
Create a listener to process inbound connections from clients to a custom
routing accelerator.
Connections arrive to assigned static IP addresses on the port range that you
specify.
"""
def create_custom_routing_listener(%Client{} = client, input, options \\ []) do
  meta = metadata()
  Request.request_post(client, meta, "CreateCustomRoutingListener", input, options)
end
@doc """
Create an endpoint group for the specified listener.
An endpoint group is a collection of endpoints in one AWS Region. A resource
must be valid and active when you add it as an endpoint.
"""
def create_endpoint_group(%Client{} = client, input, options \\ []) do
  meta = metadata()
  Request.request_post(client, meta, "CreateEndpointGroup", input, options)
end
@doc """
Create a listener to process inbound connections from clients to an accelerator.
Connections arrive to assigned static IP addresses on a port, port range, or
list of port ranges that you specify.
"""
def create_listener(%Client{} = client, input, options \\ []) do
  meta = metadata()
  Request.request_post(client, meta, "CreateListener", input, options)
end
@doc """
Delete an accelerator.
Before you can delete an accelerator, you must disable it and remove all
dependent resources (listeners and endpoint groups). To disable the accelerator,
update the accelerator to set `Enabled` to false.
When you create an accelerator, by default, Global Accelerator provides you with
a set of two static IP addresses. Alternatively, you can bring your own IP
address ranges to Global Accelerator and assign IP addresses from those ranges.
The IP addresses are assigned to your accelerator for as long as it exists, even
if you disable the accelerator and it no longer accepts or routes traffic.
However, when you *delete* an accelerator, you lose the static IP addresses that
are assigned to the accelerator, so you can no longer route traffic by using
them. As a best practice, ensure that you have permissions in place to avoid
inadvertently deleting accelerators. You can use IAM policies with Global
Accelerator to limit the users who have permissions to delete an accelerator.
For more information, see [Authentication and Access Control](https://docs.aws.amazon.com/global-accelerator/latest/dg/auth-and-access-control.html)
in the *AWS Global Accelerator Developer Guide*.
"""
def delete_accelerator(%Client{} = client, input, options \\ []) do
  meta = metadata()
  Request.request_post(client, meta, "DeleteAccelerator", input, options)
end
@doc """
Delete a custom routing accelerator.
Before you can delete an accelerator, you must disable it and remove all
dependent resources (listeners and endpoint groups). To disable the accelerator,
update the accelerator to set `Enabled` to false.
When you create a custom routing accelerator, by default, Global Accelerator
provides you with a set of two static IP addresses.
The IP addresses are assigned to your accelerator for as long as it exists, even
if you disable the accelerator and it no longer accepts or routes traffic.
However, when you *delete* an accelerator, you lose the static IP addresses that
are assigned to the accelerator, so you can no longer route traffic by using
them. As a best practice, ensure that you have permissions in place to avoid
inadvertently deleting accelerators. You can use IAM policies with Global
Accelerator to limit the users who have permissions to delete an accelerator.
For more information, see [Authentication and Access Control](https://docs.aws.amazon.com/global-accelerator/latest/dg/auth-and-access-control.html)
in the *AWS Global Accelerator Developer Guide*.
"""
def delete_custom_routing_accelerator(%Client{} = client, input, options \\ []) do
  meta = metadata()
  Request.request_post(client, meta, "DeleteCustomRoutingAccelerator", input, options)
end
@doc """
Delete an endpoint group from a listener for a custom routing accelerator.
"""
def delete_custom_routing_endpoint_group(%Client{} = client, input, options \\ []) do
  meta = metadata()
  Request.request_post(client, meta, "DeleteCustomRoutingEndpointGroup", input, options)
end
@doc """
Delete a listener for a custom routing accelerator.
"""
def delete_custom_routing_listener(%Client{} = client, input, options \\ []) do
  meta = metadata()
  Request.request_post(client, meta, "DeleteCustomRoutingListener", input, options)
end
@doc """
Delete an endpoint group from a listener.
"""
def delete_endpoint_group(%Client{} = client, input, options \\ []) do
  meta = metadata()
  Request.request_post(client, meta, "DeleteEndpointGroup", input, options)
end
@doc """
Delete a listener from an accelerator.
"""
def delete_listener(%Client{} = client, input, options \\ []) do
  meta = metadata()
  Request.request_post(client, meta, "DeleteListener", input, options)
end
@doc """
Specify the Amazon EC2 instance (destination) IP addresses and ports for a VPC
subnet endpoint that cannot receive traffic for a custom routing accelerator.
You can deny traffic to all destinations in the VPC endpoint, or deny traffic to
a specified list of destination IP addresses and ports. Note that you cannot
specify IP addresses or ports outside of the range that you configured for the
endpoint group.
After you make changes, you can verify that the updates are complete by checking
the status of your accelerator: the status changes from IN_PROGRESS to DEPLOYED.
"""
def deny_custom_routing_traffic(%Client{} = client, input, options \\ []) do
  meta = metadata()
  Request.request_post(client, meta, "DenyCustomRoutingTraffic", input, options)
end
@doc """
Releases the specified address range that you provisioned to use with your AWS
resources through bring your own IP addresses (BYOIP) and deletes the
corresponding address pool.
Before you can release an address range, you must stop advertising it by using
[WithdrawByoipCidr](https://docs.aws.amazon.com/global-accelerator/latest/api/WithdrawByoipCidr.html) and you must not have any accelerators that are using static IP addresses
allocated from its address range.
For more information, see [Bring Your Own IP Addresses
(BYOIP)](https://docs.aws.amazon.com/global-accelerator/latest/dg/using-byoip.html)
in the *AWS Global Accelerator Developer Guide*.
"""
def deprovision_byoip_cidr(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeprovisionByoipCidr", input, options)
end
@doc """
Describe an accelerator.
"""
def describe_accelerator(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeAccelerator", input, options)
end
@doc """
Describe the attributes of an accelerator.
"""
def describe_accelerator_attributes(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeAcceleratorAttributes", input, options)
end
@doc """
Describe a custom routing accelerator.
"""
def describe_custom_routing_accelerator(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeCustomRoutingAccelerator", input, options)
end
@doc """
Describe the attributes of a custom routing accelerator.
"""
def describe_custom_routing_accelerator_attributes(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"DescribeCustomRoutingAcceleratorAttributes",
input,
options
)
end
@doc """
Describe an endpoint group for a custom routing accelerator.
"""
def describe_custom_routing_endpoint_group(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "DescribeCustomRoutingEndpointGroup", input, options)
end

@doc """
The description of a listener for a custom routing accelerator.
"""
def describe_custom_routing_listener(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "DescribeCustomRoutingListener", input, options)
end

@doc """
Describe an endpoint group.
"""
def describe_endpoint_group(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "DescribeEndpointGroup", input, options)
end

@doc """
Describe a listener.
"""
def describe_listener(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "DescribeListener", input, options)
end

@doc """
List the accelerators for an AWS account.
"""
def list_accelerators(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "ListAccelerators", input, options)
end

@doc """
Lists the IP address ranges that were specified in calls to
[ProvisionByoipCidr](https://docs.aws.amazon.com/global-accelerator/latest/api/ProvisionByoipCidr.html),
including the current state and a history of state changes.
"""
def list_byoip_cidrs(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "ListByoipCidrs", input, options)
end

@doc """
List the custom routing accelerators for an AWS account.
"""
def list_custom_routing_accelerators(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "ListCustomRoutingAccelerators", input, options)
end

@doc """
List the endpoint groups that are associated with a listener for a custom
routing accelerator.
"""
def list_custom_routing_endpoint_groups(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "ListCustomRoutingEndpointGroups", input, options)
end

@doc """
List the listeners for a custom routing accelerator.
"""
def list_custom_routing_listeners(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "ListCustomRoutingListeners", input, options)
end

@doc """
Provides a complete mapping from the public accelerator IP address and port to
destination EC2 instance IP addresses and ports in the virtual public cloud
(VPC) subnet endpoint for a custom routing accelerator.

For each subnet endpoint that you add, Global Accelerator creates a new static
port mapping for the accelerator. The port mappings don't change after Global
Accelerator generates them, so you can retrieve and cache the full mapping on
your servers.

If you remove a subnet from your accelerator, Global Accelerator removes
(reclaims) the port mappings. If you add a subnet to your accelerator, Global
Accelerator creates new port mappings (the existing ones don't change). If you
add or remove EC2 instances in your subnet, the port mappings don't change,
because the mappings are created when you add the subnet to Global Accelerator.

The mappings also include a flag for each destination denoting which destination
IP addresses and ports are allowed or denied traffic.
"""
def list_custom_routing_port_mappings(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "ListCustomRoutingPortMappings", input, options)
end

@doc """
List the port mappings for a specific EC2 instance (destination) in a VPC subnet
endpoint.

The response is the mappings for one destination IP address. This is useful when
your subnet endpoint has mappings that span multiple custom routing accelerators
in your account, or for scenarios where you only want to list the port mappings
for a specific destination instance.
"""
def list_custom_routing_port_mappings_by_destination(%Client{} = client, input, options \\ []) do
  Request.request_post(
    client,
    metadata(),
    "ListCustomRoutingPortMappingsByDestination",
    input,
    options
  )
end
@doc """
List the endpoint groups that are associated with a listener.
"""
def list_endpoint_groups(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "ListEndpointGroups", input, options)
end

@doc """
List the listeners for an accelerator.
"""
def list_listeners(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "ListListeners", input, options)
end

@doc """
List all tags for an accelerator.

For more information, see [Tagging in AWS Global Accelerator](https://docs.aws.amazon.com/global-accelerator/latest/dg/tagging-in-global-accelerator.html)
in the *AWS Global Accelerator Developer Guide*.
"""
def list_tags_for_resource(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "ListTagsForResource", input, options)
end

@doc """
Provisions an IP address range to use with your AWS resources through bring your
own IP addresses (BYOIP) and creates a corresponding address pool.

After the address range is provisioned, it is ready to be advertised using [
AdvertiseByoipCidr](https://docs.aws.amazon.com/global-accelerator/latest/api/AdvertiseByoipCidr.html).

For more information, see [Bring Your Own IP Addresses (BYOIP)](https://docs.aws.amazon.com/global-accelerator/latest/dg/using-byoip.html)
in the *AWS Global Accelerator Developer Guide*.
"""
def provision_byoip_cidr(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "ProvisionByoipCidr", input, options)
end

@doc """
Remove endpoints from a custom routing accelerator.
"""
def remove_custom_routing_endpoints(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "RemoveCustomRoutingEndpoints", input, options)
end

@doc """
Add tags to an accelerator resource.

For more information, see [Tagging in AWS Global Accelerator](https://docs.aws.amazon.com/global-accelerator/latest/dg/tagging-in-global-accelerator.html)
in the *AWS Global Accelerator Developer Guide*.
"""
def tag_resource(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "TagResource", input, options)
end

@doc """
Remove tags from a Global Accelerator resource.

When you specify a tag key, the action removes both that key and its associated
value. The operation succeeds even if you attempt to remove tags from an
accelerator that was already removed.

For more information, see [Tagging in AWS Global Accelerator](https://docs.aws.amazon.com/global-accelerator/latest/dg/tagging-in-global-accelerator.html)
in the *AWS Global Accelerator Developer Guide*.
"""
def untag_resource(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "UntagResource", input, options)
end

@doc """
Update an accelerator.

Global Accelerator is a global service that supports endpoints in multiple AWS
Regions but you must specify the US West (Oregon) Region to create or update
accelerators.
"""
def update_accelerator(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "UpdateAccelerator", input, options)
end

@doc """
Update the attributes for an accelerator.
"""
def update_accelerator_attributes(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "UpdateAcceleratorAttributes", input, options)
end

@doc """
Update a custom routing accelerator.
"""
def update_custom_routing_accelerator(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "UpdateCustomRoutingAccelerator", input, options)
end

@doc """
Update the attributes for a custom routing accelerator.
"""
def update_custom_routing_accelerator_attributes(%Client{} = client, input, options \\ []) do
  Request.request_post(
    client,
    metadata(),
    "UpdateCustomRoutingAcceleratorAttributes",
    input,
    options
  )
end

@doc """
Update a listener for a custom routing accelerator.
"""
def update_custom_routing_listener(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "UpdateCustomRoutingListener", input, options)
end

@doc """
Update an endpoint group.

A resource must be valid and active when you add it as an endpoint.
"""
def update_endpoint_group(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "UpdateEndpointGroup", input, options)
end

@doc """
Update a listener.
"""
def update_listener(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "UpdateListener", input, options)
end

@doc """
Stops advertising an address range that is provisioned as an address pool.

You can perform this operation at most once every 10 seconds, even if you
specify different address ranges each time.

It can take a few minutes before traffic to the specified addresses stops
routing to AWS because of propagation delays.

For more information, see [Bring Your Own IP Addresses (BYOIP)](https://docs.aws.amazon.com/global-accelerator/latest/dg/using-byoip.html)
in the *AWS Global Accelerator Developer Guide*.
"""
def withdraw_byoip_cidr(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "WithdrawByoipCidr", input, options)
end
end
|
lib/aws/generated/global_accelerator.ex
| 0.882675 | 0.647645 |
global_accelerator.ex
|
starcoder
|
defmodule AWS.FMS do
  @moduledoc """
  AWS Firewall Manager

  This is the *AWS Firewall Manager API Reference*. This guide is for
  developers who need detailed information about the AWS Firewall Manager API
  actions, data types, and errors. For detailed information about AWS
  Firewall Manager features, see the [AWS Firewall Manager Developer
  Guide](https://docs.aws.amazon.com/waf/latest/developerguide/fms-chapter.html).
  """

  # NOTE(review): generated client. Every public function delegates to
  # request/4, which signs and posts an AWS JSON-1.1 call; the service action
  # name is the only thing that varies per function.

  @doc """
  Sets the AWS Firewall Manager administrator account. AWS Firewall Manager
  must be associated with the master account of your AWS organization or
  associated with a member account that has the appropriate permissions. If
  the account ID that you submit is not an AWS Organizations master account,
  AWS Firewall Manager will set the appropriate permissions for the given
  member account.

  The account that you associate with AWS Firewall Manager is called the AWS
  Firewall Manager administrator account.
  """
  def associate_admin_account(client, input, options \\ []) do
    request(client, "AssociateAdminAccount", input, options)
  end

  @doc """
  Permanently deletes an AWS Firewall Manager applications list.
  """
  def delete_apps_list(client, input, options \\ []) do
    request(client, "DeleteAppsList", input, options)
  end

  @doc """
  Deletes an AWS Firewall Manager association with the IAM role and the
  Amazon Simple Notification Service (SNS) topic that is used to record AWS
  Firewall Manager SNS logs.
  """
  def delete_notification_channel(client, input, options \\ []) do
    request(client, "DeleteNotificationChannel", input, options)
  end

  @doc """
  Permanently deletes an AWS Firewall Manager policy.
  """
  def delete_policy(client, input, options \\ []) do
    request(client, "DeletePolicy", input, options)
  end

  @doc """
  Permanently deletes an AWS Firewall Manager protocols list.
  """
  def delete_protocols_list(client, input, options \\ []) do
    request(client, "DeleteProtocolsList", input, options)
  end

  @doc """
  Disassociates the account that has been set as the AWS Firewall Manager
  administrator account. To set a different account as the administrator
  account, you must submit an `AssociateAdminAccount` request.
  """
  def disassociate_admin_account(client, input, options \\ []) do
    request(client, "DisassociateAdminAccount", input, options)
  end

  @doc """
  Returns the AWS Organizations master account that is associated with AWS
  Firewall Manager as the AWS Firewall Manager administrator.
  """
  def get_admin_account(client, input, options \\ []) do
    request(client, "GetAdminAccount", input, options)
  end

  @doc """
  Returns information about the specified AWS Firewall Manager applications
  list.
  """
  def get_apps_list(client, input, options \\ []) do
    request(client, "GetAppsList", input, options)
  end

  @doc """
  Returns detailed compliance information about the specified member account.
  Details include resources that are in and out of compliance with the
  specified policy. Resources are considered noncompliant for AWS WAF and
  Shield Advanced policies if the specified policy has not been applied to
  them. Resources are considered noncompliant for security group policies if
  they are in scope of the policy, they violate one or more of the policy
  rules, and remediation is disabled or not possible.
  """
  def get_compliance_detail(client, input, options \\ []) do
    request(client, "GetComplianceDetail", input, options)
  end

  @doc """
  Information about the Amazon Simple Notification Service (SNS) topic that
  is used to record AWS Firewall Manager SNS logs.
  """
  def get_notification_channel(client, input, options \\ []) do
    request(client, "GetNotificationChannel", input, options)
  end

  @doc """
  Returns information about the specified AWS Firewall Manager policy.
  """
  def get_policy(client, input, options \\ []) do
    request(client, "GetPolicy", input, options)
  end

  @doc """
  If you created a Shield Advanced policy, returns policy-level attack
  summary information in the event of a potential DDoS attack. Other policy
  types are currently unsupported.
  """
  def get_protection_status(client, input, options \\ []) do
    request(client, "GetProtectionStatus", input, options)
  end

  @doc """
  Returns information about the specified AWS Firewall Manager protocols
  list.
  """
  def get_protocols_list(client, input, options \\ []) do
    request(client, "GetProtocolsList", input, options)
  end

  @doc """
  Retrieves violations for a resource based on the specified AWS Firewall
  Manager policy and AWS account.
  """
  def get_violation_details(client, input, options \\ []) do
    request(client, "GetViolationDetails", input, options)
  end

  @doc """
  Returns an array of `AppsListDataSummary` objects.
  """
  def list_apps_lists(client, input, options \\ []) do
    request(client, "ListAppsLists", input, options)
  end

  @doc """
  Returns an array of `PolicyComplianceStatus` objects. Use
  `PolicyComplianceStatus` to get a summary of which member accounts are
  protected by the specified policy.
  """
  def list_compliance_status(client, input, options \\ []) do
    request(client, "ListComplianceStatus", input, options)
  end

  @doc """
  Returns a `MemberAccounts` object that lists the member accounts in the
  administrator's AWS organization.

  The `ListMemberAccounts` must be submitted by the account that is set as
  the AWS Firewall Manager administrator.
  """
  def list_member_accounts(client, input, options \\ []) do
    request(client, "ListMemberAccounts", input, options)
  end

  @doc """
  Returns an array of `PolicySummary` objects.
  """
  def list_policies(client, input, options \\ []) do
    request(client, "ListPolicies", input, options)
  end

  @doc """
  Returns an array of `ProtocolsListDataSummary` objects.
  """
  def list_protocols_lists(client, input, options \\ []) do
    request(client, "ListProtocolsLists", input, options)
  end

  @doc """
  Retrieves the list of tags for the specified AWS resource.
  """
  def list_tags_for_resource(client, input, options \\ []) do
    request(client, "ListTagsForResource", input, options)
  end

  @doc """
  Creates an AWS Firewall Manager applications list.
  """
  def put_apps_list(client, input, options \\ []) do
    request(client, "PutAppsList", input, options)
  end

  @doc """
  Designates the IAM role and Amazon Simple Notification Service (SNS) topic
  that AWS Firewall Manager uses to record SNS logs.
  """
  def put_notification_channel(client, input, options \\ []) do
    request(client, "PutNotificationChannel", input, options)
  end

  @doc """
  Creates an AWS Firewall Manager policy.

  Firewall Manager provides the following types of policies:

  <ul> <li> A Shield Advanced policy, which applies Shield Advanced
  protection to specified accounts and resources

  </li> <li> An AWS WAF policy (type WAFV2), which defines rule groups to run
  first in the corresponding AWS WAF web ACL and rule groups to run last in
  the web ACL.

  </li> <li> An AWS WAF Classic policy (type WAF), which defines a rule
  group.

  </li> <li> A security group policy, which manages VPC security groups
  across your AWS organization.

  </li> </ul> Each policy is specific to one of the types. If you want to
  enforce more than one policy type across accounts, create multiple
  policies. You can create multiple policies for each type.

  You must be subscribed to Shield Advanced to create a Shield Advanced
  policy. For more information about subscribing to Shield Advanced, see
  [CreateSubscription](https://docs.aws.amazon.com/waf/latest/DDOSAPIReference/API_CreateSubscription.html).
  """
  def put_policy(client, input, options \\ []) do
    request(client, "PutPolicy", input, options)
  end

  @doc """
  Creates an AWS Firewall Manager protocols list.
  """
  def put_protocols_list(client, input, options \\ []) do
    request(client, "PutProtocolsList", input, options)
  end

  @doc """
  Adds one or more tags to an AWS resource.
  """
  def tag_resource(client, input, options \\ []) do
    request(client, "TagResource", input, options)
  end

  @doc """
  Removes one or more tags from an AWS resource.
  """
  def untag_resource(client, input, options \\ []) do
    request(client, "UntagResource", input, options)
  end

  @spec request(AWS.Client.t(), binary(), map(), list()) ::
          {:ok, map() | nil, map()}
          | {:error, term()}
  defp request(client, action, input, options) do
    # All FMS operations share a single endpoint; the operation is selected by
    # the X-Amz-Target header ("AWSFMS_20180101." plus the action name).
    client = %{client | service: "fms"}
    host = build_host("fms", client)
    url = build_url(host, client)

    headers = [
      {"Host", host},
      {"Content-Type", "application/x-amz-json-1.1"},
      {"X-Amz-Target", "AWSFMS_20180101.#{action}"}
    ]

    payload = encode!(client, input)
    # Sign after the payload is final — the SigV4 signature covers the body.
    headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
    post(client, url, payload, headers, options)
  end

  defp post(client, url, payload, headers, options) do
    case AWS.Client.request(client, :post, url, payload, headers, options) do
      {:ok, %{status_code: 200, body: body} = response} ->
        # Some operations return an empty body; surface that as nil per @spec.
        body = if body != "", do: decode!(client, body)
        {:ok, body, response}

      {:ok, response} ->
        # Any non-200 HTTP response is treated as an error.
        {:error, {:unexpected_response, response}}

      error = {:error, _reason} ->
        error
    end
  end

  # Local development override: use the configured endpoint verbatim.
  defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
    endpoint
  end

  defp build_host(_endpoint_prefix, %{region: "local"}) do
    "localhost"
  end

  # Standard AWS host shape: <service>.<region>.<endpoint suffix>.
  defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
    "#{endpoint_prefix}.#{region}.#{endpoint}"
  end

  defp build_url(host, %{:proto => proto, :port => port}) do
    "#{proto}://#{host}:#{port}/"
  end

  defp encode!(client, payload) do
    AWS.Client.encode!(client, payload, :json)
  end

  defp decode!(client, payload) do
    AWS.Client.decode!(client, payload, :json)
  end
end
|
lib/aws/generated/fms.ex
| 0.847037 | 0.475057 |
fms.ex
|
starcoder
|
defmodule Tipalti.Invoice do
  @moduledoc """
  Represents a Tipalti Invoice.
  """

  alias Tipalti.CustomField

  defmodule Line do
    @moduledoc """
    Represents a Tipalti Invoice Line.
    """

    @type t :: %__MODULE__{
            amount: Money.t(),
            description: String.t() | nil,
            custom_fields: [CustomField.t()],
            line_type: String.t() | nil,
            quantity: integer() | nil
          }

    @enforce_keys [:amount]
    defstruct [:amount, :description, :custom_fields, :line_type, :quantity]

    @doc false
    @spec from_map!(map()) :: t()
    def from_map!(map) do
      # Amount and currency arrive as separate API fields; combine into Money.
      # Raises (struct!/Money.new!) if required data is missing or invalid.
      struct!(__MODULE__, %{
        amount: Money.new!(map[:currency], map[:amount]),
        description: map[:description],
        custom_fields: CustomField.from_maps!(map[:custom_fields]),
        line_type: map[:line_type],
        quantity: map[:quantity]
      })
    end

    @doc false
    @spec from_maps!([map()]) :: [t()]
    def from_maps!(maps), do: Enum.map(maps, &from_map!/1)
  end

  defmodule Approver do
    @moduledoc """
    Represents a Tipalti Approver.
    """

    @type t :: %__MODULE__{
            name: String.t(),
            email: String.t(),
            order: integer() | nil
          }

    @enforce_keys [:name, :email]
    defstruct [:name, :email, :order]

    @doc false
    @spec from_map!(map()) :: t()
    def from_map!(map), do: struct!(__MODULE__, map)

    @doc false
    @spec from_maps!([map()]) :: [t()]
    def from_maps!(maps), do: Enum.map(maps, &from_map!/1)
  end

  @type ref_code :: String.t()

  # Closed set of invoice lifecycle states, mapped from Tipalti's PascalCase
  # strings by parse_status/1 below.
  @type status ::
          :pending_ap_review
          | :pending_approval
          | :pending_ap_action
          | :pending_payment
          | :submitted_for_payment
          | :paid
          | :disputed
          | :deleted
          | :pending_payee_approval
          | :pending_payee_invoice
          | :partially_paid
          | :scheduled_for_payment

  @type t :: %__MODULE__{
          idap: Tipalti.idap(),
          ref_code: ref_code(),
          date: Date.t() | nil,
          due_date: Date.t() | nil,
          line_items: [Line.t()],
          description: String.t() | nil,
          can_approve: boolean(),
          internal_notes: String.t() | nil,
          custom_fields: [CustomField.t()],
          is_paid_manually: boolean(),
          status: status(),
          approvers: [Approver.t()],
          number: String.t(),
          approval_date: Date.t() | nil,
          payer_entity_name: String.t(),
          amount_due: Money.t()
        }

  @enforce_keys [:idap, :ref_code, :can_approve, :is_paid_manually, :status, :number, :payer_entity_name, :amount_due]
  defstruct [
    :idap,
    :ref_code,
    :date,
    :due_date,
    :line_items,
    :description,
    :can_approve,
    :internal_notes,
    :custom_fields,
    :is_paid_manually,
    :status,
    :approvers,
    :number,
    :approval_date,
    :payer_entity_name,
    :amount_due
  ]

  @doc false
  @spec from_map!(map()) :: t()
  def from_map!(map) do
    # Builds the invoice struct from a decoded API payload, converting dates,
    # status and nested line items/approvers; raises on missing enforced keys.
    struct!(__MODULE__, %{
      idap: map[:idap],
      ref_code: map[:ref_code],
      date: parse_date(map[:date]),
      due_date: parse_date(map[:due_date]),
      line_items: Line.from_maps!(map[:line_items]),
      description: map[:description],
      can_approve: map[:can_approve],
      internal_notes: map[:internal_notes],
      custom_fields: CustomField.from_maps!(map[:custom_fields]),
      is_paid_manually: map[:is_paid_manually],
      status: parse_status(map[:status]),
      approvers: Approver.from_maps!(map[:approvers]),
      number: map[:number],
      approval_date: parse_date(map[:approval_date]),
      payer_entity_name: map[:payer_entity_name],
      amount_due: Money.new!(map[:currency], map[:amount_due])
    })
  end

  @doc false
  @spec from_maps!([map()]) :: [t()]
  def from_maps!(maps), do: Enum.map(maps, &from_map!/1)

  @spec parse_date(String.t() | nil) :: Date.t() | nil
  defp parse_date(nil), do: nil
  # "0001-01-01T00:00:00" is treated as "no date" — NOTE(review): this looks
  # like the .NET default DateTime sentinel sent by Tipalti; confirm upstream.
  defp parse_date("0001-01-01T00:00:00"), do: nil
  defp parse_date(date_string), do: date_string |> NaiveDateTime.from_iso8601!() |> NaiveDateTime.to_date()

  # Deliberately no catch-all clause: an unrecognized status string raises
  # FunctionClauseError, surfacing new/unknown API statuses loudly.
  @spec parse_status(String.t()) :: status()
  defp parse_status("PendingApReview"), do: :pending_ap_review
  defp parse_status("PendingApproval"), do: :pending_approval
  defp parse_status("PendingApAction"), do: :pending_ap_action
  defp parse_status("PendingPayment"), do: :pending_payment
  defp parse_status("SubmittedForPayment"), do: :submitted_for_payment
  defp parse_status("Paid"), do: :paid
  defp parse_status("Disputed"), do: :disputed
  defp parse_status("Deleted"), do: :deleted
  defp parse_status("PendingPayeeApproval"), do: :pending_payee_approval
  defp parse_status("PendingPayeeInvoice"), do: :pending_payee_invoice
  defp parse_status("PartiallyPaid"), do: :partially_paid
  defp parse_status("ScheduledForPayment"), do: :scheduled_for_payment
end
|
lib/tipalti/invoice.ex
| 0.775477 | 0.575827 |
invoice.ex
|
starcoder
|
defmodule Kashup.Element do
  @moduledoc """
  GenServer callback that is responsible for managing the state of a key's value.

  Afforded by the use of one GenServer process per key, Kashup is capable of storing very large
  values of arbitrary type. Additionally, an expiration can be assigned to `Kashup.Element`'s by
  providing an integer representing the number of seconds a key/value pair should be valid for to
  the Application's configuration.

  ## Example Configuration

  Valid for one day:

  ```
  config :kashup,
    expiration: 60 * 60 * 24
  ```

  To set a key/value pair to never expire, either omit the `expiration` field from the config
  block, or set the field to `:infinity`:

  ```
  config :kashup,
    expiration: :infinity
  ```
  """

  use GenServer

  alias Kashup.Element.State

  defmodule State do
    @moduledoc """
    Container for attributes describing the state of the `Kashup.Element`
    """
    # value: the stored term; expiration: absolute DateTime or :infinity.
    defstruct [:value, :expiration]
  end

  ## Client API

  def start_link(value, expiration) do
    GenServer.start_link(__MODULE__, [value, expiration], [])
  end

  @doc """
  Start a supervised element process holding `value`.

  `expiration` is either `:infinity` or the number of seconds the value stays
  valid. (The previous `:infinity`-specific clause was identical to the general
  one, so the clauses are merged.)
  """
  def create(value, expiration) do
    Kashup.Element.Supervisor.start_child(value, expiration)
  end

  @doc """
  Synchronously read the stored value; returns `{:ok, value}`.
  """
  def fetch(pid) do
    GenServer.call(pid, :fetch)
  end

  @doc """
  Asynchronously replace the stored value, keeping the original expiration.
  """
  def replace(pid, value) do
    GenServer.cast(pid, {:replace, value})
  end

  @doc """
  Asynchronously stop the element; `terminate/2` removes it from the store.
  """
  def delete(pid) do
    GenServer.cast(pid, :delete)
  end

  @doc """
  Calculate the amount of time remaining (in milliseconds) before an element
  expires. Returns `:infinity` for non-expiring elements and `0` once the
  expiration has already passed.
  """
  def time_left(:infinity), do: :infinity

  def time_left(expiration) do
    # DateTime.diff/2 yields seconds; GenServer timeouts expect milliseconds.
    case DateTime.diff(expiration, DateTime.utc_now()) do
      diff when diff <= 0 -> 0
      diff -> diff * 1000
    end
  end

  ## Server callbacks

  @impl true
  def init([value, expiration]) do
    # Resolve the relative expiration (seconds) into an absolute DateTime once,
    # at startup; the GenServer timeout is re-armed from it on every message.
    expiration =
      case expiration do
        :infinity -> :infinity
        provided -> DateTime.utc_now() |> DateTime.add(provided)
      end

    {:ok, %State{value: value, expiration: expiration}, time_left(expiration)}
  end

  @impl true
  def handle_call(:fetch, _from, %State{} = state) do
    # Timeout is recomputed from the fixed absolute expiration, so reads do not
    # extend an element's lifetime.
    {:reply, {:ok, state.value}, state, time_left(state.expiration)}
  end

  @impl true
  def handle_cast({:replace, value}, %State{} = state) do
    # Struct-update syntax preserves the %State{} guarantees; the previous
    # Map.put/3 would have silently accepted unknown keys.
    {:noreply, %{state | value: value}, time_left(state.expiration)}
  end

  @impl true
  def handle_cast(:delete, %State{} = state) do
    {:stop, :normal, state}
  end

  @impl true
  def handle_info(:timeout, %State{} = state) do
    # The GenServer idle timeout fired: the element expired; notify and stop.
    Kashup.Event.expired(self(), state.value)
    {:stop, :normal, state}
  end

  @impl true
  def terminate(_reason, _state) do
    # Remove this element's registration regardless of why the process stopped.
    Kashup.Store.delete(self())
    :ok
  end
end
|
lib/kashup/element/element.ex
| 0.923721 | 0.880181 |
element.ex
|
starcoder
|
defmodule SpadesGame.GameScoreRoundTeam do
  @moduledoc """
  Represents one round of scoring for one team.
  """
  alias SpadesGame.{GamePlayer, GameScoreRoundTeam}

  @derive Jason.Encoder
  defstruct [
    # Score of the team going into the round
    :before_score,
    # # of bags the team had going into the round
    :before_bags,
    # What the team bid.
    :bid,
    # How many tricks they won.
    :won,
    # How many points gained from successful nil(s), or 0
    :adj_successful_nil,
    # How many points lost from failed nil(s), or 0
    :adj_failed_nil,
    # How many points they gained from a successful bid, or nil if N/A.
    :adj_successful_bid,
    # How many points they lost from a missed bid, or nil if N/A.
    :adj_failed_bid,
    # How many bags they gained this round, or nil if not computed yet.
    :adj_bags,
    # Penalty if bags went over 10, either -100 or 0
    :bag_penalty,
    # Score of the team after the round, or nil if not computed.
    :after_score,
    # Bags of the team after the round, or nil if not computed.
    :after_bags
  ]

  use Accessible

  @type t :: %GameScoreRoundTeam{
          before_score: integer(),
          before_bags: integer(),
          bid: non_neg_integer(),
          won: non_neg_integer(),
          adj_successful_nil: integer(),
          adj_failed_nil: integer(),
          adj_successful_bid: integer(),
          adj_failed_bid: integer(),
          adj_bags: integer(),
          bag_penalty: integer(),
          after_score: integer(),
          after_bags: integer()
        }

  @doc """
  Build the scoring record for one team's round from the two players' bids and
  trick counts, carrying the running `before_score`/`before_bags` forward into
  `after_score`/`after_bags`.
  """
  @spec new(integer, integer, GamePlayer.t(), GamePlayer.t()) :: GameScoreRoundTeam.t()
  def new(before_score, before_bags, player1, player2) do
    ## If player1 or player2 hasn't bid yet (=nil), error
    adj_successful_bid = successful_bid(player1, player2)
    adj_failed_bid = failed_bid(player1, player2)
    adj_bags = adj_bags(player1, player2)
    adj_successful_nil = successful_nil(player1, player2)
    adj_failed_nil = failed_nil(player1, player2)
    {after_bags, bag_penalty} = increment_bags(before_bags, adj_bags)

    %GameScoreRoundTeam{
      before_score: before_score,
      before_bags: before_bags,
      bid: player1.bid + player2.bid,
      won: player1.tricks_won + player2.tricks_won,
      adj_successful_nil: adj_successful_nil,
      adj_failed_nil: adj_failed_nil,
      adj_successful_bid: adj_successful_bid,
      adj_failed_bid: adj_failed_bid,
      adj_bags: adj_bags,
      bag_penalty: bag_penalty,
      after_score:
        before_score + adj_successful_nil + adj_failed_nil + adj_successful_bid +
          adj_failed_bid + adj_bags + bag_penalty,
      after_bags: after_bags
    }
  end

  @doc """
  Points for making the team bid: 10 per bid trick when the team won at least
  its combined bid, otherwise 0.
  """
  @spec successful_bid(GamePlayer.t(), GamePlayer.t()) :: integer()
  def successful_bid(player1, player2) do
    won = player1.tricks_won + player2.tricks_won
    bid = player1.bid + player2.bid

    if won >= bid do
      10 * bid
    else
      0
    end
  end

  @doc """
  Bonus points for nil bids: +100 per player who bid nil and took no tricks.
  """
  @spec successful_nil(GamePlayer.t(), GamePlayer.t()) :: integer()
  def successful_nil(player1, player2) do
    successful_nil(player1) + successful_nil(player2)
  end

  @spec successful_nil(GamePlayer.t()) :: integer()
  defp successful_nil(player) do
    if player.bid == 0 and player.tricks_won == 0 do
      100
    else
      0
    end
  end

  @doc """
  Penalty for failed nil bids: -100 per player who bid nil but took a trick.
  """
  # FIX(review): spec previously allowed `nil`, but this always returns an
  # integer (the sum of two integer per-player penalties).
  @spec failed_nil(GamePlayer.t(), GamePlayer.t()) :: integer()
  def failed_nil(player1, player2) do
    failed_nil(player1) + failed_nil(player2)
  end

  @spec failed_nil(GamePlayer.t()) :: integer()
  defp failed_nil(player) do
    if player.bid == 0 and player.tricks_won > 0 do
      -100
    else
      0
    end
  end

  @doc """
  Penalty for missing the team bid: -10 per bid trick when the team won fewer
  tricks than its combined bid, otherwise 0.
  """
  @spec failed_bid(GamePlayer.t(), GamePlayer.t()) :: integer()
  def failed_bid(player1, player2) do
    won = player1.tricks_won + player2.tricks_won
    bid = player1.bid + player2.bid

    if won < bid do
      -10 * bid
    else
      0
    end
  end

  @doc """
  Bags (overtricks) earned this round: tricks won beyond the combined bid,
  or 0 when the bid was not exceeded.
  """
  # FIX(review): spec previously allowed `nil`, but both branches return an
  # integer.
  @spec adj_bags(GamePlayer.t(), GamePlayer.t()) :: integer()
  def adj_bags(player1, player2) do
    won = player1.tricks_won + player2.tricks_won
    bid = player1.bid + player2.bid

    if won > bid do
      won - bid
    else
      0
    end
  end

  @doc """
  Add this round's bags to the running total, applying -100 (and resetting the
  count) for every 10 bags accumulated. Returns `{after_bags, bag_penalty}`.
  """
  def increment_bags(before_bags, adj_bags) do
    after_bags = before_bags + adj_bags
    do_increment_bags(after_bags, 0)
  end

  def do_increment_bags(after_bags, bag_penalty) when after_bags < 10 do
    {after_bags, bag_penalty}
  end

  # Each full set of 10 bags costs 100 points; recurse in case more than one
  # set was crossed in a single round.
  def do_increment_bags(after_bags, bag_penalty) when after_bags >= 10 do
    do_increment_bags(after_bags - 10, bag_penalty - 100)
  end
end
|
backend/lib/spades_game/game_score_round_team.ex
| 0.7237 | 0.464598 |
game_score_round_team.ex
|
starcoder
|
defmodule Tesla.Middleware.Tapper do
  @moduledoc """
  Enables distributed request tracing using Tapper

  See https://github.com/Financial-Times/tapper how to set up Tapper.

  ### Example usage

  ```
  defmodule MyClient do
    use Tesla

    plug Tesla.Middleware.Tapper
  end
  ```
  """

  @behaviour Tesla.Middleware

  # When the plug is given no options, Tesla passes nil; normalize to [].
  @impl true
  def call(env, next, nil), do: call(env, next, [])

  def call(env, next, opts) do
    http_method = normalize_method(env)

    # Start a new trace (or a child span of an ongoing one) before the request
    # goes out, annotated with the HTTP method and URL.
    %{is_root: is_root, trace: trace} =
      start_span(
        Keyword.merge(
          opts,
          name: http_method,
          sample: true,
          annotations: [
            Tapper.http_method(http_method),
            Tapper.http_url(env.url)
          ]
        )
      )

    # Propagate the trace context downstream via request headers.
    headers = Tapper.Plug.HeaderPropagation.encode(trace)
    env = Tesla.put_headers(env, headers)

    with {:ok, env} <- Tesla.run(env, next) do
      # 5xx responses are recorded as span errors but still returned as {:ok, env}.
      if env.status >= 500 do
        update_span(Tapper.error())
      end

      finish_span(
        %{is_root: is_root},
        annotations: [
          Tapper.client_receive(),
          Tapper.http_status_code(env.status)
        ]
      )

      {:ok, env}
    else
      {:error, %Tesla.Error{} = ex} ->
        # FIX(review): removed an unused binding of the deprecated
        # System.stacktrace/0 that previously sat here.
        finish_span(
          %{is_root: is_root},
          annotations: [
            Tapper.http_status_code(env.status || 0),
            Tapper.error(),
            Tapper.error_message(ex),
            Tapper.client_receive()
          ]
        )

        {:error, ex}

      {:error, error} ->
        # Non-Tesla errors pass through without span bookkeeping, matching the
        # original behavior (the span is left to time out).
        {:error, error}
    end
  end

  # Starts a new trace when there is no ongoing trace, otherwise creates a
  # child span. is_root records which, so finish_span/2 can mirror it.
  defp start_span(opts) do
    if Tapper.Ctx.context?() do
      trace = Tapper.Ctx.start_span(opts)
      %{is_root: false, trace: trace}
    else
      trace = Tapper.Ctx.start(opts)
      %{is_root: true, trace: trace}
    end
  end

  defp update_span(opts) do
    Tapper.Ctx.update_span(opts)
  end

  # Finish only the child span when we joined an existing trace...
  defp finish_span(%{is_root: false}, opts) do
    Tapper.Ctx.finish_span(opts)
  end

  # ...but finish the whole trace when we started it.
  defp finish_span(%{is_root: true}, opts) do
    Tapper.Ctx.finish(opts)
  end

  defp normalize_method(env) do
    env.method |> to_string() |> String.upcase()
  end
end
|
lib/tesla/middleware/tapper.ex
| 0.831485 | 0.704897 |
tapper.ex
|
starcoder
|
defmodule BitcoinAddress.Secp256k1 do
  @moduledoc """
  Utility module to deal with functionality around secp256k1
  Elliptic Point Cryptography. Specifically,
  - Generating a secp256k1 public key from a private key
  - Extracting an Elliptic Curve point (EC Point) with coordinates {x, y}
    from a secp256k1 public key
  - Generating a Bitcoin public key from an EC Point
  """

  # `use Bitwise` is deprecated; `import Bitwise` keeps `&&&` guard-safe.
  import Bitwise

  # 256 bits
  @num_secret_bytes 32
  @hex 16

  # SEC1 compressed-point prefixes: 0x03 marks an odd y coordinate,
  # 0x02 an even one.
  @greater_than_curve_midpoint_prefix 0x03
  @less_than_curve_midpoint_prefix 0x02

  # Elliptic curve order (secp256k1) bounding valid private key values
  @n """
     115792089237316195423570985008687907852\
     837564279074904382605163141518161494337\
     """
     |> String.to_integer()

  # Private secret key string as base16
  @example_private_key """
  038109007313a5807b2eccc082c8c3fbb988a973cacf1a7df9ce725c31b14776\
  """

  # Guard to determine whether a generated key is a valid secp256k1 private
  # key. Valid keys satisfy 0 < key < n; the previous `key in 0..@n` wrongly
  # accepted both 0 and n itself.
  defguardp valid_key?(key) when key in 1..(@n - 1)

  # Guard matching odd integers (lowest bit set). Previously misnamed
  # `is_even?` even though `(int &&& 1) == 1` tests for oddness.
  defguardp is_odd?(int) when (int &&& 1) == 1

  @doc """
  Function wrapper around the module attribute for an example private key.

  ## Example:

      iex> BitcoinAddress.Secp256k1.example_private_key
      "038109007313a5807b2eccc082c8c3fbb988a973cacf1a7df9ce725c31b14776"
  """
  def example_private_key do
    @example_private_key
  end

  @doc """
  Generates a random private key that has a decimal value within the confines
  of the Secp256k1 Elliptic curve.

  ## Example:

      iex> private_key = BitcoinAddress.Secp256k1.generate_private_key
      iex> private_key_pattern = ~r/\\A[0-9a-f]{64}\\z/
      iex> private_key =~ private_key_pattern
      true
  """
  def generate_private_key do
    with hex_secret <- random_secret(),
         dec_secret <- String.to_integer(hex_secret, @hex) do
      case dec_secret do
        n when valid_key?(n) ->
          hex_secret

        _out_of_range ->
          # Out-of-range secrets are astronomically rare; just retry.
          generate_private_key()
      end
    end
  end

  @doc """
  Function that returns a Bitcoin public key, generated by the secp256k1
  algorithm, from a given private key.

  ## Parameters

  - `private_key`: A string of characters.

  ## Example:

      iex> private_key = BitcoinAddress.Secp256k1.generate_private_key
      iex> public_key = BitcoinAddress.Secp256k1.bitcoin_public_key(private_key)
      iex> public_key_pattern = ~r/\\A[0-9a-f]{66}\\z/
      iex> public_key =~ public_key_pattern
      true
  """
  def bitcoin_public_key(private_key) do
    with {public_key, _private_key} <- public_key_from_private_key(private_key),
         ec_point <- ec_point_from_public_key(public_key),
         bitcoin_public_key <- bitcoin_public_key_from_ec_point(ec_point) do
      bitcoin_public_key
    end
  end

  # Generate a new private key by collecting 256 bits of random data from
  # the OS's cryptographically secure random generator
  defp random_secret do
    @num_secret_bytes
    |> :crypto.strong_rand_bytes()
    |> Base.encode16(case: :lower)
  end

  # Derives the uncompressed secp256k1 public key for the given hex-encoded
  # private key via Erlang's :crypto ECDH key generation.
  defp public_key_from_private_key(private_key) do
    private_key
    |> String.to_integer(@hex)
    |> (fn int -> :crypto.generate_key(:ecdh, :secp256k1, int) end).()
  end

  # Elliptic Curve point: strip the 1-byte uncompressed prefix and split
  # the remaining 64 bytes into the 256-bit x and y coordinates.
  defp ec_point_from_public_key(public_key) do
    <<_prefix::size(8), x::size(256), y::size(256)>> = public_key
    {x, y}
  end

  # Compressed SEC1 encoding: prefix byte (depends on y parity) plus x.
  defp bitcoin_public_key_from_ec_point({x, y}) do
    <<public_key_prefix(y)::size(8), x::size(256)>>
    |> Base.encode16(case: :lower)
  end

  # Odd y coordinates get the 0x03 prefix.
  defp public_key_prefix(y) when is_odd?(y) do
    @greater_than_curve_midpoint_prefix
  end

  # Even y coordinates get the 0x02 prefix.
  defp public_key_prefix(_y) do
    @less_than_curve_midpoint_prefix
  end
end
|
lib/bitcoin_address/secp256k1.ex
| 0.895721 | 0.419262 |
secp256k1.ex
|
starcoder
|
defmodule Pumba.UserAgents do
  @moduledoc """
  Worker retrieves user agents for different browsers
  and provides random access to user agent strings.

  Maintained state looks like
  ```ex
  %{
    client: Pumba.Client.DefaultClient,
    browsers: %{},
    names: []
  }
  ```
  Where `browsers` is a map with key as browser
  and the list user agents as value, `names` is
  a list of loaded browsers using which we later
  can randomly pick browser and return a random user agent.
  `browsers` contains `%Pumba.Result{}` record which keeps
  total count of user agents and indexed map with user
  agent strings for fast lookups.

  It uses `Pumba.Client.DefaultClient` which loads
  user agent strings from http://www.useragentstring.com.
  You can also specify you custom client via configuration
  ```ex
  config :pumba,
    client: MyAwesomeClient
  ```
  Also you can override client using `Pumba.UserAgents.set_client(MyAwesomeClient)`
  and clients should implement `Pumba.Client` behaviour.
  To load user agents list for a given browser you need to call `Pumba.UserAgents.load/1`.
  """
  use GenServer, restart: :transient

  alias Pumba.Result
  alias Pumba.Client.DefaultClient

  @mod __MODULE__

  # Client

  @doc """
  Starts the worker registered under the module name.
  """
  def start_link(_) do
    GenServer.start_link(
      @mod,
      %{
        client: Application.get_env(:pumba, :client, DefaultClient),
        browsers: %{},
        names: []
      },
      name: @mod
    )
  end

  @doc """
  Return current state.
  """
  @spec all :: map()
  def all, do: GenServer.call(@mod, :all)

  @doc """
  Returns random user agent string, or `nil` when no user agents
  have been loaded yet.
  """
  @spec random :: term()
  def random, do: GenServer.call(@mod, :random)

  @doc """
  Replace client to retrieve user agents
  """
  @spec set_client(module()) :: :ok
  def set_client(client) do
    GenServer.cast(@mod, {:set_client, client})
  end

  @doc """
  Asynchronously load user agents for given browser.
  """
  @spec load(String.t()) :: :ok
  def load(browser) do
    GenServer.cast(@mod, {:load, browser})
  end

  @doc """
  Check if user agents for a given browser were loaded
  """
  @spec ready?(String.t()) :: boolean()
  def ready?(browser) do
    GenServer.call(@mod, {:ready, browser})
  end

  @doc """
  Get list of user agents for browser
  """
  @spec get(String.t()) :: [String.t()]
  def get(browser) do
    GenServer.call(@mod, {:get, browser})
  end

  # Server

  @impl true
  def init(state) do
    {:ok, state}
  end

  @impl true
  def handle_cast({:set_client, client}, state) do
    {:noreply, %{state | client: client}}
  end

  @doc false
  @impl true
  def handle_cast({:load, name}, %{client: client, browsers: browsers, names: names} = state) do
    case client.load(name) do
      {:ok, user_agents} ->
        # Track loaded browser names without duplicates.
        loaded =
          names
          |> MapSet.new()
          |> MapSet.put(name)

        {
          :noreply,
          %{
            state
            | names: loaded |> MapSet.to_list(),
              browsers:
                Map.put(browsers, name, %Result{
                  count: length(user_agents),
                  user_agents: process_result(user_agents)
                })
          }
        }

      {:error, err} ->
        # Record the failure so `ready?/1` reports false for this browser.
        {
          :noreply,
          %{
            state
            | browsers:
                Map.put(browsers, name, %Result{
                  error: err,
                  count: 0,
                  user_agents: %{}
                })
          }
        }
    end
  end

  @doc false
  @impl true
  def handle_call(:all, _from, state) do
    {:reply, state, state}
  end

  @doc false
  @impl true
  # Nothing loaded yet: previously this crashed in `Enum.random([])`.
  def handle_call(:random, _from, %{names: []} = state) do
    {:reply, nil, state}
  end

  def handle_call(:random, _from, %{names: names, browsers: browsers} = state) do
    ts = DateTime.now!("Etc/UTC") |> DateTime.to_unix()
    :rand.seed(:exsss, {ts + 1, ts + 2, ts + 3})

    case browsers |> Map.get(names |> Enum.random()) do
      nil ->
        {:reply, nil, state}

      %Result{count: 0} ->
        # Loaded entry with no user agents (e.g. a failed fetch):
        # previously this crashed in `Enum.random(0..-1)`.
        {:reply, nil, state}

      result ->
        n = Enum.random(0..(result.count - 1))
        {:reply, result.user_agents[n], state}
    end
  end

  @doc false
  @impl true
  def handle_call({:ready, browser}, _from, %{browsers: browsers} = state) do
    {
      :reply,
      Map.has_key?(browsers, browser) && is_nil(browsers[browser].error),
      state
    }
  end

  @doc false
  @impl true
  def handle_call({:get, browser}, _from, %{browsers: browsers} = state) do
    case Map.get(browsers, browser) do
      nil ->
        {:reply, [], state}

      result ->
        {
          :reply,
          result.user_agents |> Map.values(),
          state
        }
    end
  end

  # Builds an index => user-agent map so `random` can do O(1) lookups.
  defp process_result(user_agents) do
    user_agents
    |> Enum.with_index()
    |> Enum.map(fn {k, v} -> {v, k} end)
    |> Enum.into(%{})
  end
end
|
lib/pumba/user_agents.ex
| 0.852905 | 0.708931 |
user_agents.ex
|
starcoder
|
defmodule Militerm.ECS.Ability do
  # Macro toolkit for declaring event and ability handlers on an ECS
  # module. `use Militerm.ECS.Ability` imports the `defevent`/`defability`
  # macros, which expand into `handle_event/4` / `handle_ability/3` clauses.
  @doc false
  defmacro __using__(_opts) do
    quote do
      import Militerm.ECS.Ability
    end
  end

  @doc """
  Defines an event handler for the given `event`.
  Takes the following args:
  - as: the role
  - for: the entity playing the role
  - args: the map of slots from the command parse
  """
  defmacro defevent(event, opts), do: Militerm.ECS.Ability.define_event_handler(event, opts)

  # Four-argument form: `foo` is the role value (wrapped as `{:as, foo}`);
  # `opts` and `bar` are keyword fragments concatenated into one option list.
  defmacro defevent(event, foo, bar, opts) do
    Militerm.ECS.Ability.define_event_handler(event, [{:as, foo} | opts] ++ bar)
  end

  # Three-argument form: both `foo` and `bar` are keyword fragments
  # (this is how `defevent x, as: role do ... end` parses).
  defmacro defevent(event, foo, bar) do
    Militerm.ECS.Ability.define_event_handler(event, foo ++ bar)
  end

  @doc """
  Defines an ability handler for the given `ability` and `role`.
  Takes the following args:
  - as: the role for the ability
  - for: the entity with the ability
  """
  defmacro defability(ability, opts),
    do: Militerm.ECS.Ability.define_ability_responder(ability, opts)

  # Four-argument form mirroring `defevent/4`.
  defmacro defability(ability, foo, bar, opts) do
    Militerm.ECS.Ability.define_ability_responder(ability, [{:as, foo} | opts] ++ bar)
  end

  # Three-argument form mirroring `defevent/3`.
  defmacro defability(ability, foo, bar) do
    Militerm.ECS.Ability.define_ability_responder(ability, foo ++ bar)
  end

  # Builds a quoted `handle_ability/3` clause. `:do` and `:as` options are
  # required; `:for` is optional and, when absent, the entity argument is
  # ignored with a wildcard.
  @doc false
  def define_ability_responder(ability, opts) do
    body = Keyword.fetch!(opts, :do)
    role = Keyword.fetch!(opts, :as)

    case Keyword.get(opts, :for) do
      nil ->
        quote do
          def handle_ability(_, unquote(ability), unquote(role)), do: unquote(body)
        end

      entity_id ->
        quote do
          def handle_ability(unquote(entity_id), unquote(ability), unquote(role)),
            do: unquote(body)
        end
    end
  end

  # Builds a quoted `handle_event/4` clause. The four case branches cover
  # every combination of the optional `:for` (entity) and `:args` patterns;
  # missing options become wildcard arguments.
  @doc false
  def define_event_handler(event, opts) do
    body = Keyword.fetch!(opts, :do)
    role = Keyword.fetch!(opts, :as)

    case {Keyword.get(opts, :for), Keyword.get(opts, :args)} do
      {nil, nil} ->
        quote do
          def handle_event(_, unquote(event), unquote(role), _), do: unquote(body)
        end

      {entity_id, nil} ->
        quote do
          def handle_event(unquote(entity_id), unquote(event), unquote(role), _),
            do: unquote(body)
        end

      {nil, args} ->
        quote do
          def handle_event(_, unquote(event), unquote(role), unquote(args)), do: unquote(body)
        end

      {entity_id, args} ->
        quote do
          def handle_event(unquote(entity_id), unquote(event), unquote(role), unquote(args)),
            do: unquote(body)
        end
    end
  end

  # Fallback clauses injected at the end of the using module so unmatched
  # events/abilities return nil instead of raising FunctionClauseError.
  @doc false
  defmacro __before_compile__(_env) do
    quote do
      def handle_event(_, _, _, _), do: nil
      def handle_ability(_, _, _), do: nil
    end
  end
end
|
lib/militerm/ecs/ability.ex
| 0.681621 | 0.437944 |
ability.ex
|
starcoder
|
defmodule Geometry.MultiPolygon do
  @moduledoc """
  A set of polygons from type `Geometry.Polygon`

  `MultiPoint` implements the protocols `Enumerable` and `Collectable`.

  ## Examples

      iex> Enum.map(
      ...>   MultiPolygon.new([
      ...>     Polygon.new([
      ...>       LineString.new([
      ...>         Point.new(11, 12),
      ...>         Point.new(11, 22),
      ...>         Point.new(31, 22),
      ...>         Point.new(11, 12)
      ...>       ]),
      ...>     ]),
      ...>     Polygon.new([
      ...>       LineString.new([
      ...>         Point.new(35, 10),
      ...>         Point.new(45, 45),
      ...>         Point.new(10, 20),
      ...>         Point.new(35, 10)
      ...>       ]),
      ...>       LineString.new([
      ...>         Point.new(20, 30),
      ...>         Point.new(35, 35),
      ...>         Point.new(30, 20),
      ...>         Point.new(20, 30)
      ...>       ])
      ...>     ])
      ...>   ]),
      ...>   fn polygon -> length(polygon) == 1 end
      ...> )
      [true, false]

      iex> Enum.into(
      ...>   [
      ...>     Polygon.new([
      ...>       LineString.new([
      ...>         Point.new(11, 12),
      ...>         Point.new(11, 22),
      ...>         Point.new(31, 22),
      ...>         Point.new(11, 12)
      ...>       ])
      ...>     ])
      ...>   ],
      ...>   MultiPolygon.new())
      %MultiPolygon{
        polygons:
          MapSet.new([
            [
              [
                [11, 12],
                [11, 22],
                [31, 22],
                [11, 12]
              ]
            ]
          ])
      }
  """

  alias Geometry.{GeoJson, MultiPolygon, Polygon, WKB, WKT}

  # Polygons are stored as their raw ring coordinates (not %Polygon{}
  # structs) inside a MapSet, so duplicates collapse and order is undefined.
  defstruct polygons: MapSet.new()

  @type t :: %MultiPolygon{polygons: MapSet.t([Geometry.coordinates()])}

  @doc """
  Creates an empty `MultiPolygon`.

  ## Examples

      iex> MultiPolygon.new()
      %MultiPolygon{polygons: MapSet.new()}
  """
  @spec new :: t()
  def new, do: %MultiPolygon{}

  @doc """
  Creates a `MultiPolygon` from the given `Geometry.MultiPolygon`s.

  ## Examples

      iex> MultiPolygon.new([
      ...>   Polygon.new([
      ...>     LineString.new([
      ...>       Point.new(6, 2),
      ...>       Point.new(8, 2),
      ...>       Point.new(8, 4),
      ...>       Point.new(6, 2)
      ...>     ]),
      ...>   ]),
      ...>   Polygon.new([
      ...>     LineString.new([
      ...>       Point.new(1, 1),
      ...>       Point.new(9, 1),
      ...>       Point.new(9, 8),
      ...>       Point.new(1, 1)
      ...>     ]),
      ...>     LineString.new([
      ...>       Point.new(6, 2),
      ...>       Point.new(7, 2),
      ...>       Point.new(7, 3),
      ...>       Point.new(6, 2)
      ...>     ])
      ...>   ])
      ...> ])
      %MultiPolygon{
        polygons:
          MapSet.new([
            [
              [[1, 1], [9, 1], [9, 8], [1, 1]],
              [[6, 2], [7, 2], [7, 3], [6, 2]]
            ],
            [[[6, 2], [8, 2], [8, 4], [6, 2]]]
          ])
      }

      iex> MultiPolygon.new([])
      %MultiPolygon{}
  """
  @spec new([Polygon.t()]) :: t()
  def new([]), do: %MultiPolygon{}

  # Only the ring coordinates of each polygon are kept in the set.
  def new(polygons),
    do: %MultiPolygon{
      polygons: Enum.into(polygons, MapSet.new(), fn polygon -> polygon.rings end)
    }

  @doc """
  Returns `true` if the given `MultiPolygon` is empty.

  ## Examples

      iex> MultiPolygon.empty?(MultiPolygon.new())
      true

      iex> MultiPolygon.empty?(
      ...>   MultiPolygon.new([
      ...>     Polygon.new([
      ...>       LineString.new([
      ...>         Point.new(1, 1),
      ...>         Point.new(1, 5),
      ...>         Point.new(5, 4),
      ...>         Point.new(1, 1)
      ...>       ])
      ...>     ])
      ...>   ])
      ...> )
      false
  """
  @spec empty?(t()) :: boolean
  def empty?(%MultiPolygon{} = multi_polygon),
    do: Enum.empty?(multi_polygon.polygons)

  @doc """
  Creates a `MultiPolygon` from the given coordinates.

  ## Examples

      iex> MultiPolygon.from_coordinates([
      ...>   [
      ...>     [[6, 2], [8, 2], [8, 4], [6, 2]]
      ...>   ], [
      ...>     [[1, 1], [9, 1], [9, 8], [1, 1]],
      ...>     [[6, 2], [7, 2], [7, 3], [6, 2]]
      ...>   ]
      ...> ])
      %MultiPolygon{
        polygons:
          MapSet.new([
            [
              [[6, 2], [8, 2], [8, 4], [6, 2]],
            ], [
              [[1, 1], [9, 1], [9, 8], [1, 1]],
              [[6, 2], [7, 2], [7, 3], [6, 2]]
            ]
          ])
      }
  """
  @spec from_coordinates([[Geometry.coordinates()]]) :: t()
  def from_coordinates(coordinates) do
    %MultiPolygon{
      polygons: MapSet.new(coordinates)
    }
  end

  @doc """
  Returns an `:ok` tuple with the `MultiPolygon` from the given GeoJSON
  term. Otherwise returns an `:error` tuple.

  ## Examples

      iex> ~s(
      ...>   {
      ...>     "type": "MultiPolygon",
      ...>     "coordinates": [
      ...>       [
      ...>         [[6, 2], [8, 2], [8, 4], [6, 2]]
      ...>       ], [
      ...>         [[1, 1], [9, 1], [9, 8], [1, 1]],
      ...>         [[6, 2], [7, 2], [7, 3], [6, 2]]
      ...>       ]
      ...>     ]
      ...>   }
      ...> )
      ...> |> Jason.decode!()
      ...> |> MultiPolygon.from_geo_json()
      {:ok,
       %MultiPolygon{
         polygons:
           MapSet.new([
             [
               [[1, 1], [9, 1], [9, 8], [1, 1]],
               [[6, 2], [7, 2], [7, 3], [6, 2]]
             ], [
               [[6, 2], [8, 2], [8, 4], [6, 2]]
             ]
           ])
       }}
  """
  @spec from_geo_json(Geometry.geo_json_term()) :: {:ok, t()} | Geometry.geo_json_error()
  def from_geo_json(json), do: GeoJson.to_multi_polygon(json, MultiPolygon)

  @doc """
  The same as `from_geo_json/1`, but raises a `Geometry.Error` exception if it fails.
  """
  @spec from_geo_json!(Geometry.geo_json_term()) :: t()
  def from_geo_json!(json) do
    case GeoJson.to_multi_polygon(json, MultiPolygon) do
      {:ok, geometry} -> geometry
      error -> raise Geometry.Error, error
    end
  end

  @doc """
  Returns the GeoJSON term of a `MultiPolygon`.

  There are no guarantees about the order of polygons in the returned
  `coordinates`.

  ## Examples

  ```elixir
  MultiPolygon.to_list(
    MultiPolygon.new([
      Polygon.new([
        LineString.new([
          Point.new(111, 112),
          Point.new(111, 122),
          Point.new(131, 122),
          Point.new(111, 112)
        ])
      ]),
      Polygon.new([
        LineString.new([
          Point.new(211, 212),
          Point.new(211, 222),
          Point.new(231, 222),
          Point.new(211, 212)
        ])
      ])
    ])
  )
  # =>
  # %{
  #   "type" => "MultiPolygon",
  #   "coordinates" => [
  #     [
  #       [
  #         [11, 12],
  #         [11, 22],
  #         [31, 22],
  #         [11, 12]
  #       ]
  #     ], [
  #       [
  #         [21, 22],
  #         [21, 22],
  #         [21, 22],
  #         [21, 22]
  #       ]
  #     ]
  #   ]
  # }
  ```
  """
  @spec to_geo_json(t()) :: Geometry.geo_json_term()
  def to_geo_json(%MultiPolygon{polygons: polygons}) do
    %{
      "type" => "MultiPolygon",
      "coordinates" => MapSet.to_list(polygons)
    }
  end

  @doc """
  Returns an `:ok` tuple with the `MultiPolygon` from the given WKT string.
  Otherwise returns an `:error` tuple.

  If the geometry contains a SRID the id is added to the tuple.

  ## Examples

      iex> MultiPolygon.from_wkt("
      ...>   SRID=1234;MULTIPOLYGON (
      ...>     (
      ...>        (40 40, 20 45, 45 30, 40 40)
      ...>     ), (
      ...>        (20 35, 10 30, 10 10, 30 5, 45 20, 20 35),
      ...>        (30 20, 20 15, 20 25, 30 20)
      ...>     )
      ...>   )
      ...> ")
      {:ok, {
        %MultiPolygon{
          polygons:
            MapSet.new([
              [
                [
                  [20, 35],
                  [10, 30],
                  [10, 10],
                  [30, 5],
                  [45, 20],
                  [20, 35]
                ],
                [
                  [30, 20],
                  [20, 15],
                  [20, 25],
                  [30, 20]
                ]
              ],
              [
                [
                  [40, 40],
                  [20, 45],
                  [45, 30],
                  [40, 40]
                ]
              ]
            ])
        },
        1234
      }}

      iex> MultiPolygon.from_wkt("MultiPolygon EMPTY")
      {:ok, %MultiPolygon{}}
  """
  @spec from_wkt(Geometry.wkt()) ::
          {:ok, t() | {t(), Geometry.srid()}} | Geometry.wkt_error()
  def from_wkt(wkt), do: WKT.to_geometry(wkt, MultiPolygon)

  @doc """
  The same as `from_wkt/1`, but raises a `Geometry.Error` exception if it fails.
  """
  @spec from_wkt!(Geometry.wkt()) :: t() | {t(), Geometry.srid()}
  def from_wkt!(wkt) do
    case WKT.to_geometry(wkt, MultiPolygon) do
      {:ok, geometry} -> geometry
      error -> raise Geometry.Error, error
    end
  end

  @doc """
  Returns the WKT representation for a `MultiPolygon`. With option `:srid` an
  EWKT representation with the SRID is returned.

  There are no guarantees about the order of polygons in the returned
  WKT-string.

  ## Examples

  ```elixir
  MultiPolygon.to_wkt(
    MultiPolygon.new([
      Polygon.new([
        LineString.new([
          Point.new(20, 35),
          Point.new(10, 30),
          Point.new(10, 10),
          Point.new(30, 5),
          Point.new(45, 20),
          Point.new(20, 35)
        ]),
        LineString.new([
          Point.new(30, 20),
          Point.new(20, 15),
          Point.new(20, 25),
          Point.new(30, 20)
        ])
      ]),
      Polygon.new([
        LineString.new([
          Point.new(40, 40),
          Point.new(20, 45),
          Point.new(45, 30),
          Point.new(40, 40)
        ])
      ])
    ])
  )
  # Returns a string without any \\n or extra spaces (formatted just for readability):
  # SRID=478;MultiPolygon (
  #   (
  #     (20 35, 10 30, 10 10, 30 5, 45 20, 20 35),
  #     (30 20, 20 15, 20 25, 30 20)
  #   ), (
  #     (40 40, 20 45, 45 30, 40 40)
  #   )
  # )
  """
  @spec to_wkt(t(), opts) :: Geometry.wkt()
        when opts: [srid: Geometry.srid()]
  def to_wkt(%MultiPolygon{polygons: polygons}, opts \\ []) do
    WKT.to_ewkt(
      <<
        "MultiPolygon ",
        polygons |> MapSet.to_list() |> to_wkt_polygons()::binary()
      >>,
      opts
    )
  end

  @doc """
  Returns the WKB representation for a `MultiPolygon`.

  With option `:srid` an EWKB representation with the SRID is returned.

  The option `endian` indicates whether `:xdr` big endian or `:ndr` little
  endian is returned. The default is `:xdr`.

  The `:mode` determines whether a hex-string or binary is returned. The default
  is `:binary`.

  An example of a simpler geometry can be found in the description for the
  `Geometry.Point.to_wkb/1` function.
  """
  @spec to_wkb(t(), opts) :: Geometry.wkb()
        when opts: [endian: Geometry.endian(), srid: Geometry.srid(), mode: Geometry.mode()]
  def to_wkb(%MultiPolygon{} = multi_polygon, opts \\ []) do
    endian = Keyword.get(opts, :endian, Geometry.default_endian())
    mode = Keyword.get(opts, :mode, Geometry.default_mode())
    srid = Keyword.get(opts, :srid)

    to_wkb(multi_polygon, srid, endian, mode)
  end

  @doc """
  Returns an `:ok` tuple with the `MultiPolygon` from the given WKB string. Otherwise
  returns an `:error` tuple.

  If the geometry contains a SRID the id is added to the tuple.

  An example of a simpler geometry can be found in the description for the
  `Geometry.Point.from_wkb/2` function.
  """
  @spec from_wkb(Geometry.wkb(), Geometry.mode()) ::
          {:ok, t() | {t(), Geometry.srid()}} | Geometry.wkb_error()
  def from_wkb(wkb, mode \\ :binary), do: WKB.to_geometry(wkb, mode, MultiPolygon)

  @doc """
  The same as `from_wkb/2`, but raises a `Geometry.Error` exception if it fails.
  """
  @spec from_wkb!(Geometry.wkb(), Geometry.mode()) :: t() | {t(), Geometry.srid()}
  def from_wkb!(wkb, mode \\ :binary) do
    case WKB.to_geometry(wkb, mode, MultiPolygon) do
      {:ok, geometry} -> geometry
      error -> raise Geometry.Error, error
    end
  end

  @doc """
  Returns the number of elements in `MultiPolygon`.

  ## Examples

      iex> MultiPolygon.size(
      ...>   MultiPolygon.new([
      ...>     Polygon.new([
      ...>       LineString.new([
      ...>         Point.new(11, 12),
      ...>         Point.new(11, 22),
      ...>         Point.new(31, 22),
      ...>         Point.new(11, 12)
      ...>       ])
      ...>     ])
      ...>   ])
      ...> )
      1
  """
  @spec size(t()) :: non_neg_integer()
  def size(%MultiPolygon{polygons: polygons}), do: MapSet.size(polygons)

  @doc """
  Checks if `MultiPolygon` contains `point`.

  ## Examples

      iex> MultiPolygon.member?(
      ...>   MultiPolygon.new([
      ...>     Polygon.new([
      ...>       LineString.new([
      ...>         Point.new(11, 12),
      ...>         Point.new(11, 22),
      ...>         Point.new(31, 22),
      ...>         Point.new(11, 12)
      ...>       ])
      ...>     ])
      ...>   ]),
      ...>   Polygon.new([
      ...>     LineString.new([
      ...>       Point.new(11, 12),
      ...>       Point.new(11, 22),
      ...>       Point.new(31, 22),
      ...>       Point.new(11, 12)
      ...>     ])
      ...>   ])
      ...> )
      true

      iex> MultiPolygon.member?(
      ...>   MultiPolygon.new([
      ...>     Polygon.new([
      ...>       LineString.new([
      ...>         Point.new(11, 12),
      ...>         Point.new(11, 22),
      ...>         Point.new(31, 22),
      ...>         Point.new(11, 12)
      ...>       ])
      ...>     ])
      ...>   ]),
      ...>   Polygon.new([
      ...>     LineString.new([
      ...>       Point.new(11, 12),
      ...>       Point.new(11, 22),
      ...>       Point.new(33, 22),
      ...>       Point.new(11, 12)
      ...>     ])
      ...>   ])
      ...> )
      false
  """
  @spec member?(t(), Polygon.t()) :: boolean()
  # Membership is tested on the polygon's raw ring coordinates, since that
  # is what the internal MapSet stores.
  def member?(%MultiPolygon{polygons: polygons}, %Polygon{rings: rings}),
    do: MapSet.member?(polygons, rings)

  @doc """
  Converts `MultiPolygon` to a list.

  ## Examples

      iex> MultiPolygon.to_list(
      ...>   MultiPolygon.new([
      ...>     Polygon.new([
      ...>       LineString.new([
      ...>         Point.new(11, 12),
      ...>         Point.new(11, 22),
      ...>         Point.new(31, 22),
      ...>         Point.new(11, 12)
      ...>       ])
      ...>     ])
      ...>   ])
      ...> )
      [
        [
          [
            [11, 12],
            [11, 22],
            [31, 22],
            [11, 12]
          ]
        ]
      ]
  """
  @spec to_list(t()) :: [Polygon.t()]
  def to_list(%MultiPolygon{polygons: polygons}), do: MapSet.to_list(polygons)

  @compile {:inline, to_wkt_polygons: 1}
  defp to_wkt_polygons([]), do: "EMPTY"

  # Builds "((...), (...))" by folding the remaining polygons onto the
  # WKT rings of the first one.
  defp to_wkt_polygons([polygon | polygons]) do
    <<"(",
      Enum.reduce(polygons, Polygon.to_wkt_rings(polygon), fn polygon, acc ->
        <<acc::binary(), ", ", Polygon.to_wkt_rings(polygon)::binary()>>
      end)::binary(), ")">>
  end

  # Emits: byte order marker, geometry type code, optional SRID, then the
  # polygon payload.
  @doc false
  @compile {:inline, to_wkb: 4}
  @spec to_wkb(t(), srid, endian, mode) :: wkb
        when srid: Geometry.srid() | nil,
             endian: Geometry.endian(),
             mode: Geometry.mode(),
             wkb: Geometry.wkb()
  def to_wkb(%MultiPolygon{polygons: polygons}, srid, endian, mode) do
    <<
      WKB.byte_order(endian, mode)::binary(),
      wkb_code(endian, not is_nil(srid), mode)::binary(),
      WKB.srid(srid, endian, mode)::binary(),
      to_wkb_polygons(MapSet.to_list(polygons), endian, mode)::binary()
    >>
  end

  # Polygon count followed by each polygon's WKB (without SRID).
  @compile {:inline, to_wkb_polygons: 3}
  defp to_wkb_polygons(polygons, endian, mode) do
    Enum.reduce(polygons, WKB.length(polygons, endian, mode), fn polygon, acc ->
      <<acc::binary(), Polygon.to_wkb(polygon, nil, endian, mode)::binary()>>
    end)
  end

  # WKB geometry type code for MultiPolygon (6); the high bit 0x20000000
  # flags the presence of an SRID (EWKB).
  @compile {:inline, wkb_code: 3}
  defp wkb_code(endian, srid?, :hex) do
    case {endian, srid?} do
      {:xdr, false} -> "00000006"
      {:ndr, false} -> "06000000"
      {:xdr, true} -> "20000006"
      {:ndr, true} -> "06000020"
    end
  end

  defp wkb_code(endian, srid?, :binary) do
    case {endian, srid?} do
      {:xdr, false} -> <<0x00000006::big-integer-size(32)>>
      {:ndr, false} -> <<0x00000006::little-integer-size(32)>>
      {:xdr, true} -> <<0x20000006::big-integer-size(32)>>
      {:ndr, true} -> <<0x06000020::little-integer-size(32)>>
    end
  end

  defimpl Enumerable do
    # credo:disable-for-next-line Credo.Check.Readability.Specs
    def count(multi_polygon) do
      {:ok, MultiPolygon.size(multi_polygon)}
    end

    # credo:disable-for-next-line Credo.Check.Readability.Specs
    def member?(multi_polygon, val) do
      {:ok, MultiPolygon.member?(multi_polygon, val)}
    end

    # credo:disable-for-next-line Credo.Check.Readability.Specs
    def slice(multi_polygon) do
      size = MultiPolygon.size(multi_polygon)

      {:ok, size, &Enumerable.List.slice(MultiPolygon.to_list(multi_polygon), &1, &2, size)}
    end

    # credo:disable-for-next-line Credo.Check.Readability.Specs
    def reduce(multi_polygon, acc, fun) do
      Enumerable.List.reduce(MultiPolygon.to_list(multi_polygon), acc, fun)
    end
  end

  defimpl Collectable do
    # Collects %Polygon{} structs into the MultiPolygon. Note this reaches
    # into the MapSet's internal `map` field to merge the new ring
    # coordinates directly.
    # credo:disable-for-next-line Credo.Check.Readability.Specs
    def into(%MultiPolygon{polygons: polygons}) do
      fun = fn
        list, {:cont, x} ->
          [{x, []} | list]

        list, :done ->
          map =
            Map.merge(
              polygons.map,
              Enum.into(list, %{}, fn {polygon, []} -> {polygon.rings, []} end)
            )

          %MultiPolygon{polygons: %{polygons | map: map}}

        _list, :halt ->
          :ok
      end

      {[], fun}
    end
  end
end
|
lib/geometry/multi_polygon.ex
| 0.931595 | 0.559591 |
multi_polygon.ex
|
starcoder
|
defmodule Jason.Helpers do
  @moduledoc """
  Provides macro facilities for partial compile-time encoding of JSON.
  """

  alias Jason.{Codegen, Fragment}

  @doc ~S"""
  Encodes a JSON map from a compile-time keyword.

  The keys are encoded at compile time and the generated iodata is kept as
  flat as possible for maximum efficiency. Encoding happens right at the
  call site, but the result is an `%Jason.Fragment{}` struct that must be
  handed to one of the "main" encoding functions - for example
  `Jason.encode/2` - for final encoding into JSON, which makes it completely
  transparent for most uses.

  Only keys that require no escaping in any of the supported encoding modes
  are allowed: ASCII characters in the range 0x1F..0x7F excluding '\', '/'
  and '"' - which also rules out all control characters such as newlines.

  The order of the keys is preserved.

  ## Example

      fragment = json_map(foo: 1, bar: 2)
      Jason.encode!(fragment)
      #=> "{\"foo\":1,\"bar\":2}"

  """
  defmacro json_map(kv) do
    escape_var = quote(do: escape)
    encode_var = quote(do: encode_map)

    iodata =
      kv
      |> Macro.expand(__CALLER__)
      |> Codegen.build_kv_iodata([escape_var, encode_var])

    quote do
      %Fragment{
        encode: fn {unquote(escape_var), unquote(encode_var)} -> unquote(iodata) end
      }
    end
  end

  @doc ~S"""
  Encodes a JSON map from a variable containing a map and a compile-time
  list of keys.

  Equivalent to calling `Map.take/2` before encoding; otherwise behaves
  like `json_map/2`.

  ## Example

      map = %{a: 1, b: 2, c: 3}
      fragment = json_map_take(map, [:c, :b])
      Jason.encode!(fragment)
      #=> "{\"c\":3,\"b\":2}"

  """
  defmacro json_map_take(map, take) do
    keys = Macro.expand(take, __CALLER__)
    kv = for key <- keys, do: {key, generated_var(key, Codegen)}

    escape_var = quote(do: escape)
    encode_var = quote(do: encode_map)
    kv_iodata = Codegen.build_kv_iodata(kv, [escape_var, encode_var])

    quote do
      case unquote(map) do
        %{unquote_splicing(kv)} ->
          %Fragment{
            encode: fn {unquote(escape_var), unquote(encode_var)} -> unquote(kv_iodata) end
          }

        other ->
          raise ArgumentError,
                "expected a map with keys: #{unquote(inspect(keys))}, got: #{inspect(other)}"
      end
    end
  end

  # Identical to Macro.var/2 except the node is flagged as compiler-generated.
  defp generated_var(name, context) do
    {name, [generated: true], context}
  end
end
|
lib/helpers.ex
| 0.872341 | 0.437523 |
helpers.ex
|
starcoder
|
defmodule Mongo.Messages do
  @moduledoc """
  This module encodes and decodes the data from and to the mongodb server.
  We only support MongoDB >= 3.2 and use op_query with the hack collection "$cmd"
  Other op codes are deprecated. Therefore only op_reply and op_query are supported.
  """

  # `use Mongo.Messages` imports the records and helpers defined below.
  defmacro __using__(_opts) do
    quote do
      import unquote(__MODULE__)
    end
  end

  import Record
  import Mongo.BinaryUtils

  # Wire-protocol op codes.
  @op_reply 1
  @op_query 2004
  @op_msg_code 2013

  # Flag bits for the OP_QUERY message.
  @query_flags [
    tailable_cursor: 0x2,
    slave_ok: 0x4,
    oplog_replay: 0x8,
    no_cursor_timeout: 0x10,
    await_data: 0x20,
    exhaust: 0x40,
    partial: 0x80
  ]

  # Every message starts with four int32 fields (16 bytes total).
  @header_size 4 * 4

  # Common message header plus the supported message/record shapes.
  defrecordp :msg_header, [:length, :request_id, :response_to, :op_code]
  defrecord :op_query, [:flags, :coll, :num_skip, :num_return, :query, :select]
  defrecord :op_reply, [:flags, :cursor_id, :from, :num, :docs]
  defrecord :sequence, [:size, :identifier, :docs]
  defrecord :payload, [:doc, :sequence]
  defrecord :section, [:payload_type, :payload]
  defrecord :op_msg, [:flags, :sections]

  @doc """
  Decodes the header from response of a request sent by the mongodb server
  """
  # Only convert the iolist to a binary once enough bytes for a complete
  # header have been accumulated; otherwise signal "need more data".
  def decode_header(iolist) when is_list(iolist) do
    case IO.iodata_length(iolist) >= @header_size do
      true -> iolist |> IO.iodata_to_binary() |> decode_header()
      false -> :error
    end
  end

  def decode_header(<<length::int32, request_id::int32, response_to::int32, op_code::int32, rest::binary>>) do
    header = msg_header(length: length - @header_size, request_id: request_id, response_to: response_to, op_code: op_code) ## todo don't subtract header-size here
    {:ok, header, rest}
  end

  def decode_header(_binary), do: :error

  @doc """
  Decodes the response body of a request sent by the mongodb server
  """
  # As with the header, wait until at least `length` bytes are available.
  def decode_response(msg_header(length: length) = header, iolist) when is_list(iolist) do
    case IO.iodata_length(iolist) >= length do
      true -> decode_response(header, IO.iodata_to_binary(iolist))
      false -> :error
    end
  end

  # Splits off the body of this message and dispatches on the op code;
  # `rest` carries any bytes belonging to the next message.
  def decode_response(msg_header(length: length, response_to: response_to, op_code: op_code), binary) when byte_size(binary) >= length do
    <<response::binary(length), rest::binary>> = binary

    case op_code do
      @op_reply -> {:ok, response_to, decode_reply(response), rest}
      @op_msg_code -> {:ok, response_to, decode_msg(response), rest}
      _ -> :error
    end
  end

  def decode_response(_header, _binary), do: :error

  @doc """
  Decodes a reply message from the response
  """
  def decode_reply(<<flags::int32, cursor_id::int64, from::int32, num::int32, rest::binary>>) do
    op_reply(flags: flags, cursor_id: cursor_id, from: from, num: num, docs: BSON.Decoder.documents(rest))
  end

  # Decodes an OP_MSG body: flag bits followed by one or more sections.
  def decode_msg(<<flags::int32, rest::binary>>) do
    op_msg(flags: flags, sections: decode_sections(rest))
  end

  def decode_sections(binary), do: decode_sections(binary, [])
  def decode_sections("", acc), do: Enum.reverse(acc)

  # Payload type 0: a single BSON document. The BSON length prefix counts
  # itself, so the `size`-byte slice starts at the beginning of `payload`.
  def decode_sections(<<0x00::int8, payload::binary>>, acc) do
    <<size::int32, _rest::binary>> = payload
    <<doc::binary(size), rest::binary>> = payload

    with {doc, ""} <- BSON.Decoder.document(doc) do
      decode_sections(rest, [section(payload_type: 0, payload: payload(doc: doc)) | acc])
    end
  end

  # Payload type 1: a document sequence (size, identifier, documents).
  def decode_sections(<<0x01::int8, payload::binary>>, acc) do
    <<size::int32, _rest::binary>> = payload
    <<sequence::binary(size), rest::binary>> = payload
    decode_sections(rest, [section(payload_type: 1, payload: payload(sequence: decode_sequence(sequence))) | acc])
  end

  # A sequence is its total size, a null-terminated identifier string, then
  # back-to-back BSON documents.
  def decode_sequence(<<size::int32, rest::binary>>) do
    with {identifier, docs} <- cstring(rest) do
      sequence(size: size, identifier: identifier, docs: BSON.Decoder.documents(docs))
    end
  end

  # Splits a binary at the first null byte (C-style string).
  defp cstring(binary) do
    [string, rest] = :binary.split(binary, <<0x00>>)
    {string, rest}
  end

  # Encodes a full message (header + body) as iodata for the given op.
  def encode(request_id, op_query() = op) do
    iodata = encode_op(op)
    header = msg_header(length: IO.iodata_length(iodata) + @header_size, request_id: request_id, response_to: 0, op_code: @op_query)
    [encode_header(header)|iodata]
  end

  def encode(request_id, op_msg() = op) do
    iodata = encode_op(op)
    header = msg_header(length: IO.iodata_length(iodata) + @header_size, request_id: request_id, response_to: 0, op_code: @op_msg_code)
    [encode_header(header)|iodata]
  end

  defp encode_header(msg_header(length: length, request_id: request_id, response_to: response_to, op_code: op_code)) do
    <<length::int32, request_id::int32, response_to::int32, op_code::int32>>
  end

  defp encode_op(op_query(flags: flags, coll: coll, num_skip: num_skip,
                 num_return: num_return, query: query, select: select)) do
    [<<blit_flags(:query, flags)::int32>>,
     coll,
     <<0x00, num_skip::int32, num_return::int32>>,
     BSON.Encoder.document(query),
     select]
  end

  defp encode_op(op_msg(flags: _flags, sections: sections)) do
    # todo: flags encoding
    [<<0::int32>> | encode_sections(sections)]
  end

  defp encode_sections(sections) do
    Enum.map(sections, fn section -> encode_section(section) end)
  end

  # A section is its 1-byte payload type followed by the payload itself.
  defp encode_section(section(payload_type: t, payload: payload)) do
    [<<t::int8>> | encode_payload(payload)]
  end

  defp encode_payload(payload(doc: doc, sequence: nil)) do
    BSON.Encoder.document(doc)
  end

  # Sequence payload: total size (including the 4 size bytes themselves),
  # null-terminated identifier, then the encoded documents.
  defp encode_payload(payload(doc: nil, sequence: sequence(identifier: identifier, docs: docs))) do
    iodata = [identifier, <<0x00>> | Enum.map(docs, fn doc -> BSON.Encoder.encode(doc) end)]
    size = IO.iodata_length(iodata) + 4
    [<<size::int32>> | iodata]
  end

  # ORs the given flag atoms into a single bit field; integers pass through.
  defp blit_flags(op, flags) when is_list(flags) do
    import Bitwise
    Enum.reduce(flags, 0x0, &(flag_to_bit(op, &1) ||| &2))
  end

  defp blit_flags(_op, flags) when is_integer(flags) do
    flags
  end

  # Compile-time generation of one flag_to_bit/2 clause per query flag.
  Enum.each(@query_flags, fn {flag, bit} ->
    defp flag_to_bit(:query, unquote(flag)), do: unquote(bit)
  end)

  # Unknown flags contribute no bits.
  defp flag_to_bit(_op, _flag), do: 0x0
end
|
lib/mongo/messages.ex
| 0.534612 | 0.531513 |
messages.ex
|
starcoder
|
defmodule Canvas.Resources.Enrollments do
  @moduledoc """
  Provides functions to interact with the
  [enrollment term endpoints](https://canvas.instructure.com/doc/api/enrollments).
  """

  alias Canvas.{Client, Listing, Response}
  alias Canvas.Resources.{Enrollment, User}

  @doc """
  Depending on the URL given, return a paginated list of either
  (1) all of the enrollments in a course,
  (2) all of the enrollments in a section or
  (3) all of a user's enrollments.

  This includes student, teacher, TA, and observer enrollments. A user with
  multiple enrollments in a context (e.g. as a teacher and a student, or in
  multiple course sections) has each enrollment listed separately.

  note: Currently, only a root level admin user can return other users' enrollments.
  A user can, however, return his/her own enrollments.

  See:
  - https://canvas.instructure.com/doc/api/enrollments#method.enrollments_api.index

  ## Examples:

      client = %Canvas.Client{access_token: "<PASSWORD>", base_url: "https://instructure.test"}
      {:ok, response} = Canvas.Resources.Enrollments.list_enrollments(client, :course, 101)
      {:ok, response} = Canvas.Resources.Enrollments.list_enrollments(client, :course, 101, per_page: 20, page: 2)
      {:ok, response} = Canvas.Resources.Enrollments.list_enrollments(client, :section, 1234)
      {:ok, response} = Canvas.Resources.Enrollments.list_enrollments(client, :user, 123)
  """
  @spec list_enrollments(Client.t(), atom, String.t() | integer, Keyword.t()) ::
          {:ok | :error, Response.t()}
  def list_enrollments(client, by, id, options \\ [])

  def list_enrollments(client, :course, id, options),
    do: fetch_enrollments(client, Client.versioned("/courses/#{id}/enrollments"), options)

  def list_enrollments(client, :section, id, options),
    do: fetch_enrollments(client, Client.versioned("/sections/#{id}/enrollments"), options)

  def list_enrollments(client, :user, id, options),
    do: fetch_enrollments(client, Client.versioned("/users/#{id}/enrollments"), options)

  # Shared request/parse pipeline used by every listing context above.
  defp fetch_enrollments(client, url, options) do
    client
    |> Listing.get(url, options)
    |> Response.parse([%Enrollment{user: %User{}}])
  end

  @doc """
  List all enrollments, automatically paginating if necessary.

  This function pages through every result page and returns all enrollments.

  ## Examples:

      client = %Canvas.Client{access_token: "<PASSWORD>", base_url: "https://instructure.test"}
      {:ok, response} = Canvas.Resources.Enrollments.all_enrollments(client, :course, 101)
  """
  @spec all_enrollments(Client.t(), atom, String.t() | integer, Keyword.t()) ::
          {:ok, list(%Enrollment{})} | {:error, Response.t()}
  def all_enrollments(client, by, id, options \\ []) do
    Listing.get_all(__MODULE__, :list_enrollments, [client, by, id, options])
  end

  # The endpoints below are not implemented yet; each placeholder returns nil.
  def get_enrollment() do
  end

  def enroll_by_course() do
  end

  def enroll_by_section() do
  end

  def conclude_deactivate_delete() do
  end

  def accept() do
  end

  def reject() do
  end

  def reactivate() do
  end

  def last_attended() do
  end
end
|
lib/canvas/resources/enrollments.ex
| 0.830834 | 0.581541 |
enrollments.ex
|
starcoder
|
defmodule Militerm.Systems.MML do
  @moduledoc """
  Manages the rendering handlers for MML tags for different device contexts.
  """

  use Militerm.ECS.System

  alias Militerm.Services.MML, as: MMLService
  alias Militerm.Parsers.MML, as: MMLParser

  @doc """
  Accepts output and forwards it to the registered interfaces. Eventually, this will
  cache output until the closing matching tag or an explicit flush at the end of the
  event handling cycle.
  """
  defscript emit(content), for: %{"this" => this} = objects do
    Militerm.Systems.Entity.receive_message(this, "emit", content, objects)
    true
  end

  defscript prompt(content), for: %{"this" => this} = objects do
    Militerm.Systems.Entity.receive_message(this, "prompt", content, objects)
    true
  end

  defscript item_list(content), as: "ItemList" do
    if is_nil(content) do
      "nothing"
    else
      content
      |> Enum.reject(&is_nil/1)
      |> Militerm.English.item_list()
    end
  end

  defscript item_list(content, conjunction), as: "ItemList" do
    if is_nil(content) do
      "nothing"
    else
      content
      |> Enum.reject(&is_nil/1)
      |> Militerm.English.item_list(conjunction)
    end
  end

  @doc """
  Bind the given slots to the message. This allows the message to be embedded in other
  messages while keeping track of what goes in which slot for this message.

  Returns `{:ok, {:bound, parse, slots}}` on success, or the parser's error tuple.
  """
  def bind(message, slots) when is_binary(message) do
    # Use the MMLParser alias (previously declared but unused).
    case MMLParser.parse(message) do
      {:ok, p} -> {:ok, {:bound, p, slots}}
      otherwise -> otherwise
    end
  end

  def bind(message, slots) when is_list(message) do
    # Bind each line, halting on the first error.
    result =
      Enum.reduce_while(message, [], fn line, acc ->
        case bind(line, slots) do
          {:ok, binding} -> {:cont, [binding | acc]}
          {:error, error} -> {:halt, {:error, error}}
        end
      end)

    case result do
      {:error, _} -> result
      list -> {:ok, {:bound, Enum.reverse(list), %{}}}
    end
  end

  def bind({:bound, _, _} = binding, _), do: {:ok, binding}

  @doc """
  Same as `bind/2` but returns the binding directly and raises on failure.
  """
  def bind!(message, slots) do
    case bind(message, slots) do
      {:ok, binding} -> binding
      {:error, error} -> raise error
    end
  end

  @doc """
  Returns the unique slot names referenced by a bound message or parse-node list.
  """
  def used_slots({:bound, message, _}), do: used_slots(message)

  # Fix: used_slots/1 is also called with bare node lists (from the clause
  # above, from tag nodes, and from attribute values) but had no clause
  # accepting a list, so those calls raised FunctionClauseError.
  def used_slots(nodes) when is_list(nodes), do: used_slots(nodes, [])

  def used_slots([], slots), do: Enum.uniq(Enum.reverse(slots))

  def used_slots([{:slot, slot} | rest], slots) do
    used_slots(rest, [slot | slots])
  end

  def used_slots([{:slot, slot, _} | rest], slots) do
    used_slots(rest, [slot | slots])
  end

  def used_slots([{:tag, attributes, nodes} | rest], slots) do
    # attributes first, then nodes
    used_slots(
      rest,
      used_slots_in_attributes(attributes) ++ used_slots(nodes) ++ slots
    )
  end

  def used_slots([_ | rest], slots), do: used_slots(rest, slots)

  # Collects slot names referenced inside a tag's :attributes keyword list.
  def used_slots_in_attributes(attributes) do
    case Keyword.fetch(attributes, :attributes) do
      {:ok, list} ->
        list
        |> Enum.flat_map(fn {_, value} -> used_slots(value) end)
        |> Enum.uniq()

      _ ->
        []
    end
  end

  @doc """
  Renders a bound message (or `{parse, bindings}` pair) for the given point of
  view and output device, returning a list of rendered items.
  """
  # Fix: a second, byte-identical `render({:bound, ...}, ...)` clause was
  # unreachable (duplicate) and has been removed.
  def render({:bound, parse, bindings}, pov, device) do
    parse
    |> Enum.map(fn item -> render_item(item, bindings, pov, device) end)
  end

  def render({parse, bindings}, pov, device) do
    parse
    |> Enum.map(fn item -> render_item(item, bindings, pov, device) end)
  end

  # Plain strings and nil render as themselves / empty.
  def render_item(string, _, _, _) when is_binary(string), do: string

  def render_item(nil, _, _, _), do: ""

  def render_item(list, bindings, pov, device) when is_list(list) do
    list
    |> Enum.map(&render_item(&1, bindings, pov, device))
  end

  def render_item({:script, code}, bindings, pov, device) do
    code
    |> Militerm.Machines.Script.run(bindings)
    |> render_item(bindings, pov, device)
  end

  def render_item({:error, message}, _bindings, _pov, _device) do
    "(ERROR: #{inspect(message)})"
  end

  def render_item({:thing, _} = thing, _bindings, pov, _device) do
    entity_name(thing, pov)
  end

  def render_item({:thing, _, _} = thing, _bindings, pov, _device) do
    entity_name(thing, pov)
  end

  def render_item({:bound, _, _} = binding, _bindings, pov, device) do
    render(binding, pov, device)
  end

  def render_item({:value, name}, bindings, pov, device) do
    render_item(Map.get(bindings, name, ""), bindings, pov, device)
  end

  # Verb agreement: pluralize when the relevant objects are a single entity
  # that is not the point of view.
  def render_item({:verb, verb}, bindings, pov, _device) do
    objs = as_list(Map.get(bindings, "actor", []))

    if pov not in objs and Enum.count(objs) == 1 do
      Militerm.English.pluralize(verb)
    else
      verb
    end
  end

  def render_item({:verb, slot, verb}, bindings, pov, _device) do
    # Normalize with as_list/1 (as the actor clause does) so a bare,
    # non-list binding can't crash the `in` membership check.
    objs = as_list(Map.get(bindings, to_string(slot), []))

    if pov not in objs and Enum.count(objs) == 1 do
      Militerm.English.pluralize(verb)
    else
      verb
    end
  end

  def render_item({:Verb, verb}, bindings, pov, _device) do
    objs = as_list(Map.get(bindings, "actor", []))

    if pov not in objs and Enum.count(objs) == 1 do
      String.capitalize(Militerm.English.pluralize(verb))
    else
      String.capitalize(verb)
    end
  end

  def render_item({:Verb, slot, verb}, bindings, pov, _device) do
    objs = as_list(Map.get(bindings, to_string(slot), []))

    if pov not in objs and Enum.count(objs) == 1 do
      String.capitalize(Militerm.English.pluralize(verb))
    else
      String.capitalize(verb)
    end
  end

  def render_item({:tag, attributes, nodes}, bindings, pov, device) do
    with {:ok, name} <- Keyword.fetch(attributes, :name),
         {:ok, {module, function, args}} <- MMLService.tag_handler(name, device) do
      apply(module, function, [attributes, nodes, bindings, pov, device] ++ args)
    else
      # No registered handler for this tag/device: render the children plainly.
      _ ->
        render({nodes, bindings}, pov, device)
    end
  end

  def render_item({:slot, slot, type}, bindings, pov, _device) do
    case Map.get(bindings, to_string(slot)) do
      nil ->
        "nothing"

      [] ->
        "nothing"

      list when is_list(list) ->
        list
        |> Enum.map(&entity_name(&1, pov, type))
        |> Militerm.English.item_list()

      thing when is_tuple(thing) ->
        entity_name(thing, pov)
    end
  end

  def render_item({:slot, slot}, bindings, pov, _device) do
    case Map.get(bindings, to_string(slot)) do
      nil ->
        "nothing"

      [] ->
        "nothing"

      list when is_list(list) ->
        list
        |> Enum.map(&entity_name(&1, pov))
        |> Militerm.English.item_list()

      thing when is_tuple(thing) ->
        entity_name(thing, pov)
    end
  end

  def render_item({:Slot, slot, type}, bindings, pov, _device) do
    case Map.get(bindings, to_string(slot)) do
      nil ->
        "Nothing"

      [] ->
        "Nothing"

      list when is_list(list) ->
        list
        |> Enum.map(&entity_name(&1, pov, type))
        |> Militerm.English.item_list()
        |> String.capitalize()

      thing when is_tuple(thing) ->
        String.capitalize(entity_name(thing, pov))
    end
  end

  def render_item({:Slot, slot}, bindings, pov, _device) do
    case Map.get(bindings, to_string(slot)) do
      nil ->
        "Nothing"

      [] ->
        "Nothing"

      list when is_list(list) ->
        list
        |> Enum.map(&entity_name(&1, pov))
        |> Militerm.English.item_list()
        |> String.capitalize()

      thing when is_tuple(thing) ->
        String.capitalize(entity_name(thing, pov))
    end
  end

  defp as_list(list) when is_list(list), do: list
  defp as_list(nil), do: []
  defp as_list(value), do: [value]

  # entity_name/2: how the point of view refers to an entity ("you" for self).
  defp entity_name(pov, pov), do: "you"

  defp entity_name({:thing, entity_id}, _pov) do
    entity_name_by_identity(entity_id) || entity_name_by_details(entity_id) || entity_id
  end

  defp entity_name({:thing, entity_id, detail}, _) when is_binary(detail) do
    entity_name_by_details({entity_id, detail}) || entity_id
  end

  defp entity_name(string, _) when is_binary(string), do: string

  # entity_name/3: typed references — "name" or a pronoun case.
  defp entity_name({:thing, entity_id}, _, "name") do
    entity_name_by_identity(entity_id) || entity_name_by_details(entity_id) || "something"
  end

  defp entity_name({:thing, entity_id, detail}, _, "name") do
    entity_name_by_details({entity_id, detail}) || "something"
  end

  defp entity_name(string, _, "name") when is_binary(string), do: string

  defp entity_name(pov, pov, "nominative"), do: "you"
  defp entity_name(pov, pov, "objective"), do: "you"
  defp entity_name(pov, pov, "reflexive"), do: "yourself"
  defp entity_name(pov, pov, "possessive"), do: "your"
  defp entity_name(thing, _, "nominative"), do: entity_nominative(thing)
  defp entity_name(thing, _, "objective"), do: entity_objective(thing)
  defp entity_name(thing, _, "reflexive"), do: entity_reflexive(thing)
  defp entity_name(thing, _, "possessive"), do: entity_possessive(thing)

  defp entity_nominative({:thing, entity_id}) do
    case Militerm.Components.Identity.get(entity_id) do
      %{"nominative" => nom} -> nom
      _ -> "it"
    end
  end

  defp entity_nominative({:thing, entity_id, "default"}) do
    entity_nominative({:thing, entity_id})
  end

  defp entity_nominative(_), do: "it"

  defp entity_objective({:thing, entity_id}) do
    case Militerm.Components.Identity.get(entity_id) do
      %{"objective" => obj} -> obj
      _ -> "it"
    end
  end

  defp entity_objective({:thing, entity_id, "default"}) do
    entity_objective({:thing, entity_id})
  end

  defp entity_objective(_), do: "it"

  defp entity_possessive({:thing, entity_id}) do
    case Militerm.Components.Identity.get(entity_id) do
      %{"possessive" => pos} -> pos
      _ -> "its"
    end
  end

  defp entity_possessive({:thing, entity_id, "default"}) do
    entity_possessive({:thing, entity_id})
  end

  defp entity_possessive(_), do: "its"

  # NOTE(review): public while its siblings are private; kept public in case
  # external callers rely on it.
  def entity_reflexive(thing) do
    entity_objective(thing) <> "self"
  end

  defp entity_name_by_identity({entity_id, _}) do
    entity_name_by_identity(entity_id)
  end

  defp entity_name_by_identity(entity_id) do
    case Militerm.Components.Identity.get(entity_id) do
      %{"name" => name} -> name
      _ -> nil
    end
  end

  defp entity_name_by_details(entity_id) when is_binary(entity_id) do
    entity_name_by_details({entity_id, "default"})
  end

  defp entity_name_by_details({entity_id, detail}) do
    case Militerm.Components.Details.get(entity_id, detail) do
      %{"short" => name} ->
        name

      _ ->
        if detail == "default", do: nil, else: entity_name_by_details({entity_id, "default"})
    end
  end
end
|
lib/militerm/systems/mml.ex
| 0.787155 | 0.404213 |
mml.ex
|
starcoder
|
defmodule Versioned.Absinthe do
  @moduledoc """
  Helpers for Absinthe schemas.
  """

  @doc """
  Declare an object, versioned compliment, and interface, based off name `name`.
  The caller should `use Absinthe.Schema.Notation` as here we return code
  which invokes its `object` macro.
  Both objects belong to an interface which encompasses the common fields.
  All common fields (except `:id` and `:inserted_at`) are included under an
  interface, named by the entity name and suffixed `_base`.
  The generated object will have the following fields:
  * `:id` - ID of the record.
  * `:version_id` - ID of the most recent record's version.
  * `:inserted_at` - Timestamp when the record was created.
  * `:updated_at` - Timestamp when the record was last updated.
  * Additionally, all fields declared in the block.
  The generated version object will have the following fields:
  * `:id` - ID of the version record.
  * `:foo_id` - If the entity was `:foo`, then this would be the id of the main
  record for which this version is based.
  * `:is_deleted` - Boolean indicating if the record was deleted as of this version.
  * `:inserted_at` - Timestamp when the version record was created.
  * Additionally, all fields declared in the block.
  """
  defmacro versioned_object(name, do: block) do
    quote do
      # Current-state object: `name` with the caller's fields plus the
      # standard id/version_id/timestamp fields.
      object unquote(name) do
        field :id, non_null(:id)
        field :version_id, :id
        field :inserted_at, non_null(:datetime)
        field :updated_at, non_null(:datetime)
        unquote(block)
        interface(unquote(:"#{name}_base"))
      end

      # Version object: `<name>_version`, pointing back at the main record
      # via `<name>_id` and carrying the soft-delete flag.
      object unquote(:"#{name}_version") do
        field :id, non_null(:id)
        field unquote(:"#{name}_id"), :id
        field :is_deleted, :boolean
        field :inserted_at, :datetime
        unquote(block)
        interface(unquote(:"#{name}_base"))
      end

      # Shared interface `<name>_base`: only the caller-declared fields.
      # Resolution: a `:version_id` key marks the current object; a
      # `<name>_id` key marks a version; anything else is unresolvable.
      interface unquote(:"#{name}_base") do
        unquote(block)

        resolve_type(fn
          %{version_id: _}, _ -> unquote(name)
          %{unquote(:"#{name}_id") => _}, _ -> unquote(:"#{name}_version")
          _, _ -> nil
        end)
      end
    end
  end
end
|
lib/versioned/absinthe.ex
| 0.695958 | 0.465023 |
absinthe.ex
|
starcoder
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.