code
stringlengths 114
1.05M
| path
stringlengths 3
312
| quality_prob
float64 0.5
0.99
| learning_prob
float64 0.2
1
| filename
stringlengths 3
168
| kind
stringclasses 1
value |
---|---|---|---|---|---|
defmodule Surface do
  @moduledoc """
  Surface is component based library for **Phoenix LiveView**.

  Built on top of the new `Phoenix.LiveComponent` API, Surface provides
  a more declarative way to express and use components in Phoenix.

  Full documentation and live examples can be found at
  [surface-demo.msaraiva.io](http://surface-demo.msaraiva.io)

  This module defines the `~H` sigil that should be used to translate Surface
  code into Phoenix templates.

  In order to have `~H` available for any Phoenix view, add the following import to your web
  file in `lib/my_app_web.ex`:

      # lib/my_app_web.ex

      def view do
        quote do
          ...
          import Surface
        end
      end

  ## Defining components

  To create a component you need to define a module and `use` one of the available component types:

    * `Surface.Component` - A stateless component.
    * `Surface.LiveComponent` - A live stateful component.
    * `Surface.LiveView` - A wrapper component around `Phoenix.LiveView`.
    * `Surface.MacroComponent` - A low-level component which is responsible for translating its own content at compile time.

  ### Example

      # A functional stateless component

      defmodule Button do
        use Surface.Component

        property click, :event
        property kind, :string, default: "is-info"

        def render(assigns) do
          ~H"\""
          <button class="button {{ @kind }}" phx-click={{ @click }}>
            <slot/>
          </button>
          "\""
        end
      end

  You can visit the documentation of each type of component for further explanation and examples.
  """

  @doc """
  Translates Surface code into Phoenix templates.
  """
  defmacro sigil_H({:<<>>, _, [string]}, _) do
    # The template content starts on the line after the sigil opener.
    line_offset = __CALLER__.line + 1

    string
    |> Surface.Translator.run(line_offset, __CALLER__, __CALLER__.file)
    |> EEx.compile_string(
      engine: Phoenix.LiveView.Engine,
      line: line_offset,
      file: __CALLER__.file
    )
  end

  @doc false
  # Renders a component module with the given assigns. The 3-arity variant is
  # emitted by the translator for components with an (empty) children list.
  def component(module, assigns) do
    module.render(assigns)
  end

  def component(module, assigns, []) do
    module.render(assigns)
  end

  @doc false
  # Fills any prop the caller did not pass with the `default:` declared on the
  # component's `property` (or `nil` when no default was declared).
  def put_default_props(props, mod) do
    Enum.reduce(mod.__props__(), props, fn %{name: name, opts: opts}, acc ->
      default = Keyword.get(opts, :default)
      Map.put_new(acc, name, default)
    end)
  end

  @doc false
  # Builds the {assigns, context} pair used while rendering `mod`:
  #   1. copies values the component `gets` from the current context;
  #   2. runs the optional `init_context/1` callback;
  #   3. merges values the component `sets` into both assigns and context.
  def begin_context(props, current_context, mod) do
    assigns = put_gets_into_assigns(props, current_context, mod.__context_gets__())

    initialized_context_assigns =
      with true <- function_exported?(mod, :init_context, 1),
           {:ok, values} <- mod.init_context(assigns) do
        Map.new(values)
      else
        false ->
          # Fix: must be an empty map, not `[]`. The original list value made
          # put_sets_into_assigns_and_context/4 raise BadMapError (Map.fetch/2
          # on a list) whenever the component declares context sets but does
          # not implement init_context/1.
          %{}

        {:error, message} ->
          runtime_error(message)

        result ->
          runtime_error(
            "unexpected return value from init_context/1. " <>
              "Expected {:ok, keyword()} | {:error, String.t()}, got: #{inspect(result)}"
          )
      end

    {assigns, new_context} =
      put_sets_into_assigns_and_context(
        assigns,
        current_context,
        initialized_context_assigns,
        mod.__context_sets__()
      )

    assigns = Map.put(assigns, :__surface_context__, new_context)
    {assigns, new_context}
  end

  @doc false
  # Removes from the context every value `mod` had set, dropping the whole
  # namespace entry (`to`) once it becomes empty.
  def end_context(context, mod) do
    Enum.reduce(mod.__context_sets__(), context, fn %{name: name, opts: opts}, acc ->
      to = Keyword.fetch!(opts, :to)
      context_entry = acc |> Map.get(to, %{}) |> Map.delete(name)

      if context_entry == %{} do
        Map.delete(acc, to)
      else
        Map.put(acc, to, context_entry)
      end
    end)
  end

  @doc false
  # Validates that an attribute value can be interpolated into HTML, i.e.
  # implements String.Chars; raises a descriptive error otherwise.
  def attr_value(attr, value) do
    if String.Chars.impl_for(value) do
      value
    else
      runtime_error(
        "invalid value for attribute \"#{attr}\". Expected a type that implements " <>
          "the String.Chars protocol (e.g. string, boolean, integer, atom, ...), " <>
          "got: #{inspect(value)}"
      )
    end
  end

  @doc false
  # Renders a style attribute value; when `show` is false, appends
  # "display: none;" (inserting a ";" separator only when needed).
  def style(value, show) when is_binary(value) do
    if show do
      quot(value)
    else
      semicolon = if String.ends_with?(value, ";") || value == "", do: "", else: ";"
      quot([value, semicolon, "display: none;"])
    end
  end

  def style(value, _show) do
    runtime_error(
      "invalid value for attribute \"style\". Expected a string " <>
        "got: #{inspect(value)}"
    )
  end

  @doc false
  # Normalizes a :css_class value into a space-separated string. Accepts a
  # plain string, or a list mixing class names with {class, boolean} pairs
  # where the class is kept only when the boolean is true.
  def css_class([value]) when is_list(value) do
    css_class(value)
  end

  def css_class(value) when is_binary(value) do
    value
  end

  def css_class(value) when is_list(value) do
    Enum.reduce(value, [], fn item, classes ->
      case item do
        {class, true} ->
          [to_string(class) | classes]

        class when is_binary(class) or is_atom(class) ->
          [to_string(class) | classes]

        _ ->
          classes
      end
    end)
    |> Enum.reverse()
    |> Enum.join(" ")
  end

  def css_class(value) do
    runtime_error(
      "invalid value for property of type :css_class. " <>
        "Expected a string or a keyword list, got: #{inspect(value)}"
    )
  end

  @doc false
  # Boolean HTML attributes render as the attribute name when truthy and as
  # the empty string when falsy.
  def boolean_attr(name, value) do
    if value do
      name
    else
      ""
    end
  end

  @doc false
  # Validates a :keyword-typed property value.
  def keyword_value(key, value) do
    if Keyword.keyword?(value) do
      value
    else
      runtime_error(
        "invalid value for property \"#{key}\". Expected a :keyword, got: #{inspect(value)}"
      )
    end
  end

  @doc false
  # Normalizes a :map-typed property value; keyword lists are converted.
  def map_value(_key, value) when is_map(value) do
    value
  end

  def map_value(key, value) do
    if Keyword.keyword?(value) do
      Map.new(value)
    else
      runtime_error(
        "invalid value for property \"#{key}\". Expected a :map, got: #{inspect(value)}"
      )
    end
  end

  @doc false
  # Normalizes an :event-typed property value into %{name: ..., target: ...}.
  # A bare string targets the caller component (or the LiveView when there is
  # no caller cid); a ready-made event map passes through unchanged.
  def event_value(key, [event], caller_cid) do
    event_value(key, event, caller_cid)
  end

  def event_value(key, [name | opts], caller_cid) do
    event = Map.new(opts) |> Map.put(:name, name)
    event_value(key, event, caller_cid)
  end

  def event_value(_key, nil, _caller_cid) do
    nil
  end

  def event_value(_key, name, nil) when is_binary(name) do
    %{name: name, target: :live_view}
  end

  def event_value(_key, name, caller_cid) when is_binary(name) do
    %{name: name, target: to_string(caller_cid)}
  end

  def event_value(_key, %{name: _, target: _} = event, _caller_cid) do
    event
  end

  def event_value(key, event, _caller_cid) do
    runtime_error(
      "invalid value for event \"#{key}\". Expected an :event or :string, got: #{inspect(event)}"
    )
  end

  @doc false
  # Renders a `phx-*` binding as iodata, adding `phx-target` when the event
  # targets a specific component.
  def on_phx_event(phx_event, [event], caller_cid) do
    on_phx_event(phx_event, event, caller_cid)
  end

  def on_phx_event(phx_event, [event | opts], caller_cid) do
    value = Map.new(opts) |> Map.put(:name, event)
    on_phx_event(phx_event, value, caller_cid)
  end

  def on_phx_event(phx_event, %{name: name, target: :live_view}, _caller_cid) do
    [phx_event, "=", quot(name)]
  end

  def on_phx_event(phx_event, %{name: name, target: target}, _caller_cid) do
    [phx_event, "=", quot(name), " phx-target=", quot(target)]
  end

  # Stateless component or a liveview (no caller_id)
  def on_phx_event(phx_event, event, nil) when is_binary(event) do
    [phx_event, "=", quot(event)]
  end

  def on_phx_event(phx_event, event, caller_cid) when is_binary(event) do
    [phx_event, "=", quot(event), " phx-target=", to_string(caller_cid)]
  end

  def on_phx_event(_phx_event, nil, _caller_cid) do
    []
  end

  def on_phx_event(phx_event, event, _caller_cid) do
    runtime_error(
      "invalid value for \":on-#{phx_event}\". " <>
        "Expected a :string or :event, got: #{inspect(event)}"
    )
  end

  @doc false
  # Raw `phx-*` bindings only accept strings; events must use `:on-*`.
  def phx_event(_phx_event, value) when is_binary(value) do
    value
  end

  def phx_event(phx_event, value) do
    runtime_error(
      "invalid value for \"#{phx_event}\". LiveView bindings only accept values " <>
        "of type :string. If you want to pass an :event, please use directive " <>
        ":on-#{phx_event} instead. Expected a :string, got: #{inspect(value)}"
    )
  end

  @doc false
  # Converts a normalized event map (or nil) into keyword options suitable
  # for helpers such as `live_component`.
  def event_to_opts(%{name: name, target: :live_view}, event_name) do
    [{event_name, name}]
  end

  def event_to_opts(%{name: name, target: target}, event_name) do
    [{event_name, name}, {:phx_target, target}]
  end

  def event_to_opts(nil, _event_name) do
    []
  end

  # Wraps a value in (HTML-safe) double quotes as iodata.
  defp quot(value) do
    [{:safe, "\""}, value, {:safe, "\""}]
  end

  # Raises a RuntimeError whose stacktrace starts at the caller: the first
  # two entries (Process.info/2 and this function) are dropped.
  defp runtime_error(message) do
    stacktrace =
      self()
      |> Process.info(:current_stacktrace)
      |> elem(1)
      |> Enum.drop(2)

    reraise(message, stacktrace)
  end

  # Copies each declared context `get` from the context namespace (`from`)
  # into the assigns, honoring the `:as` rename option.
  defp put_gets_into_assigns(assigns, context, gets) do
    Enum.reduce(gets, assigns, fn %{name: name, opts: opts}, acc ->
      key = Keyword.get(opts, :as, name)
      from = Keyword.fetch!(opts, :from)
      # TODO: raise an error if it's required and it hasn't been set
      value = context[from][name]
      Map.put_new(acc, key, value)
    end)
  end

  # Writes each declared context `set` into its namespace (`to`) in the
  # context and, unless scoped to `:only_children`, into the assigns as well.
  defp put_sets_into_assigns_and_context(assigns, context, values, sets) do
    Enum.reduce(sets, {assigns, context}, fn %{name: name, opts: opts}, {assigns, context} ->
      to = Keyword.fetch!(opts, :to)
      scope = Keyword.get(opts, :scope)

      case Map.fetch(values, name) do
        {:ok, value} ->
          new_context_entry =
            context
            |> Map.get(to, %{})
            |> Map.put(name, value)

          new_context = Map.put(context, to, new_context_entry)

          new_assigns =
            if scope == :only_children, do: assigns, else: Map.put(assigns, name, value)

          {new_assigns, new_context}

        :error ->
          {assigns, context}
      end
    end)
  end
end
|
lib/surface.ex
| 0.863464 | 0.563078 |
surface.ex
|
starcoder
|
defmodule AWS.Discovery do
  @moduledoc """
  AWS Application Discovery Service

  AWS Application Discovery Service helps you plan application migration
  projects by automatically identifying servers, virtual machines (VMs),
  software, and software dependencies running in your on-premises data
  centers. Application Discovery Service also collects application
  performance data, which can help you assess the outcome of your migration.
  The data collected by Application Discovery Service is securely retained in
  an AWS-hosted and managed database in the cloud. You can export the data as
  a CSV or XML file into your preferred visualization tool or cloud-migration
  solution to plan your migration. For more information, see [AWS Application
  Discovery Service FAQ](http://aws.amazon.com/application-discovery/faqs/).

  Application Discovery Service offers two modes of operation:

  <ul> <li> **Agentless discovery** mode is recommended for environments that
  use VMware vCenter Server. This mode doesn't require you to install an
  agent on each host. Agentless discovery gathers server information
  regardless of the operating systems, which minimizes the time required for
  initial on-premises infrastructure assessment. Agentless discovery doesn't
  collect information about software and software dependencies. It also
  doesn't work in non-VMware environments.

  </li> <li> **Agent-based discovery** mode collects a richer set of data
  than agentless discovery by using the AWS Application Discovery Agent,
  which you install on one or more hosts in your data center. The agent
  captures infrastructure and application information, including an inventory
  of installed software applications, system and process performance,
  resource utilization, and network dependencies between workloads. The
  information collected by agents is secured at rest and in transit to the
  Application Discovery Service database in the cloud.

  </li> </ul> We recommend that you use agent-based discovery for non-VMware
  environments and to collect information about software and software
  dependencies. You can also run agent-based and agentless discovery
  simultaneously. Use agentless discovery to quickly complete the initial
  infrastructure assessment and then install agents on select hosts.
  Application Discovery Service integrates with application discovery
  solutions from AWS Partner Network (APN) partners. Third-party application
  discovery tools can query Application Discovery Service and write to the
  Application Discovery Service database using a public API. You can then
  import the data into either a visualization tool or cloud-migration
  solution.

  <important> Application Discovery Service doesn't gather sensitive
  information. All data is handled according to the [AWS Privacy
  Policy](http://aws.amazon.com/privacy/). You can operate Application
  Discovery Service offline to inspect collected data before it is shared
  with the service.

  </important> This API reference provides descriptions, syntax, and usage
  examples for each of the actions and data types for Application Discovery
  Service. The topic for each action shows the API request parameters and the
  response. Alternatively, you can use one of the AWS SDKs to access an API
  that is tailored to the programming language or platform that you're using.
  For more information, see [AWS SDKs](http://aws.amazon.com/tools/#SDKs).

  This guide is intended for use with the [ *AWS Application Discovery
  Service User Guide*
  ](http://docs.aws.amazon.com/application-discovery/latest/userguide/).
  """

  @doc """
  Associates one or more configuration items with an application.
  """
  def associate_configuration_items_to_application(client, input, options \\ []) do
    request(client, "AssociateConfigurationItemsToApplication", input, options)
  end

  @doc """
  Deletes one or more import tasks, each identified by their import ID. Each
  import task has a number of records that can identify servers or
  applications.

  AWS Application Discovery Service has built-in matching logic that will
  identify when discovered servers match existing entries that you've
  previously discovered, the information for the already-existing discovered
  server is updated. When you delete an import task that contains records
  that were used to match, the information in those matched records that
  comes from the deleted records will also be deleted.
  """
  def batch_delete_import_data(client, input, options \\ []) do
    request(client, "BatchDeleteImportData", input, options)
  end

  @doc """
  Creates an application with the given name and description.
  """
  def create_application(client, input, options \\ []) do
    request(client, "CreateApplication", input, options)
  end

  @doc """
  Creates one or more tags for configuration items. Tags are metadata that
  help you categorize IT assets. This API accepts a list of multiple
  configuration items.
  """
  def create_tags(client, input, options \\ []) do
    request(client, "CreateTags", input, options)
  end

  @doc """
  Deletes a list of applications and their associations with configuration
  items.
  """
  def delete_applications(client, input, options \\ []) do
    request(client, "DeleteApplications", input, options)
  end

  @doc """
  Deletes the association between configuration items and one or more tags.
  This API accepts a list of multiple configuration items.
  """
  def delete_tags(client, input, options \\ []) do
    request(client, "DeleteTags", input, options)
  end

  @doc """
  Lists agents or connectors as specified by ID or other filters. All
  agents/connectors associated with your user account can be listed if you
  call `DescribeAgents` as is without passing any parameters.
  """
  def describe_agents(client, input, options \\ []) do
    request(client, "DescribeAgents", input, options)
  end

  @doc """
  Retrieves attributes for a list of configuration item IDs.

  <note> All of the supplied IDs must be for the same asset type from one of
  the following:

  <ul> <li> server

  </li> <li> application

  </li> <li> process

  </li> <li> connection

  </li> </ul> Output fields are specific to the asset type specified. For
  example, the output for a *server* configuration item includes a list of
  attributes about the server, such as host name, operating system, number of
  network cards, etc.

  For a complete list of outputs for each asset type, see [Using the
  DescribeConfigurations
  Action](http://docs.aws.amazon.com/application-discovery/latest/APIReference/discovery-api-queries.html#DescribeConfigurations).

  </note>
  """
  def describe_configurations(client, input, options \\ []) do
    request(client, "DescribeConfigurations", input, options)
  end

  @doc """
  Lists exports as specified by ID. All continuous exports associated with
  your user account can be listed if you call `DescribeContinuousExports` as
  is without passing any parameters.
  """
  def describe_continuous_exports(client, input, options \\ []) do
    request(client, "DescribeContinuousExports", input, options)
  end

  @doc """
  `DescribeExportConfigurations` is deprecated. Use
  [DescribeImportTasks](https://docs.aws.amazon.com/application-discovery/latest/APIReference/API_DescribeExportTasks.html),
  instead.
  """
  def describe_export_configurations(client, input, options \\ []) do
    request(client, "DescribeExportConfigurations", input, options)
  end

  @doc """
  Retrieve status of one or more export tasks. You can retrieve the status of
  up to 100 export tasks.
  """
  def describe_export_tasks(client, input, options \\ []) do
    request(client, "DescribeExportTasks", input, options)
  end

  @doc """
  Returns an array of import tasks for your account, including status
  information, times, IDs, the Amazon S3 Object URL for the import file, and
  more.
  """
  def describe_import_tasks(client, input, options \\ []) do
    request(client, "DescribeImportTasks", input, options)
  end

  @doc """
  Retrieves a list of configuration items that have tags as specified by the
  key-value pairs, name and value, passed to the optional parameter
  `filters`.

  There are three valid tag filter names:

  <ul> <li> tagKey

  </li> <li> tagValue

  </li> <li> configurationId

  </li> </ul> Also, all configuration items associated with your user account
  that have tags can be listed if you call `DescribeTags` as is without
  passing any parameters.
  """
  def describe_tags(client, input, options \\ []) do
    request(client, "DescribeTags", input, options)
  end

  @doc """
  Disassociates one or more configuration items from an application.
  """
  def disassociate_configuration_items_from_application(client, input, options \\ []) do
    request(client, "DisassociateConfigurationItemsFromApplication", input, options)
  end

  @doc """
  Deprecated. Use `StartExportTask` instead.

  Exports all discovered configuration data to an Amazon S3 bucket or an
  application that enables you to view and evaluate the data. Data includes
  tags and tag associations, processes, connections, servers, and system
  performance. This API returns an export ID that you can query using the
  *DescribeExportConfigurations* API. The system imposes a limit of two
  configuration exports in six hours.
  """
  def export_configurations(client, input, options \\ []) do
    request(client, "ExportConfigurations", input, options)
  end

  @doc """
  Retrieves a short summary of discovered assets.

  This API operation takes no request parameters and is called as is at the
  command prompt as shown in the example.
  """
  def get_discovery_summary(client, input, options \\ []) do
    request(client, "GetDiscoverySummary", input, options)
  end

  @doc """
  Retrieves a list of configuration items as specified by the value passed to
  the required paramater `configurationType`. Optional filtering may be
  applied to refine search results.
  """
  def list_configurations(client, input, options \\ []) do
    request(client, "ListConfigurations", input, options)
  end

  @doc """
  Retrieves a list of servers that are one network hop away from a specified
  server.
  """
  def list_server_neighbors(client, input, options \\ []) do
    request(client, "ListServerNeighbors", input, options)
  end

  @doc """
  Start the continuous flow of agent's discovered data into Amazon Athena.
  """
  def start_continuous_export(client, input, options \\ []) do
    request(client, "StartContinuousExport", input, options)
  end

  @doc """
  Instructs the specified agents or connectors to start collecting data.
  """
  def start_data_collection_by_agent_ids(client, input, options \\ []) do
    request(client, "StartDataCollectionByAgentIds", input, options)
  end

  @doc """
  Begins the export of discovered data to an S3 bucket.

  If you specify `agentIds` in a filter, the task exports up to 72 hours of
  detailed data collected by the identified Application Discovery Agent,
  including network, process, and performance details. A time range for
  exported agent data may be set by using `startTime` and `endTime`. Export
  of detailed agent data is limited to five concurrently running exports.
  If you do not include an `agentIds` filter, summary data is exported that
  includes both AWS Agentless Discovery Connector data and summary data from
  AWS Discovery Agents. Export of summary data is limited to two exports per
  day.
  """
  def start_export_task(client, input, options \\ []) do
    request(client, "StartExportTask", input, options)
  end

  @doc """
  Starts an import task, which allows you to import details of your
  on-premises environment directly into AWS without having to use the
  Application Discovery Service (ADS) tools such as the Discovery Connector
  or Discovery Agent. This gives you the option to perform migration
  assessment and planning directly from your imported data, including the
  ability to group your devices as applications and track their migration
  status.

  To start an import request, do this:

  <ol> <li> Download the specially formatted comma separated value (CSV)
  import template, which you can find here:
  [https://s3-us-west-2.amazonaws.com/templates-7cffcf56-bd96-4b1c-b45b-a5b42f282e46/import_template.csv](https://s3-us-west-2.amazonaws.com/templates-7cffcf56-bd96-4b1c-b45b-a5b42f282e46/import_template.csv).

  </li> <li> Fill out the template with your server and application data.

  </li> <li> Upload your import file to an Amazon S3 bucket, and make a note
  of it's Object URL. Your import file must be in the CSV format.

  </li> <li> Use the console or the `StartImportTask` command with the AWS
  CLI or one of the AWS SDKs to import the records from your file.

  </li> </ol> For more information, including step-by-step procedures, see
  [Migration Hub
  Import](https://docs.aws.amazon.com/application-discovery/latest/userguide/discovery-import.html)
  in the *AWS Application Discovery Service User Guide*.

  <note> There are limits to the number of import tasks you can create (and
  delete) in an AWS account. For more information, see [AWS Application
  Discovery Service
  Limits](https://docs.aws.amazon.com/application-discovery/latest/userguide/ads_service_limits.html)
  in the *AWS Application Discovery Service User Guide*.

  </note>
  """
  def start_import_task(client, input, options \\ []) do
    request(client, "StartImportTask", input, options)
  end

  @doc """
  Stop the continuous flow of agent's discovered data into Amazon Athena.
  """
  def stop_continuous_export(client, input, options \\ []) do
    request(client, "StopContinuousExport", input, options)
  end

  @doc """
  Instructs the specified agents or connectors to stop collecting data.
  """
  def stop_data_collection_by_agent_ids(client, input, options \\ []) do
    request(client, "StopDataCollectionByAgentIds", input, options)
  end

  @doc """
  Updates metadata about an application.
  """
  def update_application(client, input, options \\ []) do
    request(client, "UpdateApplication", input, options)
  end

  # Signs and issues one JSON-RPC call to the Discovery endpoint.
  #
  # Spec fix: the success tuple carries an `HTTPoison.Response.t` (the struct
  # returned by `HTTPoison.post/4` below), not the non-existent
  # `Poison.Response.t`; and the service-error case actually returns
  # `{:error, {exception, message}}` (two values pulled out of the decoded
  # error body), not `{:error, Poison.Parser.t}`.
  @spec request(map(), binary(), map(), list()) ::
          {:ok, Poison.Parser.t() | nil, HTTPoison.Response.t()}
          | {:error, {term(), term()}}
          | {:error, HTTPoison.Error.t()}
  defp request(client, action, input, options) do
    client = %{client | service: "discovery"}
    host = get_host("discovery", client)
    url = get_url(host, client)

    headers = [
      {"Host", host},
      {"Content-Type", "application/x-amz-json-1.1"},
      {"X-Amz-Target", "AWSPoseidonService_V2015_11_01.#{action}"}
    ]

    payload = Poison.Encoder.encode(input, [])
    # Headers must be signed over the final payload, so sign last.
    headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)

    case HTTPoison.post(url, payload, headers, options) do
      {:ok, response = %HTTPoison.Response{status_code: 200, body: ""}} ->
        {:ok, nil, response}

      {:ok, response = %HTTPoison.Response{status_code: 200, body: body}} ->
        {:ok, Poison.Parser.parse!(body), response}

      {:ok, _response = %HTTPoison.Response{body: body}} ->
        # Non-200: decode the AWS error envelope into {type, message}.
        error = Poison.Parser.parse!(body)
        exception = error["__type"]
        message = error["message"]
        {:error, {exception, message}}

      {:error, %HTTPoison.Error{reason: reason}} ->
        {:error, %HTTPoison.Error{reason: reason}}
    end
  end

  # Builds the service hostname; "local" region short-circuits to localhost
  # for testing against a local stub.
  defp get_host(endpoint_prefix, client) do
    if client.region == "local" do
      "localhost"
    else
      "#{endpoint_prefix}.#{client.region}.#{client.endpoint}"
    end
  end

  # Builds the request URL from the client's protocol and port.
  defp get_url(host, %{:proto => proto, :port => port}) do
    "#{proto}://#{host}:#{port}/"
  end
end
|
lib/aws/discovery.ex
| 0.885415 | 0.480601 |
discovery.ex
|
starcoder
|
defmodule EctoTestDSL.Variants.PhoenixGranular.Update do
  # Test-DSL variant for Phoenix-style `update` actions, executed "granularly":
  # each workflow below is an ordered list of named steps (interpreted by
  # T.Run.Steps), where a step is either an atom or `[step, uses: [deps]]`
  # with `uses:` naming earlier steps whose results this step consumes.
  use EctoTestDSL.Drink.Me
  alias T.Variants.PhoenixGranular.Update, as: ThisVariant
  alias T.Parse.Start
  alias T.Parse.Callbacks
  import FlowAssertions.Define.BodyParts
  alias T.Variants.Common.DefaultFunctions

  # ------------------- Workflows -----------------------------------------
  use T.Run.Steps

  # Maps each supported workflow name to its ordered step list.
  def workflows() do
    # Steps shared by every workflow, up to building the update changeset.
    from_start_through_changeset = [
      :repo_setup,
      :existing_ids,
      :params,
      :primary_key,
      [:struct_for_update, uses: [:primary_key]],
      [:changeset_for_update, uses: [:struct_for_update]]
    ]

    # Shared steps that additionally assert the changeset is valid and run
    # the per-example changeset/field checks.
    from_start_through_validation = from_start_through_changeset ++ [
      [:assert_valid_changeset, uses: [:changeset_for_update]],
      [:example_specific_changeset_checks, uses: [:changeset_for_update]],
      [:as_cast_changeset_checks, uses: [:changeset_for_update]],
      [:field_calculation_checks, uses: [:changeset_for_update]],
    ]

    %{
      # Changeset is expected to be valid; nothing is persisted.
      validation_success: from_start_through_validation ++ [
        :postcheck
      ],
      # Changeset is expected to be invalid; verify no row was inserted.
      validation_error: from_start_through_changeset ++ [
        [:refute_valid_changeset, uses: [:changeset_for_update]],
        [:example_specific_changeset_checks, uses: [:changeset_for_update]],
        [:as_cast_changeset_checks, uses: [:changeset_for_update]],
        :assert_no_insertion,
        :postcheck
      ],
      # Valid changeset whose update is expected to fail on a DB constraint.
      constraint_error: from_start_through_validation ++ [
        [:try_changeset_update, uses: [:changeset_for_update]],
        [:error_content, uses: [:try_changeset_update]],
        [:refute_valid_changeset, uses: [:error_content]],
        [:example_specific_changeset_checks, uses: [:error_content]],
        :assert_no_insertion,
        :postcheck
      ],
      # The happy path: update succeeds and the result is checked.
      success: from_start_through_validation ++ [
        [:try_changeset_update, uses: [:changeset_for_update]],
        [:ok_content, uses: [:try_changeset_update]],
        [:check_against_given_fields, uses: [:ok_content]],
        [:check_against_earlier_example, uses: [:ok_content]],
        [:as_cast_field_checks, uses: [:ok_content]],
        [:assert_id_inserted, uses: [:ok_content]],
        :postcheck
      ],
    }
  end

  # ------------------- Startup -----------------------------------------
  # Entry point: merges caller options over the defaults and hands control
  # to the generic Start module with this variant.
  def start(opts) do
    opts = Keyword.merge(default_start_opts(), opts)
    Start.start_with_variant(ThisVariant, opts)
  end

  # Default implementations for each overridable step function, plus
  # formatting options.
  defp default_start_opts, do: [
    changeset_for_update_with: &DefaultFunctions.plain_changeset/3,
    update_with: &DefaultFunctions.plain_update/2,
    get_primary_key_with: &DefaultFunctions.primary_key_from_id_param/1,
    struct_for_update_with: &DefaultFunctions.checked_get/1,
    existing_ids_with: &DefaultFunctions.existing_ids/1,
    format: :phoenix,
    usually_ignore: [],
  ]

  # ------------------- Hook functions -----------------------------------------
  # Validation hooks invoked by the framework: at :start, validate the
  # top-level option keys; at :workflow, validate the workflow name.
  # Both return `top_level` unchanged.
  def hook(:start, top_level, []) do
    assert_valid_keys(top_level)
    top_level
  end

  def hook(:workflow, top_level, [workflow_name]) do
    assert_valid_workflow_name(workflow_name)
    top_level
  end

  # Required keys are :examples_module, :repo, and every default-start key.
  defp assert_valid_keys(top_level) do
    required_keys = [:examples_module, :repo] ++ Keyword.keys(default_start_opts())
    optional_keys = [:schema]
    top_level
    |> Callbacks.validate_top_level_keys(required_keys, optional_keys)
  end

  # Fails with an elaborate assertion if the name isn't a key of workflows/0.
  defp assert_valid_workflow_name(workflow_name) do
    workflows = Map.keys(workflows())
    elaborate_assert(
      workflow_name in workflows,
      "The PhoenixGranular.Update variant only allows these workflows: #{inspect workflows}",
      left: workflow_name
    )
  end

  # ----------------------------------------------------------------------------
  # Injected into each examples module: wires up the Predefines, a `start/1`
  # that registers the module, and a Tester with workflow shortcuts.
  defmacro __using__(_) do
    quote do
      use EctoTestDSL.Predefines
      alias EctoTestDSL.Variants.PhoenixGranular
      alias __MODULE__, as: ExamplesModule

      def start(opts) do
        PhoenixGranular.Update.start([{:examples_module, ExamplesModule} | opts])
      end

      defmodule Tester do
        use EctoTestDSL.Predefines.Tester
        alias T.Run.Steps

        # Runs the workflow up to changeset creation and returns the changeset.
        def validation_changeset(example_name) do
          check_workflow(example_name, stop_after: :changeset_for_update)
          |> Keyword.get(:changeset_for_update)
        end

        # Runs the workflow through the update and returns the updated value.
        def updated(example_name) do
          {:ok, value} =
            check_workflow(example_name, stop_after: :ok_content)
            |> Keyword.get(:try_changeset_update)
          value
        end
      end
    end
  end
end
|
lib/variants/phoenix_granular_update.ex
| 0.522689 | 0.420362 |
phoenix_granular_update.ex
|
starcoder
|
defmodule Furlong.Row do
  @moduledoc """
  A single tableau row used internally by the Solver.

  A row is a constant term plus a map of cells (symbol => coefficient).
  """

  import Furlong.Util, only: [near_zero?: 1]

  alias Furlong.Row
  alias Furlong.Symbol

  defstruct constant: 0, cells: %{}

  # Create an empty row, or a row holding only a constant term.
  def new(), do: %Row{constant: 0, cells: %{}}
  def new(constant) when is_number(constant), do: %Row{constant: constant, cells: %{}}

  # Add a number to the row's constant term.
  def add(%Row{constant: current} = row, value) when is_number(value),
    do: %Row{row | constant: current + value}

  # Insert a symbol or another row with an implicit coefficient of 1.
  def insert(%Row{} = row, {:symbol, _type, _ref} = symbol), do: insert(row, symbol, 1)
  def insert(%Row{} = first, %Row{} = second), do: insert(first, second, 1)

  # Accumulate `coefficient` into the symbol's cell; a cell whose total
  # becomes numerically zero is removed entirely.
  def insert(%Row{cells: cells} = row, {:symbol, _type, _ref} = symbol, coefficient)
      when is_number(coefficient) do
    accumulated = Map.get(cells, symbol, 0) + coefficient

    updated_cells =
      if near_zero?(accumulated) do
        Map.delete(cells, symbol)
      else
        Map.put(cells, symbol, accumulated)
      end

    %Row{row | cells: updated_cells}
  end

  # Fold another row, scaled by `coefficient`, into this one: the constants
  # combine linearly and each scaled cell is merged, dropping cells that
  # cancel out to ~0.
  def insert(
        %Row{cells: base_cells, constant: base_constant},
        %Row{cells: added_cells, constant: added_constant},
        coefficient
      )
      when is_number(coefficient) do
    merged_cells =
      Enum.reduce(added_cells, base_cells, fn {symbol, coeff}, acc ->
        combined = Map.get(acc, symbol, 0) + coeff * coefficient

        if near_zero?(combined) do
          Map.delete(acc, symbol)
        else
          Map.put(acc, symbol, combined)
        end
      end)

    %Row{constant: base_constant + coefficient * added_constant, cells: merged_cells}
  end

  # Drop a symbol's cell from the row (no-op when absent).
  def remove(%Row{cells: cells} = row, {:symbol, _, _} = symbol),
    do: %Row{row | cells: Map.delete(cells, symbol)}

  # Negate the constant and every coefficient.
  def reverse_sign(%Row{constant: constant, cells: cells}) do
    %Row{
      constant: -constant,
      cells: Map.new(cells, fn {symbol, coefficient} -> {symbol, -coefficient} end)
    }
  end

  # Express `symbol` (which must be present in the row) in terms of the
  # remaining symbols by dividing the whole row through by its coefficient.
  def solve_for(%Row{constant: constant, cells: cells}, {:symbol, _, _} = symbol) do
    factor = -1.0 / Map.get(cells, symbol)

    scaled_cells =
      cells
      |> Map.delete(symbol)
      |> Map.new(fn {sym, coeff} -> {sym, coeff * factor} end)

    %Row{constant: constant * factor, cells: scaled_cells}
  end

  # Insert `lhs` with coefficient -1, then solve for `rhs`.
  def solve_for(%Row{} = row, {:symbol, _, _} = lhs, {:symbol, _, _} = rhs) do
    row
    |> insert(lhs, -1.0)
    |> solve_for(rhs)
  end

  # Coefficient stored for a symbol, defaulting to 0 when absent.
  def coefficient_for(%Row{cells: cells}, {:symbol, _, _} = symbol),
    do: Map.get(cells, symbol, 0)

  # Replace each occurrence of `symbol` with `substitution` scaled by the
  # symbol's current coefficient; rows without the symbol pass through.
  def substitute(%Row{cells: cells} = row, {:symbol, _, _} = symbol, %Row{} = substitution) do
    case Map.fetch(cells, symbol) do
      :error ->
        row

      {:ok, coefficient} ->
        row
        |> remove(symbol)
        |> insert(substitution, coefficient)
    end
  end

  # First :external symbol appearing in the row, or nil when there is none.
  def get_external_var(%Row{cells: cells}) do
    Enum.find_value(cells, fn
      {{:symbol, :external, _} = symbol, _coeff} -> symbol
      _other -> nil
    end)
  end

  # True when every cell belongs to a :dummy symbol (vacuously true if empty).
  def all_dummies?(%Row{cells: cells}) do
    Enum.all?(cells, fn {{:symbol, type, _}, _coeff} -> type == :dummy end)
  end

  # First non-dummy symbol with a negative coefficient, or the invalid symbol.
  def get_entering_symbol(%Row{cells: cells}) do
    found =
      Enum.find(cells, fn {{:symbol, type, _}, coeff} -> type != :dummy and coeff < 0.0 end)

    case found do
      nil -> Symbol.invalid()
      {symbol, _coeff} -> symbol
    end
  end

  # Any :slack or :error symbol present in the row, or the invalid symbol.
  def any_pivotable_symbol(%Row{cells: cells}) do
    pivotable =
      cells
      |> Map.keys()
      |> Enum.find(fn {:symbol, type, _} -> type in [:slack, :error] end)

    pivotable || Symbol.invalid()
  end
end
|
lib/furlong/row.ex
| 0.754825 | 0.590779 |
row.ex
|
starcoder
|
defmodule Timex.Ecto.Time do
  @moduledoc """
  Support for using Timex with :time fields
  """
  use Timex

  @behaviour Ecto.Type

  # Underlying Ecto primitive this custom type is stored as.
  def type, do: :time

  @doc """
  Handle casting to Timex.Ecto.Time
  """
  # Parses an ISO-8601 time string and converts its clock fields (with
  # microsecond precision) to a Duration via load/1.
  def cast(input) when is_binary(input) do
    case Timex.parse(input, "{ISOtime}") do
      {:ok, %NaiveDateTime{hour: hour,
      minute: minute,
      second: second,
      microsecond: {us,_}}} ->
        load({hour, minute, second, us})
      {:error, _} -> :error
    end
  end

  # NOTE(review): this clause interprets ANY numeric 3-tuple as an
  # Erlang-style {megaseconds, seconds, microseconds} timestamp
  # (Duration.from_erl/1), not as an {hour, minute, second} clock —
  # confirm callers expect that.
  def cast({h, m, s} = timestamp) when is_number(h) and is_number(m) and is_number(s) do
    {:ok, Duration.from_erl(timestamp)}
  end

  # Already a Duration: pass it through unchanged.
  def cast(%Duration{} = d) do
    {:ok, d}
  end

  # Support embeds_one/embeds_many
  def cast(%{"megaseconds" => m, "seconds" => s, "microseconds" => us}) do
    {:ok, Duration.from_erl({m,s,us})}
  end

  # Clock-shaped maps: "ms"/"millisecond" values are scaled to microseconds
  # before being handed to load/1.
  def cast(%{"hour" => h, "minute" => mm, "second" => s, "ms" => ms}) do
    load({h, mm, s, ms * 1_000})
  end

  def cast(%{"hour" => h, "minute" => mm, "second" => s, "millisecond" => ms}) do
    load({h, mm, s, ms * 1_000})
  end

  def cast(%{"hour" => h, "minute" => mm, "second" => s, "microsecond" => {us, _}}) do
    load({h, mm, s, us})
  end

  # Fallback: delegate to Ecto's own :time cast and convert the result.
  def cast(input) do
    case Ecto.Time.cast(input) do
      {:ok, time} -> load({time.hour, time.min, time.sec, time.usec})
      :error -> :error
    end
  end

  @doc """
  Load from the native Ecto representation
  """
  # Native representation is an {hour, minute, second, microseconds} tuple.
  def load({_hour, _minute, _second, _usecs} = clock) do
    d = Duration.from_clock(clock)
    {:ok, d}
  end

  # Postgrex intervals are flattened to hours: months are approximated as
  # 30 days each. NOTE(review): the 30-day month is lossy — confirm this
  # approximation is acceptable for the application.
  def load(%{:__struct__ => Postgrex.Interval, :days => days, :months => months, :secs => seconds}) do
    d = Duration.from_clock({ ((months * 30) + days) * 24, 0, seconds, 0 })
    {:ok, d}
  end

  def load(_), do: :error

  @doc """
  Convert to the native Ecto representation
  """
  def dump(%Duration{} = d) do
    {:ok, Duration.to_clock(d)}
  end

  # Erlang timestamps are first converted to a Duration, then to a clock.
  def dump({_mega, _sec, _micro} = timestamp) do
    {:ok, Duration.to_clock(Duration.from_erl(timestamp))}
  end

  def dump(_), do: :error

  # Generates the current UTC time for autogenerated fields; defaults to
  # second precision (microseconds set to 0).
  def autogenerate(precision \\ :sec)

  def autogenerate(:sec) do
    {_date, {h, m, s}} = :erlang.universaltime
    load({h, m, s, 0}) |> elem(1)
  end

  def autogenerate(:usec) do
    timestamp = {_,_, usec} = :os.timestamp
    {_date, {h, m, s}} = :calendar.now_to_datetime(timestamp)
    load({h, m, s, usec}) |> elem(1)
  end
end
|
lib/types/time.ex
| 0.738292 | 0.447098 |
time.ex
|
starcoder
|
defmodule Maestro.Store do
  @moduledoc """
  Concise API for events and snapshots.
  If you are using the `Maestro.Store.Postgres` adapter, an `Ecto.Repo` should
  be provided.
  """

  @default_options [max_sequence: 2_147_483_647]

  @type id :: HLClock.Timestamp.t()
  @type sequence :: non_neg_integer()
  @type event :: Maestro.Types.Event.t()
  @type events :: [event()]
  @type snapshot :: Maestro.Types.Snapshot.t()
  @type opts :: [{:max_sequence, sequence()}]

  @doc """
  Commit the events and apply all projections within a single transaction.
  A sequence-number conflict discards both the events and the projections so
  that the originating command can be retried.
  """
  @spec commit_all(events(), [module()]) :: :ok | {:error, :retry_command}
  def commit_all(events, projections), do: adapter().commit_all(events, projections)

  @doc """
  Commit the events provided iff there is no sequence number conflict.
  Otherwise, the command should be retried as indicated by the specific error
  tuple.
  """
  @spec commit_events(events()) :: :ok | {:error, :retry_command}
  def commit_events(events), do: adapter().commit_events(events)

  @doc """
  Store the snapshot iff its sequence number is greater than what is already
  in the store, letting partitioned nodes treat the store as the source of
  truth for snapshots.
  """
  @spec commit_snapshot(snapshot()) :: :ok
  def commit_snapshot(snapshot), do: adapter().commit_snapshot(snapshot)

  @doc """
  Retrieve all events for an aggregate by id and minimum sequence number.
  Options include:
  * `:max_sequence` - useful hydration purposes (defaults to `max_sequence/0`)
  """
  @spec get_events(id(), sequence(), opts()) :: events()
  def get_events(aggregate_id, seq, opts \\ []) do
    adapter().get_events(aggregate_id, seq, merged_options(opts))
  end

  @doc """
  Retrieve a snapshot by aggregate id and minimum sequence number, or nil when
  no snapshot is found.
  Options include:
  * `:max_sequence` - useful hydration purposes (defaults to `max_sequence/0`)
  """
  @spec get_snapshot(id(), sequence(), opts()) :: snapshot() | nil
  def get_snapshot(aggregate_id, seq, opts \\ []) do
    adapter().get_snapshot(aggregate_id, seq, merged_options(opts))
  end

  @doc """
  Return the maximum allowable sequence number permitted by the durable storage
  adapter.
  """
  @spec max_sequence :: non_neg_integer()
  def max_sequence, do: @default_options[:max_sequence]

  # Overlays caller options on the defaults and converts to the map shape
  # expected by the adapters.
  defp merged_options(opts) do
    @default_options
    |> Keyword.merge(opts)
    |> Map.new()
  end

  # Storage backend is resolved at runtime from the application environment.
  defp adapter do
    Application.get_env(:maestro, :storage_adapter, Maestro.Store.InMemory)
  end
end
|
lib/maestro/store.ex
| 0.884364 | 0.436802 |
store.ex
|
starcoder
|
defmodule ChallengeGov.Security do
  @moduledoc """
  Application env parsing for security related data
  """

  alias ChallengeGov.SecurityLogs

  # TLDs whose email addresses are assumed to belong to challenge managers;
  # defaults to [".mil"] when the env var is unset or blank.
  def challenge_manager_assumed_tlds do
    list_env(:challenge_manager_assumed_tlds, [".mil"])
  end

  # True for ".gov" addresses or any address matching an assumed TLD.
  def default_challenge_manager?(email) do
    Regex.match?(tld_regex(".gov"), email) or assume_challenge_manager?(email)
  end

  def assume_challenge_manager?(email) do
    challenge_manager_assumed_tlds()
    |> Enum.map(&tld_regex/1)
    |> Enum.any?(&Regex.match?(&1, email))
  end

  # Builds a regex matching strings that end with the given (escaped) TLD.
  defp tld_regex(tld) do
    Regex.compile!(".*#{Regex.escape(tld)}$")
  end

  def log_retention_days, do: int_env(:log_retention_in_days, 180)

  def deactivate_days, do: int_env(:account_deactivation_in_days, 90)

  def decertify_days, do: int_env(:account_decertify_in_days, 365)

  def timeout_interval, do: int_env(:session_timeout_in_minutes, 15)

  def deactivate_warning_one_days,
    do: int_env(:account_deactivation_warning_one_in_days, 10)

  def deactivate_warning_two_days,
    do: int_env(:account_deactivation_warning_two_in_days, 5)

  # Formats a connection's remote IP tuple as a string, or nil when absent.
  def extract_remote_ip(%{remote_ip: remote_ip}) do
    if is_nil(remote_ip) do
      nil
    else
      to_string(:inet_parse.ntoa(remote_ip))
    end
  end

  # Clause order matters below: identical-role and nil-role updates are no-ops.
  def track_role_change_in_security_log(_remote_ip, _current_user, _user, new_role, new_role) do
    # NO-OP, the roles are the same
  end

  def track_role_change_in_security_log(_remote_ip, _current_user, _user, nil, _previous_role) do
    # NO-OP, role is not a param being updated
  end

  def track_role_change_in_security_log(remote_ip, current_user, user, new_role, previous_role) do
    SecurityLogs.track(%{
      originator_id: current_user.id,
      originator_role: current_user.role,
      originator_identifier: current_user.email,
      originator_remote_ip: remote_ip,
      target_id: user.id,
      target_type: new_role,
      target_identifier: user.email,
      action: "role_change",
      details: %{previous_role: previous_role, new_role: new_role}
    })
  end

  # Clause order matters below: identical-status and nil-status updates are no-ops.
  def track_status_update_in_security_log(
        _remote_ip,
        _current_user,
        _user,
        new_status,
        new_status
      ) do
    # NO-OP, the statuses are the same
  end

  def track_status_update_in_security_log(_remote_ip, _current_user, _user, nil, _previous_status) do
    # NO-_OP, status is not a param being updated
  end

  def track_status_update_in_security_log(
        remote_ip,
        current_user,
        user,
        new_status,
        previous_status
      ) do
    SecurityLogs.track(%{
      originator_id: current_user.id,
      originator_role: current_user.role,
      originator_identifier: current_user.email,
      originator_remote_ip: remote_ip,
      target_id: user.id,
      target_type: user.role,
      target_identifier: user.email,
      action: "status_change",
      details: %{previous_status: previous_status, new_status: new_status}
    })
  end

  # Shared helpers: read an app env value, parse it, fall back to a default.
  defp int_env(key, default) do
    case :challenge_gov |> Application.get_env(key) |> parse_integer_env() do
      nil -> default
      value -> value
    end
  end

  defp list_env(key, default) do
    case :challenge_gov |> Application.get_env(key) |> parse_list_env() do
      nil -> default
      value -> value
    end
  end

  defp parse_integer_env(nil), do: nil
  defp parse_integer_env(""), do: nil
  defp parse_integer_env(var) when is_integer(var), do: var

  # Assertive: raises MatchError on trailing garbage after the integer.
  defp parse_integer_env(var) do
    {value, ""} = Integer.parse(var)
    value
  end

  defp parse_list_env(nil), do: nil
  defp parse_list_env(""), do: nil
  defp parse_list_env(var) when is_list(var), do: var

  defp parse_list_env(var) do
    var
    |> String.split(",")
    |> Enum.map(&String.trim/1)
  end
end
|
lib/challenge_gov/security.ex
| 0.57332 | 0.434281 |
security.ex
|
starcoder
|
defmodule Vial do
  @moduledoc """
  `Vial` is a library for staged processing and event handling.

  ## Hello world

      defmodule MyVial do
        import Vial.Cauldron

        def init(options) do
          # Initialize options
          options
        end

        def call(cauldron, options) do
          update_data(cauldron, fn data ->
            data + 1
          end)
        end
      end

  The snippet above shows a very simple vial that adds 1 to the data passed to it.
  Build and execute this in a pipeline with `Vial.run/4`.

  ```bash
  $ iex -S mix
  iex> c "path/to/file.ex"
  [MyVial]
  iex> cauldron = Vial.run(1, [{MyVial, []}], [])
  iex> cauldron.data
  2
  ```

  If this looks a lot like `Plug`, that's because this library aims to replicate `Plug`'s pipeline style of processing in a more general-purpose library.
  Let's dive into Vial's concepts and see how they compare to `Plug`.

  ## The `Vial.Cauldron` struct

  A vial encapsulates an operation performed on data, and can be made two ways.
  Function vials accept data and options as arguments, and returns the encapsulated data.

      def hello_world_vial(cauldron, _opts) do
        update_data(cauldron, fn _data ->
          "Hello world"
        end)
      end

  Module vials have an `init/1` and `call/2` function, just like a module Plug.

      defmodule MyVial do
        def init([]), do: false
        def call(cauldron, _opts), do: cauldron
      end

  Data is wrapped by a `Vial.Cauldron` struct.
  It's where all your vials mix together.
  Isn't that whimsical?

      %Vial.Cauldron{
        data: "Hello world",
        ...
      }

  Manipulate the cauldron with the `Vial.Cauldron` module.
  In the above examples, the `update_data/2` function is defined in `Vial.Cauldron`.
  Remember that Elixir data is immutable, so every manipulation returns a new copy of that cauldron.

  ## Event handing using Reagents

  A more original feature in `Vial` is the ability to add event handlers to your cauldron, and trigger them from vials.
  First, a vial needs to use `emit_event/2` and pass some data.

      defmodule MyEventEmitterVial do
        import Vial.Cauldron

        def init(_), do: []

        def call(cauldron, _opts) do
          emit_event(cauldron, cauldron.data)
        end
      end

  Then, a reagent will be given that data at the end of the pipeline via its
  `handle/2` callback, which receives the event and the reagent's initialized
  options.

      defmodule MyReagent do
        def init(_), do: []

        def handle(event, _opts) do
          IO.puts("Hello, " <> event <> "!")
        end
      end

  Then use `Vial.run/3` to run the pipeline, providing reagents as the third argument.

      cauldron = Vial.run("Rosa", [{MyEventEmitterVial, []}], [{MyReagent, []}])
      Hello, Rosa!

  Reagents run once the cauldron pipeline is halted, and each reagent gets every event, in the order they were emitted.
  Use pattern matching to let an event reject data it doesn't care about.
  """

  alias Vial.Cauldron

  @callback init(any()) :: any()
  @callback call(Cauldron.t(), any()) :: Vial.Cauldron.t()

  @doc """
  Runs `cauldron` through `vials` (each a `{module, opts}` tuple or a 1-arity
  function), then delivers every emitted event to each of the `reagents`.

  A cauldron that is already halted skips the vial pipeline and goes straight
  to event dispatch. Returns the final cauldron.
  """
  # NOTE(review): the moduledoc examples pass raw data (e.g. `Vial.run(1, ...)`),
  # but every clause below pattern-matches %Cauldron{} — confirm whether a
  # wrapping clause is missing or the examples pre-wrap the data.
  @spec run(Cauldron.t(), list(), list(), keyword()) :: Cauldron.t()
  def run(cauldron, vials, reagents, opts \\ [])

  def run(%Cauldron{halted: true} = cauldron, _vials, reagents, _opts) do
    do_events(cauldron, reagents)
  end

  def run(%Cauldron{} = cauldron, vials, reagents, _opts) do
    do_run(cauldron, vials)
    |> do_events(reagents)
  end

  # Threads the cauldron through each vial, stopping early when halted, and
  # raising when a vial returns something other than a %Cauldron{}.
  defp do_run(%Cauldron{} = cauldron, [{vial_mod, vial_opts} | vials]) do
    case vial_mod.call(cauldron, vial_mod.init(vial_opts)) do
      %Cauldron{halted: true} = cauldron ->
        cauldron

      %Cauldron{} = cauldron ->
        do_run(cauldron, vials)

      other ->
        raise "expected #{inspect(vial_mod)} to return Vial.Cauldron, got: #{inspect(other)}"
    end
  end

  defp do_run(%Cauldron{} = cauldron, [fun | vials]) when is_function(fun, 1) do
    case fun.(cauldron) do
      %Cauldron{halted: true} = cauldron ->
        cauldron

      %Cauldron{} = cauldron ->
        do_run(cauldron, vials)

      other ->
        raise "expected #{inspect(fun)} to return Vial.Cauldron, got: #{inspect(other)}"
    end
  end

  defp do_run(%Cauldron{} = cauldron, []) do
    cauldron
  end

  # Events are accumulated most-recent-first on the cauldron, so each reagent
  # sees them reversed back into emission order.
  defp do_events(%Cauldron{} = cauldron, [reagent_fun | reagents]) when is_function(reagent_fun, 1) do
    events = Enum.reverse(cauldron.events)
    Enum.each(events, reagent_fun)
    do_events(cauldron, reagents)
  end

  defp do_events(%Cauldron{} = cauldron, [{reagent_mod, reagent_opts} | reagents]) do
    events = Enum.reverse(cauldron.events)
    processed_reagent_opts = reagent_mod.init(reagent_opts)
    Enum.each(events, &reagent_mod.handle(&1, processed_reagent_opts))
    do_events(cauldron, reagents)
  end

  defp do_events(%Cauldron{} = cauldron, []) do
    cauldron
  end
end
|
lib/vial.ex
| 0.847574 | 0.927822 |
vial.ex
|
starcoder
|
defmodule Runlet.Cmd.TLS do
  @moduledoc "Display information about a TLS server"

  @doc """
  Displays TLS protocol, cipher suite and certificate details
  Args:
  "1.1.1.1"
  "1.1.1.1 1234"
  "1.1.1.1:1234"
  """
  # Accepts "host", "host port" or "host:port"; the port defaults to 443.
  @spec exec(binary) :: Enumerable.t()
  def exec(address) do
    {ip, port} =
      case Regex.split(~r/[:\s]/, address, trim: true) do
        [i, p] -> {i, p}
        [i] -> {i, "443"}
      end
    exec(ip, String.to_integer(port))
  end

  @doc """
  Displays TLS protocol, cipher suite and certificate details
  Args:
  "1.1.1.1" 443
  """
  @spec exec(binary, 0..0xFFFF) :: Enumerable.t()
  def exec(ip, port) do
    pid = self()
    # verify_fun sends every certificate/validation event back to this
    # process; the chain is collected from the mailbox after the handshake.
    # Every certificate is accepted ({:valid, state}) — this command reports,
    # it does not enforce verification.
    fun = fn cert, event, state ->
      Kernel.send(pid, {:runlet_cmd_tls, event, cert})
      {:valid, state}
    end
    response =
      case :ssl.connect(
             String.to_charlist(ip),
             port,
             [
               verify_fun: {fun, []},
               ciphers: :ssl.cipher_suites(:all, :"tlsv1.2")
             ],
             5000
           ) do
        {:ok, s} ->
          info = connection_information(s)
          _ = :ssl.close(s)
          cert = chain()
          [info | cert]
        error ->
          [inspect(error)]
      end
    # Each line of output becomes a Runlet stdout event tagged with host:port.
    response
    |> Enum.map(fn t ->
      %Runlet.Event{
        event: %Runlet.Event.Stdout{
          host: "#{ip}:#{port}",
          service: "tls",
          description: t
        },
        query: "tls #{ip}:#{port}"
      }
    end)
  end

  # Negotiated protocol and cipher suite as an inspected term (or the error).
  def connection_information(s) do
    t =
      case :ssl.connection_information(s, [:protocol, :cipher_suite]) do
        {:ok, info} -> info
        error -> error
      end
    inspect(t)
  end

  # Renders one decoded certificate; Regex.replace strips non-printable
  # characters from the issuer/subject strings.
  def format(x509) do
    """
    Data:
    version: #{Kernel.get_in(x509, [:data, :version])}
    serialNumber: #{x509 |> Kernel.get_in([:data, :serialNumber]) |> serial_number()}
    Issuer:
    #{Regex.replace(~r/[^ -~\\\n]/,
    x509 |> Kernel.get_in([:issuer]) |> to_string,
    "")}
    Validity:
    notBefore: #{Kernel.get_in(x509, [:validity, :notBefore])}
    notAfter: #{Kernel.get_in(x509, [:validity, :notAfter])}
    Subject:
    #{Regex.replace(~r/[^ -~\\\n]/,
    x509 |> Kernel.get_in([:subject]) |> to_string,
    "")}
    Signature Algorithm:
    #{Kernel.get_in(x509, [:signatureAlgorithm])}
    """
  end

  # Drains {:runlet_cmd_tls, event, cert} messages produced by verify_fun.
  # `after 0` means: stop as soon as the mailbox is empty; results are
  # accumulated in reverse and flipped back at the end.
  defp chain(), do: chain([])
  defp chain(state) do
    receive do
      {:runlet_cmd_tls, {:bad_cert, _}, x509} ->
        cert = x509 |> :runlet_x509.info() |> format()
        chain([cert | state])
      {:runlet_cmd_tls, :valid, x509} ->
        cert = x509 |> :runlet_x509.info() |> format()
        chain([cert | state])
      {:runlet_cmd_tls, :valid_peer, x509} ->
        cert = x509 |> :runlet_x509.info() |> format()
        chain([cert | state])
      {:runlet_cmd_tls, {:extension, _}, _x509} ->
        chain(state)
      error ->
        [inspect(error) | state]
    after
      0 -> state |> Enum.reverse()
    end
  end

  # Small serials: decimal plus hex, e.g. "10 (0xA)".
  defp serial_number(n) when n < 0xFF do
    serial =
      n
      |> :erlang.integer_to_list(16)
      |> to_string()
    "#{n} (0x#{serial})"
  end

  # Large serials: zero-padded hex rendered as colon-separated byte pairs.
  defp serial_number(n) when is_integer(n) do
    n
    |> :erlang.integer_to_list(16)
    |> to_string()
    |> leftpad()
    |> String.split("", trim: true)
    |> Enum.chunk_every(2)
    |> Enum.join(":")
  end

  # Pads odd-length hex strings with a leading "0" so bytes pair up evenly.
  defp leftpad(x) when is_binary(x),
    do:
      String.pad_leading(
        x,
        byte_size(x) + rem(byte_size(x), 2),
        "0"
      )
end
|
lib/runlet/cmd/tls.ex
| 0.717012 | 0.449091 |
tls.ex
|
starcoder
|
defmodule Kitt.Message.SSM do
  @moduledoc """
  Defines the structure and instantiation function
  for creating a J2735-compliant SignalStatusMessage
  An `SSM` defines the message sent by DSRC-capable infrastructure
  to relay to vehicles the status of current signals and collection
  of pending or active pre-emption or requests
  """

  @typedoc "Defines the structure of a SignalStatusMessage and the data elements comprising its fields"
  @type t :: %__MODULE__{
          timeStamp: Kitt.Types.minute_of_year(),
          second: non_neg_integer(),
          sequenceNumber: non_neg_integer(),
          status: [signal_status()],
          regional: [Kitt.Types.regional_extension()]
        }

  # Per-intersection status entry within the :status list.
  @type signal_status :: %{
          sequenceNumber: non_neg_integer(),
          id: Kitt.Types.intersection_reference_id(),
          sigStatus: [signal_status_package()],
          regional: [Kitt.Types.regional_extension()]
        }

  # A single pre-emption/priority request's status at an intersection.
  @type signal_status_package :: %{
          requester: signal_requester_info(),
          inboundOn: Kitt.Types.intersection_access_point(),
          outboundOn: Kitt.Types.intersection_access_point(),
          minute: non_neg_integer(),
          second: non_neg_integer(),
          duration: non_neg_integer(),
          status: prioritization_response_status(),
          regional: [Kitt.Types.regional_extension()]
        }

  # Identity and role of the vehicle that made a request.
  @type signal_requester_info :: %{
          id: Kitt.Types.vehicle_id(),
          request: non_neg_integer(),
          sequenceNumber: non_neg_integer(),
          role: Kitt.Types.basic_vehicle_role(),
          typeData: Kitt.Types.requestor_type()
        }

  @type prioritization_response_status ::
          :unknown
          | :requested
          | :processing
          | :watchOtherTraffic
          | :granted
          | :rejected
          | :maxPresence
          | :reserviceLocked

  @derive Jason.Encoder
  # :second and :status are the only mandatory fields for a valid SSM.
  @enforce_keys [:second, :status]
  defstruct [:regional, :second, :sequenceNumber, :status, :timeStamp]

  @doc """
  Produces an `SSM` message struct from an equivalent map or keyword input
  """
  @spec new(map() | keyword()) :: t()
  def new(message), do: struct(__MODULE__, message)

  @doc """
  Returns the `SSM` identifying integer
  """
  # Message-type id resolved from the generated :DSRC ASN.1 module.
  @spec type_id() :: non_neg_integer()
  def type_id(), do: :DSRC.signalStatusMessage()

  @doc """
  Returns the `SSM` identifying atom recognized by the ASN1 spec
  """
  @spec type() :: atom()
  def type(), do: :SignalStatusMessage
end
|
lib/kitt/message/ssm.ex
| 0.789437 | 0.525856 |
ssm.ex
|
starcoder
|
defmodule AWS.DataSync do
  @moduledoc """
  AWS DataSync
  AWS DataSync is a managed data transfer service that makes it simpler for you to
  automate moving data between on-premises storage and Amazon Simple Storage
  Service (Amazon S3) or Amazon Elastic File System (Amazon EFS).
  This API interface reference for AWS DataSync contains documentation for a
  programming interface that you can use to manage AWS DataSync.
  """

  # NOTE: this module follows the shape of the generated aws-elixir SDK
  # modules: every public function is a thin wrapper around request/4 with the
  # operation name, and all HTTP plumbing lives in the private helpers below.

  @doc """
  Cancels execution of a task.
  When you cancel a task execution, the transfer of some files is abruptly
  interrupted. The contents of files that are transferred to the destination might
  be incomplete or inconsistent with the source files. However, if you start a new
  task execution on the same task and you allow the task execution to complete,
  file content on the destination is complete and consistent. This applies to
  other unexpected failures that interrupt a task execution. In all of these
  cases, AWS DataSync successfully complete the transfer when you start the next
  task execution.
  """
  def cancel_task_execution(client, input, options \\ []) do
    request(client, "CancelTaskExecution", input, options)
  end

  @doc """
  Activates an AWS DataSync agent that you have deployed on your host.
  The activation process associates your agent with your account. In the
  activation process, you specify information such as the AWS Region that you want
  to activate the agent in. You activate the agent in the AWS Region where your
  target locations (in Amazon S3 or Amazon EFS) reside. Your tasks are created in
  this AWS Region.
  You can activate the agent in a VPC (virtual private cloud) or provide the agent
  access to a VPC endpoint so you can run tasks without going over the public
  internet.
  You can use an agent for more than one location. If a task uses multiple agents,
  all of them need to have status AVAILABLE for the task to run. If you use
  multiple agents for a source location, the status of all the agents must be
  AVAILABLE for the task to run.
  Agents are automatically updated by AWS on a regular basis, using a mechanism
  that ensures minimal interruption to your tasks.
  """
  def create_agent(client, input, options \\ []) do
    request(client, "CreateAgent", input, options)
  end

  @doc """
  Creates an endpoint for an Amazon EFS file system.
  """
  def create_location_efs(client, input, options \\ []) do
    request(client, "CreateLocationEfs", input, options)
  end

  @doc """
  Creates an endpoint for an Amazon FSx for Windows file system.
  """
  def create_location_fsx_windows(client, input, options \\ []) do
    request(client, "CreateLocationFsxWindows", input, options)
  end

  @doc """
  Defines a file system on a Network File System (NFS) server that can be read
  from or written to.
  """
  def create_location_nfs(client, input, options \\ []) do
    request(client, "CreateLocationNfs", input, options)
  end

  @doc """
  Creates an endpoint for a self-managed object storage bucket.
  For more information about self-managed object storage locations, see
  `create-object-location`.
  """
  def create_location_object_storage(client, input, options \\ []) do
    request(client, "CreateLocationObjectStorage", input, options)
  end

  @doc """
  Creates an endpoint for an Amazon S3 bucket.
  For more information, see
  https://docs.aws.amazon.com/datasync/latest/userguide/create-locations-cli.html#create-location-s3-cli
  in the *AWS DataSync User Guide*.
  """
  def create_location_s3(client, input, options \\ []) do
    request(client, "CreateLocationS3", input, options)
  end

  @doc """
  Defines a file system on a Server Message Block (SMB) server that can be read
  from or written to.
  """
  def create_location_smb(client, input, options \\ []) do
    request(client, "CreateLocationSmb", input, options)
  end

  @doc """
  Creates a task.
  A task is a set of two locations (source and destination) and a set of Options
  that you use to control the behavior of a task. If you don't specify Options
  when you create a task, AWS DataSync populates them with service defaults.
  When you create a task, it first enters the CREATING state. During CREATING AWS
  DataSync attempts to mount the on-premises Network File System (NFS) location.
  The task transitions to the AVAILABLE state without waiting for the AWS location
  to become mounted. If required, AWS DataSync mounts the AWS location before each
  task execution.
  If an agent that is associated with a source (NFS) location goes offline, the
  task transitions to the UNAVAILABLE status. If the status of the task remains in
  the CREATING status for more than a few minutes, it means that your agent might
  be having trouble mounting the source NFS file system. Check the task's
  ErrorCode and ErrorDetail. Mount issues are often caused by either a
  misconfigured firewall or a mistyped NFS server hostname.
  """
  def create_task(client, input, options \\ []) do
    request(client, "CreateTask", input, options)
  end

  @doc """
  Deletes an agent.
  To specify which agent to delete, use the Amazon Resource Name (ARN) of the
  agent in your request. The operation disassociates the agent from your AWS
  account. However, it doesn't delete the agent virtual machine (VM) from your
  on-premises environment.
  """
  def delete_agent(client, input, options \\ []) do
    request(client, "DeleteAgent", input, options)
  end

  @doc """
  Deletes the configuration of a location used by AWS DataSync.
  """
  def delete_location(client, input, options \\ []) do
    request(client, "DeleteLocation", input, options)
  end

  @doc """
  Deletes a task.
  """
  def delete_task(client, input, options \\ []) do
    request(client, "DeleteTask", input, options)
  end

  @doc """
  Returns metadata such as the name, the network interfaces, and the status (that
  is, whether the agent is running or not) for an agent.
  To specify which agent to describe, use the Amazon Resource Name (ARN) of the
  agent in your request.
  """
  def describe_agent(client, input, options \\ []) do
    request(client, "DescribeAgent", input, options)
  end

  @doc """
  Returns metadata, such as the path information about an Amazon EFS location.
  """
  def describe_location_efs(client, input, options \\ []) do
    request(client, "DescribeLocationEfs", input, options)
  end

  @doc """
  Returns metadata, such as the path information about an Amazon FSx for Windows
  location.
  """
  def describe_location_fsx_windows(client, input, options \\ []) do
    request(client, "DescribeLocationFsxWindows", input, options)
  end

  @doc """
  Returns metadata, such as the path information, about an NFS location.
  """
  def describe_location_nfs(client, input, options \\ []) do
    request(client, "DescribeLocationNfs", input, options)
  end

  @doc """
  Returns metadata about a self-managed object storage server location.
  For more information about self-managed object storage locations, see
  `create-object-location`.
  """
  def describe_location_object_storage(client, input, options \\ []) do
    request(client, "DescribeLocationObjectStorage", input, options)
  end

  @doc """
  Returns metadata, such as bucket name, about an Amazon S3 bucket location.
  """
  def describe_location_s3(client, input, options \\ []) do
    request(client, "DescribeLocationS3", input, options)
  end

  @doc """
  Returns metadata, such as the path and user information about an SMB location.
  """
  def describe_location_smb(client, input, options \\ []) do
    request(client, "DescribeLocationSmb", input, options)
  end

  @doc """
  Returns metadata about a task.
  """
  def describe_task(client, input, options \\ []) do
    request(client, "DescribeTask", input, options)
  end

  @doc """
  Returns detailed metadata about a task that is being executed.
  """
  def describe_task_execution(client, input, options \\ []) do
    request(client, "DescribeTaskExecution", input, options)
  end

  @doc """
  Returns a list of agents owned by an AWS account in the AWS Region specified in
  the request.
  The returned list is ordered by agent Amazon Resource Name (ARN).
  By default, this operation returns a maximum of 100 agents. This operation
  supports pagination that enables you to optionally reduce the number of agents
  returned in a response.
  If you have more agents than are returned in a response (that is, the response
  returns only a truncated list of your agents), the response contains a marker
  that you can specify in your next request to fetch the next page of agents.
  """
  def list_agents(client, input, options \\ []) do
    request(client, "ListAgents", input, options)
  end

  @doc """
  Returns a list of source and destination locations.
  If you have more locations than are returned in a response (that is, the
  response returns only a truncated list of your agents), the response contains a
  token that you can specify in your next request to fetch the next page of
  locations.
  """
  def list_locations(client, input, options \\ []) do
    request(client, "ListLocations", input, options)
  end

  @doc """
  Returns all the tags associated with a specified resource.
  """
  def list_tags_for_resource(client, input, options \\ []) do
    request(client, "ListTagsForResource", input, options)
  end

  @doc """
  Returns a list of executed tasks.
  """
  def list_task_executions(client, input, options \\ []) do
    request(client, "ListTaskExecutions", input, options)
  end

  @doc """
  Returns a list of all the tasks.
  """
  def list_tasks(client, input, options \\ []) do
    request(client, "ListTasks", input, options)
  end

  @doc """
  Starts a specific invocation of a task.
  A `TaskExecution` value represents an individual run of a task. Each task can
  have at most one `TaskExecution` at a time.
  `TaskExecution` has the following transition phases: INITIALIZING | PREPARING |
  TRANSFERRING | VERIFYING | SUCCESS/FAILURE.
  For detailed information, see the Task Execution section in the Components and
  Terminology topic in the *AWS DataSync User Guide*.
  """
  def start_task_execution(client, input, options \\ []) do
    request(client, "StartTaskExecution", input, options)
  end

  @doc """
  Applies a key-value pair to an AWS resource.
  """
  def tag_resource(client, input, options \\ []) do
    request(client, "TagResource", input, options)
  end

  @doc """
  Removes a tag from an AWS resource.
  """
  def untag_resource(client, input, options \\ []) do
    request(client, "UntagResource", input, options)
  end

  @doc """
  Updates the name of an agent.
  """
  def update_agent(client, input, options \\ []) do
    request(client, "UpdateAgent", input, options)
  end

  @doc """
  Updates the metadata associated with a task.
  """
  def update_task(client, input, options \\ []) do
    request(client, "UpdateTask", input, options)
  end

  # Builds and signs an AWS JSON-1.1 POST request for the given action and
  # dispatches it. "FmrsService" is the service's X-Amz-Target prefix as
  # emitted by the SDK generator.
  @spec request(AWS.Client.t(), binary(), map(), list()) ::
          {:ok, map() | nil, map()}
          | {:error, term()}
  defp request(client, action, input, options) do
    client = %{client | service: "datasync"}
    host = build_host("datasync", client)
    url = build_url(host, client)
    headers = [
      {"Host", host},
      {"Content-Type", "application/x-amz-json-1.1"},
      {"X-Amz-Target", "FmrsService.#{action}"}
    ]
    payload = encode!(client, input)
    headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
    post(client, url, payload, headers, options)
  end

  # Sends the request; a 200 with a non-empty body is JSON-decoded, an empty
  # body yields nil, any other status becomes {:error, {:unexpected_response, _}}.
  defp post(client, url, payload, headers, options) do
    case AWS.Client.request(client, :post, url, payload, headers, options) do
      {:ok, %{status_code: 200, body: body} = response} ->
        body = if body != "", do: decode!(client, body)
        {:ok, body, response}
      {:ok, response} ->
        {:error, {:unexpected_response, response}}
      error = {:error, _reason} -> error
    end
  end

  # Host resolution: an explicit endpoint wins for the "local" pseudo-region,
  # otherwise "localhost"; real regions use prefix.region.endpoint.
  defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
    endpoint
  end

  defp build_host(_endpoint_prefix, %{region: "local"}) do
    "localhost"
  end

  defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
    "#{endpoint_prefix}.#{region}.#{endpoint}"
  end

  defp build_url(host, %{:proto => proto, :port => port}) do
    "#{proto}://#{host}:#{port}/"
  end

  defp encode!(client, payload) do
    AWS.Client.encode!(client, payload, :json)
  end

  defp decode!(client, payload) do
    AWS.Client.decode!(client, payload, :json)
  end
end
|
lib/aws/generated/data_sync.ex
| 0.844633 | 0.715712 |
data_sync.ex
|
starcoder
|
defmodule Helloworld.Hello.Pet.Color do
  @moduledoc false
  # Proto3 enum for Pet.color — generated protobuf code; edit the .proto, not this file.
  use Protobuf, enum: true, syntax: :proto3

  @type t :: integer | :BLACK | :WHITE | :BLUE | :RED | :YELLOW | :GREEN

  field :BLACK, 0
  field :WHITE, 1
  field :BLUE, 2
  field :RED, 3
  field :YELLOW, 4
  field :GREEN, 5
end
defmodule Helloworld.Hello.Pet do
  @moduledoc false
  # Proto3 message with a name and an enum color — generated protobuf code.
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          name: String.t(),
          color: Helloworld.Hello.Pet.Color.t()
        }
  defstruct [:name, :color]

  field :name, 1, type: :string
  field :color, 2, type: Helloworld.Hello.Pet.Color, enum: true
end
defmodule Helloworld.Hello do
  @moduledoc false
  # Proto3 message exercising scalar types, a oneof (:choice over c1/c2) and a
  # repeated nested message — generated protobuf code.
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          choice: {atom, any},
          name: String.t(),
          d: float | :infinity | :negative_infinity | :nan,
          f: float | :infinity | :negative_infinity | :nan,
          b: boolean,
          n: integer,
          l: integer,
          pets: [Helloworld.Hello.Pet.t()]
        }
  defstruct [:choice, :name, :d, :f, :b, :n, :l, :pets]

  oneof :choice, 0
  field :name, 1, type: :string
  field :d, 2, type: :double
  field :f, 3, type: :float
  field :b, 4, type: :bool
  field :n, 5, type: :int32
  field :l, 6, type: :int64
  # c1/c2 are the alternatives of the :choice oneof (index 0 above).
  field :c1, 7, type: :string, oneof: 0
  field :c2, 8, type: :bool, oneof: 0
  field :pets, 9, repeated: true, type: Helloworld.Hello.Pet
end
defmodule Helloworld.HelloRequest do
  @moduledoc false
  # Request wrapper carrying a single Hello message — generated protobuf code.
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          request: Helloworld.Hello.t() | nil
        }
  defstruct [:request]

  field :request, 1, type: Helloworld.Hello
end
defmodule Helloworld.HelloReply do
  @moduledoc false
  # Reply wrapper carrying a single Hello message — generated protobuf code.
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          response: Helloworld.Hello.t() | nil
        }
  defstruct [:response]

  field :response, 1, type: Helloworld.Hello
end
defmodule Helloworld.Greeter.Service do
  @moduledoc false
  # gRPC service definition: unary SayHello(HelloRequest) -> HelloReply.
  use GRPC.Service, name: "helloworld.Greeter"

  rpc :SayHello, Helloworld.HelloRequest, Helloworld.HelloReply
end
defmodule Helloworld.Greeter.Stub do
  @moduledoc false
  # Client stub generated from the Greeter service definition above.
  use GRPC.Stub, service: Helloworld.Greeter.Service
end
|
elixir_grpc_bench/lib/helloworld.pb.ex
| 0.805441 | 0.474327 |
helloworld.pb.ex
|
starcoder
|
defmodule Timex.Helpers do
  @moduledoc false
  alias Timex.Time
  # Brings in @million, @ordinal_day_map, @valid_months and the
  # is_year/is_month/is_day_of_year guards used below.
  use Timex.Constants
  import Timex.Macros

  # Current UTC datetime as {date, {hour, min, sec, milliseconds}}.
  def calendar_universal_time() do
    {_, _, us} = ts = Time.now
    {d,{h,min,sec}} = :calendar.now_to_universal_time(ts)
    {d,{h,min,sec,round(us/1000)}}
  end

  # Current local datetime as {date, {hour, min, sec, milliseconds}}.
  def calendar_local_time() do
    {_, _, us} = ts = Time.now
    {d,{h,min,sec}} = :calendar.now_to_local_time(ts)
    {d,{h,min,sec,round(us/1000)}}
  end

  # Converts gregorian microseconds (plus an extra-seconds offset) into a
  # {date, {h, m, s, ms}} tuple; sub-second precision is kept as milliseconds.
  def calendar_gregorian_microseconds_to_datetime(us, addseconds) do
    sec = div(us, @million)
    u = rem(us, @million)
    {d,{h,m,s}} = :calendar.gregorian_seconds_to_datetime(sec + addseconds)
    {d,{h,m,s,round(u/1000)}}
  end

  # Converts an ISO ordinal day to {year, month, day}, first normalizing
  # out-of-range day numbers into the previous/next year.
  def iso_day_to_date_tuple(year, day) when is_year(year) and is_day_of_year(day) do
    {year, day} = cond do
      day < 1 && :calendar.is_leap_year(year - 1) -> {year - 1, day + 366}
      day < 1 -> {year - 1, day + 365}
      day > 366 && :calendar.is_leap_year(year) -> {year, day - 366}
      day > 365 -> {year, day - 365}
      true -> {year, day}
    end
    # Last map entry whose ordinal start is <= day gives the month.
    # NOTE(review): `day - first_of_month` assumes @ordinal_day_map stores the
    # ordinal day *before* each month starts; confirm against its definition,
    # otherwise this is off by one.
    {_, month, first_of_month} = Enum.take_while(@ordinal_day_map, fn {_, _, oday} -> oday <= day end) |> List.last
    {year, month, day - first_of_month}
  end

  def days_in_month(year, month) when is_year(year) and is_month(month) do
    :calendar.last_day_of_the_month(year, month)
  end
  # Fallback clause: classifies why the guarded clause above did not match.
  # NOTE(review): the final branch also catches inputs where year > 0 and
  # month is valid but the is_year/is_month guards still rejected them.
  def days_in_month(year, month) do
    valid_year? = year > 0
    valid_month? = month in @valid_months
    cond do
      !valid_year? && valid_month? ->
        {:error, :invalid_year}
      valid_year? && !valid_month? ->
        {:error, :invalid_month}
      true ->
        {:error, :invalid_year_and_month}
    end
  end

  @doc """
  Given a {year, month, day} tuple, normalizes it so
  that the day does not exceed the maximum valid days in that month
  """
  def normalize_date_tuple({year, month, day}) do
    # Check if we got past the last day of the month
    max_day = days_in_month(year, month)
    {year, month, min(day, max_day)}
  end

  # Maps any integer onto 1..12, with multiples of 12 mapping to 12.
  def round_month(m) do
    case mod(m, 12) do
      0 -> 12
      other -> other
    end
  end

  # Floored modulo: result always has the sign of b (non-negative for b > 0).
  defp mod(a, b), do: rem(rem(a, b) + b, b)
end
|
lib/timex/helpers.ex
| 0.748076 | 0.482368 |
helpers.ex
|
starcoder
|
defmodule Umbra.Operations do
  @moduledoc """
  This module is the facade for the `Umbra.CodeGenerator` module.

  It defines all macros `definit/2`, `defcall/3`, `defcast/3`, `definfo/3` and `defcontinue/3`.
  """

  alias Umbra.FunctionGenerator

  @doc """
  Generate the `GenServer` `c:GenServer.init/1` callback.

  It is server-side only, so no client method will be defined.

  ## Options

  - when: `a statement`
  - state: `a statement`, default to `_state`

  ## Example

  Defining:

      definit state: state, do: {:ok, state}

  Will generate:

      def init(state: state) do
        {:ok, state}
      end
  """
  @spec definit(list(), list()) :: tuple()
  defmacro definit(opts \\ [], body \\ []),
    do: generate(:init, nil, opts ++ body)

  @doc """
  Generate the `GenServer` `c:GenServer.handle_call/3` callback and
  a client method to call the function through `GenServer.call/2`.

  By default generates the client and server function in public.

  ## Options

  - private: `boolean()`, default to `false`
  - when: `a statement`
  - server: `boolean()`, default to `true`
  - client: `boolean()`, default to `true`
  - state: `a statement`, default to `_state`
  - from: `a statement`, default to `_from`

  ## Example

  Defining:

      defcall {:compute, a, b}, do: {:reply, a + b, nil}

  Will generate:

      def compute(pid_or_state, a, b) do
        {:ok, GenServer.call(pid_or_state, {:compute, a, b})}
      end

      def handle_call({:compute, a, b}, _from, _state) do
        {:reply, a + b, nil}
      end
  """
  @spec defcall(atom() | tuple(), list(), list()) :: tuple()
  defmacro defcall(definition, opts \\ [], body \\ []),
    do: generate(:call, definition, opts ++ body)

  @doc """
  Generate the `GenServer` `c:GenServer.handle_cast/2` callback and
  a client method to call the function through `GenServer.cast/2`.

  By default generates the client and server function in public.

  ## Options

  - private: `boolean()`, default to `false`
  - when: `a statement`
  - server: `boolean()`, default to `true`
  - client: `boolean()`, default to `true`
  - state: `a statement`, default to `_state`

  ## Example

  Defining:

      defcast {:set_state, %{id: id, name: name} = new_state}, do: {:noreply, new_state}

  Will generate:

      def set_state(pid, %{id: _id, name: _name} = new_state) do
        GenServer.cast(pid, {:set_state, new_state})
      end

      def handle_cast({:set_state, %{id: id, name: name} = new_state}, _state) do
        {:noreply, new_state}
      end
  """
  @spec defcast(atom() | tuple(), list(), list()) :: tuple()
  defmacro defcast(definition, opts \\ [], body \\ []),
    do: generate(:cast, definition, opts ++ body)

  @doc """
  Generate the `GenServer` `c:GenServer.handle_info/2` callback and
  a client method to call the function through `Process.send/3`.

  By default only generate the server-side function.
  The client-side function can be useful sometimes.

  ## Options

  - private: `boolean()`, default to `false`
  - when: `a statement`
  - server: `boolean()`, default to `true`
  - client: `boolean()`, default to `false`
  - state: `a statement`, default to `_state`

  ## Example

  Defining:

      definfo {:ping}, client: true, state: state do
        IO.puts(:pong)
        {:noreply, state}
      end

  Will generate:

      def ping(pid) do
        Process.send(pid, {:ping})
      end

      def handle_info({:ping}, state) do
        IO.puts(:pong)
        {:noreply, state}
      end
  """
  @spec definfo(atom() | tuple(), list(), list()) :: tuple()
  defmacro definfo(definition, opts \\ [], body \\ []),
    do: generate(:info, definition, opts ++ body)

  @doc """
  Generate the `GenServer` `c:GenServer.handle_continue/2` callback.

  It is server-side only, so no client method will be defined.

  ## Options

  - when: `a statement`
  - state: `a statement`, default to `_state`

  ## Example

  Defining:

      defcontinue {:send_to_process, pid, result}, state: state do
        Process.send(pid, result)
        {:noreply, state}
      end

  Will generate:

      def handle_continue({:send_to_process, pid, result}, state) do
        Process.send(pid, result)
        {:noreply, state}
      end
  """
  @spec defcontinue(atom() | tuple(), list(), list()) :: tuple()
  defmacro defcontinue(definition, opts \\ [], body \\ []),
    do: generate(:continue, definition, opts ++ body)

  # Validates the options for the given callback type and builds the
  # requested client and/or server function ASTs via FunctionGenerator.
  defp generate(type, definition, opts) do
    opts = options(type, opts)

    # A server callback without a :do body is meaningless.
    if Keyword.get(opts, :server) and Keyword.get(opts, :do) == nil,
      do: raise(ArgumentError, message: "a body should be given when defining a server function")

    # Collect whichever of client/server functions were requested.
    functions =
      [
        if(Keyword.get(opts, :client),
          do: FunctionGenerator.generate_client_function(type, definition, opts)
        ),
        if(Keyword.get(opts, :server),
          do: FunctionGenerator.generate_server_function(type, definition, opts)
        )
      ]
      |> Enum.filter(&(!is_nil(&1)))

    if Enum.empty?(functions) do
      raise(ArgumentError,
        message: "at least one function should be defined, server or client side."
      )
    end

    functions
    |> FunctionGenerator.add_to_module()
  end

  # Option schema for :init — server-only, so :server is pinned to true
  # via inclusion: [true].
  defp options(:init, opts) do
    KeywordValidator.validate!(
      opts,
      %{
        server: [
          type: :boolean,
          default: true,
          inclusion: [true]
        ],
        when: [],
        state: [],
        do: []
      }
    )
  end

  # Option schema for :call — both client and server default to true;
  # :from is only meaningful for handle_call.
  defp options(:call, opts) do
    KeywordValidator.validate!(
      opts,
      %{
        private: [
          type: :boolean,
          default: false
        ],
        server: [
          type: :boolean,
          default: true
        ],
        client: [
          type: :boolean,
          default: true
        ],
        when: [],
        state: [],
        from: [],
        do: []
      }
    )
  end

  # Option schema for :cast — both client and server default to true.
  defp options(:cast, opts) do
    KeywordValidator.validate!(
      opts,
      %{
        private: [
          type: :boolean,
          default: false
        ],
        server: [
          type: :boolean,
          default: true
        ],
        client: [
          type: :boolean,
          default: true
        ],
        when: [],
        state: [],
        do: []
      }
    )
  end

  # Option schema for :info — client defaults to false (handle_info is
  # usually triggered by messages, not by a client wrapper).
  defp options(:info, opts) do
    KeywordValidator.validate!(
      opts,
      %{
        private: [
          type: :boolean,
          default: false
        ],
        server: [
          type: :boolean,
          default: true
        ],
        client: [
          type: :boolean,
          default: false
        ],
        when: [],
        state: [],
        do: []
      }
    )
  end

  # Option schema for :continue — server-only, like :init.
  defp options(:continue, opts) do
    KeywordValidator.validate!(
      opts,
      %{
        server: [
          type: :boolean,
          default: true,
          inclusion: [true]
        ],
        when: [],
        state: [],
        do: []
      }
    )
  end
end
|
lib/umbra/operations.ex
| 0.896016 | 0.590897 |
operations.ex
|
starcoder
|
defmodule SSHSubsystemFwup do
  @moduledoc """
  SSH subsystem for upgrading Nerves devices

  This module provides an SSH subsystem for Erlang's `ssh` application. This
  makes it possible to send firmware updates to Nerves devices using plain old
  `ssh` like this:

  ```shell
  cat $firmware | ssh -s $ip_address fwup
  ```

  Where `$ip_address` is the IP address of your Nerves device. Depending on how
  you have Erlang's `ssh` application set up, you may need to pass more
  parameters (like username, port, identities, etc.).

  See [`nerves_ssh`](https://github.com/nerves-project/nerves_ssh/) for an easy
  way to set this up. If you don't want to use `nerves_ssh`, then in your call
  to `:ssh.daemon` add the return value from
  `SSHSubsystemFwup.subsystem_spec/1`:

  ```elixir
  devpath = Nerves.Runtime.KV.get("nerves_fw_devpath")

  :ssh.daemon([
    {:subsystems, [SSHSubsystemFwup.subsystem_spec(devpath: devpath)]}
  ])
  ```

  See `SSHSubsystemFwup.subsystem_spec/1` for options. You will almost always
  need to pass the path to the device that should be updated since that is
  device-specific.
  """

  @typedoc """
  Options:

  * `:devpath` - path for fwup to upgrade (Required)
  * `:fwup_path` - path to the fwup firmware update utility
  * `:fwup_env` - a list of name,value tuples to be passed to the OS environment for fwup
  * `:fwup_extra_options` - additional options to pass to fwup like for setting
    public keys
  * `:precheck_callback` - an MFA to call when there's a connection. If specified,
    the callback will be passed the username and the current set of options. If allowed,
    it should return `{:ok, new_options}`. Any other return value closes the connection.
  * `:success_callback` - an MFA to call when a firmware update completes
    successfully. Defaults to `{Nerves.Runtime, :reboot, []}`.
  * `:task` - the task to run in the firmware update. Defaults to `"upgrade"`
  """
  @behaviour :ssh_client_channel

  @type options :: [
          devpath: Path.t(),
          fwup_path: Path.t(),
          fwup_env: [{String.t(), String.t()}],
          fwup_extra_options: [String.t()],
          precheck_callback: mfa() | nil,
          task: String.t(),
          success_callback: mfa()
        ]

  require Logger

  alias SSHSubsystemFwup.FwupPort

  @doc """
  Helper for creating the SSH subsystem spec
  """
  @spec subsystem_spec(options()) :: :ssh.subsystem_spec()
  def subsystem_spec(options \\ []) do
    # :ssh requires the subsystem name to be a charlist
    {'fwup', {__MODULE__, options}}
  end

  @impl :ssh_client_channel
  def init(options) do
    # Combine the default options, any application environment options and finally subsystem options
    # (later sources win, so subsystem options have the highest precedence).
    combined_options =
      default_options()
      |> Keyword.merge(Application.get_all_env(:ssh_subsystem_fwup))
      |> Keyword.merge(options)

    {:ok, %{state: :running_fwup, id: nil, cm: nil, fwup: nil, options: combined_options}}
  end

  # Baseline option values; see the `options` type for what each means.
  defp default_options() do
    [
      devpath: "",
      fwup_path: System.find_executable("fwup"),
      fwup_env: [],
      fwup_extra_options: [],
      precheck_callback: nil,
      task: "upgrade",
      success_callback: {Nerves.Runtime, :reboot, []}
    ]
  end

  @impl :ssh_client_channel
  # Channel established: run the precheck (if any) and validate the device
  # path before launching fwup. On failure, report the error to the client,
  # exit with status 1 and close the channel.
  def handle_msg({:ssh_channel_up, channel_id, cm}, state) do
    with {:ok, options} <- precheck(state.options[:precheck_callback], state.options),
         :ok <- check_devpath(options[:devpath]) do
      Logger.debug("ssh_subsystem_fwup: starting fwup")
      fwup = FwupPort.open_port(options)
      {:ok, %{state | id: channel_id, cm: cm, fwup: fwup}}
    else
      {:error, reason} ->
        _ = :ssh_connection.send(cm, channel_id, "Error: #{reason}")
        :ssh_connection.exit_status(cm, channel_id, 1)
        :ssh_connection.close(cm, channel_id)
        {:stop, :normal, state}
    end
  end

  # Output from the fwup port: either forward progress to the SSH client, or,
  # when fwup finishes, flush remaining output, propagate the exit status and
  # run the success callback (only invoked for status 0 — see run_callback/2).
  def handle_msg({port, message}, %{fwup: port} = state) do
    case FwupPort.handle_port(port, message) do
      {:respond, response} ->
        _ = :ssh_connection.send(state.cm, state.id, response)
        {:ok, state}

      {:done, response, status} ->
        _ = if response != "", do: :ssh_connection.send(state.cm, state.id, response)
        _ = :ssh_connection.send_eof(state.cm, state.id)
        _ = :ssh_connection.exit_status(state.cm, state.id, status)
        :ssh_connection.close(state.cm, state.id)
        Logger.debug("ssh_subsystem_fwup: fwup exited with status #{status}")
        run_callback(status, state.options[:success_callback])
        {:stop, :normal, state}
    end
  end

  # The fwup port died unexpectedly: report failure (exit status 1) and close.
  def handle_msg({:EXIT, port, _reason}, %{fwup: port} = state) do
    _ = :ssh_connection.send_eof(state.cm, state.id)
    _ = :ssh_connection.exit_status(state.cm, state.id, 1)
    :ssh_connection.close(state.cm, state.id)
    {:stop, :normal, state}
  end

  def handle_msg(message, state) do
    Logger.debug("Ignoring message #{inspect(message)}")
    {:ok, state}
  end

  @impl :ssh_client_channel
  # Data type 0 is stdin from the SSH client: stream it into fwup.
  def handle_ssh_msg({:ssh_cm, _cm, {:data, _channel_id, 0, data}}, state) do
    FwupPort.send_data(state.fwup, data)
    {:ok, state}
  end

  def handle_ssh_msg({:ssh_cm, _cm, {:data, _channel_id, 1, _data}}, state) do
    # Ignore stderr
    {:ok, state}
  end

  def handle_ssh_msg({:ssh_cm, _cm, {:eof, _channel_id}}, state) do
    {:ok, state}
  end

  def handle_ssh_msg({:ssh_cm, _cm, {:signal, _, _}}, state) do
    # Ignore signals
    {:ok, state}
  end

  def handle_ssh_msg({:ssh_cm, _cm, {:exit_signal, _channel_id, _, _error, _}}, state) do
    {:stop, :normal, state}
  end

  def handle_ssh_msg({:ssh_cm, _cm, {:exit_status, _channel_id, _status}}, state) do
    {:stop, :normal, state}
  end

  def handle_ssh_msg({:ssh_cm, _cm, message}, state) do
    Logger.debug("Ignoring handle_ssh_msg #{inspect(message)}")
    {:ok, state}
  end

  @impl :ssh_client_channel
  def handle_call(_request, _from, state) do
    {:reply, :error, state}
  end

  @impl :ssh_client_channel
  def handle_cast(_message, state) do
    {:noreply, state}
  end

  # Only run the success callback when fwup exited with status 0.
  defp run_callback(0 = _rc, {m, f, a}) do
    # Let others know that fwup was successful. The usual operation
    # here is to reboot. Run the callback in its own process so that
    # any issues with it don't affect processing here.
    _ = spawn(m, f, a)
    :ok
  end

  defp run_callback(_rc, _mfa), do: :ok

  @impl :ssh_client_channel
  def terminate(_reason, _state) do
    :ok
  end

  @impl :ssh_client_channel
  def code_change(_old, state, _extra) do
    {:ok, state}
  end

  # The update target must be an existing path (e.g. /dev/mmcblk0).
  defp check_devpath(devpath) do
    if is_binary(devpath) and File.exists?(devpath) do
      :ok
    else
      {:error, "Invalid device path: #{inspect(devpath)}"}
    end
  end

  # No precheck configured: connection is allowed with options unchanged.
  defp precheck(nil, options), do: {:ok, options}

  # Precheck MFA may veto the connection or return options to merge in.
  defp precheck({m, f, a}, options) do
    case apply(m, f, a) do
      {:ok, new_options} -> {:ok, Keyword.merge(options, new_options)}
      {:error, reason} -> {:error, reason}
      e -> {:error, "precheck failed for unknown reason - #{inspect(e)}"}
    end
  end
end
|
lib/ssh_subsystem_fwup.ex
| 0.821939 | 0.61973 |
ssh_subsystem_fwup.ex
|
starcoder
|
defmodule Ueberauth.Strategy.CAS.User do
  @moduledoc """
  Representation of a CAS user with their roles.

  A [CAS serviceResponse][response] is either an error message or a success
  message. In the success case, the response often contains various attributes
  containing information about the user.

  For example, a successful request might look like this:

  ```xml
  <cas:serviceResponse xmlns:cas="http://www.yale.edu/tp/cas">
    <cas:authenticationSuccess>
      <cas:user>example</cas:user>
      <cas:attributes>
        <cas:authenticationDate>2016-06-29T21:53:41Z</cas:authenticationDate>
        <cas:longTermAuthenticationRequestTokenUsed>false</cas:longTermAuthenticationRequestTokenUsed>
        <cas:isFromNewLogin>true</cas:isFromNewLogin>
        <cas:email><EMAIL></cas:email>
      </cas:attributes>
    </cas:authenticationSuccess>
  </cas:serviceResponse>
  ```

  Note that strictly speaking version 2.0 of CAS does not support attributes.
  The strategy however does not make this distinction: if attributes exist, the strategy will
  use them.

  ## User struct

  Accessing the attributes is possible by accessing the "attributes"
  on the raw information. For example:

  ```elixir
  def extract_attributes(%Ueberauth.Auth{} = auth) do
    attributes = auth.extra.raw_info.user.attributes
    # Do something with the attributes
  end
  ```

  [response]: https://apereo.github.io/cas/6.2.x/protocol/CAS-Protocol-V2-Specification.html#appendix-a-cas-response-xml-schema
  [old]: https://apereo.github.io/cas/6.2.x/protocol/CAS-Protocol-V2-Specification.html#appendix-a-cas-response-xml-schema
  """

  @doc """
  Struct containing information about the user.

  There are two relevant fields:

  - `:name` - The name returned by the serviceResponse
  - `:attributes` - Other attributes returned by the serviceResponse
  """
  defstruct name: nil, attributes: %{}

  alias Ueberauth.Strategy.CAS.User

  import SweetXml

  # Builds a %User{} out of a raw serviceResponse XML body.
  def from_xml(body) do
    %User{name: get_user(body), attributes: get_attributes(body)}
  end

  # The <cas:user> element holds the user's name.
  defp get_user(body) do
    body
    |> xpath(~x"//cas:user/text()")
    |> to_string()
  end

  # Walks every child of <cas:attributes> and folds it into a map,
  # collecting repeated attribute names into lists.
  defp get_attributes(body) do
    body
    |> xpath(~x"//cas:attributes/*"l)
    |> Enum.reduce(%{}, fn node, acc ->
      store_attribute(acc, get_attribute_name(node), get_attribute_value(node))
    end)
  end

  # First occurrence stores the bare value; later occurrences of the same
  # name turn the entry into (or append to) a list.
  defp store_attribute(acc, name, value) do
    case Map.fetch(acc, name) do
      {:ok, existing} when is_list(existing) -> Map.put(acc, name, existing ++ [value])
      {:ok, existing} -> Map.put(acc, name, [existing, value])
      :error -> Map.put(acc, name, value)
    end
  end

  # Text content of an attribute node, with booleans cast to real booleans.
  defp get_attribute_value(node), do: cast_value(xpath(node, ~x"./text()"s))

  # Local element name, converted to snake_case (e.g. isFromNewLogin -> is_from_new_login).
  defp get_attribute_name(node), do: Macro.underscore(xpath(node, ~x"local-name(.)"s))

  # "true"/"false" strings become booleans; anything else is left as-is.
  defp cast_value("true"), do: true
  defp cast_value("false"), do: false
  defp cast_value(value), do: value
end
|
lib/ueberauth/strategy/cas/user.ex
| 0.804175 | 0.709409 |
user.ex
|
starcoder
|
defmodule Listerine.Commands do
  use Coxir.Commander

  @prefix "$"

  # Manual pages rendered by the $man command. Each entry documents one
  # command; :nil-valued sections are omitted when rendering.
  @man_pages [
    study: [
      description: "Permite a um aluno juntar-se às salas das cadeiras.",
      synopse: "`#{@prefix}study [CADEIRA|ANO, ...]`",
      options: :nil,
      example: """
      `#{@prefix}study Algebra PI`
      Adiciona-te às salas de Algebra e PI.
      `#{@prefix}study 1ano`
      Adiciona-te a todas as cadeiras do primeiro ano.
      """,
      return_value: "A lista de cadeiras válidas a que foste adicionado."
    ],
    unstudy: [
      description: "Permite a um aluno sair das salas das cadeiras.",
      synopse: "`#{@prefix}unstudy [CADEIRA|ANO, ...]`",
      options: :nil,
      example: """
      `#{@prefix}unstudy Algebra PI`
      Remove-te das salas de Algebra e PI.
      `#{@prefix}unstudy 1ano`
      Remove-te de todas as cadeiras do primeiro ano.
      """,
      return_value: "A lista de cadeiras válidas de que foste removido."
    ],
    courses: [
      description: "Permite interagir com as salas das cadeiras.",
      synopse: """
      ```
      #{@prefix}courses list
      mk ano [CADEIRA, ...] (admin only)
      rm [CADEIRA, ...] (admin only)
      ```
      """,
      options:
        """
        __mk__
        -> Cria salas das cadeiras especificadas, associadas ao ano especificado.
        __rm__
        -> Remove salas das cadeiras especificadas.
        __list__
        -> Lista as cadeiras disponíveis.
        """,
      example: :nil,
      return_value: :nil
    ],
    material: [
      description: "Apresenta o link para o material de apoio do curso.",
      synopse: "`#{@prefix}material`",
      options: :nil,
      example: :nil,
      return_value: "O link para o material de apoio do curso."
    ]
    # datatestes: "Apresenta o link para o calendario de testes.",
  ]

  # Channel-restricted commands must run in the bot-commands channel; when
  # used elsewhere, mention the author there and redirect them. Extracted
  # from four duplicated blocks (also fixes the "commando" -> "comando" typo).
  defp redirect_to_bot_commands(message) do
    Channel.send_message(
      Channel.get(Listerine.Helpers.get_bot_commands_id()),
      Listerine.Helpers.make_mention(message.author) <>
        " Esse comando tem de ser utilizado nesta sala!"
    )
  end

  # Adds the author to the requested course/year roles.
  command study(roles) do
    if Listerine.Helpers.bot_commands?(message) do
      role_list = Listerine.Helpers.upcase_words(roles) |> Listerine.Helpers.roles_per_year()

      case Listerine.Channels.add_roles(message, role_list) do
        [] -> Message.reply(message, "Não foste adicionado a nenhuma sala.")
        cl -> Message.reply(message, "Studying: #{Listerine.Helpers.unwords(cl)}")
      end
    else
      redirect_to_bot_commands(message)
    end
  end

  # Removes the author from the requested course/year roles.
  command unstudy(roles) do
    if Listerine.Helpers.bot_commands?(message) do
      role_list = Listerine.Helpers.upcase_words(roles) |> Listerine.Helpers.roles_per_year()

      case Listerine.Channels.rm_role(message, role_list) do
        [] -> Message.reply(message, "Não foste removido de nenhuma sala.")
        cl -> Message.reply(message, "Stopped studying #{Listerine.Helpers.unwords(cl)}")
      end
    else
      redirect_to_bot_commands(message)
    end
  end

  # Admin-only: creates course channels for a given year.
  @permit :MANAGE_CHANNELS
  @space :courses
  command mk(text) do
    [y | cl] = Listerine.Helpers.upcase_words(text)

    cond do
      # First word must be the (numeric) year the courses belong to.
      String.match?(y, Regex.compile!("^[0-9]*$")) ->
        case Listerine.Channels.add_courses(message.guild, y, cl) do
          [] -> Message.reply(message, "Didn't add any channels")
          cl -> Message.reply(message, "Added: #{Listerine.Helpers.unwords(cl)}")
        end

      true ->
        Message.reply(message, "Usage: `mkcourses year [course, ...]`")
    end
  end

  # Admin-only: removes course channels.
  @permit :MANAGE_CHANNELS
  @space :courses
  command rm(text) do
    args = Listerine.Helpers.upcase_words(text)

    case Listerine.Channels.remove_courses(args) do
      [] -> Message.reply(message, "Didn't remove any channels")
      cl -> Message.reply(message, "Removed: #{Listerine.Helpers.unwords(cl)}")
    end
  end

  # Admin-only: marks the current channel as the bot-commands channel.
  @permit :ADMINISTRATOR
  command setbotcommands() do
    Listerine.Helpers.set_bot_commands(message.channel)
    Message.reply(message, "Channel set")
  end

  # Lists the available courses in an embed.
  @space :courses
  command list() do
    if Listerine.Helpers.bot_commands?(message) do
      embed = %{
        title: "Informação sobre as cadeiras disponíveis",
        color: 0x000000,
        description: """
        `$study CADEIRA` junta-te às salas das cadeiras
        `$study 1ano` junta-te a todas as cadeiras de um ano
        """,
        fields: Listerine.Channels.generate_courses_embed_fields()
      }

      Message.reply(message, embed: embed)
    else
      redirect_to_bot_commands(message)
    end
  end

  command material() do
    text =
      "**Este é o link para o material do curso** -> http://bit.ly/materialmiei"

    Message.reply(message, text)
  end

  # command datatestes() do
  # text =
  # "**As datas do teste encontram-se neste calendário** -> http://bit.ly/calendariomiei"
  # Message.reply(message, text)
  # end

  # Renders a manual page: `$man man` lists all commands, `$man <cmd>` shows
  # the sections of that command's @man_pages entry.
  command man(arg) do
    if Listerine.Helpers.bot_commands?(message) do
      arg = String.downcase(arg)

      msg = cond do
        arg === "man" ->
          %{
            title: "Comandos:",
            color: 0x000000,
            description:
              @man_pages
              |> Enum.map(fn {name, cmd} -> "**#{name}** -> #{
                Enum.find(cmd, fn {a,_} -> a == :description end) |> elem(1)
              }\n" end)
              |> Enum.reduce("", fn x, acc -> acc <> x end),
            footer: %{ text: "$man [comando] para saberes mais sobre algum comando" }
          }

        Enum.any?(@man_pages, fn {name, _} -> Atom.to_string(name) == arg end) ->
          %{
            title: arg,
            color: 0x000000,
            fields: Enum.find(@man_pages, nil, fn {name, _} -> Atom.to_string(name) == arg end)
              |> elem(1)
              |> Enum.filter(fn {_, text} -> text != :nil end)
              |> Enum.map(fn {section, text} -> %{
                name: section |> Atom.to_string() |> String.upcase(),
                value: text,
                inline: false
              } end)
          }

        true ->
          %{
            title: "No manual entry for #{arg}",
            color: 0xFF0000,
            description: "Usa `$man man` para ver a lista de comandos."
          }
      end

      Message.reply(message, embed: msg)
    else
      redirect_to_bot_commands(message)
    end
  end
end
|
lib/Listerine/commands.ex
| 0.621656 | 0.625309 |
commands.ex
|
starcoder
|
defmodule RateTheDub.Locale do
  @moduledoc """
  Utility functions for dealing with i18n and l10n related things, used
  alongside Gettext on the frontend.

  This may eventually be replaced by
  [Elixir CLDR](https://github.com/elixir-cldr/cldr)
  """

  # Covers the English => language code for the most common languages on MAL
  @en_langs %{
    "japanese" => "ja",
    "english" => "en",
    "french" => "fr",
    "spanish" => "es",
    "korean" => "ko",
    "german" => "de",
    "italian" => "it",
    "hungarian" => "hu",
    "chinese" => "zh",
    "portuguese" => "pt",
    "brazilian" => "pt_BR",
    "hebrew" => "he",
    "arabic" => "ar"
  }

  # Reverse lookup (code => lowercase English name), inverted once at compile
  # time so code_to_en_name/1 doesn't rebuild the map on every call.
  @en_names Map.new(@en_langs, fn {name, code} -> {code, name} end)

  @doc """
  Returns the locale code for an English language name (matched
  case-insensitively), or returns the name unchanged if it wasn't known.

  ## Examples

      iex> en_name_to_code("english")
      "en"

      iex> en_name_to_code("klingon")
      "klingon"

  """
  @spec en_name_to_code(name :: String.t()) :: String.t()
  def en_name_to_code(name) when is_binary(name),
    do: Map.get(@en_langs, String.downcase(name)) || name

  @doc """
  The exact opposite of `en_name_to_code/1`: returns the capitalized English
  name of the language for the given locale code, or the capitalized code
  itself if the code is unknown.

  ## Examples

      iex> code_to_en_name("en")
      "English"

      iex> code_to_en_name("fake")
      "Fake"

  """
  @spec code_to_en_name(code :: String.t()) :: String.t()
  def code_to_en_name(code) when is_binary(code) do
    @en_names
    |> Map.get(code, code)
    |> String.capitalize()
  end

  @doc """
  Returns the name of the language `code` as written in the language of
  `locale`. No translations are available yet, so this currently always falls
  back to the English name (see `code_to_en_name/1`). This is the
  multi-lingual version of `code_to_en_name/1`.

  ## Examples

      iex> code_to_locale_name("es", "en")
      "English"

      iex> code_to_locale_name("fr", "fake")
      "Fake"

  """
  @spec code_to_locale_name(locale :: String.t(), code :: String.t()) :: String.t()
  def code_to_locale_name(locale, code) when is_binary(locale) and is_binary(code) do
    case locale do
      # TODO translations to the relevant languages
      "en" -> code_to_en_name(code)
      _ -> code_to_en_name(code)
    end
  end

  @doc """
  Returns the locale's name in its own language (currently the English name,
  until translations are added to `code_to_locale_name/2`).
  """
  @spec locale_own_name(locale :: String.t()) :: String.t()
  def locale_own_name(locale) when is_binary(locale),
    do: code_to_locale_name(locale, locale)
end
|
lib/ratethedub/locale.ex
| 0.68721 | 0.459197 |
locale.ex
|
starcoder
|
defmodule PhoenixBoilerplate.BoilerplateDataManagement do
  @moduledoc """
  The BoilerplateDataManagement context.
  """

  import Ecto.Query, warn: false

  alias PhoenixBoilerplate.Repo
  alias PhoenixBoilerplate.BoilerplateDataManagement.BoilerplateData

  @doc """
  Returns the list of boiler_plate_datas.

  ## Examples

      iex> list_boiler_plate_datas()
      [%BoilerplateData{}, ...]

  """
  def list_boiler_plate_datas, do: Repo.all(BoilerplateData)

  @doc """
  Gets a single boilerplate_data.

  Raises `Ecto.NoResultsError` if the Boilerplate data does not exist.

  ## Examples

      iex> get_boilerplate_data!(123)
      %BoilerplateData{}

      iex> get_boilerplate_data!(456)
      ** (Ecto.NoResultsError)

  """
  def get_boilerplate_data!(id) do
    Repo.get!(BoilerplateData, id)
  end

  @doc """
  Creates a boilerplate_data.

  ## Examples

      iex> create_boilerplate_data(%{field: value})
      {:ok, %BoilerplateData{}}

      iex> create_boilerplate_data(%{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def create_boilerplate_data(attrs \\ %{}) do
    changeset = BoilerplateData.changeset(%BoilerplateData{}, attrs)
    Repo.insert(changeset)
  end

  @doc """
  Updates a boilerplate_data.

  ## Examples

      iex> update_boilerplate_data(boilerplate_data, %{field: new_value})
      {:ok, %BoilerplateData{}}

      iex> update_boilerplate_data(boilerplate_data, %{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def update_boilerplate_data(%BoilerplateData{} = boilerplate_data, attrs) do
    changeset = BoilerplateData.changeset(boilerplate_data, attrs)
    Repo.update(changeset)
  end

  @doc """
  Deletes a boilerplate_data.

  ## Examples

      iex> delete_boilerplate_data(boilerplate_data)
      {:ok, %BoilerplateData{}}

      iex> delete_boilerplate_data(boilerplate_data)
      {:error, %Ecto.Changeset{}}

  """
  def delete_boilerplate_data(%BoilerplateData{} = boilerplate_data),
    do: Repo.delete(boilerplate_data)

  @doc """
  Returns an `%Ecto.Changeset{}` for tracking boilerplate_data changes.

  ## Examples

      iex> change_boilerplate_data(boilerplate_data)
      %Ecto.Changeset{data: %BoilerplateData{}}

  """
  def change_boilerplate_data(%BoilerplateData{} = boilerplate_data, attrs \\ %{}),
    do: BoilerplateData.changeset(boilerplate_data, attrs)
end
|
lib/phoenix_boilerplate/boilerplate_data_management.ex
| 0.885347 | 0.473901 |
boilerplate_data_management.ex
|
starcoder
|
defmodule ExACN.PDU do
  defstruct vector: <<>>, header: <<>>, data: <<>>

  @moduledoc """
  Packet Data Unit encoding

  Common functions for processing the PDU format used across the ACN
  specification
  """

  alias ExACN.PDU.Flags

  @type t :: %ExACN.PDU{vector: binary(), header: binary(), data: binary()}

  # Builds the PDU body. With no previous PDU all three fields are emitted;
  # otherwise only the fields that differ from the previous PDU are included
  # (ACN's inheritance mechanism — omitted fields are flagged and reused).
  @spec build_body(t, t) :: binary()
  defp build_body(pdu, nil) do
    pdu.vector <> pdu.header <> pdu.data
  end

  defp build_body(pdu, previous) do
    [:vector, :header, :data]
    |> Enum.map(fn field -> {Map.get(pdu, field), Map.get(previous, field)} end)
    |> Enum.filter(fn {current, previous} -> current != previous end)
    |> Enum.map(fn {current, _} -> current end)
    |> Enum.join
  end

  @doc ~S"""
  Encode a single PDU into binary.

  The flags for vector, header and data will be set based on the previous packet
  """
  @spec pack_single(t, t | nil) :: binary()
  def pack_single(pdu, previous \\ nil) do
    body = build_body(pdu, previous)
    body_length = byte_size(body)
    # A set flag means "same as previous PDU" — the field is omitted from body.
    vector_flag = previous != nil && pdu.vector == previous.vector
    header_flag = previous != nil && pdu.header == previous.header
    data_flag = previous != nil && pdu.data == previous.data
    # Use the extended length encoding when the body won't fit in the short
    # length field.
    length_flag = body_length > round(:math.pow(2, 12)) - 3 # less one for binary encoding and two for the preamble
    flags = %Flags{length: length_flag, vector: vector_flag, header: header_flag, data: data_flag}
    # NOTE(review): bit width of the length field and preamble byte count are
    # defined by ExACN.PDU.Flags (not visible here) — verify there.
    encoded_length_bits = Flags.length_bits(flags)
    # Encoded length covers the preamble (flags + length) plus the body.
    encoded_length = body_length + Flags.preamble_bytes(flags)
    << Flags.encode_flags(flags)::bits, encoded_length::size(encoded_length_bits), body::bytes>>
  end

  @doc ~S"""
  Pack a series of PDUs into a binary
  """
  @spec pack([t], t) :: binary()
  def pack(pdu_sequence, prev_pdu \\ nil) do
    # Each PDU is packed relative to its predecessor so repeated fields are
    # elided via the flag mechanism.
    {data, _} = Enum.reduce(pdu_sequence, {<<>>, prev_pdu}, fn(pdu, {data, prev}) -> {data <> pack_single(pdu, prev), pdu} end)
    data
  end

  # Decodes the PDU body, consuming vector/header/data in order. Each clause
  # handles the next unset flag; a set flag means the field is inherited from
  # the previous PDU (already present in `prev`). `header_length` is a
  # function of the remaining bytes; `vec_length` is a fixed byte count.
  @spec unpack_body(binary(), Flags.t, t, integer(), integer() | (binary() -> integer())) :: t
  defp unpack_body(data, flags, nil, vec_length, header_length) do
    unpack_body(data, flags, %ExACN.PDU{}, vec_length, header_length)
  end

  defp unpack_body(data, flags = %Flags{vector: false}, prev, vec_length, header_length) do
    << vector::binary-size(vec_length), tail::binary >> = data
    unpack_body(tail, %{flags | vector: true}, %{prev | vector: vector}, vec_length, header_length)
  end

  defp unpack_body(data, flags = %Flags{header: false}, prev, _, header_length) do
    header_length_actual = header_length.(data)
    << header::binary-size(header_length_actual), tail::binary >> = data
    unpack_body(tail, %{flags | header: true}, %{prev | header: header}, nil, nil)
  end

  defp unpack_body(data, flags = %Flags{data: false}, prev, _, _) do
    # Everything that remains is the data segment.
    unpack_body(<<>>, %{flags | data: true}, %{prev | data: data}, nil, nil)
  end

  defp unpack_body(_, _, prev, _, _) do
    prev
  end

  @doc ~S"""
  Decode a single PDU from the start of the start a binary

  The previous PDU is required to get the correct value if the vector, header or data flags
  are set. The vector length is a fixed integer
  """
  @spec unpack_single(binary(), t | nil, integer(), integer() | (binary() -> integer())) :: {:ok, t, binary()}
  def unpack_single(encoded, previous, vec_length, header_length) when is_integer(header_length) do
    # Normalize a fixed header length into the function form used below.
    unpack_single(encoded, previous, vec_length, fn _ -> header_length end)
  end

  def unpack_single(encoded, previous, vec_length, header_length) do
    # Extract flags
    flags = Flags.decode_flags(encoded)

    # Calculate the length
    length_bits_encoded = Flags.length_bits(flags)
    << _::bits-size(4), length::size(length_bits_encoded), _::binary >> = encoded
    preamble_bytes_encoded = Flags.preamble_bytes(flags)
    body_bytes = length - preamble_bytes_encoded

    # Extract the body
    << _::bytes-size(preamble_bytes_encoded), body::binary-size(body_bytes), tail::binary >> = encoded

    # Unpack the body
    pdu = unpack_body(body, flags, previous, vec_length, header_length)
    {:ok, pdu, tail}
  end

  @doc """
  Unpack a binary into a set of PDUs
  """
  @spec unpack(binary(), integer(), (binary() -> integer())) :: [t]
  def unpack(<<>>, _, _) do
    []
  end

  def unpack(encoded, vec_length, header_length) do
    unpack(encoded, vec_length, header_length, [], nil)
  end

  # Accumulates decoded PDUs in reverse, threading each PDU through as the
  # "previous" for flag-based field inheritance; reversed once at the end.
  defp unpack(encoded, vec_length, header_length, acc, prev) do
    {:ok, pdu, tail} = unpack_single(encoded, prev, vec_length, header_length)
    seq = [pdu | acc]
    case tail do
      <<>> -> Enum.reverse(seq)
      _ -> unpack(tail, vec_length, header_length, seq, pdu)
    end
  end
end
|
lib/ex_acn/pdu.ex
| 0.715026 | 0.557845 |
pdu.ex
|
starcoder
|
defmodule Membrane.RTP.Packet.PayloadType do
@moduledoc """
This module contains utility to translate numerical payload type into an atom value.
"""
alias Membrane.RTP
@doc """
Gets the name of used encoding from numerical payload type according to [RFC3551](https://tools.ietf.org/html/rfc3551#page-32).
For quick reference check [datasheet](https://www.iana.org/assignments/rtp-parameters/rtp-parameters.xhtml).
"""
@spec get_encoding_name(payload_type :: RTP.payload_type_t()) ::
RTP.static_encoding_name_t() | :dynamic
def get_encoding_name(type)
def get_encoding_name(0), do: :PCMU
def get_encoding_name(3), do: :GSM
def get_encoding_name(4), do: :G732
def get_encoding_name(5), do: :DVI4
def get_encoding_name(6), do: :DVI4
def get_encoding_name(7), do: :LPC
def get_encoding_name(8), do: :PCMA
def get_encoding_name(9), do: :G722
def get_encoding_name(10), do: :L16
def get_encoding_name(11), do: :L16
def get_encoding_name(12), do: :QCELP
def get_encoding_name(13), do: :CN
def get_encoding_name(14), do: :MPA
def get_encoding_name(15), do: :G728
def get_encoding_name(16), do: :DVI4
def get_encoding_name(17), do: :DVI4
def get_encoding_name(18), do: :G729
def get_encoding_name(25), do: :CELB
def get_encoding_name(26), do: :JPEG
def get_encoding_name(28), do: :NV
def get_encoding_name(31), do: :H261
def get_encoding_name(32), do: :MPV
def get_encoding_name(33), do: :MP2T
def get_encoding_name(34), do: :H263
def get_encoding_name(payload_type) when payload_type in 96..127, do: :dynamic
# Clock rates of the statically assigned payload types (RFC 3551, section 6).
@static_clock_rates %{
  0 => 8000,
  3 => 8000,
  4 => 8000,
  5 => 8000,
  6 => 16_000,
  7 => 8000,
  8 => 8000,
  9 => 8000,
  10 => 44_100,
  11 => 44_100,
  12 => 8000,
  13 => 8000,
  14 => 90_000,
  15 => 8000,
  16 => 11_025,
  17 => 22_050,
  18 => 8000,
  25 => 90_000,
  26 => 90_000,
  28 => 90_000,
  31 => 90_000,
  32 => 90_000,
  33 => 90_000,
  34 => 90_000
}

@doc """
Gets the clock rate from numerical payload type according to [RFC3551](https://tools.ietf.org/html/rfc3551#page-32).
For quick reference check [datasheet](https://www.iana.org/assignments/rtp-parameters/rtp-parameters.xhtml).
"""
@spec get_clock_rate(payload_type :: RTP.payload_type_t()) ::
        RTP.clock_rate_t() | :dynamic
def get_clock_rate(type)

def get_clock_rate(payload_type) when payload_type in 96..127, do: :dynamic

# Guarding on map membership keeps the original contract: an unmapped static
# payload type raises FunctionClauseError, exactly like the clause-per-type
# implementation did.
def get_clock_rate(payload_type) when is_map_key(@static_clock_rates, payload_type),
  do: Map.fetch!(@static_clock_rates, payload_type)
@doc """
Checks if numerical payload type should be assigned to format type dynamically.
"""
@spec is_dynamic(payload_type :: RTP.payload_type_t()) :: boolean()
def is_dynamic(payload_type) do
  # `in` with a literal range compiles to the guard-safe integer checks
  # (`is_integer/1` plus bounds), so non-integers yield `false` just like
  # the original two-clause version.
  payload_type in 96..127
end
end
|
lib/membrane/rtp/packet_payload_type.ex
| 0.764672 | 0.503479 |
packet_payload_type.ex
|
starcoder
|
defprotocol Access do
  @moduledoc """
  The Access protocol is used by `foo[bar]` and also
  empowers the nested update functions in Kernel.

  For instance, `foo[bar]` translates to `Access.get(foo, bar)`.
  `Kernel.get_in/2`, `Kernel.put_in/3`, `Kernel.update_in/3` and
  `Kernel.get_and_update_in/3` are also all powered by the Access
  protocol.

  This protocol is implemented by default for keywords, maps
  and dictionary like types:

      iex> keywords = [a: 1, b: 2]
      iex> keywords[:a]
      1

      iex> map = %{a: 1, b: 2}
      iex> map[:a]
      1

      iex> star_ratings = %{1.0 => "★", 1.5 => "★☆", 2.0 => "★★"}
      iex> star_ratings[1.5]
      "★☆"

  The key comparison must be implemented using the `===` operator.
  """

  @doc """
  Accesses the given key in the container.
  """
  # Fix: `get/2` returns the value stored under `key` (or `nil`), not the
  # container itself — the return type is `term`, not `t`.
  @spec get(t, term) :: term
  def get(container, key)

  @doc """
  Gets a value and updates the given `key` in one pass.

  The function must receive the value for the given `key`
  (or `nil` if the key doesn't exist in `container`) and
  the function must return a tuple containing the `get`
  value and the new value to be stored in the `container`.
  """
  @spec get_and_update(t, term, (term -> {get, term})) :: {get, t} when get: var
  def get_and_update(container, key, fun)
end
defimpl Access, for: List do
  # Keyword-style lookup: returns the value of the first `{key, value}` pair,
  # or nil when the key is absent. Only atom keys are supported, matching
  # keyword-list semantics.
  def get(dict, key) when is_atom(key) do
    case :lists.keyfind(key, 1, dict) do
      {^key, value} -> value
      false -> nil
    end
  end

  # Non-atom keys are a caller error for lists.
  def get(_dict, key) do
    raise ArgumentError,
      "the access protocol for lists expect the key to be an atom, got: #{inspect key}"
  end

  # Public entry point delegates to the private 4-arity walker with an empty
  # accumulator of already-visited pairs.
  def get_and_update(dict, key, fun) when is_atom(key) do
    get_and_update(dict, [], key, fun)
  end

  # Key found: apply `fun` to its value and stitch the updated pair back in
  # with :lists.reverse/2 so the original ordering is preserved.
  defp get_and_update([{key, value}|t], acc, key, fun) do
    {get, update} = fun.(value)
    {get, :lists.reverse(acc, [{key, update}|t])}
  end

  # Not this pair: keep scanning, accumulating visited elements in reverse.
  defp get_and_update([h|t], acc, key, fun) do
    get_and_update(t, [h|acc], key, fun)
  end

  # Key absent: `fun` receives nil and the new pair is prepended, ending up
  # at the front of the restored list.
  defp get_and_update([], acc, key, fun) do
    {get, update} = fun.(nil)
    {get, [{key, update}|:lists.reverse(acc)]}
  end
end
defimpl Access, for: Map do
  # Returns the value stored under `key`, or nil when absent.
  def get(map, key) do
    case :maps.find(key, map) do
      {:ok, value} -> value
      :error -> nil
    end
  end

  # `fun` receives nil when the key is missing, mirroring get/2; the updated
  # value is always stored (creating the key if it did not exist).
  def get_and_update(map, key, fun) do
    value =
      case :maps.find(key, map) do
        {:ok, value} -> value
        :error -> nil
      end
    {get, update} = fun.(value)
    {get, :maps.put(key, update, map)}
  end

  # The bang variants below are not declared by the Access protocol itself
  # (which only declares get/2 and get_and_update/3); they raise on a missing
  # key instead of returning nil.
  # NOTE(review): presumably called directly by nested-access helpers for
  # map/struct keys — confirm against callers.
  def get!(%{} = map, key) do
    case :maps.find(key, map) do
      {:ok, value} -> value
      :error -> raise KeyError, key: key, term: map
    end
  end

  def get!(other, key) do
    raise ArgumentError,
      "could not get key #{inspect key}. Expected map/struct, got: #{inspect other}"
  end

  def get_and_update!(%{} = map, key, fun) do
    case :maps.find(key, map) do
      {:ok, value} ->
        {get, update} = fun.(value)
        {get, :maps.put(key, update, map)}
      :error ->
        raise KeyError, key: key, term: map
    end
  end

  def get_and_update!(other, key, _fun) do
    raise ArgumentError,
      "could not put/update key #{inspect key}. Expected map/struct, got: #{inspect other}"
  end
end
defimpl Access, for: Atom do
  # Only the atom `nil` supports Access, always yielding nil (this is what
  # makes `nil[:anything]` return nil instead of crashing).
  def get(nil, _) do
    nil
  end

  # Any other atom is not a container: raise the protocol error.
  def get(atom, _) do
    undefined(atom)
  end

  # Updating a nil "container" is treated as a caller error (ArgumentError)
  # rather than a protocol dispatch failure.
  def get_and_update(nil, key, _fun) do
    raise ArgumentError,
      "could not put/update key #{inspect key} on a nil value"
  end

  def get_and_update(atom, _key, _fun) do
    undefined(atom)
  end

  # @protocol is set by defimpl to the protocol module being implemented
  # (Access), so the raised error names the right protocol.
  defp undefined(atom) do
    raise Protocol.UndefinedError,
      protocol: @protocol,
      value: atom,
      description: "only the nil atom is supported"
  end
end
|
lib/elixir/lib/access.ex
| 0.898109 | 0.587144 |
access.ex
|
starcoder
|
defmodule ExRss.Entry do
  use ExRssWeb, :model

  alias ExRss.Feed

  # Only these fields are exposed when an entry is serialized to JSON.
  @derive {Jason.Encoder, only: [:id, :url, :title, :read, :posted_at]}
  # inserted_at/updated_at are stored as UTC datetimes.
  @timestamps_opts [type: :utc_datetime]

  schema "entries" do
    belongs_to :feed, Feed
    field :url, :string
    field :title, :string
    # The unparsed date string exactly as found in the feed; kept so parsing
    # can be retried or debugged even when :posted_at could not be derived.
    field :raw_posted_at, :string
    field :posted_at, :utc_datetime
    field :read, :boolean
    timestamps()
  end

  @doc """
  Builds a changeset based on the `struct` and `params`.
  """
  def changeset(struct, params \\ %{}) do
    struct
    # Only :read is cast from external params; :url is validated but never
    # cast, so it must already be set on the struct (e.g. via parse/1).
    |> cast(params, [:read])
    |> validate_required([:url])
    |> assoc_constraint(:feed)
    |> unique_constraint(:url, name: :entries_feed_id_url_index)
  end

  # Builds a plain attribute map (not a changeset) from a parsed feed entry.
  # Timestamps are set to "now", truncated to seconds to match the
  # :utc_datetime column type.
  def parse(entry) do
    now = DateTime.utc_now() |> DateTime.truncate(:second)

    posted_at =
      case parse_time(entry.updated) do
        {:ok, parsed_time} -> parsed_time
        # When no known format matches, posted_at stays nil; the raw string
        # is still preserved in :raw_posted_at.
        {:error, _} -> nil
      end

    %{
      title: entry.title,
      url: entry.link,
      raw_posted_at: entry.updated,
      posted_at: posted_at,
      read: false,
      inserted_at: now,
      updated_at: now
    }
  end

  # For details on available directives, see
  # https://hexdocs.pm/timex/Timex.Format.DateTime.Formatters.Strftime.html
  @time_formats [
    # "%-d" matches days with and without padding zero.
    # Tue, 03 Jan 2017 14:55:00 +0100
    # Wed, 8 Jan 2020 07:28:00 +0100
    "%a, %-d %b %Y %H:%M:%S %z",
    # Sun, 13 Nov 2016 21:00:00 GMT
    "%a, %d %b %Y %H:%M:%S %Z",
    # 13 Mar 2018 00:00:00 GMT
    "%d %b %Y %H:%M:%S %Z",
    # 06 Sep 2017 12:00 +0000
    "%d %b %Y %H:%M %z",
    # 2018-08-22T10:07:06.121Z
    "%Y-%m-%dT%H:%M:%S.%LZ",
    # 2020-05-03T13:10:00.000-06:00
    "%Y-%m-%dT%H:%M:%S.%L%:z",
    # 2019-01-17T00:00:00Z
    "%Y-%m-%dT%H:%M:%SZ",
    # Internet date/time format as specified by RFC 3339
    # See https://tools.ietf.org/html/rfc3339
    # 2018-01-13T19:05:08+00:00
    "%Y-%m-%dT%H:%M:%S%:z"
  ]

  def parse_time(time) do
    parse_time(time, @time_formats)
  end

  # Tries each format in order; the first successful parse wins. Results are
  # normalized to UTC and truncated to whole seconds. Returns
  # {:error, :no_format_found} when every format fails.
  def parse_time(time, formats) do
    case formats do
      [] ->
        {:error, :no_format_found}

      [head | tail] ->
        case Timex.parse(time, head, :strftime) do
          {:ok, posted_at} ->
            {:ok,
             posted_at
             |> Timex.Timezone.convert("Etc/UTC")
             |> DateTime.truncate(:second)}

          {:error, _} ->
            parse_time(time, tail)
        end
    end
  end

  # If an entry does not have an absolute URL, we merge it with the feed’s URL
  # to get an absolute URL.
  def url_for(feed_url, entry_url) do
    URI.merge(feed_url, entry_url) |> to_string
  end

  def make_url_absolute(entry, feed_url) do
    Map.put(entry, :url, url_for(feed_url, entry.url))
  end
end
|
lib/ex_rss/entry.ex
| 0.727589 | 0.456107 |
entry.ex
|
starcoder
|
defmodule Sanbase.Signal.Trigger.DailyActiveAddressesSettings do
  @moduledoc ~s"""
  Signals based on the unique number of daily active addresses.
  The signal supports the following operations:
  1. Daily Active Addresses get over or under a given number
  2. Daily Active Addresses change by a given percent compared to the average
  number of daily active addresses over a given time window
  """
  use Vex.Struct

  import Sanbase.{Validation, Signal.Validation}
  import Sanbase.Signal.Utils
  import Sanbase.DateTimeUtils, only: [round_datetime: 2, str_to_days: 1, interval_to_str: 1]

  alias __MODULE__
  alias Sanbase.Signal.Type
  alias Sanbase.Model.Project

  @derive {Jason.Encoder, except: [:filtered_target, :triggered?, :payload, :template_kv]}
  @trigger_type "daily_active_addresses"
  @enforce_keys [:type, :target, :channel, :operation]
  defstruct type: @trigger_type,
            target: nil,
            channel: nil,
            time_window: "2d",
            operation: nil,
            # Private fields, not stored in DB.
            filtered_target: %{list: []},
            triggered?: false,
            payload: %{},
            template_kv: %{}

  validates(:target, &valid_target?/1)
  validates(:channel, &valid_notification_channel?/1)
  validates(:time_window, &valid_time_window?/1)
  validates(:time_window, &time_window_is_whole_days?/1)
  validates(:operation, &valid_operation?/1)

  @type t :: %__MODULE__{
          type: Type.trigger_type(),
          target: Type.complex_target(),
          channel: Type.channel(),
          time_window: Type.time_window(),
          operation: Type.operation(),
          # Private fields, not stored in DB.
          filtered_target: Type.filtered_target(),
          triggered?: boolean(),
          payload: Type.payload(),
          template_kv: Type.template_kv()
        }

  @spec type() :: Type.trigger_type()
  def type(), do: @trigger_type

  # Fetches, per slug in the filtered target, the last two daily values of the
  # 24h active addresses metric; slugs whose fetch failed are dropped.
  def get_data(%__MODULE__{filtered_target: %{list: target_list}} = settings)
      when is_list(target_list) do
    time_window_in_days = Enum.max([str_to_days(settings.time_window), 1])

    # Ensure there are enough data points in the interval. The not needed
    # ones are ignored
    from = Timex.shift(Timex.now(), days: -(3 * time_window_in_days))
    to = Timex.now()

    target_list
    |> Enum.map(fn slug ->
      case fetch_24h_active_addresses(slug, from, to, "1d") do
        {:ok, result} ->
          {slug, Enum.take(result, -2)}

        _ ->
          nil
      end
    end)
    |> Enum.reject(&is_nil/1)
  end

  # NOTE: renamed from `fetch_24h_active_addersses` (typo). The function is
  # private, and the old misspelled atom was only used inside the cache key
  # tuple below, so the rename merely invalidates previously cached entries.
  defp fetch_24h_active_addresses(slug, from, to, interval) do
    # Datetimes are rounded to 5 minutes so repeated evaluations within that
    # window hit the same cache entry.
    cache_key =
      {__MODULE__, :fetch_24h_active_addresses, slug, round_datetime(from, 300),
       round_datetime(to, 300), interval}
      |> Sanbase.Cache.hash()

    Sanbase.Cache.get_or_store(cache_key, fn ->
      case Sanbase.Metric.timeseries_data(
             "active_addresses_24h",
             %{slug: slug},
             from,
             to,
             interval,
             :last
           ) do
        {:ok, result} -> result
        _ -> []
      end
    end)
  end

  defimpl Sanbase.Signal.Settings, for: DailyActiveAddressesSettings do
    alias Sanbase.Signal.{ResultBuilder, OperationText}

    def triggered?(%DailyActiveAddressesSettings{triggered?: triggered}), do: triggered

    def evaluate(%DailyActiveAddressesSettings{} = settings, _trigger) do
      case DailyActiveAddressesSettings.get_data(settings) do
        data when is_list(data) and data != [] ->
          build_result(data, settings)

        _ ->
          %DailyActiveAddressesSettings{settings | triggered?: false}
      end
    end

    defp build_result(data, %DailyActiveAddressesSettings{} = settings) do
      ResultBuilder.build(data, settings, &template_kv/2, value_key: :value)
    end

    def cache_key(%DailyActiveAddressesSettings{} = settings) do
      construct_cache_key([
        settings.type,
        settings.target,
        settings.time_window,
        settings.operation
      ])
    end

    # Builds the notification template plus the key/value bindings used to
    # render it. `values` comes from ResultBuilder per evaluated slug.
    defp template_kv(%{identifier: slug} = values, settings) do
      project = Project.by_slug(slug)
      interval = interval_to_str(settings.time_window)

      {operation_template, operation_kv} =
        OperationText.to_template_kv(values, settings.operation)

      {curr_value_template, curr_value_kv} =
        OperationText.current_value(values, settings.operation)

      kv =
        %{
          type: DailyActiveAddressesSettings.type(),
          operation: settings.operation,
          project_name: project.name,
          project_ticker: project.ticker,
          project_slug: project.slug,
          average_value: values.previous_average,
          interval: interval
        }
        |> Map.merge(operation_kv)
        |> Map.merge(curr_value_kv)

      # Fix: `**{{interval}}*` had an unbalanced bold marker, rendering broken
      # markdown in the delivered notification; it is now `**{{interval}}**`.
      template = """
      🔔 \#{{project_ticker}} | **{{project_name}}**'s Active Addresses for the past 24 hours #{
        operation_template
      }.
      #{curr_value_template}.
      Average 24 hours Active Addresses for last **{{interval}}**: **{{average_value}}**.
      """

      {template, kv}
    end
  end
end
|
lib/sanbase/signals/trigger/settings/daily_active_addresses_settings.ex
| 0.762336 | 0.45944 |
daily_active_addresses_settings.ex
|
starcoder
|
defmodule Bonbon.Model.Locale do
  use Bonbon.Web, :model
  @moduledoc """
  A model representing the different languages using culture codes (ISO 3166-1
  alpha-2 and ISO 639-1 code).

  ##Fields
  The `:country` and `:language` fields are uniquely constrained.

  ###:id
  Is the unique reference to the locale entry. Is an `integer`.

  ###:country
  Is the country code (ISO 3166-1 alpha-2) of the locale. Is a 2 character
  uppercase `string`.

  ###:language
  Is the language code (ISO 639-1 code) of the locale. Is a 2 character
  lowercase `string`.
  """

  defmodule NotFoundError do
    @moduledoc """
    Exception raised when a locale does not exist.
    """
    defexception [:message, :code]

    def exception(option), do: %Bonbon.Model.Locale.NotFoundError{ message: "no locale exists for code: #{option[:code]}", code: option[:code] }
  end

  schema "locales" do
    field :country, :string
    field :language, :string
    # Parentheses added: paren-less macro invocation is deprecated.
    timestamps()
  end

  @doc """
  Builds a changeset based on the `struct` and `params`.
  Enforces:
  * `language` field is supplied
  * `country` field is length of 2
  * `language` field is length of 2
  * formats the `country` field as uppercase
  * formats the `language` field as lowercase
  * checks uniqueness of given culture code
  """
  def changeset(struct, params \\ %{}) do
    struct
    |> cast(params, [:country, :language])
    |> validate_required(:language)
    |> validate_length(:country, is: 2)
    |> validate_length(:language, is: 2)
    |> format_uppercase(:country)
    |> format_lowercase(:language)
    |> unique_constraint(:culture_code)
  end

  @doc """
  Get the locale_id for the given string or raise the exception
  `Bonbon.Model.Locale.NotFoundError` on an invalid locale. For more details
  see: `to_locale_id/1`.
  """
  @spec to_locale_id!(String.t) :: integer
  def to_locale_id!(code) do
    case to_locale_id(code) do
      nil -> raise(Bonbon.Model.Locale.NotFoundError, code: code)
      locale -> locale
    end
  end

  @doc """
  Get the locale_id for the given string or nil on an invalid locale.
  The string format takes the form of `language_country` or `language` when no
  country is specified. e.g. `"en"` and `"en_AU"` would be valid formats, the
  first referring to the english locale, the second referring to Australian
  english.
  """
  @spec to_locale_id(String.t) :: integer | nil
  def to_locale_id(<<language :: binary-size(2), "_", country :: binary-size(2)>>), do: to_locale_id(language, country)
  def to_locale_id(<<language :: binary-size(2)>>), do: to_locale_id(language, nil)

  @doc """
  Get the fallback list of locale_id's for the given string or raise the exception
  `Bonbon.Model.Locale.NotFoundError` when no locales are found. For more details
  see: `to_locale_id_list/1`.
  """
  @spec to_locale_id_list!(String.t) :: [integer]
  def to_locale_id_list!(code) do
    case to_locale_id_list(code) do
      [] -> raise(Bonbon.Model.Locale.NotFoundError, code: code)
      locale -> locale
    end
  end

  @doc """
  Get the fallback list of locale_id's for the given string or empty list if no
  locales were valid.
  The string format takes the form of `language_country` or `language` when no
  country is specified. e.g. `"en"` and `"en_AU"` would be valid formats, the
  first referring to the english locale, the second referring to Australian
  english.
  This list includes the top-most locale, and parent locales (to fallback to).
  """
  # Spec fix: both clauses always return a (possibly empty) list, so the
  # previous `| nil` in the return type was unreachable.
  @spec to_locale_id_list(String.t) :: [integer]
  def to_locale_id_list(<<language :: binary-size(2), "_", country :: binary-size(2)>>), do: [to_locale_id(language, country), to_locale_id(language, nil)] |> Enum.filter(&(&1 != nil))
  def to_locale_id_list(<<language :: binary-size(2)>>), do: [to_locale_id(language, nil)] |> Enum.filter(&(&1 != nil))

  # Looks up the country-less locale row ("en" rather than "en_AU").
  defp to_locale_id(language, nil) do
    query = from locale in Bonbon.Model.Locale,
      where: locale.language == ^String.downcase(language) and is_nil(locale.country),
      select: locale.id

    Bonbon.Repo.one(query)
  end

  # Looks up the exact language/country pair; input case is normalized to the
  # stored convention (lowercase language, uppercase country).
  defp to_locale_id(language, country) do
    query = from locale in Bonbon.Model.Locale,
      where: locale.language == ^String.downcase(language) and locale.country == ^String.upcase(country),
      select: locale.id

    Bonbon.Repo.one(query)
  end
end
|
web/models/locale.ex
| 0.861524 | 0.544256 |
locale.ex
|
starcoder
|
defmodule VirusTotal do
  @moduledoc """
  Documentation for VirusTotal.
  Full API documentation can be found on the
  [official site](https://www.virustotal.com/en/documentation/private-api)
  As a note, this wrapper will convert HTTP-200 responses with a `response_code` of
  0 to errors. Apparently VT considers to be HTTP 200 to be a valid status code for
  "not found". How silly.
  Example usage:
      iex> client = VirusTotal.Client.new(my_api_key)
      iex> {:ok, report} = VirusTotal.file_report(client, "7bf5623f0a10dfa148a35bebd899b7758612f1693d2a9910f716cf15a921a76a")
      {:ok, %{"ITW_urls" => ["https://chiru.no/u/rensenware.exe"], ...}}
  """

  @doc """
  Retrieves a concluded file scan report for a given file.
  Valid parameters:
  - `:allinfo`: if this is specified and set to 1, the call will return additional info.
  This is turned ON by default
      iex> file_report(client, "7bf5623f0a10dfa148a35bebd899b7758612f1693d2a9910f716cf15a921a76a")
      {:ok, %{"ITW_urls" => ["https://chiru.no/u/rensenware.exe"]}}
  """
  def file_report(client, hash, params \\ [allinfo: 1]) do
    # Caller-supplied params win over nothing here; :resource is always set.
    params = Keyword.merge(params, resource: hash)

    Tesla.get(client, "/vtapi/v2/file/report", query: params)
    |> parse()
  end

  @doc """
  Allows you to rescan files present in VirusTotal's file store
  without having to resubmit them
      iex> rescan_file(client, some_hash)
      {:ok, %{"scan_id" => "something"}}
  Valid parameters:
  - `:date`: Date in `%Y%m%d%H%M%S` format (example: `20120725170000`)
  in which the rescan should be performed.
  If not specified the rescan will be performed immediately.
  - `:period`: Periodicity (in days) with which the file should be rescanned.
  If this argument is provided the file will be rescanned periodically
  every `period` days, if not, the rescan is performed once and not repeated again.
  - `:repeat`: Used in conjunction with `period` to specify the number of times the file
  should be rescanned.
  If this argument is provided the file will be
  rescanned the given amount of times in coherence with the chosen periodicity,
  if not, the file will be rescanned indefinitely.
  - `:notify_url`: A URL to which a POST notification should be sent when the rescan finishes.
  - `:notify_changes_only`: Used in conjunction with notify_url.
  Indicates if POST notifications should only be sent if the
  scan results differ from the previous one.
  """
  def rescan_file(client, resource, params \\ []) do
    params = Keyword.merge(params, resource: resource)

    Tesla.post(client, "/vtapi/v2/file/rescan", query: params)
    |> parse()
  end

  @doc """
  Deletes a scheduled file rescan task. The file rescan api allows you to schedule
  periodic scans of a file, this API call tells VirusTotal to stop rescanning
  a file that you have previously enqueued for recurrent scanning.
  """
  def delete_rescan(client, resource) do
    Tesla.post(client, "/vtapi/v2/file/rescan/delete", query: [resource: resource])
    |> parse()
  end

  @doc """
  VirusTotal runs a distributed setup of Cuckoo sandbox machines that execute the files
  they receive.
  Execution is attempted only once, upon first submission to VirusTotal,
  and only Portable Executables under 10MB in size are ran.
  The execution of files is a best effort process, hence,
  there are no guarantees about a report being generated for a given file in the dataset.
  """
  def file_behaviour(client, hash) do
    Tesla.get(client, "/vtapi/v2/hash/behaviour", query: [hash: hash])
    |> parse()
  end

  @doc """
  Files that are successfully executed may communicate with certain network resources,
  all this communication is recorded in a network traffic dump (pcap file).
  This API allows you to retrieve the network traffic dump generated during the
  file's execution.
  """
  def file_network_traffic(client, hash) do
    Tesla.get(client, "/vtapi/v2/hash/network-traffic", query: [hash: hash])
    |> parse()
  end

  @doc """
  Valid params:
  - `:offset`: The offset value returned by a previously issued identical query,
  allows you to paginate over the results.
  If not specified the first 300 matching files sorted according to last
  submission date to VirusTotal in a descending fashion will be returned.
  """
  # Unlike the other endpoints, this one takes a params *map* and sends it as
  # the POST body rather than as query parameters.
  def file_search(client, query, params \\ %{}) do
    params = Map.merge(params, %{query: query})

    Tesla.post(client, "/vtapi/v2/file/search", params)
    |> parse()
  end

  @doc """
  This API offers a programmatic access to the clustering section of VirusTotal Intelligence
  Valid params:
  - `:date`: A specific day for which we want to access the clustering details,
  example: 2013-09-10.
  """
  def file_clusters(client, date) do
    Tesla.get(client, "/vtapi/v2/file/clusters", query: [date: date])
    |> parse()
  end

  @doc """
  Downloads a file from VirusTotal's store
      iex> file_download(client, "7bf5623f0a10dfa148a35bebd899b7758612f1693d2a9910f716cf15a921a76a")
      {:ok, <<77, 90, 144, ...>>}
  """
  # The response body is the raw file bytes, so the generic parse/1 (which
  # expects a JSON map with "response_code") is bypassed here.
  def file_download(client, hash) do
    Tesla.get(client, "/vtapi/v2/file/download", query: [hash: hash])
    |> case do
      {:ok, %{status: 200, body: body}} ->
        {:ok, body}

      {:ok, other} ->
        {:error, other}

      other ->
        other
    end
  end

  @doc """
  Retrieves a report for a given URL
      iex> url_report(client, "https://yuruyuri.com/")
      {:ok, %{"positives" => 0, ...}}
  """
  def url_report(client, url) do
    Tesla.get(client, "/vtapi/v2/url/report", query: [resource: url])
    |> parse()
  end

  @doc """
  Allows you to submit URLs to be scanned by VirusTotal
      iex> url_scan(client, "https://yuruyuri.com")
      {:ok, %{"scan_id" => ...}}
  """
  def url_scan(client, url) do
    Tesla.post(client, "/vtapi/v2/url/scan", query: [url: url])
    |> parse()
  end

  @doc """
  Retrieves a report on a given IP address
  (including the information recorded by VirusTotal's Passive DNS infrastructure).
      iex> ip_report(client, "8.8.8.8")
      {:ok, %{"asn" => ...}}
  """
  def ip_report(client, ip) do
    Tesla.get(client, "/vtapi/v2/ip-address/report", query: [ip: ip])
    |> parse()
  end

  @doc """
  Retrieves a report on a given domain
  (including the information recorded by VirusTotal's passive DNS infrastructure).
  """
  def domain_report(client, domain) do
    Tesla.get(client, "/vtapi/v2/domain/report", query: [domain: domain])
    |> parse()
  end

  @doc """
  Retrieves all notifications created by VirusTotal's hunting functionality
  """
  # This intelligence endpoint returns the JSON as plain text (no
  # "response_code" envelope), hence the manual Jason.decode/1 instead of
  # parse/1.
  def notifications(client) do
    Tesla.get(client, "/intelligence/hunting/notifications-feed/")
    |> case do
      {:ok, %{status: 200, body: body}} ->
        Jason.decode(body)

      {:ok, other} ->
        {:error, other}

      other ->
        other
    end
  end

  # Posts a comment on a file or URL.
  def put_comment(client, resource, comment) do
    Tesla.post(client, "/vtapi/v2/comments/put", query: [resource: resource, comment: comment])
    |> parse()
  end

  # Retrieves comments for a file or URL.
  def get_comments(client, resource, params \\ []) do
    params = Keyword.merge(params, resource: resource)

    Tesla.get(client, "/vtapi/v2/comments/get", query: params)
    |> parse()
  end

  # Normalizes Tesla results: only HTTP 200 with "response_code" => 1 counts
  # as success; anything else — including HTTP 200 "not found" responses with
  # response_code 0 — becomes {:error, _}.
  defp parse(response) do
    case response do
      {:ok, %{status: 200, body: %{"response_code" => 1} = body}} ->
        {:ok, body}

      {:ok, env} ->
        {:error, env}

      {:error, reason} ->
        {:error, reason}
    end
  end
end
|
lib/virus_total.ex
| 0.881213 | 0.521776 |
virus_total.ex
|
starcoder
|
defmodule AWS.KinesisVideoMedia do
  @moduledoc """
  """

  @doc """
  Use this API to retrieve media content from a Kinesis video stream.
  In the request, you identify the stream name or stream Amazon Resource Name
  (ARN), and the starting chunk. Kinesis Video Streams then returns a stream of
  chunks in order by fragment number.
  You must first call the `GetDataEndpoint` API to get an endpoint. Then send the
  `GetMedia` requests to this endpoint using the [--endpoint-url parameter](https://docs.aws.amazon.com/cli/latest/reference/).
  When you put media data (fragments) on a stream, Kinesis Video Streams stores
  each incoming fragment and related metadata in what is called a "chunk." For
  more information, see
  [PutMedia](https://docs.aws.amazon.com/kinesisvideostreams/latest/dg/API_dataplane_PutMedia.html). The `GetMedia` API returns a stream of these chunks starting from the chunk that
  you specify in the request.
  The following limits apply when using the `GetMedia` API:
    * A client can call `GetMedia` up to five times per second per
  stream.
    * Kinesis Video Streams sends media data at a rate of up to 25
  megabytes per second (or 200 megabits per second) during a `GetMedia` session.
  If an error is thrown after invoking a Kinesis Video Streams media API, in
  addition to the HTTP status code and the response body, it includes the
  following pieces of information:
  `x-amz-ErrorType` HTTP header – contains a more specific error type
  in addition to what the HTTP status code provides.
  `x-amz-RequestId` HTTP header – if you want to report an issue to
  AWS, the support team can better diagnose the problem if given the Request Id.
  Both the HTTP status code and the ErrorType header can be utilized to make
  programmatic decisions about whether errors are retry-able and under what
  conditions, as well as provide information on what actions the client programmer
  might need to take in order to successfully try again.
  For more information, see the **Errors** section at the bottom of this topic, as
  well as [Common
  Errors](https://docs.aws.amazon.com/kinesisvideostreams/latest/dg/CommonErrors.html).
  """
  def get_media(client, input, options \\ []) do
    path_ = "/getMedia"
    headers = []
    query_ = []

    case request(client, :post, path_, query_, headers, input, options, nil) do
      {:ok, body, response} when not is_nil(body) ->
        # Copy selected response headers into the decoded body map so callers
        # see them alongside the payload (here: Content-Type -> "ContentType").
        body =
          [
            {"Content-Type", "ContentType"},
          ]
          |> Enum.reduce(body, fn {header_name, key}, acc ->
            case List.keyfind(response.headers, header_name, 0) do
              nil -> acc
              {_header_name, value} -> Map.put(acc, key, value)
            end
          end)

        {:ok, body, response}

      result ->
        result
    end
  end

  # Spec fixes: `method` is called with an atom (`:post` above), not a binary,
  # and `success_status_code` is called with `nil` (meaning "accept the default
  # 200/202/204"), so both types now reflect the actual call sites.
  @spec request(AWS.Client.t(), atom(), binary(), list(), list(), map(), list(),
          pos_integer() | nil) ::
          {:ok, map() | nil, map()}
          | {:error, term()}
  defp request(client, method, path, query, headers, input, options, success_status_code) do
    client = %{client | service: "kinesisvideo"}
    host = build_host("kinesisvideo", client)

    url = host
    |> build_url(path, client)
    |> add_query(query, client)

    additional_headers = [{"Host", host}, {"Content-Type", "application/x-amz-json-1.1"}]
    headers = AWS.Request.add_headers(additional_headers, headers)

    payload = encode!(client, input)
    headers = AWS.Request.sign_v4(client, method, url, headers, payload)
    perform_request(client, method, url, payload, headers, options, success_status_code)
  end

  defp perform_request(client, method, url, payload, headers, options, success_status_code) do
    case AWS.Client.request(client, method, url, payload, headers, options) do
      # The two `when` clauses act as an OR: accept the default success codes
      # (200/202/204) when no explicit code was requested, or the exact
      # expected code otherwise.
      {:ok, %{status_code: status_code, body: body} = response}
      when is_nil(success_status_code) and status_code in [200, 202, 204]
      when status_code == success_status_code ->
        body = if(body != "", do: decode!(client, body))
        {:ok, body, response}

      {:ok, response} ->
        {:error, {:unexpected_response, response}}

      error = {:error, _reason} -> error
    end
  end

  # "local" region with an explicit endpoint: use it verbatim (e.g. tests).
  defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
    endpoint
  end

  defp build_host(_endpoint_prefix, %{region: "local"}) do
    "localhost"
  end

  defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
    "#{endpoint_prefix}.#{region}.#{endpoint}"
  end

  defp build_url(host, path, %{:proto => proto, :port => port}) do
    "#{proto}://#{host}:#{port}#{path}"
  end

  defp add_query(url, [], _client) do
    url
  end

  defp add_query(url, query, client) do
    querystring = encode!(client, query, :query)
    "#{url}?#{querystring}"
  end

  defp encode!(client, payload, format \\ :json) do
    AWS.Client.encode!(client, payload, format)
  end

  defp decode!(client, payload) do
    AWS.Client.decode!(client, payload, :json)
  end
end
|
lib/aws/generated/kinesis_video_media.ex
| 0.844505 | 0.57687 |
kinesis_video_media.ex
|
starcoder
|
defmodule Premailex.CSSParser do
  @moduledoc """
  Module that handles CSS parsing with naive Regular Expression.
  """

  @type rule :: %{directive: String.t(), value: String.t(), important?: boolean}
  @type rule_set :: %{rules: [rule], selector: String.t(), specificity: number}
  # A rule annotated with the specificity of the rule set it came from; this
  # is the element type returned by merge/1.
  @type merged_rule :: %{
          directive: String.t(),
          value: String.t(),
          important?: boolean,
          specificity: number
        }

  @css_selector_rules ~r/([\s\S]*?){([\s\S]*?)}/mi

  # The /x modifier makes whitespace and # comments in the patterns below
  # insignificant.
  @non_id_attributes_and_pseudo_classes ~r/
    (\.[\w]+) # classes
    |
    \[(\w+) # attributes
    |
    (\:( # pseudo classes
      link|visited|active
      |hover|focus
      |lang
      |target
      |enabled|disabled|checked|indeterminate
      |root
      |nth-child|nth-last-child|nth-of-type|nth-last-of-type
      |first-child|last-child|first-of-type|last-of-type
      |only-child|only-of-type
      |empty|contains
    ))
    /ix
  @elements_and_pseudo_elements ~r/
    ((^|[\s\+\>\~]+)[\w]+ # elements
    |
    \:{1,2}( # pseudo-elements
      after|before
      |first-letter|first-line
      |selection
    )
    )/ix
  @comments ~r/\/\*[\s\S]*?\*\//m
  @media_queries ~r/@media[^{]+{([\s\S]+?})\s*}/mi
  @font_face ~r/@font-face\s*{[\s\S]+?}/mi

  @doc """
  Parses a CSS string into a map.
  ## Examples
      iex> Premailex.CSSParser.parse("body { background-color: #fff !important; color: red; }")
      [%{rules: [%{directive: "background-color", value: "#fff !important", important?: true},
                 %{directive: "color", value: "red", important?: false}],
         selector: "body",
         specificity: 1}]
  """
  @spec parse(String.t()) :: [rule_set]
  def parse(""), do: []

  def parse(css) do
    # font-face, media queries and comments are stripped first so the naive
    # selector/body regex only sees plain rule sets.
    @css_selector_rules
    |> Regex.scan(strip(css))
    |> Enum.map(&parse_selectors_rules(&1))
    |> Enum.reduce([], &Enum.concat(&2, &1))
  end

  # A comma-separated selector list produces one rule_set per selector, all
  # sharing the same rule body.
  defp parse_selectors_rules([_, selector, rules]) do
    selector
    |> String.split(",")
    |> Enum.map(&parse_selector_rules(&1, rules))
  end

  defp parse_selector_rules(selector, rules) do
    %{
      selector: String.trim(selector),
      rules: parse_rules(rules),
      specificity: calculate_specificity(selector)
    }
  end

  @doc """
  Parses a CSS rules string into a map.
  Note: `parse_rules/1` won't strip any CSS comments unlike `parse/1`.
  ## Examples
      iex> Premailex.CSSParser.parse_rules("background-color: #fff; color: red;")
      [%{directive: "background-color", value: "#fff", important?: false},
       %{directive: "color", value: "red", important?: false}]
  """
  @spec parse_rules(String.t()) :: [rule]
  def parse_rules(rules) do
    rules
    |> String.split(";")
    |> Enum.map(&parse_rule(&1))
    |> Enum.filter(&(!is_nil(&1)))
  end

  # Split "directive: value" at the first colon only, so values containing
  # colons (e.g. URLs) stay intact.
  defp parse_rule(rule) when is_binary(rule) do
    rule
    |> String.trim()
    |> String.split(":", parts: 2)
    |> parse_rule()
  end

  defp parse_rule([directive, value]) do
    %{
      directive: String.trim(directive),
      value: String.trim(value),
      important?: String.contains?(value, "!important")
    }
  end

  # Empty fragment (e.g. trailing ";") — dropped by parse_rules/1.
  defp parse_rule([""]), do: nil

  # A fragment with no colon is kept with an empty directive.
  defp parse_rule([value]) do
    %{
      directive: "",
      value: String.trim(value),
      important?: String.contains?(value, "!important")
    }
  end

  defp strip(string) do
    string
    |> String.replace(@font_face, "")
    |> String.replace(@media_queries, "")
    |> String.replace(@comments, "")
  end

  @doc """
  Merges CSS rules.
  ## Examples
      iex> rule_sets = Premailex.CSSParser.parse("p {background-color: #fff !important; color: #000;} p {background-color: #000;}")
      iex> Premailex.CSSParser.merge(rule_sets)
      [%{directive: "background-color", value: "#fff !important", important?: true, specificity: 1},
       %{directive: "color", value: "#000", important?: false, specificity: 1}]
  """
  # Spec fix: merge/1 returns a flat list of rules (each annotated with
  # :specificity, see the doctest above), not a list of rule sets.
  @spec merge([rule_set]) :: [merged_rule]
  def merge(rule_sets) do
    rule_sets
    |> Enum.map(fn rule_set ->
      Enum.map(rule_set.rules, &Map.put(&1, :specificity, rule_set.specificity))
    end)
    |> Enum.reduce([], &Enum.concat(&2, &1))
    |> merge_rule_sets
  end

  defp merge_rule_sets(rule_sets) do
    rule_sets
    |> Enum.reduce(%{}, &merge_into_rule_set(&2, &1))
    |> Enum.into([], &elem(&1, 1))
  end

  defp merge_into_rule_set(rule_set, new_rule) do
    rule = rule_set |> Map.get(new_rule.directive, nil)

    # Cascading order: http://www.w3.org/TR/CSS21/cascade.html#cascading-order
    cond do
      is_nil(rule) ->
        Map.put(rule_set, new_rule.directive, new_rule)

      new_rule.important? and (!rule.important? or rule.specificity <= new_rule.specificity) ->
        Map.put(rule_set, new_rule.directive, new_rule)

      !rule.important? and rule.specificity <= new_rule.specificity ->
        Map.put(rule_set, new_rule.directive, new_rule)

      true ->
        rule_set
    end
  end

  @doc """
  Transforms CSS map or list into string.
  ## Examples
      iex> Premailex.CSSParser.to_string([%{directive: "background-color", value: "#fff"}, %{directive: "color", value: "#000"}])
      "background-color:#fff;color:#000;"
      iex> Premailex.CSSParser.to_string(%{directive: "background-color", value: "#fff"})
      "background-color:#fff;"
  """
  # Spec fix: the second clause (exercised by the doctest) accepts a single
  # rule-like map, which the previous list-only spec did not cover.
  @spec to_string([rule] | map) :: String.t()
  def to_string(rules) when is_list(rules),
    do: Enum.reduce(rules, "", &"#{&2}#{__MODULE__.to_string(&1)}")

  def to_string(%{directive: directive, value: value}), do: "#{directive}:#{value};"

  # CSS specificity (sans inline styles): id selectors, then
  # class/attribute/pseudo-class selectors, then element/pseudo-element
  # selectors, summed into a single number.
  defp calculate_specificity(selector) do
    b = ~r/\#/ |> Regex.scan(selector) |> length()
    c = @non_id_attributes_and_pseudo_classes |> Regex.scan(selector) |> length()
    d = @elements_and_pseudo_elements |> Regex.scan(selector) |> length()

    b + c + d
  end
end
|
lib/premailex/css_parser.ex
| 0.775009 | 0.433562 |
css_parser.ex
|
starcoder
|
defmodule Binary.Queue do
  @moduledoc """
  Queue for binary data.
  It resembles a pipeline: data is pushed on one end and pulled from the other.
  The order by which bytes are pushed in is the same by which they are pulled out.
  Internally, this queue implementation optimizes on the amount of copying of
  binary data in memory. Copying possibly occurs when binary data is pulled
  from the queue.
  ## Examples
      iex> Binary.Queue.new() |> Binary.Queue.push(<<5, 208, 224, 23, 85>>)
      %Binary.Queue{data: {[<<5, 208, 224, 23, 85>>],[]}, size: 5}
      iex> Binary.Queue.new() |> Binary.Queue.push(<<5, 208, 224, 23, 85>>) |> Binary.Queue.pull(4)
      {<<5, 208, 224, 23>> , %Binary.Queue{data: {[],["U"]}, size: 1}}
      iex> Binary.Queue.new() |> Binary.Queue.push(<<5, 208, 224, 23, 85>>) |> Binary.Queue.push(<<82, 203>>)
      %Binary.Queue{data: {[<<82, 203>>],[<<5, 208, 224, 23, 85>>]}, size: 7}
  """
  @opaque t :: %__MODULE__{}

  # `size` caches the total byte count so `len/1` is O(1); `data` is an
  # Erlang `:queue` holding the pushed binaries as separate chunks.
  defstruct size: 0, data: :queue.new()

  @doc """
  Returns a new empty binary queue.
  ## Examples
      iex> Binary.Queue.new()
      %Binary.Queue{data: {[],[]}, size: 0}
  """
  @spec new() :: t
  def new() do
    %__MODULE__{}
  end

  @doc """
  Push binary data on the queue. Returns a new queue containing the pushed binary data.
  ## Examples
      iex> Binary.Queue.push(Binary.Queue.new(), <<23, 75>>)
      %Binary.Queue{data: {[<<23, 75>>],[]}, size: 2}
  """
  @spec push(t, binary) :: t
  def push(queue, data) do
    # The chunk is enqueued whole; it is only split (copied) when pulled.
    %__MODULE__{size: queue.size + byte_size(data), data: :queue.in(data, queue.data)}
  end

  @doc """
  Pulls a single byte from the queue. Returns a tuple of the first byte and the new queue without that first byte.
  ## Examples
      iex> q = Binary.Queue.push(Binary.Queue.new(), <<23, 75>>)
      %Binary.Queue{data: {[<<23, 75>>],[]}, size: 2}
      iex> Binary.Queue.pull(q)
      {<<23>>, %Binary.Queue{data: {[], ["K"]}, size: 1}}
  """
  @spec pull(t) :: {binary, t}
  def pull(queue) do
    pull(queue, 1)
  end

  @doc """
  Pulls a number of bytes from the queue. Returns a tuple of a binary holding
  the pulled bytes and the new queue without those bytes. If the queue holds
  fewer bytes than requested, the returned binary contains whatever was
  available.
  ## Examples
      iex> q = Binary.Queue.push(Binary.Queue.new(), <<23, 75, 17>>)
      %Binary.Queue{data: {[<<23, 75, 17>>],[]}, size: 3}
      iex> Binary.Queue.pull(q, 2)
      {<<23, 75>>, %Binary.Queue{data: {[], [<<17>>]}, size: 1}}
  """
  @spec pull(t, non_neg_integer) :: {binary, t}
  def pull(queue, amount) do
    pull(<<>>, amount, queue.size, queue.data)
  end

  # Requested amount fully satisfied: wrap the accumulator and the remaining
  # queue back into a struct.
  defp pull(acc, 0, size, queue) do
    {acc, %__MODULE__{size: size, data: queue}}
  end

  # Queue exhausted before the requested amount was reached: return what was
  # accumulated so far.
  defp pull(acc, _amount, 0, queue) do
    {acc, %__MODULE__{size: 0, data: queue}}
  end

  # Still bytes to pull: pop the next chunk and dispatch on its size via
  # pull/5.
  defp pull(acc, amount, size, queue) do
    {element, popped_queue} = :queue.out(queue)
    pull(acc, amount, size, popped_queue, element)
  end

  # :queue.out reported an empty queue; treat the remaining size as 0.
  defp pull(acc, amount, _size, queue, :empty) do
    pull(acc, amount, 0, queue)
  end

  # Chunk exactly satisfies the remaining amount: append it and finish.
  defp pull(acc, amount, size, queue, {:value, data}) when amount == byte_size(data) do
    pull(
      Binary.append(acc, data),
      0,
      :erlang.max(0, size - byte_size(data)),
      queue
    )
  end

  # Chunk is smaller than the remaining amount: consume it whole and keep
  # pulling from the next chunk.
  defp pull(acc, amount, size, queue, {:value, data}) when amount > byte_size(data) do
    data_size = byte_size(data)

    pull(
      Binary.append(acc, data),
      amount - data_size,
      :erlang.max(0, size - data_size),
      queue
    )
  end

  # Chunk is larger than the remaining amount: split it (this is where the
  # copy happens) and push the remainder back onto the FRONT of the queue so
  # byte order is preserved.
  defp pull(acc, amount, size, queue, {:value, data}) when amount < byte_size(data) do
    {first, rest} = Binary.split_at(data, amount)

    pull(
      Binary.append(acc, first),
      0,
      :erlang.max(0, size - amount),
      :queue.in_r(rest, queue)
    )
  end

  @doc """
  Returns the amount of bytes on the queue
  ## Examples
      iex> q = Binary.Queue.push(Binary.Queue.new(), <<23, 75, 17>>)
      %Binary.Queue{data: {[<<23, 75, 17>>],[]}, size: 3}
      iex> Binary.Queue.len(q)
      3
  """
  @spec len(%Binary.Queue{}) :: non_neg_integer
  def len(queue) do
    queue.size
  end

  @doc """
  Returns `true` if the queue holds no bytes, `false` otherwise.
  ## Examples
      iex> q = Binary.Queue.new()
      %Binary.Queue{data: {[],[]}, size: 0}
      iex> Binary.Queue.is_empty(q)
      true
      iex> q = Binary.Queue.push(q, <<23, 75, 17>>)
      %Binary.Queue{data: {[<<23, 75, 17>>],[]}, size: 3}
      iex> Binary.Queue.is_empty(q)
      false
  """
  @spec is_empty(%Binary.Queue{}) :: boolean
  def is_empty(queue) do
    queue.size == 0 && :queue.is_empty(queue.data)
  end
end
|
lib/binary/queue.ex
| 0.838713 | 0.688141 |
queue.ex
|
starcoder
|
defmodule Geohash do
  @moduledoc """
  Geohash encoding and decoding.
  A geohash encodes a latitude/longitude pair into a short base-32 string by
  interleaving longitude (even) and latitude (odd) bits; decoding returns the
  coordinates at the center of the geohash cell.
  """

  # Base-32 alphabet used by the geohash format (no "a", "i", "l" or "o").
  @geobase32 '0123456789bcdefghjkmnpqrstuvwxyz'

  @doc ~S"""
  Encodes given coordinates to a geohash of length `precision`
  ## Examples
      iex> Geohash.encode(42.6, -5.6, 5)
      "ezs42"
  """
  def encode(lat, lon, precision \\ 11) do
    # Each geohash character encodes 5 bits.
    encode_to_bits(lat, lon, precision * 5) |> to_geobase32()
  end

  @doc ~S"""
  Encodes given coordinates to a bitstring of length `bits_length`
  ## Examples
      iex> Geohash.encode_to_bits(42.6, -5.6, 25)
      <<0b0110111111110000010000010::25>>
  """
  def encode_to_bits(lat, lon, bits_length) do
    starting_position = bits_length - 1
    # Odd bit positions hold latitude, even positions hold longitude; the two
    # integers have disjoint set bits, so adding them interleaves the bits.
    lat_bits = lat_to_bits(lat, starting_position - 1)
    lon_bits = lon_to_bits(lon, starting_position)
    geo_bits = lat_bits + lon_bits
    <<geo_bits::size(bits_length)>>
  end

  # Maps each 5-bit group of the bitstring to its base-32 character.
  defp to_geobase32(bits) do
    chars = for <<c::5 <- bits>>, do: Enum.fetch!(@geobase32, c)
    to_string(chars)
  end

  defp lon_to_bits(lon, position) do
    geo_to_bits(lon, position, {-180.0, 180.0})
  end

  defp lat_to_bits(lat, position) do
    geo_to_bits(lat, position, {-90.0, 90.0})
  end

  defp geo_to_bits(_, position, _) when position < 0 do
    0
  end

  # Builds the bits for one coordinate by binary search over the interval,
  # using 2^position to place each bit instead of building a bitstring.
  # Steps by 2 so latitude and longitude land on alternating bit positions.
  defp geo_to_bits(n, position, {gmin, gmax}) do
    mid = (gmin + gmax) / 2

    if n >= mid do
      round(:math.pow(2, position)) + geo_to_bits(n, position - 2, {mid, gmax})
    else
      geo_to_bits(n, position - 2, {gmin, mid})
    end
  end

  # --------------------------

  @doc ~S"""
  Decodes given geohash to a coordinate pair
  ## Examples
      iex> Geohash.decode("ezs42")
      {42.605, -5.603}
  """
  def decode(geohash) do
    geohash
    |> decode_to_bits()
    |> bits_to_coordinates_pair()
  end

  @doc ~S"""
  Decodes given geohash to the raw interleaved bitstring (5 bits per
  character).
  ## Examples
      iex> Geohash.decode_to_bits("ezs42")
      <<0b0110111111110000010000010::25>>
  """
  def decode_to_bits(geohash) do
    geohash
    # `to_charlist/1` replaces the long-deprecated `to_char_list/1`.
    |> to_charlist()
    |> Enum.map(&from_geobase32/1)
    |> Enum.reduce(<<>>, fn c, acc -> <<acc::bitstring, c::bitstring>> end)
  end

  @doc ~S"""
  Splits an interleaved bitstring into its latitude (odd bits) and longitude
  (even bits) halves and resolves each to a rounded coordinate.
  """
  def bits_to_coordinates_pair(bits) do
    bitslist = for <<bit::1 <- bits>>, do: bit

    lat =
      bitslist
      |> filter_odd()
      |> Enum.reduce(fn bit, acc -> <<acc::bitstring, bit::bitstring>> end)
      |> bits_to_coordinate({-90.0, 90.0})

    lon =
      bitslist
      |> filter_even()
      |> Enum.reduce(fn bit, acc -> <<acc::bitstring, bit::bitstring>> end)
      |> bits_to_coordinate({-180.0, 180.0})

    {lat, lon}
  end

  defp filter_even(bitslist), do: filter_periodically(bitslist, 2, 0)

  defp filter_odd(bitslist), do: filter_periodically(bitslist, 2, 1)

  # Keeps the bits whose index has remainder `offset` modulo `period`, each
  # wrapped as a 1-bit bitstring.
  defp filter_periodically(bitslist, period, offset) do
    bitslist
    |> Enum.with_index()
    |> Enum.filter(fn {_, i} -> rem(i, period) == offset end)
    |> Enum.map(fn {bit, _} -> <<bit::1>> end)
  end

  defp middle({min, max}), do: middle(min, max)

  defp middle(min, max), do: (min + max) / 2

  # All bits consumed: the coordinate is the center of the final interval.
  defp bits_to_coordinate(<<>>, {_min, _max} = min_max) do
    min_max
    |> middle()
    |> round_coordinate(min_max)
  end

  # Each bit halves the interval: 1 keeps the upper half, 0 the lower half.
  defp bits_to_coordinate(<<bit::1, rest::bitstring>>, {min, max}) do
    mid = middle(min, max)

    {start, finish} =
      case bit do
        1 -> {mid, max}
        0 -> {min, mid}
      end

    bits_to_coordinate(rest, {start, finish})
  end

  # Rounding criteria taken from:
  # https://github.com/chrisveness/latlon-geohash/blob/decb13b09a7f1e219a2ca86ff8432fb9e2774fc7/latlon-geohash.js#L117
  # See demo of that implementation here:
  # http://www.movable-type.co.uk/scripts/geohash.html
  defp round_coordinate(coord, {min, max}) do
    Float.round(coord, round(Float.floor(2 - :math.log10(max - min))))
  end

  # Looks up the 5-bit value for a geobase32 character.
  # `Enum.find_index/2` replaces `Enum.filter_map/3`, which was removed from
  # Elixir; raises (via the bitstring constructor) on unknown characters,
  # like the previous version did.
  defp from_geobase32(char) do
    index = Enum.find_index(@geobase32, &(&1 == char))
    <<index::5>>
  end
end
|
lib/geohash.ex
| 0.783243 | 0.664144 |
geohash.ex
|
starcoder
|
defmodule FolderDiff do
  @moduledoc false
  # Split out as a separate package? Compare two folders. Assert if mismatched.
  import ExUnit.Assertions

  @doc """
  Recursively asserts that two folders contain the same file names with the
  same contents; flunks with a descriptive message on the first difference.
  """
  @spec assert_folders_are_equal(folder1 :: Path.t(), folder2 :: Path.t()) :: :ok
  def assert_folders_are_equal(folder1, folder2) do
    # Sorting lets the clause below walk both listings as a merge-compare.
    files1 = folder1 |> File.ls!() |> Enum.sort()
    files2 = folder2 |> File.ls!() |> Enum.sort()
    assert_folders_are_equal(folder1, folder2, files1, files2)
  end

  # Merge-walk over the two sorted listings; a name present in only one
  # listing sorts first on that side and is reported as missing on the other.
  defp assert_folders_are_equal(folder1, folder2, [file1 | files1], [file2 | files2]) do
    cond do
      file1 == file2 ->
        assert_paths_are_equal(folder1, folder2, file1)
        assert_folders_are_equal(folder1, folder2, files1, files2)

      file1 < file2 ->
        flunk_file_missing(folder1, folder2, file1)

      true ->
        flunk_file_missing(folder2, folder1, file2)
    end

    :ok
  end

  defp assert_folders_are_equal(folder1, folder2, [file1 | _], []),
    do: flunk_file_missing(folder1, folder2, file1)

  defp assert_folders_are_equal(folder1, folder2, [], [file2 | _]),
    do: flunk_file_missing(folder2, folder1, file2)

  defp assert_folders_are_equal(_folder1, _folder2, [], []), do: :ok

  # "." and ".." entries are never compared.
  defp assert_paths_are_equal(_folder1, _folder2, "."), do: :ok
  defp assert_paths_are_equal(_folder1, _folder2, ".."), do: :ok

  defp assert_paths_are_equal(folder1, folder2, file) do
    f1 = Path.join(folder1, file)
    f2 = Path.join(folder2, file)
    f1_is_dir? = File.dir?(f1)
    f2_is_dir? = File.dir?(f2)

    cond do
      f1_is_dir? and f2_is_dir? -> assert_folders_are_equal(f1, f2)
      f1_is_dir? -> flunk("#{f1} is a directory; #{f2} is a file")
      f2_is_dir? -> flunk("#{f1} is a file; #{f2} is a directory")
      true -> assert_files_are_equal(f1, f2)
    end
  end

  defp flunk_file_missing(folder_present, folder_missing, file) do
    flunk("File #{file} exists in folder #{folder_present}, but is missing in #{folder_missing}")
  end

  @doc """
  Asserts that two files have byte-identical contents; flunks with a
  (truncated) dump of both files otherwise.
  """
  @spec assert_files_are_equal(f1 :: Path.t(), f2 :: Path.t()) :: :ok
  def assert_files_are_equal(f1, f2) do
    c1 = File.read!(f1)
    c2 = File.read!(f2)

    if c1 != c2 do
      c1 = truncate(c1)
      c2 = truncate(c2)

      flunk(~s"""
      Files mismatch:
      #{f1}:
      #{c1}
      #{f2}:
      #{c2}
      """)
    end

    :ok
  end

  # Truncates file contents for failure messages. Valid UTF-8 is shown as
  # text, anything else as a byte list. Counts are taken with `byte_size/1`
  # so the "bytes" label is accurate — the previous version used
  # `String.length/1`, which counts graphemes (and is unreliable on invalid
  # binaries).
  defp truncate(c) do
    size = byte_size(c)

    if String.valid?(c) do
      if size > 500 do
        ~s"""
        #{size} bytes starting with:
        #{String.slice(c, 0, 500)}
        """
      else
        c
      end
    else
      if size > 100 do
        ~s"""
        #{size} bytes starting with:
        #{inspect(:binary.bin_to_list(c, 0, 100))}
        """
      else
        inspect(:binary.bin_to_list(c))
      end
    end
  end
end
|
test/support/folder_diff.ex
| 0.632162 | 0.59408 |
folder_diff.ex
|
starcoder
|
defmodule Rfx.Ops.Credo.MultiAlias do
  @behaviour Rfx.Ops

  @moduledoc """
  Refactoring Operations to automatically apply the Credo `multi-alias`
  recommendation.
  Walks the source code and expands instances of multi-alias syntax.
  ## Examples
  Basic transformation...
      iex> source = "alias Foo.{Bar, Baz.Qux}"
      ...>
      ...> expected = """
      ...> alias Foo.Bar
      ...> alias Foo.Baz.Qux
      ...> """ |> String.trim()
      ...>
      ...> Rfx.Ops.Credo.MultiAlias.edit(source)
      expected
  Preserving comments...
      iex> source = """
      ...> # Multi alias example
      ...> alias Foo.{ # Opening the multi alias
      ...> Bar, # Here is Bar
      ...> # Here come the Baz
      ...> Baz.Qux # With a Qux!
      ...> }
      ...> """ |> String.trim()
      ...>
      ...> expected = """
      ...> # Multi alias example
      ...> # Opening the multi alias
      ...> # Here is Bar
      ...> alias Foo.Bar
      ...> # Here come the Baz
      ...> # With a Qux!
      ...> alias Foo.Baz.Qux
      ...> """ |> String.trim()
      ...>
      ...> Rfx.Ops.Credo.MultiAlias.edit(source)
      expected
  """

  alias Rfx.Util.Source
  alias Rfx.Change.Req

  # ----- Changelists -----

  @impl true
  def cl_code(source_code: source) do
    cl_code(source)
  end

  @impl true
  def cl_code(file_path: file_path) do
    file_path
    |> File.read!()
    |> changelist(&[file_path: file_path, diff: &1])
  end

  @impl true
  def cl_code(old_source) do
    changelist(old_source, &[edit_source: old_source, diff: &1])
  end

  # Runs the edit, diffs old vs new source and wraps a non-empty diff in a
  # change request built from `edit_args_fun`. Returns `[]` when there is
  # nothing to change.
  defp changelist(old_source, edit_args_fun) do
    new_source = edit(old_source)

    case Source.diff(old_source, new_source) do
      empty when empty in ["", nil] ->
        []

      diff ->
        {:ok, request} = Req.new(edit: edit_args_fun.(diff))
        [request]
    end
  end

  @doc """
  Applies the `multi_alias` transformation to an Elixir source code file.
  - reads the file
  - applies the `multi_alias` transformation to the source
  - return a changelist
  """
  @impl true
  def cl_file(file_path: file_path) do
    cl_code(file_path: file_path)
  end

  @impl true
  def cl_file(file_path) do
    cl_code(file_path: file_path)
  end

  @doc """
  Applies the `multi_alias` transformation to every source file in an Elixir project.
  - walk the project directory, and for each source code file:
  - read the file
  """
  @impl true
  def cl_project(project_root: project_root) do
    changelists =
      project_root
      |> Rfx.Util.Filesys.project_files()
      |> Enum.map(&cl_file/1)

    changelists |> List.flatten() |> Enum.reject(&is_nil/1)
  end

  @impl true
  def cl_project(project_root) do
    cl_project(project_root: project_root)
  end

  @impl true
  def cl_subapp(subapp_root: subapp_root) do
    changelists =
      subapp_root
      |> Rfx.Util.Filesys.subapp_files()
      |> Enum.map(&cl_file/1)

    changelists |> List.flatten() |> Enum.reject(&is_nil/1)
  end

  @impl true
  def cl_subapp(subapp_root) do
    cl_subapp(subapp_root: subapp_root)
  end

  # ----- Edit -----

  @impl true
  defdelegate edit(source_code), to: Rfx.Edit.Credo.MultiAlias1
end
|
lib/rfx/ops/credo/multi_alias.ex
| 0.816736 | 0.446314 |
multi_alias.ex
|
starcoder
|
defmodule Protobuf.JSON.DecodeError do
  @moduledoc """
  Exception describing why decoding Protobuf JSON failed.
  `new/1` turns an internal error tag into an exception struct with a
  human-readable message.
  """
  defexception [:message]

  @type t :: %__MODULE__{message: String.t()}

  def new({:unsupported_syntax, syntax}),
    do: wrap("JSON encoding of '#{syntax}' syntax is unsupported, try proto3")

  def new(:no_json_lib),
    do: wrap("JSON library not loaded, make sure to add :jason to your mix.exs file")

  def new({:bad_message, data, module}),
    do: wrap("JSON map expected for module #{inspect(module)}, got: #{inspect(data)}")

  def new({:bad_duration, string, error}),
    do: wrap("bad JSON value for duration #{inspect(string)}, got: #{inspect(error)}")

  def new({:bad_timestamp, string, reason}),
    do:
      wrap(
        "bad JSON value for timestamp #{inspect(string)}, failed to parse: #{inspect(reason)}"
      )

  def new({:bad_field_mask, string}),
    do: wrap("invalid characters in field mask: #{inspect(string)}")

  def new({:bad_string, field, value}),
    do: wrap("Field '#{field}' has an invalid string (#{inspect(value)})")

  def new({:bad_bool, field, value}),
    do: wrap("Field '#{field}' has an invalid boolean (#{inspect(value)})")

  def new({:bad_int, field, value}),
    do: wrap("Field '#{field}' has an invalid integer (#{inspect(value)})")

  def new({:bad_float, field, value}),
    do: wrap("Field '#{field}' has an invalid floating point (#{inspect(value)})")

  def new({:bad_bytes, field}),
    do: wrap("Field '#{field}' has an invalid Base64-encoded byte sequence")

  def new({:bad_enum, field, value}),
    do: wrap("Field '#{field}' has an invalid enum value (#{inspect(value)})")

  def new({:bad_map, field, value}),
    do: wrap("Field '#{field}' has an invalid map (#{inspect(value)})")

  def new({:bad_map_key, field, type, value}),
    do: wrap("Field '#{field}' has an invalid map key (#{type}: #{inspect(value)})")

  def new({:duplicated_oneof, oneof}),
    do: wrap("Oneof field '#{oneof}' cannot be set twice")

  def new({:bad_repeated, field, value}),
    do: wrap("Repeated field '#{field}' expected a list, got #{inspect(value)}")

  # Wraps a message string in the exception struct; every `new/1` clause
  # funnels through here.
  defp wrap(message), do: %__MODULE__{message: message}
end
|
lib/protobuf/json/decode_error.ex
| 0.781622 | 0.458591 |
decode_error.ex
|
starcoder
|
defmodule Harald.HCI.Command.LEController.CreateConnection do
  # NOTE(review): `use` injects @ogf/@opcode and the `defparameters` macro;
  # their definitions live in Harald.HCI.Command.LEController, outside this
  # file's view.
  use Harald.HCI.Command.LEController, ocf: 0x000D

  @moduledoc """
  The HCI_LE_Create_Connection command is used to create an ACL connection to a
  connectable advertiser
  Bluetooth Core Version 5.2 | Vol 4, Part E, section 7.8.12
  * OGF: `#{inspect(@ogf, base: :hex)}`
  * OCF: `#{inspect(@ocf, base: :hex)}`
  * Opcode: `#{inspect(@opcode)}`
  The LE_Scan_Interval and LE_Scan_Window parameters are recommendations from
  the Host on how long (LE_Scan_Window) and how frequently (LE_Scan_Interval)
  the Controller should scan. The LE_Scan_Window parameter shall be set to a
  value smaller or equal to the value set for the LE_Scan_Interval parameter. If
  both are set to the same value, scanning should run continuously.
  The Initiator_Filter_Policy is used to determine whether the White List is
  used. If the White List is not used, the Peer_Address_Type and the
  Peer_Address parameters specify the address type and address of the
  advertising device to connect to.
  Peer_Address_Type parameter indicates the type of address used in the
  connectable advertisement sent by the peer. The Host shall not set
  Peer_Address_Type to either 0x02 or 0x03 if both the Host and the Controller
  support the HCI_LE_Set_Privacy_Mode command. If a Controller that supports the
  HCI_LE_Set_Privacy_Mode command receives the HCI_LE_Create_Connection command
  with Peer_Address_Type set to either 0x02 or 0x03, it may use either device
  privacy mode or network privacy mode for that peer device.
  Peer_Address parameter indicates the Peer’s Public Device Address, Random
  (static) Device Address, Non-Resolvable Private Address or Resolvable Private
  Address depending on the Peer_Address_Type parameter.
  Own_Address_Type parameter indicates the type of address being used in the
  connection request packets.
  The Connection_Interval_Min and Connection_Interval_Max parameters define the
  minimum and maximum allowed connection interval. The Connection_Interval_Min
  parameter shall not be greater than the Connection_Interval_Max parameter.
  The Connection_Latency parameter defines the maximum allowed connection latency
  (see [Vol 6] Part B, Section 4.5.1).
  The Supervision_Timeout parameter defines the link supervision timeout for the
  connection. The Supervision_Timeout in milliseconds shall be larger than (1 +
  Connection_Latency) * Connection_Interval_Max * 2, where Connection_Interval_Max
  is given in milliseconds. (See [Vol 6] Part B, Section 4.5.2).
  The Min_CE_Length and Max_CE_Length parameters are informative parameters
  providing the Controller with the expected minimum and maximum length of the
  connection events. The Min_CE_Length parameter shall be less than or equal to
  the Max_CE_Length parameter.
  If the Host issues this command when another HCI_LE_Create_Connection command is
  pending in the Controller, the Controller shall return the error code Command
  Disallowed (0x0C).
  If the Own_Address_Type parameter is set to 0x01 and the random address for the
  device has not been initialized, the Controller shall return the error code
  Invalid HCI Command Parameters (0x12).
  If the Own_Address_Type parameter is set to 0x03, the Initiator_Filter_Policy
  parameter is set to 0x00, the controller's resolving list did not contain a
  matching entry, and the random address for the device has not been initialized,
  the Controller shall return the error code Invalid HCI Command Parameters
  (0x12).
  If the Own_Address_Type parameter is set to 0x03, the Initiator_Filter_Policy
  parameter is set to 0x01, and the random address for the device has not been
  initialized, the Controller shall return the error code Invalid HCI Command
  Parameters (0x12)
  """

  # Default values for each command parameter; the moduledoc above describes
  # their meanings per the Bluetooth Core specification.
  defparameters le_scan_interval: 0x0C80,
                le_scan_window: 0x0640,
                initiator_filter_policy: 0,
                peer_address_type: 0,
                peer_address: nil,
                own_address_type: 0,
                connection_interval_min: 0x0024,
                connection_interval_max: 0x0C80,
                connection_latency: 0x0012,
                supervision_timeout: 0x0640,
                min_ce_length: 0x0006,
                max_ce_length: 0x0054

  defimpl HCI.Serializable do
    # Packs the command into its wire form: opcode, a one-byte parameter
    # total length, then the parameter fields. Multi-octet fields are
    # encoded little-endian (`::16-little`); peer_address is packed as a
    # plain 48-bit field.
    def serialize(cc) do
      fields = <<
        cc.le_scan_interval::16-little,
        cc.le_scan_window::16-little,
        cc.initiator_filter_policy::8,
        cc.peer_address_type::8,
        cc.peer_address::48,
        cc.own_address_type::8,
        cc.connection_interval_min::16-little,
        cc.connection_interval_max::16-little,
        cc.connection_latency::16-little,
        cc.supervision_timeout::16-little,
        cc.min_ce_length::16-little,
        cc.max_ce_length::16-little
      >>

      fields_size = byte_size(fields)

      <<cc.opcode::binary, fields_size, fields::binary>>
    end
  end

  # Inverse of serialize/1: matches the opcode, skips the length byte and
  # unpacks the fields (mirroring the bit widths/endianness used above) back
  # into the command struct.
  @impl Harald.HCI.Command
  def deserialize(<<@opcode::binary, _fields_size, fields::binary>>) do
    <<
      le_scan_interval::16-little,
      le_scan_window::16-little,
      initiator_filter_policy::8,
      peer_address_type::8,
      peer_address::48,
      own_address_type::8,
      connection_interval_min::16-little,
      connection_interval_max::16-little,
      connection_latency::16-little,
      supervision_timeout::16-little,
      min_ce_length::16-little,
      max_ce_length::16-little
    >> = fields

    cc = %__MODULE__{
      le_scan_interval: le_scan_interval,
      le_scan_window: le_scan_window,
      initiator_filter_policy: initiator_filter_policy,
      peer_address_type: peer_address_type,
      peer_address: peer_address,
      own_address_type: own_address_type,
      connection_interval_min: connection_interval_min,
      connection_interval_max: connection_interval_max,
      connection_latency: connection_latency,
      supervision_timeout: supervision_timeout,
      min_ce_length: min_ce_length,
      max_ce_length: max_ce_length
    }

    {:ok, cc}
  end

  # This command carries no return parameters.
  @impl Harald.HCI.Command
  def return_parameters(_), do: %{}
end
|
lib/harald/hci/commands/le_controller/create_connection.ex
| 0.803791 | 0.596139 |
create_connection.ex
|
starcoder
|
defmodule ServerSentEventStage do
  @moduledoc """
  A GenStage producer which parses the ServerSentEvent (SSE) protocol.
  SSEs are used in browsers via the EventSource API, but they can be used for
  any kind of one-directional streaming.
  For more information, see the [W3C](https://html.spec.whatwg.org/multipage/server-sent-events.html).
  """
  use GenStage
  require Logger
  alias Mint.HTTP
  alias ServerSentEventStage.Event

  # Client functions

  @doc """
  Starts a producer stage which parses the ServerSentEvent protocol and sends those messages as events.
  The only required argument is `url`: it can be either a binary of the URL
  to connect to or a {module, fun, arguments} tuple.
  Other arguments are passed as options to `GenStage.start_link/3`.
  """
  def start_link(args) do
    # Fail fast if :url is missing; the value itself is read later in init/1.
    _url = Keyword.fetch!(args, :url)
    opts = Keyword.take(args, ~w(debug name timeout spawn_opt)a)
    GenStage.start_link(__MODULE__, args, opts)
  end

  @doc """
  Refresh the connection by disconnecting and reconnecting.
  Some clients will send a final message, but not terminate the
  connection. This function allows a client of SSES to reconnect.
  """
  def refresh(server) do
    GenStage.cast(server, :refresh)
  end

  # Server functions

  # `buffer` accumulates partial SSE data until a blank line ("\n\n")
  # terminates an event; `redirecting?` tracks an in-flight 3xx response.
  defstruct [:url, :headers, :connected_url, :conn, :ref, buffer: "", redirecting?: false]

  @doc false
  def init(args) do
    state = %__MODULE__{
      url: Keyword.fetch!(args, :url),
      headers: Keyword.get(args, :headers, [])
    }

    {:producer, state}
  end

  @doc false
  def handle_info(:connect, state) do
    url = compute_url(state)
    state = do_connect(url, state)
    {:noreply, [], state}
  end

  def handle_info(message, %{conn: conn} = state) when conn != nil do
    # Feed raw socket messages through Mint; each resulting response part is
    # folded into {state, events} by handle_mint_response/2.
    case HTTP.stream(state.conn, message) do
      {:ok, conn, responses} ->
        state = %{state | conn: conn}
        {state, events} = Enum.reduce_while(responses, {state, []}, &handle_mint_response/2)
        {:noreply, events, state}

      {:error, conn, error, responses} ->
        # Process whatever arrived before the error, then handle the error
        # itself as a synthetic {:error, ref, reason} response.
        state = %{state | conn: conn}
        ref = state.ref
        {state, events} = Enum.reduce_while(responses, {state, []}, &handle_mint_response/2)
        {_, {state, events}} = handle_mint_response({:error, ref, error}, {state, events})
        {:noreply, events, state}

      :unknown ->
        handle_unknown_info(message, state)
    end
  end

  def handle_info(message, state) do
    handle_unknown_info(message, state)
  end

  defp handle_unknown_info({data_tag, _, _}, state) when data_tag in [:ssl, :tcp] do
    # These can occur after we've re-connected: drop them on the floor
    {:noreply, [], state}
  end

  defp handle_unknown_info({closed_tag, _}, state)
       when closed_tag in [:ssl_closed, :tcp_closed] do
    # These can occur after we've re-connected: drop them on the floor.
    {:noreply, [], state}
  end

  defp handle_unknown_info(message, state) do
    # ignore data received unexpectedly
    Logger.warn(fn ->
      "#{__MODULE__} unexpected message: #{inspect(message)}\nState: #{inspect(state)}"
    end)

    {:noreply, [], state}
  end

  # 200 OK: the SSE stream is established; data chunks follow.
  defp handle_mint_response({:status, ref, 200}, {%{ref: ref} = state, _events} = acc) do
    Logger.debug(fn -> "#{__MODULE__} connected url=#{inspect(state.connected_url)}" end)
    {:cont, acc}
  end

  # 3xx: remember that we are redirecting; the Location header arrives in the
  # subsequent :headers response.
  defp handle_mint_response({:status, ref, redirect_code}, {%{ref: ref} = state, events})
       when redirect_code in [301, 302, 307] do
    Logger.debug(fn ->
      "#{__MODULE__} connected, received redirect url=#{inspect(state.connected_url)} code=#{redirect_code}"
    end)

    state = %{state | redirecting?: true}
    {:cont, {state, events}}
  end

  # Any other status: log and schedule a reconnect.
  defp handle_mint_response({:status, ref, code}, {%{ref: ref} = state, events}) do
    Logger.warn(fn ->
      "#{__MODULE__} unexpected status url=#{inspect(state.connected_url)} code=#{code}"
    end)

    do_refresh!()
    {:halt, {state, events}}
  end

  defp handle_mint_response(
         {:headers, ref, _headers},
         {%{ref: ref, redirecting?: false}, _events} = acc
       ) do
    {:cont, acc}
  end

  # Headers while redirecting: follow the Location header to the new URL.
  defp handle_mint_response(
         {:headers, ref, headers},
         {%{ref: ref, redirecting?: true} = state, events}
       ) do
    {"location", new_location} =
      Enum.find(headers, fn {header, _value} -> header == "location" end)

    state = do_connect(new_location, state)
    {:halt, {state, events}}
  end

  # Body chunk: append to the buffer and split out complete events (an SSE
  # event ends with a blank line, i.e. "\n\n"); the trailing partial piece
  # stays in the buffer.
  defp handle_mint_response({:data, ref, chunk}, {%{ref: ref} = state, events}) do
    buffer = state.buffer <> chunk
    event_binaries = String.split(buffer, "\n\n")
    {event_binaries, [buffer]} = Enum.split(event_binaries, -1)
    new_events = Enum.map(event_binaries, &Event.from_string/1)

    unless new_events == [] do
      Logger.info(fn ->
        "#{__MODULE__} sending events url=#{inspect(state.connected_url)} count=#{length(new_events)}"
      end)

      for event <- new_events do
        Logger.debug(fn ->
          inspect(event, limit: :infinity, printable_limit: :infinity)
        end)
      end
    end

    state = %{state | buffer: buffer}
    {:cont, {state, events ++ new_events}}
  end

  # Server finished the response: reconnect to keep streaming.
  defp handle_mint_response({:done, ref}, {%{ref: ref} = state, events}) do
    Logger.info(fn ->
      "#{__MODULE__} disconnected, reconnecting... url=#{inspect(state.connected_url)}"
    end)

    do_refresh!()
    {:halt, {state, events}}
  end

  defp handle_mint_response({:error, ref, reason}, {%{ref: ref} = state, events}) do
    Logger.error(fn ->
      "#{__MODULE__} HTTP error url=#{inspect(state.connected_url)} reason=#{inspect(reason)}"
    end)

    do_refresh!()
    {:halt, {state, events}}
  end

  @doc false
  def handle_demand(_demand, state) do
    # Demand itself is not tracked; it only triggers a lazy first connect.
    :ok = maybe_connect(state)
    {:noreply, [], state}
  end

  @doc false
  def handle_cast(:refresh, state) do
    do_refresh!()
    {:noreply, [], state}
  end

  defp do_connect(url, state) do
    # Close any existing connection before opening the new one.
    state = reset_state(state)

    case connect_to_url(url, state.headers) do
      {:ok, conn, ref} ->
        %{state | connected_url: url, conn: conn, ref: ref}

      {:error, reason} ->
        Logger.error(fn ->
          "#{__MODULE__} unable to connect url=#{inspect(url)} reason=#{inspect(reason)}"
        end)

        do_refresh!()
        state
    end
  end

  defp connect_to_url(url, headers) do
    Logger.debug(fn -> "#{__MODULE__} requesting #{url}" end)
    uri = URI.parse(url)

    uri.scheme
    |> scheme_atom
    |> HTTP.connect(uri.host, uri.port, transport_opts: [timeout: 60_000])
    |> handle_connect_response(uri, headers)
  end

  defp scheme_atom("https"), do: :https
  defp scheme_atom("http"), do: :http

  defp handle_connect_response({:ok, conn}, uri, headers) do
    headers = [
      {"Accept", "text/event-stream"} | headers
    ]

    # Rebuild the request target from path + query, defaulting to "/".
    path =
      case {uri.path, uri.query} do
        {nil, nil} ->
          "/"

        {path, nil} ->
          path

        {nil, query} ->
          "/?" <> query

        {path, query} ->
          path <> "?" <> query
      end

    {:ok, conn, ref} = HTTP.request(conn, "GET", path, headers, nil)
    {:ok, conn, ref}
  end

  defp handle_connect_response({:error, _reason} = e, _uri_, _headers) do
    e
  end

  defp handle_connect_response({:error, _conn, reason}, _uri, _headers) do
    {:error, reason}
  end

  defp maybe_connect(%{conn: conn}) do
    if conn == nil or not HTTP.open?(conn, :read) do
      send(self(), :connect)
    end

    :ok
  end

  defp compute_url(%{url: {m, f, a}}) do
    apply(m, f, a)
  end

  defp compute_url(%{url: url}) when is_binary(url) do
    url
  end

  defp reset_state(state) do
    if state.conn != nil and HTTP.open?(state.conn, :read) do
      {:ok, _conn} = HTTP.close(state.conn)
    end

    %{state | connected_url: nil, conn: nil, ref: nil, redirecting?: false, buffer: ""}
  end

  # Schedules a reconnect by messaging ourselves; handled in handle_info/2.
  defp do_refresh! do
    send(self(), :connect)
  end
end
|
lib/server_sent_event_stage.ex
| 0.753285 | 0.461441 |
server_sent_event_stage.ex
|
starcoder
|
defmodule Tablespoon.Transport.FakeBtd do
  @moduledoc """
  Transport implementation which pretends to be Btd.
  By default, it always replies to packets with a good response.
  However, there are some configuration variables that can be set to change that.
  - drop_rate: the percent of messages which are not sent (1 to 100)
  - send_error_rate: the percent of messages which result in a sending error (1 to 100)
  - disconnect_rate: the percent of replies which result in a disconnection (1 to 100)
  - delay_range: a range of milliseconds by which to delay replies
  """
  @behaviour Tablespoon.Transport

  alias Tablespoon.Protocol.NTCIP1211Extended, as: NTCIP

  defstruct [
    :ref,
    drop_rate: 0,
    send_error_rate: 0,
    disconnect_rate: 0,
    delay_range: 0..0
  ]

  @impl Tablespoon.Transport
  def new(opts \\ []), do: struct(__MODULE__, opts)

  @impl Tablespoon.Transport
  def connect(%__MODULE__{} = transport) do
    # The ref both marks the transport as connected and tags reply messages.
    {:ok, %{transport | ref: make_ref()}}
  end

  @impl Tablespoon.Transport
  def close(%__MODULE__{} = transport), do: %{transport | ref: nil}

  @impl Tablespoon.Transport
  def send(%__MODULE__{ref: nil}, _data), do: {:error, :not_connected}

  def send(%__MODULE__{} = transport, data) do
    cond do
      # Drop check happens first, so a dropped message can't also error.
      trigger?(transport.drop_rate) -> {:ok, transport}
      trigger?(transport.send_error_rate) -> {:error, :trigger_failed}
      true -> schedule_reply(transport, data)
    end
  end

  # Decodes the outgoing NTCIP packet, builds the matching response PDU and
  # schedules it for delivery to this process after a random delay drawn from
  # `delay_range`.
  defp schedule_reply(transport, data) do
    {:ok, ntcip} = NTCIP.decode(IO.iodata_to_binary(data))
    response = NTCIP.encode(%{ntcip | pdu_type: :response})
    delay = Enum.random(transport.delay_range)

    _ =
      Process.send_after(
        self(),
        {transport.ref, {:data, IO.iodata_to_binary(response)}},
        delay
      )

    {:ok, transport}
  end

  @impl Tablespoon.Transport
  def stream(%__MODULE__{ref: ref} = transport, {ref, message}) do
    if trigger?(transport.disconnect_rate) do
      {:ok, %{transport | ref: nil}, [:closed]}
    else
      {:ok, transport, [message]}
    end
  end

  def stream(%__MODULE__{}, _), do: :unknown

  # Fires with probability `rate`/100: a rate of 0 never triggers, 100 always.
  def trigger?(rate) do
    Enum.random(1..100) <= rate
  end
end
|
lib/tablespoon/transport/fake_btd.ex
| 0.825203 | 0.474022 |
fake_btd.ex
|
starcoder
|
defmodule AFK.Scancode do
  @moduledoc false

  # Scancode-to-key mapping table. The numeric values appear to follow the
  # USB HID keyboard usage IDs (0x04 = :a, etc.) — TODO confirm against the
  # HID usage tables.
  @keys [
    {0x04, :a},
    {0x05, :b},
    {0x06, :c},
    {0x07, :d},
    {0x08, :e},
    {0x09, :f},
    {0x0A, :g},
    {0x0B, :h},
    {0x0C, :i},
    {0x0D, :j},
    {0x0E, :k},
    {0x0F, :l},
    {0x10, :m},
    {0x11, :n},
    {0x12, :o},
    {0x13, :p},
    {0x14, :q},
    {0x15, :r},
    {0x16, :s},
    {0x17, :t},
    {0x18, :u},
    {0x19, :v},
    {0x1A, :w},
    {0x1B, :x},
    {0x1C, :y},
    {0x1D, :z},
    {0x1E, :"1"},
    {0x1F, :"2"},
    {0x20, :"3"},
    {0x21, :"4"},
    {0x22, :"5"},
    {0x23, :"6"},
    {0x24, :"7"},
    {0x25, :"8"},
    {0x26, :"9"},
    {0x27, :"0"},
    {0x28, :enter},
    {0x29, :escape},
    {0x2A, :backspace},
    {0x2B, :tab},
    {0x2C, :space},
    {0x2D, :minus},
    {0x2E, :equals},
    {0x2F, :left_square_bracket},
    {0x30, :right_square_bracket},
    {0x31, :backslash},
    {0x33, :semicolon},
    {0x34, :single_quote},
    {0x35, :grave},
    {0x36, :comma},
    {0x37, :period},
    {0x38, :slash},
    {0x39, :caps_lock},
    {0x3A, :f1},
    {0x3B, :f2},
    {0x3C, :f3},
    {0x3D, :f4},
    {0x3E, :f5},
    {0x3F, :f6},
    {0x40, :f7},
    {0x41, :f8},
    {0x42, :f9},
    {0x43, :f10},
    {0x44, :f11},
    {0x45, :f12},
    {0x46, :print_screen},
    {0x47, :scroll_lock},
    {0x48, :pause},
    {0x49, :insert},
    {0x4A, :home},
    {0x4B, :page_up},
    {0x4C, :delete},
    {0x4D, :end},
    {0x4E, :page_down},
    {0x4F, :right},
    {0x50, :left},
    {0x51, :down},
    {0x52, :up},
    {0x65, :application}
  ]

  # Modifier bit masks; each modifier occupies one bit (0x01..0x80).
  @modifiers [
    {0x01, :left_control},
    {0x02, :left_shift},
    {0x04, :left_alt},
    {0x08, :left_super},
    {0x10, :right_control},
    {0x20, :right_shift},
    {0x40, :right_alt},
    {0x80, :right_super}
  ]

  # Builds, at compile time, a union type of every distinct scancode value in
  # @keys and @modifiers (e.g. `0x04 | 0x05 | ...`) by folding the values
  # into nested `|` AST nodes.
  @type t ::
          unquote(
            Enum.reduce(
              Enum.uniq(Enum.map(@keys(), &elem(&1, 0)) ++ Enum.map(@modifiers(), &elem(&1, 0))),
              &{:|, [], [&1, &2]}
            )
          )

  # Dispatches to the Scancode protocol implementation for the keycode.
  @spec scancode(AFK.Keycode.with_scancode()) :: t
  def scancode(keycode), do: __MODULE__.Protocol.scancode(keycode)

  # Accessor for the full scancode/key table.
  @spec keys :: [{t, AFK.Keycode.Key.key()}]
  def keys, do: @keys

  # Accessor for the modifier bit-mask table.
  @spec modifiers :: [{t, AFK.Keycode.Modifier.modifier()}]
  def modifiers, do: @modifiers
end
|
lib/afk/scancode.ex
| 0.52683 | 0.664431 |
scancode.ex
|
starcoder
|
defmodule Manic.JSONEnvelope do
  @moduledoc """
  Module implementing the Merchant API [JSON Envelope Specification](https://github.com/bitcoin-sv-specs/brfc-misc/tree/master/jsonenvelope).
  Every response payload from the Merchant API is encapsulated in a parent JSON
  object, which is signed by the miner's [Miner ID](https://github.com/bitcoin-sv/minerid-reference).
  Most manic functions return just the parsed payload, but behind the scenes
  the signature is automatically verified against the payload and an error is
  returned if verification fails.
  """

  # JSONEnvelope
  defstruct payload: nil,
            signature: nil,
            public_key: nil,
            encoding: nil,
            mimetype: nil,
            verified: false

  @typedoc """
  JSON Envelope.
  Each parent JSON object contains a JSON encoded payload, signature and public
  key. The public key can be used to verify the signature against the payload.
  """
  @type t :: %__MODULE__{
    payload: String.t,
    signature: String.t,
    public_key: String.t,
    encoding: String.t,
    mimetype: String.t,
    verified: boolean
  }

  @typedoc """
  Merchant API response payload.
  Depending on the request, the payload returned by the Merchant API can contain
  different fields. Manic automatically re-cases all keys in the map to snake-cased
  strings for a more idiomatic Elixir style.
  ## Examples
  The payload for a fee quote request:
      %{
        "api_version" => String.t,
        "current_highest_block_hash" => String.t,
        "current_highest_block_height" => integer,
        "expiry_time" => String.t,
        "fees" => [
          %{
            "fee_type" => String.t,
            "mining_fee" => %{
              "bytes" => integer,
              "satoshis" => integer
            },
            "relay_fee" => %{
              "bytes" => integer,
              "satoshis" => integer
            }
          },
          ...
        ],
        "miner_id" => String.t,
        "miner_reputation" => String.t | nil,
        "timestamp" => String.t
      }
  Example payload from submitting new transactions:
      %{
        "api_version" => String.t,
        "current_highest_block_hash" => String.t,
        "current_highest_block_height" => integer,
        "miner_id" => String.t,
        "return_result" => String.t,
        "result_description" => String.t,
        "timestamp" => String.t,
        "txid" => String.t,
        "tx_scond_mempool_expiry" => integer
      }
  Example payload from querying a transaction's status:
      %{
        "api_version" => String.t,
        "block_hash" => String.t,
        "block_height" => integer,
        "confirmations" => integer,
        "miner_id" => String.t,
        "return_result" => String.t,
        "result_description" => String.t,
        "timestamp" => String.t,
        "tx_scond_mempool_expiry" => integer
      }
  """
  @type payload :: %{
    String.t => String.t | integer | nil
  }

  @doc """
  Builds a [`JSON Envelope`](`t:t/0`) from the given [`map`](`t:map/0`).
  """
  @spec build(map) :: __MODULE__.t
  def build(%{} = body) do
    # Note: the API uses camelCase ("publicKey"); the struct uses snake_case.
    %__MODULE__{
      payload: body["payload"],
      signature: body["signature"],
      public_key: body["publicKey"],
      encoding: body["encoding"],
      mimetype: body["mimetype"]
    }
  end

  @doc """
  Verifies the given [`JSON Envelope`](`t:t/0`), by cryptographically verifying
  the envelope's signature against the payload, using the public key in the envelope.
  Adds the boolean result to the `:verified` key, and returns the
  [`JSON Envelope`](`t:t/0`) in an `:ok` tuple.
  """
  @spec verify(__MODULE__.t | map) ::
    {:ok, __MODULE__.t} |
    {:error, Exception.t | String.t}
  # Without both a public key and a signature there is nothing to verify —
  # pass the envelope through with `verified: false`.
  def verify(%__MODULE__{public_key: public_key, signature: signature} = env)
    when (is_nil(public_key) or public_key == "")
    or (is_nil(signature) or signature == ""),
    do: {:ok, env}

  def verify(%__MODULE__{payload: payload, public_key: public_key, signature: signature} = env) do
    with {:ok, pubkey} <- Base.decode16(public_key, case: :mixed) do
      case Curvy.verify(signature, payload, pubkey, encoding: :hex) do
        # Signature checks out — mark the envelope as verified.
        true -> {:ok, %{env | verified: true}}
        # Failed or errored verification is not an error here; the envelope
        # is simply returned with `verified: false`.
        _ -> {:ok, env}
      end
    else
      :error ->
        {:error, "Error decoding public key"}
    end
  end

  # Plain maps (raw API responses) are first built into an envelope struct.
  def verify(%{} = env), do: env |> build() |> verify()

  @doc """
  Parses the given [`JSON Envelope's`](`t:t/0`) payload according to its
  specified mime type.
  Returns the result in an `:ok` / `:error` tuple pair.
  The payload's keys are automatically re-cased to snake-cased strings for a
  more idiomatic Elixir style.
  """
  # Currently can safely assume everything is JSON
  @spec parse_payload(__MODULE__.t) :: {:ok, map} | {:error, Exception.t}
  def parse_payload(%__MODULE__{payload: payload} = env)
    when is_binary(payload)
  do
    case Jason.decode(payload) do
      {:ok, map} ->
        payload =
          map
          |> Recase.Enumerable.convert_keys(&Recase.to_snake/1)
          |> Map.put("verified", env.verified)

        {:ok, payload}

      {:error, error} ->
        {:error, error}
    end
  end

  # Non-binary payloads (e.g. nil) cannot be JSON decoded.
  def parse_payload(%__MODULE__{payload: payload}),
    do: {:error, "Invalid JSON payload: \"#{ payload }\""}
end
|
lib/manic/json_envelope.ex
| 0.887504 | 0.59134 |
json_envelope.ex
|
starcoder
|
defmodule FunLand.Appliable do
  # Fix: this documentation was previously attached with `@doc` (which would
  # bind to the next definition, not the module); it belongs in `@moduledoc`.
  @moduledoc """
  Something is Appliable if you can _apply_ one of it (containing one or multiple functions) _with_ another.
  Appliable is mostly born out of the needs to apply a function that is already wrapped in a Mappable:
  - If you had a bare function, you could use `Mappable.map/2` to apply it over a Mappable.
  - If however, you have a function already inside a Mappable, a new operation has to be defined to apply it over a Mappable (of the same kind).
  This operation is called `apply_with/2`.
  'a function inside a Mappable' is something that happens when you partially apply functions, which isn't that common in Elixir because functions are not automatically curried.
  ### Currying and Partial Application
  As `apply_with` only applies a single argument per function at a time, it works best when used with curried functions.
  In Elixir, functions are not curried by default.
  Fortunately, there exists the [Currying](https://hex.pm/packages/currying) library, which transforms your normal functions into curried functions.
  If you want to be able to use Applicative to its fullest potential, instead of calling `fun.(b)` in your implementation, use `Currying.curry(fun).(b)`
  _________
  To be Appliable something also has to be Mappable.
  To make your data structure Appliable, use `use Appliable` in its module, and implement both Appliable's `apply_with/2` and Mappable's `map/2`.
  ## Fruit Salad Example
  Say we have a bowl with a partially-made fruit-salad.
  We have a second bowl, which contains some (peeled) bananas.
  We would like to add these bananas to the fruit salad.
  This would be easy if we had our partially-made fruit-salad, as we could just _map_ the 'combine a banana with some fruit salad' operation over the bowl of bananas.
  However, we don't 'just' have the partially-made fruit-salad, as this would make a big mess of our kitchen countertop.
  In fact, it is very likely that this bowl-with partially-made fruit salad was the result of combining (`mapping`) earlier ingredients in bowls.
  So, we need something similar to `map`, but instead of taking 'just' an operation, we use a bowl with that operation.
  For the fruit salad bowl, we could define it as 'take some fruit-salad from Bowl A, combine it with a banana in Bowl B. -> repeat until bananas and fruit-salad are fully combined'.
  This is called `apply_with`.
  Note that, because the part that changes more often is the Appliable with the (partially-applied) function (in other words: The bowl with the partially-made fruit salad),
  the parameters of this functions are the reverse of `Mappable.map`.
  ## In Other Environments
  - In Haskell, `Appliable.apply_with` is known by the uninformative name `ap`, often written as `<*>`.
  - In Category Theory, something that is Appliable is called an *Apply*.
  """

  @type appliable(_) :: FunLand.adt

  # The single callback an Appliable implementation must provide.
  @callback apply_with(appliable((b -> c)), appliable(b)) :: appliable(c) when b: any, c: any

  defmacro __using__(_opts) do
    quote do
      # Appliable implies Mappable, so pull in its behaviour as well.
      use FunLand.Mappable
      @behaviour FunLand.Appliable
    end
  end

  defdelegate map(a, fun), to: FunLand.Mappable

  @doc """
  Applies `appliable_with_function_inside`, which should only contain functions, with as arguments
  the elements inside `appliable_b`.
  For a List, this means that the list(s) that are made by mapping each of the functions inside
  `appliable_with_function_inside` over the elements of `appliable_b` are concatenated, so a single list of all
  results is returned.
      iex> [&(&1 + 1), &(&1 - 1)] |> FunLand.Appliable.apply_with([1, 2])
      [2, 3, 0, 1]
      iex> [Currying.curry(&+/2), Currying.curry(&-/2)] |> FunLand.Appliable.apply_with([10, 20]) |> FunLand.Appliable.apply_with([3,4])
      [13, 14, 23, 24, 7, 6, 17, 16]
  For `Maybe`, whenever one of the two arguments is `Nothing`, `Nothing` will be returned. If both are filled, then a result will be computed,
  and this result will be returned, wrapped in a new Maybe.
  """
  def apply_with(appliable_with_function_inside, appliable_b)

  # Standard-library structs get dedicated implementation modules, resolved
  # at compile time from FunLand.Builtin's mapping.
  for {stdlib_module, module} <- FunLand.Builtin.__stdlib_struct_modules__ do
    def apply_with(a = %unquote(stdlib_module){}, b = %unquote(stdlib_module){}) do
      apply(unquote(module), :apply_with, [a, b])
    end
  end

  # Any other struct dispatches to its own module, which is expected to
  # implement `apply_with/2`. Both arguments must be the same struct type.
  def apply_with(a = %appliable_module{}, b = %appliable_module{}) do
    appliable_module.apply_with(a, b)
  end

  use FunLand.Helper.GuardMacros

  # Built-in, non-struct types (lists, etc.) are matched by guard and
  # dispatched to their implementation modules.
  for {guard, module} <- FunLand.Builtin.__builtin__ do
    def apply_with(a, b) when unquote(guard)(a) and unquote(guard)(b) do
      apply(unquote(module), :apply_with, [a, b])
    end
  end
end
|
lib/fun_land/appliable.ex
| 0.761405 | 0.738056 |
appliable.ex
|
starcoder
|
defmodule TelemetryMetricsPrometheus do
  @moduledoc """
  A Prometheus reporter for [`Telemetry.Metrics`](https://github.com/beam-telemetry/telemetry_metrics) definitions.

  Hand a list of metric definitions to the reporter via the `:metrics` option.
  Running it under a supervision tree — usually the application's — is the
  recommended setup:

      def start(_type, _args) do
        # List all child processes to be supervised
        children = [
          {TelemetryMetricsPrometheus, [metrics: metrics()]}
          ...
        ]
        opts = [strategy: :one_for_one, name: ExampleApp.Supervisor]
        Supervisor.start_link(children, opts)
      end

      defp metrics, do:
        [
          counter("http.request.count"),
          sum("http.request.payload_size", unit: :byte),
          last_value("vm.memory.total", unit: :byte)
        ]

  Unless configured otherwise, metrics are served on port `9568` at
  `/metrics`. Using the bundled server is recommended but optional. `https`
  is not supported yet; if you need it, use the
  `TelemetryMetricsPrometheus.Core` library directly — its
  `TelemetryMetricsPrometheus.Core.scrape/1` function renders the metrics in
  the Prometheus text format.

  See the `TelemetryMetricsPrometheus.Core` docs for details on metric types
  and units.
  """
  require Logger

  @type option ::
          TelemetryMetricsPrometheus.Core.prometheus_option()
          | {:port, pos_integer()}
          | {:metrics, TelemetryMetricsPrometheus.Core.metrics()}

  @type options :: [option]

  @doc """
  Returns a child specification for the reporter, allowing it to be started
  under a supervisor:

      children = [
        {TelemetryMetricsPrometheus, options}
      ]

  See `start_link/1` for the available options.
  """
  @spec child_spec(options()) :: Supervisor.child_spec()
  def child_spec(options) do
    opts = ensure_options(options)

    id =
      opts
      |> Keyword.get(:name, :prometheus_metrics)
      |> registered_name_to_id()

    Supervisor.child_spec(
      %{id: id, start: {TelemetryMetricsPrometheus.Supervisor, :start_link, [opts]}},
      []
    )
  end

  @doc """
  Starts the reporter and links it to the calling process.

  Available options:
    * `:metrics` - a list of `Telemetry.Metrics` definitions to monitor. **required**
    * `:name` - the name to set the process's id to. Defaults to `:prometheus_metrics`
    * `:port` - port number for the reporter instance's server. Defaults to `9568`

  All other options are forwarded to `TelemetryMetricsPrometheus.Core.init/2`.
  """
  @spec start_link(options()) :: GenServer.on_start()
  def start_link(options) do
    options
    |> ensure_options()
    |> TelemetryMetricsPrometheus.Supervisor.start_link()
  end

  # Derives a child-spec id from the configured `:name`, which may be a bare
  # atom or a `{:global, name}` / `{:via, registry, name}` registration tuple.
  defp registered_name_to_id(name) when is_atom(name), do: name
  defp registered_name_to_id({:global, name}), do: name
  defp registered_name_to_id({:via, _registry, name}), do: name

  # Fills in any option the caller did not supply with its default value.
  defp ensure_options(user_options) do
    Keyword.merge(default_options(), user_options)
  end

  @spec default_options() :: options()
  defp default_options do
    [port: 9568, protocol: :http, name: :prometheus_metrics]
  end
end
|
lib/telemetry_metrics_prometheus.ex
| 0.899145 | 0.424233 |
telemetry_metrics_prometheus.ex
|
starcoder
|
defmodule Benchfella.Snapshot do
  @moduledoc """
  Parses, compares and serializes benchmark snapshots.

  A snapshot is the text format produced by `prepare/4`: one header line with
  the run options (`;`-separated `name:value` pairs), one title line, then
  one tab-separated `module\\ttest\\ttags\\titerations\\telapsed` row per test.
  """
  defstruct options: %{}, tests: []

  # Number of decimal places used when rounding ratios and percentages.
  @precision 2

  alias __MODULE__
  alias Benchfella.Json

  # Renders benchmark results as iodata in the snapshot text format.
  # Results that are not `{{mod, fun}, {iter, elapsed, _mem_stats}}` tuples
  # are silently skipped (rendered as ""). Note the row format writes an
  # empty tags column (`\t\t`).
  def prepare(duration, mem_stats?, sys_mem_stats?, results) do
    [
      "duration:", to_string(duration), ";",
      "mem stats:", to_string(mem_stats?), ";",
      "sys mem stats:", to_string(sys_mem_stats?),
      "\nmodule;test;tags;iterations;elapsed\n",
      Enum.map(results, fn
        {{mod, fun}, {iter, elapsed, _mem_stats}} ->
          :io_lib.format('~s\t~s\t\t~B\t~B~n', [inspect(mod), "#{fun}", iter, elapsed])
        _otherwise -> ""
      end)
    ]
  end

  # Parses a snapshot string back into a %Snapshot{} struct: the inverse of
  # `prepare/4`. The first line is the options header, the second the column
  # titles, and the rest are tab-separated test rows.
  def parse(str) do
    [header, _titles | rest] = String.split(str, "\n")
    options =
      header
      |> String.split(";")
      |> Enum.map(&String.split(&1, ":"))
      |> Enum.map(fn [name, val] -> {name, parse_opt(name, val)} end)
    tests =
      rest
      |> Enum.reject(&(&1 == ""))
      |> Enum.map(&String.split(&1, "\t"))
      |> Enum.map(fn [mod, test, tags, iter, elapsed] ->
        # Tags are a comma-separated list; empty entries are dropped.
        tags =
          String.split(tags, ",")
          |> Enum.map(&String.trim/1)
          |> Enum.reject(&(&1 == ""))
        iter = String.to_integer(iter)
        elapsed = String.to_integer(elapsed)
        {mod, test, tags, iter, elapsed}
      end)
    %Snapshot{options: Enum.into(options, %{}), tests: tests}
  end

  # NOTE(review): assumes the duration was serialized as a float;
  # String.to_float/1 raises on an integer string like "5" — confirm
  # writers always emit a float here.
  defp parse_opt("duration", val), do: String.to_float(val)
  defp parse_opt("mem stats", val), do: parse_bool(val)
  defp parse_opt("sys mem stats", val), do: parse_bool(val)
  defp parse_bool("false"), do: false
  defp parse_bool("true"), do: true

  # Compares two snapshots. For each test present in both, computes the
  # per-iteration time difference as a ratio (new/old) or percent change.
  # Returns `{diffs_grouped_by_module, tests_only_in_one_snapshot}`.
  def compare(%Snapshot{tests: tests1}, %Snapshot{tests: tests2}, format \\ :ratio) do
    {test_map1, name_set1} = extract_test_names(tests1)
    {test_map2, name_set2} = extract_test_names(tests2)
    common_tests = MapSet.intersection(name_set1, name_set2)
    diffs = Enum.reduce(common_tests, %{}, fn key, diffs ->
      {count, elapsed} = test_map1[key]
      result1 = elapsed / count
      {count, elapsed} = test_map2[key]
      result2 = elapsed / count
      Map.put(diffs, key, diff(result1, result2, format))
    end)
    grouped_diffs = Enum.reduce(diffs, %{}, fn {{mod, test}, diff}, groups ->
      Map.update(groups, mod, Map.put(%{}, test, diff), &Map.put(&1, test, diff))
    end)
    {grouped_diffs, symm_diff(name_set1, name_set2) |> Enum.into([])}
  end

  # Builds a `{%{name => {iter, elapsed}}, MapSet of names}` pair, where a
  # name is the `{module, test}` tuple.
  defp extract_test_names(tests) do
    Enum.reduce(tests, {%{}, MapSet.new}, fn {mod, test, _tags, iter, elapsed}, {map, set} ->
      name = {mod, test}
      {Map.put(map, name, {iter, elapsed}), MapSet.put(set, name)}
    end)
  end

  # Exactly 0.0 is rendered as "--" (no measurable change).
  def format_percent(0.0) do
    "--"
  end

  # Positive changes get an explicit leading "+".
  def format_percent(num) do
    str = if num > 0 do <<?+>> else <<>> end
    str <> :erlang.float_to_binary(num, decimals: @precision) <> "%"
  end

  defp diff(r1, r2, :ratio), do: Float.round(r2 / r1, @precision)
  defp diff(r1, r2, :percent), do: Float.round((r2 - r1) / r1 * 100, @precision)

  # Symmetric difference: tests appearing in only one of the two sets.
  defp symm_diff(set1, set2) do
    MapSet.union(MapSet.difference(set1, set2), MapSet.difference(set2, set1))
  end

  # Formats the snapshot with the given formatter module and prints it.
  def print(snapshot, format) do
    Snapshot.Formatter.format(snapshot, format)
    |> IO.puts()
  end

  # Canonical display name for a benchmark.
  def bench_name(mod, test), do: "[#{mod}] #{test}"

  # Serializes the snapshot as a JSON object with "options" and "tests"
  # fields, built by string interpolation over Benchfella.Json encoding.
  def to_json(%Snapshot{tests: tests, options: options}) do
    """
    {
      "options": #{Json.encode(options)},
      "tests": #{json_encode_tests(tests)}
    }
    """ |> String.trim_trailing
  end

  # Serializes a list of `{name, snapshot}` pairs as one JSON object keyed
  # by snapshot name.
  def snapshots_to_json(snapshots) when is_list(snapshots) do
    fields = Enum.map(snapshots, fn {name, snapshot} ->
      ~s("#{name}": #{to_json(snapshot)})
    end)
    "{" <> Enum.join(fields, ",") <> "}"
  end

  defp json_encode_tests(tests) do
    Enum.map(tests, fn {mod, test, tags, iter, elapsed} ->
      %{
        module: mod,
        test: test,
        tags: tags,
        iter: iter,
        elapsed: elapsed,
      }
    end)
    |> Json.encode()
  end
end
|
lib/benchfella/snapshot.ex
| 0.538741 | 0.431704 |
snapshot.ex
|
starcoder
|
defmodule HAP.CharacteristicDefinition do
  @moduledoc """
  A behaviour which encapsulates the functions required to define a characteristic.
  At runtime, characteristics are modeled via the `HAP.Characteristic` struct which
  contains the runtime values for the characteristic itself, as well as metadata about
  the characteristic. A `HAP.CharacteristicDefinition` is used to provide the template
  values for these fields. HAP contains definitions for many common HomeKit characteristics
  already, and users may define other characteristics by providing an implementation of
  this behaviour as the first value in the characteristic definition tuple in a service.
  """

  @typedoc """
  The type of a characteristic as defined in Section 6.6.1 of Apple's [HomeKit Accessory Protocol Specification](https://developer.apple.com/homekit/).
  """
  @type type :: String.t()

  @typedoc """
  A permission of a characteristic as defined in Table 6.4 of Apple's [HomeKit Accessory Protocol Specification](https://developer.apple.com/homekit/).
  One of `pr`, `pw`, `ev`, `aa`, `tw`, `hd`, or `wr`
  """
  @type perm :: String.t()

  @typedoc """
  The format of a characteristic as defined in Table 6.5 of Apple's [HomeKit Accessory Protocol Specification](https://developer.apple.com/homekit/).
  One of `bool`, `uint8`, `uint16`, `uint32`, `uint64`, `int`, `float`, `string`, `tlv8`, or `data`
  """
  @type format :: String.t()

  @typedoc """
  The unit of measure of a characteristic's value
  """
  @type unit :: any()

  @doc """
  The HomeKit type code for this characteristic
  """
  @callback type :: type()

  @doc """
  The permissions to allow for this characteristic
  """
  @callback perms :: [perm()]

  @doc """
  The format of this characteristic's data
  """
  @callback format :: format()

  @doc """
  The minimum value allowed for this characteristic's value
  """
  @callback min_value :: HAP.Characteristic.value()

  @doc """
  The maximum value allowed for this characteristic's value
  """
  @callback max_value :: HAP.Characteristic.value()

  @doc """
  The step size by which this characteristic's value may change
  """
  @callback step_value :: HAP.Characteristic.value()

  @doc """
  The units of this Characteristic's value
  """
  @callback units :: unit()

  # Only type/perms/format are mandatory; range, step and unit metadata are
  # optional for definitions that do not constrain their values.
  @optional_callbacks min_value: 0, max_value: 0, step_value: 0, units: 0
end
|
lib/hap/characteristic_definition.ex
| 0.894766 | 0.80871 |
characteristic_definition.ex
|
starcoder
|
defmodule Transmog do
  @moduledoc """
  `Transmog` is a module which provides the ability to map keys on nested maps,
  lists and structs to new values. It is useful for when you have external data
  that you want to convert into an internal format easily. This recursive
  transformation is made using an internal concept known as key pairs.
  ## Key Pairs
  Key pairs are a list of two-tuples which represent a mapping from a key in the
  source map to a key in the destination map. The key mapping does not have to
  be exhaustive and any values that are skipped are simply added to the result.
  ## Examples
      #=> "credentials", "first_name" to :identity, :first_name
      [{["credentials", "first_name"], [:identity, :first_name]}]
  When key pairs are simple they can be represented using an internal DSL which
  is a dot notation string. These dot notation strings also support atoms if you
  prefix them with `:`.
  ## Examples
      #=> Same as the previous example
      [{"credentials.first_name", ":identity.:first_name"}]
  ## Parsing
  All of the supported key pair formats implement the `Transmog.Parser`
  protocol. Technically if you wanted to add support for a different type then
  you could implement the protocol for them.
  ## Formatting
  We validate key pairs when they are provided to the main entrypoint of the
  library, `format/2` and `format!/2`. If the key pairs are not valid then we
  will let you know with an error.
  ## Examples
      #=> Notice: key paths must be equal length!
      iex> key_paths = [{"credentials", ":identity.:first_name"}]
      iex> Transmog.format(%{"credentials" => "<NAME>"}, key_paths)
      {:error, :invalid_key_pairs}
      iex> key_paths = [{"", ":last_name"}]
      iex> Transmog.format(%{}, key_paths)
      {:error, :invalid_key_path}
  Once your key pairs are validated you can start to use them to transform your
  nested maps and lists.
  ## Examples
      #=> Notice: you need to be explicit about which keys you want updated
      iex> key_paths = [
      ...>   {"credentials", ":identity"},
      ...>   {"credentials.first_name", ":identity.:first_name"}
      ...> ]
      iex> source = %{"credentials" => %{"first_name" => "Tom"}}
      iex> {:ok, result} = Transmog.format(source, key_paths)
      iex> result
      %{identity: %{first_name: "Tom"}}
      iex> key_paths = [
      ...>   {"credentials", ":identity"},
      ...>   {"credentials.first_name", ":identity.:first_name"}
      ...> ]
      iex> source = [
      ...>   %{"credentials" => %{"first_name" => "John"}},
      ...>   %{"credentials" => %{"first_name" => "Sally"}}
      ...> ]
      iex> {:ok, result} = Transmog.format(source, key_paths)
      iex> result
      [%{identity: %{first_name: "John"}}, %{identity: %{first_name: "Sally"}}]
  If you know that your key pairs are valid then you can use `format!/2` instead
  and your results will be unwrapped automatically for you.
  ## Examples
      iex> key_paths = [{"name", ":name"}]
      iex> source = %{"name" => "Jimmy"}
      iex> Transmog.format!(source, key_paths)
      %{name: "Jimmy"}
  You can even transform your structs. When a struct is encountered during the
  parse it will be converted into a map automatically for you.
  """

  alias Transmog.KeyPairs

  @typedoc """
  `t:key_paths/0` is the type for a single tuple of key paths. Both sides can
  be any type as long as they can be parsed by `Transmog.Parser`.
  """
  @type key_paths :: {term, term}

  @doc """
  `format/2` takes a source value and either a list of key paths or a
  `Transmog.KeyPair` struct directly and performs the key transformation on the
  value.
  If the key paths are given then they will be parsed using
  `Transmog.KeyPairs.parse/1` and will report errors if any occur during that.
  ## Examples
      iex> key_paths = [{"a", ":a"}, {"a.b", ":a.:b"}]
      iex> source = %{"a" => %{"b" => "c"}}
      iex> {:ok, result} = Transmog.format(source, key_paths)
      iex> result
      %{a: %{b: "c"}}
      iex> key_paths = [{"a", ":a"}, {"a.b", ":a.:b"}]
      iex> {:ok, %Transmog.KeyPairs{} = key_pairs} = Transmog.KeyPairs.parse(key_paths)
      iex> source = %{"a" => %{"b" => "c"}}
      iex> {:ok, result} = Transmog.format(source, key_pairs)
      iex> result
      %{a: %{b: "c"}}
  """
  @spec format(source :: term, mapping :: KeyPairs.t() | list(key_paths)) ::
          {:ok, term} | KeyPairs.error()
  # Already-parsed key pairs can be applied directly.
  def format(source, %KeyPairs{} = key_pairs), do: {:ok, do_format(source, key_pairs)}

  # Raw key paths are parsed first; parse errors fall through the `with`.
  def format(source, key_paths) do
    with {:ok, %KeyPairs{} = key_pairs} <- KeyPairs.parse(key_paths) do
      format(source, key_pairs)
    end
  end

  @doc """
  `format!/2` takes a source value and either a list of key paths or a
  `Transmog.KeyPair` struct directly and performs the key transformation on the
  value.
  This function will raise an error if the `Transmog.KeyPair` struct cannot be
  created by parsing the key paths. The result will be automatically unwrapped
  if the operation is successful.
  ## Examples
      iex> key_paths = [{"a", ":a"}, {"a.b", ":a.:b"}]
      iex> source = %{"a" => %{"b" => "c"}}
      iex> Transmog.format!(source, key_paths)
      %{a: %{b: "c"}}
      iex> key_paths = [{"a", ":a"}, {"a.b", ":a.:b"}]
      iex> %Transmog.KeyPairs{} = key_pairs = Transmog.KeyPairs.parse!(key_paths)
      iex> source = %{"a" => %{"b" => "c"}}
      iex> Transmog.format!(source, key_pairs)
      %{a: %{b: "c"}}
  """
  @spec format!(source :: term, mapping :: KeyPairs.t() | list(key_paths)) :: term
  def format!(source, %KeyPairs{} = key_pairs), do: do_format(source, key_pairs)

  def format!(source, key_paths) do
    %KeyPairs{} = key_pairs = KeyPairs.parse!(key_paths)
    format!(source, key_pairs)
  end

  # Formats a single level of a map or list. If the input is a list then the
  # formatter is run over each map in the list. If the input is a map then each
  # key is formatted and each value has `do_format/3` called on it.
  # `prefix` accumulates the path of keys walked so far, so nested keys can be
  # matched against full key paths.
  @spec do_format(source :: term, key_pairs :: KeyPairs.t(), prefix :: list(term)) :: term
  defp do_format(source, key_pairs, prefix \\ [])

  # Lists: format each element independently with the same prefix.
  defp do_format(source, %KeyPairs{} = key_pairs, prefix) when is_list(source) do
    Enum.map(source, &do_format(&1, key_pairs, prefix))
  end

  # Structs: converted to plain maps before formatting (struct identity is
  # intentionally dropped, per the moduledoc).
  defp do_format(%_{} = source, %KeyPairs{} = key_pairs, prefix) when is_list(prefix) do
    source
    |> Map.from_struct()
    |> do_format(key_pairs, prefix)
  end

  # Maps: rebuild each entry, translating the key at the current path and
  # recursing into the value with the extended path.
  defp do_format(%{} = source, %KeyPairs{} = key_pairs, prefix) when is_list(prefix) do
    for {key, value} <- source, into: %{} do
      current_path = prefix ++ [key]
      {match_path(key_pairs, current_path), do_format(value, key_pairs, current_path)}
    end
  end

  # Scalars and any other terms are returned unchanged.
  defp do_format(source, _, _), do: source

  # Given the key pairs and path, attempts to find a match in the key pairs
  # list. If no match is found then the key that was passed will be returned.
  # Because we know the list isn't empty and that we fallback to the passed
  # key, we know that `hd/1` should never raise.
  @spec match_path(key_pairs :: KeyPairs.t(), path :: nonempty_list(term)) :: term
  defp match_path(%KeyPairs{} = key_pairs, path) when is_list(path) do
    key_pairs
    |> KeyPairs.find_match(path)
    |> Enum.reverse()
    |> hd()
  end
end
|
lib/transmog.ex
| 0.921247 | 0.540014 |
transmog.ex
|
starcoder
|
defmodule Application.Behaviour do
  @moduledoc """
  Default callbacks for applications.
  In Erlang/OTP, an application is a component that can be started
  and stopped as a unit, and which can be reused in other systems.
  The first step in creating an application is to define an application specification.
  For example, if your application is named `:my_app`, an app specification
  should exist at `ebin/my_app.app`. This file is usually defined by
  build tools like Mix.
  With the app specification in hand, we must define
  application module callbacks that control how to start and stop
  instances of the application. This module is about defining such callbacks.
  There are two callbacks which must be implemented:
  1. `start(type, args)` - must return `{ :ok, pid }` or
     `{ :ok, pid, state }`, where `pid` is the process identifier
     of the supervisor tree root and `state` is application defined
     state information;
  2. `stop(state)` - receives the `state` returned by `start` and should
     do any necessary clean up. Notice that shutting down the supervisor
     is automatically handled by the VM;
  When using this module, it tags the module behaviour as
  `:application` and provides a default `stop/1` callback. The `start/2` callback
  still needs to be implemented by the user.
  You can learn more about the `:application` module, the application
  specification and the application module callbacks from these sources:
  * http://www.erlang.org/doc/man/application.html
  * http://www.erlang.org/doc/design_principles/applications.html
  * http://learnyousomeerlang.com/building-otp-applications
  ## Example
      defmodule MyApp do
        use Application.Behaviour
        def start(_type, args) do
          MyApp.Sup.start_link(args)
        end
      end
  """
  @doc false
  defmacro __using__(_) do
    # `location: :keep` makes stack traces point at this file rather than
    # the using module, so the injected default is easy to trace.
    quote location: :keep do
      # Adopt the Erlang :application behaviour; start/2 must still be
      # implemented by the using module.
      @behaviour :application

      @doc false
      def stop(_state) do
        :ok
      end

      # Let the using module replace the no-op default stop/1 with its own.
      defoverridable [stop: 1]
    end
  end
end
|
lib/elixir/lib/application/behaviour.ex
| 0.825343 | 0.493164 |
behaviour.ex
|
starcoder
|
defmodule Stats do
  @moduledoc """
  Stats Module

  Keeps per-`{type, name, topic, partition}` counters and latency histograms
  in a public ETS table.

  NOTE(review): this module never calls `:ets.new/2` — the `:stats_table`
  table is presumably created and owned elsewhere; confirm the owner process
  creates it before any function here is used.
  """

  @table :stats_table

  @doc """
  Initializes a counter (to `0`) or a latency histogram (to its empty shape)
  for the given name/topic/partition, overwriting any existing entry.
  """
  def create(:counter, name, topic, partition) do
    :ets.insert(@table, {key(:counter, name, topic, partition), empty(:counter)})
  end

  def create(:latency, name, topic, partition) do
    key = key(:latency, name, topic, partition)
    entry = empty(:latency)
    :ets.insert(@table, {key, entry})
  end

  @doc "Deletes every entry in the stats table."
  def reset_all() do
    :ets.delete_all_objects(@table)
  end

  @doc "Resets a single entry back to its empty value (alias of `create/4`)."
  def reset(counter, name, topic, partition) do
    create(counter, name, topic, partition)
  end

  @doc """
  Atomically increments a counter by `value` (default `1`).

  The `:queue_in`/`:queue_out` counters additionally maintain a derived
  `:queue` gauge (in minus out), and `:delayed_in`/`:delayed_out` a derived
  `:delayed` gauge. Always returns `true`.
  """
  def increment(:counter, name, topic, partition, value \\ 1) do
    key = key(:counter, name, topic, partition)
    :ets.update_counter(@table, key, value, {key, 0})
    update_gauge(name, topic, partition, value)
    true
  end

  # Derived gauges: `:queue` tracks queue_in - queue_out, `:delayed` tracks
  # delayed_in - delayed_out. Any other counter name has no derived gauge.
  defp update_gauge(:queue_in, topic, partition, value), do: bump_gauge(:queue, topic, partition, value)
  defp update_gauge(:queue_out, topic, partition, value), do: bump_gauge(:queue, topic, partition, -value)
  defp update_gauge(:delayed_in, topic, partition, value), do: bump_gauge(:delayed, topic, partition, value)
  defp update_gauge(:delayed_out, topic, partition, value), do: bump_gauge(:delayed, topic, partition, -value)
  defp update_gauge(_name, _topic, _partition, _value), do: nil

  # Fix: the default object inserted when the gauge key is missing must carry
  # the gauge's own key — the original passed the source counter's key as the
  # default (`{key, 0}` with the :queue_in/:delayed_in key).
  defp bump_gauge(name, topic, partition, value) do
    gauge_key = key(:counter, name, topic, partition)
    :ets.update_counter(@table, gauge_key, value, {gauge_key, 0})
  end

  @doc """
  Returns the current value for the given entry, or the empty value for that
  entry type when it does not exist.
  """
  def get(counter_type, name, topic, partition) do
    key = key(counter_type, name, topic, partition)

    case :ets.lookup(@table, key) do
      [{^key, val}] -> val
      _ -> empty(counter_type)
    end
  end

  @doc """
  The zero value for each entry type. For latency histograms, `-1` min/max
  means "no samples recorded yet".
  """
  def empty(:counter), do: 0

  def empty(:latency), do: %{
    min: -1,
    max: -1,
    _smaller_10: 0,
    _10_100: 0,
    _100_1k: 0,
    _1k_10k: 0,
    _larger_10k: 0
  }

  @doc "Resets the given entry for every partition in `partitions`."
  def reset_many(counter_type, name, topic, partitions) when counter_type in [:counter, :latency] do
    Enum.each(partitions, fn partition ->
      reset(counter_type, name, topic, partition)
    end)
  end

  @doc """
  Aggregates an entry across `partitions`: counters are summed; latency
  histograms are merged (min of mins, max of maxes, summed buckets).
  """
  def get_many(:counter, name, topic, partitions) do
    Enum.reduce(partitions, 0, fn partition, acc ->
      acc + get(:counter, name, topic, partition)
    end)
  end

  def get_many(:latency, name, topic, partitions) do
    Enum.reduce(partitions, empty(:latency), fn partition, acc ->
      entry = get(:latency, name, topic, partition)

      acc
      |> merge_min(entry)
      |> merge_max(entry)
      |> merge_buckets(entry)
    end)
  end

  # `-1` is the "no samples" sentinel; an empty entry must not override a
  # real min/max. (Fix: the original treated an empty partition's -1 as a
  # smaller min / kept it as the running value, corrupting the merge.)
  defp merge_min(acc, %{min: -1}), do: acc
  defp merge_min(%{min: -1} = acc, %{min: value}), do: %{acc | min: value}
  defp merge_min(%{min: acc_min} = acc, %{min: value}) when value < acc_min, do: %{acc | min: value}
  defp merge_min(acc, _entry), do: acc

  defp merge_max(acc, %{max: -1}), do: acc
  defp merge_max(%{max: -1} = acc, %{max: value}), do: %{acc | max: value}
  defp merge_max(%{max: acc_max} = acc, %{max: value}) when value > acc_max, do: %{acc | max: value}
  defp merge_max(acc, _entry), do: acc

  defp merge_buckets(acc, entry) do
    %{
      acc |
      _smaller_10: acc[:_smaller_10] + entry[:_smaller_10],
      _10_100: acc[:_10_100] + entry[:_10_100],
      _100_1k: acc[:_100_1k] + entry[:_100_1k],
      _1k_10k: acc[:_1k_10k] + entry[:_1k_10k],
      _larger_10k: acc[:_larger_10k] + entry[:_larger_10k]
    }
  end

  @doc """
  Records a latency sample `value` into the histogram for the given
  name/topic/partition, creating the histogram if it does not exist.
  """
  def report(:latency, name, topic, partition, value) do
    key = key(:latency, name, topic, partition)

    entry =
      case :ets.lookup(@table, key) do
        [{^key, val}] when is_map(val) -> val
        # Missing or malformed entry: start from the empty histogram.
        _ -> empty(:latency)
      end
      |> update_min(value)
      |> update_max(value)
      |> update_buckets(value)

    :ets.insert(@table, {key, entry})
  end

  defp update_min(entry, value) do
    if entry[:min] == -1 or value < entry[:min] do
      Map.put(entry, :min, value)
    else
      entry
    end
  end

  defp update_max(entry, value) do
    if entry[:max] == -1 or value > entry[:max] do
      Map.put(entry, :max, value)
    else
      entry
    end
  end

  # Histogram bucket boundaries: <10, <100, <1k, <10k, and everything above.
  defp update_buckets(entry, value) do
    cond do
      value < 10 ->
        Map.put(entry, :_smaller_10, entry[:_smaller_10] + 1)

      value < 100 ->
        Map.put(entry, :_10_100, entry[:_10_100] + 1)

      value < 1_000 ->
        Map.put(entry, :_100_1k, entry[:_100_1k] + 1)

      value < 10_000 ->
        Map.put(entry, :_1k_10k, entry[:_1k_10k] + 1)

      true ->
        Map.put(entry, :_larger_10k, entry[:_larger_10k] + 1)
    end
  end

  # Normalizes the partition component of the ETS key to an integer;
  # non-binary, non-integer partitions fall back to 0 (preserves original
  # behavior).
  defp key(counter_type, name, topic, partition) do
    cond do
      is_bitstring(partition) ->
        {counter_type, name, topic, String.to_integer(partition)}

      is_integer(partition) ->
        {counter_type, name, topic, partition}

      true ->
        {counter_type, name, topic, 0}
    end
  end
end
|
apps/stats/lib/stats.ex
| 0.595493 | 0.484685 |
stats.ex
|
starcoder
|
defmodule Csvex.Parse do
  @moduledoc """
  Module for parsing CSV.
  To reach higher performance, all the fold functions
  support selecting which columns to be returned from the underlying NIF. See
  CsvexParseTest for example.
  """

  # NOTE(review): all decoding is delegated to the Erlang `:csv` library;
  # the functions here only adapt option defaults, folder arities and column
  # indexing between Elixir (0-based) and Erlang (1-based) conventions.

  @type options :: [option]
  @type option :: {:delimiter, :tab | :comma}
  @type row :: [binary]
  # A folder receives each decoded row plus the accumulator.
  @type folder :: (row, any -> any)
  # A folder maker defers folder construction (and column capture selection)
  # until decoding starts; see `erlang_folder/1`.
  @type folder_maker :: folder | {:maker, (... -> {folder, capture})}
  @type capture :: [non_neg_integer] # List of column indexes where the left-most is 0.
  @type generator :: {(generator_state -> {binary, generator_state}), generator_state}
  @type generator_state :: any

  @doc """
  Parses one CSV string completely. It provides the simplest interface but
  likely not the best performance.
  """
  @spec string(binary, options) :: [row]
  def string(csv, options \\ []) do
    :csv.decode_binary(csv, default_options(options))
  end

  @doc """
  Folds over the rows in one CSV string.
  """
  @spec string_fold(binary, folder_maker, any, options) :: any
  def string_fold(csv, folder, acc, options \\ []) do
    :csv.decode_binary_fold(erlang_folder(folder), acc, csv, default_options(options))
  end

  @doc """
  Same as string_fold/4 but working on a gzipped CSV.
  """
  @spec gzip_fold(binary, folder_maker, any, options) :: any
  def gzip_fold(csv_gzip, folder, acc, options \\ []) do
    :csv.decode_gzip_fold(
      erlang_folder(folder),
      acc,
      csv_gzip,
      default_options(options)
    )
  end

  @doc """
  The generic parsing function which can handle all the use cases. The other
  functions are convenience functions, which could be using fold/4 under the hood
  (but they map to the convenience functions available in the underlying `csv`
  lib instead).
  The csv_generator should generate sequential chunks of CSV. There is no requirement on
  the size of the chunks and they can include partial rows and values.
  """
  @spec fold(generator, folder_maker, any, options) :: any
  def fold(csv_generator, folder, acc, options \\ []) do
    :csv.decode_fold(erlang_folder(folder), acc, csv_generator, default_options(options))
  end

  # Elixir uses 0-based indexing (like it would be C), so we need to change it
  # to 1-based indexing (because Erlang is not C).
  defp erlang_folder({:maker, folder_maker}) do
    {:arity, arity} = :erlang.fun_info(folder_maker, :arity)
    {:maker, erlang_folder_maker(folder_maker, arity)}
  end

  # Plain folders carry no capture list, so nothing needs re-indexing.
  defp erlang_folder(folder) do
    folder
  end

  # TODO: Replace this with a macro which doesn't have an arity limit.
  # Each clause wraps the maker so the capture list it returns is converted
  # to 1-based indexing before being handed to the Erlang lib.
  defp erlang_folder_maker(folder_maker, 0) do
    fn ->
      {folder, capture} = folder_maker.()
      {folder, erlang_indexing(capture)}
    end
  end

  defp erlang_folder_maker(folder_maker, 1) do
    fn a ->
      {folder, capture} = folder_maker.(a)
      {folder, erlang_indexing(capture)}
    end
  end

  defp erlang_folder_maker(folder_maker, 2) do
    fn a, b ->
      {folder, capture} = folder_maker.(a, b)
      {folder, erlang_indexing(capture)}
    end
  end

  # NOTE(review): this clause returns an error tuple rather than a function,
  # so an unsupported arity surfaces as a badarg somewhere inside the :csv
  # lib instead of a clear error here — confirm this is intended.
  defp erlang_folder_maker(_, _) do
    {:error, "Folder maker arity not supported"}
  end

  # Shift 0-based capture indexes to the 1-based indexes Erlang expects.
  defp erlang_indexing(indexes) do
    for n <- indexes, do: n + 1
  end

  # Caller options win over the :comma default; `return: :binary` is always
  # forced so rows come back as binaries.
  defp default_options(options) do
    [delimiter: :comma]
    |> Keyword.merge(options)
    |> Keyword.merge(return: :binary)
  end
end
|
lib/parse/parse.ex
| 0.727685 | 0.520496 |
parse.ex
|
starcoder
|
defmodule Resourceful.Collection do
  @moduledoc """
  Provides a common interface for querying and retrieving collections.

  Delegated modules designed to interact directly with the underlying data or
  data sources must return lists of resources. For instance, when using `Ecto`,
  this module should return lists of structs or maps and not queries that have
  not been executed yet.

  ## Data Sources

  A `data_source` can be anything from an Ecto schema, to a module that
  interacts with a remote API, to a list, as long as there is an underlying
  module to support the common interfaces. (For now, that's just Ecto.)
  """

  alias Resourceful.Collection.{Delegate, Filter, Sort}

  @default_page_size 25

  @type name() :: atom() | String.t()
  @type queryable() :: name() | {name(), name()} | Resourceful.Type.queryable()

  @type page_info() :: %{
          number: integer(),
          resources: integer(),
          size: integer(),
          total: integer()
        }

  @type with_page_info() :: {[any()], page_info()}
  @type with_page_info(type) :: {[type], page_info()}

  @doc """
  Returns a list of resources that may be filtered and sorted depending on
  options. Resources will always be paginated.

  Args:
    * `data_source`: See module overview.
    * `opts`: Keyword list of options

  Options:
    * `filter`: See `Resourceful.Collection.Filter.call/2`
    * `page`: Pagination options.
    * `sort`: See `Resourceful.Collection.Sort.call/2`

  Additionally, see settings for the delegated module as it may take additional
  options.
  """
  @spec all(any(), keyword()) :: [any()]
  def all(data_source, opts \\ []) do
    data_source
    |> query(opts)
    |> paginate(opts)
  end

  @doc """
  Same as `all/2` but also returns pagination metadata for the collection.
  """
  @spec all_with_page_info(any(), keyword()) :: with_page_info()
  def all_with_page_info(data_source, opts \\ []) do
    data_source
    |> query(opts)
    |> paginate_with_info(opts)
  end

  @doc """
  Checks if `data_source` contains any resources.

  Args:
    * `data_source`: See module overview.
    * `opts`: Keyword list of options

  Options: See settings for the delegated module (e.g. `Resourceful.Collection.Ecto`).
  """
  @spec any?(any(), keyword()) :: boolean()
  def any?(data_source, opts \\ []) do
    Delegate.collection(data_source).any?(data_source, opts)
  end

  @doc """
  Returns the configured default page size (the `:default_page_size` key in
  the `:resourceful` application environment), falling back to #{@default_page_size}.
  """
  @spec default_page_size() :: integer()
  def default_page_size do
    Application.get_env(:resourceful, :default_page_size, @default_page_size)
  end

  @doc """
  Applies `filters` to `data_source` and returns the matching resources.
  """
  @spec filter(any(), keyword(), keyword()) :: any()
  def filter(data_source, filters, opts \\ []) do
    data_source
    |> Filter.call(filters)
    |> delegate_all(opts)
  end

  @doc """
  Returns the total number of resources and pages based on `page_size` in a
  `data_source`.

  Args:
    * `data_source`: See module overview.
    * `opts`: Keyword list of options

  Options: See settings for the delegated module (e.g. `Resourceful.Collection.Ecto`).
  """
  @spec page_info(any(), keyword()) :: page_info()
  def page_info(data_source, opts) when is_list(opts) do
    page_info(data_source, page_size_or_default(opts), opts)
  end

  # FIX: the original declared `page_info(data_source, page_size, opts \\ [])`
  # which generates a second `page_info/2` that conflicts with the keyword
  # clause above. An explicit integer-guarded /2 clause keeps the original
  # calling convention while removing the defaults conflict.
  def page_info(data_source, page_size) when is_integer(page_size) do
    page_info(data_source, page_size, [])
  end

  @spec page_info(any(), integer(), keyword()) :: page_info()
  def page_info(data_source, page_size, opts) when is_integer(page_size) do
    resources = total(data_source, opts)

    %{
      number: page_number_or_default(opts),
      resources: resources,
      size: page_size,
      total: ceil(resources / page_size)
    }
  end

  @doc "Returns the requested page number from `opts`, defaulting to `1`."
  @spec page_number_or_default(keyword()) :: integer()
  def page_number_or_default(opts), do: get_in(opts, [:page, :number]) || 1

  @doc "Returns the requested page size from `opts`, defaulting to `default_page_size/0`."
  @spec page_size_or_default(keyword()) :: integer()
  def page_size_or_default(opts), do: get_in(opts, [:page, :size]) || default_page_size()

  @doc """
  Returns page `number` of `data_source` with `size` resources per page.
  """
  @spec paginate(any(), integer(), integer(), keyword()) :: [any()]
  def paginate(data_source, number, size, opts \\ [])
      when is_integer(number) and is_integer(size) do
    data_source
    |> Delegate.paginate(number, size)
    |> delegate_all(opts)
  end

  @spec paginate(any(), keyword()) :: [any()]
  def paginate(data_source, opts \\ []) do
    paginate(
      data_source,
      page_number_or_default(opts),
      page_size_or_default(opts),
      opts
    )
  end

  @doc """
  Returns a page of resources together with its pagination metadata.
  """
  @spec paginate_with_info(any(), keyword()) :: with_page_info()
  def paginate_with_info(data_source, opts \\ []) do
    {paginate(data_source, opts), page_info(data_source, opts)}
  end

  @doc """
  Applies the `:filter` and `:sort` options to `data_source` without
  paginating or executing it.
  """
  @spec query(any(), keyword()) :: any()
  def query(data_source, opts) do
    data_source
    |> Filter.call(Keyword.get(opts, :filter, []))
    |> Sort.call(Keyword.get(opts, :sort, []))
  end

  @doc """
  Applies `sorters` to `data_source` and returns the sorted resources.
  """
  @spec sort(any(), keyword(), keyword()) :: any()
  def sort(data_source, sorters, opts \\ []) do
    data_source
    |> Sort.call(sorters)
    |> delegate_all(opts)
  end

  @doc """
  Returns the total number of resources in a `data_source`.

  Args:
    * `data_source`: See module overview.
    * `opts`: Keyword list of options

  Options: See settings for the delegated module (e.g. `Resourceful.Collection.Ecto`).
  """
  @spec total(any(), keyword()) :: integer()
  def total(data_source, opts \\ []) do
    Delegate.collection(data_source).total(data_source, opts)
  end

  # Resolves the delegated collection module and fetches all resources.
  defp delegate_all(data_source, opts) do
    Delegate.collection(data_source).all(data_source, opts)
  end
end
|
lib/resourceful/collection.ex
| 0.916142 | 0.538316 |
collection.ex
|
starcoder
|
defmodule Cldr.Calendar.Base.Week do
  @moduledoc false
  # Base implementation shared by week-based calendars, where a date is
  # represented as {year, week, day} rather than {year, month, day}.
  # `config` is a `Cldr.Calendar.Config` struct (first/last day rules,
  # weeks-per-month distribution within a quarter, etc.).
  alias Cldr.Calendar.Config
  alias Cldr.Calendar.Base.Month
  alias Calendar.ISO
  alias Cldr.Math

  # Structural constants of a week-based calendar.
  @days_in_week 7
  @weeks_in_quarter 13
  @months_in_quarter 3
  @months_in_year 12
  @weeks_in_long_year 53
  @weeks_in_normal_year 52
  @quarters_in_year 4

  defmacro __using__(options \\ []) do
    quote bind_quoted: [options: options] do
      @options options
      @before_compile Cldr.Calendar.Compiler.Week
    end
  end

  # A date is valid when the week exists for that year (52 or 53 weeks)
  # and the day falls within the 7-day week.
  def valid_date?(year, week, day, config) do
    week <= weeks_in_year(year, config) and day in 1..days_in_week()
  end

  def year_of_era(year, config) do
    {_, year} = Cldr.Calendar.start_end_gregorian_years(year, config)
    Calendar.ISO.year_of_era(year)
  end

  # Quarters are 13 weeks but if there
  # are 53 weeks in a year then 4th
  # quarter is longer
  def quarter_of_year(_year, @weeks_in_long_year, _day, _config) do
    4
  end

  def quarter_of_year(_year, week, _day, _config) do
    div(week - 1, @weeks_in_quarter) + 1
  end

  # Week 53 always belongs to the last month of the year.
  def month_of_year(_year, @weeks_in_long_year, _day, _config) do
    +12
  end

  def month_of_year(year, week, day, %Config{weeks_in_month: weeks_in_month} = config) do
    quarter = quarter_of_year(year, week, day, config)
    months_in_prior_quarters = (quarter - 1) * @months_in_quarter
    week_in_quarter = Math.amod(week, @weeks_in_quarter)
    [m1, m2, _m3] = weeks_in_month

    # Locate the month within the quarter from the configured
    # weeks-per-month split (e.g. [4, 4, 5]).
    month_in_quarter =
      cond do
        week_in_quarter <= m1 -> 1
        week_in_quarter <= m1 + m2 -> 2
        true -> 3
      end

    months_in_prior_quarters + month_in_quarter
  end

  def week_of_year(year, week, _day, _config) do
    {year, week}
  end

  # Converts to the gregorian calendar and delegates the ISO week calculation.
  def iso_week_of_year(year, week, day, config) do
    with {:ok, date} <- Date.new(year, week, day, config.calendar) do
      {:ok, %{year: year, month: month, day: day}} = Date.convert(date, Cldr.Calendar.Gregorian)
      Cldr.Calendar.Gregorian.iso_week_of_year(year, month, day)
    end
  end

  # In this calendar the :month field of a `Date` holds the week number
  # (dates are built via `Date.new(year, week, day, ...)`), hence
  # `first.month` below is the first week of the month.
  def week_of_month(year, week, day, config) do
    month = month_of_year(year, week, day, config)
    %Date.Range{first: first} = month(year, month, config)
    {month, week - first.month + 1}
  end

  def day_of_era(year, week, day, config) do
    with {:ok, date} <- Date.new(year, week, day, config.calendar) do
      {:ok, %{year: year, month: month, day: day}} = Date.convert(date, Calendar.ISO)
      Calendar.ISO.day_of_era(year, month, day)
    end
  end

  # Ordinal day within the year (1-based).
  def day_of_year(year, week, day, config) do
    start_of_year = first_gregorian_day_of_year(year, config)
    this_day = first_gregorian_day_of_year(year, config) + week_to_days(week) + day
    this_day - start_of_year + 1
  end

  # Day of week depends on whether the calendar is anchored to the first
  # or the last configured weekday of the year.
  def day_of_week(_year, _week, day, %{first_or_last: :first} = config) do
    first_day = config.day_of_week
    Math.amod(first_day + day - 1, days_in_week())
  end

  def day_of_week(_year, _week, day, %{first_or_last: :last} = config) do
    last_day = config.day_of_week
    Math.amod(last_day + day, days_in_week())
  end

  def months_in_year(year, _config) do
    Calendar.ISO.months_in_year(year)
  end

  def weeks_in_year(year, config) do
    if long_year?(year, config), do: weeks_in_long_year(), else: weeks_in_normal_year()
  end

  def weeks_in_long_year do
    @weeks_in_long_year
  end

  def weeks_in_normal_year do
    @weeks_in_normal_year
  end

  # The 4th quarter absorbs the extra week of a 53-week year.
  def weeks_in_quarter(year, quarter, config) do
    if quarter == @quarters_in_year && long_year?(year, config) do
      @weeks_in_quarter + 1
    else
      @weeks_in_quarter
    end
  end

  def days_in_year(year, config) do
    if long_year?(year, config) do
      @weeks_in_long_year * @days_in_week
    else
      @weeks_in_normal_year * @days_in_week
    end
  end

  # The last month of the year gets the extra week in a long year.
  def days_in_month(year, @months_in_year, config) do
    %Config{weeks_in_month: [_, _, weeks_in_last_month]} = config
    weeks = if long_year?(year, config), do: weeks_in_last_month + 1, else: weeks_in_last_month
    weeks * days_in_week()
  end

  def days_in_month(_year, month, config) do
    %Config{weeks_in_month: weeks_in_month} = config
    month_in_quarter = Math.amod(rem(month, @months_in_quarter), @months_in_quarter)
    Enum.at(weeks_in_month, month_in_quarter - 1) * days_in_week()
  end

  def days_in_week do
    @days_in_week
  end

  def days_in_week(_year, _week) do
    @days_in_week
  end

  # Date range covering the whole year.
  def year(year, config) do
    with {:ok, first_day} <- Date.new(year, 1, 1, config.calendar),
         {:ok, last_day} <-
           Date.new(year, weeks_in_year(year, config), days_in_week(), config.calendar) do
      Date.range(first_day, last_day)
    end
  end

  # Date range covering one quarter.
  def quarter(year, quarter, config) do
    starting_week = (quarter - 1) * @weeks_in_quarter + 1
    ending_week = starting_week + weeks_in_quarter(year, quarter, config) - 1

    with {:ok, first_day} <- Date.new(year, starting_week, 1, config.calendar),
         {:ok, last_day} <- Date.new(year, ending_week, days_in_week(), config.calendar) do
      Date.range(first_day, last_day)
    end
  end

  # Date range covering one month, derived from the weeks-per-month split.
  def month(year, month, %{weeks_in_month: weeks_in_month} = config) do
    months_prior_in_quarter = rem(month - 1, @months_in_quarter)
    prior_quarters = Month.quarter_of_year(year, month, 1, config) - 1
    quarter_weeks_prior = prior_quarters * @weeks_in_quarter

    weeks_prior_in_quarter =
      weeks_in_month
      |> Enum.take(months_prior_in_quarter)
      |> Enum.sum()

    weeks_in_month =
      Enum.at(weeks_in_month, months_prior_in_quarter) +
        maybe_extra_week_for_long_year(year, month, config)

    first_week = quarter_weeks_prior + weeks_prior_in_quarter + 1
    last_week = first_week + weeks_in_month - 1
    {:ok, start_of_month} = Date.new(year, first_week, 1, config.calendar)
    {:ok, end_of_month} = Date.new(year, last_week, days_in_week(), config.calendar)
    Date.range(start_of_month, end_of_month)
  end

  # Date range covering one week.
  def week(year, week, config) do
    with {:ok, first_day} <- Date.new(year, week, 1, config.calendar),
         {:ok, last_day} <- Date.new(year, week, days_in_week(), config.calendar) do
      Date.range(first_day, last_day)
    end
  end

  def plus(year, week, day, config, :years, years, options) do
    new_year = year + years
    coerce? = Keyword.get(options, :coerce, false)
    {new_week, new_day} = Cldr.Calendar.month_day(new_year, week, day, config.calendar, coerce?)
    {new_year, new_week, new_day}
  end

  def plus(year, week, day, config, :quarters, quarters, options) do
    plus(year, week, day, config, :months, quarters * @months_in_quarter, options)
  end

  def plus(year, week, day, _config, :months, 0, _options) do
    {year, week, day}
  end

  # When adding a month the process is
  # 1. Capture the day of the month
  # 2. Get the first day of that month
  # 3. Add the number of days in that month leaving us
  #    with the first day of the next month
  # 4. Add back the day of the month, potentially
  #    coercing the last day of the month
  def plus(year, week, day, config, :months, 1 = n, options) do
    day_of_month = day_of_month(year, week, day, config)
    {year, week, day} = first_day_of_month(year, week, day, config)
    month_of_year = month_of_year(year, week, day, config)
    month_in_quarter = month_in_quarter(week, config)

    weeks_to_add =
      slice_weeks(month_in_quarter - 1, n, config) +
        maybe_extra_week_for_long_year(year, month_of_year, config)

    {year_increment, week} = Cldr.Math.div_mod(week + weeks_to_add, weeks_in_year(year, config))
    add_days(year + year_increment, week, day, day_of_month - 1, config, options)
  end

  # When subtracting a month the process is
  # 1. Work out the day of the month of the provided date
  # 2. Get the last day of the month
  # 3. Subtract the number of days in the month, leaving us with
  #    the last day of the previous month
  # 4. Get the first day of the previous month
  # 5. Add back the day of the month captured earlier - potentially coercing
  #    the last day of the month
  def plus(year, week, day, config, :months, -1 = n, options) do
    day_of_month = day_of_month(year, week, day, config)
    {year, week, day} = last_day_of_month(year, week, day, config)
    month_of_year = month_of_year(year, week, day, config)
    month_in_quarter = month_in_quarter(week, config)

    weeks_to_sub =
      slice_weeks(month_in_quarter - 1, abs(n), config) +
        maybe_extra_week_for_long_year(year, month_of_year, config)

    week = week - weeks_to_sub

    # IO.puts " Proposed week of previous month: #{week}"
    # Wrap into the previous year when we crossed the year boundary.
    {year, week} =
      if week < 1 do
        {year - 1, weeks_in_year(year - 1, config) - week}
      else
        {year, week}
      end

    {year, week, day} = first_day_of_month(year, week, day, config)
    add_days(year, week, day, day_of_month - 1, config, options)
  end

  # Accounting for long years is complex. So for now adding and
  # subtracting months is done one month at a time. Therefore
  # performance for large additions/subtractions will be poor.
  def plus(year, week, day, config, :months, months, options) when abs(months) > 1 do
    increment = if months > 0, do: 1, else: -1
    original_day_of_month = day_of_month(year, week, day, config)

    {year, week, day} =
      Enum.reduce(1..abs(months), {year, week, day}, fn _i, {year, week, day} ->
        plus(year, week, day, config, :months, increment, coerce: true)
      end)

    # Now reconcile the original date of the month with the
    # previously calculated day of the month.
    proposed_day_of_month = day_of_month(year, week, day, config)
    days_difference = original_day_of_month - proposed_day_of_month
    month = month_of_year(year, week, day, config)

    # NOTE(review): `month` is passed where add_days/6 takes a week —
    # confirm whether this is intended.
    if days_difference < 0 && original_day_of_month <= days_in_month(year, month, config) do
      add_days(year, month, day, days_difference, config, options)
    else
      {year, week, day}
    end
  end

  def add(naive_datetime, :year, step) do
    %{year: year, month: month, day: day} = Cldr.Calendar.plus(naive_datetime, :years, step)
    %NaiveDateTime{naive_datetime | year: year, month: month, day: day}
  end

  # 1-based day within the month.
  def day_of_month(year, week, day, config) do
    first_day = first_day_of_month(year, week, day, config)
    date_diff({year, week, day}, first_day, config) + 1
  end

  # Difference in days between two {year, week, day} dates.
  def date_diff({y2, w2, d2}, {y1, w1, d1}, config) do
    date_to_iso_days(y2, w2, d2, config) - date_to_iso_days(y1, w1, d1, config)
  end

  def add_days(year, week, day, days, config, options \\ [])

  def add_days(year, week, day, 0, _config, _options) do
    {year, week, day}
  end

  def add_days(year, week, day, days, config, options) do
    # With :coerce, never step past the last day of the current month.
    days_to_add =
      if Keyword.get(options, :coerce) do
        month = month_of_year(year, week, day, config)
        min(days_in_month(year, month, config) - 1, days)
      else
        days
      end

    iso_days = date_to_iso_days(year, week, day, config) + days_to_add
    date_from_iso_days(iso_days, config)
  end

  # Sums `n` entries of weeks_in_month starting at offset `from`.
  def slice_weeks(from, n, %{weeks_in_month: weeks_in_month}) do
    {_, weeks} = Enum.split(weeks_in_month, from)

    weeks
    |> Enum.take(n)
    |> Enum.sum()
  end

  def first_day_of_month(year, week, day, config) do
    quarters = quarter_of_year(year, week, day, config) - 1
    month_in_quarter = month_in_quarter(week, config) - 1
    week = quarters * @weeks_in_quarter + weeks_from_months(month_in_quarter, config) + 1
    {year, week, 1}
  end

  def last_day_of_month(year, week, day, config) do
    {year, week, day} = first_day_of_month(year, week, day, config)
    month_of_year = month_of_year(year, week, day, config)
    days_in_month = days_in_month(year, month_of_year, config)
    add_days(year, week, day, days_in_month - 1, config)
  end

  # Week 53 falls in the last month of the (4th) quarter.
  def month_in_quarter(week, _config) when week == @weeks_in_long_year do
    @months_in_quarter
  end

  def month_in_quarter(week, config) do
    week_in_quarter = Cldr.Math.amod(week, @weeks_in_quarter)
    month_from_weeks(week_in_quarter, config)
  end

  def weeks_from_months(0, _config) do
    0
  end

  # When months is positive we just sum the first n members of the
  # weeks_in_month list.
  def weeks_from_months(months, %{weeks_in_month: weeks_in_month}) when months > 0 do
    weeks_in_month
    |> Enum.take(months)
    |> Enum.sum()
  end

  # When months is negative
  # TODO May not be correct
  def weeks_from_months(months, %{weeks_in_month: weeks_in_month}) when months < 0 do
    {_, weeks_in_month} = List.pop_at(weeks_in_month, -1)

    weeks_in_month
    |> Enum.take(months)
    |> Enum.sum()
  end

  # For weeks <= 13
  def month_from_weeks(weeks, %Config{weeks_in_month: [m1, m2, m3]})
      when weeks <= @weeks_in_quarter do
    cond do
      weeks <= m1 -> 1
      weeks <= m1 + m2 -> 2
      weeks <= m1 + m2 + m3 -> 3
    end
  end

  def sign(number) when number < 0, do: -1
  def sign(_number), do: +1

  # First gregorian (ISO) day number of the week-calendar year, anchored on
  # the configured starting weekday and min-days-in-first-week rule.
  def first_gregorian_day_of_year(year, %Config{first_or_last: :first} = config) do
    {year, _} = Cldr.Calendar.start_end_gregorian_years(year, config)

    %{
      month_of_year: first_month,
      day_of_week: first_day,
      min_days_in_first_week: min_days
    } = config

    iso_days = ISO.date_to_iso_days(year, first_month, min_days)
    day_of_week = Cldr.Calendar.iso_days_to_day_of_week(iso_days)

    # The iso_days calculation is the last possible first day of the first week
    # All starting days are less than or equal to this day
    if first_day > day_of_week do
      iso_days + (first_day - days_in_week() - day_of_week)
    else
      iso_days - (day_of_week - first_day)
    end
  end

  def first_gregorian_day_of_year(year, %Config{first_or_last: :last} = config) do
    last_gregorian_day_of_year(year - 1, config) + 1
  end

  def last_gregorian_day_of_year(year, %Config{first_or_last: :first} = config) do
    first_gregorian_day_of_year(year + 1, config) - 1
  end

  def last_gregorian_day_of_year(year, %Config{first_or_last: :last} = config) do
    {_, year} = Cldr.Calendar.start_end_gregorian_years(year, config)

    %{
      month_of_year: last_month,
      day_of_week: last_day,
      min_days_in_first_week: min_days
    } = config

    days_in_last_month = ISO.days_in_month(year, last_month)
    iso_days = ISO.date_to_iso_days(year, last_month, days_in_last_month - min_days)
    day_of_week = Cldr.Calendar.iso_days_to_day_of_week(iso_days)

    if last_day <= day_of_week do
      iso_days - (day_of_week - last_day) + days_in_week()
    else
      iso_days - (day_of_week - last_day)
    end
  end

  # A long year has 53 weeks rather than 52.
  def long_year?(year, %Config{} = config) do
    first_day = first_gregorian_day_of_year(year, config)
    last_day = last_gregorian_day_of_year(year, config)
    days_in_year = last_day - first_day + 1
    div(days_in_year, days_in_week()) == @weeks_in_long_year
  end

  def date_to_iso_days(year, week, day, config) do
    {days, _day_fraction} = naive_datetime_to_iso_days(year, week, day, 0, 0, 0, {0, 6}, config)
    days
  end

  def date_from_iso_days(iso_day_number, config) do
    {year, week, day, _, _, _, _} = naive_datetime_from_iso_days({iso_day_number, {0, 6}}, config)
    {year, week, day}
  end

  # e.g. "2019-W05-1"
  def date_to_string(year, week, day) do
    "#{year}-W#{lpad(week)}-#{day}"
  end

  def naive_datetime_from_iso_days({days, day_fraction}, config) do
    {year, _month, _day} = Calendar.ISO.date_from_iso_days(days)
    first_day = first_gregorian_day_of_year(year, config)

    # The gregorian year of `days` may differ from the week-calendar year;
    # adjust when `days` falls before this year's start or past its end.
    {year, first_day} =
      cond do
        first_day > days ->
          {year - 1, first_gregorian_day_of_year(year - 1, config)}

        days - first_day + 1 > config.calendar.days_in_year(year) ->
          {year + 1, first_gregorian_day_of_year(year + 1, config)}

        true ->
          {year, first_day}
      end

    day_of_year = days - first_day + 1
    week = trunc(Float.ceil(day_of_year / days_in_week()))
    day = day_of_year - (week - 1) * days_in_week()
    {hour, minute, second, microsecond} = Calendar.ISO.time_from_day_fraction(day_fraction)
    {year, week, day, hour, minute, second, microsecond}
  end

  def naive_datetime_to_iso_days(year, week, day, hour, minute, second, microsecond, config) do
    days = first_gregorian_day_of_year(year, config) + week_to_days(week) + day - 1
    day_fraction = Calendar.ISO.time_to_day_fraction(hour, minute, second, microsecond)
    {days, day_fraction}
  end

  def datetime_to_string(
        year,
        month,
        day,
        hour,
        minute,
        second,
        microsecond,
        time_zone,
        zone_abbr,
        utc_offset,
        std_offset
      ) do
    date_to_string(year, month, day) <>
      " " <>
      Calendar.ISO.time_to_string(hour, minute, second, microsecond) <>
      Cldr.Calendar.offset_to_string(utc_offset, std_offset, time_zone) <>
      Cldr.Calendar.zone_to_string(utc_offset, std_offset, zone_abbr, time_zone)
  end

  def naive_datetime_to_string(
        year,
        month,
        day,
        hour,
        minute,
        second,
        microsecond
      ) do
    date_to_string(year, month, day) <>
      " " <>
      Calendar.ISO.time_to_string(hour, minute, second, microsecond)
  end

  # Zero-pads the week number to two digits for string output.
  defp lpad(week) when week < 10 do
    "0#{week}"
  end

  defp lpad(week) do
    week
  end

  defp week_to_days(week) do
    (week - 1) * days_in_week()
  end

  # The final month of a long (53-week) year receives one extra week.
  defp maybe_extra_week_for_long_year(year, @months_in_year, config) do
    if long_year?(year, config), do: 1, else: 0
  end

  defp maybe_extra_week_for_long_year(_year, _month, _config) do
    0
  end
end
|
lib/cldr/calendar/base/week.ex
| 0.660282 | 0.470189 |
week.ex
|
starcoder
|
defmodule ExUnited.Case do
  @moduledoc """
  This module makes it possible to seemlessly execute assertions and refutations
  within spawned nodes.
  You can setup your test module by either using `ExUnited.Case` instead of
  `ExUnit.Case`:

      defmodule MyNodesTest do
        use ExUnited.Case
      end

  Or by importing the `ExUnited.Case` module:

      defmodule MyNodesTest do
        use ExUnit.Case
        import ExUnited.Case
      end

  Writing assertions and refutations within the context of a certain spawned is
  pretty straight forward with the use of the `ExUnited.Case.as_node/2` function:

  ## Example

      defmodule MyNodesTest do
        use ExUnited.Case

        setup do
          {:ok, spawned} = ExUnited.spawn([:bruce, :clark])

          on_exit(fn ->
            ExUnited.teardown()
          end)

          spawned
        end

        test "assertions and refutations within node contexts", spawned do
          bruce = get_in(spawned, [:bruce, :node])

          as_node(bruce) do
            assert :"[email protected]" = Node.self()
            refute :"[email protected]" == Node.self()
          end

          as_node(:clark) do
            assert :"[email protected]" = Node.self()
            refute :"[email protected]" == Node.self()
          end
        end
      end

  See `ExUnited.Case.as_node/2` for more information.
  """

  @doc false
  # Invoked (via :rpc from a spawned node) to deliver an error raised inside
  # that node back to the originating test process. `pid_list` is a pid that
  # was serialized with :erlang.pid_to_list/1.
  def send_error(pid_list, message) do
    pid_list
    |> :erlang.list_to_pid()
    |> send(message)
  end

  @doc """
  Injects `use ExUnit.Case` and `import ExUnited.Case, except: [send_error: 2]`
  in the test module.
  Gets triggered after having put `use ExUnited.Case` in your test module.
  """
  defmacro __using__(_opts) do
    quote do
      use ExUnit.Case
      import ExUnited.Case, except: [send_error: 2]
    end
  end

  @doc """
  A convenience macro function for writing assertions and refutations from within
  spawned nodes.
  It makes writing tests so much easier and more readable. The function accepts
  the following:

    * `node` - the simple name (used during spawning) or the node name
    * `binding` - a keyword list containing variables which will be available
      in the code block (optional)
    * `block` - the code block containing assertions and/or refutations

  ## Example

      test "assertions and refutations within the right context", spawned do
        clark = get_in(spawned, [:clark, :node])
        bruce = get_in(spawned, [:bruce, :node])
        assert :"[email protected]" == Node.self()
        assert [:"[email protected]", :"[email protected]"] = Node.list() |> Enum.sort()
        as_node(clark) do
          nodes = Node.list()
          refute :"[email protected]" == Node.self()
          assert :"[email protected]" = Node.self()
          assert [:"[email protected]"] = Node.list()
          assert ^nodes = Node.list()
        end
        as_node(:bruce, spawned_node: bruce) do
          assert :"[email protected]" = Node.self()
          assert :"[email protected]" = spawned_node
          assert ^spawned_node = Node.self()
          assert ^spawned_node = :"[email protected]"
          assert [:"[email protected]"] = Node.list()
        end
      end
  """
  defmacro as_node(node, binding \\ [], do: block) do
    # Build the AST of an ExUnitedBlock module wrapping the test block.
    quoted = generate_module(binding, block)

    quote do
      # Rewrite the placeholder AST nodes injected by generate_module/2:
      # __CAPTAIN__ becomes this (master) node's name and __PID__ the test
      # process pid, so the spawned node can report errors back here.
      quoted =
        unquote(Macro.escape(quoted))
        |> Macro.postwalk(fn
          {:__CAPTAIN__, [], _} -> Node.self()
          {:__PID__, [], _} -> :erlang.pid_to_list(self())
          quoted -> quoted
        end)

      # Resolve a simple name (e.g. :bruce) to the full node name; fall back
      # to using the argument as-is when it is already a node name.
      node =
        get_in(ExUnited.Spawn.legion(), [:nodes, unquote(node), :node]) ||
          unquote(node)

      # Compile the ExUnitedBlock module on the spawned node.
      case :rpc.call(node, Code, :eval_quoted, [quoted]) do
        {:badrpc, {:EXIT, {error, _}}} ->
          raise(error)

        {:badrpc, :nodedown} ->
          raise(RuntimeError,
            message: "node #{inspect(unquote(node))} seems to be unreachable"
          )

        _ ->
          nil
      end

      # Run the block remotely; failures arrive as messages in our mailbox.
      :rpc.call(node, ExUnitedBlock, :run, [unquote(binding)])

      message_count =
        self()
        |> Process.info(:message_queue_len)
        |> elem(1)

      # Re-raise any error the spawned node sent back so the test fails here.
      # NOTE(review): 0..message_count spans message_count + 1 iterations;
      # the extra iteration is unreachable today because the first received
      # message always raises, but confirm if non-error messages could ever
      # land in this mailbox.
      if message_count > 0 do
        for n <- 0..message_count do
          receive do
            error ->
              raise error
          end
        end
      end
    end
  end

  # Builds the AST of an ExUnitedBlock module which re-binds the given
  # variables and runs the test block, rescuing any error and rpc-ing it
  # back to the master node. __CAPTAIN__ and __PID__ are placeholders that
  # as_node/2 rewrites before shipping the AST to the spawned node.
  @spec generate_module(keyword, list) :: {:__block__, list, list}
  defp generate_module(binding, block) do
    # One assignment per bound variable: `name = Keyword.get(binding, :name)`.
    assigns =
      Enum.map(binding, fn {name, _value} ->
        quote do
          unquote({name, [], nil}) = Keyword.get(binding, unquote(name))
        end
      end)

    # Normalize a single expression into a list of statements.
    code =
      case block do
        {:__block__, [], lines} -> lines
        line -> [line]
      end

    quote do
      # Recompile ExUnitedBlock fresh for every as_node/2 invocation.
      :code.purge(ExUnitedBlock)
      :code.delete(ExUnitedBlock)

      defmodule ExUnitedBlock do
        @moduledoc false
        import ExUnit.Assertions

        def run(binding) do
          unquote({:__block__, [], assigns ++ code})
        rescue
          error ->
            :rpc.call(__CAPTAIN__, unquote(__MODULE__), :send_error, [
              __PID__,
              error
            ])
        end
      end
    end
  end
end
|
lib/ex_united/case.ex
| 0.816077 | 0.754938 |
case.ex
|
starcoder
|
defmodule WHATWG.URL.URLSearchParams do
  @moduledoc """
  Functions to work with `URLSearchParams`.

  Name-value pairs are represented as a list of two-element tuples of binaries.

  See [URL Standard - URLSearchParams class](https://url.spec.whatwg.org/#interface-urlsearchparams).
  """

  alias WHATWG.URL.WwwFormUrlencoded

  @doc """
  Gets the value of the first pair whose name is `name`.

  ### Examples

      iex> get([], "a")
      nil
      iex> get([{"a", "1"}, {"b", "2"}, {"a", "3"}], "a")
      "1"
  """
  def get(list, name) when is_list(list) and is_binary(name) do
    Enum.find_value(list, fn
      {^name, value} -> value
      _other -> false
    end)
  end

  @doc """
  Gets the values of all pairs whose name is `name`, preserving order.

  ### Examples

      iex> get_all([], "a")
      []
      iex> get_all([{"a", "1"}, {"b", "2"}, {"a", "3"}], "a")
      ["1", "3"]
  """
  def get_all(list, name) when is_list(list) and is_binary(name) do
    for {^name, value} <- list, do: value
  end

  @doc """
  Appends a new name-value pair.

  Note that it becomes slower as the list grows in size. You may use `prepend/3` and `List.reverse`

  ### Examples

      iex> append([], "a", "1")
      [{"a", "1"}]
      iex> append([{"a", "1"}], "a", "2")
      [{"a", "1"}, {"a", "2"}]
  """
  def append(list, name, value) when is_list(list) and is_binary(name) and is_binary(value) do
    list ++ [{name, value}]
  end

  @doc """
  Prepends a new name-value pair.

  This method is not defined in the specificiation.

  ### Examples

      iex> prepend([], "a", "1")
      [{"a", "1"}]
      iex> prepend([{"a", "1"}], "a", "2")
      [{"a", "2"}, {"a", "1"}]
  """
  def prepend(list, name, value) when is_list(list) and is_binary(name) and is_binary(value) do
    [{name, value} | list]
  end

  @doc """
  Returns a list of elements in `list` in reverse order.
  """
  defdelegate reverse(list), to: Enum

  @doc """
  Sets the value for the name.

  1. If the list contains any name-value pairs whose name is `name`, then set the value of the first such name-value pair to `value` and remove the others.
  1. Otherwise, append a new name-value pair whose name is `name` and value is `value`, to the list.

  ### Examples

      iex> set([], "a", "1")
      [{"a", "1"}]
      iex> set([{"a", "1"}, {"b", "2"}, {"a", "3"}], "a", "4")
      [{"a", "4"}, {"b", "2"}]
  """
  def set(list, name, value) when is_list(list) and is_binary(name) and is_binary(value) do
    do_set(list, name, value, false, [])
  end

  # Walks the list keeping a reversed accumulator; `seen?` records whether
  # the first matching pair has already been replaced.
  defp do_set([], name, value, false, acc), do: Enum.reverse([{name, value} | acc])
  defp do_set([], _name, _value, true, acc), do: Enum.reverse(acc)

  defp do_set([{name, _} | rest], name, value, false, acc),
    do: do_set(rest, name, value, true, [{name, value} | acc])

  defp do_set([{name, _} | rest], name, value, true, acc),
    do: do_set(rest, name, value, true, acc)

  defp do_set([pair | rest], name, value, seen?, acc),
    do: do_set(rest, name, value, seen?, [pair | acc])

  @doc """
  Checks if the list contains a name-value pair whose name is `name`.

  ### Examples

      iex> has?([], "foo")
      false
      iex> has?([{"foo", "bar"}], "foo")
      true
  """
  def has?(list, name) when is_list(list) and is_binary(name) do
    List.keymember?(list, name, 0)
  end

  @doc """
  Sorts name-value pairs by their names (stable: equal names keep their order).

  ### Examples

      iex> sort([])
      []
      iex> sort([{"b", "1"}, {"a", "2"}, {"a", "1"}])
      [{"a", "2"}, {"a", "1"}, {"b", "1"}]
  """
  def sort(list) when is_list(list) do
    Enum.sort_by(list, &elem(&1, 0))
  end

  def parse(string) when is_binary(string), do: WwwFormUrlencoded.parse(string)

  @doc """
  ### Examples

      iex> serialize([])
      ""
      iex> serialize([{"foo", "bar"}])
      "foo=bar"
      iex> serialize([{"foo", "bar"}, {"foo", " baz "}])
      "foo=bar&foo=+baz+"
  """
  def serialize(list, to_str \\ false) when is_list(list),
    do: WwwFormUrlencoded.serialize(list, to_str)
end
|
lib/whatwg/url/url_search_params.ex
| 0.80329 | 0.484136 |
url_search_params.ex
|
starcoder
|
defmodule Modbus.Model do
  @moduledoc false
  # In-memory model of a Modbus data table.
  #
  # `state` is shaped as `%{slave_id => %{{type, address} => value}}` where
  # `type` is `:c` (coil), `:i` (input), `:hr` (holding register) or
  # `:ir` (input register).

  # Read commands: coils, inputs, holding registers, input registers.
  def apply(state, {:rc, slave, address, count}) do
    reads(state, {slave, :c, address, count})
  end

  def apply(state, {:ri, slave, address, count}) do
    reads(state, {slave, :i, address, count})
  end

  def apply(state, {:rhr, slave, address, count}) do
    reads(state, {slave, :hr, address, count})
  end

  def apply(state, {:rir, slave, address, count}) do
    reads(state, {slave, :ir, address, count})
  end

  # Write a single coil.
  def apply(state, {:fc, slave, address, value}) when is_integer(value) do
    write(state, {slave, :c, address, value})
  end

  # TODO (translated from the original Spanish comment): revisit this
  # function, since the list may contain non-integer values.
  def apply(state, {:fc, slave, address, values}) when is_list(values) do
    writes(state, {slave, :c, address, values})
  end

  # Write a single holding register.
  def apply(state, {:phr, slave, address, value}) when is_integer(value) do
    write(state, {slave, :hr, address, value})
  end

  # TODO (translated from the original Spanish comment): revisit this
  # function, since the list may contain non-integer values.
  def apply(state, {:phr, slave, address, values}) when is_list(values) do
    writes(state, {slave, :hr, address, values})
  end

  # Reads `count` sequential points starting at `address`. Returns
  # `{state, values}`, or `{state, :error}` when the request falls outside
  # the modelled memory.
  # NOTE (translated from the original Spanish comment): for the slave side
  # the memory structure may need to change.
  defp reads(state, {slave, type, address, count}) do
    # Check the incoming request is valid for the current model.
    if check_request(state, {slave, type, address, count}) do
      points = Map.fetch!(state, slave)

      values =
        for point <- address..(address + count - 1) do
          Map.fetch!(points, {type, point})
        end

      {state, values}
    else
      {state, :error}
    end
  end

  # Writes a single value; returns `{new_state, nil}`.
  defp write(state, {slave, type, address, value}) do
    cmap = Map.fetch!(state, slave)
    nmap = Map.put(cmap, {type, address}, value)
    {Map.put(state, slave, nmap), nil}
  end

  # Writes a list of sequential values starting at `address`. The match on
  # `^final` asserts that exactly `length(values)` addresses were written.
  defp writes(state, {slave, type, address, values}) do
    cmap = Map.fetch!(state, slave)
    final = address + Enum.count(values)

    {^final, nmap} =
      Enum.reduce(values, {address, cmap}, fn value, {i, map} ->
        {i + 1, Map.put(map, {type, i}, value)}
      end)

    {Map.put(state, slave, nmap), nil}
  end

  # Validates that `slave` exists and that both the first and last addresses
  # of the request are present in the model. Replaces the original
  # `if ... do true else false end` nesting with direct boolean logic.
  def check_request(state, {slave, type, addr, count}) do
    case Map.fetch(state, slave) do
      {:ok, points} ->
        addr_end = addr + count - 1
        Map.has_key?(points, {type, addr}) and Map.has_key?(points, {type, addr_end})

      :error ->
        false
    end
  end
end
|
lib/model.ex
| 0.524395 | 0.477737 |
model.ex
|
starcoder
|
defmodule ElixirRigidPhysics.Collision.Intersection.SphereCapsule do
@moduledoc """
Module for sphere-capsule intersection tests.
"""
require ElixirRigidPhysics.Dynamics.Body, as: Body
require ElixirRigidPhysics.Geometry.Sphere, as: Sphere
require ElixirRigidPhysics.Geometry.Capsule, as: Capsule
require ElixirRigidPhysics.Collision.Contact, as: Contact
alias ElixirRigidPhysics.Geometry.Util, as: GUtil
alias Graphmath.Vec3
alias Graphmath.Quatern
# Tolerance below which the sphere centre is considered to lie on the
# capsule's axis, in which case no contact normal can be derived.
@verysmol 1.0e-12
@doc """
Check the intersection of a sphere and a capsule.
## Examples
iex> # Check non-touching capsule and sphere
iex> alias ElixirRigidPhysics.Collision.Intersection.SphereCapsule
iex> require ElixirRigidPhysics.Geometry.Capsule, as: Capsule
iex> require ElixirRigidPhysics.Geometry.Sphere, as: Sphere
iex> require ElixirRigidPhysics.Dynamics.Body, as: Body
iex> a = Body.body( shape: Sphere.sphere(radius: 1) , position: {35.0, 0.0, 0.0})
iex> b = Body.body( shape: Capsule.capsule(axial_length: 1, cap_radius: 0.5), position: {0.0, 0.0, 0.0})
iex> SphereCapsule.check(a,b)
:no_intersection
iex> # Check coincident capsule and sphere
iex> alias ElixirRigidPhysics.Collision.Intersection.SphereCapsule
iex> require ElixirRigidPhysics.Geometry.Capsule, as: Capsule
iex> require ElixirRigidPhysics.Geometry.Sphere, as: Sphere
iex> require ElixirRigidPhysics.Dynamics.Body, as: Body
iex> a = Body.body( shape: Sphere.sphere(radius: 1) , position: {1.0, 0.0, 0.0})
iex> b = Body.body( shape: Capsule.capsule(axial_length: 2, cap_radius: 0.5), position: {1.0, 0.0, 0.0})
iex> SphereCapsule.check(a,b)
:coincident
iex> # Check side-grazing capsule and sphere
iex> alias ElixirRigidPhysics.Collision.Intersection.SphereCapsule
iex> require ElixirRigidPhysics.Geometry.Capsule, as: Capsule
iex> require ElixirRigidPhysics.Geometry.Sphere, as: Sphere
iex> require ElixirRigidPhysics.Dynamics.Body, as: Body
iex> a = Body.body( shape: Sphere.sphere(radius: 1) , position: {2.0, 0.0, 0.0})
iex> b = Body.body( shape: Capsule.capsule(axial_length: 2, cap_radius: 1), position: {0.0, 0.0, 0.0})
iex> SphereCapsule.check(a,b)
{:contact_manifold, {{:contact_point, {1.0, 0.0, 0.0}, 0.0}}, {-1.0, 0.0, 0.0}}
iex> # Check top-grazing capsule and sphere
iex> alias ElixirRigidPhysics.Collision.Intersection.SphereCapsule
iex> require ElixirRigidPhysics.Geometry.Capsule, as: Capsule
iex> require ElixirRigidPhysics.Geometry.Sphere, as: Sphere
iex> require ElixirRigidPhysics.Dynamics.Body, as: Body
iex> sqrthalf = :math.sqrt(0.5)
iex> a = Body.body( shape: Sphere.sphere(radius: 1) , position: {0.0, 0.0, 4.0})
iex> b = Body.body( shape: Capsule.capsule(axial_length: 4, cap_radius: 1), orientation: {sqrthalf, sqrthalf, 0.0, 0.0})
iex> {:contact_manifold, {{:contact_point, {0.0, 0.0, 3.0}, distance}}, {0.0, 0.0, -1.0}} = SphereCapsule.check(a,b)
iex> distance < 0.0001
true
iex> # Check partially overlapping sphere and capsule
iex> # Check side-grazing capsule and sphere
iex> alias ElixirRigidPhysics.Collision.Intersection.SphereCapsule
iex> require ElixirRigidPhysics.Geometry.Capsule, as: Capsule
iex> require ElixirRigidPhysics.Geometry.Sphere, as: Sphere
iex> require ElixirRigidPhysics.Dynamics.Body, as: Body
iex> a = Body.body( shape: Sphere.sphere(radius: 3) , position: {4.0, 0.0, 0.0})
iex> b = Body.body( shape: Capsule.capsule(axial_length: 12, cap_radius: 2))
iex> SphereCapsule.check(a,b)
{:contact_manifold, {{:contact_point, {1.5, 0.0, 0.0}, 1.0}}, {-1.0, 0.0, 0.0}}
iex> # Check completely contained sphere and capsule
iex> # Check side-grazing capsule and sphere
iex> alias ElixirRigidPhysics.Collision.Intersection.SphereCapsule
iex> require ElixirRigidPhysics.Geometry.Capsule, as: Capsule
iex> require ElixirRigidPhysics.Geometry.Sphere, as: Sphere
iex> require ElixirRigidPhysics.Dynamics.Body, as: Body
iex> a = Body.body( shape: Sphere.sphere(radius: 2) , position: {3.0, 0.0, 0.0})
iex> b = Body.body( shape: Capsule.capsule(axial_length: 12, cap_radius: 5))
iex> SphereCapsule.check(a,b)
{:contact_manifold, {{:contact_point, {3.0, 0.0, 0.0}, 4.0}}, {-1.0, 0.0, 0.0}}
"""
@spec check(Body.body(), Body.body()) :: Contact.contact_result()
def check(
Body.body(shape: Sphere.sphere(radius: r_a), position: p_a),
Body.body(shape: Capsule.capsule(cap_radius: cr_b) = c, position: p_b, orientation: o_b)
) do
# note that it might well be faster to transform the sphere to capsule space
# Transform the capsule's principal-axis endpoints into world space using
# the capsule body's orientation and position.
{capsule_local_a, capsule_local_b} = Capsule.get_principle_points(c)
capsule_a = o_b |> Quatern.transform_vector(capsule_local_a) |> Vec3.add(p_b)
capsule_b = o_b |> Quatern.transform_vector(capsule_local_b) |> Vec3.add(p_b)
# Closest point on the capsule's axis segment to the sphere centre.
nearest_in_capsule = GUtil.closest_point_on_line_to_point(p_a, capsule_a, capsule_b)
vec_to_capsule = Vec3.subtract(nearest_in_capsule, p_a)
dist_to_capsule = vec_to_capsule |> Vec3.length()
cond do
# Centre farther from the axis than the combined radii: no contact.
dist_to_capsule > r_a + cr_b ->
:no_intersection
# Sphere centre effectively on the axis: normal is undefined.
dist_to_capsule <= @verysmol ->
:coincident
true ->
# `overlap` is non-positive in this branch; its magnitude is the
# penetration depth of the two surfaces.
overlap = dist_to_capsule - (r_a + cr_b)
penetration_depth = abs(overlap)
# Unit vector from the sphere centre toward the capsule axis; used as
# the contact normal.
direction = Vec3.normalize(vec_to_capsule)
Contact.contact_manifold(
contacts:
{Contact.contact_point(
# Contact point sits along `direction`, halfway through the
# overlap region measured from the sphere surface.
world_point: direction |> Vec3.scale(r_a - penetration_depth / 2) |> Vec3.add(p_a),
depth: penetration_depth
)},
world_normal: direction
)
end
end
end
|
lib/collision/intersection/sphere_capsule.ex
| 0.894651 | 0.484258 |
sphere_capsule.ex
|
starcoder
|
defmodule Raygun.Format do
@moduledoc """
This module builds payloads of error messages that Raygun will understand.
These functions return maps of data which will be encoded as JSON prior
to submission to Raygun.
"""
# Version of this library, captured at compile time from the Mix project.
@raygun_version Mix.Project.config()[:version]
@doc """
Builds an error payload that Raygun will understand for a string.
"""
# Charlist messages are converted to a binary and re-dispatched to the
# binary clause below.
def message_payload(msg, opts) when is_list(msg) do
msg_as_string = List.to_string(msg)
message_payload(msg_as_string, opts)
end
def message_payload(msg, opts) do
%{
occurredOn: now(),
details:
details()
|> Map.merge(environment())
|> Map.merge(user(opts))
|> Map.merge(custom(opts))
|> Map.merge(%{error: %{message: msg}})
}
end
@doc """
Builds an error payload that Raygun will understand for an exception and its
corresponding stacktrace.
"""
def stacktrace_payload(stacktrace, exception, opts) do
%{
occurredOn: now(),
details:
details()
|> Map.merge(err(stacktrace, exception))
|> Map.merge(environment())
|> Map.merge(user(opts))
|> Map.merge(custom(opts))
}
end
@doc """
Builds an error payload that Raygun will understand for an exception that was
caught in our Plug.
"""
def conn_payload(conn, stacktrace, exception, opts) do
%{
occurredOn: now(),
details:
details(opts)
|> Map.merge(err(stacktrace, exception))
|> Map.merge(environment())
|> Map.merge(request(conn))
|> Map.merge(response(conn))
|> Map.merge(user(opts))
|> Map.merge(custom(opts))
}
end
@doc """
Return custom information. Tags are configured per application via config and
user custom data can be provided per error.
"""
def custom(opts) do
%{
tags: Raygun.Util.get_env(:raygun, :tags),
# Everything in `opts` except `:user` is treated as custom data.
userCustomData: Enum.into(opts |> Keyword.delete(:user), %{})
}
end
@doc """
Get the logged in user from the opts if one is provided.
If not, it gets the system user if one is specified.
"""
def user(opts) do
if Keyword.has_key?(opts, :user) and Keyword.get(opts, :user) do
%{user: Keyword.get(opts, :user)}
else
if Raygun.Util.get_env(:raygun, :system_user) do
%{user: Raygun.Util.get_env(:raygun, :system_user)}
else
%{}
end
end
end
@doc """
Return a map of information about the environment in which the bug was encountered.
"""
def environment do
# Disk space information is not collected; an empty list is sent.
disk_free_spaces = []
{:ok, hostname} = :inet.gethostname()
hostname = hostname |> List.to_string()
{os_type, os_flavor} = :os.type()
os_version = "#{os_type} - #{os_flavor}"
architecture = :erlang.system_info(:system_architecture) |> List.to_string()
sys_version = :erlang.system_info(:system_version) |> List.to_string()
processor_count = :erlang.system_info(:logical_processors_online)
memory_used = :erlang.memory(:total)
%{
environment: %{
osVersion: os_version,
architecture: architecture,
packageVersion: sys_version,
processorCount: processor_count,
totalPhysicalMemory: memory_used,
deviceName: hostname,
diskSpaceFree: disk_free_spaces
}
}
end
@doc """
Returns details about the client and server machine.
"""
def details(opts \\ []) do
{:ok, hostname} = :inet.gethostname()
hostname = hostname |> List.to_string()
# Per-call :version overrides the configured client_version.
app_version =
if opts[:version] do
opts[:version]
else
Raygun.Util.get_env(:raygun, :client_version)
end
%{
machineName: hostname,
version: app_version,
client: %{
name: Raygun.Util.get_env(:raygun, :client_name),
version: @raygun_version,
clientUrl: Raygun.Util.get_env(:raygun, :url)
}
}
end
defp now, do: DateTime.utc_now() |> DateTime.to_iso8601()
@doc """
Given a Plug Conn return a map containing information about the request.
"""
def request(conn) do
%{
request: %{
hostName: conn.host,
url: conn.request_path,
httpMethod: conn.method,
iPAddress: conn.remote_ip |> :inet.ntoa() |> List.to_string(),
queryString: Plug.Conn.fetch_query_params(conn).query_params,
# NOTE(review): re-runs Plug.Parsers with empty options to obtain the
# body params — confirm this is safe for already-parsed connections.
form: Plug.Parsers.call(conn, []).params,
headers: Raygun.Util.format_headers(conn.req_headers),
rawData: %{}
}
}
end
@doc """
Given a Plug Conn return a map containing information about the response.
"""
def response(conn) do
%{
response: %{
statusCode: conn.status
}
}
end
@doc """
Given a stacktrace and an exception, return a map with the error data.
"""
def err(stacktrace, error) do
# The first stacktrace frame supplies the file/line/function details.
s0 = Enum.at(stacktrace, 0) |> stacktrace_entry
%{
error: %{
innerError: nil,
data: %{fileName: s0.fileName, lineNumber: s0.lineNumber, function: s0.methodName},
className: s0.className,
message: Exception.message(error),
stackTrace: stacktrace(stacktrace)
}
}
end
@doc """
Given a stacktrace return a list of maps for the frames.
"""
def stacktrace(s) do
s |> Enum.map(&stacktrace_entry/1)
end
@doc """
Given a stacktrace frame, return a map with the information in a structure
that Raygun will understand.
"""
# NOTE(review): a 3-element frame carries no module, so this module is
# substituted as the className — confirm that placeholder is acceptable.
def stacktrace_entry({function, arity_or_args, location}) do
stacktrace_entry({__MODULE__, function, arity_or_args, location})
end
def stacktrace_entry({module, function, arity_or_args, location}) do
%{
lineNumber: Raygun.Util.line_from(location),
className: Raygun.Util.mod_for(module),
fileName: Raygun.Util.file_from(location),
methodName: Raygun.Util.function_and_arity(function, arity_or_args)
}
end
end
|
lib/raygun/format.ex
| 0.799207 | 0.415284 |
format.ex
|
starcoder
|
defmodule GatherSubmissions.Config do
@moduledoc """
Provides the definition of the struct `t:t/0` that stores the configuration
options given to *GatherSubmissions*, and functions for reading it from a
YAML file.
The supported options are:
* `csv_file_name`: name of the CSV file containing students info.
* `csv_header`: a map that specifies the correspondence between the
information needed and the headers of the columns in the CSV file.
* `server_url`: URL of the DOMjudge server (without trailing slash)
* `server_username`, `server_password`: login data of the user which
will be used to fetch the submissions. It must have admin or jury role.
* `problem_name`, `contest_name`: name of the problem to be downloaded, and
the contest to which it belongs.
* `deadline`: date and time of the last submission accepted (if given)
* `last_allowed`: identifier of the last submission gathered. All submissions
sent after it will be discarded.
* `only_accepted`: boolean that specifies whether to consider only submissions
with an AC judgement.
* `output_dir`: name of directory in which the output files (.tex and source code)
will be generated.
* `strip_outside_tags`: when set to true, strips away all the code which is not
contained within `@ <answer>` and `@ </answer>` tags.
"""
@type t() :: %__MODULE__{
csv_file_name: String.t(),
csv_header: %{String.t() => String.t()},
server_url: String.t(),
server_username: String.t(),
server_password: String.t(),
contest_name: String.t(),
problem_name: String.t(),
deadline: NaiveDateTime.t() | nil,
last_allowed: integer() | nil,
only_accepted: boolean(),
output_dir: String.t(),
strip_outside_tags: boolean()
}
defstruct [
:csv_file_name,
:csv_header,
:server_url,
:server_username,
:server_password,
:contest_name,
:problem_name,
:deadline,
:last_allowed,
:only_accepted,
:output_dir,
:strip_outside_tags
]
defmodule MissingOptionError do
defexception [:option_name]
@impl true
def message(exception) do
"Missing option: #{exception.option_name}"
end
end
defmodule InvalidDeadlineError do
defexception message: "Invalid deadline format. Expected: YYYY-MM-DD HH:MM:SS"
end
defmodule InvalidLastSubmissionError do
defexception message: "Value of last_allowed must be an integer number"
end
defmodule ExpectedBoolean do
defexception [:field]
@impl true
def message(ex), do: "Value of #{ex.field} must be a boolean (true or false without quotes)"
end
# Column names that must be present in the csv_header map.
@required_header_fields ["name", "surname", "user_id"]
@default_output_dir "out"
@doc """
Obtains a `t:t/0` struct from a string that contains YAML content.
All options are validated and processed, raising the following exceptions if
there is any error:
* `GatherSubmissions.Config.MissingOptionError`, when a mandatory option
is omitted.
* `GatherSubmissions.Config.InvalidDeadlineError`, when the deadline option
does not contain a valid date in format `YYYY-MM-DD HH:MM:SS`.
* `GatherSubmissions.Config.InvalidLastSubmissionError`, when the `last_allowed`
option does not contain an integer number
* `GatherSubmissions.Config.ExpectedBoolean`, when `only_accepted` or
`strip_outside_tags` options contain a string different from `true` or `false`.
"""
@spec read_from_yaml(String.t()) :: t()
def read_from_yaml(string) do
yaml = YamlElixir.read_from_string!(string)
%__MODULE__{}
|> cast_mandatory_field(yaml, :csv_file_name)
|> cast_mandatory_field(yaml, :csv_header, &cast_header/1)
# NOTE(review): server_url is cast as optional (nil when absent) even
# though it looks required for talking to the server — confirm intended.
|> cast_optional_field(yaml, :server_url, &String.trim_trailing(&1, "/"))
|> cast_mandatory_field(yaml, :server_username)
|> cast_mandatory_field(yaml, :server_password)
|> cast_mandatory_field(yaml, :contest_name)
|> cast_mandatory_field(yaml, :problem_name)
|> cast_optional_field(yaml, :deadline, &cast_deadline/1)
|> cast_optional_field(yaml, :last_allowed, &expect_integer/1)
|> cast_optional_field(yaml, :only_accepted, &expect_boolean(&1, "only_accepted"), false)
|> cast_optional_field(yaml, :output_dir, &convert_to_string/1, @default_output_dir)
|> cast_optional_field(
yaml,
:strip_outside_tags,
&expect_boolean(&1, "strip_outside_tags"),
false
)
end
# Puts `field` into the config, raising MissingOptionError when the YAML map
# has no (or an empty) value under the field's string key.
defp cast_mandatory_field(
config,
map,
field,
process_fun \\ fn val -> convert_to_string(val) end
) do
field_string = to_string(field)
if map[field_string] in [nil, ""] do
raise MissingOptionError, option_name: field_string
else
Map.put(config, field, process_fun.(map[field_string]))
end
end
# Ensures every required header column is present in the csv_header map.
defp cast_header(header) do
case Enum.find(@required_header_fields, nil, &(not Map.has_key?(header, &1))) do
nil -> header
field -> raise MissingOptionError, option_name: "csv_header.#{field}"
end
end
# Puts `field` into the config, falling back to `default` when the YAML map
# has no (or an empty) value under the field's string key.
defp cast_optional_field(config, map, field, process_fun, default \\ nil) do
field_string = to_string(field)
if map[field_string] in [nil, ""] do
Map.put(config, field, default)
else
Map.put(config, field, process_fun.(map[field_string]))
end
end
defp convert_to_string(str) when is_binary(str), do: str
defp convert_to_string(number) when is_integer(number) or is_float(number),
do: to_string(number)
defp cast_deadline(string) when is_binary(string) do
case NaiveDateTime.from_iso8601(string) do
{:ok, dt} ->
# Adds one second to the parsed deadline — presumably so comparisons
# include submissions made at the exact deadline instant; confirm.
dt |> NaiveDateTime.add(1, :second)
{:error, :invalid_format} ->
raise InvalidDeadlineError
end
end
defp cast_deadline(_other), do: raise(InvalidDeadlineError)
defp expect_integer(number) when is_integer(number), do: number
defp expect_integer(_other), do: raise(InvalidLastSubmissionError)
defp expect_boolean(true, _), do: true
defp expect_boolean(false, _), do: false
defp expect_boolean(_other, field), do: raise(ExpectedBoolean, field: field)
end
|
lib/config/config.ex
| 0.823825 | 0.617498 |
config.ex
|
starcoder
|
defmodule Txbox.Transactions.Tx do
@moduledoc """
Transaction schema module.
Txbox adds a single table to your database containing all of the transaction,
channel and meta data.
For any transaction, the only required attribute is the `txid`. If no channel
is specified, the transaction will be added to the `Txbox.default_channel/0`.
Optionally any of the following attributes can be set:
* `:rawtx` - The raw transaction data. Must be given as a raw `t:binary/0`, not a hex encoded string.
* `:tags` - A list of tags which can be used for organising and filtering transactions.
* `:meta` - A map containing structured metadata about the transaction. See `t:Txbox.Transactions.Meta.t/0`.
* `:data` - A map containing any other arbitrary fields.
When searching Txbox, the data from `:tags` and `:meta` are incorporated into
full text search.
Txbox automatically syncs the transaction with your configured miner, and
updates the `:status` attribute with a cached response from the miner's Merchant
API. See `t:Txbox.Transactions.Status.t/0`.
"""
use Ecto.Schema
import Ecto.Changeset
alias Txbox.Transactions.{Meta, MapiResponse}
@typedoc "Transaction schema"
@type t :: %__MODULE__{
guid: binary,
state: String.t,
txid: String.t,
rawtx: binary,
channel: String.t,
tags: list(String.t),
meta: Meta.t,
data: map,
block_height: integer,
inserted_at: DateTime.t,
updated_at: DateTime.t
}
@default_state "pending"
@default_channel "txbox"
@primary_key {:guid, :binary_id, autogenerate: true}
@foreign_key_type :binary_id
schema "txbox_txns" do
field :state, :string, default: @default_state
field :txid, :string
field :rawtx, :binary
field :channel, :string, default: @default_channel
field :tags, {:array, :string}
field :data, :map
field :block_height, :integer
embeds_one :meta, Meta, on_replace: :update
has_many :mapi_responses, MapiResponse, foreign_key: :tx_guid
has_one :status, MapiResponse, foreign_key: :tx_guid
timestamps(type: :utc_datetime_usec)
end
# Finite-state machine for the transaction lifecycle, via Fsmx.
use Fsmx.Struct, transitions: %{
"pending" => ["queued", "pushed"],
"queued" => ["pushed", "failed"],
"pushed" => ["pushed", "confirmed"]
}
@doc false
def changeset(tx, attrs) do
tx
|> cast(attrs, [:state, :txid, :rawtx, :channel, :tags, :data])
|> cast_embed(:meta, with: &Meta.changeset/2)
|> validate_required([:state, :txid, :channel])
# txid must be a 64-char hex string (a SHA-256 digest).
|> validate_format(:txid, ~r/^[a-f0-9]{64}$/i)
|> validate_format(:channel, ~r/^\w[\w\-\/]*$/)
|> validate_state
end
# Changeset for transitioning state from "pushed" to "confirmed"
def transition_changeset(tx, "pushed", "confirmed", response) do
# Capture the confirming block height from the mAPI response payload.
block_height = get_in(response, [Access.key(:payload), "block_height"])
tx
|> cast(%{block_height: block_height}, [:block_height])
end
# Validates the changeset state change
defp validate_state(%{data: %__MODULE__{} = tx} = changeset) do
# Ecto.get_meta/2 returns :built for new structs and :loaded for ones
# read from the database; persisted non-pending txns are immutable here.
persisted? = Ecto.get_meta(tx, :state) == :loaded
changeset = case persisted? && tx.state != "pending" do
true -> add_error(changeset, :base, "cannot mutate non-pending transaction")
false -> changeset
end
transitions = __MODULE__.__fsmx__().__fsmx__(:transitions)
# NOTE(review): only keys of the transitions map ("pending", "queued",
# "pushed") pass here, so "confirmed"/"failed" cannot be cast directly —
# presumably only reachable via Fsmx transitions; confirm.
validate_change(changeset, :state, fn :state, state ->
case Map.keys(transitions) |> Enum.member?(state) do
true -> []
false -> [state: "cannot be #{state}"]
end
end)
end
end
|
lib/txbox/transactions/tx.ex
| 0.837487 | 0.569733 |
tx.ex
|
starcoder
|
use Bitwise
defmodule Day3 do
  @moduledoc """
  Find shortest distance in spiral matrix path
  """

  @doc """
  Get the Manhattan distance for a given number

  ## Examples

      iex> Day3.get_distance(1)
      0
      iex> Day3.get_distance(12)
      3
      iex> Day3.get_distance(23)
      2
      iex> Day3.get_distance(1024)
      31
  """
  def get_distance(num) do
    {_, _, {x, y}} = build_matrix(num)
    abs(x) + abs(y)
  end

  @doc """
  Get the next largest value in the spiral sequence when summing neighbors (part 2)

  ## Examples

      iex> Day3.get_largest_value(325489)
      330785
  """
  def get_largest_value(num) do
    # The spawned builder reports the first neighbor-sum exceeding `num`
    # back to this process, registered as :listener.
    Process.register(self(), :listener)
    pid = spawn(Day3, :build_matrix, [num, num])

    receive do
      {:result, value} ->
        Process.exit(pid, :kill)
        Process.unregister(:listener)
        value
    end
  end

  @doc """
  Given a number, build a spiral matrix starting with 1 at the center

  ## Examples

      iex> Day3.build_matrix(1)
      {%{{0, 0} => 1}, :down, {0, 0}}
      iex> Day3.build_matrix(2)
      {%{{0, 0} => 1, {1, 0} => 2}, :right, {1, 0}}
  """
  def build_matrix(cell_num, max \\ nil) when cell_num > 1 do
    {positions, direction, position} = build_matrix(cell_num - 1, max)

    # Bind the branch results explicitly: variables rebound inside `if`/`else`
    # do not leak to the surrounding scope in Elixir, so the original
    # in-branch assignments were lost. Keep heading while the cell to the
    # left is occupied; otherwise turn left and step.
    {direction, position} =
      if left_filled(positions, direction, position) do
        {direction, move(direction, position)}
      else
        new_direction = turn_left(direction)
        {new_direction, move(new_direction, position)}
      end

    # Part 1 stores the cell number itself; part 2 (when `max` is set) stores
    # the sum of already-filled neighbors and reports the first value
    # exceeding `max` to the registered :listener process.
    value =
      if max do
        value = cell_value(positions, position)

        if value > max do
          send(:listener, {:result, value})
        end

        value
      else
        cell_num
      end

    positions = Map.put(positions, position, value)
    {positions, direction, position}
  end

  def build_matrix(1, _), do: {%{{0, 0} => 1}, :down, {0, 0}}

  @doc """
  Check if the position to the left is filled

  ## Examples

      iex> Day3.left_filled(%{{0,0} => 1}, :up, {1, 0})
      true
      iex> Day3.left_filled(%{{0,0} => 1, {1,0} => 2}, :up, {1, 1})
      false
  """
  def left_filled(positions, direction, current_position) do
    check_position =
      turn_left(direction)
      |> move(current_position)

    case Map.fetch(positions, check_position) do
      {:ok, _} -> true
      :error -> false
    end
  end

  @doc """
  Get the cell value for part 2 of the problem (part 1 has no calculation)

  ## Examples

      iex> Day3.cell_value(%{{0, 0} => 1, {1, 0} => 1, {1, 1} => 2}, {0, 1})
      4
  """
  def cell_value(_, {0, 0}), do: 1

  def cell_value(positions, position) do
    {x, y} = position

    # All eight surrounding cells; unfilled neighbors contribute 0.
    neighbors = [
      {x - 1, y + 1},
      {x, y + 1},
      {x + 1, y + 1},
      {x + 1, y},
      {x + 1, y - 1},
      {x, y - 1},
      {x - 1, y - 1},
      {x - 1, y}
    ]

    values = for neighbor <- neighbors, do: Map.get(positions, neighbor, 0)
    Enum.sum(values)
  end

  def move(:up, {x, y}), do: {x, y + 1}
  def move(:right, {x, y}), do: {x + 1, y}
  def move(:down, {x, y}), do: {x, y - 1}
  def move(:left, {x, y}), do: {x - 1, y}

  @doc """
  Determine which direction to go after turning left

  ## Examples

      iex> Day3.turn_left(:up)
      :left
  """
  def turn_left(:up), do: :left
  def turn_left(:right), do: :up
  def turn_left(:down), do: :right
  def turn_left(:left), do: :down
end
|
lib/day3.ex
| 0.885922 | 0.739728 |
day3.ex
|
starcoder
|
defmodule ChoreRunner do
@moduledoc """
A framework and library for productively writing and running code "Chores".
A "Chore" can really be anything, but most commonly it is just some infrequently, manually run code which achieve a business or development goal.
For example: updating a config value in a database that does not yet have a UI (perhaps due to time constraints) is a great use for a chore.
A chore could be created that accepts the desired value and runs the update query.
Usually, the alternative to this would be a direct prod-shell or prod-db connection, which is inherently insecure and dangerous.
Many fast-moving startups or companies are ok with this access for developers, and that's fine.
But many companies have regulations that they must follow, or do not want to take the risk of a developer mistake while working in these environments.
In these cases, ChoreRunner allows the rapid creation, testing, and reviewing of code chores, along with a bundled UI for running them that accepts a variety of input types,
with the goal of finding a "sweet spot" of safety and speed when solving such problems.
## Getting Started
Add `ChoreRunner` to your supervision tree, after your app's `PubSub`:
```
children = [
{Phoenix.PubSub, [name: MyApp.PubSub]},
{ChoreRunner, [pubsub: MyApp.PubSub]},
]
```
## Writing a chore
```
defmodule MyApp.MyChore do
use ChoreRunner.Chore
input :my_file, :file
def run(%{my_file: path}}) do
path
|> File.read!()
|> parse_file()
|> do_stuff()
end
end
```
Example of running this Chore:
```
iex> ChoreRunner.run_chore(MyApp.MyChore, %{my_file: file}, :infinity)
{:ok, %Chore{}}
```
"""
alias ChoreRunner.{Chore, ReporterSupervisor, Reporter}
@doc false
def child_spec(opts) do
%{
id: ChoreRunner.Supervisor,
start: {ChoreRunner.Supervisor, :start_link, [opts]}
}
end
@doc """
List the currently running chores on all nodes.
"""
@spec list_running_chores() :: [Chore.t()]
def list_running_chores do
# Asks every Reporter process in the :pg group for its chore state;
# reporters that fail to reply within 1s (or error) are dropped.
__MODULE__
|> :pg.get_members(Reporter)
|> Enum.map(&:gen_server.send_request(&1, :chore_state))
|> Enum.flat_map(fn request ->
case :gen_server.receive_response(request, 1000) do
{:reply, chore} -> [chore]
:timeout -> []
{:error, _reason} -> []
end
end)
end
@doc """
Returns the pubsub topic used for a specific chore, or all chores if given the atom `:all`
"""
@spec chore_pubsub_topic(Chore.t() | :all) :: String.t()
def chore_pubsub_topic(:all), do: "chore_runner:*"
def chore_pubsub_topic(%Chore{id: id}) do
"chore_runner:id-#{id}"
end
@doc """
Runs the given chore module as a chore.
Accepts an input map with either string or atom keys as well as a keyword list of options.
Returns a `%ChoreRunner.Chore{}` struct.
Input map keys must match one of the inputs defined in the provided chore module.
If not, the input under the unmatched key is discarded.
Matched input will have default validations run on them, as well custom validations declared in the chore module.
If any inputs fail validation, the chore will not run, and instead an error tuple will be returned.
If all validations pass, the chore will then be run.
Currently, no options are supported.
"""
@spec run_chore(module(), map(), Keyword.t()) :: {:ok, Chore.t()} | {:error, any()}
def run_chore(chore_mod, input, opts \\ []) do
chore = %Chore{mod: chore_mod, id: gen_id()}
# Any {:error, _} from validation or startup falls out of the `with`
# unchanged and becomes this function's return value.
with {:ok, validated_input} <- Chore.validate_input(chore, input),
{:ok, updated_chore = %Chore{reporter: pid}} when not is_nil(pid) <-
do_start_reporter(chore, opts),
{:ok, started_chore} <- do_start_chore(updated_chore, validated_input, opts) do
{:ok, started_chore}
end
end
@doc """
Stops the provided chore by terminating both the chore task and the reporter.
Returns `:ok` if successful, and `:error` if not successful
"""
@spec stop_chore(Chore.t()) :: :ok | :error
def stop_chore(%Chore{reporter: pid}) do
GenServer.call(pid, :stop_chore)
end
# 16 random bytes, hex-encoded: a 32-character chore id.
defp gen_id do
16
|> :crypto.strong_rand_bytes()
|> Base.encode16()
end
# Starts a Reporter under the dynamic supervisor and joins it to the :pg
# group so list_running_chores/0 can find it across nodes.
defp do_start_reporter(%Chore{} = chore, opts) do
with {:ok, pid} <-
DynamicSupervisor.start_child(
ReporterSupervisor,
{Reporter, Keyword.put(opts, :chore, chore)}
),
:ok <- :pg.join(ChoreRunner, Reporter, pid) do
{:ok, %Chore{chore | reporter: pid}}
end
end
defp do_start_chore(%Chore{reporter: reporter_pid}, input, opts) do
# Start the task from the reporter so that the task reports to the reporter server
{:ok, GenServer.call(reporter_pid, {:start_chore_task, input, opts})}
end
end
|
lib/chore_runner.ex
| 0.848549 | 0.891717 |
chore_runner.ex
|
starcoder
|
defmodule Oli.Delivery.Evaluation.Rule do
@moduledoc false
alias Oli.Delivery.Evaluation.EvaluationContext
@doc """
Parses and evaluates a rule and returns `{:ok, result}` when succesful, where `result`
is is a boolean true or false indicating if the rule matched or not.
Returns `{:error, reason}` when it fails to parse or evaluate
"""
def parse_and_evaluate(rule_as_string, %EvaluationContext{} = context) do
with {:ok, tree} <- parse(rule_as_string),
{:ok, result} <- evaluate(tree, context) do
{:ok, result}
end
end
@doc """
Parses a rule and returns `{:ok, tree}` when succesful, where `tree`
is a series of nested tuples representing the parsed clauses in prefix notation, where
the first tuple entry is the operation, the second is the left hand side
operand and the third is the right hand side operand. An example:
{:&&, {:gt, :attempt_number, "1"}, {:like, :input, "some string here"}}
Returns `{:error, reason}` when it fails to parse
"""
@spec parse(binary) :: {:error, <<_::64, _::_*8>>} | {:ok, any}
def parse(rule_as_string), do: Oli.Delivery.Evaluation.Parser.rule(rule_as_string) |> unwrap()
# Unwraps the NimbleParsec-style result tuple into {:ok, tree} | {:error, reason}.
defp unwrap({:ok, [acc], "", _, _, _}), do: {:ok, acc}
defp unwrap({:ok, _, rest, _, _, _}), do: {:error, "could not parse" <> rest}
defp unwrap({:error, reason, _rest, _, _, _}), do: {:error, reason}
# Evaluates a parsed rule tree against the context.
# NOTE(review): rescues every exception and returns it as {:error, exception},
# so genuine bugs are indistinguishable from bad rules — consider narrowing.
def evaluate(tree, %EvaluationContext{} = context) do
try do
{:ok, eval(tree, context)}
rescue
e -> {:error, e}
end
end
# Boolean combinators.
defp eval({:&&, lhs, rhs}, context), do: eval(lhs, context) and eval(rhs, context)
defp eval({:||, lhs, rhs}, context), do: eval(lhs, context) or eval(rhs, context)
defp eval({:!, rhs}, context), do: !eval(rhs, context)
# The right-hand side of `like` is compiled as a regular expression.
defp eval({:like, lhs, rhs}, context) do
{:ok, regex} = Regex.compile(rhs)
String.match?(eval(lhs, context), regex)
end
# Case-insensitive substring containment.
defp eval({:contains, lhs, rhs}, context) do
String.contains?(
String.downcase(eval(lhs, context)),
String.downcase(rhs)
)
end
# Context accessors; numeric values are normalized to strings so that the
# comparison operators below can Float.parse/String.to_integer them.
defp eval(:attempt_number, context), do: context.activity_attempt_number |> Integer.to_string()
defp eval(:input, context), do: context.input
defp eval(:input_length, context), do: String.length(context.input) |> Integer.to_string()
defp eval({:lt, lhs, rhs}, context) do
{left, _} = eval(lhs, context) |> Float.parse()
{right, _} = eval(rhs, context) |> Float.parse()
left < right
end
defp eval({:gt, lhs, rhs}, context) do
{left, _} = eval(lhs, context) |> Float.parse()
{right, _} = eval(rhs, context) |> Float.parse()
left > right
end
defp eval({:eq, lhs, rhs}, context) do
left = eval(lhs, context)
right = eval(rhs, context)
if is_float?(left) or is_float?(right) do
{left, _} = Float.parse(left)
{right, _} = Float.parse(right)
# NOTE(review): compares magnitudes with a 1.0e-5 tolerance; the nested
# abs means 1.0 and -1.0 compare as equal — confirm this is intended.
abs(abs(left) - abs(right)) < 0.00001
else
eval(lhs, context) |> String.to_integer() ==
eval(rhs, context) |> String.to_integer()
end
end
# String literals evaluate to themselves.
defp eval(value, _) when is_binary(value), do: value
# Heuristic: any string containing "." is treated as a float literal.
defp is_float?(str), do: String.contains?(str, ".")
end
|
lib/oli/delivery/evaluation/rule.ex
| 0.879626 | 0.578478 |
rule.ex
|
starcoder
|
defmodule Day16 do
# Sums every value on nearby tickets that is valid for no field at all
# (the "ticket scanning error rate").
def part_one(file_reader \\ InputFile) do
# chunk_while splits the input into its three blank-line-separated
# sections: field rules, our ticket, nearby tickets.
[fields, _our_ticket, other_tickets] =
file_reader.contents_of(16, :stream)
|> Enum.map(&String.trim/1)
|> Enum.chunk_while([], &accum/2, &finish/1)
fields = Enum.map(fields, &Day16.Field.parse/1)
other_tickets
# Lines were accumulated in reverse; restore order, then drop the
# section header line with tl.
|> Enum.reverse()
|> tl
|> Enum.map(&String.split(&1, ","))
|> Enum.map(fn arr -> Enum.map(arr, &String.to_integer/1) end)
|> Enum.flat_map(fn values ->
Enum.reject(values, fn value ->
Enum.any?(fields, &Day16.Field.valid_value?(&1, value))
end)
end)
|> Enum.reduce(&Kernel.+/2)
end
# Deduces which column is which field, then multiplies together our
# ticket's values for all "departure*" fields.
def part_two(file_reader \\ InputFile) do
[fields, our_ticket, other_tickets] =
file_reader.contents_of(16, :stream)
|> Enum.map(&String.trim/1)
|> Enum.chunk_while([], &accum/2, &finish/1)
fields = Enum.map(fields, &Day16.Field.parse/1)
ticket_values = our_ticket |> hd |> String.split(",") |> Enum.map(&String.to_integer/1)
other_tickets
|> Enum.reverse()
|> tl
|> Enum.map(&String.split(&1, ","))
|> Enum.map(fn arr -> Enum.map(arr, &String.to_integer/1) end)
# Keep only tickets where every value is valid for at least one field.
|> Enum.filter(fn values ->
Enum.all?(values, fn value ->
Enum.any?(fields, fn field -> Day16.Field.valid_value?(field, value) end)
end)
end)
# For each ticket, map each value to the set of field names it could be.
|> Enum.map(fn ticket ->
Enum.map(ticket, fn n ->
fields
|> Enum.filter(&Day16.Field.valid_value?(&1, n))
|> Enum.map(& &1.name)
|> Enum.into(MapSet.new())
end)
end)
# Intersect the candidate sets column-wise across all tickets, then
# eliminate until each column has exactly one field name.
|> Enum.reduce(&combine_sets/2)
|> simplify
|> Enum.zip(ticket_values)
|> Enum.filter(fn {name, _value} -> String.starts_with?(name, "departure") end)
|> Enum.map(fn {_name, value} -> value end)
|> Enum.reduce(&Kernel.*/2)
end
# Pairwise intersection of two lists of candidate MapSets.
defp combine_sets(a, b), do: combine_sets(a, b, [])
defp combine_sets([], [], sets), do: Enum.reverse(sets)
defp combine_sets([a | rest_a], [b | rest_b], sets),
do: combine_sets(rest_a, rest_b, [MapSet.intersection(a, b) | sets])
# Repeatedly removes already-determined (singleton) names from the other
# columns until every column is a singleton, then flattens to a name list.
defp simplify(sets) do
case Enum.all?(sets, fn set -> MapSet.size(set) == 1 end) do
true ->
Enum.flat_map(sets, &MapSet.to_list/1)
false ->
simplify(
sets,
sets |> Enum.filter(&(MapSet.size(&1) == 1)) |> Enum.reduce(&MapSet.union/2)
)
end
end
defp simplify(sets, chosen) do
sets
|> Enum.map(fn set ->
case MapSet.size(set) do
1 -> set
_ -> MapSet.difference(set, chosen)
end
end)
|> simplify
end
# chunk_while step: a blank line emits the accumulated chunk; any other
# line is prepended (hence chunks come out reversed).
defp accum("", lines), do: {:cont, lines, []}
defp accum(line, lines) do
{:cont, [line | lines]}
end
# Emit whatever is left as the final chunk.
defp finish(line), do: {:cont, line, []}
end
|
year_2020/lib/day_16.ex
| 0.518546 | 0.433442 |
day_16.ex
|
starcoder
|
defmodule Treex do
  @moduledoc """
  Convenient module for using the `gb_trees` from erlang's
  standard library.

  It reorders the arguments so the tree is always passed
  first and therefore, it can be used with the pipe operators:

  ```elixir
  t = Treex.empty()
      |> Treex.enter("hello", :world)
      |> Treex.enter(:hello, "world")
  ```

  As an additional facility this module implements a `stream/1`
  to traverse all `{key, value}` from the given tree.

  Its functions also hint when errors can be raised. Most of them
  will be `FunctionClauseError`.

  Function's documentation is provided for the IDE's to get it. For
  full details, refer to the official one at
  [reference documents](http://erlang.org/doc/man/gb_trees.html)
  """

  @type gb_tree_node() :: nil | {any(), any(), any(), any()}
  @opaque t() :: {non_neg_integer(), gb_tree_node()}

  @doc """
  Rebalances `tree`. Notice that this is rarely necessary,
  but can be motivated when many nodes have been deleted
  from the tree without further insertions. Rebalancing
  can then be forced to minimize lookup times, as deletion
  does not rebalance the tree.
  """
  @spec balance(t()) :: t()
  def balance(tree) do
    :gb_trees.balance(tree)
  end

  @doc """
  Removes the node with key `key` from `tree` and returns the
  new tree. Assumes that the key is present in the tree,
  crashes otherwise.
  """
  @spec delete!(t(), any()) :: t()
  def delete!(tree, key) do
    :gb_trees.delete(key, tree)
  end

  @doc """
  Removes the node with key `key` from `tree` if the key is
  present in the tree, otherwise does nothing.
  Returns the new tree.
  """
  @spec delete_any(t(), any()) :: t()
  def delete_any(tree, key) do
    :gb_trees.delete_any(key, tree)
  end

  @doc """
  Returns a `{value, tree}` tuple from node with key
  `key` and new tree without the node with this value.
  Assumes that the node with `key` is present in the
  tree, crashes otherwise.
  """
  @spec take!(t(), any()) :: {any(), t()}
  def take!(tree, key) do
    :gb_trees.take(key, tree)
  end

  @doc """
  Returns a `{value, tree2}` from node with key `key`;
  new tree without the node with this value.
  Returns error if the node with the key is not present in the tree.
  """
  @spec take_any(t(), any()) :: :error | {any(), t()}
  def take_any(tree, key) do
    :gb_trees.take_any(key, tree)
  end

  @doc """
  Returns a new empty tree.
  """
  @spec empty() :: t()
  def empty() do
    :gb_trees.empty()
  end

  @doc """
  Inserts `key` with value `value` into `tree` if the key
  is not present in the tree, otherwise updates `key`
  to value `value` in `tree`.
  Returns the new tree.
  """
  @spec enter(t(), any(), any()) :: t()
  def enter(tree, key, value) do
    :gb_trees.enter(key, value, tree)
  end

  @doc """
  Retrieves the value stored with `key` in `tree`.
  Assumes that the key is present in the tree, crashes otherwise.
  """
  # Fixed spec: :gb_trees.get/2 returns the stored value, not a tree.
  @spec get!(t(), any()) :: any()
  def get!(tree, key) do
    :gb_trees.get(key, tree)
  end

  @doc """
  Inserts `key` with value `value` into `tree`
  and returns the new tree.
  Assumes that the key is not present in the tree,
  crashes otherwise.
  """
  @spec insert!(t(), any(), any()) :: t()
  def insert!(tree, key, value) do
    :gb_trees.insert(key, value, tree)
  end

  @doc """
  Returns `true` if `key` is present in `tree`, otherwise `false`.
  """
  @spec defined?(t(), any()) :: boolean()
  def defined?(tree, key) do
    :gb_trees.is_defined(key, tree)
  end

  @doc """
  Returns `true` if `tree` is an empty tree, otherwise `false`.
  """
  @spec empty?(t()) :: boolean()
  def empty?(tree) do
    :gb_trees.is_empty(tree)
  end

  @doc """
  Returns an iterator that can be used for traversing the
  entries of `tree`; see `next/1`. The implementation of
  this is very efficient; traversing the whole tree using
  `next/1` is only slightly slower than getting the list
  of all elements using `to_list/1` and traversing that.
  The main advantage of the iterator approach is that it
  does not require the complete list of all elements to
  be built in memory at one time.
  """
  # Fixed spec: gb_trees iterators are the opaque :gb_trees.iter/2 type,
  # matching the argument type of next/1 below.
  @spec iterator(t()) :: :gb_trees.iter(any(), any())
  def iterator(tree) do
    :gb_trees.iterator(tree)
  end

  @doc """
  Returns an iterator that can be used for traversing
  the entries of `tree`; see `next/1`.
  The difference as compared to the iterator
  returned by `iterator/1` is that the first key greater
  than or equal to `key` is returned.
  """
  @spec iterator(t(), any()) :: :gb_trees.iter(any(), any())
  def iterator(tree, key) do
    :gb_trees.iterator_from(key, tree)
  end

  @doc """
  Returns the keys in `tree` as an ordered list.
  """
  @spec keys(t()) :: [any()]
  def keys(tree) do
    :gb_trees.keys(tree)
  end

  @doc """
  Returns `{key, value}`, where `key` is the largest key in `tree`,
  and `value` is the value associated with this key.
  Assumes that the tree is not empty.
  """
  @spec largest!(t()) :: {any(), any()}
  def largest!(tree) do
    :gb_trees.largest(tree)
  end

  @doc """
  Looks up `key` in `tree`. Returns `{:value, value}`,
  or :none if `key` is not present.
  """
  @spec lookup(t(), any()) :: :none | {:value, any()}
  def lookup(tree, key) do
    :gb_trees.lookup(key, tree)
  end

  @doc """
  Maps function `fn(k, v) -> v2` to all key-value
  pairs of tree `tree`.
  Returns a new tree with the same set of keys as
  `tree` and the new set of values `v2`.
  """
  # Fixed spec: the mapping function receives both key and value.
  @spec map(t(), (any(), any() -> any())) :: t()
  def map(tree, fun) do
    :gb_trees.map(fun, tree)
  end

  @doc """
  Returns `{key, value, iter2}`, where `key` is the
  smallest key referred to by iterator `it`, and
  `iter2` is the new iterator to be used for traversing
  the remaining nodes, or the atom `:none` if no nodes remain.
  """
  @spec next(:gb_trees.iter(any(), any())) :: :none | {any(), any(), :gb_trees.iter(any(), any())}
  def next(it) do
    :gb_trees.next(it)
  end

  @doc """
  Returns the number of nodes in `tree`.
  """
  @spec size(t()) :: non_neg_integer()
  def size(tree) do
    :gb_trees.size(tree)
  end

  @doc """
  Returns `{key, value}`, where `key` is the
  smallest key in `tree`, and `value` is the value
  associated with this key.
  Assumes that the tree is not empty.
  """
  @spec smallest!(t()) :: {any(), any()}
  def smallest!(tree) do
    :gb_trees.smallest(tree)
  end

  @doc """
  Returns `{key, value, tree2}`, where `key` is the largest
  key in `tree`, `value` is the value associated with this key,
  and `tree2` is this tree with the corresponding node deleted.
  Assumes that the tree is not empty.
  """
  # Fixed spec: returns a 3-tuple including the remaining tree.
  @spec take_largest!(t()) :: {any(), any(), t()}
  def take_largest!(tree) do
    :gb_trees.take_largest(tree)
  end

  @doc """
  Returns `{key, value, tree2}`, where `key` is the smallest key
  in `tree`, `value` is the value associated with this key,
  and `tree2` is this tree with the corresponding node
  deleted.
  Assumes that the tree is not empty.
  """
  @spec take_smallest!(t()) :: {any(), any(), t()}
  def take_smallest!(tree) do
    :gb_trees.take_smallest(tree)
  end

  @doc """
  Converts a tree into an ordered list of key-value tuples.
  """
  @spec to_list(t()) :: [{any(), any()}]
  def to_list(tree) do
    :gb_trees.to_list(tree)
  end

  @doc """
  Updates `key` to value `value` in `tree` and returns the new tree.
  Assumes that the key is present in the tree.
  """
  @spec update!(t(), any(), any()) :: t()
  def update!(tree, key, value) do
    :gb_trees.update(key, value, tree)
  end

  @doc """
  Returns the values in `tree` as an ordered list, sorted
  by their corresponding keys.
  Duplicates are not removed.
  """
  @spec values(t()) :: [any()]
  def values(tree) do
    :gb_trees.values(tree)
  end

  @doc """
  Implements a stream for `tree`.
  Each value returned by the stream shall be a `{key, value}`
  tuple. The exact same behaviour can be implemented
  by using the `iterator/1`.
  For example, the following code:

  ```elixir
  Treex.empty()
  |> Treex.enter(:key1, 1)
  |> Treex.enter(:key2, 2)
  |> Treex.enter(:key3, 3)
  |> Treex.stream()
  |> Enum.reduce(0, fn({_k, v}, acc) -> acc+v end)
  ```

  It will return `6`
  """
  # Added missing spec for parity with the rest of the module.
  @spec stream(t()) :: Enumerable.t()
  def stream(tree) do
    Stream.resource(
      fn -> iterator(tree) end,
      fn it ->
        case next(it) do
          :none ->
            # Iterator exhausted: halt the stream.
            {:halt, it}

          {key, val, iter2} ->
            {[{key, val}], iter2}
        end
      end,
      fn _it -> :ok end
    )
  end
end
|
lib/treex.ex
| 0.942889 | 0.923489 |
treex.ex
|
starcoder
|
defmodule Statistics.Distributions.Normal do
  @moduledoc """
  The normal, or gaussian, distribution
  """

  alias Statistics.Math
  alias Statistics.Math.Functions

  @doc """
  Probability density function
  get result of probability density function

  ## Examples

      iex> Statistics.Distributions.Normal.pdf(0)
      0.3989422804014327
      iex> Statistics.Distributions.Normal.pdf(1.3, 0.2, 1)
      0.21785217703255055
  """
  def pdf(x) do
    pdf(x, 0, 1)
  end

  def pdf(x, mu, sigma) do
    # exp(-(x - mu)^2 / (2 sigma^2)) / (sigma * sqrt(2 pi))
    numexp = Math.pow(x - mu, 2) / (2 * Math.pow(sigma, 2))
    denom = sigma * Math.sqrt(2 * Math.pi)
    numer = Math.pow(Math.e, numexp * -1)
    numer / denom
  end

  @doc """
  Get the probability that a value lies below `x`
  Cumulative gives a probability that a statistic
  is less than Z. This equates to the area of the distribution below Z.
  e.g: Pr(Z = 0.69) = 0.7549. This value is usually given in Z tables.

  ## Examples

      iex> Statistics.Distributions.Normal.cdf(2)
      0.9772499371127437
      iex> Statistics.Distributions.Normal.cdf(0)
      0.5000000005
  """
  def cdf(x) do
    cdf(x, 0, 1)
  end

  def cdf(x, mu, sigma) do
    # Standard closed form via the error function.
    0.5 * (1.0 + Functions.erf((x - mu) / (sigma * Math.sqrt(2))))
  end

  @doc """
  The percentile-point function
  Get the maximum point which lies below the given probability.
  This is the inverse of the cdf

  ## Examples

      iex> Statistics.Distributions.Normal.ppf(0.025)
      -1.96039491692534
      iex> Statistics.Distributions.Normal.ppf(0.25, 7, 2.1)
      5.584202805909036
  """
  def ppf(x) do
    ppf(x, 0, 1)
  end

  def ppf(x, mu, sigma) do
    # BUGFIX: the original assigned `p` inside the if/else branches and read
    # it afterwards; in Elixir, bindings made inside a branch do not leak out
    # of it, so `p` was undefined at the final expression (a compile error on
    # current compilers). Bind the whole conditional to `p` instead.
    p =
      if x < 0.5 do
        -Functions.inv_erf(Math.sqrt(-2.0 * Math.ln(x)))
      else
        Functions.inv_erf(Math.sqrt(-2.0 * Math.ln(1 - x)))
      end

    # Translate the standard-normal quantile to the requested distribution.
    mu + p * sigma
  end

  @doc """
  Draw a random number from a normal distribution
  `rnd/0` will return a random number from a normal distribution
  with a mean of 0 and a standard deviation of 1
  `rnd/3` allows you to provide the mean and standard deviation
  parameters of the distribution from which the random number is drawn
  Uses the [rejection sampling method](https://en.wikipedia.org/wiki/Rejection_sampling)

  ## Examples

      iex> Statistics.Distributions.Normal.rand()
      1.5990817245679434
      iex> Statistics.Distributions.Normal.rand(22, 2.3)
      23.900248900049736
  """
  def rand do
    rand(0, 1)
  end

  def rand(mu, sigma) do
    # Note: an alternate method exists and may be better
    # Inverse transform sampling - https://en.wikipedia.org/wiki/Inverse_transform_sampling
    # ----
    # Generate a random number between -10,+10
    # (probability of 10 ocurring in a Normal(0,1) distribution is
    # too small to calculate with the precision available to us)
    x = Math.rand() * 20 - 10
    {rmu, rsigma} = {0, 1}

    if pdf(x, rmu, rsigma) > Math.rand() do
      # Accepted sample: get the z-score against the standard normal...
      z = (rmu - x) / rsigma
      # ...and transpose to the specified distribution.
      mu + z * sigma
    else
      # Rejected: keep trying (recursion terminates with probability 1).
      rand(mu, sigma)
    end
  end
end
|
lib/statistics/distributions/normal.ex
| 0.933363 | 0.801004 |
normal.ex
|
starcoder
|
defmodule Chex.Move.SanParser do
  @moduledoc false

  # NOTE(review): the move__N/6 clauses below have the shape of compiled
  # parser-combinator output (they match NimbleParsec's generated code:
  # accumulator/stack threading, backtracking via saved {rest, context,
  # line, offset} frames, and @compile inline hints). Presumably this file
  # was generated from a SAN (Standard Algebraic Notation) grammar — confirm
  # before editing by hand; regenerating from the grammar is safer than
  # modifying individual clauses.

  @doc """
  Parses the given `binary` as move.

  Returns `{:ok, [token], rest, context, position, byte_offset}` or
  `{:error, reason, rest, context, line, byte_offset}` where `position`
  describes the location of the move (start position) as `{line, column_on_line}`.

  ## Options

    * `:byte_offset` - the byte offset for the whole binary, defaults to 0
    * `:line` - the line and the byte offset into that line, defaults to `{1, byte_offset}`
    * `:context` - the initial context value. It will be converted to a map
  """
  @spec move(binary, keyword) ::
          {:ok, [term], rest, context, line, byte_offset}
          | {:error, reason, rest, context, line, byte_offset}
        when line: {pos_integer, byte_offset},
             byte_offset: pos_integer,
             rest: binary,
             reason: String.t(),
             context: map()
  def move(binary, opts \\ []) when is_binary(binary) do
    # Normalize options into the internal parser state.
    context = Map.new(Keyword.get(opts, :context, []))
    byte_offset = Keyword.get(opts, :byte_offset, 0)

    # :line may be given as a bare line number or as {line, byte_offset}.
    line =
      case(Keyword.get(opts, :line, 1)) do
        {_, _} = line ->
          line

        line ->
          {line, byte_offset}
      end

    # Tokens are accumulated in reverse; reverse once on success.
    case(move__0(binary, [], [], context, line, byte_offset)) do
      {:ok, acc, rest, context, line, offset} ->
        {:ok, :lists.reverse(acc), rest, context, line, offset}

      {:error, _, _, _, _, _} = error ->
        error
    end
  end

  # Inline hints emitted by the generator for the hot clause-dispatch path.
  @compile {:inline,
            [
              move__1: 6,
              move__0: 6,
              move__49: 6,
              move__44: 6,
              move__53: 6,
              move__51: 6,
              move__50: 6,
              move__43: 6,
              move__48: 6,
              move__46: 6,
              move__45: 6,
              move__33: 6,
              move__41: 6,
              move__37: 6,
              move__39: 6,
              move__36: 6,
              move__35: 6,
              move__32: 6,
              move__27: 6,
              move__26: 6,
              move__31: 6,
              move__29: 6,
              move__28: 6,
              move__16: 6,
              move__24: 6,
              move__20: 6,
              move__22: 6,
              move__19: 6,
              move__18: 6,
              move__14: 6,
              move__13: 6,
              move__2: 6,
              move__4: 6,
              move__3: 6,
              move__12: 6,
              move__8: 6,
              move__11: 6,
              move__7: 6,
              move__6: 6
            ]}

  # Entry state: push a backtracking frame {rest, context, line, offset} so a
  # failed alternative can rewind the input.
  defp move__0(rest, acc, stack, context, line, offset) do
    move__33(rest, [], [{rest, context, line, offset}, acc | stack], context, line, offset)
  end

  defp move__2(rest, acc, stack, context, line, offset) do
    move__3(rest, [], [acc | stack], context, line, offset)
  end

  defp move__3(rest, acc, stack, context, line, offset) do
    move__8(rest, [], [{rest, context, line, offset}, acc | stack], context, line, offset)
  end

  # Castling: literal "O-O-O" tags a queenside castle.
  defp move__5(<<"O-O-O", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
    move__6(rest, [:queenside] ++ acc, stack, context, comb__line, comb__offset + 5)
  end

  # Terminal failure clause: every alternative was exhausted, so report the
  # combined expectation message generated from the grammar.
  defp move__5(rest, _acc, _stack, context, line, offset) do
    {:error,
     "expected file (a-h) or rank (1-8), followed by capture indicator (x) or square or nothing, followed by capture indicator (x) or nothing, followed by square, followed by promotion indicator (=), followed by piece identifier or En passant indicator (e.p.) or nothing, followed by check indicator (+) or checkmate indicator or nothing or piece identifier, followed by square or file (a-h) or rank (1-8), followed by capture indicator (x) or square or nothing, followed by capture indicator (x) or nothing, followed by square, followed by check indicator (+) or checkmate indicator or nothing or string \"O-O\", followed by string \"-O\" or string \"O-O-O\"",
     rest, context, line, offset}
  end

  defp move__6(rest, acc, [_, previous_acc | stack], context, line, offset) do
    move__4(rest, acc ++ previous_acc, stack, context, line, offset)
  end

  # Backtrack: restore the saved input frame and try the next alternative.
  defp move__7(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
    move__5(rest, [], stack, context, line, offset)
  end

  defp move__8(rest, acc, stack, context, line, offset) do
    move__9(rest, [], [acc | stack], context, line, offset)
  end

  # Castling: literal "O-O" (kingside), unless followed by "-O" (see move__10).
  defp move__9(<<"O-O", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
    move__10(rest, acc, stack, context, comb__line, comb__offset + 3)
  end

  defp move__9(rest, _acc, stack, context, line, offset) do
    [acc | stack] = stack
    move__7(rest, acc, stack, context, line, offset)
  end

  # Negative lookahead: "O-O" followed by "-O" must be parsed as "O-O-O",
  # so reject the kingside interpretation here.
  defp move__10(<<"-O", _::binary>> = rest, acc, stack, context, line, offset) do
    [acc | stack] = stack
    move__7(rest, acc, stack, context, line, offset)
  end

  defp move__10(rest, acc, stack, context, line, offset) do
    move__11(rest, acc, stack, context, line, offset)
  end

  defp move__11(rest, user_acc, [acc | stack], context, line, offset) do
    _ = user_acc
    move__12(rest, [:kingside] ++ acc, stack, context, line, offset)
  end

  defp move__12(rest, acc, [_, previous_acc | stack], context, line, offset) do
    move__4(rest, acc ++ previous_acc, stack, context, line, offset)
  end

  # Wrap the castle side atom into a `castle:` token.
  defp move__4(rest, user_acc, [acc | stack], context, line, offset) do
    _ = user_acc

    move__13(
      rest,
      [
        castle:
          case(:lists.reverse(user_acc)) do
            [one] ->
              one

            many ->
              raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
          end
      ] ++ acc,
      stack,
      context,
      line,
      offset
    )
  end

  defp move__13(rest, acc, [_, previous_acc | stack], context, line, offset) do
    move__1(rest, acc ++ previous_acc, stack, context, line, offset)
  end

  defp move__14(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
    move__2(rest, [], stack, context, line, offset)
  end

  # Piece moves: leading piece letter — R(82) N(78) B(66) Q(81) K(75).
  defp move__15(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
       when x0 === 82 or x0 === 78 or x0 === 66 or x0 === 81 or x0 === 75 do
    move__16(rest, [piece: [x0]] ++ acc, stack, context, comb__line, comb__offset + 1)
  end

  defp move__15(rest, acc, stack, context, line, offset) do
    move__14(rest, acc, stack, context, line, offset)
  end

  defp move__16(rest, acc, stack, context, line, offset) do
    move__20(rest, [], [{rest, context, line, offset}, acc | stack], context, line, offset)
  end

  defp move__18(rest, acc, [_, previous_acc | stack], context, line, offset) do
    move__17(rest, acc ++ previous_acc, stack, context, line, offset)
  end

  defp move__19(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
    move__18(rest, [], stack, context, line, offset)
  end

  defp move__20(rest, acc, stack, context, line, offset) do
    move__21(rest, [], [acc | stack], context, line, offset)
  end

  # Optional disambiguating origin: full square (file a-h = 97..104 plus
  # rank 1-8 = 49..56), bare file, or bare rank.
  defp move__21(
         <<x0::integer, x1::integer, rest::binary>>,
         acc,
         stack,
         context,
         comb__line,
         comb__offset
       )
       when x0 >= 97 and x0 <= 104 and (x1 >= 49 and x1 <= 56) do
    move__22(rest, [x1, x0] ++ acc, stack, context, comb__line, comb__offset + 2)
  end

  defp move__21(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
       when x0 >= 97 and x0 <= 104 do
    move__22(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
  end

  defp move__21(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
       when x0 >= 49 and x0 <= 56 do
    move__22(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
  end

  defp move__21(rest, _acc, stack, context, line, offset) do
    [acc | stack] = stack
    move__19(rest, acc, stack, context, line, offset)
  end

  defp move__22(rest, user_acc, [acc | stack], context, line, offset) do
    _ = user_acc
    move__23(rest, [origin: :lists.reverse(user_acc)] ++ acc, stack, context, line, offset)
  end

  # Lookahead after an origin: must be a capture "x" (120) or a destination
  # square for the origin interpretation to stand.
  defp move__23(<<x0::integer, _::binary>> = rest, acc, stack, context, line, offset)
       when x0 === 120 do
    move__24(rest, acc, stack, context, line, offset)
  end

  defp move__23(<<x0::integer, x1::integer, _::binary>> = rest, acc, stack, context, line, offset)
       when x0 >= 97 and x0 <= 104 and (x1 >= 49 and x1 <= 56) do
    move__24(rest, acc, stack, context, line, offset)
  end

  defp move__23(rest, acc, stack, context, line, offset) do
    move__19(rest, acc, stack, context, line, offset)
  end

  defp move__24(rest, acc, [_, previous_acc | stack], context, line, offset) do
    move__17(rest, acc ++ previous_acc, stack, context, line, offset)
  end

  # Optional capture marker "x" (120) for piece moves.
  defp move__17(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
       when x0 === 120 do
    move__25(rest, [capture: true] ++ acc, stack, context, comb__line, comb__offset + 1)
  end

  defp move__17(<<rest::binary>>, acc, stack, context, comb__line, comb__offset) do
    move__25(rest, [] ++ acc, stack, context, comb__line, comb__offset)
  end

  # Mandatory destination square for piece moves.
  defp move__25(
         <<x0::integer, x1::integer, rest::binary>>,
         acc,
         stack,
         context,
         comb__line,
         comb__offset
       )
       when x0 >= 97 and x0 <= 104 and (x1 >= 49 and x1 <= 56) do
    move__26(rest, [destination: [x0, x1]] ++ acc, stack, context, comb__line, comb__offset + 2)
  end

  defp move__25(rest, acc, stack, context, line, offset) do
    move__14(rest, acc, stack, context, line, offset)
  end

  defp move__26(rest, acc, stack, context, line, offset) do
    move__30(rest, [], [{rest, context, line, offset}, acc | stack], context, line, offset)
  end

  defp move__28(rest, acc, [_, previous_acc | stack], context, line, offset) do
    move__27(rest, acc ++ previous_acc, stack, context, line, offset)
  end

  defp move__29(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
    move__28(rest, [], stack, context, line, offset)
  end

  # Optional suffix for piece moves: "+" (43) check or "#" (35) checkmate.
  defp move__30(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
       when x0 === 43 do
    move__31(rest, [check: true] ++ acc, stack, context, comb__line, comb__offset + 1)
  end

  defp move__30(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
       when x0 === 35 do
    move__31(rest, [checkmate: true] ++ acc, stack, context, comb__line, comb__offset + 1)
  end

  defp move__30(rest, acc, stack, context, line, offset) do
    move__29(rest, acc, stack, context, line, offset)
  end

  defp move__31(rest, acc, [_, previous_acc | stack], context, line, offset) do
    move__27(rest, acc ++ previous_acc, stack, context, line, offset)
  end

  defp move__27(rest, acc, [_, previous_acc | stack], context, line, offset) do
    move__1(rest, acc ++ previous_acc, stack, context, line, offset)
  end

  defp move__32(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
    move__15(rest, [], stack, context, line, offset)
  end

  # Pawn-move branch: starts with a bare file/rank origin (no piece letter).
  defp move__33(rest, acc, stack, context, line, offset) do
    move__37(rest, [], [{rest, context, line, offset}, acc | stack], context, line, offset)
  end

  defp move__35(rest, acc, [_, previous_acc | stack], context, line, offset) do
    move__34(rest, acc ++ previous_acc, stack, context, line, offset)
  end

  defp move__36(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
    move__35(rest, [], stack, context, line, offset)
  end

  defp move__37(rest, acc, stack, context, line, offset) do
    move__38(rest, [], [acc | stack], context, line, offset)
  end

  defp move__38(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
       when x0 >= 97 and x0 <= 104 do
    move__39(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
  end

  defp move__38(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
       when x0 >= 49 and x0 <= 56 do
    move__39(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
  end

  defp move__38(rest, _acc, stack, context, line, offset) do
    [acc | stack] = stack
    move__36(rest, acc, stack, context, line, offset)
  end

  defp move__39(rest, user_acc, [acc | stack], context, line, offset) do
    _ = user_acc
    move__40(rest, [origin: :lists.reverse(user_acc)] ++ acc, stack, context, line, offset)
  end

  defp move__40(<<x0::integer, _::binary>> = rest, acc, stack, context, line, offset)
       when x0 === 120 do
    move__41(rest, acc, stack, context, line, offset)
  end

  defp move__40(<<x0::integer, x1::integer, _::binary>> = rest, acc, stack, context, line, offset)
       when x0 >= 97 and x0 <= 104 and (x1 >= 49 and x1 <= 56) do
    move__41(rest, acc, stack, context, line, offset)
  end

  defp move__40(rest, acc, stack, context, line, offset) do
    move__36(rest, acc, stack, context, line, offset)
  end

  defp move__41(rest, acc, [_, previous_acc | stack], context, line, offset) do
    move__34(rest, acc ++ previous_acc, stack, context, line, offset)
  end

  defp move__34(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
       when x0 === 120 do
    move__42(rest, [capture: true] ++ acc, stack, context, comb__line, comb__offset + 1)
  end

  defp move__34(<<rest::binary>>, acc, stack, context, comb__line, comb__offset) do
    move__42(rest, [] ++ acc, stack, context, comb__line, comb__offset)
  end

  defp move__42(
         <<x0::integer, x1::integer, rest::binary>>,
         acc,
         stack,
         context,
         comb__line,
         comb__offset
       )
       when x0 >= 97 and x0 <= 104 and (x1 >= 49 and x1 <= 56) do
    move__43(rest, [destination: [x0, x1]] ++ acc, stack, context, comb__line, comb__offset + 2)
  end

  defp move__42(rest, acc, stack, context, line, offset) do
    move__32(rest, acc, stack, context, line, offset)
  end

  defp move__43(rest, acc, stack, context, line, offset) do
    move__47(rest, [], [{rest, context, line, offset}, acc | stack], context, line, offset)
  end

  defp move__45(rest, acc, [_, previous_acc | stack], context, line, offset) do
    move__44(rest, acc ++ previous_acc, stack, context, line, offset)
  end

  defp move__46(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
    move__45(rest, [], stack, context, line, offset)
  end

  # Optional pawn-only suffixes: promotion "=<piece>" or en passant "e.p.".
  defp move__47(<<"=", x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
       when x0 === 82 or x0 === 78 or x0 === 66 or x0 === 81 or x0 === 75 do
    move__48(rest, [promote: [x0]] ++ acc, stack, context, comb__line, comb__offset + 2)
  end

  defp move__47(<<"e.p.", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
    move__48(rest, [en_passant: true] ++ acc, stack, context, comb__line, comb__offset + 4)
  end

  defp move__47(rest, acc, stack, context, line, offset) do
    move__46(rest, acc, stack, context, line, offset)
  end

  defp move__48(rest, acc, [_, previous_acc | stack], context, line, offset) do
    move__44(rest, acc ++ previous_acc, stack, context, line, offset)
  end

  defp move__44(rest, acc, stack, context, line, offset) do
    move__52(rest, [], [{rest, context, line, offset}, acc | stack], context, line, offset)
  end

  defp move__50(rest, acc, [_, previous_acc | stack], context, line, offset) do
    move__49(rest, acc ++ previous_acc, stack, context, line, offset)
  end

  defp move__51(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
    move__50(rest, [], stack, context, line, offset)
  end

  # Optional suffix for pawn moves: "+" (43) check or "#" (35) checkmate.
  defp move__52(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
       when x0 === 43 do
    move__53(rest, [check: true] ++ acc, stack, context, comb__line, comb__offset + 1)
  end

  defp move__52(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
       when x0 === 35 do
    move__53(rest, [checkmate: true] ++ acc, stack, context, comb__line, comb__offset + 1)
  end

  defp move__52(rest, acc, stack, context, line, offset) do
    move__51(rest, acc, stack, context, line, offset)
  end

  defp move__53(rest, acc, [_, previous_acc | stack], context, line, offset) do
    move__49(rest, acc ++ previous_acc, stack, context, line, offset)
  end

  defp move__49(rest, acc, [_, previous_acc | stack], context, line, offset) do
    move__1(rest, acc ++ previous_acc, stack, context, line, offset)
  end

  # Accepting state: the whole grammar matched.
  defp move__1(rest, acc, _stack, context, line, offset) do
    {:ok, acc, rest, context, line, offset}
  end
end
|
lib/chex/move/san/san_parser.ex
| 0.897107 | 0.44571 |
san_parser.ex
|
starcoder
|
defmodule Microscope do
  @moduledoc """
  **Microscope** is a simple static web server built using `cowboy`.

  Its original purpose was to provide an easy way to test your static
  websites, but it's also useful as a temporary server for file sharing over
  HTTP.

  ## Getting started

  Use `Microscope.start_link/2` to start the web server.

  ```
  # Example:
  {:ok, pid} =
    Microscope.start_link("/home/user/www", [base: "/base", port: 8080])
  ```

  Then the HTTP server will start listening on port 8080, and when the user
  requests `/base/path/to/file`, the server will respond with the contents of
  `/home/user/www/path/to/file` on your system.
  """

  alias Microscope.Options

  @typedoc "A keyword list containing options for Microscope"
  @type options :: [
          port: non_neg_integer(),
          base: String.t(),
          callbacks: [module()],
          index: boolean(),
          route_overrides: [route_path()],
          gen_server_options: GenServer.options()
        ]

  # A cowboy-router-style route tuple; the shape mirrors the entries of
  # :cowboy_router route lists (path, optional fields, handler module, opts).
  @type route_path ::
          {:_ | iodata(), module(), any()}
          | {:_ | iodata(), :cowboy.fields(), module(), any()}

  @doc """
  Starts Microscope simple static web server.

  By default, the server will start listening on port 8080, and serve files
  located under the `webroot` directory. This behavior can be customized by
  using the options below.

  ## Options

  The second argument of this function expects a keyword list containing zero
  or more options listed below:

  * `port`: A port the web server listens on. The default value is `8080`.
  * `base`: A string that represents the base URL. Any URL with the form of
    `<base>/path/to/file` will be mapped to `<webroot>/path/to/file`; any
    other requests will result in 404 error. The default value is `"/"`.
  * `index`: *See below.*
  * `callbacks`: *See below.*
  * `route_overrides`: A list of `t:route_path/0` tuples (NOTE(review): this
    option is accepted by `t:options/0` but was previously undocumented —
    presumably it injects extra cowboy routes; confirm in `Microscope.Options`).
  * `gen_server_options`: A keyword list of options passed to
    `GenServer.start_link` function.

  ## The "index" Option

  When a user requests a directory, Microscope looks for either `index.html`
  or `index.htm` under that directory, and serves the file if found. If neither
  of them exists, how the server responds is determined by this option.

  * If `index` is set to `true`, Microscope will generate an HTML page
    containing a list of subdirectories and files and respond with 200 OK.
  * If `index` is set to `false`, the user will receive a 404 error.

  The default value for this option is `false`.

  ## The "callbacks" Option

  The `callbacks` option expects a list of modules, each module implementing
  `Microscope.Callback` behaviour. For example, if you want a line of access
  log printed on every requests, use the built-in `Microscope.Logger` module.
  The default value is an empty list.

  ## Return Value

  This function returns `{:ok, pid}` on success, where `pid` is a PID of
  process which can be later be stopped using `Microscope.stop/1,2` function.

  If this function fails for some reason, one of the followings will happen:

  - If the calling process does not trap exits, the process will exit with
    `reason`, where `reason` is any Elixir term describing the error
    information.
  - Otherwise, this function will return `{:error, reason}` and the calling
    process will receive `{:EXIT, pid, reason}` message.
  """
  @spec start_link(String.t(), options()) :: GenServer.on_start()
  def start_link(webroot, options \\ []) do
    # Options.parse/1 validates and normalizes; webroot rides along as an
    # extra keyword entry.
    parsed_opts = Options.parse([{:webroot, webroot} | options])
    Microscope.GenServer.start_link(parsed_opts)
  end

  @doc false
  # Cowboy handler shim: fetches the per-server handler state and delegates
  # the request to Microscope.Handler.
  @spec default_handler(GenServer.server(), :cowboy_req.req()) :: {:ok, :cowboy_req.req()}
  def default_handler(server, req) do
    handler_state = Microscope.GenServer.handler_state(server)
    {:ok, new_req, _state} = Microscope.Handler.init(req, handler_state)
    {:ok, new_req}
  end

  @doc "Stops the server specified by `pid`."
  @spec stop(GenServer.server(), timeout()) :: :ok
  def stop(pid, timeout \\ :infinity) do
    GenServer.stop(pid, :normal, timeout)
  end
end
|
lib/microscope.ex
| 0.90444 | 0.806967 |
microscope.ex
|
starcoder
|
defmodule GCPPS.Message do
@moduledoc """
Definition and functions to decode valid maps into Message structs.
"""
alias __MODULE__, as: Message
require Logger
@typedoc """
GooglePubSub Message struct.
"""
@type t :: %Message{}
defstruct attributes: %{},
data: %{},
message_id: nil
defmodule ParseError do
@moduledoc """
Exception to raise when the message parse fails.
"""
defexception [:message]
def new(message) do
%__MODULE__{message: message}
end
defimpl Plug.Exception, for: __MODULE__ do
def status(_exp), do: 422
end
end
@typedoc "an atom explaning the reason of an error"
@type reason :: atom
@typedoc "a function to parse a map into some struct"
@type parser :: (map -> struct)
@typedoc "options"
@type parse_opts :: keyword(parser)
@spec from_request(map, parse_opts) :: {:ok, Message.t()} | {:error, reason}
@doc """
Converts an already parsed GCPPS request JSON data map to a `GCPPS.Message` struct.
You'd better pass a function which parses the attributes map into a struct,
or otherwise the original string-key map will be placed in the value of `attributes` key.
### Options
- attr_parser
- a function which receives the `attributes` map and parses
- data_parser
- a function which receives the `data` map and parses
"""
def from_request(parsed_request, opts \\ [])
when parsed_request |> is_map and opts |> is_list() do
with %{
"message" => message,
"subscription" => subscription_uri
} <- parsed_request do
message = message |> Map.put_new("attributes", %{})
from_message(message, opts)
else
_ ->
{:error, :invalid_request_format}
end
end
@spec from_request!(map, parse_opts) :: Message.t() | no_return
@doc """
Converts an already parsed GCPPS request JSON data map to a `GCPPS.Message` struct.
This function will raise an error.
### Options
- attr_parser
- a function which receives the `attributes` map and parses
- data_parser
- a function which receives the `data` map and parses
"""
def from_request!(parsed_request, opts \\ [])
when parsed_request |> is_map and opts |> is_list() do
case from_request(parsed_request, opts) do
{:ok, message} ->
message
{:error, reason} ->
raise ParseError, message: reason |> to_string()
end
end
@spec from_message(map, parse_opts) :: {:ok, Message.t()} | {:error, reason}
@doc """
Converts an already parsed GCPPS message JSON data map to a `GCPPS.Message` struct.
"""
# Converts an already parsed GCPPS message map into a `GCPPS.Message`.
#
# The map must contain "attributes", "data" (base64-encoded JSON) and
# "message_id" keys. Options:
#   * :attr_parser - function applied to the attributes map (default:
#     identity). Passing `nil` explicitly skips parsing and yields `%{}`.
#   * :data_parser - function applied to the decoded data map (default:
#     identity). Passing `nil` explicitly skips parsing and yields `""`.
def from_message(parsed_message, opts \\ [])
    when is_map(parsed_message) and is_list(opts) do
  # Parsers default to the identity function; `nil` disables them.
  attr_parser = Keyword.get(opts, :attr_parser, & &1)
  data_parser = Keyword.get(opts, :data_parser, & &1)

  with %{
         "attributes" => attributes_map,
         "data" => encoded_data,
         "message_id" => message_id
       } <- parsed_message,
       {:ok, decoded_data} <- Base.decode64(encoded_data, padding: false),
       {:ok, data_map} <- Jason.decode(decoded_data) do
    # Apply user-supplied parsers; these may raise on malformed input,
    # which is the documented behavior.
    attributes = if attr_parser, do: attr_parser.(attributes_map), else: %{}
    data = if data_parser, do: data_parser.(data_map), else: ""

    {:ok,
     %Message{
       attributes: attributes,
       data: data,
       message_id: message_id
     }}
  else
    # `Base.decode64/2` returns a bare :error on invalid base64.
    :error ->
      {:error, :data_decoding_unprocessable}

    {:error, %Jason.DecodeError{}} ->
      {:error, :data_json_parse_error}

    _ ->
      {:error, :invalid_message_format}
  end
end
@spec from_message!(map, parse_opts) :: Message.t() | no_return
# Same as `from_message/2` but raises `ParseError` (with the error reason
# as its message) instead of returning an `{:error, reason}` tuple.
def from_message!(parsed_message, opts \\ [])
    when is_map(parsed_message) and is_list(opts) do
  case from_message(parsed_message, opts) do
    {:ok, message} ->
      message

    {:error, reason} ->
      raise ParseError, message: to_string(reason)
  end
end
end
|
apps/airport/lib/gcpps/message.ex
| 0.859649 | 0.476275 |
message.ex
|
starcoder
|
defmodule Rajska.QueryScopeAuthorization do
  @moduledoc """
  Absinthe middleware to perform query scoping.

  ## Usage

  [Create your Authorization module and add it and QueryAuthorization to your Absinthe.Schema](https://hexdocs.pm/rajska/Rajska.html#module-usage). Since Scope Authorization middleware must be used with Query Authorization, it is automatically called when adding the former. Then set the scoped module and argument field:

  ```elixir
  mutation do
    field :create_user, :user do
      arg :params, non_null(:user_params)

      # all does not require scoping, since it means anyone can execute this query, even without being logged in.
      middleware Rajska.QueryAuthorization, permit: :all
      resolve &AccountsResolver.create_user/2
    end

    field :update_user, :user do
      arg :id, non_null(:integer)
      arg :params, non_null(:user_params)

      middleware Rajska.QueryAuthorization, [permit: :user, scope: User] # same as [permit: :user, scope: User, args: :id]
      resolve &AccountsResolver.update_user/2
    end

    field :delete_user, :user do
      arg :user_id, non_null(:integer)

      # Providing a map for args is useful to map query argument to struct field.
      middleware Rajska.QueryAuthorization, [permit: [:user, :manager], scope: User, args: %{id: :user_id}]
      resolve &AccountsResolver.delete_user/2
    end

    input_object :user_params do
      field :id, non_null(:integer)
    end

    field :accept_user, :user do
      arg :params, non_null(:user_params)

      middleware Rajska.QueryAuthorization, [
        permit: :user,
        scope: User,
        args: %{id: [:params, :id]},
        rule: :accept_user
      ]
      resolve &AccountsResolver.invite_user/2
    end
  end
  ```

  In the above example, `:all` and `:admin` permissions don't require the `:scope` keyword, as defined in the `c:Rajska.Authorization.not_scoped_roles/0` function, but you can modify this behavior by overriding it.

  ## Options

  All the following options are sent to `c:Rajska.Authorization.has_user_access?/3`:

  * `:scope`
    - `false`: disables scoping
    - `User`: a module that will be passed to `c:Rajska.Authorization.has_user_access?/3`. It must define a struct.
  * `:args`
    - `%{user_id: [:params, :id]}`: where `user_id` is the scoped field and `id` is an argument nested inside the `params` argument.
    - `:id`: this is the same as `%{id: :id}`, where `:id` is both the query argument and the scoped field that will be passed to `c:Rajska.Authorization.has_user_access?/3`
    - `[:code, :user_group_id]`: this is the same as `%{code: :code, user_group_id: :user_group_id}`, where `code` and `user_group_id` are both query arguments and scoped fields.
  * `:optional` (optional) - when set to true the arguments are optional, so if no argument is provided, the query will be authorized. Defaults to false.
  * `:rule` (optional) - allows the same struct to have different rules. See `Rajska.Authorization` for `rule` default settings.
  """
  @behaviour Absinthe.Middleware

  alias Absinthe.Resolution
  alias Rajska.Introspection

  # An already-resolved resolution needs no further authorization work.
  def call(%Resolution{state: :resolved} = resolution, _config), do: resolution

  # `scope: false` as the last config entry explicitly disables scoping.
  def call(resolution, [_ | [scope: false]]), do: resolution

  def call(resolution, [{:permit, permission} | scope_config]) do
    # Roles returned by the auth module's `not_scoped_roles/0` (e.g. :all)
    # bypass scoping entirely.
    not_scoped_roles = Rajska.apply_auth_mod(resolution.context, :not_scoped_roles)

    case Enum.member?(not_scoped_roles, permission) do
      true -> resolution
      false -> scope_user!(resolution, scope_config)
    end
  end

  # Builds the scoped struct from the query arguments and asks the auth
  # module whether the current user may access it, updating the resolution
  # with an error when access is denied. Raises (via helpers) on
  # misconfiguration, e.g. a missing scope module or a missing argument.
  def scope_user!(%{context: context} = resolution, config) do
    default_rule = Rajska.apply_auth_mod(context, :default_rule)
    rule = Keyword.get(config, :rule, default_rule)
    scope = Keyword.get(config, :scope)
    arg_fields = config |> Keyword.get(:args, :id) |> arg_fields_to_map()
    optional = Keyword.get(config, :optional, false)
    arguments_source = get_arguments_source!(resolution, scope)

    arg_fields
    |> Enum.map(& get_scoped_struct_field(arguments_source, &1, optional, resolution.definition.name))
    |> Enum.reject(&is_nil/1)
    |> has_user_access?(scope, resolution.context, rule, optional)
    |> update_result(resolution)
  end

  # Normalizes the :args option into a %{scoped_field => query_argument} map.
  defp arg_fields_to_map(field) when is_atom(field), do: Map.new([{field, field}])
  defp arg_fields_to_map(fields) when is_list(fields), do: fields |> Enum.map(& {&1, &1}) |> Map.new()
  defp arg_fields_to_map(field) when is_map(field), do: field

  # A nil scope at this point is a configuration error (scoping was not
  # disabled with `scope: false` but no scope module was provided).
  defp get_arguments_source!(%Resolution{definition: %{name: name}}, nil) do
    raise "Error in query #{name}: no scope argument found in middleware Scope Authorization"
  end

  defp get_arguments_source!(%Resolution{arguments: args}, _scope), do: args

  # Resolves one {scoped_field, argument} pair to {scoped_field, value}.
  # Returns nil for a missing optional argument; raises when it is required.
  def get_scoped_struct_field(arguments_source, {scope_field, arg_field}, optional, query_name) do
    case get_scope_field_value(arguments_source, arg_field) do
      nil when optional === true -> nil
      nil when optional === false -> raise "Error in query #{query_name}: no argument #{inspect arg_field} found in #{inspect arguments_source}"
      field_value -> {scope_field, field_value}
    end
  end

  # A list is a path into nested arguments; an atom reads a top-level one.
  defp get_scope_field_value(arguments_source, fields) when is_list(fields), do: get_in(arguments_source, fields)
  defp get_scope_field_value(arguments_source, field) when is_atom(field), do: Map.get(arguments_source, field)

  # No scoped values and `optional: true` means access is granted outright.
  defp has_user_access?([], _scope, _context, _rule, true), do: true

  defp has_user_access?(scoped_struct_fields, scope, context, rule, _optional) do
    # Build an instance of the scope struct carrying only the scoped fields
    # and delegate the actual access decision to the auth module.
    scoped_struct = scope.__struct__(scoped_struct_fields)

    Rajska.apply_auth_mod(context, :context_user_authorized?, [context, scoped_struct, rule])
  end

  # Authorized: pass the resolution through unchanged.
  defp update_result(true, resolution), do: resolution

  # Unauthorized: resolve the field with the auth module's error message.
  defp update_result(false, %{context: context, definition: %{schema_node: %{type: object_type}}} = resolution) do
    object_type = Introspection.get_object_type(object_type)

    Resolution.put_result(
      resolution,
      {:error, Rajska.apply_auth_mod(context, :unauthorized_query_scope_message, [resolution, object_type])}
    )
  end
end
|
lib/middlewares/query_scope_authorization.ex
| 0.824991 | 0.79657 |
query_scope_authorization.ex
|
starcoder
|
defmodule Toolshed.Unix do
  @moduledoc """
  Helpers for when your fingers are too used to typing Unix
  commands.

  Helpers include:

  * `cat/1` - print out a file
  * `date/0` - print out the current date and time
  * `grep/2` - print out lines of a file that match a regular expression
  * `tree/1` - print out a directory tree
  * `uptime/0` - print the update of the Erlang VM
  """

  @doc """
  Print out a file
  """
  @spec cat(Path.t()) :: :"do not show this result in output"
  def cat(path) do
    path
    |> File.read!()
    |> IO.puts()

    IEx.dont_display_result()
  end

  @doc """
  Run a regular expression on a file and print the matching lines.

      iex> grep ~r/video/, "/etc/mime.types"
  """
  @spec grep(Regex.t(), Path.t()) :: :"do not show this result in output"
  def grep(regex, path) do
    path
    |> File.stream!()
    |> Enum.each(fn line ->
      if Regex.match?(regex, line), do: IO.write(line)
    end)

    IEx.dont_display_result()
  end

  @doc """
  Print out directories and files in tree form.
  """
  @spec tree(Path.t()) :: :"do not show this result in output"
  def tree(path \\ ".") do
    IO.puts(path)

    # Only recurse when the root is actually a directory.
    with {:directory, _} <- file_info(path, path) do
      do_tree("", path, files(path))
    end

    IEx.dont_display_result()
  end

  @doc """
  Print out the current uptime.
  """
  @spec uptime() :: :"do not show this result in output"
  def uptime() do
    :c.uptime()
    IEx.dont_display_result()
  end

  @doc """
  Print out the date similar to the Unix date command
  """
  @spec date() :: String.t()
  def date() do
    now = DateTime.utc_now()
    "#{weekday_text(now)} #{month_text(now)} #{now.day} #{time_text(now)} UTC #{now.year}"
  end

  # Three-letter abbreviations; indexed 1-based like the calendar values.
  @weekdays ~w(Mon Tue Wed Thu Fri Sat Sun)
  @months ~w(Jan Feb Mar Apr May Jun Jul Aug Sep Oct Nov Dec)

  defp weekday_text(dt) do
    # Date.day_of_week/1 returns 1 (Monday) through 7 (Sunday).
    index = dt |> DateTime.to_date() |> Date.day_of_week()
    Enum.at(@weekdays, index - 1)
  end

  defp month_text(dt), do: Enum.at(@months, dt.month - 1)

  defp time_text(dt) do
    dt
    |> DateTime.to_time()
    |> Time.truncate(:second)
    |> Time.to_string()
  end

  # Recursively print one directory level; `prefix` is iodata accumulating
  # the branch art for the current depth.
  defp do_tree(_prefix, _dir, []), do: :ok

  defp do_tree(prefix, dir, [{:directory, name} | rest]) do
    puts_tree_branch(prefix, name, rest)
    subdir = Path.join(dir, name)
    do_tree([prefix, tree_trunk(rest)], subdir, files(subdir))
    do_tree(prefix, dir, rest)
  end

  defp do_tree(prefix, dir, [{_other_type, name} | rest]) do
    puts_tree_branch(prefix, name, rest)
    do_tree(prefix, dir, rest)
  end

  defp puts_tree_branch(prefix, name, rest), do: IO.puts([prefix, tree_branch(rest), name])

  # The last entry of a level gets the closing corner; others get a tee.
  defp tree_branch([]), do: "└── "
  defp tree_branch(_), do: "├── "

  defp tree_trunk([]), do: "    "
  defp tree_trunk(_), do: "│   "

  # List a directory as {type, name} tuples.
  defp files(dir) do
    for name <- File.ls!(dir), do: file_info(Path.join(dir, name), name)
  end

  # lstat (not stat) so symlinks are reported as links, not followed.
  defp file_info(path, name) do
    %File.Stat{type: type} = File.lstat!(path)
    {type, name}
  end
end
|
lib/toolshed/unix.ex
| 0.726523 | 0.505737 |
unix.ex
|
starcoder
|
defmodule Remedy.Embed do
  @moduledoc """
  Convenience functions for working with embeds.

  Using the helpers in this module you can coneniently convert various objects into fields within an embed, allowing easier manipulation and more consistent embeds with less boilerplate.

  Consider you wish to create an embed regarding an action that a user took. We can use pre established objects to populate the embed fields. For example:

      user = %User{id: 1, name: "<NAME>"}
      message = %Message{text: "Hello World", timestamp: timestamp}

      %Embed{}
      |> put_author(user)
      |> put_timestamp(timestamp)
      |> put_colour("#F34AC3")
      |> put_title("User Silenced")
      |> put_description("User was silenced for breaking the rules.")

  It is recommended
  """
  use Remedy.Schema, :schema_alias

  @doc """
  Update the Author to a user or member.
  """
  @spec put_author(Embed.t(), Member.c() | User.c()) :: Embed.t()
  def put_author(embed, %{nick: nickname, user: user} = member) do
    # Prefer the member's guild avatar, falling back to the user avatar.
    icon = Member.avatar(member) || User.avatar(user)
    update(embed, author: %{icon_url: icon, name: nickname})
  end

  @doc """
  Add the bot as the author.
  """
  @spec put_author(Embed.t()) :: Embed.t()
  def put_author(embed),
    do: update(embed, author: %{name: "Remedy", icon_url: "https://cdn.discordapp.com/embed/avatars/0.png"})

  @doc """
  Update the embed colour.
  """
  @spec put_colour(Embed.t(), Colour.c()) :: Embed.t()
  def put_colour(embed, colour), do: update(embed, color: colour)

  @doc """
  Update the embed description.
  """
  @spec put_description(Embed.t(), String.t()) :: Embed.t()
  def put_description(embed, text), do: update(embed, description: text)

  @doc """
  Adds a field to the embed.
  """
  @spec put_field(Embed.t(), String.t(), String.t(), boolean() | nil) :: Embed.t()
  def put_field(%{fields: existing} = embed, name, value, inline \\ true) do
    # Fields render in insertion order, so append to the end of the list.
    update(embed, fields: existing ++ [%{name: name, value: value, inline: inline}])
  end

  @doc """
  Update the embeds timestamp.
  """
  @spec put_timestamp(Embed.t(), ISO8601.c()) :: Embed.t()
  def put_timestamp(embed, timestamp), do: update(embed, timestamp: timestamp)

  # Run the given changes through the Embed changeset and apply them.
  defp update(embed, changes) do
    embed
    |> Embed.changeset(Map.new(changes))
    |> Ecto.Changeset.apply_changes()
  end
end
|
lib/remedy/embed.ex
| 0.826572 | 0.422177 |
embed.ex
|
starcoder
|
# alias Scenic.Graph
# alias Scenic.Primitive
# alias FloUI.Scrollable.Direction
# alias FloUI.Scrollable.Drag
# alias FloUI.Scrollable.Wheel
# alias FloUI.Scrollable.PositionCap
# alias Scenic.Primitive.Style.Theme
# alias Scenic.Math.Vector2
# @moduledoc """
# The scroll bar component can be used to draw a scroll bar to the scene by adding it to the graph. The scroll bar is used internally by the `Scenic.Scrollable` component and for most cases it is recommended to use the `Scenic.Scrollable` component instead.
# The scroll bar can be setup to make use of scroll buttons at the scroll bars edges, in order to enable scrolling by pressing and holding such button, in addition to dragging the scroll bar slider control to drag, or clicking the slider background to jump.
# ## Data
# `t:Scenic.Scrollable.ScrollBar.settings/0`
# The scroll bar requires the following data for initialization:
# - width: number
# - height: number
# - content_size: number
# - scroll_position: number
# - direction: :horizontal | :vertical
# Width and height define the display size of the scroll bar.
# The content size defines the size of the scrollable content in the direction of the scroll bar. When the scroll bar is a horizontal scroll bar, the content size should correspond to the width of the content.
# The scroll position specifies the starting position of the scrollable content. Note that the scroll position corresponds to the translation of the content, rather than the scroll bar slider.
# The direction specifies if the scroll bar scrolls in horizontal, or in vertical direction.
# ## Styles
# `t:Scenic.Scrollable.ScrollBar.styles/0`
# Optional styles to customize the scroll bar. The following styles are supported:
# - scroll_buttons: boolean
# - scroll_bar_theme: map
# - scroll_bar_radius: number
# - scroll_bar_border: number
# - scroll_drag: `t:Scenic.Scrollable.Drag.settings/0`
# The scroll_buttons boolean can be used to specify of the scroll bar should contain buttons for scrolling, in addition to the scroll bar slider. The scroll buttons are not shown by default.
# A theme can be passed using the scroll_bar_theme element to provide a set of colors for the scroll bar. For more information on themes, see the `Scenic.Primitive.Style.Theme` module. The default theme is `:light`.
# The scroll bars rounding and border can be adjusted using the scroll_bar_radius and scroll_bar_border elements respectively. The default values are 3 and 1.
# The scroll_drag settings can be provided to specify by which mouse button the scroll bar slider can be dragged. By default, the `:left`, `:right` and `:middle` buttons are all enabled.
# ## Examples
# iex> graph = Scenic.Scrollable.Components.scroll_bar(
# ...> Scenic.Graph.build(),
# ...> %{
# ...> width: 200,
# ...> height: 10,
# ...> content_size: 1000,
# ...> scroll_position: 0,
# ...> direction: :horizontal
# ...> },
# ...> [id: :scroll_bar_component_1]
# ...> )
# ...> graph.primitives[1].id
# :scroll_bar_component_1
# iex> graph = Scenic.Scrollable.Components.scroll_bar(
# ...> Scenic.Graph.build(),
# ...> %{
# ...> width: 200,
# ...> height: 10,
# ...> content_size: 1000,
# ...> scroll_position: 0,
# ...> direction: :horizontal
# ...> },
# ...> [
# ...> scroll_buttons: true,
# ...> scroll_bar_theme: Scenic.Primitive.Style.Theme.preset(:dark),
# ...> scroll_bar_radius: 4,
# ...> scroll_bar_border: 1,
# ...> scroll_drag: %{
# ...> mouse_buttons: [:left, :right]
# ...> },
# ...> id: :scroll_bar_component_2
# ...> ]
# ...> )
# ...> graph.primitives[1].id
# :scroll_bar_component_2
# """
# @typedoc """
# Specifies the direction in which the scroll bar affects the content.
# A direction can be either :horizontal or :vertical.
# """
# @type scroll_direction :: Direction.direction()
# @typedoc """
# Data structure representing a vector 2, in the form of an {x, y} tuple.
# """
# @type v2 :: Scenic.Scrollable.v2()
# @typedoc """
# Data structure representing a rectangle.
# """
# @type rect :: Scenic.Scrollable.rect()
# @typedoc """
# The data required to initialize a scroll bar component.
# The scroll bar requires the following data for initialization:
# - width: number
# - height: number
# - content_size: number
# - scroll_position: number
# - direction: :horizontal | :vertical
# Width and height define the display size of the scroll bar.
# The content size defines the size of the scrollable content in the direction of the scroll bar. When the scroll bar is a horizontal scroll bar, the content size should correspond to the width of the content.
# The scroll position specifies the starting position of the scrollable content. Note that the scroll position corresponds to the translation of the content, rather than the scroll bar slider.
# The direction specifies if the scroll bar scrolls in horizontal, or in vertical direction.
# """
# @type settings :: %{
# width: number,
# height: number,
# content_size: number,
# scroll_position: number,
# direction: scroll_direction
# }
# @typedoc """
# Atom representing a mouse button.
# """
# @type mouse_button ::
# :left
# | :right
# | :middle
# @typedoc """
# The optional styles with which the scroll bar component can be customized. See this modules top section for a more detailed explanation of every style.
# """
# @type style ::
# {:scroll_buttons, boolean}
# # TODO use Scenic.Theme.t when/if it gets defined
# | {:scroll_bar_theme, %{}}
# | {:scroll_bar_radius, number}
# | {:scroll_bar_border, number}
# | {:scroll_drag, Drag.settings()}
# # TODO enable images as buttons
# @typedoc """
# A collection of optional styles with which the scroll bar component can be customized. See `t:Scenic.Scrollable.ScrollBar.style/0` and this modules top section for more information.
# """
# @type styles :: [style]
# @typedoc """
# A map containing information about the scroll button pressed states.
# """
# @type scroll_buttons :: %{
# scroll_button_1: :pressed | :released,
# scroll_button_2: :pressed | :released
# }
# @typedoc """
# The state with which the scroll bar components GenServer is running.
# """
# @type t :: %__MODULE__{
# id: atom,
# graph: Graph.t(),
# width: Direction.t(),
# height: Direction.t(),
# frame_size: Direction.t(),
# content_size: Direction.t(),
# scroll_position: Direction.t(),
# last_scroll_position: Direction.t(),
# direction: scroll_direction,
# drag_state: Drag.t(),
# wheel_state: Wheel.t(),
# position_cap: PositionCap.t(),
# scroll_buttons: :none | {:some, scroll_buttons},
# scroll_bar_slider_background: :pressed | :released,
# scroll_state: :idle | :wheel | :scrolling | :dragging,
# styles: styles,
# pid: pid
# }
# defstruct id: :scroll_bar,
# graph: Graph.build(),
# width: {:horizontal, 0},
# height: {:vertical, 0},
# frame_size: {:horizontal, 0},
# content_size: {:horizontal, 0},
# scroll_position: {:horizontal, 0},
# last_scroll_position: {:horizontal, 0},
# direction: :horizontal,
# drag_state: %Drag{},
# wheel_state: %Wheel{},
# position_cap: %PositionCap{},
# scroll_buttons: :none,
# scroll_bar_slider_background: :released,
# scroll_state: :idle,
# styles: [],
# pid: nil
# @default_drag_settings %{mouse_buttons: [:left, :right, :middle]}
# @default_button_radius 3
# @default_stroke_size 1
# @default_id :scroll_bar
# # PUBLIC API
# @doc """
# Find out the direction in which the content should be scrolled based on the scroll buttons currently being pressed.
# Although the scroll bar will move along a single axis, a vector 2 is returned to facilitate translation calculations of the content.
# """
# @spec direction(pid | t) :: v2
# def direction(pid) when is_pid(pid) do
# GenServer.call(pid, :direction)
# end
# def direction(%{
# scroll_buttons: {:some, %{scroll_button_1: :pressed, scroll_button_2: :released}},
# direction: direction
# }) do
# Direction.return(1, direction)
# |> Direction.to_vector_2()
# end
# def direction(%{
# scroll_buttons: {:some, %{scroll_button_1: :released, scroll_button_2: :pressed}},
# direction: direction
# }) do
# Direction.return(-1, direction)
# |> Direction.to_vector_2()
# end
# def direction(_), do: {0, 0}
# @doc """
# Find out if the scroll bar is currently being dragged by the user.
# """
# @spec dragging?(t) :: boolean
# def dragging?(state), do: Drag.dragging?(state.drag_state)
# @doc """
# Find the latest position the scrollable content should be updated with.
# The position corresponds to the contents translation, rather than the scroll bars drag control translation.
# """
# @spec new_position(t) :: v2
# def new_position(state) do
# scroll_position_vector2(state)
# end
# # CALLBACKS
# @impl Scenic.Scene
# def init(
# scene,
# %{width: width, height: height, content_size: content_size, direction: direction} =
# settings,
# opts
# ) do
# styles = Enum.into(opts || %{}, [])
# scroll_buttons = styles[:scroll_buttons] || false
# state =
# %__MODULE__{
# id: opts[:id] || @default_id,
# direction: direction,
# content_size: Direction.return(content_size, direction),
# frame_size: Direction.from_vector_2({width, height}, direction),
# scroll_position: Direction.return(settings.scroll_position, direction),
# last_scroll_position: Direction.return(settings.scroll_position, direction),
# drag_state: Drag.init(styles[:scroll_drag] || @default_drag_settings),
# scroll_buttons:
# OptionEx.from_bool(scroll_buttons, %{
# scroll_button_1: :released,
# scroll_button_2: :released
# }),
# styles: styles,
# pid: self()
# }
# |> init_size(width, height)
# |> init_position_cap()
# |> init_graph()
# scene =
# scene
# |> assign(state: state)
# |> push_graph(state.graph)
# send_parent_event(scene, {:scroll_bar_initialized, state.id, scene.assigns.state})
# {:ok, scene}
# end
# @impl Scenic.Component
# def validate(
# %{
# width: width,
# height: height,
# content_size: content_size,
# scroll_position: scroll_position,
# direction: direction
# } = settings
# )
# when is_number(width) and is_number(height) and is_number(content_size) and
# is_number(scroll_position) do
# if direction == :horizontal or direction == :vertical do
# {:ok, settings}
# else
# :invalid_input
# end
# end
# def validate(_), do: :invalid_input
# @impl Scenic.Scene
# def handle_input(
# {:cursor_button, {button, 1, _, position}},
# :scroll_bar_slider_drag_control,
# %{assigns: %{state: state}} = scene
# ) do
# state =
# state
# |> Map.update!(:drag_state, fn drag_state ->
# Drag.handle_mouse_click(
# drag_state,
# button,
# position,
# local_scroll_position_vector2(state)
# )
# end)
# |> update(scene)
# scene =
# scene
# |> assign(state: state)
# |> push_graph(state.graph)
# capture_input(scene, [:cursor_pos])
# # capture_input(scene, :cursor_pos)
# {:noreply, scene}
# end
# def handle_input(
# {:cursor_button, {_button, 0, _, _}},
# :scroll_button_1,
# %{assigns: %{state: state}} = scene
# ) do
# state =
# Map.update!(state, :scroll_buttons, fn scroll_buttons ->
# OptionEx.map(scroll_buttons, &%{&1 | scroll_button_1: :released})
# end)
# |> update(scene)
# :ok = send_parent_event(scene, {:scroll_bar_button_released, state.id, state})
# scene =
# scene
# |> assign(state: state)
# release_input(scene, :cursor_pos)
# {:noreply, scene}
# end
# def handle_input(
# {:cursor_button, {_button, 0, _, _}},
# :scroll_button_2,
# %{assigns: %{state: state}} = scene
# ) do
# state =
# Map.update!(state, :scroll_buttons, fn scroll_buttons ->
# OptionEx.map(scroll_buttons, &%{&1 | scroll_button_2: :released})
# end)
# |> update(scene)
# send_parent_event(scene, {:scroll_bar_button_released, state.id, state})
# scene =
# scene
# |> assign(state: state)
# release_input(scene, :cursor_pos)
# {:noreply, scene}
# end
# def handle_input(
# {:cursor_button, {button, 0, _, position}},
# :scroll_bar_slider_drag_control,
# %{assigns: %{state: state}} = scene
# ) do
# state =
# state
# |> Map.update!(:drag_state, fn drag_state ->
# Drag.handle_mouse_release(drag_state, button, position)
# end)
# |> update(scene)
# # :ok = send_parent_event(scene, {:scroll_bar_scroll_end, state.id, state})
# scene =
# scene
# |> assign(state: state)
# # push_graph(scene, state.graph)
# # release_input(scene, :cursor_pos)
# release_input(scene, :cursor_pos)
# {:noreply, scene}
# end
# def handle_input(
# {:cursor_button, {_button, 1, _, _}},
# :scroll_button_1,
# %{assigns: %{state: state}} = scene
# ) do
# state =
# Map.update!(state, :scroll_buttons, fn scroll_buttons ->
# OptionEx.map(scroll_buttons, &%{&1 | scroll_button_1: :pressed})
# end)
# |> update(scene)
# :ok = send_parent_event(scene, {:scroll_bar_button_pressed, state.id, state})
# scene =
# scene
# |> assign(state: state)
# {:noreply, scene}
# end
# def handle_input(
# {:cursor_button, {_button, 1, _, _}},
# :scroll_button_2,
# %{assigns: %{state: state}} = scene
# ) do
# state =
# Map.update!(state, :scroll_buttons, fn scroll_buttons ->
# OptionEx.map(scroll_buttons, &%{&1 | scroll_button_2: :pressed})
# end)
# |> Map.update!(:wheel_state, &Wheel.stop_scrolling(&1, {:horizontal, 0}))
# |> update(scene)
# :ok = send_parent_event(scene, {:cursor_scroll_stopped, state.id, state})
# :ok = send_parent_event(scene, {:scroll_bar_button_pressed, state.id, state})
# scene =
# scene
# |> assign(state: state)
# {:noreply, scene}
# end
# def handle_input(
# {:cursor_button, {_button, 1, _, _position}},
# :scroll_bar_slider_background,
# %{assigns: %{state: state}} = scene
# ) do
# state =
# %{state | scroll_bar_slider_background: :pressed}
# |> update(scene)
# scene =
# scene
# |> assign(state: state)
# {:noreply, scene}
# end
# def handle_input(
# {:cursor_button, {_button, 0, _, position}},
# :scroll_bar_slider_background,
# %{assigns: %{state: %{direction: direction} = state}} = scene
# ) do
# scroll_position =
# Direction.from_vector_2(position, direction)
# |> Direction.map_horizontal(fn pos -> pos - button_width(state) / 2 end)
# |> Direction.map_vertical(fn pos -> pos - button_height(state) / 2 end)
# scroll_position = local_to_world(state, scroll_position)
# state =
# state
# |> Map.put(:scroll_bar_slider_background, :released)
# |> Map.put(:last_scroll_position, state.scroll_position)
# |> Map.put(:scroll_position, scroll_position)
# |> Map.update!(:wheel_state, &Wheel.stop_scrolling(&1, {:horizontal, 0}))
# |> update(scene)
# :ok = send_parent_event(scene, {:cursor_scroll_stopped, state.id, state})
# scene =
# scene
# |> assign(state: state)
# # release_input(scene, :cursor_pos)
# release_input(scene, :cursor_pos)
# {:noreply, scene}
# end
# def handle_input(
# {:cursor_button, {_button, 1, _, _}},
# _,
# %{assigns: %{state: %{direction: direction} = state}} = scene
# ) do
# state =
# state
# |> Map.put(:scroll_bar_slider_background, :released)
# |> Map.update!(state, :scroll_buttons, fn scroll_buttons ->
# OptionEx.map(scroll_buttons, &%{&1 | scroll_button_1: :pressed})
# end)
# |> Map.update!(:wheel_state, &Wheel.stop_scrolling(&1, {:horizontal, 0}))
# |> Map.update!(state, :scroll_buttons, fn scroll_buttons ->
# OptionEx.map(scroll_buttons, &%{&1 | scroll_button_2: :pressed})
# end)
# :ok = send_parent_event(scene, {:cursor_scroll_stopped, state.id, state})
# scene =
# scene
# |> assign(state: state)
# |> update(scene)
# release_input(scene, :cursor_pos)
# {:noreply, scene}
# end
# def handle_input(
# {:cursor_button, {_button, 0, _, _}},
# _,
# %{assigns: %{state: %{direction: direction} = state}} = scene
# ) do
# state =
# state
# |> Map.put(:scroll_bar_slider_background, :released)
# |> Map.update!(state, :scroll_buttons, fn scroll_buttons ->
# OptionEx.map(scroll_buttons, &%{&1 | scroll_button_1: :pressed})
# end)
# |> Map.update!(:wheel_state, &Wheel.stop_scrolling(&1, {:horizontal, 0}))
# |> Map.update!(state, :scroll_buttons, fn scroll_buttons ->
# OptionEx.map(scroll_buttons, &%{&1 | scroll_button_2: :pressed})
# end)
# :ok = send_parent_event(scene, {:cursor_scroll_stopped, state.id, state})
# scene =
# scene
# |> assign(state: state)
# release_input(scene, :cursor_pos)
# {:noreply, scene}
# end
# def handle_input(
# {:cursor_pos, position},
# _,
# %{assigns: %{state: %{direction: direction} = state}} = scene
# ) do
# scroll_position =
# Direction.from_vector_2(position, direction)
# |> Direction.map_horizontal(fn pos -> pos - button_width(state) / 2 end)
# |> Direction.map_vertical(fn pos -> pos - button_height(state) / 2 end)
# scroll_position = local_to_world(state, scroll_position)
# state =
# state
# |> Map.update!(:drag_state, fn drag_state ->
# Drag.handle_mouse_move(drag_state, position)
# end)
# # |> Map.put(:scroll_bar_slider_background, :released)
# |> Map.put(:last_scroll_position, state.scroll_position)
# |> Map.put(:scroll_position, scroll_position)
# |> update(scene)
# scene =
# scene
# |> assign(state: state)
# {:noreply, scene}
# end
# def handle_input(event, id, scene) do
# release_input(scene, :cursor_pos)
# {:noreply, scene}
# end
# # no callback on the `Scenic.Scene` and no GenServer @behaviour, so impl will not work
# @spec handle_call(request :: term(), GenServer.from(), state :: term()) ::
# {:reply, reply :: term, new_state :: term}
# def handle_call({:update_scroll_position, position}, _, %{assigns: %{state: state}} = scene) do
# state =
# state
# |> Map.put(:last_scroll_position, state.scroll_position)
# |> Map.put(:scroll_position, Direction.return(position, state.direction))
# |> update_graph_drag_control_position
# scene =
# scene
# |> assign(state: state)
# push_graph(scene, state.graph)
# {:reply, :ok, scene}
# end
# def handle_call({:update_scroll_pos, position}, _, %{assigns: %{state: state}} = scene) do
# state =
# state
# |> Map.put(:last_scroll_position, state.scroll_position)
# |> Map.put(:scroll_position, Direction.return(position, state.direction))
# |> update_graph_drag_control_position
# scene =
# scene
# |> assign(state: state)
# push_graph(scene, state.graph)
# {:reply, :ok, scene}
# end
# def handle_call({:update_content_size, content_size}, _, %{assigns: %{state: state}} = scene) do
# state =
# %{state | content_size: Direction.return(content_size, state.direction)}
# |> update_graph_content_size
# |> init_position_cap()
# scene =
# scene
# |> assign(state: state)
# push_graph(scene, state.graph)
# {:reply, :ok, scene}
# end
# def handle_call(:direction, _, scene) do
# {:reply, direction(scene.assigns.state), scene}
# end
# def handle_call(msg, _, scene) do
# {:reply, {:error, {:unexpected_message, msg}}, scene}
# end
# def handle_cast(
# {:update_cursor_scroll, {{_, offset_y}, _}},
# %{assigns: %{state: %{direction: :vertical} = state}} = scene
# ) do
# scene =
# if Float.floor(offset_y) == 0 or Float.ceil(offset_y) == 0 do
# state =
# state
# |> Map.update!(:wheel_state, &Wheel.stop_scrolling(&1, {:vertical, 0}))
# # |> update(scene)
# :ok = send_parent_event(scene, {:cursor_scroll_stopped, state.id, state})
# assign(scene, state: state)
# else
# state =
# state
# |> Map.update!(:wheel_state, &Wheel.start_scrolling(&1, {:vertical, offset_y}))
# # |> update(scene)
# :ok = send_parent_event(scene, {:cursor_scroll_started, state.id, state})
# assign(scene, state: state)
# end
# {:noreply, scene}
# end
# def handle_cast(
# {:update_cursor_scroll, {{offset_x, _}, _}},
# %{assigns: %{state: %{direction: :horizontal} = state}} = scene
# ) do
# state =
# if Float.floor(offset_x) == 0 or Float.ceil(offset_x) == 0 do
# state =
# state
# |> Map.update!(:wheel_state, &Wheel.stop_scrolling(&1, {:horizontal, 0}))
# |> update(scene)
# :ok = send_parent_event(scene, {:cursor_scroll_stopped, state.id, state})
# state
# else
# state =
# state
# |> Map.update!(:wheel_state, &Wheel.start_scrolling(&1, {:horizontal, offset_x}))
# |> update(scene)
# :ok = send_parent_event(scene, {:cursor_scroll_started, state.id, state})
# state
# end
# scene =
# scene
# |> assign(state: state)
# # |> push_graph(state.graph)
# {:noreply, scene}
# end
# def handle_cast(:unrequest_cursor_scroll, scene) do
# release_input(scene, :cursor_scroll)
# {:noreply, scene}
# end
# def handle_cast(_, scene) do
# {:noreply, scene}
# end
# @impl Scenic.Scene
# def handle_event(_, _, scene), do: {:noreply, scene}
# # INITIALIZERS
# @spec init_size(t, width :: number, height :: number) :: t
# defp init_size(%{scroll_buttons: :none} = state, width, height) do
# state
# |> Map.put(:width, Direction.as_horizontal(width))
# |> Map.put(:height, Direction.as_vertical(height))
# end
# defp init_size(%{scroll_buttons: {:some, _}} = state, width, height) do
# width = Direction.as_horizontal(width)
# height = Direction.as_vertical(height)
# displacement =
# scroll_bar_displacement(
# state
# |> Map.put(:width, width)
# |> Map.put(:height, height)
# )
# button_size_difference = Direction.map(displacement, &(&1 * 2))
# state
# |> Map.put(:width, Direction.subtract(width, button_size_difference))
# |> Map.put(:height, Direction.subtract(height, button_size_difference))
# end
# @spec init_scroll_buttons(t) :: t
# defp init_scroll_buttons(%{scroll_buttons: :none} = state), do: state
# defp init_scroll_buttons(%{graph: graph, direction: direction} = state) do
# theme = state.styles[:scroll_bar_theme] || Theme.preset(:light)
# radius = state.styles[:scroll_bar_radius] || @default_button_radius
# size = scroll_button_size(state)
# {button_2_x, button_2_y} =
# button_2_position =
# Direction.return(size, direction)
# |> Direction.add(state.width)
# |> Direction.add(state.height)
# |> Direction.to_vector_2()
# graph
# |> rrect(
# {size, size, radius},
# id: :scroll_button_1_bg,
# translate: {0, -2},
# fill: theme.background,
# input: [:cursor_button]
# )
# |> FloUI.Icon.add_to_graph({:flo_ui, "icons/arrow_drop_down_white.png"},
# translate: {size * -1.05, size * -1.05 - 2},
# pin: {48 / 2, 48 / 2},
# rotate: :math.pi()
# )
# |> rrect(
# {size, size, radius},
# id: :scroll_button_1,
# translate: {0, -2},
# input: [:cursor_button]
# )
# |> rrect(
# {size, size, radius},
# id: :scroll_button_2_bg,
# translate: {button_2_x, button_2_y + 2},
# fill: theme.background,
# input: [:cursor_button]
# )
# |> FloUI.Icon.add_to_graph({:flo_ui, "icons/arrow_drop_down_white.png"},
# translate: Vector2.add(button_2_position, {size * -1.05, size * -1.05 + 2})
# )
# |> rrect(
# {size, size, radius},
# id: :scroll_button_2,
# translate: {button_2_x, button_2_y + 2},
# input: [:cursor_button]
# )
# |> (&%{state | graph: &1}).()
# end
# @spec init_position_cap(t) :: t
# defp init_position_cap(%{direction: direction} = state) do
# max =
# Direction.return(0, direction)
# |> Direction.add(state.width)
# |> Direction.add(state.height)
# |> Direction.map_horizontal(fn width ->
# width - button_width(state) + scroll_button_size(state)
# end)
# |> Direction.map_vertical(fn height ->
# height - button_height(state) + scroll_button_size(state)
# end)
# |> Direction.to_vector_2()
# min =
# scroll_bar_displacement(state)
# |> Direction.to_vector_2()
# Map.put(state, :position_cap, PositionCap.init(%{min: min, max: max}))
# end
# @spec init_graph(t) :: t
# defp init_graph(state) do
# width = Direction.unwrap(state.width)
# height = Direction.unwrap(state.height)
# theme = state.styles[:scroll_bar_theme] || Theme.preset(:light)
# radius = state.styles[:scroll_bar_radius] || @default_button_radius
# border = state.styles[:scroll_bar_border] || @default_stroke_size
# Map.update!(state, :graph, fn graph ->
# graph
# |> rrect(
# {width, height, radius},
# id: :scroll_bar_slider_background,
# fill: theme.border,
# stroke: {border, theme.background},
# translate: Direction.to_vector_2(scroll_bar_displacement(state)),
# input: :cursor_button
# )
# |> rrect(
# {button_width(state), button_height(state), radius},
# id: :scroll_bar_slider_drag_control,
# translate: local_scroll_position_vector2(state),
# fill: theme.background,
# input: :cursor_button
# )
# end)
# |> init_scroll_buttons
# end
# # UPDATERS
# @spec update(t, scene :: any) :: t
# defp update(state, scene) do
# state
# |> update_scroll_state
# |> update_scroll_position
# |> update_graph_drag_control_position
# |> update_control_colors
# |> send_position_change_event(scene)
# end
# @spec update_scroll_state(t) :: t
# defp update_scroll_state(state) do
# verify_scrolling(state)
# |> OptionEx.or_try(fn -> verify_dragging(state) end)
# |> OptionEx.or_else(:idle)
# |> (&%{state | scroll_state: &1}).()
# end
# @spec update_scroll_position(t) :: t
# defp update_scroll_position(%{direction: direction} = state) do
# Drag.new_position(state.drag_state)
# |> OptionEx.map(&Direction.from_vector_2(&1, direction))
# |> OptionEx.map(&Direction.map(&1, fn position -> local_to_world(state, position) end))
# |> OptionEx.map(&%{state | last_scroll_position: state.scroll_position, scroll_position: &1})
# |> OptionEx.or_else(state)
# end
# @spec update_graph_content_size(t) :: t
# defp update_graph_content_size(state) do
# update_graph_component(state, :scroll_bar_slider_drag_control, fn primitive ->
# Map.update(primitive, :data, %{}, fn {_, _, radius} ->
# {button_width(state), button_height(state), radius}
# end)
# end)
# end
# @spec update_graph_drag_control_position(t) :: t
# defp update_graph_drag_control_position(state) do
# update_graph_component(state, :scroll_bar_slider_drag_control, fn primitive ->
# Map.update(primitive, :transforms, %{}, fn transforms ->
# Map.put(transforms, :translate, local_scroll_position_vector2(state))
# end)
# end)
# end
# @spec update_graph(t, (Graph.t() -> Graph.t())) :: t
# defp update_graph(state, updater) do
# state
# |> Map.update!(:graph, updater)
# end
# @spec update_graph_component(t, id :: term, (Primitive.t() -> Primitive.t())) :: t
# defp update_graph_component(state, id, updater) do
# update_graph(state, fn graph ->
# Graph.modify(graph, id, updater)
# end)
# end
# @spec update_control_colors(t) :: t
# defp update_control_colors(state) do
# theme = state.styles[:scroll_bar_theme] || Theme.preset(:light)
# drag_control_color =
# Drag.dragging?(state.drag_state)
# |> OptionEx.from_bool(theme.active)
# |> OptionEx.or_else(theme.background)
# scroll_bar_slider_background_color =
# OptionEx.from_bool(state.scroll_bar_slider_background == :pressed, theme.text)
# |> OptionEx.or_else(theme.border)
# graph =
# state.graph
# |> Graph.modify(
# :scroll_bar_slider_drag_control,
# &Primitive.put_style(&1, :fill, drag_control_color)
# )
# |> Graph.modify(
# :scroll_bar_slider_background,
# &Primitive.put_style(&1, :fill, scroll_bar_slider_background_color)
# )
# graph =
# state.scroll_buttons
# |> OptionEx.map(fn scroll_buttons ->
# button1_color =
# OptionEx.from_bool(scroll_buttons.scroll_button_1 == :pressed, theme.active)
# |> OptionEx.or_else(theme.background)
# button2_color =
# OptionEx.from_bool(scroll_buttons.scroll_button_2 == :pressed, theme.active)
# |> OptionEx.or_else(theme.background)
# graph
# |> Graph.modify(:scroll_button_1_bg, &Primitive.put_style(&1, :fill, button1_color))
# |> Graph.modify(:scroll_button_2_bg, &Primitive.put_style(&1, :fill, button2_color))
# end)
# |> OptionEx.or_else(graph)
# Map.put(state, :graph, graph)
# end
# @spec verify_scrolling(t) :: {:some, :scrolling} | :none
# defp verify_scrolling(%{scroll_buttons: {:some, buttons}} = state) do
# OptionEx.from_bool(buttons.scroll_button_1 == :pressed, :scrolling)
# |> OptionEx.or_try(fn ->
# OptionEx.from_bool(buttons.scroll_button_2 == :pressed, :scrolling)
# end)
# |> OptionEx.or_try(fn ->
# OptionEx.from_bool(state.wheel_state == :scrolling, :scrolling)
# end)
# end
# defp verify_scrolling(_), do: :none
# @spec verify_dragging(t) :: {:some, :dragging} | :none
# defp verify_dragging(state) do
# OptionEx.from_bool(Drag.dragging?(state.drag_state), :dragging)
# end
# # UTILITY
# # MEMO: scrolling using directional buttons will only set the direction, the position of the scroll controls will be updated by the :update_scroll_position call called back by the scrollable component
# @spec send_position_change_event(t, scene :: any) :: t
# defp send_position_change_event(%{scroll_state: :scrolling} = state, _scene), do: state
# defp send_position_change_event(
# %{last_scroll_position: last, scroll_position: current} = state,
# scene
# ) do
# OptionEx.from_bool(last != current, state)
# |> OptionEx.map(fn state ->
# :ok = send_parent_event(scene, {:scroll_bar_position_change, state.id, state})
# state
# end).()
# |> OptionEx.or_else(state)
# end
# # SIZE CALCULATIONS
# @spec scroll_button_size(any) :: number
# defp scroll_button_size(%{scroll_buttons: :none}), do: 0
# defp scroll_button_size(%{width: width, height: height, direction: direction}) do
# Direction.return(1, direction)
# |> Direction.invert()
# |> Direction.multiply(width)
# |> Direction.multiply(height)
# |> Direction.unwrap()
# end
# @spec button_width(t) :: number
# defp button_width(%{direction: :horizontal} = state) do
# Direction.divide(state.frame_size, state.content_size)
# |> Direction.multiply(state.width)
# |> Direction.unwrap()
# end
# defp button_width(state), do: Direction.unwrap(state.width)
# @spec button_height(t) :: number
# defp button_height(%{direction: :vertical} = state) do
# Direction.divide(state.frame_size, state.content_size)
# |> Direction.multiply(state.height)
# |> Direction.unwrap()
# end
# defp button_height(state), do: Direction.unwrap(state.height)
# @spec width_factor(t) :: number
# defp width_factor(%{content_size: {:horizontal, size}, width: {_, width}}) do
# width / size
# end
# defp width_factor(_), do: 1
# @spec height_factor(t) :: number
# defp height_factor(%{content_size: {:vertical, size}, height: {_, height}}) do
# height / size
# end
# defp height_factor(_), do: 1
# # POSITION CALCULATIONS
# @spec scroll_bar_displacement(t) :: Direction.t()
# defp scroll_bar_displacement(%{direction: direction} = state) do
# scroll_button_size(state)
# |> Direction.return(direction)
# end
# @spec scroll_position_vector2(t) :: v2
# defp scroll_position_vector2(state) do
# Direction.to_vector_2(state.scroll_position)
# end
# @spec local_scroll_position_vector2(t) :: v2
# defp local_scroll_position_vector2(state) do
# world_to_local(state, scroll_position_vector2(state))
# end
# @spec local_to_world(t, Direction.t() | number | v2) :: Direction.t() | number | v2
# defp local_to_world(%{direction: :horizontal} = state, {:horizontal, x}) do
# {:horizontal, local_to_world(state, x)}
# end
# defp local_to_world(%{direction: :vertical} = state, {:vertical, y}) do
# {:vertical, local_to_world(state, y)}
# end
# defp local_to_world(_, {:horizontal, _}), do: {:horizontal, 0}
# defp local_to_world(_, {:vertical, _}), do: {:vertical, 0}
# defp local_to_world(state, {x, y}) do
# {local_to_world(state, x), local_to_world(state, y)}
# end
# defp local_to_world(_, 0), do: 0
# defp local_to_world(%{direction: :horizontal} = state, x) do
# {x, _} = PositionCap.cap(state.position_cap, {x, 0})
# -(x - scroll_button_size(state)) / width_factor(state)
# end
# defp local_to_world(%{direction: :vertical} = state, y) do
# {_, y} = PositionCap.cap(state.position_cap, {0, y})
# -(y - scroll_button_size(state)) / height_factor(state)
# end
# @spec world_to_local(t, number | v2) :: number | v2
# defp world_to_local(%{direction: direction} = state, {x, y}) do
# position =
# Direction.from_vector_2({x, y}, direction)
# |> Direction.map(&world_to_local(state, &1))
# |> Direction.to_vector_2()
# PositionCap.cap(state.position_cap, position)
# end
# defp world_to_local(%{direction: :horizontal} = state, x),
# do: -x * width_factor(state) + scroll_button_size(state)
# defp world_to_local(%{direction: :vertical} = state, y),
# do: -y * height_factor(state) + scroll_button_size(state)
# end
|
lib/scrollbar/scroll_bar.ex
| 0.846863 | 0.480844 |
scroll_bar.ex
|
starcoder
|
defmodule Uplink do
  @moduledoc """
  A simple abstraction for standardized observability with telemetry and more.

  Uplink provides a simple abstraction for configuring observability for libraries
  and applications. The heart of Uplink is the `Uplink.Monitor` which provides a
  standard template for common observability needs such as creating Telemetry.Metrics
  definitions, telemetry pollers, or setting up other custom observability requirements.

  The most common challenge when getting started with telemetry is understanding where
  to start, what all the libraries do, and how they all fit together. This creates a
  high learning curve while leaving a huge gap at organizations where most developers
  just want a simple "drop-in" solution for observability that meets the org's requirements.

  Uplink can be used on its own in simple personal projects or as the basis for a standard
  "drop-in" library with a simple abstraction containing standard monitors for your
  organization, such as your Telemetry.Metrics reporter of choice, BEAM VM measurements,
  or Phoenix measurements, all conforming to your org's metric naming conventions or
  other standard practices.

  ## Usage

  ```
  # application supervisor
  children = [
    {
      Uplink, [
        monitors: [
          {MyMonitors.Ecto, [:my_repo]},
          Uplink.Monitors.VM
        ],
        pollers: [
          {10, [{TestModule, :test_emitter, []}]}
        ],
        metric_definitions: [
          Telemetry.Metrics.counter("poller.test.event.lasers")
        ],
        reporters: [
          Telemetry.Metrics.ConsoleReporter
        ]
      ]
    }
  ]
  ```
  """

  @typedoc """
  An MFA tuple for the poller to execute.
  """
  @type measurement :: {module :: module(), function :: atom(), args :: keyword()}

  @typedoc """
  A list of `t:Telemetry.Metrics.t/0` definitions.
  """
  @type metric_definitions :: [Telemetry.Metrics.t()]

  @typedoc """
  A module or two-element tuple consisting of a module and arguments to be
  supplied to the monitor. Any arguments passed are passed to all callbacks
  in the monitor.
  """
  @type monitor :: module() | {module :: module(), args :: keyword()}
  @type monitors :: [monitor()]

  @typedoc """
  Time in ms between poller executions.

  Example: `:timer.seconds(5)`
  """
  # move to :telemetry_poller.period() once released https://github.com/beam-telemetry/telemetry_poller/pull/50
  @type period :: pos_integer()

  @typedoc """
  A shorthand specification for listing a number of pollers to be executed at
  a shared interval.

  Example: `{:timer.seconds(5), [{MyApp.Telemetry, :emit_stats, []}]}`
  """
  @type poller_spec :: {period(), [measurement()]}
  @type poller_specs :: [poller_spec()]

  @typedoc """
  A module or two-element tuple consisting of a reporter and arguments to be
  passed to the reporter.

  Example: `{TelemetryMetricsPrometheus, [port: 9568]}`
  """
  @type reporter_spec :: module() | {module :: module(), args :: keyword()}
  @type reporter_specs :: [reporter_spec()]

  @type option ::
          {:metric_definitions, metric_definitions()}
          | {:monitors, monitors()}
          | {:poller_specs, poller_specs()}
          | {:reporters, reporter_specs()}

  @typedoc """
  Valid options. No options are required, but at least one monitor and/or
  reporter is the minimum required to do anything.

  * `:metric_definitions` - a list of additional `t:Telemetry.Metrics.t/0` definitions not
    exposed by monitors.
  * `:monitors` - a list of `t:monitor/0` to use.
  * `:poller_specs` - a list of additional `t:poller_spec/0` not exposed by monitors.
  * `:reporters` - a list of `Telemetry.Metrics` reporters, usually 0 or 1.
  """
  @type options :: [option()]

  @doc """
  Returns the child spec for running Uplink under a supervisor.

  Example:

      children = [
        {Uplink, options}
      ]

  See `t:options/0` for a list of available options.
  """
  @spec child_spec(options()) :: Supervisor.child_spec()
  def child_spec(opts) do
    # Derive the child id from the configured `:name` (default `:uplink`) so
    # several Uplink instances can coexist under one supervisor.
    id =
      case Keyword.get(opts, :name, :uplink) do
        name when is_atom(name) -> name
        {:global, name} -> name
        {:via, _, name} -> name
      end

    spec = %{
      id: id,
      start: {Uplink.Supervisor, :start_link, [opts]},
      type: :supervisor
    }

    Supervisor.child_spec(spec, [])
  end
end
|
lib/uplink.ex
| 0.927042 | 0.936807 |
uplink.ex
|
starcoder
|
defmodule Silicon do
  @moduledoc """
  Silicon wraps a collection of Elixir/Erlang crypto packages behind one API.

  ## Wrapped packages

    * `{:libdecaf, "~> 1.0"}` — Ed25519 and SHA3
    * `{:keccakf1600, "~> 2.0", hex: :keccakf1600_orig}` — original Keccak
    * `{:libsecp256k1, "~> 0.1.10"}` — Secp256k1
    * `{:blake2_elixir, git: "https://github.com/riverrun/blake2_elixir.git"}` — Blake2b
    * `:crypto` — everything else

  ## Test vectors

  The methods exposed by Silicon are tested against many published vectors.

  ### AES

    * AES-CBC: `https://csrc.nist.gov/projects/cryptographic-algorithm-validation-program/block-ciphers#AES`
    * AES-CBC-PKCS7: `https://raw.githubusercontent.com/google/wycheproof/master/testvectors/aes_cbc_pkcs5_test.json`
    * AES-GCM: `https://csrc.nist.gov/Projects/Cryptographic-Algorithm-Validation-Program/CAVP-TESTING-BLOCK-CIPHER-MODES#GCMVS`
    * AES-CTR: taken from `https://github.com/pyca/cryptography`, i.e. the RFC 3686 vectors

  ### Hash functions

    * SHA3: `https://csrc.nist.gov/Projects/cryptographic-algorithm-validation-program/Secure-Hashing`
    * Original Keccak: `https://keccak.team/archives.html`, the known-answer and Monte Carlo results
    * Blake2b: `https://github.com/jedisct1/crypto-test-vectors` and `https://pynacl.readthedocs.io/en/latest/vectors/blake2_vectors/`
    * MD5/SHA2: `https://cryptii.com`
    * HASH160: `https://bitcoinprices.org/public-key-to-hash/`
    * RIPEMD160: `https://www.browserling.com/tools/ripemd160-hash`

  ### HMAC

    * HMAC_SHA256/SHA512: `https://cryptii.com`

  ### Ed25519

    * `https://ed25519.cr.yp.to/python/sign.input`

  ### Secp256k1

    * `https://github.com/btccom/secp256k1-go`
    * `https://github.com/google/wycheproof/tree/master/testvectors`

  Of the Wycheproof vectors, only the ECDH ones are tested. Some ECDSA cases
  may fail because Bitcoin's secp256k1 is stricter — see an example at
  `https://github.com/bitcoin-core/secp256k1/issues/609`. There is currently
  no good way to distinguish those cases
  (`https://github.com/google/wycheproof/issues/70`).
  """
end
|
lib/silicon.ex
| 0.826852 | 0.53279 |
silicon.ex
|
starcoder
|
defmodule Car5g do
  @moduledoc """
  GenServer bridge that subscribes to vehicle signals over gRPC and
  periodically forwards them, JSON-encoded, to a TCP connection
  (`Car5g.Client`).

  Signal payloads are converted to the wire units documented below and
  accumulated in `signal_map`, which is flushed every `@intervall_in_ms`
  milliseconds.
  """

  use GenServer
  require Logger

  defstruct [
    :signal_pid,
    :recorded_file,
    :name,
    :tcp_connection,
    signal_map: %{}
  ]

  @doc """
  Starts the server registered under `name`, forwarding data to `tcp_server`.
  """
  def start_link({name, tcp_server}) do
    state = %__MODULE__{tcp_connection: tcp_server, name: name}
    GenServer.start_link(__MODULE__, state, name: name)
  end

  # When false, post_data_to_endpoint/1 is a no-op (useful for local testing).
  @send_data false
  @namespace "VirtualCanInterface"
  # @flexray_namespace "Flexray"

  # Signal ids this server subscribes to / reads.
  @signals (
    signal1 = Base.SignalId.new(name: "BenchC_a", namespace: Base.NameSpace.new(name: @namespace))
    signal2 = Base.SignalId.new(name: "BenchC_b", namespace: Base.NameSpace.new(name: @namespace))
    signal3 = Base.SignalId.new(name: "BenchC_c_1", namespace: Base.NameSpace.new(name: @namespace))
    # signal4 = Base.SignalId.new(name: "BenchC_c_3", namespace: Base.NameSpace.new(name: @flexray_namespace))
    # signal5 = Base.SignalId.new(name: "BenchC_c_4", namespace: Base.NameSpace.new(name: @flexray_namespace))
    signal6 = Base.SignalId.new(name: "BenchC_c_2", namespace: Base.NameSpace.new(name: @namespace))
    [signal1, signal2, signal3, signal6]
  )

  @intervall_in_ms 100

  @impl true
  def init(state) do
    :ssl.start()
    # Subscribe in a separate process so init/1 returns promptly; updates
    # arrive as {:subscription_arrived, signals} casts.
    spawn(__MODULE__, :subscribe_to_signals, [self(), @signals])
    # schedule_work(@intervall_in_ms, :grpc_read)
    schedule_work(@intervall_in_ms, :grpc_subscribe)
    {:ok, state}
  end

  defp schedule_work(intervall_in_ms, func),
    do: Process.send_after(self(), func, intervall_in_ms)

  # Wire-format unit notes for the receiver:
  #   speed         0.01 m/s      — speed of the RU in the "heading" direction
  #   acceleration  0.1 m/s²      — acceleration in the "heading" direction
  #   yaw_rate      0.1 degrees/s — heading change; sign indicates direction
  #
  # Expected message shape:
  #   "position":{"latitude":<int32>, "longitude":<int32>},
  #   "position_type":<string>,
  #   "heading":<uint16>,
  #   "speed":<uint16>,
  #   "acceleration":<int16>,
  #   "yaw_rate":<int16>

  @doc "Degrees per radian (180 / pi)."
  def convert_constant() do
    180 / :math.pi()
  end

  @doc "Converts rad/s to the wire unit of 0.1 degrees/s."
  def radpers_todefpers(radians_per_second) do
    radians_per_second * convert_constant() * 10
  end

  @doc """
  Connects to the signal broker and streams updates to `dest` as
  `{:subscription_arrived, signals}` casts; re-subscribes on errors.
  """
  def subscribe_to_signals(dest, signals) do
    {:ok, channel} = GRPC.Stub.connect("localhost:50051")
    signal_ids = Base.SignalIds.new(signalId: signals)
    Logger.debug("signals #{inspect(signals)}")

    request =
      Base.SubscriberConfig.new(
        clientId: Base.ClientId.new(id: "grpc-client"),
        signals: signal_ids,
        on_change: false
      )

    # https://github.com/tony612/grpc-elixir/issues/88
    {:ok, stream} = Base.NetworkService.Stub.subscribe_to_signals(channel, request, timeout: :infinity)

    Enum.each(stream, fn response ->
      case response do
        {:ok, %Base.Signals{signal: signals}} ->
          GenServer.cast(dest, {:subscription_arrived, signals})

        {:error, %GRPC.RPCError{message: message, status: status}} ->
          Logger.debug("bang #{inspect(message)} #{inspect(status)}")
          # NOTE(review): re-subscribing from inside the stream iteration
          # nests connections rather than replacing them — confirm intended.
          subscribe_to_signals(dest, signals)

        general ->
          # Was `Logger.debug "inspect general"` — a literal string; it was
          # clearly meant to log the unexpected response itself.
          Logger.debug("unexpected response: #{inspect(general)}")
          subscribe_to_signals(dest, signals)
      end
    end)
  end

  @impl true
  def handle_cast({:subscription_arrived, signals}, state) do
    # Fold each arriving signal into the accumulated signal map.
    update_map =
      Enum.reduce(signals, state.signal_map, fn %Base.Signal{id: %Base.SignalId{name: name}, payload: payload}, acc ->
        pack_map(name, payload, acc)
      end)

    {:noreply, %__MODULE__{state | signal_map: update_map}}
  end

  @doc """
  Maps a raw signal `name`/`payload` onto the outgoing JSON keys, applying
  the unit conversions documented above. Unknown names are ignored.
  """
  def pack_map(name, payload, acc) do
    case name do
      "BenchC_a" -> update_map(acc, "speed", extract_payload(payload), &(&1 * 100))
      "BenchC_b" -> update_map(acc, "acceleration", extract_payload(payload), &(&1 * 10))
      "BenchC_c_1" -> update_map(acc, "yaw_rate", extract_payload(payload), &radpers_todefpers/1)
      "BenchC_c_3" -> update_map(acc, "position.latitude", extract_payload(payload))
      "BenchC_c_4" -> update_map(acc, "position.longitude", extract_payload(payload))
      "BenchC_c_2" -> update_map(acc, "test_data", extract_payload(payload))
      _ -> acc
    end
  end

  @doc """
  Polling alternative to the subscription: reads the current signal values
  once and dispatches them.
  """
  def read_grpc(state) do
    {:ok, channel} = GRPC.Stub.connect("localhost:50051")
    # NOTE(review): key is :signal_id here but :signalId in
    # subscribe_to_signals/2 — confirm the generated struct's field name.
    request = Base.SignalIds.new(signal_id: @signals)
    response = Base.NetworkService.Stub.read_signals(channel, request)
    {:ok, %Base.Signals{signal: signals}} = response

    response_map =
      Enum.reduce(signals, %{}, fn %Base.Signal{id: %Base.SignalId{name: name}, payload: payload}, acc ->
        pack_map(name, payload, acc)
      end)

    dispatch_data(response_map, state)
  end

  # Static fields merged into every outgoing message.
  @default_info %{"type" => "override", "origin" => "can_gateway"}

  @doc "Stamps the message with the current system time in microseconds."
  def add_timestamp(response_map) do
    Map.put(response_map, "timestamp", System.system_time(:microsecond))
  end

  @doc "Merges defaults, adds a timestamp, JSON-encodes and sends over TCP."
  def dispatch_data(response_map, state) do
    response_map_with_defaults = Map.merge(response_map, @default_info)
    {:ok, encoded_map} = Poison.encode(add_timestamp(response_map_with_defaults))
    Car5g.Client.send_message(state.tcp_connection, encoded_map)
  end

  @doc """
  Puts `conversion.(value)` under `name` in `map`; no-op when `value` is
  `:empty`. `conversion` defaults to the identity function.
  """
  def update_map(map, name, value, conversion \\ & &1) do
    case value do
      :empty -> map
      value -> Map.put(map, name, conversion.(value))
    end
  end

  @doc """
  Unwraps a signal payload to a number, or `:empty` when there is no value.
  """
  def extract_payload(payload) do
    case payload do
      {:empty, true} -> :empty
      # this application is not interested in arbitration.
      # NOTE(review): one-element tuple — confirm the arbitration payload shape.
      {:arbitration} -> :empty
      {:double, value} -> value
      {:integer, value} -> value
    end
  end

  @impl true
  def handle_info(:grpc_read = worker, state) do
    schedule_work(@intervall_in_ms, worker)
    read_grpc(state)
    {:noreply, state}
  end

  def handle_info(:grpc_subscribe = worker, state) do
    schedule_work(@intervall_in_ms, worker)
    dispatch_data(state.signal_map, state)
    {:noreply, state}
  end

  # this is a test url, go visit it on the web, potentially generate your own.
  @url 'http://requestbin.fullcontact.com/znfw38zn'

  @doc """
  Best-effort POST of `body` (a JSON string) to the demo endpoint.
  No-op unless `@send_data` is true.
  """
  def post_data_to_endpoint(body) do
    if @send_data do
      # use this to post params.
      {:ok, {{'HTTP/1.1', 200, 'OK'}, _headers, _body}} =
        :httpc.request(:post, {@url, [], 'application/json', String.to_charlist(body)}, [], [])
    end
  end
end
|
examples/grpc/elixir/car5g/lib/car5g.ex
| 0.522446 | 0.414484 |
car5g.ex
|
starcoder
|
defmodule OptionParser do
@moduledoc """
This module contains functions to parse command line options.
"""
@type argv :: [String.t]
@type parsed :: Keyword.t
@type errors :: [{String.t, String.t | nil}]
@type options :: [switches: Keyword.t, strict: Keyword.t, aliases: Keyword.t]
defmodule ParseError do
  @moduledoc """
  Raised by `parse!/2` and `parse_head!/2` when the given command line
  contains one or more invalid options.
  """
  defexception [:message]
end
@doc """
Parses `argv` into a keyword list.
It returns a three-element tuple with the form `{parsed, args, invalid}`, where:
* `parsed` is a keyword list of parsed switches with `{switch_name, value}`
tuples in it; `switch_name` is the atom representing the switch name while
`value` is the value for that switch parsed according to `opts` (see the
"Examples" section for more information)
* `args` is a list of the remaining arguments in `argv` as strings
* `invalid` is a list of invalid options as `{option_name, value}` where
`option_name` is the raw option and `value` is `nil` if the option wasn't
expected or the string value if the value didn't have the expected type for
the corresponding option
Elixir converts switches to underscored atoms, so `--source-path` becomes
`:source_path`. This is done to better suit Elixir conventions. However, this
means that switches can't contain underscores and switches that do contain
underscores are always returned in the list of invalid options.
Without any options, this function will try to parse all switches in the `argv`.
iex> OptionParser.parse(["--debug"])
{[debug: true], [], []}
iex> OptionParser.parse(["--source", "lib"])
{[source: "lib"], [], []}
iex> OptionParser.parse(["--source-path", "lib", "test/enum_test.exs", "--verbose"])
{[source_path: "lib", verbose: true], ["test/enum_test.exs"], []}
Switches followed by a value will be assigned the value, as a string.
Switches without an argument, like `--debug` in the examples above, will
automatically be set to `true`.
## Options
The following options are supported:
* `:switches` or `:strict` - see the "Switch definitions" section below
* `:aliases` - see the "Aliases" section below
## Switch definitions
Often it is better to explicitly list the known
switches and their formats. The switches can be specified via one of two
options:
* `:switches` - defines some switches and their types. This function
still attempts to parse switches that are not in this list.
* `:strict` - defines strict switches. Any switch in `argv` that is not
specified in the list is returned in the invalid options list.
Both these options accept a keyword list of `{name, type}` tuples where `name`
is an atom defining the name of the switch and `type` is an atom that
specifies the type for the value of this switch (see the "Types" section below
for the possible types and more information about type casting).
Note that you should only supply the `:switches` or the`:strict` option.
If you supply both, an `ArgumentError` exception will be raised.
### Types
Switches parsed by `OptionParser` may take zero or one arguments.
The following switches types take no arguments:
* `:boolean` - sets the value to `true` when given (see also the
"Negation switches" section below)
* `:count` - counts the number of times the switch is given
The following switches take one argument:
* `:integer` - parses the value as an integer
* `:float` - parses the value as a float
* `:string` - parses the value as a string
If a switch can't be parsed according to the given type, it is returned
in the invalid options list.
### Modifiers
Switches can be specified with modifiers, which change how
they behave. The following modifiers are supported:
* `:keep` - keeps duplicated items instead of overriding them; works with
all types except `:count`. Specifying `switch_name: :keep` assumes the
type of `:switch_name` will be `:string`.
Note that if you want to use `:keep` with a type other than `:string`, use a list
as the type for the switch. For example: `[foo: [:integer, :keep]]`.
### Negation switches
In case a switch `SWITCH` is specified to have type `:boolean`, it may be
passed as `--no-SWITCH` as well which will set the option to `false`:
iex> OptionParser.parse(["--no-op", "path/to/file"], switches: [op: :boolean])
{[op: false], ["path/to/file"], []}
## Aliases
A set of aliases can be specified in the `:aliases` option:
iex> OptionParser.parse(["-d"], aliases: [d: :debug])
{[debug: true], [], []}
## Examples
Here are some examples of working with different types and modifiers:
iex> OptionParser.parse(["--unlock", "path/to/file"], strict: [unlock: :boolean])
{[unlock: true], ["path/to/file"], []}
iex> OptionParser.parse(["--unlock", "--limit", "0", "path/to/file"],
...> strict: [unlock: :boolean, limit: :integer])
{[unlock: true, limit: 0], ["path/to/file"], []}
iex> OptionParser.parse(["--limit", "3"], strict: [limit: :integer])
{[limit: 3], [], []}
iex> OptionParser.parse(["--limit", "xyz"], strict: [limit: :integer])
{[], [], [{"--limit", "xyz"}]}
iex> OptionParser.parse(["--verbose"], switches: [verbose: :count])
{[verbose: 1], [], []}
iex> OptionParser.parse(["-v", "-v"], aliases: [v: :verbose], strict: [verbose: :count])
{[verbose: 2], [], []}
iex> OptionParser.parse(["--unknown", "xyz"], strict: [])
{[], ["xyz"], [{"--unknown", nil}]}
iex> OptionParser.parse(["--limit", "3", "--unknown", "xyz"],
...> switches: [limit: :integer])
{[limit: 3, unknown: "xyz"], [], []}
iex> OptionParser.parse(["--unlock", "path/to/file", "--unlock", "path/to/another/file"], strict: [unlock: :keep])
{[unlock: "path/to/file", unlock: "path/to/another/file"], [], []}
"""
@spec parse(argv, options) :: {parsed, argv, errors}
def parse(argv, opts \\ []) when is_list(argv) and is_list(opts) do
do_parse(argv, compile_config(opts), [], [], [], true)
end
@doc """
The same as `parse/2` but raises an `OptionParser.ParseError`
exception if any invalid options are given.
If there are no errors, returns a `{parsed, rest}` tuple where:
* `parsed` is the list of parsed switches (same as in `parse/2`)
* `rest` is the list of arguments (same as in `parse/2`)
## Examples
iex> OptionParser.parse!(["--debug", "path/to/file"], strict: [debug: :boolean])
{[debug: true], ["path/to/file"]}
iex> OptionParser.parse!(["--limit", "xyz"], strict: [limit: :integer])
** (OptionParser.ParseError) 1 error found!
--limit : Expected type integer, got "xyz"
iex> OptionParser.parse!(["--unknown", "xyz"], strict: [])
** (OptionParser.ParseError) 1 error found!
--unknown : Unknown option
iex> OptionParser.parse!(["-l", "xyz", "-f", "bar"],
...> switches: [limit: :integer, foo: :integer], aliases: [l: :limit, f: :foo])
** (OptionParser.ParseError) 2 errors found!
-l : Expected type integer, got "xyz"
-f : Expected type integer, got "bar"
"""
@spec parse!(argv, options) :: {parsed, argv} | no_return
def parse!(argv, opts \\ []) when is_list(argv) and is_list(opts) do
case parse(argv, opts) do
{parsed, args, []} -> {parsed, args}
{_, _, errors} -> raise ParseError, format_errors(errors, opts)
end
end
@doc """
Similar to `parse/2` but only parses the head of `argv`;
as soon as it finds a non-switch, it stops parsing.
See `parse/2` for more information.
## Example
iex> OptionParser.parse_head(["--source", "lib", "test/enum_test.exs", "--verbose"])
{[source: "lib"], ["test/enum_test.exs", "--verbose"], []}
iex> OptionParser.parse_head(["--verbose", "--source", "lib", "test/enum_test.exs", "--unlock"])
{[verbose: true, source: "lib"], ["test/enum_test.exs", "--unlock"], []}
"""
@spec parse_head(argv, options) :: {parsed, argv, errors}
def parse_head(argv, opts \\ []) when is_list(argv) and is_list(opts) do
do_parse(argv, compile_config(opts), [], [], [], false)
end
@doc """
The same as `parse_head/2` but raises an `OptionParser.ParseError`
exception if any invalid options are given.
If there are no errors, returns a `{parsed, rest}` tuple where:

* `parsed` is the list of parsed switches (same as in `parse_head/2`)
* `rest` is the list of arguments (same as in `parse_head/2`)

## Examples

    iex> OptionParser.parse_head!(["--source", "lib", "path/to/file", "--verbose"])
    {[source: "lib"], ["path/to/file", "--verbose"]}

    iex> OptionParser.parse_head!(["--number", "lib", "test/enum_test.exs", "--verbose"], strict: [number: :integer])
    ** (OptionParser.ParseError) 1 error found!
    --number : Expected type integer, got "lib"

    iex> OptionParser.parse_head!(["--verbose", "--source", "lib", "test/enum_test.exs", "--unlock"],
    ...>                          strict: [verbose: :integer, source: :integer])
    ** (OptionParser.ParseError) 2 errors found!
    --verbose : Missing argument of type integer
    --source : Expected type integer, got "lib"
"""
@spec parse_head!(argv, options) :: {parsed, argv} | no_return
def parse_head!(argv, opts \\ []) when is_list(argv) and is_list(opts) do
  # Delegate to parse_head/2, raising only when errors were collected.
  {parsed, rest, errors} = parse_head(argv, opts)

  if errors == [] do
    {parsed, rest}
  else
    raise ParseError, format_errors(errors, opts)
  end
end
# Base case: argv is exhausted. All three accumulators were built by
# prepending, so reverse each to restore the original ordering.
defp do_parse([], _config, opts, args, invalid, _all?) do
  {Enum.reverse(opts), Enum.reverse(args), Enum.reverse(invalid)}
end

# Main parsing loop. `all?` controls whether parsing continues past the
# first non-switch argument (true for parse/2, false for parse_head/2).
defp do_parse(argv, {aliases, switches, strict?} = config, opts, args, invalid, all?) do
  case next(argv, aliases, switches, strict?) do
    {:ok, option, value, rest} ->
      # the option exists and it was successfully parsed
      kinds = List.wrap Keyword.get(switches, option)
      new_opts = store_option(opts, option, value, kinds)
      do_parse(rest, config, new_opts, args, invalid, all?)

    {:invalid, option, value, rest} ->
      # the option exist but it has wrong value
      do_parse(rest, config, opts, args, [{option, value} | invalid], all?)

    {:undefined, option, _value, rest} ->
      # the option does not exist (for strict cases)
      do_parse(rest, config, opts, args, [{option, nil} | invalid], all?)

    {:error, ["--" | rest]} ->
      # "--" is the conventional end-of-options marker: everything after
      # it is returned as plain arguments without further parsing.
      {Enum.reverse(opts), Enum.reverse(args, rest), Enum.reverse(invalid)}

    {:error, [arg | rest] = remaining_args} ->
      # there is no option
      if all? do
        do_parse(rest, config, opts, [arg | args], invalid, all?)
      else
        {Enum.reverse(opts), Enum.reverse(args, remaining_args), Enum.reverse(invalid)}
      end
  end
end
@doc """
Low-level function that parses one option.
It accepts the same options as `parse/2` and `parse_head/2`
as both functions are built on top of this function. This function
may return:

* `{:ok, key, value, rest}` - the option `key` with `value` was
  successfully parsed
* `{:invalid, key, value, rest}` - the option `key` is invalid with `value`
  (returned when the value cannot be parsed according to the switch type)
* `{:undefined, key, value, rest}` - the option `key` is undefined
  (returned in strict mode when the switch is unknown)
* `{:error, rest}` - there are no switches at the head of the given `argv`
"""
@spec next(argv, options) ::
      {:ok, key :: atom, value :: term, argv} |
      {:invalid, String.t, String.t | nil, argv} |
      {:undefined, String.t, String.t | nil, argv} |
      {:error, argv}
def next(argv, opts \\ []) when is_list(argv) and is_list(opts) do
  {aliases, switches, strict?} = compile_config(opts)
  next(argv, aliases, switches, strict?)
end

# Nothing left to parse.
defp next([], _aliases, _switches, _strict?) do
  {:error, []}
end

# "--" (end-of-options marker), a lone "-", and "- "-prefixed entries are
# never treated as switches.
defp next(["--" | _] = argv, _aliases, _switches, _strict?) do
  {:error, argv}
end

defp next(["-" | _] = argv, _aliases, _switches, _strict?) do
  {:error, argv}
end

defp next(["- " <> _ | _] = argv, _aliases, _switches, _strict?) do
  {:error, argv}
end

# Handles --foo or --foo=bar
defp next(["--" <> option | rest], _aliases, switches, strict?) do
  {option, value} = split_option(option)
  tagged = tag_option(option, switches)
  do_next(tagged, value, "--" <> option, rest, switches, strict?)
end

# Handles -a, -abc, -abc=something
defp next(["-" <> option | rest] = argv, aliases, switches, strict?) do
  {option, value} = split_option(option)
  original = "-" <> option

  cond do
    is_nil(value) and negative_number?(original) ->
      # A bare negative number (e.g. "-13") is an argument, not a switch.
      {:error, argv}

    String.contains?(option, ["-", "_"]) ->
      # Aliases may not contain dashes or underscores.
      {:undefined, original, value, rest}

    String.length(option) > 1 ->
      key = get_option_key(option)
      option_key = aliases[key]

      if key && option_key do
        IO.warn "multi-letter aliases are deprecated, got: #{inspect(key)}"
        do_next({:default, option_key}, value, original, rest, switches, strict?)
      else
        # Treat "-abc" as the grouped one-letter aliases "-a -b -c".
        next(expand_multiletter_alias(option, value) ++ rest, aliases, switches, strict?)
      end

    true ->
      # We have a regular one-letter alias here
      tagged = tag_oneletter_alias(option, aliases)
      do_next(tagged, value, original, rest, switches, strict?)
  end
end

# Anything else (a plain argument) stops switch parsing.
defp next(argv, _aliases, _switches, _strict?) do
  {:error, argv}
end
# Shared tail of single-option parsing: once an option has been tagged,
# reject it in strict mode when it is not configured, otherwise normalize
# its kinds/value against the switch configuration and validate the value.
defp do_next(tagged, value, original, rest, switches, strict?) do
  if strict? and not option_defined?(tagged, switches) do
    {:undefined, original, value, rest}
  else
    {option, kinds, value} = normalize_option(tagged, value, switches)
    {value, kinds, rest} = normalize_value(value, kinds, rest, strict?)

    case validate_option(value, kinds) do
      {:ok, new_value} -> {:ok, option, new_value, rest}
      :invalid -> {:invalid, original, value, rest}
    end
  end
end
@doc """
Receives a key-value enumerable and converts it to `t:argv/0`.
Keys must be atoms. Keys with `nil` value are discarded,
boolean values are converted to `--key` or `--no-key`
(if the value is `true` or `false`, respectively),
and all other values are converted using `Kernel.to_string/1`.
It is advised to pass to `to_argv/2` the same set of `options`
given to `parse/2`. Some switches can only be reconstructed
correctly with the `switches` information in hand.

## Examples

    iex> OptionParser.to_argv([foo_bar: "baz"])
    ["--foo-bar", "baz"]

    iex> OptionParser.to_argv([bool: true, bool: false, discarded: nil])
    ["--bool", "--no-bool"]

Some switches will output different values based on the switches
flag:

    iex> OptionParser.to_argv([number: 2], switches: [])
    ["--number", "2"]

    iex> OptionParser.to_argv([number: 2], switches: [number: :count])
    ["--number", "--number"]
"""
@spec to_argv(Enumerable.t, options) :: argv
def to_argv(enum, opts \\ []) do
  switches = Keyword.get(opts, :switches, [])

  Enum.flat_map(enum, fn
    {_key, nil} -> []
    {key, true} -> [to_switch(key)]
    {key, false} -> [to_switch(key, "--no-")]
    {key, value} -> to_argv(key, value, switches)
  end)
end

# A :count switch of value N is reconstructed by repeating the bare switch
# N times; any other value becomes a switch/value pair.
defp to_argv(key, value, switches) do
  if switches[key] == :count do
    List.duplicate(to_switch(key), value)
  else
    [to_switch(key), to_string(value)]
  end
end

# Converts an atom key to its command-line form, replacing underscores
# with dashes (e.g. :foo_bar -> "--foo-bar").
defp to_switch(key, prefix \\ "--") when is_atom(key) do
  prefix <> String.replace(Atom.to_string(key), "_", "-")
end
@doc ~S"""
Splits a string into `t:argv/0` chunks.
This function splits the given `string` into a list of strings in a similar
way to many shells.

## Examples

    iex> OptionParser.split("foo bar")
    ["foo", "bar"]

    iex> OptionParser.split("foo \"bar baz\"")
    ["foo", "bar baz"]
"""
@spec split(String.t) :: argv
def split(string) do
  do_split(String.trim_leading(string, " "), "", [], nil)
end

# do_split(rest, buffer, acc, quote): `buffer` holds the segment being
# built, `acc` the completed segments (in reverse), and `quote` the
# currently open quote character (nil when outside any quote).

# If we have an escaped quote, simply remove the escape
defp do_split(<<?\\, quote, t::binary>>, buffer, acc, quote),
  do: do_split(t, <<buffer::binary, quote>>, acc, quote)

# If we have a quote and we were not in a quote, start one
defp do_split(<<quote, t::binary>>, buffer, acc, nil) when quote in [?", ?'],
  do: do_split(t, buffer, acc, quote)

# If we have a quote and we were inside it, close it
defp do_split(<<quote, t::binary>>, buffer, acc, quote),
  do: do_split(t, buffer, acc, nil)

# If we have an escaped quote/space, simply remove the escape as long as we are not inside a quote
defp do_split(<<?\\, h, t::binary>>, buffer, acc, nil) when h in [?\s, ?', ?"],
  do: do_split(t, <<buffer::binary, h>>, acc, nil)

# If we have space and we are outside of a quote, start new segment
defp do_split(<<?\s, t::binary>>, buffer, acc, nil),
  do: do_split(String.trim_leading(t, " "), "", [buffer | acc], nil)

# All other characters are moved to buffer
defp do_split(<<h, t::binary>>, buffer, acc, quote) do
  do_split(t, <<buffer::binary, h>>, acc, quote)
end

# Finish the string expecting a nil marker
defp do_split(<<>>, "", acc, nil),
  do: Enum.reverse(acc)

defp do_split(<<>>, buffer, acc, nil),
  do: Enum.reverse([buffer | acc])

# Otherwise raise
defp do_split(<<>>, _, _acc, marker) do
  raise "argv string did not terminate properly, a #{<<marker>>} was opened but never closed"
end
## Helpers

# Normalizes the user-supplied options into the internal
# {aliases, switches, strict?} tuple used by all parsing functions.
# Raises when both :switches and :strict are supplied, since they are
# mutually exclusive configuration modes.
defp compile_config(opts) do
  aliases = opts[:aliases] || []

  if opts[:switches] && opts[:strict] do
    raise ArgumentError, ":switches and :strict cannot be given together"
  end

  {switches, strict?} =
    cond do
      switches = opts[:switches] -> {switches, false}
      strict = opts[:strict] -> {strict, true}
      true -> {[], false}
    end

  {aliases, switches, strict?}
end
# Validates and casts `value` against the switch `kinds`, returning
# `{:ok, casted_value}` or `:invalid`. Kind checks follow a fixed
# priority order: :invalid, :boolean, :count, :integer, :float, then a
# pass-through for untyped switches.
defp validate_option(value, kinds) do
  cond do
    :invalid in kinds ->
      :invalid

    :boolean in kinds ->
      # Accept both actual booleans and their string forms ("--foo=true").
      case value do
        t when t in [true, "true"] -> {:ok, true}
        f when f in [false, "false"] -> {:ok, false}
        _ -> :invalid
      end

    :count in kinds ->
      # Counters are always normalized to 1 per occurrence upstream.
      if value == 1, do: {:ok, 1}, else: :invalid

    :integer in kinds ->
      case Integer.parse(value) do
        {int, ""} -> {:ok, int}
        _ -> :invalid
      end

    :float in kinds ->
      case Float.parse(value) do
        {float, ""} -> {:ok, float}
        _ -> :invalid
      end

    true ->
      {:ok, value}
  end
end
# Accumulates a parsed option into the keyword list `dict` according to
# its kinds: :count increments an existing entry, :keep preserves all
# duplicate occurrences, and the default is last-value-wins.
defp store_option(dict, option, value, kinds) do
  if :count in kinds do
    Keyword.update(dict, option, value, &(&1 + 1))
  else
    dict = if :keep in kinds, do: dict, else: Keyword.delete(dict, option)
    [{option, value} | dict]
  end
end
# Tags a long option name for later normalization. A "no-" prefix negates
# a boolean switch; when the stripped name is not a boolean switch, the
# whole name (including "no-") is looked up as a regular option key.
defp tag_option("no-" <> option = original, switches) do
  cond do
    (negated = get_option_key(option)) && :boolean in List.wrap(switches[negated]) ->
      {:negated, negated}

    option_key = get_option_key(original) ->
      {:default, option_key}

    true ->
      :unknown
  end
end

defp tag_option(option, _switches) do
  if option_key = get_option_key(option) do
    {:default, option_key}
  else
    :unknown
  end
end
# Resolves a one-letter alias (e.g. "o") to its configured option key,
# or :unknown when no such alias is configured.
defp tag_oneletter_alias(alias, aliases) when is_binary(alias) do
  option_key = aliases[to_existing_key(alias)]

  if option_key do
    {:default, option_key}
  else
    :unknown
  end
end
# Expands grouped one-letter aliases ("abc") into individual switches
# (["-a", "-b", "-c"]); a "=value" suffix, if present, is attached to the
# last switch only.
defp expand_multiletter_alias(letters, value) when is_binary(letters) do
  suffix = if value, do: "=" <> value, else: ""

  switches = for letter <- String.codepoints(letters), do: "-" <> letter
  {init, [last]} = Enum.split(switches, -1)

  init ++ [last <> suffix]
end
# An :unknown option is never defined; :negated and :default options are
# defined exactly when their key appears in the configured switches.
defp option_defined?(:unknown, _switches), do: false

defp option_defined?({tag, option}, switches) when tag in [:negated, :default] do
  Keyword.has_key?(switches, option)
end
# Resolves a tagged option into {key, kinds, value}. Unknown options are
# marked invalid so validation fails downstream.
defp normalize_option(:unknown, value, _switches) do
  {nil, [:invalid], value}
end

# A negated switch ("--no-foo") accepts no value: bare usage parses as
# `false`, while any supplied value flags the option as invalid.
defp normalize_option({:negated, option}, nil, switches) do
  {option, List.wrap(switches[option]), false}
end

defp normalize_option({:negated, option}, value, _switches) do
  {option, [:invalid], value}
end

defp normalize_option({:default, option}, value, switches) do
  {option, List.wrap(switches[option]), value}
end
# Fills in the value for a switch that was given without one ("--foo").
defp normalize_value(nil, kinds, t, strict?) do
  cond do
    :boolean in kinds ->
      # Bare boolean switches default to true.
      {true, kinds, t}

    :count in kinds ->
      # Each bare occurrence of a counter counts as 1.
      {1, kinds, t}

    value_in_tail?(t) ->
      # Consume the next argv entry as this switch's value.
      [h | t] = t
      {h, kinds, t}

    kinds == [] and strict? ->
      {nil, kinds, t}

    kinds == [] ->
      # Untyped, non-strict switches behave like booleans.
      {true, kinds, t}

    true ->
      # A typed switch with no available value is invalid.
      {nil, [:invalid], t}
  end
end

# A value was given explicitly ("--foo=bar"): use it as-is.
defp normalize_value(value, kinds, t, _strict?) do
  {value, kinds, t}
end

# Decides whether the next argv entry may serve as a switch value:
# a lone "-", "- "-prefixed entries, negative numbers, and entries not
# starting with "-" all qualify; other "-"-prefixed entries are switches.
defp value_in_tail?(["-" | _]), do: true
defp value_in_tail?(["- " <> _ | _]), do: true
defp value_in_tail?(["-" <> arg | _]), do: negative_number?("-" <> arg)
defp value_in_tail?([]), do: false
defp value_in_tail?(_), do: true
# Splits "name=value" at the first "=", returning {name, value};
# returns {name, nil} when no "=" is present.
defp split_option(option) do
  case String.split(option, "=", parts: 2) do
    [name] -> {name, nil}
    [name, value] -> {name, value}
  end
end
# Converts a dashed option name to its underscored form ("foo-bar" ->
# "foo_bar"); returns nil when the name already contains an underscore,
# since such option names are rejected.
defp to_underscore(option) do
  if String.contains?(option, "_") do
    nil
  else
    String.replace(option, "-", "_")
  end
end

def get_option_key(option) do
  case to_underscore(option) do
    nil -> nil
    underscored -> to_existing_key(underscored)
  end
end
# Converts `option` to an atom only when that atom already exists,
# preventing unbounded atom creation from user-controlled input;
# returns nil for unknown names.
defp to_existing_key(option) do
  String.to_existing_atom(option)
rescue
  ArgumentError -> nil
end
# Returns true when `arg` parses fully as a number. Callers only pass
# strings beginning with "-", so a complete parse implies a negative
# number rather than a switch.
defp negative_number?(arg) do
  case Float.parse(arg) do
    {_number, ""} -> true
    _ -> false
  end
end
# Builds the human-readable message used by OptionParser.ParseError,
# e.g. "2 errors found!\n--a : ...\n--b : ...".
defp format_errors([_ | _] = errors, opts) do
  types = opts[:switches] || opts[:strict]
  error_count = length(errors)
  error = if error_count == 1, do: "error", else: "errors"

  "#{error_count} #{error} found!\n" <>
    Enum.map_join(errors, "\n", &format_error(&1, opts, types))
end

# A nil value means the switch was present but its argument was missing,
# or the switch itself is unknown (no declared type).
defp format_error({option, nil}, opts, types) do
  if type = get_type(option, opts, types) do
    "#{option} : Missing argument of type #{type}"
  else
    "#{option} : Unknown option"
  end
end

defp format_error({option, value}, opts, types) do
  type = get_type(option, opts, types)
  "#{option} : Expected type #{type}, got #{inspect value}"
end

# Looks up the declared type for an option name, resolving aliases first.
defp get_type(option, opts, types) do
  key = option |> String.trim_leading("-") |> get_option_key()

  if option_key = opts[:aliases][key] do
    types[option_key]
  else
    types[key]
  end
end
end
|
lib/elixir/lib/option_parser.ex
| 0.866005 | 0.631197 |
option_parser.ex
|
starcoder
|
defmodule Mix.Tasks.Hex.Package do
  use Mix.Task
  alias Hex.Registry.Server, as: Registry

  @shortdoc "Fetches or diffs packages"

  # Placeholders __PATH1__/__PATH2__ are replaced with the unpacked
  # tarball directories before the command is executed.
  @default_diff_command "git diff --no-index __PATH1__ __PATH2__"

  @doc false
  def default_diff_command(), do: @default_diff_command

  @moduledoc """
  Fetches or diffs packages.

  ## Fetch package

  Fetch a package tarball to the current directory.

      $ mix hex.package fetch PACKAGE [VERSION] [--unpack] [--output PATH]

  If `version` is not given, use the latest version.
  You can pipe the fetched tarball to stdout by setting `--output -`.

  ## Diff package versions

      $ mix hex.package diff APP VERSION

  This command compares the project's dependency `APP` against
  the target package version, unpacking the target version into
  temporary directory and running a diff command.

  ## Fetch and diff package contents between versions

      $ mix hex.package diff PACKAGE VERSION1 VERSION2
      $ mix hex.package diff PACKAGE VERSION1..VERSION2

  This command fetches package tarballs for both versions,
  unpacks them into temporary directories and runs a diff command.
  Afterwards, the temporary directories are automatically deleted.
  Note, similarly to when tarballs are fetched with `mix deps.get`,
  a `hex_metadata.config` is placed in each unpacked directory.
  This file contains package's metadata as Erlang terms and so
  we can additionally see the diff of that.
  The exit code of the task is that of the underlying diff command.

  ### Diff command

  The diff command can be customized by setting `diff_command`
  configuration option, see `mix help hex.config` for more information.
  The default diff command is:

      $ #{@default_diff_command}

  The `__PATH1__` and `__PATH2__` placeholders will be interpolated with
  paths to directories of unpacked tarballs for each version.
  Many diff commands supports coloured output but because we execute
  the command in non-interactive mode, they'd usually be disabled.
  On Unix systems you can pipe the output to more commands, for example:

      $ mix hex.package diff decimal 1.0.0..1.1.0 | colordiff | less -R

  Here, the output of `mix hex.package diff` is piped to the `colordiff`
  utility to adds colours, which in turn is piped to `less -R` which
  "pages" it. (`-R` preserves escape codes which allows colours to work.)
  Another option is to configure the diff command itself. For example, to
  force Git to always colour the output we can set the `--color=always` option:

      $ mix hex.config diff_command "git diff --color=always --no-index __PATH1__ __PATH2__"
      $ mix hex.package diff decimal 1.0.0..1.1.0

  ## Command line options

  * `--unpack` - Unpacks the tarball after fetching it
  * `-o`, `--output` - Sets output path. When used with `--unpack` it means
    the directory (Default: `<app>-<version>`). Otherwise, it specifies
    tarball path (Default: `<app>-<version>.tar`)
  * `--organization ORGANIZATION` - Set this for private packages belonging to an organization
  * `--repo REPO` - Set this for self-hosted Hex instances, default: `hexpm`
  """
  @behaviour Hex.Mix.TaskDescription

  @switches [unpack: :boolean, organization: :string, output: :string, repo: :string]
  @aliases [o: :output]

  # Entry point: dispatches to fetch/5 or diff/3 based on the subcommand.
  @impl true
  def run(args) do
    Hex.start()
    {opts, args} = Hex.OptionParser.parse!(args, strict: @switches, aliases: @aliases)
    unpack = Keyword.get(opts, :unpack, false)
    output = Keyword.get(opts, :output, nil)

    case args do
      ["fetch", package] ->
        fetch(repo(opts), package, nil, unpack, output)

      ["fetch", package, version] ->
        fetch(repo(opts), package, version, unpack, output)

      ["diff", package, version1, version2] ->
        diff(repo(opts), package, parse_version!(version1, version2))

      ["diff", package, version] ->
        diff(repo(opts), package, parse_version!(version))

      _ ->
        Mix.raise("""
        Invalid arguments, expected one of:

        mix hex.package fetch PACKAGE [VERSION] [--unpack]
        mix hex.package diff APP VERSION
        mix hex.package diff PACKAGE VERSION1 VERSION2
        mix hex.package diff PACKAGE VERSION1..VERSION2
        """)
    end
  end

  @impl true
  def tasks() do
    [
      {"fetch PACKAGE [VERSION] [--unpack]", "Fetch the package"},
      {"diff APP VERSION", "Diff dependency against version"},
      {"diff PACKAGE VERSION1 VERSION2", "Diff package versions"},
      {"diff PACKAGE VERSION1..VERSION2", "Diff package versions"}
    ]
  end

  # No version given: resolve the latest stable version first.
  defp fetch(repo, package, nil, unpack?, output) do
    version = find_package_latest_version(repo, package)
    fetch(repo, package, version, unpack?, output)
  end

  # `--output -` without --unpack: stream the raw tarball to stdout.
  defp fetch(repo, package, version, false, "-") do
    Hex.Registry.Server.open()
    Hex.Registry.Server.prefetch([{repo, package}])
    tarball = fetch_tarball!(repo, package, version)
    IO.binwrite(tarball)
    Hex.Registry.Server.close()
  end

  # Unpacking to stdout is contradictory; reject it.
  defp fetch(_repo, _package, _version, true, "-") do
    Mix.raise("Cannot unpack the package while output destination is stdout")
  end

  # Fetch to disk, optionally unpacking, and verify registry checksums.
  defp fetch(repo, package, version, unpack?, output) do
    Hex.Registry.Server.open()
    Hex.Registry.Server.prefetch([{repo, package}])
    tarball = fetch_tarball!(repo, package, version)
    if output, do: File.mkdir_p!(output)
    abs_name = Path.absname("#{package}-#{version}")

    {abs_path, tar_path} =
      if output do
        {output, Path.join(output, "#{package}-#{version}.tar")}
      else
        {abs_name, "#{abs_name}.tar"}
      end

    File.write!(tar_path, tarball)

    if unpack? do
      # Unpacking yields both checksums; verify each against the registry.
      %{inner_checksum: inner_checksum, outer_checksum: outer_checksum} =
        Hex.Tar.unpack!(tar_path, abs_path)

      verify_inner_checksum!(repo, package, version, inner_checksum)
      verify_outer_checksum!(repo, package, version, outer_checksum)
    else
      {:ok, outer_checksum} = Hex.Tar.outer_checksum(tar_path)
      verify_outer_checksum!(repo, package, version, outer_checksum)
    end

    message =
      if unpack? do
        # The intermediate .tar is only needed when keeping the tarball.
        File.rm!(tar_path)
        "#{package} v#{version} extracted to #{abs_path}"
      else
        "#{package} v#{version} downloaded to #{tar_path}"
      end

    Hex.Shell.info(message)
    Hex.Registry.Server.close()
  end

  # Reads the tarball from the SCM cache; on download failure a previously
  # cached copy is used when available, otherwise the task aborts.
  defp fetch_tarball!(repo, package, version) do
    path = Hex.SCM.cache_path(repo, package, version)

    case Hex.SCM.fetch(repo, package, version) do
      {:ok, _} ->
        File.read!(path)

      {:error, reason} ->
        if File.exists?(path) do
          File.read!(path)
        else
          Mix.raise(
            "Downloading " <>
              Hex.Repo.tarball_url(repo, package, version) <> " failed:\n\n" <> reason
          )
        end
    end
  end

  defp verify_inner_checksum!(repo, package, version, checksum) do
    registry_checksum = Registry.inner_checksum(repo, package, version)

    if checksum != registry_checksum do
      Mix.raise("Checksum mismatch against registry (inner)")
    end
  end

  defp verify_outer_checksum!(repo, package, version, checksum) do
    registry_checksum = Registry.outer_checksum(repo, package, version)

    if checksum != registry_checksum do
      Mix.raise("Checksum mismatch against registry (outer)")
    end
  end

  # Single-version diff: compares the locked dependency checkout against
  # the given package version unpacked into a temporary directory.
  defp diff(repo, app, version) when is_binary(version) do
    Hex.Mix.check_deps()

    {path_lock, package} =
      case Map.get(Mix.Dep.Lock.read(), String.to_atom(app)) do
        nil ->
          Mix.raise(
            "Cannot find the app \"#{app}\" in \"mix.lock\" file, " <>
              "please ensure it has been specified in \"mix.exs\" and run \"mix deps.get\""
          )

        lock ->
          # The lock entry carries the package name, which may differ
          # from the dependency's app name.
          path = Path.join(Mix.Project.deps_path(), app)
          package = Hex.Utils.lock(lock).name
          {path, package}
      end

    path = tmp_path("#{package}-#{version}-")

    try do
      fetch_and_unpack!(repo, package, [{path, version}])
      code = run_diff_path!(path_lock, path)
      # Propagate the diff command's exit status as the task's exit code.
      Mix.Tasks.Hex.set_exit_code(code)
    after
      File.rm_rf!(path)
    end
  end

  # Two-version diff: unpack both versions into temp dirs, diff, clean up.
  defp diff(repo, package, {version1, version2}) do
    path1 = tmp_path("#{package}-#{version1}-")
    path2 = tmp_path("#{package}-#{version2}-")

    try do
      fetch_and_unpack!(repo, package, [{path1, version1}, {path2, version2}])
      code = run_diff_path!(path1, path2)
      Mix.Tasks.Hex.set_exit_code(code)
    after
      File.rm_rf!(path1)
      File.rm_rf!(path2)
    end
  end

  # Fetches and unpacks each {path, version} pair, verifying checksums;
  # the registry connection is always closed, even on failure.
  defp fetch_and_unpack!(repo, package, versions) do
    Hex.Registry.Server.open()
    Hex.Registry.Server.prefetch([{repo, package}])

    try do
      Enum.each(versions, fn {path, version} ->
        tarball = fetch_tarball!(repo, package, version)

        %{inner_checksum: inner_checksum, outer_checksum: outer_checksum} =
          Hex.Tar.unpack!({:binary, tarball}, path)

        verify_inner_checksum!(repo, package, version, inner_checksum)
        verify_outer_checksum!(repo, package, version, outer_checksum)
      end)
    after
      Hex.Registry.Server.close()
    end
  end

  # Runs the configured diff command with both paths interpolated and
  # returns its exit status.
  defp run_diff_path!(path1, path2) do
    cmd =
      Hex.State.fetch!(:diff_command)
      |> String.replace("__PATH1__", escape_and_quote_path(path1))
      |> String.replace("__PATH2__", escape_and_quote_path(path2))

    Mix.shell().cmd(cmd)
  end

  # Quotes a path for shell interpolation, escaping embedded quotes.
  defp escape_and_quote_path(path) do
    escaped = String.replace(path, "\"", "\\\"")
    ~s("#{escaped}")
  end

  # Builds a unique temporary path by appending a random hex suffix.
  defp tmp_path(prefix) do
    random_string = Base.encode16(:crypto.strong_rand_bytes(4))
    Path.join(System.tmp_dir!(), prefix <> random_string)
  end

  # Accepts either "V1..V2" (range) or a single version string.
  defp parse_version!(string) do
    case String.split(string, "..", trim: true) do
      [version1, version2] ->
        parse_two_versions!(version1, version2)

      [version] ->
        version |> Hex.Version.parse!() |> to_string()
    end
  end

  defp parse_version!(version1, version2) do
    parse_two_versions!(version1, version2)
  end

  # Validates both versions, raising on malformed input, and returns them
  # normalized as strings.
  defp parse_two_versions!(version1, version2) do
    version1 = Hex.Version.parse!(version1)
    version2 = Hex.Version.parse!(version2)
    {to_string(version1), to_string(version2)}
  end

  # Private organization packages are addressed as "REPO:ORGANIZATION".
  defp repo(opts) do
    repo = Keyword.get(opts, :repo, "hexpm")

    if organization = opts[:organization] do
      Enum.join([repo, organization], ":")
    else
      repo
    end
  end

  defp find_package_latest_version(organization, name) do
    %{"latest_stable_version" => latest_stable_version} =
      retrieve_package_info(organization, name)

    latest_stable_version
  end

  # Fetches package metadata from the Hex API, raising on 404 and
  # printing the error details on any other failure.
  defp retrieve_package_info(organization, name) do
    case Hex.API.Package.get(organization, name) do
      {:ok, {code, body, _}} when code in 200..299 ->
        body

      {:ok, {404, _, _}} ->
        Mix.raise("No package with name #{name}")

      other ->
        Hex.Shell.error("Failed to retrieve package information")
        Hex.Utils.print_error_result(other)
    end
  end
end
|
lib/mix/tasks/hex.package.ex
| 0.800107 | 0.410047 |
hex.package.ex
|
starcoder
|
defmodule Sue.DB.Schema do
  @moduledoc """
  Used to define the types of elements (vertices) in our graph.
  Some of these are connected by edges:
    Chat <-> PlatformAccount <-> Chat
    Account -> Definition
    Chat -> Definition
  Some of these are left disconnected, and function simply as normal tables:
    Poll
  """

  alias Sue.Models.{Account, PlatformAccount, Chat, Poll, Definition}

  defmodule Vertex do
    alias __MODULE__

    @type t() :: Account.t() | PlatformAccount.t() | Chat.t() | Definition.t() | Poll.t()
    @type module_name_t() :: Account | PlatformAccount | Chat | Definition | Poll
    @type tuple_t() :: {atom(), any()}

    # Returns the value used as this vertex's unique id in the graph tables.
    @spec id(Vertex.t()) :: any()
    def id(v) do
      case v do
        %Account{} -> v.ref
        # Platform-scoped records are keyed by {platform, id} pairs.
        %PlatformAccount{} -> {v.platform, v.id}
        %Chat{} -> {v.platform, v.id}
        %Definition{} -> v.ref
        # A poll is identified by the chat it belongs to.
        %Poll{} -> v.chat
      end
    end

    # Maps a vertex struct or its module name to the atom used as the
    # table name / label for that vertex type.
    @spec label(Vertex.t() | module_name_t()) :: atom()
    def label(v_module) when is_atom(v_module) do
      case v_module do
        Account -> :account
        PlatformAccount -> :platform_account
        Chat -> :chat
        Definition -> :defn
        Poll -> :poll
      end
    end

    def label(vstruct) do
      label(vstruct.__struct__)
    end

    @doc """
    Compare two vertices (either as their struct forms, or {type, id} tuples).
    """
    @spec equals?(__MODULE__.t() | {atom(), any()}, __MODULE__.t() | {atom(), any()}) :: boolean()
    def equals?({v1_type, v1_id}, {v2_type, v2_id}) do
      v1_type == v2_type and v1_id == v2_id
    end

    def equals?({v1_type, v1_id}, v2) do
      v1_type == label(v2) and v1_id == id(v2)
    end

    def equals?(v1, {v2_type, v2_id}) do
      label(v1) == v2_type and id(v1) == v2_id
    end

    def equals?(v1, v2) do
      label(v1) == label(v2) and id(v1) == id(v2)
    end
  end

  # Public API

  # Lists every vertex module known to the schema.
  @spec vtypes :: [Vertex.module_name_t()]
  def vtypes() do
    [
      Account,
      PlatformAccount,
      Chat,
      Definition,
      Poll
    ]
  end

  # Builds the full table specification: one key-value table per vertex
  # type, a bag-typed :edges table for relations, and a :state kv table.
  # NOTE(review): the option shapes look like Mnesia-style table specs —
  # confirm against the DB layer that consumes them.
  def tables() do
    graph_tables =
      for vtype <- vtypes() do
        table_name = Vertex.label(vtype)
        table_opts = kv_opts()
        {table_name, table_opts}
      end ++
        [
          {:edges,
           [
             type: :bag,
             attributes: [:srctype, :dsttype, :srcid, :dstid, :metadata],
             index: [:dsttype, :srcid, :dstid]
           ]}
        ]

    kv_tables = [
      {:state, kv_opts()}
    ]

    graph_tables ++ kv_tables
  end

  # Common options for simple key-value tables.
  defp kv_opts(), do: [type: :set, attributes: [:key, :val]]
end
|
apps/sue/lib/sue/db/schema.ex
| 0.739046 | 0.499695 |
schema.ex
|
starcoder
|
defmodule PromEx.DashboardUploader do
  @moduledoc """
  This GenServer is responsible for uploading the configured PromEx module
  dashboards to Grafana. This is a transient process and will terminate after
  the dashboards have been successfully uploaded. It requires the name of the
  PromEx module as an option so that it can look into the application
  config for the appropriate Grafana settings. For example, if the name of the
  PromEx module is `WebApp.PromEx`, then your config should provide the following
  settings:

  ```elixir
  config :web_app, WebApp.PromEx,
    grafana_host: "<YOUR HOST ADDRESS>",
    grafana_auth_token: "<YOUR GRAFANA AUTH TOKEN>"
  ```
  """

  use GenServer, restart: :transient

  require Logger

  alias PromEx.{DashboardRenderer, GrafanaClient, GrafanaClient.Connection}

  @doc """
  Used to start the DashboardUploader process
  """
  @spec start_link(opts :: keyword()) :: GenServer.on_start()
  def start_link(opts) do
    # :name is consumed here; every remaining option becomes server state.
    {name, remaining_opts} = Keyword.pop(opts, :name)
    state = Map.new(remaining_opts)
    GenServer.start_link(__MODULE__, state, name: name)
  end

  @impl true
  def init(state) do
    # Defer the upload work out of init/1 so start_link/1 returns promptly.
    {:ok, state, {:continue, :upload_grafana_dashboards}}
  end

  # Renders and uploads every configured dashboard, then stops the process
  # (restart: :transient, so a :normal stop is not restarted).
  @impl true
  def handle_continue(:upload_grafana_dashboards, state) do
    %{
      prom_ex_module: prom_ex_module,
      default_dashboard_opts: default_dashboard_opts
    } = state

    %PromEx.Config{grafana_config: grafana_config} = prom_ex_module.init_opts()

    # Start Finch process and build Grafana connection
    finch_name = Module.concat([prom_ex_module, __MODULE__, Finch])
    Finch.start_link(name: finch_name)
    grafana_conn = Connection.build(finch_name, grafana_config)

    upload_opts =
      case grafana_config.folder_name do
        :default ->
          []

        folder_name ->
          [folderId: get_folder_id(grafana_conn, folder_name, prom_ex_module)]
      end

    # Iterate over all the configured dashboards and upload them
    prom_ex_module.dashboards()
    |> Enum.each(fn dashboard ->
      dashboard
      |> handle_dashboard_render(default_dashboard_opts, prom_ex_module)
      |> case do
        %DashboardRenderer{valid_json?: true, rendered_file: rendered_dashboard, full_path: full_path} ->
          upload_dashboard(rendered_dashboard, grafana_conn, upload_opts, full_path)

        %DashboardRenderer{full_path: path, error: error} ->
          Logger.info(
            "The dashboard definition for #{inspect(path)} is invalid due to the following error: #{inspect(error)}"
          )
      end
    end)

    # No longer need this short-lived Finch process
    # NOTE(review): Process.exit(pid, :normal) only terminates processes that
    # trap exits — confirm the Finch supervision tree actually shuts down here.
    finch_name
    |> Process.whereis()
    |> Process.exit(:normal)

    # Kill the uploader process as there is no more work to do
    {:stop, :normal, :ok}
  end

  # Two-element dashboard tuples get an empty opts list and are re-dispatched
  # to the three-element clause below.
  defp handle_dashboard_render({otp_app, dashboard_relative_path}, default_assigns, prom_ex_module) do
    handle_dashboard_render({otp_app, dashboard_relative_path, []}, default_assigns, prom_ex_module)
  end

  # Renders one dashboard template, layering assigns with increasing
  # precedence: defaults < user assigns < derived title < per-dashboard opts.
  defp handle_dashboard_render(
         {dashboard_otp_app, dashboard_relative_path, dashboard_opts},
         default_assigns,
         prom_ex_module
       ) do
    user_provided_assigns = prom_ex_module.dashboard_assigns()

    default_title =
      prom_ex_module.__otp_app__()
      |> Atom.to_string()
      |> Macro.camelize()

    default_dashboard_name =
      dashboard_relative_path
      |> Path.basename()
      |> normalize_file_name()
      |> Macro.camelize()

    default_dashboard_assigns = [
      title: "#{default_title} - PromEx #{default_dashboard_name} Dashboard"
    ]

    dashboard_otp_app
    |> DashboardRenderer.build(dashboard_relative_path)
    |> DashboardRenderer.merge_assigns(default_assigns)
    |> DashboardRenderer.merge_assigns(user_provided_assigns)
    |> DashboardRenderer.merge_assigns(default_dashboard_assigns)
    |> DashboardRenderer.merge_assigns(dashboard_opts)
    |> DashboardRenderer.render_dashboard(prom_ex_module)
    |> DashboardRenderer.decode_dashboard()
  end

  # Strips every extension from a file name ("foo.json.eex" -> "foo") by
  # recursively removing the last extension until none remain.
  defp normalize_file_name(path) do
    if Path.extname(path) == "" do
      path
    else
      path
      |> Path.rootname()
      |> normalize_file_name()
    end
  end

  # Uploads one rendered dashboard, logging success or failure; upload
  # errors are logged but do not abort the remaining dashboards.
  defp upload_dashboard(dashboard_contents, grafana_conn, upload_opts, full_dashboard_path) do
    case GrafanaClient.upload_dashboard(grafana_conn, dashboard_contents, upload_opts) do
      {:ok, _response_payload} ->
        Logger.info("PromEx.DashboardUploader successfully uploaded #{full_dashboard_path} to Grafana.")

      {:error, reason} ->
        Logger.warn("PromEx.DashboardUploader failed to upload #{full_dashboard_path} to Grafana: #{inspect(reason)}")
    end
  end

  # Resolves the Grafana folder id for the configured folder, creating the
  # folder when missing and renaming it when the configured name changed.
  # Terminates the process on unexpected Grafana errors.
  defp get_folder_id(grafana_conn, folder_name, prom_ex_module) do
    folder_uid = prom_ex_module.__grafana_folder_uid__()

    %{"id" => id, "title" => title} =
      case GrafanaClient.get_folder(grafana_conn, folder_uid) do
        {:ok, folder_details} ->
          folder_details

        {:error, :not_found} ->
          create_folder(grafana_conn, folder_uid, folder_name)

        error ->
          Logger.error(
            "PromEx.DashboardUploader (#{inspect(self())}) failed to retrieve the dashboard folderId from Grafana (#{
              grafana_conn.base_url
            }) because: #{inspect(error)}"
          )

          Process.exit(self(), :normal)
      end

    # Update the folder if the name is not up to date with the config
    if title != folder_name do
      GrafanaClient.update_folder(grafana_conn, folder_uid, folder_name)
    end

    id
  end

  # Creates the folder; a :bad_request response is treated as "a folder with
  # this name already exists under a different uid" and resolved by looking
  # up that folder and rewriting its uid to the expected one.
  defp create_folder(grafana_conn, folder_uid, folder_name) do
    case GrafanaClient.create_folder(grafana_conn, folder_uid, folder_name) do
      {:ok, folder_details} ->
        folder_details

      {:error, :bad_request} ->
        {:ok, all_folders} = GrafanaClient.get_all_folders(grafana_conn)

        all_folders
        |> Enum.find(fn %{"title" => find_folder_name} ->
          find_folder_name == folder_name
        end)
        |> Map.get("uid")
        |> update_existing_folder_uid(grafana_conn, folder_uid, folder_name)
    end
  end

  # Repairs a uid mismatch by updating the existing folder to the expected
  # uid; terminates the process when Grafana rejects the update.
  defp update_existing_folder_uid(uid_of_mismatch, grafana_conn, folder_uid, folder_name) do
    case GrafanaClient.update_folder(grafana_conn, uid_of_mismatch, folder_name, %{uid: folder_uid}) do
      {:ok, folder_details} ->
        Logger.info(
          "There was a folder UID mismatch for the folder titled \"#{folder_name}\". PromEx has updated the folder configuration in Grafana and resolved the issue."
        )

        folder_details

      error ->
        Logger.error(
          "PromEx.DashboardUploader (#{inspect(self())}) failed to update the folder UID from Grafana (#{
            grafana_conn.base_url
          }) because: #{inspect(error)}"
        )

        Process.exit(self(), :normal)
    end
  end
end
|
lib/prom_ex/dashboard_uploader.ex
| 0.759761 | 0.554169 |
dashboard_uploader.ex
|
starcoder
|
defmodule ExGdax do
@moduledoc """
GDAX API client.
"""
@doc """
List known currencies.
## Examples
iex> ExGdax.list_currencies()
{:ok,
[%{"id" => "BTC", "min_size" => "0.00000001", "name" => "Bitcoin"},
%{"id" => "ETH", "min_size" => "0.00000001", "name" => "Ether"}, ...]}
"""
defdelegate list_currencies, to: ExGdax.Market, as: :list_currencies
@doc """
Get the API server time.
## Examples
iex> EcGdax.get_time()
{:ok, %{"epoch" => 1501141821.835, "iso" => "2017-07-27T07:50:21.835Z"}}
"""
defdelegate get_time, to: ExGdax.Market, as: :get_time
@doc """
Get a list of available currency pairs for trading.
## Examples
iex> ExGdax.list_products()
{:ok,
[%{"base_currency" => "ETH", "base_max_size" => "5000",
"base_min_size" => "0.01", "display_name" => "ETH/USD", "id" => "ETH-USD",
"margin_enabled" => false, "quote_currency" => "USD",
"quote_increment" => "0.01"}, ...]}
"""
defdelegate list_products, to: ExGdax.Market, as: :list_products
@doc """
Get a list of open orders for a product.
## Parameters
Name | Description
:------ | :----------
`level` | Response detail. Valid options are 1, 2, or 3.
## Examples
iex> ExGdax.get_order_book("ETH-USD")
{:ok,
%{"asks" => [["200.42", "28.447359", 4]],
"bids" => [["200.41", "11.35615248", 3]], "sequence" => 873754533}}
iex> ExGdax.order_book("ETH-USD", %{level: 2})
{:ok,
%{"asks" => [["200.49", "73.898254", 6], ["200.5", "1.017412", 2],
["200.51", "0.017366", 1], ["200.52", "0.017387", 1], ...],
"bids" => [["200.48", "0.7", 2], ["200.47", "0.01", 1],
["200.42", "0.76212582", 1], ["200.32", "0.2", 1], ...]}
"""
defdelegate get_order_book(product_id, params \\ %{}), to: ExGdax.Market, as: :get_order_book
@doc """
Snapshot information about the last trade (tick), best bid/ask and 24h volume.
## Examples
iex> ExGdax.get_ticker("ETH-USD")
{:ok,
%{"ask" => "200.47", "bid" => "200.46", "price" => "200.47000000",
"size" => "2.65064800", "time" => "2017-07-27T08:00:43.697000Z",
"trade_id" => 8430635, "volume" => "144080.88916080"}}
"""
defdelegate get_ticker(product_id), to: ExGdax.Market, as: :get_ticker
@doc """
List the latest trades for a product.
## Parameters
Name | Description
:------- | :----------
`before` | Request page before (newer) this pagination id.
`after` | Request page after (older) this pagination id.
`limit` | Number of results per request. Maximum 100. (default 100)
## Examples
iex> ExGdax.list_trades("ETH-USD")
{:ok,
[%{"price" => "200.65000000", "side" => "sell", "size" => "1.94831509",
"time" => "2017-07-27T08:01:54.347Z", "trade_id" => 8430778}, ...]
"""
defdelegate list_trades(product_id, params \\ %{}), to: ExGdax.Market, as: :list_trades
@doc """
Historic rates for a product.
## Parameters
Name | Description
:------------ | :----------
`start` | Start time in ISO 8601.
`end` | End time in ISO 8601.
`granularity` | Desired timeslice in seconds.
## Examples
iex> ExGdax.list_historic_rates("ETH-USD")
{:ok,
[[1501142880, 200.43, 200.43, 200.43, 200.43, 5.6956], ...]}
"""
defdelegate list_historic_rates(product_id, params \\ %{}), to: ExGdax.Market, as: :list_historic_rates
@doc """
Get 24 hr stats for a product.
## Examples
iex> ExGdax.get_stats("ETH-USD")
{:ok,
%{"high" => "205.80000000", "last" => "201.68000000", "low" => "194.42000000",
"open" => "197.97000000", "volume" => "143965.79255890",
"volume_30day" => "9270459.77394214"}}
"""
defdelegate get_stats(product_id), to: ExGdax.Market, as: :get_stats
@doc """
List accounts.
## Examples
iex> ExGdax.list_accounts()
{:ok,
[%{"available" => "0.0000000000000000", "balance" => "0.0000000000000000",
"currency" => "USD", "hold" => "0.0000000000000000",
"id" => "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX",
"profile_id" => "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX"}, ...]}
"""
defdelegate list_accounts, to: ExGdax.Private, as: :list_accounts
@doc """
Get an account.
## Examples
iex> ExGdax.get_account(account["id"])
{:ok,
%{"available" => "0.0000000000000000", "balance" => "0.0000000000000000",
"currency" => "USD", "hold" => "0.0000000000000000",
"id" => "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX",
"profile_id" => "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX"}}
"""
defdelegate get_account(account_id), to: ExGdax.Private, as: :get_account
@doc """
List activity for an account.
## Parameters
Name | Description
:------- | :----------
`before` | Request page before (newer) this pagination id.
`after` | Request page after (older) this pagination id.
`limit` | Number of results per request. Maximum 100. (default 100)
## Examples
iex> ExGdax.list_account_history(account["id"], %{limit: 5})
{:ok,
[%{"amount" => "0.0000000000000000", "balance" => "0.0000000000000000",
"created_at" => "2017-07-08T15:26:17.04917Z",
"details" => %{"transfer_id" => "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX",
"transfer_type" => "withdraw"}, "id" => XXXXXXXX, "type" => "transfer"}, ...]}
"""
defdelegate list_account_history(account_id, params \\ %{}), to: ExGdax.Private, as: :list_account_history
@doc """
Lists holds on an account for active orders or withdraw requests.
## Parameters
Name | Description
:------- | :----------
`before` | Request page before (newer) this pagination id.
`after` | Request page after (older) this pagination id.
`limit` | Number of results per request. Maximum 100. (default 100)
## Examples
iex> ExGdax.list_holds(account["id"])
{:ok, []}
"""
defdelegate list_holds(account_id, params \\ %{}), to: ExGdax.Private, as: :list_holds
@doc """
Place a new order.
Refer to params listed in [GDAX API docs](https://docs.gdax.com/#place-a-new-order)
## Examples
iex> ExGdax.create_order(%{type: "limit", side: "buy", product_id: "ETH-USD", price: "0.50", size: "1.0"})
{:ok,
%{"created_at" => "2017-08-20T23:29:17.752637Z",
"executed_value" => "0.0000000000000000",
"fill_fees" => "0.0000000000000000", "filled_size" => "0.00000000",
"id" => "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX", "post_only" => false,
"price" => "0.50000000", "product_id" => "ETH-USD",
"settled" => false, "side" => "buy", "size" => "1.00000000",
"status" => "pending", "stp" => "dc", "time_in_force" => "GTC",
"type" => "limit"}}
"""
defdelegate create_order(params), to: ExGdax.Private, as: :create_order
@doc """
Cancel all open orders.
## Examples
iex> ExGdax.cancel_orders()
{:ok, ["XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX"]}
"""
defdelegate cancel_orders, to: ExGdax.Private, as: :cancel_orders
@doc """
List open orders.
## Parameters
Name | Default | Description
:----------- | :---------------------- | :----------
`status` | [open, pending, active] | Limit list of orders to these statuses.
`product_id` | | Only list orders for a specific product.
`before` | | Request page before (newer) this pagination id.
`after` | | Request page after (older) this pagination id.
`limit` | | Number of results per request. Maximum 100. (default 100)
## Examples
iex> ExGdax.list_orders(%{status: "open"})
{:ok,
[%{"created_at" => "2017-08-20T23:31:49.235409Z",
"executed_value" => "0.0000000000000000",
"fill_fees" => "0.0000000000000000", "filled_size" => "0.00000000",
"id" => "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX", "post_only" => true,
"price" => "0.75000000", "product_id" => "ETH-USD",
"settled" => false, "side" => "buy", "size" => "1.00000000",
"status" => "open", "stp" => "dc", "time_in_force" => "GTC",
"type" => "limit"}]}
"""
defdelegate list_orders(params \\ %{}), to: ExGdax.Private, as: :list_orders
@doc """
Get an order.
## Examples
iex> ExGdax.get_order(order["id"])
{:ok,
%{"created_at" => "2017-08-20T23:31:49.235409Z",
"executed_value" => "0.0000000000000000",
"fill_fees" => "0.0000000000000000", "filled_size" => "0.00000000",
"id" => "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX", "post_only" => true,
"price" => "0.75000000", "product_id" => "ETH-USD",
"settled" => false, "side" => "buy", "size" => "1.00000000",
"status" => "open", "stp" => "dc", "time_in_force" => "GTC",
"type" => "limit"}}
"""
defdelegate get_order(order_id), to: ExGdax.Private, as: :get_order
@doc """
Get a list of recent fills.
## Parameters
Name | Description
:----------- | :----------
`order_id` | Limit list of fills to this order_id.
`product_id` | Limit list of fills to this product_id.
`before` | Request page before (newer) this pagination id.
`after` | Request page after (older) this pagination id.
`limit` | Number of results per request. Maximum 100. (default 100)
## Examples
iex> ExGdax.list_fills(%{product_id: "ETH-USD", limit: 1})
{:ok,
[%{"created_at" => "2017-08-12T21:25:43.453Z",
"fee" => "0.0000000000000000", "liquidity" => "M",
"order_id" => "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX",
"price" => "305.00000000", "product_id" => "ETH-USD",
"profile_id" => "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX",
"settled" => true, "side" => "buy", "size" => "0.29000000",
"trade_id" => XXXXXXX, "user_id" => "XXXXXXXXXXXXXXXXXXXXXXX"}]}
"""
defdelegate list_fills(params \\ %{}), to: ExGdax.Private, as: :list_fills
@doc """
List funding records.
## Parameters
Name | Options | Description
:------- | :-------------------------------- | :----------
`status` | outstanding, settled, or rejected | Limit list of funding records to these statuses.
`before` | | Request page before (newer) this pagination id.
`after` | | Request page after (older) this pagination id.
`limit` | | Number of results per request. Maximum 100. (default 100)
"""
defdelegate list_funding(params \\ %{}), to: ExGdax.Private, as: :list_funding
@doc """
Repay funding. Repays the older funding records first.
## Parameters
Name | Description
:--------- | :----------
`amount` | Amount of currency to repay.
`currency` | The currency, example `USD`.
"""
defdelegate repay_funding(params), to: ExGdax.Private, as: :repay_funding
@doc """
Transfer funds between your standard/default profile and a margin profile.
## Parameters
Name | Description
:------------------ | :----------
`margin_profile_id` | The id of the margin profile you’d like to deposit to or withdraw from.
`type` | `deposit` or `withdraw`.
`currency` | The currency to transfer, ex: `BTC` or `USD`.
`amount` | The amount to transfer between the default and margin profile.
"""
defdelegate margin_transfer(params), to: ExGdax.Private, as: :margin_transfer
@doc """
An overview of your profile.
## Examples
iex> ExGdax.get_position()
{:ok,
%{"accounts" => %{"BTC" => %{"balance" => "0.0000000000000000",
"default_amount" => "0", "funded_amount" => "0",
"hold" => "0.0000000000000000",
"id" => "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX"},
"ETH" => %{"balance" => "0.0000000000000000", "default_amount" => "0",
"funded_amount" => "0", "hold" => "0.0000000000000000",
"id" => "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX"},
"LTC" => %{"balance" => "0.0000000000000000", "default_amount" => "0",
"funded_amount" => "0", "hold" => "0.0000000000000000",
"id" => "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX"},
"USD" => %{"balance" => "0.0000000000000000", "default_amount" => "0",
"funded_amount" => "0", "hold" => "0.0000000000000000",
"id" => "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX"}},
"profile_id" => "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX", "status" => "active",
"user_id" => "XXXXXXXXXXXXXXXXXXXXXXXX"}}
"""
defdelegate get_position, to: ExGdax.Private, as: :get_position
@doc """
Close your position.
## Parameters
Name | Description
:----------- | :----------
`repay_only` | Either `true` or `false`
"""
defdelegate close_position(params), to: ExGdax.Private, as: :close_position
@doc """
Deposit funds from a payment method.
## Parameters
Name | Description
:------------------ | :----------
`amount` | The amount to deposit.
`currency` | The type of currency.
`payment_method_id` | ID of the payment method.
"""
defdelegate deposit_from_payment_method(params), to: ExGdax.Private, as: :deposit_from_payment_method
@doc """
Deposit funds from a coinbase account.
## Parameters
Name | Description
:-------------------- | :----------
`amount` | The amount to deposit.
`currency` | The type of currency.
`coinbase_account_id` | ID of the coinbase account.
## Examples
iex> ExGdax.deposit_from_coinbase(%{amount: "0.1", currency: "ETH", coinbase_account_id: "XX<KEY>"})
{:ok,
%{"amount" => "0.10000000", "currency" => "ETH",
"id" => "XXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX"}}
"""
defdelegate deposit_from_coinbase(params), to: ExGdax.Private, as: :deposit_from_coinbase
@doc """
Withdraw funds to a payment method.
## Parameters
Name | Description
:------------------ | :----------
`amount` | The amount to withdraw.
`currency` | The type of currency.
`payment_method_id` | ID of the payment method.
"""
defdelegate withdraw_to_payment_method(params), to: ExGdax.Private, as: :withdraw_to_payment_method
@doc """
Withdraw funds to a coinbase account.
## Parameters
Name | Description
:-------------------- | :----------
`amount` | The amount to withdraw.
`currency` | The type of currency.
`coinbase_account_id` | ID of the coinbase account.
"""
defdelegate withdraw_to_coinbase(params), to: ExGdax.Private, as: :withdraw_to_coinbase
@doc """
Withdraw funds to a crypto address.
## Parameters
Name | Description
:--------------- | :----------
`amount` | The amount to withdraw.
`currency` | The type of currency.
`crypto_address` | A crypto address of the recipient.
## Examples
iex> ExGdax.withdraw_to_crypto(%{amount: "0.01", currency: "ETH", crypto_address: "0x30a9f8b57e2dcb519a4e4982ed6379f9dd6a0bfc"})
{:ok,
%{"amount" => "0.01000000", "currency" => "ETH",
"id" => "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX"}}
"""
defdelegate withdraw_to_crypto(params), to: ExGdax.Private, as: :withdraw_to_crypto
@doc """
List your payment methods.
## Examples
iex> ExGdax.list_payment_methods()
{:ok,
[%{"allow_buy" => false, "allow_deposit" => true, "allow_sell" => true,
"allow_withdraw" => false, "created_at" => "2015-11-03T00:32:02Z",
"currency" => "USD",
"fiat_account" => %{"id" => "XXXXXXXX-<KEY>",
"resource" => "account",
"resource_path" => "/v2/accounts/XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX"}, ...]}
"""
defdelegate list_payment_methods, to: ExGdax.Private, as: :list_payment_methods
@doc """
List your coinbase accounts.
## Examples
iex> ExGdax.list_coinbase_accounts()
{:ok,
[%{"active" => true, "balance" => "0.00000000", "currency" => "ETH",
"id" => "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX", "name" => "<NAME>",
"primary" => false, "type" => "wallet"}, ...]}
"""
defdelegate list_coinbase_accounts, to: ExGdax.Private, as: :list_coinbase_accounts
@doc """
Create a report.
## Parameters
Name | Description
:----------- | :----------
`type` | `fills` or `account`.
`start_date` | Starting date for the report (inclusive).
`end_date` | Ending date for the report (inclusive).
`product_id` | ID of the product to generate a fills report for. E.g. BTC-USD. Required if `type` is `fills`.
`account_id` | ID of the account to generate an account report for. Required if `type` is `account`.
`format` | `pdf` or `csv` (defualt is `pdf`).
`email` | Email address to send the report to (optional).
"""
defdelegate create_report(params), to: ExGdax.Private, as: :create_report
@doc """
Get report status.
"""
defdelegate get_report(report_id), to: ExGdax.Private, as: :get_report
@doc """
List your 30-day trailing volume for all products.
## Examples
iex> ExGdax.list_trailing_volume()
{:ok,
[%{"exchange_volume" => "8704829.60943332", "product_id" => "ETH-USD",
"recorded_at" => "2017-07-31T00:17:16.331884Z", "volume" => "1.00000000"}]}
"""
defdelegate list_trailing_volume, to: ExGdax.Private, as: :list_trailing_volume
end
|
lib/ex_gdax.ex
| 0.744563 | 0.407746 |
ex_gdax.ex
|
starcoder
|
defmodule Dynamo.Helpers.ContentFor do
@moduledoc """
This module exposes macros that allows a child
template to assign data which will eventually
be read and use by the parent.
## Examples
Imagine the following layout:
<html>
<head>
<title>
<%= content_for(:title) %>
</title>
</head>
<body>
<%= content_for(:template) %>
<%= content_for(:footer) || "Default footer" %>
</body>
</html>
And the following template:
<% content_for :title do %>
Title from template
<% end %>
Template body.
This is returned by content_for :template.
<% content_for :footer, "Template footer" %>
Whenever this template and layout pair are used,
the template is first rendered, collecting the
`content_for` chunks used and then finally assigning
the whole template to a `:template` chunk. The layout
can later retrieve any chunk by calling `content_for(key)`.
## Implementation details
Whenever `content_for` is called, the contents are stored
in `conn`, which is then reassigned. The connection is
passed transparently to `content_for` via a macro. The
decision to make this transparent is because different
templates implementations may use other mechanisms to pass
the data around, which does not require mangling with the
connection.
Manual interaction with the connection can be done via
`append_content` and `get_content` functions.
"""
# The conn.private key under which all content chunks are stored
# as a keyword list of `key => binary`.
@key :dynamo_contents
@doc """
Stores the given `value` under the given `key`. This value
can later be retrieved by calling `content_for(key)`.
"""
defmacro content_for(key, do: value) do
# `var!(conn)` intentionally rebinds the caller's `conn` so the
# updated connection flows onward through the template.
quote do
var!(conn) = unquote(__MODULE__).append_content(var!(conn), unquote(key), unquote(value))
end
end
# Variant that takes the value directly instead of a do-block,
# e.g. `content_for :footer, "Template footer"`.
defmacro content_for(key, value) do
quote do
var!(conn) = unquote(__MODULE__).append_content(var!(conn), unquote(key), unquote(value))
end
end
@doc """
Gets the content for the given key. If the stored value
is a function, it is automatically invoked, otherwise
returns the raw value.
"""
defmacro content_for(key) do
quote do
unquote(__MODULE__).get_content(var!(conn), unquote(key))
end
end
@doc """
Appends a content chunk to the connection.
"""
# Concatenates onto any existing chunk stored under `key`; the guard
# restricts values to binaries so `<>` is always valid.
def append_content(conn, key, value) when is_atom(key) and is_binary(value) do
value = Keyword.update(conn.private[@key] || [], key, value, &(&1 <> value))
# NOTE(review): `conn.put_private/2` is Dynamo's record-call syntax for
# the connection; it returns a new conn which must be captured.
conn.put_private(@key, value)
end
@doc """
Puts a content chunk to the connection replacing previous entries.
"""
def put_content(conn, key, value) when is_atom(key) and is_binary(value) do
value = Keyword.put(conn.private[@key] || [], key, value)
conn.put_private(@key, value)
end
@doc """
Gets a content chunk from the connection.
"""
# Returns nil when no chunk was stored under `key`.
def get_content(conn, key) when is_atom(key) do
conn.private[@key][key]
end
end
|
lib/dynamo/helpers/content_for.ex
| 0.835886 | 0.478651 |
content_for.ex
|
starcoder
|
defmodule ExAws.Boto.Protocol.Query do
require Logger
import SweetXml, only: [sigil_x: 2]
@behaviour ExAws.Boto.Protocol
@doc """
Converts an `ExAws.Boto.Operation`, which describes an API method invocation in terms of `ExAws.Boto`
into an `ExAws.Operation.Query`, which describes an HTTP request. This method is also responsible for
converting the input object into something that an AWS query strings based API can understand.
"""
@impl true
def make_operation(operation) do
# Destructure the op spec; matching on "protocol" => "query" asserts this
# module only handles query-protocol services (anything else raises).
%ExAws.Boto.Operation{
name: op_name_str,
input: input_mod,
http: %{
"requestUri" => http_path
},
metadata: %{
"protocol" => "query",
"endpointPrefix" => endpoint_prefix,
"apiVersion" => version
}
} = operation.__struct__.op_spec()
# Serialize the input struct (if any) into flat query-string parameters,
# dropping nil values and expanding lists with a ".member" prefix.
params =
case input_mod do
nil ->
%{}
mod ->
to_aws(mod.shape_spec(), operation.input)
end
|> Enum.flat_map(fn
{_key, nil} ->
[]
{key, val} when is_list(val) ->
ExAws.Utils.format(val, type: :xml, prefix: "#{key}.member")
{key, val} ->
ExAws.Utils.format(val, type: :xml, prefix: key)
end)
%ExAws.Operation.Query{
path: http_path,
action: op_name_str,
service: String.to_atom(endpoint_prefix),
params: [
{"Action", op_name_str},
{"Version", version}
| params
],
parser: fn result, _action, _config ->
parse_response(operation, result)
end
}
end
@doc """
Parses an ExAWS response into an `ExAws.Boto` object.
"""
# NOTE(review): only the `{:ok, _}` shape is handled; error tuples from
# ExAws will raise a FunctionClauseError here — confirm that is intended.
@impl true
def parse_response(operation, {:ok, %{body: xml}}) do
%ExAws.Boto.Operation{
output: output_mod,
output_wrapper: wrapper
} = operation.__struct__.op_spec()
# Some APIs nest the payload under a wrapper element; unwrap it first.
result =
case wrapper do
nil -> SweetXml.xpath(xml, ~x"./"e)
w -> SweetXml.xpath(xml, ~x"./#{w}")
end
case output_mod do
nil -> {:ok, nil}
mod -> {:ok, parse(mod.shape_spec(), result)}
end
end
@doc """
Given a `ExAws.Boto.Shape` and a fragment of XML, produce a domain object representing that XML
"""
@spec parse(ExAws.Boto.Shape.t(), term()) :: struct()
def parse(shape_spec, xml)
# Missing structure element: build the struct with all defaults.
def parse(%ExAws.Boto.Shape.Structure{module: module}, nil) do
Kernel.struct(module, [])
end
# Recursively decode each declared member from its XML child element.
def parse(%ExAws.Boto.Shape.Structure{module: module, members: members}, xml) do
result =
members
|> Enum.map(fn {attr, {member_name, member_mod}} ->
attr_xml = SweetXml.xpath(xml, ~x"./#{member_name}"e)
{
attr,
parse(member_mod.shape_spec(), attr_xml)
}
end)
Kernel.struct(module, result)
end
def parse(%ExAws.Boto.Shape.List{}, nil), do: []
def parse(%ExAws.Boto.Shape.List{member: member_module}, xml) do
xml
|> SweetXml.xpath(~x"./member"el)
|> Enum.map(&parse(member_module.shape_spec(), &1))
end
def parse(%ExAws.Boto.Shape.Map{}, nil), do: %{}
def parse(%ExAws.Boto.Shape.Map{key_module: key_module, value_module: value_module}, xml) do
xml
|> SweetXml.xpath(~x".")
|> Enum.map(fn {k, v} ->
{
parse(key_module.shape_spec(), k),
parse(value_module.shape_spec(), v)
}
end)
|> Enum.into(%{})
end
# Any other shape with no XML present decodes to nil.
def parse(_, nil), do: nil
def parse(%ExAws.Boto.Shape.Basic{type: t}, xml) when t in ["integer", "long"] do
xml
|> SweetXml.xpath(~x"./text()"s)
|> String.to_integer()
end
def parse(%ExAws.Boto.Shape.Basic{type: "boolean"}, xml) do
SweetXml.xpath(xml, ~x"./text()"s) == "true"
end
def parse(%ExAws.Boto.Shape.Basic{type: "timestamp"}, xml) do
# The `0` match asserts a UTC (+00:00) timestamp; a non-UTC offset
# would raise a MatchError here.
{:ok, timestamp, 0} =
xml
|> SweetXml.xpath(~x"./text()"s)
|> DateTime.from_iso8601()
timestamp
end
def parse(%ExAws.Boto.Shape.Basic{type: "string"}, xml) do
xml |> SweetXml.xpath(~x"./text()"s)
end
# Inverse of parse/2: turn a domain struct into the nested map consumed by
# make_operation/1. Nil members are kept and filtered out by the caller.
defp to_aws(%ExAws.Boto.Shape.Structure{module: module, members: members}, %module{} = req) do
members
|> Enum.map(fn {property, {name, member_mod}} ->
{
name,
case Map.get(req, property) do
nil -> nil
value -> to_aws(member_mod.shape_spec(), value)
end
}
end)
|> Enum.into(%{})
end
defp to_aws(%ExAws.Boto.Shape.List{member_name: m_name, member: m_mod}, list)
when is_list(list) do
%{
m_name => list |> Enum.map(&to_aws(m_mod.shape_spec(), &1))
}
end
# Scalars pass through unchanged.
defp to_aws(%ExAws.Boto.Shape.Basic{}, val) do
val
end
end
|
lib/ex_aws/boto/protocol/query.ex
| 0.735642 | 0.432723 |
query.ex
|
starcoder
|
defmodule Elixium.Utilities do
  @moduledoc """
  Various functions that don't need their own module, since they're used in
  multiple places for different things.
  """

  @doc "SHA-256 digest of `data`, returned as a raw binary."
  def sha256(data),
    do: hash(data, :sha256)

  defp hash(data, algorithm),
    do: :crypto.hash(algorithm, data)

  @doc "SHA-256 digest of `input`, Base16 (uppercase hex) encoded."
  def sha_base16(input) do
    :sha256
    |> :crypto.hash(input)
    |> Base.encode16()
  end

  @doc "Joins the list items into a single string before hashing with SHA3."
  def sha3_base16(list) when is_list(list) do
    list
    |> Enum.join()
    |> sha3_base16()
  end

  # Concatintes the list items together to a string, hashes the block header with keccak sha3 algorithm, return the encoded string
  def sha3_base16(input) do
    :sha3_256
    |> :keccakf1600.hash(input)
    |> Base.encode16()
  end

  @doc """
  The merkle root lets us represent a large dataset using only one string. We can be confident that
  if any of the data changes, the merkle root will be different, which invalidates the dataset
  """
  # NOTE(review): an empty list never terminates (it recurses on [] forever);
  # callers appear to guarantee non-empty input — confirm.
  @spec calculate_merkle_root(list) :: String.t()
  def calculate_merkle_root(list) do
    list
    |> Enum.chunk_every(2)
    |> Enum.map(&sha_base16(&1))
    |> calculate_merkle_root(true)
  end

  # Base case: a single hash remains — that is the merkle root.
  # (Pattern match replaces the O(n) `length(list) == 1` guard.)
  def calculate_merkle_root([root], true), do: root
  def calculate_merkle_root(list, true), do: calculate_merkle_root(list)

  @doc """
  PKCS#7-style padding: appends `to_add` bytes each valued `to_add` so the
  result's byte size is a multiple of `block_size`. A full block of padding
  is added when `data` is already aligned.
  """
  def pad(data, block_size) do
    to_add = block_size - rem(byte_size(data), block_size)
    data <> String.duplicate(<<to_add>>, to_add)
  end

  @doc "Left-pads `bytes` with zero bytes up to `size` bytes total."
  def zero_pad(bytes, size) do
    String.duplicate(<<0>>, size - byte_size(bytes)) <> bytes
  end

  @doc """
  Gets an option that was passed in as a command line argument
  """
  @spec get_arg(atom, any) :: String.t()
  def get_arg(arg, not_found \\ nil), do: Map.get(args(), arg, not_found)

  @doc """
  Parses `--key=value` / `--flag` command line arguments into a map of
  `%{key: "value", flag: true}`.
  """
  def args do
    :init.get_plain_arguments()
    |> Enum.at(1)
    |> List.to_string()
    |> String.split("--")
    |> Enum.filter(& &1 != "")
    |> Enum.map(fn a ->
      kv =
        a
        |> String.trim()
        |> String.replace("=", " ")
        |> String.replace(~r/\s+/, " ")
        |> String.split(" ")

      case kv do
        [key, value] -> {String.to_atom(key), value}
        [key] -> {String.to_atom(key), true}
      end
    end)
    |> Map.new()
  end
end
|
lib/utilities.ex
| 0.727879 | 0.443359 |
utilities.ex
|
starcoder
|
defmodule Rayray.Matrix do
  @moduledoc """
  Square matrices stored as nested row lists, plus the affine transformation
  constructors (translation, scaling, rotation, shearing) used by the ray
  tracer. Most operations assume 4x4 matrices; determinants also support
  2x2/3x3 submatrices.
  """
  alias Rayray.Tuple

  defstruct impl: []

  @doc "Wraps a list of rows into a matrix struct."
  def new(rows) do
    %__MODULE__{impl: rows}
  end

  @doc "The 4x4 identity matrix."
  def identity() do
    %__MODULE__{impl: [[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]]}
  end

  @doc "Element at zero-based `{row, col}`."
  def get(%__MODULE__{impl: impl}, {row, col}) do
    impl
    |> Enum.at(row)
    |> Enum.at(col)
  end

  @doc "Exact structural equality of two matrices."
  def equal?(m1, m2) do
    m1.impl == m2.impl
  end

  @doc """
  Element-wise approximate equality: true when every pair of corresponding
  entries differs by less than `epsilon` in absolute value.
  """
  def fuzzy_equal?(%__MODULE__{impl: rows1}, %__MODULE__{} = m2, epsilon) do
    rows1
    |> Enum.with_index()
    |> Enum.reduce_while(true, fn
      {_row, _i}, false ->
        {:halt, false}

      {row, i}, true ->
        row_res =
          row
          |> Enum.with_index()
          |> Enum.reduce_while(true, fn
            {_el, _j}, false ->
              {:halt, false}

            {el, j}, true ->
              # BUGFIX: compare the magnitude of the difference. Without
              # abs/1, any large *negative* difference (el much smaller than
              # the corresponding entry of m2) incorrectly passed as "equal".
              if abs(el - get(m2, {i, j})) < epsilon do
                {:cont, true}
              else
                {:halt, false}
              end
          end)

        if row_res do
          {:cont, true}
        else
          {:halt, false}
        end
    end)
  end

  @doc "Matrix product of two 4x4 matrices."
  def multiply(%__MODULE__{} = m1, %__MODULE__{} = m2) do
    empty = [[], [], [], []]

    for row <- 0..3,
        col <- 0..3 do
      # Dot product of m1's row with m2's column.
      val =
        get(m1, {row, 0}) * get(m2, {0, col}) +
          get(m1, {row, 1}) * get(m2, {1, col}) +
          get(m1, {row, 2}) * get(m2, {2, col}) +
          get(m1, {row, 3}) * get(m2, {3, col})

      {row, col, val}
    end
    |> Enum.reduce(empty, fn {row, col, val}, acc ->
      List.update_at(acc, row, fn oldrow ->
        List.insert_at(oldrow, col, val)
      end)
    end)
    |> new()
  end

  @doc "Multiplies a matrix by a `Rayray.Tuple` (point/vector), returning a tuple."
  def multiply(%__MODULE__{} = m, t) do
    new_t =
      Enum.reduce(m.impl, [], fn row, acc ->
        row_as_tuple = apply(Tuple, :tuple, row)
        dot_product = Tuple.dot(row_as_tuple, t)
        [dot_product | acc]
      end)
      |> Enum.reverse()

    apply(Tuple, :tuple, new_t)
  end

  @doc "Transpose of a 4x4 matrix (rows become columns)."
  def transpose(%__MODULE__{impl: rows}) do
    Enum.map(0..3, fn i ->
      Enum.map(rows, fn row ->
        Enum.at(row, i)
      end)
    end)
    |> new()
  end

  @doc "Determinant. 2x2 directly; larger matrices via cofactor expansion."
  def determinant(%__MODULE__{impl: [[a, b], [c, d]]}) do
    a * d - b * c
  end

  def determinant(%__MODULE__{impl: [row | _rest]} = m) do
    # Expand along the first row.
    row
    |> Enum.with_index()
    |> Enum.reduce(0, fn {el, i}, acc ->
      el * cofactor(m, 0, i) + acc
    end)
  end

  @doc "Matrix with the given `row` and `column` removed."
  def submatrix(%__MODULE__{impl: rows}, row, column) do
    rows
    |> List.delete_at(row)
    |> Enum.map(fn r ->
      List.delete_at(r, column)
    end)
    |> new()
  end

  @doc "Determinant of the submatrix at `{row, column}`."
  def minor(%__MODULE__{} = m, row, column) do
    m
    |> submatrix(row, column)
    |> determinant()
  end

  @doc "Signed minor: negated when `row + column` is odd."
  def cofactor(m, row, column) do
    minor = minor(m, row, column)

    if rem(row + column, 2) == 0 do
      minor
    else
      -1 * minor
    end
  end

  @doc "A matrix is invertible iff its determinant is non-zero."
  def invertible?(m) do
    determinant(m) != 0
  end

  @doc "Inverse via the adjugate (transposed cofactor matrix) over the determinant."
  def inverse(%__MODULE__{impl: rows} = m) do
    cofactor_matrix =
      rows
      |> Enum.with_index()
      |> Enum.map(fn {row, i} ->
        row
        |> Enum.with_index()
        |> Enum.map(fn {_el, j} ->
          cofactor(m, i, j)
        end)
      end)
      |> new()

    transposed_cofactor_matrix = transpose(cofactor_matrix)
    original_determinant = determinant(m)

    Enum.map(transposed_cofactor_matrix.impl, fn row ->
      Enum.map(row, fn el ->
        el / original_determinant
      end)
    end)
    |> new()
  end

  @doc "Translation matrix moving points by `{x, y, z}`."
  def translation(x, y, z) do
    identity().impl
    |> List.update_at(0, fn old ->
      List.update_at(old, 3, fn _ -> x end)
    end)
    |> List.update_at(1, fn old ->
      List.update_at(old, 3, fn _ -> y end)
    end)
    |> List.update_at(2, fn old ->
      List.update_at(old, 3, fn _ -> z end)
    end)
    |> new()
  end

  @doc "Scaling matrix with factors `{x, y, z}` on the diagonal."
  def scaling(x, y, z) do
    identity().impl
    |> List.update_at(0, fn old ->
      List.update_at(old, 0, fn _ -> x end)
    end)
    |> List.update_at(1, fn old ->
      List.update_at(old, 1, fn _ -> y end)
    end)
    |> List.update_at(2, fn old ->
      List.update_at(old, 2, fn _ -> z end)
    end)
    |> new()
  end

  @doc "Rotation about the x axis by `r` radians."
  def rotation_x(r) do
    new([
      [1, 0, 0, 0],
      [0, :math.cos(r), -1 * :math.sin(r), 0],
      [0, :math.sin(r), :math.cos(r), 0],
      [0, 0, 0, 1]
    ])
  end

  @doc "Rotation about the y axis by `r` radians."
  def rotation_y(r) do
    new([
      [:math.cos(r), 0, :math.sin(r), 0],
      [0, 1, 0, 0],
      [-1 * :math.sin(r), 0, :math.cos(r), 0],
      [0, 0, 0, 1]
    ])
  end

  @doc "Rotation about the z axis by `r` radians."
  def rotation_z(r) do
    new([
      [:math.cos(r), -1 * :math.sin(r), 0, 0],
      [:math.sin(r), :math.cos(r), 0, 0],
      [0, 0, 1, 0],
      [0, 0, 0, 1]
    ])
  end

  @doc "Shearing matrix; each parameter moves one coordinate in proportion to another."
  def shearing(xy, xz, yx, yz, zx, zy) do
    new([
      [1, xy, xz, 0],
      [yx, 1, yz, 0],
      [zx, zy, 1, 0],
      [0, 0, 0, 1]
    ])
  end
end
|
lib/rayray/matrix.ex
| 0.753739 | 0.562357 |
matrix.ex
|
starcoder
|
defmodule Benchee.Scenario do
@moduledoc """
A Scenario in Benchee is a particular case of a whole benchmarking suite. That
is the combination of a particular function to benchmark (`job_name` and
`function`) in combination with a specific input (`input_name` and `input`).
It then gathers all data measured for this particular combination during
`Benchee.Benchmark.collect/3`, which are then used later in the process by
`Benchee.Statistics` to compute the relevant statistics.
`name` is the name that should be used by formatters to display scenarios as
it potentially includes the `tag` present when loading scenarios that were
saved before. See `display_name/1`.
"""
alias Benchee.CollectionData
defstruct [
:name,
:job_name,
:function,
:input_name,
:input,
run_time_data: %CollectionData{},
memory_usage_data: %CollectionData{},
before_each: nil,
after_each: nil,
before_scenario: nil,
after_scenario: nil,
tag: nil
]
@type t :: %__MODULE__{
name: String.t(),
job_name: String.t(),
function: fun,
input_name: String.t() | nil,
input: any | nil,
run_time_data: CollectionData.t(),
memory_usage_data: CollectionData.t(),
before_each: fun | nil,
after_each: fun | nil,
before_scenario: fun | nil,
after_scenario: fun | nil,
tag: String.t() | nil
}
@doc """
Returns the correct name to display of the given scenario data.
In the normal case this is `job_name`, however when scenarios are loaded they
are tagged and these tags should be shown for disambiguation.
## Examples
iex> alias Benchee.Scenario
iex> Scenario.display_name(%Scenario{job_name: "flat_map"})
"flat_map"
iex> Scenario.display_name(%Scenario{job_name: "flat_map", tag: "master"})
"flat_map (master)"
iex> Scenario.display_name(%{job_name: "flat_map"})
"flat_map"
"""
@spec display_name(t) :: String.t()
def display_name(%{job_name: job_name, tag: nil}), do: job_name
def display_name(%{job_name: job_name, tag: tag}), do: "#{job_name} (#{tag})"
def display_name(%{job_name: job_name}), do: job_name
@doc """
Returns `true` if data of the provided type has been fully processed, `false` otherwise.
Currently available types are `run_time` and `memory`. Reasons they might not have been processed
yet are:
* Suite wasn't configured to collect them at all
* `Benchee.statistics/1` hasn't been called yet so that data was collected but statistics
aren't present yet
## Examples
iex> alias Benchee.Scenario
iex> alias Benchee.Statistics
iex> scenario = %Scenario{run_time_data: %Benchee.CollectionData{statistics: %Statistics{sample_size: 100}}}
iex> Scenario.data_processed?(scenario, :run_time)
true
iex> scenario = %Scenario{memory_usage_data: %Benchee.CollectionData{statistics: %Statistics{sample_size: 1}}}
iex> Scenario.data_processed?(scenario, :memory)
true
iex> scenario = %Scenario{memory_usage_data: %Benchee.CollectionData{statistics: %Statistics{sample_size: 0}}}
iex> Scenario.data_processed?(scenario, :memory)
false
"""
@spec data_processed?(t, :run_time | :memory) :: boolean
def data_processed?(scenario, :run_time) do
scenario.run_time_data.statistics.sample_size > 0
end
def data_processed?(scenario, :memory) do
scenario.memory_usage_data.statistics.sample_size > 0
end
end
|
lib/benchee/scenario.ex
| 0.900573 | 0.772101 |
scenario.ex
|
starcoder
|
defmodule ExDns.Message.Answer do
  @moduledoc """
  Encodes and decodes Answer records
  4.1.3. Resource record format
  The answer, authority, and additional sections all share the same
  format: a variable number of resource records, where the number of
  records is specified in the corresponding count field in the header.
  Each resource record has the following format:
  1 1 1 1 1 1
  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
  +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
  | |
  / /
  / NAME /
  | |
  +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
  | TYPE |
  +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
  | CLASS |
  +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
  | TTL |
  | |
  +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
  | RDLENGTH |
  +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--|
  / RDATA /
  / /
  +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
  where:
  NAME a domain name to which this resource record pertains.
  TYPE two octets containing one of the RR type codes. This
  field specifies the meaning of the data in the RDATA
  field.
  CLASS two octets which specify the class of the data in the
  RDATA field.
  TTL a 32 bit unsigned integer that specifies the time
  interval (in seconds) that the resource record may be
  cached before it should be discarded. Zero values are
  interpreted to mean that the RR can only be used for the
  transaction in progress, and should not be cached.
  RDLENGTH an unsigned 16 bit integer that specifies the length in
  octets of the RDATA field.
  RDATA a variable length string of octets that describes the
  resource. The format of this information varies
  according to the TYPE and CLASS of the resource record.
  For example, if the TYPE is A and the CLASS is IN,
  the RDATA field is a 4 octet ARPA Internet address.
  """

  @doc "Encodes an answer resource record. Currently an unimplemented stub returning `nil`."
  def encode(_answer) do
    # TODO: implement resource record encoding (unused param underscored to
    # silence the compiler warning; explicit nil keeps the original return).
    nil
  end

  @doc "Decodes an answer resource record. Currently a stub that returns `{:ok, nil, message}`."
  def decode(_header, _rest, message) do
    # TODO: actually parse the resource record from `_rest`.
    {:ok, nil, message}
  end
end
|
lib/ex_dns/message/answer.ex
| 0.544075 | 0.517327 |
answer.ex
|
starcoder
|
defmodule ExPesa.Mpesa.B2B do
  @moduledoc """
  This API enables Business to Business (B2B) transactions between a business and another business.
  Use of this API requires a valid and verified B2B M-Pesa short code for the business initiating the transaction and the both businesses involved in the transaction.
  """
  import ExPesa.Mpesa.MpesaBase
  import ExPesa.Util

  @doc """
  Initiates the Mpesa B2B request.
  ## Configuration
  Add below config to dev.exs / prod.exs files
  This assumes you have a clear understanding of how Daraja API works. See docs here https://developer.safaricom.co.ke/docs#b2b-api
  #### B2B - Configuration Parameters
  - `initiator` - This is the credential/username used to authenticate the transaction request.
  Environment
  - production
  - create a user with api access method (access channel)
  - Enter user name
  - assign business manager role and B2B ORG API initiator role.
  Use the username from your notification channel (SMS)
  - sandbox - use your own custom username.
  - `timeout_url` - The path that stores information of time out transactions. It should be properly validated to make sure that it contains the port, URI and domain name or publicly available IP.
  - `result_url` - The path that receives results from M-Pesa it should be properly validated to make sure that it contains the port, URI and domain name or publicly available IP.
  - `security_credential` - To generate security_credential, head over to https://developer.safaricom.co.ke/test_credentials, then Initiator Security Password for your environment.
  `config.exs`
  ```elixir
  config :ex_pesa,
    mpesa: [
      b2b: [
        short_code: "",
        initiator_name: "",
        timeout_url: "",
        result_url: "",
        security_credential: "<credential here>"
      ]
    ]
  ```
  Alternatively, generate security credential using certificate
  `cert` - This is the M-Pesa public key certificate used to encrypt your plain password.
  There are 2 types of certificates.
  - sandox - https://developer.safaricom.co.ke/sites/default/files/cert/cert_sandbox/cert.cer .
  - production - https://developer.safaricom.co.ke/sites/default/files/cert/cert_prod/cert.cer .
  `password` - This is a plain unencrypted password.
  Environment
  - production - set password from the organization portal.
  - sandbox - use your own custom password
  `config.exs`
  ```elixir
  config :ex_pesa,
    mpesa: [
      cert: "<certificate content>"
      b2b: [
        short_code: "",
        initiator_name: "",
        password: "<<PASSWORD>>",
        timeout_url: "",
        result_url: ""
      ]
    ]
  ```
  ## Parameters
  attrs: - a map containing:
  - `command_id` - Unique command for each transaction type, possible values are: BusinessPayBill, MerchantToMerchantTransfer, MerchantTransferFromMerchantToWorking, MerchantServicesMMFAccountTransfer, AgencyFloatAdvance.
  - `amount` - The amount being transacted.
  - `receiver_party` - Organization’s short code receiving the funds being transacted.
  - `remarks` - Comments that are sent along with the transaction.
  - `account_reference` - Account Reference mandatory for "BusinessPaybill" CommandID.
  ## Example
  iex> ExPesa.Mpesa.B2B.request(%{command_id: "BusinessPayBill", amount: 10500, receiver_party: 600000, remarks: "B2B Request", account_reference: "BILL PAYMENT"})
  {:ok,
  %{
  "ConversationID" => "AG_20200927_00007d4c98884c889b25",
  "OriginatorConversationID" => "27274-37744848-4",
  "ResponseCode" => "0",
  "ResponseDescription" => "Accept the service request successfully."
  }}
  """
  def request(params) do
    case get_security_credential_for(:b2b) do
      nil -> {:error, "cannot generate security_credential due to missing configuration fields"}
      security_credential -> b2b_request(security_credential, params)
    end
  end

  # Builds and submits the B2B payment request once the four mandatory
  # fields have been pattern-matched out of `params`.
  defp b2b_request(
         security_credential,
         %{
           command_id: command_id,
           amount: amount,
           receiver_party: receiver_party,
           remarks: remarks
         } = params
       ) do
    # `Map.get/2` already defaults to nil; the explicit third argument was redundant.
    account_reference = Map.get(params, :account_reference)

    payload = %{
      "Initiator" => b2b_config(:initiator_name),
      "SecurityCredential" => security_credential,
      "CommandID" => command_id,
      "Amount" => amount,
      "PartyA" => b2b_config(:short_code),
      "SenderIdentifierType" => 4,
      "PartyB" => receiver_party,
      # NOTE: "Reciever" is misspelled in the upstream Daraja API itself —
      # do not "fix" this key or the request will be rejected.
      "RecieverIdentifierType" => 4,
      "Remarks" => remarks,
      "AccountReference" => account_reference,
      "QueueTimeOutURL" => b2b_config(:timeout_url),
      "ResultURL" => b2b_config(:result_url)
    }

    make_request("/mpesa/b2b/v1/paymentrequest", payload)
  end

  # Fallback clause: the mandatory keys were not all present in `params`.
  defp b2b_request(_security_credential, _params) do
    {:error, "Required Parameter missing, 'command_id','amount','receiver_party', 'remarks'"}
  end

  # Reads one key from the :b2b section of the Mpesa config at runtime,
  # replacing the Application.get_env/2 chain repeated throughout the payload.
  defp b2b_config(key), do: Application.get_env(:ex_pesa, :mpesa)[:b2b][key]
end
|
lib/ex_pesa/Mpesa/b2b.ex
| 0.829146 | 0.865622 |
b2b.ex
|
starcoder
|
defmodule AWS.Connect do
@moduledoc """
Amazon Connect is a cloud-based contact center solution that you use to set up
and manage a customer contact center and provide reliable customer engagement at
any scale.
Amazon Connect provides metrics and real-time reporting that enable you to
optimize contact routing. You can also resolve customer issues more efficiently
by getting customers in touch with the appropriate agents.
There are limits to the number of Amazon Connect resources that you can create.
There are also limits to the number of requests that you can make per second.
For more information, see [Amazon Connect Service Quotas](https://docs.aws.amazon.com/connect/latest/adminguide/amazon-connect-service-limits.html)
in the *Amazon Connect Administrator Guide*.
You can connect programmatically to an Amazon Web Services service by using an
endpoint. For a list of Amazon Connect endpoints, see [Amazon Connect Endpoints](https://docs.aws.amazon.com/general/latest/gr/connect_region.html).
Working with contact flows? Check out the [Amazon Connect Flow language](https://docs.aws.amazon.com/connect/latest/adminguide/flow-language.html).
"""
alias AWS.Client
alias AWS.Request
# Static service descriptor consumed by `AWS.Request` when signing and
# routing calls: rest-json protocol, SigV4 signing under the "connect"
# signing name, endpoint prefix "connect", API version 2017-08-08.
def metadata do
%AWS.ServiceMetadata{
abbreviation: nil,
api_version: "2017-08-08",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "connect",
global?: false,
protocol: "rest-json",
service_id: "Connect",
signature_version: "v4",
signing_name: "connect",
target_prefix: nil
}
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.
Associates an approved origin to an Amazon Connect instance.
"""
def associate_approved_origin(%Client{} = client, instance_id, input, options \\ []) do
  # PUT with the approved origin in the body; no query parameters or
  # extra headers are needed for this endpoint.
  path = "/instance/#{AWS.Util.encode_uri(instance_id)}/approved-origin"

  Request.request_rest(client, metadata(), :put, path, [], [], input, options, nil)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.
Allows the specified Amazon Connect instance to access the specified Amazon Lex
or Amazon Lex V2 bot.
"""
def associate_bot(%Client{} = client, instance_id, input, options \\ []) do
url_path = "/instance/#{AWS.Util.encode_uri(instance_id)}/bot"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Associates an existing vocabulary as the default.
Contact Lens for Amazon Connect uses the vocabulary in post-call and real-time
analysis sessions for the given language.
"""
def associate_default_vocabulary(
%Client{} = client,
instance_id,
language_code,
input,
options \\ []
) do
url_path =
"/default-vocabulary/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(language_code)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.
Associates a storage resource type for the first time. You can only associate
one type of storage configuration in a single call. This means, for example,
that you can't define an instance with multiple S3 buckets for storing chat
transcripts.
This API does not create a resource that doesn't exist. It only associates it to
the instance. Ensure that the resource being specified in the storage
configuration, like an S3 bucket, exists when being used for association.
"""
def associate_instance_storage_config(%Client{} = client, instance_id, input, options \\ []) do
url_path = "/instance/#{AWS.Util.encode_uri(instance_id)}/storage-config"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.
Allows the specified Amazon Connect instance to access the specified Lambda
function.
"""
def associate_lambda_function(%Client{} = client, instance_id, input, options \\ []) do
url_path = "/instance/#{AWS.Util.encode_uri(instance_id)}/lambda-function"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.
Allows the specified Amazon Connect instance to access the specified Amazon Lex
bot.
"""
def associate_lex_bot(%Client{} = client, instance_id, input, options \\ []) do
url_path = "/instance/#{AWS.Util.encode_uri(instance_id)}/lex-bot"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.
Associates a set of quick connects with a queue.
"""
def associate_queue_quick_connects(
%Client{} = client,
instance_id,
queue_id,
input,
options \\ []
) do
url_path =
"/queues/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(queue_id)}/associate-quick-connects"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Associates a set of queues with a routing profile.
"""
def associate_routing_profile_queues(
%Client{} = client,
instance_id,
routing_profile_id,
input,
options \\ []
) do
url_path =
"/routing-profiles/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(routing_profile_id)}/associate-queues"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.
Associates a security key to the instance.
"""
def associate_security_key(%Client{} = client, instance_id, input, options \\ []) do
url_path = "/instance/#{AWS.Util.encode_uri(instance_id)}/security-key"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.
Creates an agent status for the specified Amazon Connect instance.
"""
def create_agent_status(%Client{} = client, instance_id, input, options \\ []) do
url_path = "/agent-status/#{AWS.Util.encode_uri(instance_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Creates a contact flow for the specified Amazon Connect instance.
You can also create and update contact flows using the [Amazon Connect Flow language](https://docs.aws.amazon.com/connect/latest/adminguide/flow-language.html).
"""
def create_contact_flow(%Client{} = client, instance_id, input, options \\ []) do
url_path = "/contact-flows/#{AWS.Util.encode_uri(instance_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Creates a contact flow module for the specified Amazon Connect instance.
"""
def create_contact_flow_module(%Client{} = client, instance_id, input, options \\ []) do
url_path = "/contact-flow-modules/#{AWS.Util.encode_uri(instance_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.
Creates hours of operation.
"""
def create_hours_of_operation(%Client{} = client, instance_id, input, options \\ []) do
url_path = "/hours-of-operations/#{AWS.Util.encode_uri(instance_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.
Initiates an Amazon Connect instance with all the supported channels enabled. It
does not attach any storage, such as Amazon Simple Storage Service (Amazon S3)
or Amazon Kinesis. It also does not allow for any configurations on features,
such as Contact Lens for Amazon Connect.
Amazon Connect enforces a limit on the total number of instances that you can
create or delete in 30 days. If you exceed this limit, you will get an error
message indicating there has been an excessive number of attempts at creating or
deleting instances. You must wait 30 days before you can restart creating and
deleting instances in your account.
"""
def create_instance(%Client{} = client, input, options \\ []) do
url_path = "/instance"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Creates an Amazon Web Services resource association with an Amazon Connect
instance.
"""
def create_integration_association(%Client{} = client, instance_id, input, options \\ []) do
url_path = "/instance/#{AWS.Util.encode_uri(instance_id)}/integration-associations"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.
Creates a new queue for the specified Amazon Connect instance.
"""
def create_queue(%Client{} = client, instance_id, input, options \\ []) do
url_path = "/queues/#{AWS.Util.encode_uri(instance_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Creates a quick connect for the specified Amazon Connect instance.
"""
def create_quick_connect(%Client{} = client, instance_id, input, options \\ []) do
url_path = "/quick-connects/#{AWS.Util.encode_uri(instance_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Creates a new routing profile.
"""
def create_routing_profile(%Client{} = client, instance_id, input, options \\ []) do
url_path = "/routing-profiles/#{AWS.Util.encode_uri(instance_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.
Creates a security profile.
"""
def create_security_profile(%Client{} = client, instance_id, input, options \\ []) do
url_path = "/security-profiles/#{AWS.Util.encode_uri(instance_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Creates a use case for an integration association.
"""
def create_use_case(
%Client{} = client,
instance_id,
integration_association_id,
input,
options \\ []
) do
url_path =
"/instance/#{AWS.Util.encode_uri(instance_id)}/integration-associations/#{AWS.Util.encode_uri(integration_association_id)}/use-cases"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Creates a user account for the specified Amazon Connect instance.
For information about how to create user accounts using the Amazon Connect
console, see [Add Users](https://docs.aws.amazon.com/connect/latest/adminguide/user-management.html)
in the *Amazon Connect Administrator Guide*.
"""
def create_user(%Client{} = client, instance_id, input, options \\ []) do
url_path = "/users/#{AWS.Util.encode_uri(instance_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Creates a new user hierarchy group.
"""
def create_user_hierarchy_group(%Client{} = client, instance_id, input, options \\ []) do
url_path = "/user-hierarchy-groups/#{AWS.Util.encode_uri(instance_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Creates a custom vocabulary associated with your Amazon Connect instance.
You can set a custom vocabulary to be your default vocabulary for a given
language. Contact Lens for Amazon Connect uses the default vocabulary in
post-call and real-time contact analysis sessions for that language.
"""
def create_vocabulary(%Client{} = client, instance_id, input, options \\ []) do
url_path = "/vocabulary/#{AWS.Util.encode_uri(instance_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Deletes a contact flow for the specified Amazon Connect instance.
"""
def delete_contact_flow(%Client{} = client, contact_flow_id, instance_id, input, options \\ []) do
url_path =
"/contact-flows/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(contact_flow_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Deletes the specified contact flow module.
"""
def delete_contact_flow_module(
%Client{} = client,
contact_flow_module_id,
instance_id,
input,
options \\ []
) do
url_path =
"/contact-flow-modules/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(contact_flow_module_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.
Deletes an hours of operation.
"""
def delete_hours_of_operation(
%Client{} = client,
hours_of_operation_id,
instance_id,
input,
options \\ []
) do
url_path =
"/hours-of-operations/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(hours_of_operation_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.
Deletes the Amazon Connect instance.
Amazon Connect enforces a limit on the total number of instances that you can
create or delete in 30 days. If you exceed this limit, you will get an error
message indicating there has been an excessive number of attempts at creating or
deleting instances. You must wait 30 days before you can restart creating and
deleting instances in your account.
"""
def delete_instance(%Client{} = client, instance_id, input, options \\ []) do
url_path = "/instance/#{AWS.Util.encode_uri(instance_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Deletes an Amazon Web Services resource association from an Amazon Connect
instance.
The association must not have any use cases associated with it.
"""
def delete_integration_association(
%Client{} = client,
instance_id,
integration_association_id,
input,
options \\ []
) do
url_path =
"/instance/#{AWS.Util.encode_uri(instance_id)}/integration-associations/#{AWS.Util.encode_uri(integration_association_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Deletes a quick connect.
"""
def delete_quick_connect(
%Client{} = client,
instance_id,
quick_connect_id,
input,
options \\ []
) do
url_path =
"/quick-connects/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(quick_connect_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.
Deletes a security profile.
"""
def delete_security_profile(
%Client{} = client,
instance_id,
security_profile_id,
input,
options \\ []
) do
url_path =
"/security-profiles/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(security_profile_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Deletes a use case from an integration association.
"""
def delete_use_case(
%Client{} = client,
instance_id,
integration_association_id,
use_case_id,
input,
options \\ []
) do
url_path =
"/instance/#{AWS.Util.encode_uri(instance_id)}/integration-associations/#{AWS.Util.encode_uri(integration_association_id)}/use-cases/#{AWS.Util.encode_uri(use_case_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Deletes a user account from the specified Amazon Connect instance.
For information about what happens to a user's data when their account is
deleted, see [Delete Users from Your Amazon Connect Instance](https://docs.aws.amazon.com/connect/latest/adminguide/delete-users.html)
in the *Amazon Connect Administrator Guide*.
"""
def delete_user(%Client{} = client, instance_id, user_id, input, options \\ []) do
url_path = "/users/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(user_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Deletes an existing user hierarchy group.
It must not be associated with any agents or have any active child groups.
"""
def delete_user_hierarchy_group(
%Client{} = client,
hierarchy_group_id,
instance_id,
input,
options \\ []
) do
url_path =
"/user-hierarchy-groups/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(hierarchy_group_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Deletes the vocabulary that has the given identifier.
"""
def delete_vocabulary(%Client{} = client, instance_id, vocabulary_id, input, options \\ []) do
url_path =
"/vocabulary-remove/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(vocabulary_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.
Describes an agent status.
"""
def describe_agent_status(%Client{} = client, agent_status_id, instance_id, options \\ []) do
url_path =
"/agent-status/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(agent_status_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.
Describes the specified contact.
Contact information remains available in Amazon Connect for 24 months, and then
it is deleted.
"""
def describe_contact(%Client{} = client, contact_id, instance_id, options \\ []) do
url_path = "/contacts/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(contact_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Describes the specified contact flow.
You can also create and update contact flows using the [Amazon Connect Flow language](https://docs.aws.amazon.com/connect/latest/adminguide/flow-language.html).
"""
def describe_contact_flow(%Client{} = client, contact_flow_id, instance_id, options \\ []) do
url_path =
"/contact-flows/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(contact_flow_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Describes the specified contact flow module.
"""
def describe_contact_flow_module(
%Client{} = client,
contact_flow_module_id,
instance_id,
options \\ []
) do
url_path =
"/contact-flow-modules/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(contact_flow_module_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.
Describes the hours of operation.
"""
def describe_hours_of_operation(
%Client{} = client,
hours_of_operation_id,
instance_id,
options \\ []
) do
url_path =
"/hours-of-operations/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(hours_of_operation_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.
Returns the current state of the specified instance identifier. It tracks the
instance while it is being created and returns an error status, if applicable.
If an instance is not created successfully, the instance status reason field
returns details relevant to the reason. The instance in a failed state is
returned only for 24 hours after the CreateInstance API was invoked.
"""
def describe_instance(%Client{} = client, instance_id, options \\ []) do
url_path = "/instance/#{AWS.Util.encode_uri(instance_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.
Describes the specified instance attribute.
"""
def describe_instance_attribute(%Client{} = client, attribute_type, instance_id, options \\ []) do
url_path =
"/instance/#{AWS.Util.encode_uri(instance_id)}/attribute/#{AWS.Util.encode_uri(attribute_type)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.
Retrieves the current storage configurations for the specified resource type,
association ID, and instance ID.
"""
def describe_instance_storage_config(
      %Client{} = client,
      association_id,
      instance_id,
      resource_type,
      options \\ []
    ) do
  url_path =
    "/instance/#{AWS.Util.encode_uri(instance_id)}/storage-config/#{AWS.Util.encode_uri(association_id)}"

  headers = []

  # Append the "resourceType" query parameter only when a value was given.
  # Rewritten from the generated `if !is_nil(resource_type)` — Credo flags
  # negated `is_nil/1`; the positive form is equivalent and clearer.
  query_params =
    if is_nil(resource_type) do
      []
    else
      [{"resourceType", resource_type}]
    end

  Request.request_rest(
    client,
    metadata(),
    :get,
    url_path,
    query_params,
    headers,
    nil,
    options,
    nil
  )
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.
Describes the specified queue.
"""
def describe_queue(%Client{} = client, instance_id, queue_id, options \\ []) do
url_path = "/queues/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(queue_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Describes the quick connect.
"""
def describe_quick_connect(%Client{} = client, instance_id, quick_connect_id, options \\ []) do
url_path =
"/quick-connects/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(quick_connect_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Describes the specified routing profile.
"""
def describe_routing_profile(%Client{} = client, instance_id, routing_profile_id, options \\ []) do
url_path =
"/routing-profiles/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(routing_profile_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.
Gets basic information about the security profle.
"""
def describe_security_profile(
%Client{} = client,
instance_id,
security_profile_id,
options \\ []
) do
url_path =
"/security-profiles/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(security_profile_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Describes the specified user account.
You can find the instance ID in the console (it’s the final part of the ARN).
The console does not display the user IDs. Instead, list the users and note the
IDs provided in the output.
"""
def describe_user(%Client{} = client, instance_id, user_id, options \\ []) do
url_path = "/users/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(user_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Describes the specified hierarchy group.
"""
def describe_user_hierarchy_group(
%Client{} = client,
hierarchy_group_id,
instance_id,
options \\ []
) do
url_path =
"/user-hierarchy-groups/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(hierarchy_group_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Describes the hierarchy structure of the specified Amazon Connect instance.
"""
def describe_user_hierarchy_structure(%Client{} = client, instance_id, options \\ []) do
url_path = "/user-hierarchy-structure/#{AWS.Util.encode_uri(instance_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Describes the specified vocabulary.
"""
def describe_vocabulary(%Client{} = client, instance_id, vocabulary_id, options \\ []) do
url_path =
"/vocabulary/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(vocabulary_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.
Revokes access to integrated applications from Amazon Connect.
"""
def disassociate_approved_origin(%Client{} = client, instance_id, input, options \\ []) do
url_path = "/instance/#{AWS.Util.encode_uri(instance_id)}/approved-origin"
headers = []
# Request.build_params/2 extracts the "Origin" value from `input` and
# returns it as the "origin" query parameter, together with the reduced
# input — presumably because this DELETE endpoint expects the origin in
# the query string rather than the body; confirm against the Connect API.
{query_params, input} =
[
{"Origin", "origin"}
]
|> Request.build_params(input)
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.
Revokes authorization from the specified instance to access the specified Amazon
Lex or Amazon Lex V2 bot.
"""
def disassociate_bot(%Client{} = client, instance_id, input, options \\ []) do
url_path = "/instance/#{AWS.Util.encode_uri(instance_id)}/bot"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.
Removes the storage type configurations for the specified resource type and
association ID.
"""
def disassociate_instance_storage_config(
%Client{} = client,
association_id,
instance_id,
input,
options \\ []
) do
url_path =
"/instance/#{AWS.Util.encode_uri(instance_id)}/storage-config/#{AWS.Util.encode_uri(association_id)}"
headers = []
{query_params, input} =
[
{"ResourceType", "resourceType"}
]
|> Request.build_params(input)
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.
Remove the Lambda function from the dropdown options available in the relevant
contact flow blocks.
"""
def disassociate_lambda_function(%Client{} = client, instance_id, input, options \\ []) do
  path = "/instance/#{AWS.Util.encode_uri(instance_id)}/lambda-function"

  # The function ARN is promoted from the body into the query string.
  {query, body} = Request.build_params([{"FunctionArn", "functionArn"}], input)

  Request.request_rest(client, metadata(), :delete, path, query, [], body, options, nil)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.
Revokes authorization from the specified instance to access the specified Amazon
Lex bot.
"""
def disassociate_lex_bot(%Client{} = client, instance_id, input, options \\ []) do
  path = "/instance/#{AWS.Util.encode_uri(instance_id)}/lex-bot"

  # Bot name and region are promoted from the body into the query string.
  {query, body} =
    Request.build_params([{"BotName", "botName"}, {"LexRegion", "lexRegion"}], input)

  Request.request_rest(client, metadata(), :delete, path, query, [], body, options, nil)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.
Disassociates a set of quick connects from a queue.
"""
def disassociate_queue_quick_connects(
      %Client{} = client,
      instance_id,
      queue_id,
      input,
      options \\ []
    ) do
  path =
    "/queues/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(queue_id)}/disassociate-quick-connects"

  # Whole input travels in the POST body; no query parameters or headers.
  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Disassociates a set of queues from a routing profile.
"""
def disassociate_routing_profile_queues(
      %Client{} = client,
      instance_id,
      routing_profile_id,
      input,
      options \\ []
    ) do
  path =
    "/routing-profiles/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(routing_profile_id)}/disassociate-queues"

  # Whole input travels in the POST body; no query parameters or headers.
  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.
Deletes the specified security key.
"""
def disassociate_security_key(
      %Client{} = client,
      association_id,
      instance_id,
      input,
      options \\ []
    ) do
  path =
    "/instance/#{AWS.Util.encode_uri(instance_id)}/security-key/#{AWS.Util.encode_uri(association_id)}"

  # DELETE carries the input as the body; no query parameters or headers.
  Request.request_rest(client, metadata(), :delete, path, [], [], input, options, nil)
end
@doc """
Retrieves the contact attributes for the specified contact.
"""
def get_contact_attributes(%Client{} = client, initial_contact_id, instance_id, options \\ []) do
  path =
    "/contact/attributes/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(initial_contact_id)}"

  # Read-only lookup: no query parameters, headers, or request body.
  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end
@doc """
Gets the real-time metric data from the specified Amazon Connect instance.
For a description of each metric, see [Real-time Metrics Definitions](https://docs.aws.amazon.com/connect/latest/adminguide/real-time-metrics-definitions.html)
in the *Amazon Connect Administrator Guide*.
"""
def get_current_metric_data(%Client{} = client, instance_id, input, options \\ []) do
  path = "/metrics/current/#{AWS.Util.encode_uri(instance_id)}"

  # Metric filters/groupings are posted in the body; no query parameters.
  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Retrieves a token for federation.
This API doesn't support root users. If you try to invoke GetFederationToken
with root credentials, an error message similar to the following one appears:
`Provided identity: Principal: .... User: .... cannot be used for federation
with Amazon Connect`
"""
def get_federation_token(%Client{} = client, instance_id, options \\ []) do
  path = "/user/federate/#{AWS.Util.encode_uri(instance_id)}"

  # Read-only lookup: no query parameters, headers, or request body.
  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end
@doc """
Gets historical metric data from the specified Amazon Connect instance.
For a description of each historical metric, see [Historical Metrics Definitions](https://docs.aws.amazon.com/connect/latest/adminguide/historical-metrics-definitions.html)
in the *Amazon Connect Administrator Guide*.
"""
def get_metric_data(%Client{} = client, instance_id, input, options \\ []) do
  path = "/metrics/historical/#{AWS.Util.encode_uri(instance_id)}"

  # Metric filters/groupings are posted in the body; no query parameters.
  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.
Lists agent statuses.
"""
def list_agent_statuses(
      %Client{} = client,
      instance_id,
      agent_status_types \\ nil,
      max_results \\ nil,
      next_token \\ nil,
      options \\ []
    ) do
  path = "/agent-status/#{AWS.Util.encode_uri(instance_id)}"

  # Forward only the optional parameters the caller actually supplied.
  query =
    [
      {"AgentStatusTypes", agent_status_types},
      {"maxResults", max_results},
      {"nextToken", next_token}
    ]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)

  Request.request_rest(client, metadata(), :get, path, query, [], nil, options, nil)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.
Returns a paginated list of all approved origins associated with the instance.
"""
def list_approved_origins(
      %Client{} = client,
      instance_id,
      max_results \\ nil,
      next_token \\ nil,
      options \\ []
    ) do
  path = "/instance/#{AWS.Util.encode_uri(instance_id)}/approved-origins"

  # Forward only the pagination parameters the caller actually supplied.
  query =
    [{"maxResults", max_results}, {"nextToken", next_token}]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)

  Request.request_rest(client, metadata(), :get, path, query, [], nil, options, nil)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.
For the specified version of Amazon Lex, returns a paginated list of all the
Amazon Lex bots currently associated with the instance.
"""
def list_bots(
      %Client{} = client,
      instance_id,
      lex_version,
      max_results \\ nil,
      next_token \\ nil,
      options \\ []
    ) do
  path = "/instance/#{AWS.Util.encode_uri(instance_id)}/bots"

  # Forward only the parameters the caller actually supplied.
  query =
    [
      {"lexVersion", lex_version},
      {"maxResults", max_results},
      {"nextToken", next_token}
    ]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)

  Request.request_rest(client, metadata(), :get, path, query, [], nil, options, nil)
end
@doc """
Provides information about the contact flow modules for the specified Amazon
Connect instance.
"""
def list_contact_flow_modules(
      %Client{} = client,
      instance_id,
      contact_flow_module_state \\ nil,
      max_results \\ nil,
      next_token \\ nil,
      options \\ []
    ) do
  path = "/contact-flow-modules-summary/#{AWS.Util.encode_uri(instance_id)}"

  # Forward only the optional parameters the caller actually supplied.
  query =
    [
      {"state", contact_flow_module_state},
      {"maxResults", max_results},
      {"nextToken", next_token}
    ]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)

  Request.request_rest(client, metadata(), :get, path, query, [], nil, options, nil)
end
@doc """
Provides information about the contact flows for the specified Amazon Connect
instance.
You can also create and update contact flows using the [Amazon Connect Flow language](https://docs.aws.amazon.com/connect/latest/adminguide/flow-language.html).
For more information about contact flows, see [Contact Flows](https://docs.aws.amazon.com/connect/latest/adminguide/concepts-contact-flows.html)
in the *Amazon Connect Administrator Guide*.
"""
def list_contact_flows(
      %Client{} = client,
      instance_id,
      contact_flow_types \\ nil,
      max_results \\ nil,
      next_token \\ nil,
      options \\ []
    ) do
  path = "/contact-flows-summary/#{AWS.Util.encode_uri(instance_id)}"

  # Forward only the optional parameters the caller actually supplied.
  query =
    [
      {"contactFlowTypes", contact_flow_types},
      {"maxResults", max_results},
      {"nextToken", next_token}
    ]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)

  Request.request_rest(client, metadata(), :get, path, query, [], nil, options, nil)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.
For the specified `referenceTypes`, returns a list of references associated with
the contact.
"""
def list_contact_references(
      %Client{} = client,
      contact_id,
      instance_id,
      next_token \\ nil,
      reference_types,
      options \\ []
    ) do
  path =
    "/contact/references/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(contact_id)}"

  # Forward only the parameters the caller actually supplied.
  query =
    [{"nextToken", next_token}, {"referenceTypes", reference_types}]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)

  Request.request_rest(client, metadata(), :get, path, query, [], nil, options, nil)
end
@doc """
Lists the default vocabularies for the specified Amazon Connect instance.
"""
def list_default_vocabularies(%Client{} = client, instance_id, input, options \\ []) do
  path = "/default-vocabulary-summary/#{AWS.Util.encode_uri(instance_id)}"

  # Filters are posted in the body; no query parameters or headers.
  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Provides information about the hours of operation for the specified Amazon
Connect instance.
For more information about hours of operation, see [Set the Hours of Operation for a
Queue](https://docs.aws.amazon.com/connect/latest/adminguide/set-hours-operation.html)
in the *Amazon Connect Administrator Guide*.
"""
def list_hours_of_operations(
      %Client{} = client,
      instance_id,
      max_results \\ nil,
      next_token \\ nil,
      options \\ []
    ) do
  path = "/hours-of-operations-summary/#{AWS.Util.encode_uri(instance_id)}"

  # Forward only the pagination parameters the caller actually supplied.
  query =
    [{"maxResults", max_results}, {"nextToken", next_token}]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)

  Request.request_rest(client, metadata(), :get, path, query, [], nil, options, nil)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.
Returns a paginated list of all attribute types for the given instance.
"""
def list_instance_attributes(
      %Client{} = client,
      instance_id,
      max_results \\ nil,
      next_token \\ nil,
      options \\ []
    ) do
  path = "/instance/#{AWS.Util.encode_uri(instance_id)}/attributes"

  # Forward only the pagination parameters the caller actually supplied.
  query =
    [{"maxResults", max_results}, {"nextToken", next_token}]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)

  Request.request_rest(client, metadata(), :get, path, query, [], nil, options, nil)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.
Returns a paginated list of storage configs for the identified instance and
resource type.
"""
def list_instance_storage_configs(
      %Client{} = client,
      instance_id,
      max_results \\ nil,
      next_token \\ nil,
      resource_type,
      options \\ []
    ) do
  path = "/instance/#{AWS.Util.encode_uri(instance_id)}/storage-configs"

  # Forward only the parameters the caller actually supplied.
  query =
    [
      {"maxResults", max_results},
      {"nextToken", next_token},
      {"resourceType", resource_type}
    ]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)

  Request.request_rest(client, metadata(), :get, path, query, [], nil, options, nil)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.
Return a list of instances which are in active state, creation-in-progress
state, and failed state. Instances that aren't successfully created (they are in
a failed state) are returned only for 24 hours after the CreateInstance API was
invoked.
"""
def list_instances(%Client{} = client, max_results \\ nil, next_token \\ nil, options \\ []) do
  path = "/instance"

  # Forward only the pagination parameters the caller actually supplied.
  query =
    [{"maxResults", max_results}, {"nextToken", next_token}]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)

  Request.request_rest(client, metadata(), :get, path, query, [], nil, options, nil)
end
@doc """
Provides summary information about the Amazon Web Services resource associations
for the specified Amazon Connect instance.
"""
def list_integration_associations(
      %Client{} = client,
      instance_id,
      integration_type \\ nil,
      max_results \\ nil,
      next_token \\ nil,
      options \\ []
    ) do
  path = "/instance/#{AWS.Util.encode_uri(instance_id)}/integration-associations"

  # Forward only the optional parameters the caller actually supplied.
  query =
    [
      {"integrationType", integration_type},
      {"maxResults", max_results},
      {"nextToken", next_token}
    ]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)

  Request.request_rest(client, metadata(), :get, path, query, [], nil, options, nil)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.
Returns a paginated list of all Lambda functions that display in the dropdown
options in the relevant contact flow blocks.
"""
def list_lambda_functions(
      %Client{} = client,
      instance_id,
      max_results \\ nil,
      next_token \\ nil,
      options \\ []
    ) do
  path = "/instance/#{AWS.Util.encode_uri(instance_id)}/lambda-functions"

  # Forward only the pagination parameters the caller actually supplied.
  query =
    [{"maxResults", max_results}, {"nextToken", next_token}]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)

  Request.request_rest(client, metadata(), :get, path, query, [], nil, options, nil)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.
Returns a paginated list of all the Amazon Lex bots currently associated with
the instance.
"""
def list_lex_bots(
      %Client{} = client,
      instance_id,
      max_results \\ nil,
      next_token \\ nil,
      options \\ []
    ) do
  path = "/instance/#{AWS.Util.encode_uri(instance_id)}/lex-bots"

  # Forward only the pagination parameters the caller actually supplied.
  query =
    [{"maxResults", max_results}, {"nextToken", next_token}]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)

  Request.request_rest(client, metadata(), :get, path, query, [], nil, options, nil)
end
@doc """
Provides information about the phone numbers for the specified Amazon Connect
instance.
For more information about phone numbers, see [Set Up Phone Numbers for Your Contact
Center](https://docs.aws.amazon.com/connect/latest/adminguide/contact-center-phone-number.html)
in the *Amazon Connect Administrator Guide*.
"""
def list_phone_numbers(
      %Client{} = client,
      instance_id,
      max_results \\ nil,
      next_token \\ nil,
      phone_number_country_codes \\ nil,
      phone_number_types \\ nil,
      options \\ []
    ) do
  path = "/phone-numbers-summary/#{AWS.Util.encode_uri(instance_id)}"

  # Forward only the optional parameters the caller actually supplied.
  query =
    [
      {"maxResults", max_results},
      {"nextToken", next_token},
      {"phoneNumberCountryCodes", phone_number_country_codes},
      {"phoneNumberTypes", phone_number_types}
    ]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)

  Request.request_rest(client, metadata(), :get, path, query, [], nil, options, nil)
end
@doc """
Provides information about the prompts for the specified Amazon Connect
instance.
"""
def list_prompts(
      %Client{} = client,
      instance_id,
      max_results \\ nil,
      next_token \\ nil,
      options \\ []
    ) do
  path = "/prompts-summary/#{AWS.Util.encode_uri(instance_id)}"

  # Forward only the pagination parameters the caller actually supplied.
  query =
    [{"maxResults", max_results}, {"nextToken", next_token}]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)

  Request.request_rest(client, metadata(), :get, path, query, [], nil, options, nil)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.
Lists the quick connects associated with a queue.
"""
def list_queue_quick_connects(
      %Client{} = client,
      instance_id,
      queue_id,
      max_results \\ nil,
      next_token \\ nil,
      options \\ []
    ) do
  path =
    "/queues/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(queue_id)}/quick-connects"

  # Forward only the pagination parameters the caller actually supplied.
  query =
    [{"maxResults", max_results}, {"nextToken", next_token}]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)

  Request.request_rest(client, metadata(), :get, path, query, [], nil, options, nil)
end
@doc """
Provides information about the queues for the specified Amazon Connect instance.
If you do not specify a `QueueTypes` parameter, both standard and agent queues
are returned. This might cause an unexpected truncation of results if you have
more than 1000 agents and you limit the number of results of the API call in
code.
For more information about queues, see [Queues: Standard and Agent](https://docs.aws.amazon.com/connect/latest/adminguide/concepts-queues-standard-and-agent.html)
in the *Amazon Connect Administrator Guide*.
"""
def list_queues(
      %Client{} = client,
      instance_id,
      max_results \\ nil,
      next_token \\ nil,
      queue_types \\ nil,
      options \\ []
    ) do
  path = "/queues-summary/#{AWS.Util.encode_uri(instance_id)}"

  # Forward only the optional parameters the caller actually supplied.
  query =
    [
      {"maxResults", max_results},
      {"nextToken", next_token},
      {"queueTypes", queue_types}
    ]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)

  Request.request_rest(client, metadata(), :get, path, query, [], nil, options, nil)
end
@doc """
Provides information about the quick connects for the specified Amazon Connect
instance.
"""
def list_quick_connects(
      %Client{} = client,
      instance_id,
      max_results \\ nil,
      next_token \\ nil,
      quick_connect_types \\ nil,
      options \\ []
    ) do
  path = "/quick-connects/#{AWS.Util.encode_uri(instance_id)}"

  # Forward only the optional parameters the caller actually supplied.
  query =
    [
      {"maxResults", max_results},
      {"nextToken", next_token},
      {"QuickConnectTypes", quick_connect_types}
    ]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)

  Request.request_rest(client, metadata(), :get, path, query, [], nil, options, nil)
end
@doc """
Lists the queues associated with a routing profile.
"""
def list_routing_profile_queues(
      %Client{} = client,
      instance_id,
      routing_profile_id,
      max_results \\ nil,
      next_token \\ nil,
      options \\ []
    ) do
  path =
    "/routing-profiles/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(routing_profile_id)}/queues"

  # Forward only the pagination parameters the caller actually supplied.
  query =
    [{"maxResults", max_results}, {"nextToken", next_token}]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)

  Request.request_rest(client, metadata(), :get, path, query, [], nil, options, nil)
end
@doc """
Provides summary information about the routing profiles for the specified Amazon
Connect instance.
For more information about routing profiles, see [Routing Profiles](https://docs.aws.amazon.com/connect/latest/adminguide/concepts-routing.html)
and [Create a Routing Profile](https://docs.aws.amazon.com/connect/latest/adminguide/routing-profiles.html)
in the *Amazon Connect Administrator Guide*.
"""
def list_routing_profiles(
      %Client{} = client,
      instance_id,
      max_results \\ nil,
      next_token \\ nil,
      options \\ []
    ) do
  path = "/routing-profiles-summary/#{AWS.Util.encode_uri(instance_id)}"

  # Forward only the pagination parameters the caller actually supplied.
  query =
    [{"maxResults", max_results}, {"nextToken", next_token}]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)

  Request.request_rest(client, metadata(), :get, path, query, [], nil, options, nil)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.
Returns a paginated list of all security keys associated with the instance.
"""
def list_security_keys(
      %Client{} = client,
      instance_id,
      max_results \\ nil,
      next_token \\ nil,
      options \\ []
    ) do
  path = "/instance/#{AWS.Util.encode_uri(instance_id)}/security-keys"

  # Forward only the pagination parameters the caller actually supplied.
  query =
    [{"maxResults", max_results}, {"nextToken", next_token}]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)

  Request.request_rest(client, metadata(), :get, path, query, [], nil, options, nil)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.
Lists the permissions granted to a security profile.
"""
def list_security_profile_permissions(
      %Client{} = client,
      instance_id,
      security_profile_id,
      max_results \\ nil,
      next_token \\ nil,
      options \\ []
    ) do
  path =
    "/security-profiles-permissions/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(security_profile_id)}"

  # Forward only the pagination parameters the caller actually supplied.
  query =
    [{"maxResults", max_results}, {"nextToken", next_token}]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)

  Request.request_rest(client, metadata(), :get, path, query, [], nil, options, nil)
end
@doc """
Provides summary information about the security profiles for the specified
Amazon Connect instance.
For more information about security profiles, see [Security Profiles](https://docs.aws.amazon.com/connect/latest/adminguide/connect-security-profiles.html)
in the *Amazon Connect Administrator Guide*.
"""
def list_security_profiles(
      %Client{} = client,
      instance_id,
      max_results \\ nil,
      next_token \\ nil,
      options \\ []
    ) do
  path = "/security-profiles-summary/#{AWS.Util.encode_uri(instance_id)}"

  # Forward only the pagination parameters the caller actually supplied.
  query =
    [{"maxResults", max_results}, {"nextToken", next_token}]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)

  Request.request_rest(client, metadata(), :get, path, query, [], nil, options, nil)
end
@doc """
Lists the tags for the specified resource.
For sample policies that use tags, see [Amazon Connect Identity-Based Policy Examples](https://docs.aws.amazon.com/connect/latest/adminguide/security_iam_id-based-policy-examples.html)
in the *Amazon Connect Administrator Guide*.
"""
def list_tags_for_resource(%Client{} = client, resource_arn, options \\ []) do
  path = "/tags/#{AWS.Util.encode_uri(resource_arn)}"

  # Read-only lookup: no query parameters, headers, or request body.
  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end
@doc """
Lists the use cases for the integration association.
"""
def list_use_cases(
      %Client{} = client,
      instance_id,
      integration_association_id,
      max_results \\ nil,
      next_token \\ nil,
      options \\ []
    ) do
  path =
    "/instance/#{AWS.Util.encode_uri(instance_id)}/integration-associations/#{AWS.Util.encode_uri(integration_association_id)}/use-cases"

  # Forward only the pagination parameters the caller actually supplied.
  query =
    [{"maxResults", max_results}, {"nextToken", next_token}]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)

  Request.request_rest(client, metadata(), :get, path, query, [], nil, options, nil)
end
@doc """
Provides summary information about the hierarchy groups for the specified Amazon
Connect instance.
For more information about agent hierarchies, see [Set Up Agent Hierarchies](https://docs.aws.amazon.com/connect/latest/adminguide/agent-hierarchy.html)
in the *Amazon Connect Administrator Guide*.
"""
def list_user_hierarchy_groups(
      %Client{} = client,
      instance_id,
      max_results \\ nil,
      next_token \\ nil,
      options \\ []
    ) do
  path = "/user-hierarchy-groups-summary/#{AWS.Util.encode_uri(instance_id)}"

  # Forward only the pagination parameters the caller actually supplied.
  query =
    [{"maxResults", max_results}, {"nextToken", next_token}]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)

  Request.request_rest(client, metadata(), :get, path, query, [], nil, options, nil)
end
@doc """
Provides summary information about the users for the specified Amazon Connect
instance.
"""
def list_users(
      %Client{} = client,
      instance_id,
      max_results \\ nil,
      next_token \\ nil,
      options \\ []
    ) do
  path = "/users-summary/#{AWS.Util.encode_uri(instance_id)}"

  # Forward only the pagination parameters the caller actually supplied.
  query =
    [{"maxResults", max_results}, {"nextToken", next_token}]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)

  Request.request_rest(client, metadata(), :get, path, query, [], nil, options, nil)
end
@doc """
When a contact is being recorded, and the recording has been suspended using
SuspendContactRecording, this API resumes recording the call.
Only voice recordings are supported at this time.
"""
def resume_contact_recording(%Client{} = client, input, options \\ []) do
  path = "/contact/resume-recording"

  # Whole input travels in the POST body; no query parameters or headers.
  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Searches for vocabularies within a specific Amazon Connect instance using
`State`, `NameStartsWith`, and `LanguageCode`.
"""
def search_vocabularies(%Client{} = client, instance_id, input, options \\ []) do
  path = "/vocabulary-summary/#{AWS.Util.encode_uri(instance_id)}"

  # Search criteria are posted in the body; no query parameters or headers.
  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Initiates a contact flow to start a new chat for the customer.
Response of this API provides a token required to obtain credentials from the
[CreateParticipantConnection](https://docs.aws.amazon.com/connect-participant/latest/APIReference/API_CreateParticipantConnection.html) API in the Amazon Connect Participant Service.
When a new chat contact is successfully created, clients must subscribe to the
participant’s connection for the created chat within 5 minutes. This is achieved
by invoking
[CreateParticipantConnection](https://docs.aws.amazon.com/connect-participant/latest/APIReference/API_CreateParticipantConnection.html)
with WEBSOCKET and CONNECTION_CREDENTIALS.
A 429 error occurs in the following situations:
* API rate limit is exceeded. API TPS throttling returns a
`TooManyRequests` exception.
* The [quota for concurrent active chats](https://docs.aws.amazon.com/connect/latest/adminguide/amazon-connect-service-limits.html)
is exceeded. Active chat throttling returns a `LimitExceededException`.
If you use the `ChatDurationInMinutes` parameter and receive a 400 error, your
account may not support the ability to configure custom chat durations. For more
information, contact Amazon Web Services Support.
For more information about chat, see
[Chat](https://docs.aws.amazon.com/connect/latest/adminguide/chat.html) in the
*Amazon Connect Administrator Guide*.
"""
def start_chat_contact(%Client{} = client, input, options \\ []) do
  path = "/contact/chat"

  # PUT with the whole input as the body; no query parameters or headers.
  Request.request_rest(client, metadata(), :put, path, [], [], input, options, nil)
end
@doc """
Starts recording the contact when the agent joins the call.
StartContactRecording is a one-time action. For example, if you use
StopContactRecording to stop recording an ongoing call, you can't use
StartContactRecording to restart it. For scenarios where the recording has
started and you want to suspend and resume it, such as when collecting sensitive
information (for example, a credit card number), use SuspendContactRecording and
ResumeContactRecording.
You can use this API to override the recording behavior configured in the [Set recording
behavior](https://docs.aws.amazon.com/connect/latest/adminguide/set-recording-behavior.html)
block.
Only voice recordings are supported at this time.
"""
def start_contact_recording(%Client{} = client, input, options \\ []) do
  path = "/contact/start-recording"

  # Whole input travels in the POST body; no query parameters or headers.
  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Initiates real-time message streaming for a new chat contact.
For more information about message streaming, see [Enable real-time chat message streaming](https://docs.aws.amazon.com/connect/latest/adminguide/chat-message-streaming.html)
in the *Amazon Connect Administrator Guide*.
"""
def start_contact_streaming(%Client{} = client, input, options \\ []) do
  path = "/contact/start-streaming"

  # Whole input travels in the POST body; no query parameters or headers.
  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Places an outbound call to a contact, and then initiates the contact flow.
It performs the actions in the contact flow that's specified (in
`ContactFlowId`).
Agents do not initiate the outbound API, which means that they do not dial the
contact. If the contact flow places an outbound call to a contact, and then puts
the contact in queue, the call is then routed to the agent, like any other
inbound case.
There is a 60-second dialing timeout for this operation. If the call is not
connected after 60 seconds, it fails.
UK numbers with a 447 prefix are not allowed by default. Before you can dial
these UK mobile numbers, you must submit a service quota increase request. For
more information, see [Amazon Connect Service Quotas](https://docs.aws.amazon.com/connect/latest/adminguide/amazon-connect-service-limits.html)
in the *Amazon Connect Administrator Guide*.
Campaign calls are not allowed by default. Before you can make a call with
`TrafficType` = `CAMPAIGN`, you must submit a service quota increase request.
For more information, see [Amazon Connect Service Quotas](https://docs.aws.amazon.com/connect/latest/adminguide/amazon-connect-service-limits.html)
in the *Amazon Connect Administrator Guide*.
"""
def start_outbound_voice_contact(%Client{} = client, input, options \\ []) do
  path = "/contact/outbound-voice"

  # PUT with the whole input as the body; no query parameters or headers.
  Request.request_rest(client, metadata(), :put, path, [], [], input, options, nil)
end
@doc """
Initiates a contact flow to start a new task.
"""
def start_task_contact(%Client{} = client, input, options \\ []) do
  # NOTE: this operation is a PUT, unlike most other contact actions.
  path = "/contact/task"
  Request.request_rest(client, metadata(), :put, path, [], [], input, options, nil)
end
@doc """
Ends the specified contact.
"""
def stop_contact(%Client{} = client, input, options \\ []) do
  path = "/contact/stop"
  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Stops recording a call when a contact is being recorded.

StopContactRecording is a one-time action: after stopping, the recording
cannot be restarted with StartContactRecording. To pause around sensitive
information (for example, collecting a credit card number) and resume
afterwards, use SuspendContactRecording and ResumeContactRecording instead.
Only voice recordings are supported at this time.
"""
def stop_contact_recording(%Client{} = client, input, options \\ []) do
  path = "/contact/stop-recording"
  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Ends message streaming on a specified contact.

To restart message streaming on that contact, call the
[StartContactStreaming](https://docs.aws.amazon.com/connect/latest/APIReference/API_StartContactStreaming.html)
API.
"""
def stop_contact_streaming(%Client{} = client, input, options \\ []) do
  path = "/contact/stop-streaming"
  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
When a contact is being recorded, suspends recording the call.

Useful while collecting sensitive information such as a credit card number;
use ResumeContactRecording to restart. The suspended period is filled with
silence in the final recording. Only voice recordings are supported at this
time.
"""
def suspend_contact_recording(%Client{} = client, input, options \\ []) do
  path = "/contact/suspend-recording"
  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Adds the specified tags to the specified resource.

Supported resource types: users, routing profiles, queues, quick connects,
contact flows, agent status, and hours of operation. For sample tag-based
policies, see [Amazon Connect Identity-Based Policy Examples](https://docs.aws.amazon.com/connect/latest/adminguide/security_iam_id-based-policy-examples.html)
in the *Amazon Connect Administrator Guide*.
"""
def tag_resource(%Client{} = client, resource_arn, input, options \\ []) do
  path = "/tags/#{AWS.Util.encode_uri(resource_arn)}"
  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Removes the specified tags from the specified resource.
"""
def untag_resource(%Client{} = client, resource_arn, input, options \\ []) do
  path = "/tags/#{AWS.Util.encode_uri(resource_arn)}"

  # "tagKeys" is sent as a query-string parameter, not in the request body.
  {query, body} = Request.build_params([{"tagKeys", "tagKeys"}], input)

  Request.request_rest(client, metadata(), :delete, path, query, [], body, options, nil)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.

Updates agent status.
"""
def update_agent_status(%Client{} = client, agent_status_id, instance_id, input, options \\ []) do
  path =
    "/agent-status/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(agent_status_id)}"

  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.

Adds or updates user-defined contact information associated with the
specified contact; at least one updatable field must be present in the
request. Works for both ongoing and completed contacts.
"""
def update_contact(%Client{} = client, contact_id, instance_id, input, options \\ []) do
  path = "/contacts/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(contact_id)}"
  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Creates or updates user-defined contact attributes associated with the
specified contact.

Attributes can be set for both ongoing and completed contacts — for example
updating the customer's name mid-call, adding agent notes, flagging calls for
review, or saving CRM data with the contact. Attributes are retained for 24
months; see [Feature specifications](https://docs.aws.amazon.com/connect/latest/adminguide/amazon-connect-service-limits.html#feature-limits)
in the *Amazon Connect Administrator Guide* for limits.

**Important:** attributes cannot be updated for contacts that occurred before
the API's release (September 12, 2018); doing so returns a 400 error, which
also applies to queued callbacks initiated before that date but still active.
"""
def update_contact_attributes(%Client{} = client, input, options \\ []) do
  path = "/contact/attributes"
  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Updates the specified contact flow.

You can also create and update contact flows using the
[Amazon Connect Flow language](https://docs.aws.amazon.com/connect/latest/adminguide/flow-language.html).
"""
def update_contact_flow_content(
      %Client{} = client,
      contact_flow_id,
      instance_id,
      input,
      options \\ []
    ) do
  path =
    "/contact-flows/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(contact_flow_id)}/content"

  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Updates metadata about the specified contact flow.
"""
def update_contact_flow_metadata(
      %Client{} = client,
      contact_flow_id,
      instance_id,
      input,
      options \\ []
    ) do
  path =
    "/contact-flows/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(contact_flow_id)}/metadata"

  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Updates the specified contact flow module for the specified Amazon Connect
instance.
"""
def update_contact_flow_module_content(
      %Client{} = client,
      contact_flow_module_id,
      instance_id,
      input,
      options \\ []
    ) do
  path =
    "/contact-flow-modules/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(contact_flow_module_id)}/content"

  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Updates metadata about the specified contact flow module.
"""
def update_contact_flow_module_metadata(
      %Client{} = client,
      contact_flow_module_id,
      instance_id,
      input,
      options \\ []
    ) do
  path =
    "/contact-flow-modules/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(contact_flow_module_id)}/metadata"

  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Updates the name of the contact flow.

You can also create and update contact flows using the
[Amazon Connect Flow language](https://docs.aws.amazon.com/connect/latest/adminguide/flow-language.html).
"""
def update_contact_flow_name(
      %Client{} = client,
      contact_flow_id,
      instance_id,
      input,
      options \\ []
    ) do
  path =
    "/contact-flows/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(contact_flow_id)}/name"

  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Updates the scheduled time of a task contact that is already scheduled.
"""
def update_contact_schedule(%Client{} = client, input, options \\ []) do
  path = "/contact/schedule"
  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.

Updates the hours of operation.
"""
def update_hours_of_operation(
      %Client{} = client,
      hours_of_operation_id,
      instance_id,
      input,
      options \\ []
    ) do
  path =
    "/hours-of-operations/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(hours_of_operation_id)}"

  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.

Updates the value for the specified attribute type.
"""
def update_instance_attribute(
      %Client{} = client,
      attribute_type,
      instance_id,
      input,
      options \\ []
    ) do
  path =
    "/instance/#{AWS.Util.encode_uri(instance_id)}/attribute/#{AWS.Util.encode_uri(attribute_type)}"

  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.

Updates an existing configuration for a resource type. This API is
idempotent.
"""
def update_instance_storage_config(
      %Client{} = client,
      association_id,
      instance_id,
      input,
      options \\ []
    ) do
  path =
    "/instance/#{AWS.Util.encode_uri(instance_id)}/storage-config/#{AWS.Util.encode_uri(association_id)}"

  # "resourceType" is sent as a query-string parameter, not in the body.
  {query, body} = Request.build_params([{"ResourceType", "resourceType"}], input)

  Request.request_rest(client, metadata(), :post, path, query, [], body, options, nil)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.

Updates the hours of operation for the specified queue.
"""
def update_queue_hours_of_operation(
      %Client{} = client,
      instance_id,
      queue_id,
      input,
      options \\ []
    ) do
  path =
    "/queues/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(queue_id)}/hours-of-operation"

  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.

Updates the maximum number of contacts allowed in a queue before it is
considered full.
"""
def update_queue_max_contacts(%Client{} = client, instance_id, queue_id, input, options \\ []) do
  path =
    "/queues/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(queue_id)}/max-contacts"

  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.

Updates the name and description of a queue. At least `Name` or `Description`
must be provided.
"""
def update_queue_name(%Client{} = client, instance_id, queue_id, input, options \\ []) do
  path = "/queues/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(queue_id)}/name"
  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.

Updates the outbound caller ID name, number, and outbound whisper flow for a
specified queue.
"""
def update_queue_outbound_caller_config(
      %Client{} = client,
      instance_id,
      queue_id,
      input,
      options \\ []
    ) do
  path =
    "/queues/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(queue_id)}/outbound-caller-config"

  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.

Updates the status of the queue.
"""
def update_queue_status(%Client{} = client, instance_id, queue_id, input, options \\ []) do
  path = "/queues/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(queue_id)}/status"
  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Updates the configuration settings for the specified quick connect.
"""
def update_quick_connect_config(
      %Client{} = client,
      instance_id,
      quick_connect_id,
      input,
      options \\ []
    ) do
  path =
    "/quick-connects/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(quick_connect_id)}/config"

  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Updates the name and description of a quick connect.

The request accepts data in JSON format. At least `Name` or `Description`
must be provided.
"""
def update_quick_connect_name(
      %Client{} = client,
      instance_id,
      quick_connect_id,
      input,
      options \\ []
    ) do
  path =
    "/quick-connects/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(quick_connect_id)}/name"

  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Updates the channels that agents can handle in the Contact Control Panel
(CCP) for a routing profile.
"""
def update_routing_profile_concurrency(
      %Client{} = client,
      instance_id,
      routing_profile_id,
      input,
      options \\ []
    ) do
  path =
    "/routing-profiles/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(routing_profile_id)}/concurrency"

  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Updates the default outbound queue of a routing profile.
"""
def update_routing_profile_default_outbound_queue(
      %Client{} = client,
      instance_id,
      routing_profile_id,
      input,
      options \\ []
    ) do
  path =
    "/routing-profiles/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(routing_profile_id)}/default-outbound-queue"

  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Updates the name and description of a routing profile.

The request accepts data in JSON format. At least `Name` or `Description`
must be provided.
"""
def update_routing_profile_name(
      %Client{} = client,
      instance_id,
      routing_profile_id,
      input,
      options \\ []
    ) do
  path =
    "/routing-profiles/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(routing_profile_id)}/name"

  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Updates the properties associated with a set of queues for a routing profile.
"""
def update_routing_profile_queues(
      %Client{} = client,
      instance_id,
      routing_profile_id,
      input,
      options \\ []
    ) do
  path =
    "/routing-profiles/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(routing_profile_id)}/queues"

  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
This API is in preview release for Amazon Connect and is subject to change.

Updates a security profile.
"""
def update_security_profile(
      %Client{} = client,
      instance_id,
      security_profile_id,
      input,
      options \\ []
    ) do
  path =
    "/security-profiles/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(security_profile_id)}"

  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Assigns the specified hierarchy group to the specified user.
"""
def update_user_hierarchy(%Client{} = client, instance_id, user_id, input, options \\ []) do
  path = "/users/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(user_id)}/hierarchy"
  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Updates the name of the user hierarchy group.
"""
def update_user_hierarchy_group_name(
      %Client{} = client,
      hierarchy_group_id,
      instance_id,
      input,
      options \\ []
    ) do
  path =
    "/user-hierarchy-groups/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(hierarchy_group_id)}/name"

  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Updates the user hierarchy structure: add, remove, and rename user hierarchy
levels.
"""
def update_user_hierarchy_structure(%Client{} = client, instance_id, input, options \\ []) do
  path = "/user-hierarchy-structure/#{AWS.Util.encode_uri(instance_id)}"
  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Updates the identity information for the specified user.

We strongly recommend limiting who has the ability to invoke
`UpdateUserIdentityInfo`: someone with it can change other users' login
credentials by changing their email address and then resetting the password
through email. For more information, see
[Best Practices for Security Profiles](https://docs.aws.amazon.com/connect/latest/adminguide/security-profile-best-practices.html)
in the *Amazon Connect Administrator Guide*.
"""
def update_user_identity_info(%Client{} = client, instance_id, user_id, input, options \\ []) do
  path =
    "/users/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(user_id)}/identity-info"

  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Updates the phone configuration settings for the specified user.
"""
def update_user_phone_config(%Client{} = client, instance_id, user_id, input, options \\ []) do
  path =
    "/users/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(user_id)}/phone-config"

  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Assigns the specified routing profile to the specified user.
"""
def update_user_routing_profile(%Client{} = client, instance_id, user_id, input, options \\ []) do
  path =
    "/users/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(user_id)}/routing-profile"

  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Assigns the specified security profiles to the specified user.
"""
def update_user_security_profiles(
      %Client{} = client,
      instance_id,
      user_id,
      input,
      options \\ []
    ) do
  path =
    "/users/#{AWS.Util.encode_uri(instance_id)}/#{AWS.Util.encode_uri(user_id)}/security-profiles"

  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
end
|
lib/aws/generated/connect.ex
| 0.796649 | 0.423637 |
connect.ex
|
starcoder
|
defmodule Cldr.List do
  @moduledoc """
  `Cldr` includes patterns that enable lists to be catenated together
  to form a grammatically correct language construct for a given locale.

  If we have a list of days like `["Monday", "Tuesday", "Wednesday"]`
  then we can format that list for a given locale by:

      iex> Cldr.List.to_string(["Monday", "Tuesday", "Wednesday"], locale: "en")
      {:ok, "Monday, Tuesday, and Wednesday"}
  """

  require Cldr
  alias Cldr.Substitution

  @type pattern_type :: :or | :standard | :unit | :unit_narrow | :unit_short

  # Style used when the caller supplies no `:format` option.
  @default_style :standard

  @doc """
  Formats a list into a string according to the list pattern rules for a locale.

  * `list` is any list of terms that can be passed through `Kernel.to_string/1`

  * `options` are:

    * `locale` is any configured locale. See `Cldr.known_locales()`. The default
      is `locale: Cldr.get_current_locale/0`

    * `format` is one of those returned by
      `Cldr.List.list_pattern_styles_for/1`. The default is `format: :standard`

  ## Examples

      iex> Cldr.List.to_string(["a", "b", "c"], locale: "en")
      {:ok, "a, b, and c"}

      iex> Cldr.List.to_string(["a", "b", "c"], locale: "en", format: :unit_narrow)
      {:ok, "a b c"}

      iex> Cldr.List.to_string(["a", "b", "c"], locale: "fr")
      {:ok, "a, b et c"}

      iex> Cldr.List.to_string([1,2,3,4,5,6])
      {:ok, "1, 2, 3, 4, 5, and 6"}

      iex> Cldr.List.to_string(["a"])
      {:ok, "a"}

      iex> Cldr.List.to_string([1,2])
      {:ok, "1 and 2"}
  """
  # FIX: `List.t` is not a defined type — use the built-in `list` type.
  @spec to_string(list, Keyword.t()) :: {:ok, String.t()} | {:error, {atom, binary}}
  def to_string(list, options \\ [])

  # An empty list always formats to the empty string, for every locale.
  def to_string([], _options) do
    {:ok, ""}
  end

  def to_string(list, options) do
    case normalize_options(options) do
      {:error, {_exception, _message}} = error ->
        error

      {locale, format} ->
        # to_string/3 may return iodata; flatten once at the end.
        {:ok, :erlang.iolist_to_binary(to_string(list, locale, format))}
    end
  end

  @doc """
  Formats a list using `to_string/2` but raises if there is
  an error.

  ## Examples

      iex> Cldr.List.to_string!(["a", "b", "c"], locale: "en")
      "a, b, and c"

      iex> Cldr.List.to_string!(["a", "b", "c"], locale: "en", format: :unit_narrow)
      "a b c"
  """
  # FIX: the raising path never *returns* an `Exception.t`; `no_return`
  # expresses that a clause may raise instead of returning.
  @spec to_string!(list, Keyword.t()) :: String.t() | no_return
  def to_string!(list, options \\ []) do
    case to_string(list, options) do
      {:error, {exception, message}} ->
        raise exception, message

      {:ok, string} ->
        string
    end
  end

  # For when the list is empty
  defp to_string([], _locale, _pattern_type) do
    ""
  end

  # For when there is one element only
  defp to_string([first], _locale, _pattern_type) do
    Kernel.to_string(first)
  end

  # For when there are two elements only: use the dedicated :"2" pattern.
  defp to_string([first, last], locale, pattern_type) do
    pattern = list_patterns_for(locale.cldr_locale_name)[pattern_type][:"2"]

    Substitution.substitute([first, last], pattern)
    |> :erlang.iolist_to_binary()
  end

  # For when there are three elements only: combine the :end pattern for the
  # trailing pair with the :start pattern for the head.
  defp to_string([first, middle, last], locale, pattern_type) do
    first_pattern = list_patterns_for(locale.cldr_locale_name)[pattern_type][:start]
    last_pattern = list_patterns_for(locale.cldr_locale_name)[pattern_type][:end]

    last = Substitution.substitute([middle, last], last_pattern)
    Substitution.substitute([first, last], first_pattern)
  end

  # For when there are more than 3 elements: recurse through the tail with
  # the :middle pattern until only the final pair remains.
  defp to_string([first | rest], locale, pattern_type) do
    first_pattern = list_patterns_for(locale.cldr_locale_name)[pattern_type][:start]

    Substitution.substitute([first, do_to_string(rest, locale, pattern_type)], first_pattern)
  end

  # When there are only two left (ie last)
  defp do_to_string([first, last], locale, pattern_type) do
    last_pattern = list_patterns_for(locale.cldr_locale_name)[pattern_type][:end]

    Substitution.substitute([first, last], last_pattern)
  end

  # For the middle elements
  defp do_to_string([first | rest], locale, pattern_type) do
    middle_pattern = list_patterns_for(locale.cldr_locale_name)[pattern_type][:middle]

    Substitution.substitute([first, do_to_string(rest, locale, pattern_type)], middle_pattern)
  end

  # FIX: `Map.t` is deprecated in favour of the built-in `map` type.
  @spec list_patterns_for(Cldr.locale()) :: map
  @spec list_pattern_styles_for(Cldr.locale()) :: [atom]

  # One function clause per known locale is generated at compile time, so
  # pattern lookup is plain pattern-match dispatch with no runtime I/O.
  for locale_name <- Cldr.known_locale_names() do
    patterns = Cldr.Config.get_locale(locale_name).list_formats
    pattern_names = Map.keys(patterns)

    @doc """
    Returns the list patterns for a locale.

    List patterns provide rules for combining multiple
    items into a language format appropriate for a locale.

    ## Example

        iex> Cldr.List.list_patterns_for "en"
        %{
          or: %{
            "2": [0, " or ", 1],
            end: [0, ", or ", 1],
            middle: [0, ", ", 1],
            start: [0, ", ", 1]
          },
          or_narrow: %{
            "2": [0, " or ", 1],
            end: [0, ", or ", 1],
            middle: [0, ", ", 1],
            start: [0, ", ", 1]
          },
          or_short: %{
            "2": [0, " or ", 1],
            end: [0, ", or ", 1],
            middle: [0, ", ", 1],
            start: [0, ", ", 1]
          },
          standard: %{
            "2": [0, " and ", 1],
            end: [0, ", and ", 1],
            middle: [0, ", ", 1],
            start: [0, ", ", 1]
          },
          standard_narrow: %{
            "2": [0, " and ", 1],
            end: [0, ", and ", 1],
            middle: [0, ", ", 1],
            start: [0, ", ", 1]
          },
          standard_short: %{
            "2": [0, " and ", 1],
            end: [0, ", and ", 1],
            middle: [0, ", ", 1],
            start: [0, ", ", 1]
          },
          unit: %{
            "2": [0, ", ", 1],
            end: [0, ", ", 1],
            middle: [0, ", ", 1],
            start: [0, ", ", 1]
          },
          unit_narrow: %{
            "2": [0, " ", 1],
            end: [0, " ", 1],
            middle: [0, " ", 1],
            start: [0, " ", 1]
          },
          unit_short: %{
            "2": [0, ", ", 1],
            end: [0, ", ", 1],
            middle: [0, ", ", 1],
            start: [0, ", ", 1]
          }
        }
    """
    def list_patterns_for(unquote(locale_name)) do
      unquote(Macro.escape(patterns))
    end

    @doc """
    Returns the styles of list patterns available for a locale.

    Returns a list of `atom`s of the list format styles that are
    available in CLDR for a locale.

    ## Example

        iex> Cldr.List.list_pattern_styles_for("en")
        [:or, :or_narrow, :or_short, :standard, :standard_narrow, :standard_short, :unit, :unit_narrow, :unit_short]
    """
    def list_pattern_styles_for(unquote(locale_name)) do
      unquote(pattern_names)
    end
  end

  # Resolves the :locale and :format options, validating both; returns
  # {locale, format} or the first {:error, {exception, message}} encountered.
  defp normalize_options(options) do
    locale = options[:locale] || Cldr.get_current_locale()
    format = options[:format] || @default_style

    with {:ok, locale} <- Cldr.validate_locale(locale),
         {:ok, _} <- verify_format(locale.cldr_locale_name, format) do
      {locale, format}
    else
      {:error, {_exception, _message}} = error -> error
    end
  end

  # Checks the requested format style exists for the locale.
  defp verify_format(locale_name, format) do
    # FIX: positive `if ... in ...` instead of the negated `if !(x in y)`.
    if format in list_pattern_styles_for(locale_name) do
      {:ok, format}
    else
      {:error, {Cldr.UnknownFormatError, "The list format style #{inspect format} is not known."}}
    end
  end
end
|
lib/cldr/list.ex
| 0.907176 | 0.644505 |
list.ex
|
starcoder
|
defmodule Diceware do
  @moduledoc """
  A library that generates a [Diceware](https://theworld.com/~reinhold/diceware.html) passphrase.

  A diceware passphrase builds a readable passphrase of 6 (or more) words from a list of 8,192 words.
  Creating a passphrase combining readable words helps the user more easily memorize it.
  """

  alias Diceware.Passphrase
  alias IO.ANSI

  require Logger

  @colors [ANSI.cyan(), ANSI.magenta(), ANSI.yellow(), ANSI.blue(), ANSI.green(), ANSI.red()]

  # Size of the diceware word list; valid indexes are 0..(@wordlist_size - 1).
  @wordlist_size 8_192

  @doc ~S"""
  Generate a passphrase with at least 6 words.

  Takes a keyword list of options:

    * `:count` - number of words in phrase, defaults to 6
  """
  @spec generate(Keyword.t()) :: Diceware.Passphrase.t()
  def generate(opts \\ []) do
    number = Keyword.get(opts, :count, 6)

    # FIX: sample indexes *without* replacement. The previous implementation
    # drew `number` independent random values, so duplicates could collapse in
    # the filter below and silently yield fewer than `number` words.
    # A MapSet also makes the per-word membership test O(1) instead of O(n).
    indexes = 0..(@wordlist_size - 1) |> Enum.take_random(number) |> MapSet.new()

    Diceware.Words.all()
    |> Stream.with_index()
    |> Stream.filter(fn {_word, index} -> MapSet.member?(indexes, index) end)
    |> Stream.map(&get_word/1)
    |> Enum.to_list()
    |> Diceware.Passphrase.new()
  end

  # Word-list entries keep their trailing newline; strip it here.
  defp get_word({word, _}), do: String.trim(word, "\n")
  defp get_word(_), do: ""

  # Kept for backward compatibility: draws `number` independent indexes
  # (duplicates possible). No longer used by generate/1.
  @doc false
  def random_numbers(number), do: Enum.map(1..number, fn _ -> Enum.random(0..8_191) end)

  @doc ~S"""
  Return a string of the password with ANSI colors for printing to the console

  ## Examples

      iex> passphrase = Diceware.Passphrase.new(["a", "b", "c"])
      iex> Diceware.with_colors(passphrase)
      "\e[36ma\e[35mb\e[33mc"
  """
  @spec with_colors(Passphrase.t()) :: String.t()
  def with_colors(%Passphrase{} = passphrase) do
    passphrase.count
    |> color_list()
    |> Enum.zip(passphrase.words)
    |> Enum.map(fn {c, w} -> c <> w end)
    |> Enum.join()
  end

  # Up to 6 words: take one color per word straight from the palette.
  defp color_list(word_size) when word_size <= 6,
    do: Enum.map(0..(word_size - 1), &Enum.at(@colors, &1))

  # More than 6 words: cycle through the palette as many times as needed,
  # then append the remainder.
  defp color_list(word_size) do
    number_of_color_lists = div(word_size, Enum.count(@colors))
    extra_colors = rem(word_size, Enum.count(@colors))

    colors =
      Enum.reduce(0..number_of_color_lists, [], fn _x, acc ->
        acc ++ @colors
      end)

    colors ++ Enum.take(@colors, extra_colors)
  end
end
|
lib/diceware.ex
| 0.845081 | 0.529568 |
diceware.ex
|
starcoder
|
defmodule Site.ContentRewriters.LiquidObjects.Fare do
@moduledoc """
This module converts a string-based set of fare filters to a proper keyword list request
intended for Fares.Repo.all/1, and parses the price of the final result into a string.
IMPORTANT: Any atom changes in the Fares.Fare module need to also be made here, and should
likewise be updated in the Content team's online legend for fare replacement usage here:
https://docs.google.com/spreadsheets/d/18DGY0es_12xy54oDE9lDTJwATx4jhodWkND7MuY7R6E?pli=1#gid=1197832395
"""
alias Fares.{Fare, Format, Repo, Summary}
# Fares.Fare related type specs
@type required_key :: :reduced | :duration
@type optional_key :: :name | :mode | :includes_media
@type placeholder_key :: :zone_type | :zone_id
@type summary_mode :: :commuter_rail | :bus_subway | :ferry
@type the_ride :: :ada_ride | :premium_ride
@type zone_type :: :zone | :interzone
@type fare_key :: optional_key | required_key | placeholder_key
@type fare_name ::
:commuter_ferry_logan
| :commuter_ferry
| :ferry_cross_harbor
| :ferry_inner_harbor
| :foxboro
| :express_bus
| :local_bus
| :subway
@type fare_value ::
fare_name
| the_ride
| summary_mode
| zone_type
| Fare.media()
| Fare.reduced()
| Fare.duration()
@type fares_or_summaries :: [Summary.t()] | Summary.t() | [Fare.t()] | Fare.t()
@type repo_arg :: {fare_key, fare_value}
@type request_error :: {:error, {:invalid | :empty | :incomplete | :unmatched, String.t()}}
@type request_tuple :: {:ok, [repo_arg]} | {:ok, {summary_mode, [repo_arg]}}
@default_args [reduced: nil, duration: :single_trip]
# These are the route types that are compatible for fare ranges
@summary_atoms [:commuter_rail, :bus_subway, :ferry]
@fare_summary [
"commuter_rail",
"bus_subway",
"ferry"
]
@fare_name [
"commuter_ferry_logan",
"commuter_ferry",
"ferry_cross_harbor",
"ferry_inner_harbor",
"foxboro",
"express_bus",
"local_bus",
"subway"
]
@fare_ride [
"ada_ride",
"premium_ride"
]
@fare_media [
"cash",
"charlie_card",
"charlie_ticket",
"commuter_ticket",
"mticket",
"paper_ferry",
"special_event"
]
@fare_reduced [
"senior_disabled",
"student",
"reduced"
]
@fare_duration [
"day",
"week",
"weekend",
"month",
"single_trip",
"round_trip"
]
@zone_type [
"interzone",
"zone"
]
@zone_id ["1A", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10"]
@spec fare_request(String.t()) :: {:ok, String.t()} | request_error
# Parses a colon-separated fare filter string, fetches the matching fares,
# and renders the result (price string or error tuple).
def fare_request(string) do
  tokens = String.split(string, ":", trim: true)
  parsed = parse_tokens(tokens)

  parsed
  |> compose_args()
  |> request_fares()
  |> process_results()
end
@spec fare_object_request(String.t()) :: Fares.Fare.t() | Summary.t()
# Like fare_request/1 but returns the first matching fare/summary struct,
# or a sentinel %Fare{name: :invalid} when the filter string cannot be parsed.
def fare_object_request(string) do
  parsed =
    string
    |> String.split(":", trim: true)
    |> parse_tokens()

  case parsed do
    {:ok, _args} ->
      parsed
      |> compose_args()
      |> request_fares()
      |> List.first()

    _error ->
      %Fare{
        name: :invalid,
        mode: :subway,
        duration: :invalid
      }
  end
end
@spec parse_tokens([String.t()]) :: {:ok, [repo_arg]} | request_error
# Entry point: walk every token, accumulating recognized filters (`good`)
# and unrecognized strings (`bad`).
defp parse_tokens(new), do: parse_tokens(new, [], [])

@spec parse_tokens([String.t()], [repo_arg], [String.t()]) :: {:ok, [repo_arg]} | request_error
# A single unrecognized token aborts the whole request. NOTE: clause order
# matters — this clause is checked before the base case below, so the error
# surfaces as soon as exactly one bad token has accumulated, regardless of
# any remaining input.
defp parse_tokens(_, _, [token]) do
  {:error, {:invalid, token}}
end

# All tokens consumed and none were invalid: return the collected filters.
defp parse_tokens([], filters, _) do
  {:ok, filters}
end

# Recursive step: classify the next token via parse_token/3 and continue
# with the updated accumulators.
defp parse_tokens([string | remaining_strings], good, bad) do
  {valid, invalid} = parse_token(string, good, bad)
  parse_tokens(remaining_strings, valid, invalid)
end
@spec parse_token(String.t(), [repo_arg], [String.t()]) :: {[repo_arg], [String.t()]}
# Classifies one raw token against the whitelists above, adding it either to
# the accumulated repo filters (`good`) or to the unrecognized list (`bad`).
# Guard order matters: the first matching whitelist wins.
defp parse_token(value, good, bad) when value in @fare_summary do
  {filter_insert(good, mode: value), bad}
end
# Legacy aliases: both express-bus variants collapse into the :express_bus fare.
defp parse_token(value, good, bad) when value in ["inner_express_bus", "outer_express_bus"] do
  {filter_insert(good, name: "express_bus"), bad}
end
defp parse_token(value, good, bad) when value in @fare_name do
  {filter_insert(good, name: value), bad}
end
# RIDE fares only exist as senior/disabled reduced fares.
defp parse_token(value, good, bad) when value in @fare_ride do
  {filter_insert(good, name: value, reduced: "senior_disabled"), bad}
end
defp parse_token(value, good, bad) when value in @fare_media do
  {filter_insert(good, includes_media: value), bad}
end
# The generic "reduced" token matches any reduced fare (:any).
defp parse_token(value, good, bad) when value in @fare_reduced do
  {filter_insert(good, reduced: (value == "reduced" && "any") || value), bad}
end
defp parse_token(value, good, bad) when value in @fare_duration do
  {filter_insert(good, duration: value), bad}
end
defp parse_token(value, good, bad) when value in @zone_type do
  {filter_insert(good, zone_type: value), bad}
end
# Zone ids stay as strings (e.g. "1A"), so bypass filter_insert/2's
# String.to_existing_atom conversion.
defp parse_token(value, good, bad) when value in @zone_id do
  {[{:zone_id, value} | good], bad}
end
# Fallthrough: anything unrecognized is collected as a bad token.
defp parse_token(value, good, bad) do
  {good, [value | bad]}
end
@spec compose_args({:ok, list} | request_error) :: request_tuple | request_error
# A valid but empty token list means the request string had no usable content.
defp compose_args({:ok, []}) do
  {:error, {:empty, "no input"}}
end
# Shapes the parsed args into the tuple request_fares/1 expects, keyed off
# which filters are present (converted to a map only for pattern matching).
defp compose_args({:ok, args}) do
  case Enum.into(args, %{}) do
    # CR zone args need to be converted to a :name Tuple from their temporary placeholders
    %{zone_type: type, zone_id: id} ->
      zone_request =
        args
        |> Keyword.put(:name, {type, id})
        |> Keyword.drop([:zone_type, :zone_id])
      {:ok, zone_request}
    # Prevent both :mode and :name keys from being sent to Repo.all (never matches fare)
    %{name: _} ->
      {:ok, Keyword.delete(args, :mode)}
    # When using a :mode, the summarize/3 function requires an explicit :mode argument
    %{mode: mode} ->
      {:ok, {mode, args}}
    # If there is neither a :mode nor a :name key/value, we cannot perform the request
    _ ->
      {:error, {:incomplete, "missing mode/name"}}
  end
end
# Pass any upstream parse error through untouched.
defp compose_args(invalid_error) do
  invalid_error
end
@spec request_fares(request_tuple | request_error) :: [Summary.t()] | [Fare.t()] | request_error
# If the mode indicates a summary will be returned, format the results of get_fares/1 as a summary
defp request_fares({:ok, {mode, args}}) when mode in @summary_atoms do
  args
  |> get_fares()
  |> Format.summarize(mode)
end
# Plain filter list: fetch the matching fares directly.
defp request_fares({:ok, args}) do
  get_fares(args)
end
# Propagate upstream errors unchanged.
defp request_fares(error) do
  error
end
@spec process_results(fares_or_summaries | request_error) :: {:ok, String.t()} | request_error
# No fare matched the composed filters.
defp process_results([]) do
  {:error, {:unmatched, "no results"}}
end
# Multiple matches: only the first result is rendered.
defp process_results([first_result | _]) do
  process_results(first_result)
end
defp process_results(%Fares.Fare{} = fare) do
  {:ok, Format.price(fare)}
end
defp process_results(%Fares.Summary{} = summary) do
  {:ok, Summary.price_range(summary)}
end
# Propagate upstream errors unchanged.
defp process_results(error) do
  error
end
# Helpers

# Merges new key/value filters into the accumulated arg list, converting each
# string value to its corresponding pre-existing Fare atom along the way.
# String.to_existing_atom/1 is used deliberately so unexpected tokens cannot
# mint new atoms.
@spec filter_insert([repo_arg], [{fare_key, String.t()}]) :: [repo_arg]
defp filter_insert(args, additions) do
  Enum.reduce(additions, args, fn {key, raw_value}, acc ->
    value = String.to_existing_atom(raw_value)
    Keyword.put(acc, key, value)
  end)
end
# Fill in any missing/required arguments with the default,
# then call Fares.Repo.all/1 to get matching fares.
# Keyword.merge/2 gives precedence to the caller-supplied args.
@spec get_fares([repo_arg]) :: [Fare.t()]
defp get_fares(args) do
  @default_args
  |> Keyword.merge(args)
  |> Repo.all()
end
end
|
apps/site/lib/site/content_rewriters/liquid_objects/fare.ex
| 0.733643 | 0.574514 |
fare.ex
|
starcoder
|
defmodule CodeCorps.Adapter.MapTransformer do
  @moduledoc ~S"""
  Module used to transform maps for the purposes of various adapters used by
  the application.

  A `mapping` describes how values are copied between a flat map with atom
  keys and a (possibly nested) source map; `transform/2` and
  `transform_inverse/2` apply the mapping in each direction.
  """

  @typedoc ~S"""
  A format representing how a single key should be mapped from a source. The
  actual format is a 2 element tuple.
  The first element is the destination key in the output map.
  The second element is a list of keys representing the nested path to the key
  in the source map.
  For example, the tuple:
  `{:target_path, ["nested", "path", "to", "source"]}`
  Means that, from the source map, we need to take the nested value under
  "nested" => "path" => "to" => "source" and then put it into the output map,
  as a value for the key ":target_path".
  """
  # NOTE: was `{atom, list[atom]}` — `list[atom]` is Access syntax, not a type.
  @type key_mapping :: {atom, [atom]}

  @typedoc ~S"""
  A list of `t:key_mapping/0` tuples describing a full map transformation.
  """
  @type mapping :: list(key_mapping)

  @doc ~S"""
  Takes a source map and a list of tuples representing how the source map
  should be transformed into a new map, then applies the mapping
  operation on each field.
  """
  @spec transform(map, mapping) :: map
  def transform(%{} = source_map, mapping) when is_list(mapping) do
    mapping |> Enum.reduce(%{}, &map_field(&1, &2, source_map))
  end

  # Copies one value from its nested path in `source_map` into `target_map`.
  @spec map_field(key_mapping, map, map) :: map
  defp map_field({target_field, source_path}, %{} = target_map, %{} = source_map) do
    value = get_in(source_map, source_path)
    target_map |> Map.put(target_field, value)
  end

  @doc ~S"""
  Performs the inverse of `&transform/2`: rebuilds a nested map from a flat
  map of atom keys. Keys whose value is `nil` are omitted from the result.
  """
  @spec transform_inverse(map, mapping) :: map
  def transform_inverse(%{} = map, mapping) when is_list(mapping) do
    mapping |> Enum.reduce(%{}, &map_field_inverse(&1, &2, map))
  end

  # Builds the nested structure for one field and deep-merges it into the
  # accumulated result so sibling paths are preserved.
  @spec map_field_inverse(key_mapping, map, map) :: map
  defp map_field_inverse({source_field, target_path}, target_map, source_map) do
    value = source_map |> Map.get(source_field)
    list = target_path |> Enum.reverse()
    result = put_value(list, value, %{})
    deep_merge(target_map, result)
  end

  # Wraps `value` in nested single-key maps, innermost key first (the path
  # arrives reversed). A nil value produces no structure at all.
  @spec put_value([atom], any, map) :: map
  defp put_value(_, value, map) when is_nil(value), do: map
  defp put_value([head | tail], value, map) do
    new_value = Map.put(%{}, head, value)
    put_value(tail, new_value, map)
  end
  defp put_value([], new_value, _map), do: new_value

  @spec deep_merge(map, map) :: map
  defp deep_merge(left, right) do
    Map.merge(left, right, &deep_resolve/3)
  end

  # Key exists in both maps, and both values are maps as well.
  # These can be merged recursively.
  defp deep_resolve(_key, left = %{}, right = %{}) do
    deep_merge(left, right)
  end

  # Key exists in both maps, but at least one of the values is
  # NOT a map. We fall back to standard merge behavior, preferring
  # the value on the right.
  defp deep_resolve(_key, _left, right) do
    right
  end
end
|
lib/code_corps/adapter/map_transformer.ex
| 0.890629 | 0.665256 |
map_transformer.ex
|
starcoder
|
defmodule AWS.Savingsplans do
  @moduledoc """
  Savings Plans are a pricing model that offer significant savings on AWS
  usage (for example, on Amazon EC2 instances). You commit to a consistent
  amount of usage, in USD per hour, for a term of 1 or 3 years, and receive a
  lower price for that usage. For more information, see the [AWS Savings
  Plans User
  Guide](https://docs.aws.amazon.com/savingsplans/latest/userguide/).
  """

  # Every public API below POSTs a JSON body to a fixed path; all of the
  # request plumbing lives in request/8.

  @doc """
  Creates a Savings Plan.
  """
  def create_savings_plan(client, input, options \\ []) do
    path_ = "/CreateSavingsPlan"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Describes the specified Savings Plans rates.
  """
  def describe_savings_plan_rates(client, input, options \\ []) do
    path_ = "/DescribeSavingsPlanRates"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Describes the specified Savings Plans.
  """
  def describe_savings_plans(client, input, options \\ []) do
    path_ = "/DescribeSavingsPlans"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Describes the specified Savings Plans offering rates.
  """
  def describe_savings_plans_offering_rates(client, input, options \\ []) do
    path_ = "/DescribeSavingsPlansOfferingRates"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Describes the specified Savings Plans offerings.
  """
  def describe_savings_plans_offerings(client, input, options \\ []) do
    path_ = "/DescribeSavingsPlansOfferings"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Lists the tags for the specified resource.
  """
  def list_tags_for_resource(client, input, options \\ []) do
    path_ = "/ListTagsForResource"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Adds the specified tags to the specified resource.
  """
  def tag_resource(client, input, options \\ []) do
    path_ = "/TagResource"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Removes the specified tags from the specified resource.
  """
  def untag_resource(client, input, options \\ []) do
    path_ = "/UntagResource"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  # Builds, signs (SigV4) and performs an HTTP request against the
  # savingsplans endpoint. The region is pinned to us-east-1 — the service's
  # sole endpoint region at the time this was generated.
  @spec request(AWS.Client.t(), binary(), binary(), list(), list(), map(), list(), pos_integer()) ::
          {:ok, Poison.Parser.t(), Poison.Response.t()}
          | {:error, Poison.Parser.t()}
          | {:error, HTTPoison.Error.t()}
  defp request(client, method, path, query, headers, input, options, success_status_code) do
    client = %{client | service: "savingsplans",
                        region: "us-east-1"}
    host = build_host("savingsplans", client)
    url = host
          |> build_url(path, client)
          |> add_query(query)
    additional_headers = [{"Host", host}, {"Content-Type", "application/x-amz-json-1.1"}]
    headers = AWS.Request.add_headers(additional_headers, headers)
    payload = encode_payload(input)
    headers = AWS.Request.sign_v4(client, method, url, headers, payload)
    perform_request(method, url, payload, headers, options, success_status_code)
  end

  # No explicit success code expected: treat 200/202/204 as success; an empty
  # 200 body is returned as-is (nothing to decode).
  defp perform_request(method, url, payload, headers, options, nil) do
    case HTTPoison.request(method, url, payload, headers, options) do
      {:ok, %HTTPoison.Response{status_code: 200, body: ""} = response} ->
        {:ok, response}
      {:ok, %HTTPoison.Response{status_code: status_code, body: body} = response}
      when status_code == 200 or status_code == 202 or status_code == 204 ->
        {:ok, Poison.Parser.parse!(body, %{}), response}
      {:ok, %HTTPoison.Response{body: body}} ->
        error = Poison.Parser.parse!(body, %{})
        {:error, error}
      {:error, %HTTPoison.Error{reason: reason}} ->
        {:error, %HTTPoison.Error{reason: reason}}
    end
  end

  # Explicit success code expected: only that exact status counts as success
  # (pinned with ^); everything else is decoded as an error body.
  defp perform_request(method, url, payload, headers, options, success_status_code) do
    case HTTPoison.request(method, url, payload, headers, options) do
      {:ok, %HTTPoison.Response{status_code: ^success_status_code, body: ""} = response} ->
        {:ok, %{}, response}
      {:ok, %HTTPoison.Response{status_code: ^success_status_code, body: body} = response} ->
        {:ok, Poison.Parser.parse!(body, %{}), response}
      {:ok, %HTTPoison.Response{body: body}} ->
        error = Poison.Parser.parse!(body, %{})
        {:error, error}
      {:error, %HTTPoison.Error{reason: reason}} ->
        {:error, %HTTPoison.Error{reason: reason}}
    end
  end

  # The "local" pseudo-region targets a local test endpoint.
  defp build_host(_endpoint_prefix, %{region: "local"}) do
    "localhost"
  end
  defp build_host(endpoint_prefix, %{endpoint: endpoint}) do
    "#{endpoint_prefix}.#{endpoint}"
  end

  defp build_url(host, path, %{:proto => proto, :port => port}) do
    "#{proto}://#{host}:#{port}#{path}"
  end

  defp add_query(url, []) do
    url
  end
  defp add_query(url, query) do
    querystring = AWS.Util.encode_query(query)
    "#{url}?#{querystring}"
  end

  # nil input encodes to an empty payload (still signed as such).
  defp encode_payload(input) do
    if input != nil, do: Poison.Encoder.encode(input, %{}), else: ""
  end
end
|
lib/aws/savingsplans.ex
| 0.710427 | 0.430506 |
savingsplans.ex
|
starcoder
|
defmodule Pow.Ecto.Schema.Changeset do
  @moduledoc """
  Handles changeset functions for the Pow schema.
  These functions should never be called directly; instead the macros built
  in `Pow.Ecto.Schema` should be used. This is to ensure
  that only compile time configuration is used.
  ## Configuration options
  * `:password_min_length` - minimum password length, defaults to 10
  * `:password_max_length` - maximum password length, defaults to 4096
  * `:password_hash_methods` - the password hash and verify methods to use,
  defaults to:
  ```elixir
  {&Pow.Ecto.Schema.Password.pbkdf2_hash/1,
  &Pow.Ecto.Schema.Password.pbkdf2_verify/2}
  ```
  """
  alias Ecto.Changeset
  alias Pow.{Config, Ecto.Schema, Ecto.Schema.Password}
  @password_min_length 10
  @password_max_length 4096
  @doc """
  Validates the user id field.
  The user id field is always required. It will be treated as case insensitive,
  and it's required to be unique. If the user id field is `:email`, the value
  will be validated as an e-mail address too.
  """
  @spec user_id_field_changeset(Ecto.Schema.t() | Changeset.t(), map(), Config.t()) :: Changeset.t()
  def user_id_field_changeset(user_or_changeset, params, _config) do
    # The configured user id field lives on the schema module, so pull it off
    # the underlying struct whether we received a struct or a changeset.
    user_id_field =
      case user_or_changeset do
        %Changeset{data: %struct{}} -> struct.pow_user_id_field()
        %struct{} -> struct.pow_user_id_field()
      end
    user_or_changeset
    |> Changeset.cast(params, [user_id_field])
    |> Changeset.update_change(user_id_field, &Schema.normalize_user_id_field_value/1)
    |> maybe_validate_email_format(user_id_field)
    |> Changeset.validate_required([user_id_field])
    |> Changeset.unique_constraint(user_id_field)
  end
  @doc """
  Validates the password field.
  The `password` and `confirm_password` params is required to be equal. A
  password hash is generated by using `:password_hash_methods` in the
  configuration. The password is always required if the password hash is nil,
  and it's required to be between `:password_min_length` to
  `:password_max_length` characters long.
  The password hash is only generated if the changeset is valid, but always
  required.
  """
  @spec password_changeset(Ecto.Schema.t() | Changeset.t(), map(), Config.t()) :: Changeset.t()
  def password_changeset(user_or_changeset, params, config) do
    user_or_changeset
    |> Changeset.cast(params, [:password, :confirm_password])
    |> maybe_require_password()
    |> maybe_validate_password(config)
    |> maybe_validate_confirm_password()
    |> maybe_put_password_hash(config)
    |> Changeset.validate_required([:password_hash])
  end
  @doc """
  Validates the current password field.
  It's only required to provide a current password if the `password_hash`
  value exists in the data struct.
  """
  @spec current_password_changeset(Ecto.Schema.t() | Changeset.t(), map(), Config.t()) :: Changeset.t()
  def current_password_changeset(user_or_changeset, params, config) do
    user_or_changeset
    |> Changeset.cast(params, [:current_password])
    |> maybe_validate_current_password(config)
  end
  # E-mail format is only enforced when the user id field is :email.
  defp maybe_validate_email_format(changeset, :email) do
    Changeset.validate_format(changeset, :email, email_regexp())
  end
  defp maybe_validate_email_format(changeset, _), do: changeset
  # No stored hash yet (e.g. brand-new user) — there is nothing to verify
  # the current password against, so skip.
  defp maybe_validate_current_password(%{data: %{password_hash: nil}} = changeset, _config),
    do: changeset
  defp maybe_validate_current_password(changeset, config) do
    changeset = Changeset.validate_required(changeset, [:current_password])
    # Only run the (expensive) hash verification if the changeset is still valid.
    case changeset.valid? do
      true -> validate_current_password(changeset, config)
      false -> changeset
    end
  end
  defp validate_current_password(%{data: user, changes: %{current_password: password}} = changeset, config) do
    user
    |> verify_password(password, config)
    |> case do
      true -> changeset
      _ -> Changeset.add_error(changeset, :current_password, "is invalid")
    end
  end
  @doc """
  Verifies a password in a struct.
  The password will be verified by using the `:password_hash_methods` in the
  configuration.
  """
  @spec verify_password(Ecto.Schema.t(), binary(), Config.t()) :: boolean()
  def verify_password(%{password_hash: nil}, _password, _config), do: false
  def verify_password(%{password_hash: password_hash}, password, config) do
    config
    |> password_verify_method()
    |> apply([password, password_hash])
  end
  # Password is only mandatory when no hash has been stored yet; existing
  # users may update other fields without re-entering a password.
  defp maybe_require_password(%{data: %{password_hash: nil}} = changeset) do
    Changeset.validate_required(changeset, [:password])
  end
  defp maybe_require_password(changeset), do: changeset
  defp maybe_validate_password(changeset, config) do
    changeset
    |> Changeset.get_change(:password)
    |> case do
      nil -> changeset
      _ -> validate_password(changeset, config)
    end
  end
  defp validate_password(changeset, config) do
    password_min_length = Config.get(config, :password_min_length, @password_min_length)
    password_max_length = Config.get(config, :password_max_length, @password_max_length)
    Changeset.validate_length(changeset, :password, min: password_min_length, max: password_max_length)
  end
  defp maybe_validate_confirm_password(changeset) do
    changeset
    |> Changeset.get_change(:password)
    |> case do
      nil -> changeset
      password -> validate_confirm_password(changeset, password)
    end
  end
  defp validate_confirm_password(changeset, password) do
    confirm_password = Changeset.get_change(changeset, :confirm_password)
    case password do
      ^confirm_password -> changeset
      _ -> Changeset.add_error(changeset, :confirm_password, "not same as password")
    end
  end
  # Hash only when the changeset is valid so invalid submissions never pay the
  # hashing cost.
  defp maybe_put_password_hash(%Changeset{valid?: true, changes: %{password: password}} = changeset, config) do
    Changeset.put_change(changeset, :password_hash, hash_password(password, config))
  end
  defp maybe_put_password_hash(changeset, _config), do: changeset
  defp hash_password(password, config) do
    config
    |> password_hash_method()
    |> apply([password])
  end
  defp password_hash_method(config) do
    {password_hash_method, _} = password_hash_methods(config)
    password_hash_method
  end
  defp password_verify_method(config) do
    {_, password_verify_method} = password_hash_methods(config)
    password_verify_method
  end
  defp password_hash_methods(config) do
    Config.get(config, :password_hash_methods, {&Password.pbkdf2_hash/1, &Password.pbkdf2_verify/2})
  end
  # NOTE(review): the e-mail grammar this follows is RFC 5322 — "5332" in the
  # attribute name looks like a typo; confirm before renaming (private attr,
  # rename would be behavior-neutral).
  @rfc_5332_regexp_no_ip ~r<\A[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*@(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\z>
  defp email_regexp, do: @rfc_5332_regexp_no_ip
end
|
lib/pow/ecto/schema/changeset.ex
| 0.861815 | 0.541954 |
changeset.ex
|
starcoder
|
# EEx template: `value_context` is interpolated by the phx.gen.solid generator.
defmodule <%= inspect value_context %> do
  @moduledoc """
  Module to allow better Value composition. With this module we're able to
  compose complex structures faster and simpler.
  A Value's base format can only be a List or a Map.
  """
  # Builds a Value via the given module's build/1, then applies the optional
  # :only / :except field filters.
  def build(module, data, opts \\ []) do
    data
    |> module.build()
    |> filter_fields(opts)
    |> remove_fields(opts)
  end
  defp filter_fields(value, opts) do
    case Keyword.get(opts, :only) do
      nil -> value
      fields -> __MODULE__.only(value, fields)
    end
  end
  defp remove_fields(value, opts) do
    case Keyword.get(opts, :except) do
      nil -> value
      fields -> __MODULE__.except(value, fields)
    end
  end
  @doc """
  Initiate a Value base format as a List
  ## Examples
  iex> init_with_list()
  []
  """
  def init_with_list, do: []
  @doc """
  Initiate a Value base format as a Map
  ## Examples
  iex> init_with_map()
  %{}
  """
  def init_with_map, do: %{}
  @doc """
  Initiate a Value based on a pre-existing Struct.
  ## Examples
  iex> country = %Country{name: "Portugal", region: "Europe", slug: "slug", code: "code"}
  %Country{name: "Portugal", region: "Europe", slug: "slug", code: "code"}
  iex> init(country)
  %{name: "Portugal", region: "Europe", slug: "slug", code: "code"}
  iex> init(%{a: 1})
  %{a: 1}
  iex> init([1, 2, 3])
  [1, 2, 3]
  """
  def init(%{__struct__: _} = value) do
    value
    |> Map.from_struct()
    |> Map.drop([:__meta__, :__struct__])
  end
  # Initiate a Value based on a pre-existing Map or List.
  def init(value) do
    value
  end
  @doc """
  Remove specified keys from a Value.
  ## Examples
  iex> response = init(%{a: 1, b: 2})
  %{a: 1, b: 2}
  iex> except(response, [:a])
  %{b: 2}
  """
  # NOTE(review): only defined for maps — calling except/2 (or only/2) on a
  # list Value raises FunctionClauseError. Confirm whether that is intended.
  def except(value, keys) when is_map(value), do: Map.drop(value, keys)
  @doc """
  Return only specified keys from a Value.
  ## Examples
  iex> response = init(%{a: 1, b: 2})
  %{a: 1, b: 2}
  iex> only(response, [:a])
  %{a: 1}
  """
  def only(value, keys) when is_map(value), do: Map.take(value, keys)
  @doc """
  Add an item to a Value list.
  ## Examples
  iex> response = init([1, 2, 3])
  [1, 2, 3]
  iex> add(response, 4)
  [4, 1, 2, 3]
  iex> response = init(%{a: 1, b: 2})
  %{a: 1, b: 2}
  iex> add(response, %{c: 3})
  %{a: 1, b: 2, c: 3}
  iex> add(response, c: 3)
  %{a: 1, b: 2, c: 3}
  """
  # Note: list entries are prepended, not appended.
  def add(value, entry) when is_list(value), do: [entry | value]
  # Add an item to a value map. Accepts a Map or a simple keyword list.
  def add(value, entry) when is_map(value) do
    Enum.reduce(entry, value, fn {key, key_value}, acc ->
      Map.put(acc, key, key_value)
    end)
  end
  @doc """
  Removes keys with `nil` values from the map
  """
  def compact(map) do
    map
    |> Enum.reject(fn {_, value} -> is_nil(value) end)
    |> Map.new()
  end
  @doc """
  Modifies provided key by applying provided function.
  If key is not present it won't be updated, no exception be raised.
  ## Examples
  iex> response = init(%{a: 1, b: 2})
  %{a: 1, b: 2}
  iex> modify(response, :b, fn val -> val * 2 end)
  %{a: 1, b: 4}
  iex> modify(response, :c, fn val -> val * 2 end)
  %{a: 1, b: 2}
  """
  # Map.update/4 inserts nil for a missing key; compact/1 then strips it,
  # which is how "missing keys are left untouched" is achieved.
  def modify(data, key, fun) when is_map(data) and is_function(fun) do
    data
    |> Map.update(key, nil, fun)
    |> compact()
  end
  @doc """
  build associations with their own 'Value' modules when their are present,
  avoiding `nil` or unloaded structs
  """
  def build_assoc(value_module, assoc, fields \\ nil)
  def build_assoc(_value_module, nil, _), do: nil
  def build_assoc(_value_module, %Ecto.Association.NotLoaded{}, _), do: nil
  def build_assoc(value_module, assoc, nil), do: value_module.build(assoc)
  def build_assoc(value_module, assoc, fields), do: value_module.build(assoc, fields)
end
|
priv/templates/phx.gen.solid/value_context.ex
| 0.804905 | 0.597667 |
value_context.ex
|
starcoder
|
defmodule Day15 do
  @moduledoc """
  Advent of Code 2021, day 15: lowest-risk path through a risk grid.

  `part1/1` parses the input and computes the minimum path cost moving only
  right/down. `part2/1` builds the 5x-expanded grid for the second half.
  """

  def part1(file_name \\ "test.txt") do
    file_name
    |> parse()
    |> min_cost()
  end

  def part2(file_name \\ "test.txt") do
    file_name
    |> parse()
    |> add_mins()
    |> add_direction()
    |> add_final_step(24)
    |> expand_grid()
  end

  # Reshapes parse/1 output into the state map consumed by expand_grid/2.
  def add_mins(%{grid: grid, target: {max_x, max_y}}) do
    %{grid: grid, mins: {0, 0}, maxs: {max_x, max_y}}
  end

  def add_direction(state), do: Map.put(state, :direction, :right)

  def add_final_step(state, final_step), do: Map.put(state, :final_step, final_step)

  # Risk values wrap from 9 back to 1 (and the reverse) when tiles are copied.
  def inc(9), do: 1
  def inc(value), do: value + 1

  def dec(1), do: 9
  def dec(value), do: value - 1

  def opposite_direction(:left), do: :right
  def opposite_direction(:right), do: :left

  def expand_grid(state, step \\ 1)

  # All 25 tiles copied: return the accumulated grid.
  # (was `maxs: maxs` — unused binding caused a compiler warning)
  def expand_grid(%{grid: grid, final_step: final_step}, step) when step > final_step do
    grid
  end

  # Every 5th step starts a new row of tiles below the previous one and
  # reverses the horizontal copy direction (boustrophedon expansion).
  def expand_grid(
        %{grid: grid, mins: {min_x, min_y}, maxs: {max_x, max_y}, direction: direction} = state,
        step
      )
      when rem(step, 5) == 0 do
    last_tile = last_tile(state)
    diff = max_y - min_y

    new_grid =
      Enum.reduce(last_tile, grid, fn {{x, y}, value}, acc ->
        Map.put(acc, {x, y + diff + 1}, inc(value))
      end)

    new_min_y = min_y + diff + 1
    new_max_y = new_min_y + diff

    new_state = %{
      state
      | grid: new_grid,
        mins: {min_x, new_min_y},
        maxs: {max_x, new_max_y},
        direction: opposite_direction(direction)
    }

    expand_grid(new_state, step + 1)
  end

  # Copy the previous tile one tile-width to the right, incrementing risks.
  def expand_grid(%{grid: grid, mins: {min_x, min_y}, maxs: {max_x, max_y}, direction: :right} = state, step) do
    last_tile = last_tile(state)
    diff = max_x - min_x

    new_grid =
      Enum.reduce(last_tile, grid, fn {{x, y}, value}, acc ->
        Map.put(acc, {x + diff + 1, y}, inc(value))
      end)

    new_min_x = max_x + 1
    new_max_x = new_min_x + diff
    new_state = %{state | grid: new_grid, mins: {new_min_x, min_y}, maxs: {new_max_x, max_y}, direction: :right}
    expand_grid(new_state, step + 1)
  end

  # Copy the previous tile one tile-width to the left. Decrementing undoes the
  # extra increment so the row below the leftmost tile still differs by +1.
  def expand_grid(%{grid: grid, mins: {min_x, min_y}, maxs: {max_x, max_y}, direction: :left} = state, step) do
    last_tile = last_tile(state)
    diff = max_x - min_x

    new_grid =
      Enum.reduce(last_tile, grid, fn {{x, y}, value}, acc ->
        Map.put(acc, {x - diff - 1, y}, dec(value))
      end)

    new_max_x = min_x - 1
    new_min_x = new_max_x - diff
    new_state = %{state | grid: new_grid, mins: {new_min_x, min_y}, maxs: {new_max_x, max_y}, direction: :left}
    expand_grid(new_state, step + 1)
  end

  # Extracts the most recently added tile (the mins..maxs window) as a map.
  def last_tile(%{grid: grid, mins: {min_x, min_y}, maxs: {max_x, max_y}}) do
    for y <- min_y..max_y,
        x <- min_x..max_x,
        into: %{},
        do: {{x, y}, Map.get(grid, {x, y})}
  end

  # Minimum total risk from the origin to `target`, moving only right or down
  # (simple DP sweep, not Dijkstra). The origin's own risk is not counted.
  #
  # Fix: this head previously matched a `:maxs` key, but part1/1 pipes in
  # parse/1 output shaped %{grid: ..., target: ...}, which raised
  # FunctionClauseError — it now matches the `:target` key.
  def min_cost(%{grid: grid, target: {max_x, max_y} = target}) do
    # Copy the grid into costs; origin is 0 per the puzzle instructions.
    costs = Map.put(grid, {0, 0}, 0)

    # Top row: each cell can only be reached from its left neighbour.
    costs =
      Enum.reduce(1..max_x, costs, fn x, acc ->
        prev_value = Map.get(acc, {x - 1, 0})
        Map.update!(acc, {x, 0}, &(&1 + prev_value))
      end)

    # First column: each cell can only be reached from above.
    costs =
      Enum.reduce(1..max_y, costs, fn y, acc ->
        prev_value = Map.get(acc, {0, y - 1})
        Map.update!(acc, {0, y}, &(&1 + prev_value))
      end)

    # Interior cells: cheaper of top / left neighbour.
    costs =
      for(y <- 1..max_y, x <- 1..max_x, do: {x, y})
      |> Enum.reduce(costs, fn {x, y}, acc ->
        top_value = Map.get(acc, {x, y - 1})
        left_value = Map.get(acc, {x - 1, y})
        Map.update!(acc, {x, y}, &(&1 + min(top_value, left_value)))
      end)

    Map.get(costs, target)
  end

  # Bottom-right coordinate of the grid (largest {x, y} key).
  def target(grid) do
    grid
    |> Enum.max_by(fn {{x, y}, _value} -> {x, y} end)
    |> elem(0)
  end

  # Reads the puzzle file into %{grid: %{{x, y} => risk}, target: {x, y}}.
  def parse(file_name) do
    list =
      ("priv/" <> file_name)
      |> File.read!()
      |> String.split("\n", trim: true)
      |> Enum.map(&to_charlist/1)

    grid =
      for {line, y} <- Enum.with_index(list),
          {value, x} <- Enum.with_index(line),
          into: %{},
          do: {{x, y}, value - ?0}

    %{grid: grid, target: target(grid)}
  end
end
|
jpcarver+elixir/day15/lib/day15.ex
| 0.526586 | 0.639455 |
day15.ex
|
starcoder
|
defmodule Elixometer do
@moduledoc ~S"""
A light wrapper around [exometer](https://github.com/Feuerlabs/exometer).
Elixometer allows you to define metrics and subscribe them automatically
to the default reporter for your environment.
## Configuration
In one of your config files, set up an exometer reporter, and then register
it to elixometer like this:
config(:exometer_core, report: [reporters: [{:exometer_report_tty, []}]])
config(:elixometer, reporter: :exometer_report_tty)
## Metrics
Defining metrics in elixometer is substantially easier than in exometer.
Instead of defining and then updating a metric, just update it. Also, instead
of providing a list of atoms, a metric is named with a period separated
bitstring. Presently, Elixometer supports timers, histograms, gauges,
and counters.
Timings may also be defined by annotating a function with a `@timed`
annotation. This annotation takes a key argument, which tells elixometer
what key to use. You can specify `:auto` and a key will be generated from
the module name and method name.
Updating a metric is similarly easy:
defmodule ParentModule.MetricsTest do
use Elixometer
def counter_test(thingie) do
update_counter("metrics_test.\#{thingie}.count", 1)
end
def timer_test do
timed("metrics_test.timer_test.timings") do
OtherModule.slow_method
end
end
@timed(key: "timed.function")
def function_that_is_timed do
OtherModule.slow_method
end
@timed(key: :auto) # The key will be "parent_module.metrics_test.another_timed_function"
def another_timed_function do
OtherModule.slow_method
end
end
"""
@type metric_name :: String.t() | String.Chars.t()
# OTP application entry point: boots the Elixometer supervision tree.
defmodule App do
  @moduledoc false
  use Application
  def start(_type, _args_) do
    Elixometer.Supervisor.start_link()
  end
end
# Server state for the Elixometer GenServer: the ETS table name plus a map
# of counters scheduled for periodic reset.
defmodule Config do
  @moduledoc false
  defstruct table_name: nil, counters: Map.new()
end
# Compile-time record of one @timed-annotated function, captured by
# __on_definition__/6 and consumed by __before_compile__/1.
defmodule Timer do
  @moduledoc false
  defstruct method_name: nil, key: nil, units: :microsecond, args: nil, guards: nil, body: nil
end
@elixometer_table :elixometer
alias Elixometer.Updater
import Elixometer.Utils
use GenServer
# `use Elixometer` imports the metric helpers and registers the compile-time
# hooks that implement the @timed annotation.
defmacro __using__(_mod) do
  quote do
    import Elixometer
    # Accumulates one %Timer{} per @timed function (see __on_definition__/6).
    Module.register_attribute(__MODULE__, :elixometer_timers, accumulate: true)
    @before_compile Elixometer
    @on_definition Elixometer
  end
end
# Compile-time hook: when a function definition carries a @timed attribute,
# record it (name/args/guards/body/key/units) in :elixometer_timers so
# __before_compile__/1 can wrap it, then clear @timed so it does not leak
# onto the next definition.
def __on_definition__(env, _kind, name, args, guards, body) do
  mod = env.module
  timer_info = Module.get_attribute(mod, :timed)
  if timer_info do
    key =
      case timer_info[:key] do
        :auto ->
          # Convert a fully qualified module to an underscored representation.
          # Module.SubModule.SubSubModule will become
          # module.sub_module.sub_sub_module
          prefix =
            mod
            |> inspect
            |> String.replace(~r/([a-z])([A-Z])/, ~S"\1_\2")
            |> String.downcase()
          "#{prefix}.#{name}"
        other ->
          other
      end
    units = timer_info[:units] || :microsecond
    Module.put_attribute(mod, :elixometer_timers, %Timer{
      method_name: name,
      args: args,
      guards: guards,
      body: normalize_body(body),
      units: units,
      key: key
    })
    Module.delete_attribute(mod, :timed)
  end
end
# Elixir 1.5.0-rc changed on_definition/6 to always wrap body in a keyword
# list (e.g. `[do: body]`). For backwards compatibility, this normalization
# function wraps earlier versions' bodies in a keyword list, too.
defp normalize_body(body) do
  case Version.compare(System.version(), "1.5.0-rc") do
    :lt -> [do: body]
    # On >= 1.5 a nil body means @timed was put on a bodiless head.
    _ when is_nil(body) -> raise "timed function must have a body"
    _ -> body
  end
end
# Wraps a recorded @timed function body in the timing instrumentation.
defp build_timer_body(%Timer{key: key, units: units, body: [do: body]}) do
  build_timer(key, units, body)
end
# Compile-time hook: re-defines every @timed function as an overridable
# wrapper whose body is the original body wrapped in timing code
# (see build_timer_body/1). Guard clauses are preserved when present.
defmacro __before_compile__(env) do
  mod = env.module
  timers = Module.get_attribute(mod, :elixometer_timers)
  timed_methods =
    timers
    # The attribute accumulates in reverse definition order; restore it.
    |> Enum.reverse()
    |> Enum.map(fn %Timer{} = timer_data ->
      Module.make_overridable(
        mod,
        [{timer_data.method_name, length(timer_data.args)}]
      )
      body = build_timer_body(timer_data)
      if length(timer_data.guards) > 0 do
        quote do
          def unquote(timer_data.method_name)(unquote_splicing(timer_data.args))
              when unquote_splicing(timer_data.guards) do
            unquote(body)
          end
        end
      else
        quote do
          def unquote(timer_data.method_name)(unquote_splicing(timer_data.args)) do
            unquote(body)
          end
        end
      end
    end)
  quote do
    (unquote_splicing(timed_methods))
  end
end
# GenServer callback: creates the shared ETS table and schedules a :tick
# message every 250ms (used to drive periodic counter resets).
def init(:ok) do
  table_name = :ets.new(@elixometer_table, [:set, :named_table, read_concurrency: true])
  :timer.send_interval(250, :tick)
  {:ok, %Config{table_name: table_name}}
end
# Starts the Elixometer GenServer registered under the module name.
def start_link do
  GenServer.start_link(__MODULE__, :ok, name: __MODULE__)
end
# Fetches the full exometer value for a metric; accepts either a
# period-separated string ("a.b.c") or an exometer key list.
@spec get_metric_value(metric_name) :: {:ok, any} | {:error, :not_found}
def get_metric_value(metric_name) do
  metric_name
  |> to_exometer_key
  |> :exometer.get_value()
end
# Same, but narrowed to a single datapoint (e.g. :value, :mean).
@spec get_metric_value(metric_name, :exometer.datapoint()) :: {:ok, any} | {:error, :not_found}
def get_metric_value(metric_name, data_point) do
  metric_val =
    metric_name
    |> to_exometer_key
    |> :exometer.get_value(data_point)
  case metric_val do
    {:ok, metric} -> {:ok, metric[data_point]}
    r = {:error, _reason} -> r
  end
end
# Lists all metrics matching the (wildcarded) name — see get_values/1.
@spec get_metric_values(metric_name) :: [
        {:exometer.name(), :exometer.type(), :exometer.status()}
      ]
def get_metric_values(metric_name) do
  metric_name
  |> to_exometer_key
  |> get_values
end
# Sums one datapoint across all metrics matching the name.
@spec get_metric_values(metric_name, :exometer.datapoint()) ::
        {:ok, integer} | {:error, :not_found}
def get_metric_values(metric_name, data_point) do
  metric_val =
    metric_name
    |> get_metric_values
    |> get_values_total(data_point)
  case metric_val do
    :not_found -> {:error, :not_found}
    total -> {:ok, total}
  end
end
# Normalizes a metric name to exometer's list form; string names are split
# on periods ("a.b.c" -> ["a", "b", "c"]).
defp to_exometer_key(metric_name) when is_list(metric_name), do: metric_name
defp to_exometer_key(metric_name) when is_binary(metric_name) do
  String.split(metric_name, ".")
end
# Queries exometer with a trailing wildcard; `key -- ["_"]` drops the first
# literal "_" segment (if any) before appending the :_ match-all.
defp get_values(key) when is_list(key) do
  :exometer.get_values((key -- ["_"]) ++ [:_])
end
# Sums `data_point` across all result rows; halts with :not_found as soon as
# any row is missing the datapoint.
defp get_values_total(values, data_point) do
  Enum.reduce_while(values, 0, fn {_, attrs}, total ->
    case Keyword.get(attrs, data_point) do
      nil -> {:halt, :not_found}
      value -> {:cont, total + value}
    end
  end)
end
@doc """
Updates a histogram with a new value. If the metric doesn't exist, a new metric
is created and subscribed to.
"""
@spec update_histogram(String.t(), number, pos_integer, boolean) :: :ok
def update_histogram(name, delta, aggregate_seconds \\ 60, truncate \\ true)
    when is_bitstring(name) do
  Updater.histogram(name, delta, aggregate_seconds, truncate)
end
@doc """
Updates and alternately creates spiral metric. A spiral metric is a metric maintains a series
of internal slots that "age out" and are replaced by newer values. This is useful for
maintaining QPS stats.
"""
@spec update_spiral(String.t(), number, time_span: pos_integer, slot_period: pos_integer) :: :ok
def update_spiral(name, delta, opts \\ [time_span: :timer.seconds(60), slot_period: 1000]) do
  Updater.spiral(name, delta, opts)
end
@doc """
Updates a counter metric. If the metric doesn't exist, the metric is created
and the metric is subscribed to the default reporter.
If the value of the `:reset_seconds` option is greater than zero, the counter will be reset
automatically at the specified interval.
"""
@spec update_counter(String.t(), integer, reset_seconds: nil | integer) :: :ok
# NOTE(review): the options pattern only matches a keyword list of exactly
# `[reset_seconds: secs]`; any other option shape raises FunctionClauseError.
def update_counter(name, delta, [reset_seconds: secs] \\ [reset_seconds: nil])
    when is_bitstring(name) and (is_nil(secs) or secs >= 1) do
  Updater.counter(name, delta, secs)
end
@doc """
Clears a counter with the given name.
"""
@spec clear_counter(metric_name) :: :ok | {:error, any}
# String names are first expanded to the full exometer key via format/2,
# then the list-key clause performs the actual reset.
def clear_counter(metric_name) when is_bitstring(metric_name) do
  clear_counter(format(:counters, metric_name))
end

def clear_counter(metric_name) when is_list(metric_name) do
  :exometer.reset(metric_name)
end
@doc """
Updates a gauge metric. If the metric doesn't exist, the metric is created
and the metric is subscribed to the default reporter.
"""
@spec update_gauge(String.t(), number) :: :ok
def update_gauge(name, value) when is_bitstring(name) do
  Updater.gauge(name, value)
end
@doc """
Updates a timer metric. If the metric doesn't exist, it will be created and
subscribed to the default reporter.
The time units default to *microseconds*, but you can also pass in any of
the units in `t:System.time_unit/0`, with the exception of `pos_integer`.
This includes `:second`, `:millisecond`, `:microsecond`, and `:nanosecond`.
Note that nanoseconds are provided for convenience, but Erlang does not
actually provide this much granularity.
"""
defmacro timed(name, units \\ :microsecond, do: block) do
  build_timer(name, units, block)
end

# Builds the quoted timing wrapper injected at the call site: measures the
# block with :timer.tc/1 (always microseconds), reports the elapsed time
# via Updater.timer/3 together with the requested unit, and evaluates to
# the block's own return value.
defp build_timer(name, units, block) do
  quote do
    {elapsed_us, rv} = :timer.tc(fn -> unquote(block) end)
    Updater.timer(unquote(name), unquote(units), elapsed_us)
    rv
  end
end
@doc """
Registers a counter with this server so it can be swept by the periodic
`:tick` reset (see `handle_info/2`).

`ttl_millis` is the auto-reset interval in milliseconds; `nil` (the default)
means the counter is never reset automatically.
"""
# The two original clauses (with and without ttl_millis) collapsed into a
# single head with a default argument — same add_counter/1 and add_counter/2
# interface, less duplication.
def add_counter(metric_name, ttl_millis \\ nil) do
  GenServer.cast(__MODULE__, {:add_counter, metric_name, ttl_millis})
end
@doc """
Returns true if a metric with the given name has been defined.
"""
# String names are normalized to the list-key form before the ETS lookup.
def metric_defined?(name) when is_bitstring(name) do
  name |> String.split(".") |> metric_defined?
end

def metric_defined?(name) do
  :ets.member(@elixometer_table, {:definitions, name})
end
# Returns true if the named metric already has a reporter subscription
# recorded in the ETS table.
def metric_subscribed?(name) do
  :ets.member(@elixometer_table, {:subscriptions, name})
end
# Subscribes the metric via the GenServer unless it is already subscribed.
# Returns the GenServer.call result, or nil when already subscribed.
def ensure_subscribed(name) do
  if not metric_subscribed?(name) do
    GenServer.call(__MODULE__, {:subscribe, name})
  end
end
# Runs `defn_fn` inside the GenServer to define the metric unless it is
# already defined (the server re-checks to close the race window).
# Always returns :ok.
def ensure_metric_defined(name, defn_fn) do
  if not metric_defined?(name) do
    GenServer.call(__MODULE__, {:define_metric, name, defn_fn})
  end

  :ok
end
@doc """
Ensures a metric is correctly registered in Elixometer.
This means that Elixometer knows about it and its metrics are
subscribed to an exometer reporter.
"""
# NOTE(review): the rescue clause *returns* the ErlangError struct instead
# of re-raising, so callers receive the exception as an ordinary value —
# confirm this is intentional.
def ensure_registered(metric_name, register_fn) do
  ensure_metric_defined(metric_name, register_fn)
  subscribe(metric_name)
rescue
  e in ErlangError -> e
end
@doc """
Ensures that a metric is subscribed to an exometer reporter.
"""
# Returns the GenServer.call result, or nil when already subscribed.
def subscribe(metric_name) do
  if not metric_subscribed?(metric_name) do
    GenServer.call(__MODULE__, {:subscribe, metric_name})
  end
end
# Serialized subscription creation; the real work happens in
# create_subscription/1, which is idempotent.
def handle_call({:subscribe, metric_name}, _caller, state) do
  create_subscription(metric_name)
  {:reply, :ok, state}
end

def handle_call({:define_metric, metric_name, defn_fn}, _caller, state) do
  # we re-check whether the metric is defined here to prevent
  # a race condition in ensure_metric_defined
  if not metric_defined?(metric_name) do
    defn_fn.()
    :ets.insert(@elixometer_table, {{:definitions, metric_name}, true})
  end

  {:reply, :ok, state}
end
# Records a counter (and its optional reset TTL in milliseconds) for the
# periodic :tick sweep handled in handle_info/2.
def handle_cast({:add_counter, metric_name, ttl_millis}, config) do
  new_counters = Map.put(config.counters, metric_name, ttl_millis)
  {:noreply, %Config{config | counters: new_counters}}
end
# Periodic sweep: resets any tracked counter whose ms_since_reset has
# reached its configured TTL (counters with a nil TTL are never reset).
# NOTE(review): the {:ok, [ms_since_reset: _]} match crashes this server
# if :exometer.get_value/2 errors for a tracked counter — confirm the
# let-it-crash behavior is intended.
def handle_info(:tick, config) do
  Enum.each(
    config.counters,
    fn {name, millis} ->
      {:ok, [ms_since_reset: since_reset]} = :exometer.get_value(name, :ms_since_reset)

      if millis && since_reset >= millis do
        :exometer.reset(name)
      end
    end
  )

  {:noreply, config}
end
# If a metric isn't subscribed to our reporters, create a subscription in our
# ets table and subscribe our metric to exometer's reporters.
# Datapoints listed in the :excluded_datapoints config are skipped; when no
# reporter is configured only the ETS marker is written.
defp create_subscription(metric_name) do
  if not metric_subscribed?(metric_name) do
    cfg = Application.get_all_env(:elixometer)
    reporter = cfg[:reporter]
    interval = cfg[:update_frequency]
    subscribe_options = cfg[:subscribe_options] || []
    excluded_datapoints = cfg[:excluded_datapoints] || []

    if reporter do
      metric_name
      |> :exometer.info()
      |> get_datapoints()
      |> Enum.reject(&Enum.member?(excluded_datapoints, &1))
      # Subscribing is done purely for its side effect, so use Enum.each/2
      # rather than Enum.map/2 (the mapped results were discarded).
      |> Enum.each(
        &:exometer_report.subscribe(reporter, metric_name, &1, interval, subscribe_options)
      )
    end

    :ets.insert(@elixometer_table, {{:subscriptions, metric_name}, true})
  end
end
# Extracts the datapoint names from an :exometer.info/1 proplist: prefer
# the explicit :datapoints entry, otherwise derive them from the keys of
# the metric's current :value proplist.
defp get_datapoints(info) do
  if Keyword.has_key?(info, :datapoints) do
    Keyword.fetch!(info, :datapoints)
  else
    info
    |> Keyword.fetch!(:value)
    |> Keyword.keys()
  end
end
end
|
lib/elixometer.ex
| 0.8815 | 0.424233 |
elixometer.ex
|
starcoder
|
defmodule GenFSMHelpers do
  @moduledoc """
  Helper library for GenFSM.
  Small constructors for the tuples GenFSM callbacks must return, so the
  callbacks read declaratively instead of building tuples by hand.
  * next_state
  * reply
  """

  @doc """
  Return next_state tuple, given a tuple.
  ## Examples
      iex> GenFSMHelpers.next_state({:data, :idle})
      {:next_state, :idle, :data}
  """
  @spec next_state({any, atom}) :: {:next_state, atom, any}
  def next_state({data, fsm_state}), do: next_state(data, fsm_state)

  @doc """
  Return next_state tuple.
  ## Examples
      iex> GenFSMHelpers.next_state(:data, :active)
      {:next_state, :active, :data}
      iex> GenFSMHelpers.next_state({:data, :active}, 5000)
      {:next_state, :active, :data, 5000}
  """
  @spec next_state({any, atom}, integer) :: {:next_state, atom, any, integer}
  def next_state({data, fsm_state}, timeout), do: next_state(data, fsm_state, timeout)

  @spec next_state(any, atom) :: {:next_state, atom, any}
  def next_state(data, fsm_state), do: {:next_state, fsm_state, data}

  @doc """
  Return next_state tuple with timeout.
  ## Examples
      iex> GenFSMHelpers.next_state(%{data: true}, :busy, 1000)
      {:next_state, :busy, %{data: true}, 1000}
  """
  @spec next_state(any, atom, integer) :: {:next_state, atom, any, integer}
  def next_state(data, fsm_state, timeout), do: {:next_state, fsm_state, data, timeout}

  @doc """
  Return reply tuple.
  ## Examples
      iex> GenFSMHelpers.reply(%{}, true, :pending)
      {:reply, true, :pending, %{}}
  """
  @spec reply(any, any, atom) :: {:reply, any, atom, any}
  def reply(data, response, fsm_state), do: {:reply, response, fsm_state, data}

  @doc """
  Return reply tuple with timeout.
  ## Examples
      iex> GenFSMHelpers.reply(%{}, true, :pending, 5000)
      {:reply, true, :pending, %{}, 5000}
  """
  @spec reply(any, any, atom, integer) :: {:reply, any, atom, any, integer}
  def reply(data, response, fsm_state, timeout),
    do: {:reply, response, fsm_state, data, timeout}
end
|
lib/gen_fsm_helpers.ex
| 0.849504 | 0.45532 |
gen_fsm_helpers.ex
|
starcoder
|
defmodule Tanx.Handoff.Impl do
  @moduledoc """
  GenServer implementing process handoff.

  Handoff payloads and cluster membership are replicated between nodes via
  two DeltaCrdt instances (named `<name>.HandoffCrdt` and
  `<name>.MembersCrdt`); a local ETS table mirrors the handoff CRDT for
  fast reads. Consumers either receive stored data immediately (`:request`)
  or are notified later when the data arrives (`:store` / CRDT sync).
  """

  require Logger

  defmodule State do
    @moduledoc false
    # name:      registered name of this server (also used as the ETS table name)
    # nodes:     MapSet of peer nodes derived from the members CRDT
    # ets_table: local cache of the handoff CRDT contents
    # requests:  pending handoff requests, name => {pid, message}
    defstruct name: nil,
              nodes: MapSet.new(),
              processes_updated_counter: 0,
              processes_updated_at: 0,
              ets_table: nil,
              requests: %{}
  end

  @spec child_spec(options :: list()) :: Supervisor.child_spec()
  def child_spec(options \\ []) do
    %{
      id: Keyword.get(options, :name, __MODULE__),
      start: {__MODULE__, :start_link, [options]}
    }
  end

  @spec start_link(options :: list()) :: GenServer.on_start()
  def start_link(options \\ []) do
    name = Keyword.get(options, :name)

    # The name doubles as the ETS table name and the CRDT name prefix, so
    # it must be an atom.
    if !is_atom(name) || is_nil(name) do
      raise ArgumentError, "expected :name to be given and to be an atom, got: #{inspect(name)}"
    end

    GenServer.start_link(__MODULE__, options, name: name)
  end

  ### GenServer callbacks

  def init(opts) do
    # An optional :init_module may rewrite the options before startup.
    {:ok, opts} =
      case Keyword.get(opts, :init_module) do
        nil -> {:ok, opts}
        module -> module.init(opts)
      end

    Process.flag(:trap_exit, true)
    name = Keyword.get(opts, :name)
    Logger.info("Starting #{inspect(__MODULE__)} with name #{inspect(name)}")

    unless is_atom(name) do
      raise ArgumentError, "expected :name to be given and to be an atom, got: #{inspect(name)}"
    end

    # Local read cache of the handoff CRDT.
    :ets.new(name, [:named_table, {:read_concurrency, true}])

    state = %State{
      name: name,
      ets_table: name
    }

    # Static member list (if given): register every member in the members
    # CRDT and wire up both CRDTs' neighbour lists.
    state =
      case Keyword.get(opts, :members) do
        nil ->
          state

        members ->
          members = Enum.map(members, &fully_qualified_name/1)

          Enum.each(members, fn member ->
            DeltaCrdt.mutate_async(members_crdt_name(state.name), :add, [member, 1])
          end)

          neighbours = members -- [fully_qualified_name(state.name)]
          send(members_crdt_name(state.name), {:set_neighbours, members_crdt_names(neighbours)})
          send(handoff_crdt_name(state.name), {:set_neighbours, handoff_crdt_names(neighbours)})
          %{state | nodes: Enum.map(members, fn {_name, node} -> node end) |> MapSet.new()}
      end

    {:ok, state}
  end

  # Handoff CRDT changed: resync the ETS cache with the CRDT contents and
  # fulfill any pending requests whose data has now arrived.
  def handle_info({:handoff_updated, reply_to}, state) do
    handoff_data = DeltaCrdt.read(handoff_crdt_name(state.name), 30_000)

    # Drop ETS entries that no longer exist in the CRDT, then mirror the rest.
    Enum.each(:ets.match(state.ets_table, {:"$1", :_}), fn [key] ->
      if !Map.has_key?(handoff_data, key) do
        :ets.delete(state.ets_table, key)
      end
    end)

    :ets.insert(state.ets_table, Map.to_list(handoff_data))
    GenServer.reply(reply_to, :ok)

    # Deliver newly-available data to waiting requesters and remove the
    # fulfilled entries; unfulfilled requests are kept.
    requests =
      state.requests
      |> Enum.filter(fn {name, {pid, message}} ->
        case Map.get(handoff_data, name) do
          nil ->
            true

          data ->
            send(pid, {message, data})
            Logger.info("**** Handoff sending message for: #{inspect(name)}")
            # NOTE(review): this removes `name` from the *members* CRDT; the
            # sibling fulfillment path in handle_call({:request, ...}) removes
            # from the handoff CRDT — confirm this isn't meant to be
            # handoff_crdt_name/1.
            DeltaCrdt.mutate_async(members_crdt_name(state.name), :remove, [name])
            :ets.delete(state.ets_table, name)
            false
        end
      end)
      |> Enum.into(%{})

    state = %State{state | requests: requests}
    {:noreply, state}
  end

  # Members CRDT changed: recompute the node set and rewire both CRDTs'
  # neighbour lists.
  def handle_info({:members_updated, reply_to}, state) do
    members = Map.keys(DeltaCrdt.read(members_crdt_name(state.name), 30_000))
    members = members -- [state.name]
    new_nodes = Enum.map(members, fn {_name, node} -> node end) |> MapSet.new()
    send(members_crdt_name(state.name), {:set_neighbours, members_crdt_names(members)})
    send(handoff_crdt_name(state.name), {:set_neighbours, handoff_crdt_names(members)})
    GenServer.reply(reply_to, :ok)
    {:noreply, %{state | nodes: new_nodes}}
  end

  # Trap-exit: propagate linked-process exits as a server stop.
  def handle_info({:EXIT, _pid, reason}, state) do
    {:stop, reason, state}
  end

  # Replaces the member set: diff against the CRDT contents, removing
  # departed members and adding new ones, then rewire neighbours.
  def handle_call({:set_members, members}, _from, state) do
    existing_members = MapSet.new(Map.keys(DeltaCrdt.read(members_crdt_name(state.name))))
    new_members = MapSet.new(member_names(members))

    Enum.each(MapSet.difference(existing_members, new_members), fn removed_member ->
      DeltaCrdt.mutate_async(members_crdt_name(state.name), :remove, [removed_member])
    end)

    Enum.each(MapSet.difference(new_members, existing_members), fn added_member ->
      DeltaCrdt.mutate_async(members_crdt_name(state.name), :add, [added_member, 1])
    end)

    neighbours = MapSet.difference(new_members, MapSet.new([state.name]))
    send(members_crdt_name(state.name), {:set_neighbours, members_crdt_names(neighbours)})
    send(handoff_crdt_name(state.name), {:set_neighbours, handoff_crdt_names(neighbours)})
    {:reply, :ok, state}
  end

  def handle_call(:get_handoff_ets_table, _from, %{ets_table: t} = state),
    do: {:reply, t, state}

  # Request handoff data by name: return it immediately if cached, otherwise
  # park the request until matching data is stored or synced in.
  def handle_call({:request, name, message, pid}, _from, state) do
    case :ets.lookup(state.ets_table, name) do
      [{^name, data}] ->
        Logger.info("**** Handoff fulfilling request for: #{inspect(name)}")
        DeltaCrdt.mutate_async(handoff_crdt_name(state.name), :remove, [name])
        :ets.delete(state.ets_table, name)
        {:reply, {:ok, :data, data}, state}

      _ ->
        Logger.info("**** Handoff deferring request for: #{inspect(name)}")
        requests = Map.put(state.requests, name, {pid, message})
        {:reply, {:ok, :requested}, %State{state | requests: requests}}
    end
  end

  # Cancels a pending request.
  def handle_call({:unrequest, name}, _from, state) do
    requests = Map.delete(state.requests, name)
    {:reply, :ok, %State{state | requests: requests}}
  end

  # Stores handoff data: delivered straight to a waiting requester if one
  # exists, otherwise persisted to the CRDT and the ETS cache.
  def handle_call({:store, name, data}, _from, state) do
    Logger.info("**** Handoff storing data for: #{inspect(name)}")

    case Map.get(state.requests, name) do
      nil ->
        DeltaCrdt.mutate(handoff_crdt_name(state.name), :add, [name, data])
        :ets.insert(state.ets_table, {name, data})

      {pid, message} ->
        send(pid, {message, data})
    end

    {:reply, :ok, state}
  end

  # Normalizes members to {name, node} tuples (bare atoms get the local node).
  defp member_names(names) do
    Enum.map(names, fn
      {name, node} -> {name, node}
      name when is_atom(name) -> {name, node()}
    end)
  end

  defp members_crdt_names(names) do
    Enum.map(names, fn {name, node} -> {members_crdt_name(name), node} end)
  end

  defp handoff_crdt_names(names) do
    Enum.map(names, fn {name, node} -> {handoff_crdt_name(name), node} end)
  end

  defp members_crdt_name(name), do: :"#{name}.MembersCrdt"
  defp handoff_crdt_name(name), do: :"#{name}.HandoffCrdt"

  defp fully_qualified_name({name, node}) when is_atom(name) and is_atom(node), do: {name, node}
  defp fully_qualified_name(name) when is_atom(name), do: {name, node()}
end
|
apps/tanx/lib/tanx/handoff/impl.ex
| 0.674587 | 0.431225 |
impl.ex
|
starcoder
|
defmodule Ockam.Hub.Service.StaticForwarding do
  @moduledoc """
  Static forwarding service
  Subscribes workers (by return route) to a string forwarding alias
  Forwarding alias is parsed from the payload as a BARE `string` type
  New subscriptions update the forwarding route in the same forwarding alias
  Forwarder address is created from prefix and alias as <prefix>_<alias>
  e.g. if prefix is `forward_to_` and alias is `my_alias`, forwarder address will be: `forward_to_my_alias`
  Messages sent to the forwarder address will be forwarded to the forwarding route
  Options:
  `prefix` - address prefix
  """
  use Ockam.Worker

  alias __MODULE__.Forwarder
  alias Ockam.Message

  require Logger

  @impl true
  def setup(options, state) do
    # Default the address prefix to this worker's own address.
    prefix = Keyword.get(options, :prefix, state.address)
    {:ok, Map.put(state, :prefix, prefix)}
  end

  @impl true
  def handle_message(message, state) do
    payload = Message.payload(message)

    # The payload must be exactly one BARE-encoded string (no trailing bytes)
    # naming the forwarding alias.
    case :bare.decode(payload, :string) do
      {:ok, alias_str, ""} ->
        return_route = Message.return_route(message)
        subscribe(alias_str, return_route, state)

      err ->
        # NOTE(review): this branch returns Logger.error/1's result rather
        # than an {:ok, state} tuple — confirm against Ockam.Worker's
        # expected handle_message return contract.
        Logger.error("Invalid message format: #{inspect(payload)}, reason #{inspect(err)}")
    end
  end

  # Ensures a forwarder worker exists for the alias and points it at `route`.
  def subscribe(alias_str, route, state) do
    with {:ok, worker} <- ensure_alias_worker(alias_str, state) do
      ## NOTE: Non-ockam message routing here
      Forwarder.update_route(worker, route)
      {:ok, state}
    end
  end

  # Finds or creates the forwarder worker registered under the alias address.
  def ensure_alias_worker(alias_str, state) do
    forwarder_address = forwarder_address(alias_str, state)

    case Ockam.Node.whereis(forwarder_address) do
      nil -> Forwarder.create(alias: alias_str, address: forwarder_address)
      _pid -> {:ok, forwarder_address}
    end
  end

  # Builds the forwarder address as "<prefix>_<alias>".
  def forwarder_address(alias_str, state) do
    Map.get(state, :prefix, "") <> "_" <> alias_str
  end
end
defmodule Ockam.Hub.Service.StaticForwarding.Forwarder do
  @moduledoc """
  Topic subscription for pub_sub service
  Forwards all messages to all subscribed routes
  Subscribe API is internal, it adds a route to the subscribers set
  """
  use Ockam.Worker

  alias Ockam.Message

  # Replaces the forwarding route for this forwarder (synchronous call).
  def update_route(worker, route) do
    ## TODO: reply to the subscriber?
    Ockam.Worker.call(worker, {:update_route, route})
  end

  @impl true
  def setup(options, state) do
    # Start with an empty forwarding route; it is set via update_route/2.
    alias_str = Keyword.get(options, :alias)
    {:ok, Map.merge(state, %{alias: alias_str, route: []})}
  end

  @impl true
  def handle_call({:update_route, route}, _from, %{alias: alias_str} = state) do
    state = Map.put(state, :route, route)

    # Confirm the subscription to the new route by sending it the alias
    # (BARE-encoded) with this forwarder as the return route.
    Ockam.Router.route(%{
      onward_route: route,
      return_route: [state.address],
      payload: :bare.encode("#{alias_str}", :string)
    })

    {:reply, :ok, state}
  end

  @impl true
  def handle_message(message, state) do
    # Strip our own address from the onward route and prepend the stored
    # forwarding route, then forward the message unchanged.
    [_me | onward_route] = Message.onward_route(message)
    route = Map.get(state, :route, [])
    Ockam.Router.route(Message.forward(message, route ++ onward_route))
    {:ok, state}
  end
end
|
implementations/elixir/ockam/ockam_hub/lib/hub/service/static_forwarding.ex
| 0.83104 | 0.436802 |
static_forwarding.ex
|
starcoder
|
defmodule Clipboard do
  @moduledoc """
  Copy and paste from system clipboard.
  Wraps ports to system-specific utilities responsible for clipboard access. It uses the default
  clipboard utilities on macOS, Linux and Windows but can be configured to call any executable.
  """

  @doc """
  Copy `value` to system clipboard.
  The original `value` is always returned, so `copy/1` can be used in pipelines.
  # Examples
      iex> Clipboard.copy("Hello, World!")
      "Hello, World!"
      iex> Clipboard.copy(["Hello", "World!"])
      ["Hello", "World!"]
      iex> "Hello, World!" |> Clipboard.copy() |> IO.puts()
      "Hello, World"
  """
  @spec copy(iodata) :: iodata
  # Errors from the underlying utility are intentionally ignored here; use
  # copy!/1 to surface them.
  def copy(value) do
    copy(:os.type(), value)
    value
  end

  @doc """
  Copy `value` to system clipboard but throw exception if it fails.
  Identical to `copy/1`, except raise an exception if the operation fails.
  The operation may fail when running Clipboard on unsupported operating systems or with missing
  executables (check your config).
  """
  @spec copy!(iodata) :: iodata | no_return
  def copy!(value) do
    case copy(:os.type(), value) do
      :ok ->
        value

      {:error, reason} ->
        raise reason
    end
  end

  # OS-specific copy: resolve the configured utility (or the platform
  # default) and stream the value to it.
  defp copy({:unix, :darwin}, value) do
    command = Application.get_env(:clipboard, :macos)[:copy] || {"pbcopy", []}
    execute(command, value)
  end

  defp copy({:unix, _os_name}, value) do
    command = Application.get_env(:clipboard, :unix)[:copy] || {"xclip", []}
    execute(command, value)
  end

  defp copy({:win32, _os_name}, value) do
    command = Application.get_env(:clipboard, :windows)[:copy] || {"clip", []}
    execute(command, value)
  end

  defp copy({_unsupported_family, _unsupported_name}, _value) do
    {:error, "Unsupported operating system"}
  end

  @doc """
  Return the contents of system clipboard.
  # Examples
      iex> Clipboard.paste()
      "Hello, World!"
  """
  @spec paste() :: String.t()
  # Returns nil instead of an error tuple when pasting fails.
  def paste do
    case paste(:os.type()) do
      {:error, _reason} ->
        nil

      output ->
        output
    end
  end

  @doc """
  Return the contents of system clipboard but throw exception if it fails.
  Identical to `paste/1`, except raise an exception if the operation fails.
  The operation may fail when running Clipboard on unsupported operating systems or with missing
  executables (check your config).
  """
  @spec paste!() :: String.t() | no_return
  def paste! do
    case paste(:os.type()) do
      {:error, reason} ->
        raise reason

      output ->
        output
    end
  end

  defp paste({:unix, :darwin}) do
    command = Application.get_env(:clipboard, :macos)[:paste] || {"pbpaste", []}
    execute(command)
  end

  defp paste({:unix, _os_name}) do
    command = Application.get_env(:clipboard, :unix)[:paste] || {"xclip", ["-o"]}
    execute(command)
  end

  # NOTE(review): there is no {:win32, _} paste clause, so pasting on
  # Windows falls through to this unsupported branch — confirm intended.
  defp paste(_unsupported_os) do
    {:error, "Unsupported operating system"}
  end

  # Ports

  # Runs the paste utility synchronously and returns its stdout, or an
  # {:error, reason} tuple if the executable is missing or exits non-zero.
  defp execute(nil), do: {:error, "Unsupported operating system"}

  defp execute({executable, args}) when is_binary(executable) and is_list(args) do
    case System.find_executable(executable) do
      nil ->
        {:error, "Cannot find #{executable}"}

      _ ->
        case System.cmd(executable, args) do
          {output, 0} ->
            output

          {error, _} ->
            {:error, error}
        end
    end
  end

  # Streams `value` to the copy utility's stdin through a port. Non-binary
  # values are pretty-printed first. Returns :ok without awaiting the port.
  defp execute(nil, _), do: {:error, "Unsupported operating system"}

  defp execute({executable, args}, value) when is_binary(executable) and is_list(args) do
    case System.find_executable(executable) do
      nil ->
        {:error, "Cannot find #{executable}"}

      path ->
        port = Port.open({:spawn_executable, path}, [:binary, args: args])

        case value do
          value when is_binary(value) ->
            send(port, {self(), {:command, value}})

          value ->
            send(port, {self(), {:command, format(value)}})
        end

        send(port, {self(), :close})
        :ok
    end
  end

  # Renders an arbitrary term to text with no width/length limits.
  defp format(value) do
    doc = Inspect.Algebra.to_doc(value, %Inspect.Opts{limit: :infinity})
    Inspect.Algebra.format(doc, :infinity)
  end
end
|
lib/clipboard.ex
| 0.864454 | 0.498108 |
clipboard.ex
|
starcoder
|
defmodule Asteroid.ObjectStore.RefreshToken.Riak do
  @moduledoc """
  Riak implementation of the `Asteroid.ObjectStore.RefreshToken` behaviour
  ## Initializing a Riak bucket type
  ```console
  $ sudo riak-admin bucket-type create token '{"props":{"datatype":"map", "backend":"bitcask_mult"}}'
  token created
  $ sudo riak-admin bucket-type activate token
  token has been activated
  ```
  ## Options
  The options (`Asteroid.ObjectStore.RefreshToken.opts()`) are:
  - `:bucket_type`: an `String.t()` for the bucket type that must be created beforehand in
  Riak. No defaults, **mandatory**
  - `bucket_name`: a `String.t()` for the bucket name. Defaults to `"refresh_token"`
  - `:purge_interval`: the `integer()` interval in seconds the purge process will be triggered,
  or `:no_purge` to disable purge. Defaults to `1200` (20 minutes)
  - `:rows`: the maximum number of results that a search will return. Defaults to `1_000_000`.
  Search is used by the purge process.
  ## Installation function
  The `install/1` function executes the following actions:
  - it installs a custom schema (`asteroid_object_store_refresh_token_riak_schema`)
  - it creates a new index (`asteroid_object_store_refresh_token_riak_index`) on the bucket
  (and not the bucket type - so as to avoid collisions)
  This is necessary to:
  1. Efficiently index expiration timestamp
  2. Disable indexing of raw refresh token data
  ## Purge process
  The purge process uses the `Singleton` library. Therefore the purge process will be unique
  per cluster (and that's probably what you want if you use Riak).
  """

  require Logger

  @behaviour Asteroid.ObjectStore.RefreshToken

  @impl true
  def install(opts) do
    bucket_type = opts[:bucket_type] || raise "Missing bucket type"
    bucket_name = opts[:bucket_name] || "refresh_token"

    with :ok <-
           Riak.Search.Schema.create(
             schema_name(),
             (:code.priv_dir(:asteroid) ++ '/riak/object_store_refresh_token_schema.xml')
             |> File.read!()
           ),
         :ok <- Riak.Search.Index.put(index_name(), schema_name()),
         :ok <- Riak.Search.Index.set({bucket_type, bucket_name}, index_name()) do
      Logger.info(
        "#{__MODULE__}: created refresh token store `#{bucket_name}` " <>
          "of bucket type `#{bucket_type}`"
      )

      :ok
    else
      e ->
        # FIX: the error message was built and discarded without being
        # logged; wrap it in Logger.error/1 like the success path's info log.
        Logger.error(
          "#{__MODULE__}: failed to create refresh token store `#{bucket_name}` " <>
            "of bucket type `#{bucket_type}` (reason: #{inspect(e)})"
        )

        {:error, "#{inspect(e)}"}
    end
  catch
    :exit, e ->
      bucket_type = opts[:bucket_type] || raise "Missing bucket type"
      bucket_name = opts[:bucket_name] || "refresh_token"

      # FIX: same as above — actually log the failure message.
      Logger.error(
        "#{__MODULE__}: failed to create refresh token store `#{bucket_name}` " <>
          "of bucket type `#{bucket_type}` (reason: #{inspect(e)})"
      )

      {:error, "#{inspect(e)}"}
  end

  @impl true
  def start_link(opts) do
    opts = Keyword.merge([purge_interval: 1200], opts)

    # we launch the process anyway because we need to return a process
    # but the singleton will do nothing if the value is `:no_purge`
    Singleton.start_child(__MODULE__.Purge, opts, __MODULE__)
  end

  @impl true
  def get(refresh_token_id, opts) do
    bucket_type = opts[:bucket_type] || raise "Missing bucket type"
    bucket_name = opts[:bucket_name] || "refresh_token"

    case Riak.find(bucket_type, bucket_name, refresh_token_id) do
      res when not is_nil(res) ->
        # The token was stored as a base64-encoded :erlang.term_to_binary/1
        # blob in the "refresh_token_data_binary" register; reverse that.
        refresh_token =
          res
          |> Riak.CRDT.Map.get(:register, "refresh_token_data_binary")
          |> Base.decode64!(padding: false)
          |> :erlang.binary_to_term()

        Logger.debug(
          "#{__MODULE__}: getting refresh token `#{refresh_token_id}`, " <>
            "value: `#{inspect(refresh_token)}`"
        )

        {:ok, refresh_token}

      nil ->
        Logger.debug(
          "#{__MODULE__}: getting refresh token `#{refresh_token_id}`, " <> "value: `nil`"
        )

        {:ok, nil}
    end
  catch
    :exit, e ->
      {:error, "#{inspect(e)}"}
  end

  @impl true
  def get_from_subject_id(sub, opts) do
    search("sub_register:\"#{String.replace(sub, "\"", "\\\"")}\"", opts)
  end

  @impl true
  def get_from_client_id(client_id, opts) do
    search("client_id_register:\"#{String.replace(client_id, "\"", "\\\"")}\"", opts)
  end

  @impl true
  def get_from_device_id(device_id, opts) do
    search("device_id_register:\"#{String.replace(device_id, "\"", "\\\"")}\"", opts)
  end

  @impl true
  def get_from_authenticated_session_id(as_id, opts) do
    search("authenticated_session_id_register:\"#{String.replace(as_id, "\"", "\\\"")}\"", opts)
  end

  @impl true
  def put(refresh_token, opts) do
    bucket_type = opts[:bucket_type] || raise "Missing bucket type"
    bucket_name = opts[:bucket_name] || "refresh_token"

    riak_map = Riak.CRDT.Map.new()

    # Full token term, serialized and base64-encoded (not indexed by Solr).
    refresh_token_data_binary =
      refresh_token
      |> :erlang.term_to_binary()
      |> Base.encode64(padding: false)
      |> Riak.CRDT.Register.new()

    riak_map = Riak.CRDT.Map.put(riak_map, "refresh_token_data_binary", refresh_token_data_binary)

    # Selected token attributes are mirrored into individual registers so
    # they can be indexed and searched (see search/2).
    riak_map =
      if refresh_token.data["exp"] != nil do
        Riak.CRDT.Map.put(
          riak_map,
          "exp_int",
          Riak.CRDT.Register.new(to_string(refresh_token.data["exp"]))
        )
      else
        Logger.warn(
          "Inserting refresh token with no expiration: #{String.slice(refresh_token.id, 1..5)}..."
        )

        riak_map
      end

    riak_map =
      if refresh_token.data["sub"] != nil do
        Riak.CRDT.Map.put(
          riak_map,
          "sub",
          Riak.CRDT.Register.new(to_string(refresh_token.data["sub"]))
        )
      else
        riak_map
      end

    riak_map =
      if refresh_token.data["client_id"] != nil do
        Riak.CRDT.Map.put(
          riak_map,
          "client_id",
          Riak.CRDT.Register.new(to_string(refresh_token.data["client_id"]))
        )
      else
        riak_map
      end

    riak_map =
      if refresh_token.data["device_id"] != nil do
        Riak.CRDT.Map.put(
          riak_map,
          "device_id",
          Riak.CRDT.Register.new(to_string(refresh_token.data["device_id"]))
        )
      else
        riak_map
      end

    riak_map =
      if refresh_token.data["authenticated_session_id"] != nil do
        Riak.CRDT.Map.put(
          riak_map,
          "authenticated_session_id",
          Riak.CRDT.Register.new(to_string(refresh_token.data["authenticated_session_id"]))
        )
      else
        riak_map
      end

    Riak.update(riak_map, bucket_type, bucket_name, refresh_token.id)

    Logger.debug(
      "#{__MODULE__}: stored refresh token `#{refresh_token.id}`, " <>
        "value: `#{inspect(refresh_token)}`"
    )

    :ok
  catch
    :exit, e ->
      {:error, "#{inspect(e)}"}
  end

  @impl true
  def delete(refresh_token_id, opts) do
    bucket_type = opts[:bucket_type] || raise "Missing bucket type"
    bucket_name = opts[:bucket_name] || "refresh_token"

    Riak.delete(bucket_type, bucket_name, refresh_token_id)

    Logger.debug("#{__MODULE__}: deleted refresh token `#{refresh_token_id}`")

    :ok
  catch
    :exit, e ->
      {:error, "#{inspect(e)}"}
  end

  @doc """
  Searches in Riak-stored refresh tokens
  This function is used internaly and made available for user convenience. Refresh tokens are
  stored in the following fields:
  | Field name | Indexed as |
  |------------------------------------|:-------------:|
  | refresh_token_data_binary_register | *not indexed* |
  | exp_int_register | int |
  | sub_register | string |
  | client_id_register | string |
  | device_id_register | string |
  | authenticated_session_id_register | string |
  Note that you are responsible for escaping values accordingly with Solr escaping.
  ## Example
  ```elixir
  iex(13)> Asteroid.ObjectStore.RefreshToken.Riak.search("sub_register:j* AND exp_int_register:[0 TO #{
    :os.system_time(:seconds)
  }]", opts)
  {:ok, ["7WRQL4EAKW27C5BEFF3JDGXBTA", "WCJBCL7SC2THS7TSRXB2KZH7OQ"]}
  ```
  """
  @spec search(String.t(), Asteroid.ObjectStore.RefreshToken.opts()) ::
          {:ok, [Asteroid.Token.RefreshToken.id()]}
          | {:error, any()}
  def search(search_query, opts) do
    case Riak.Search.query(index_name(), search_query, rows: opts[:rows] || 1_000_000) do
      {:ok, {:search_results, result_list, _, _}} ->
        # "_yz_rk" is the Riak key of each hit, i.e. the refresh token id.
        {:ok,
         for {_index_name, attribute_list} <- result_list do
           :proplists.get_value("_yz_rk", attribute_list)
         end}

      {:error, _} = error ->
        error
    end
  end

  @spec schema_name() :: String.t()
  defp schema_name(), do: "asteroid_object_store_refresh_token_riak_schema"

  @doc false
  @spec index_name() :: String.t()
  def index_name(), do: "asteroid_object_store_refresh_token_riak_index"
end
|
lib/asteroid/object_store/refresh_token/riak.ex
| 0.850608 | 0.709497 |
riak.ex
|
starcoder
|
defmodule Scenic.Primitive.Circle do
  @moduledoc """
  Draw a circle on the screen.
  ## Data
  `radius`
  The data for a circle is a single number.
  * `radius` - the radius of the circle
  ## Styles
  This primitive recognizes the following styles
  * [`hidden`](Scenic.Primitive.Style.Hidden.html) - show or hide the primitive
  * [`fill`](Scenic.Primitive.Style.Fill.html) - fill in the area of the primitive
  * [`stroke`](Scenic.Primitive.Style.Stroke.html) - stroke the outline of the primitive. In this case, only the curvy part.
  ## Usage
  You should add/modify primitives via the helper functions in
  [`Scenic.Primitives`](Scenic.Primitives.html#circle/3)
  ```elixir
  graph
  |> circle( 100, stroke: {1, :yellow} )
  ```
  """

  use Scenic.Primitive

  alias Scenic.Script
  alias Scenic.Primitive
  alias Scenic.Primitive.Style

  @type t :: radius :: number
  @type styles_t :: [:hidden | :scissor | :fill | :stroke_width | :stroke_fill | :cap]

  @styles [:hidden, :scissor, :fill, :stroke_width, :stroke_fill]

  @impl Primitive
  @spec validate(t()) :: {:ok, radius :: number} | {:error, String.t()}
  def validate(radius)
      when is_number(radius) do
    {:ok, radius}
  end

  def validate(data) do
    {
      :error,
      # FIX: the closing color reset was written as `.{IO.ANSI...}` (missing
      # the `#`), so the literal text leaked into the error message instead
      # of interpolating; also "an Circle" -> "a Circle".
      """
      #{IO.ANSI.red()}Invalid Circle specification
      Received: #{inspect(data)}
      #{IO.ANSI.yellow()}
      The data for a Circle is its radius.#{IO.ANSI.default_color()}
      """
    }
  end

  # --------------------------------------------------------
  @doc """
  Returns a list of styles recognized by this primitive.
  """
  @impl Primitive
  @spec valid_styles() :: styles_t()
  def valid_styles(), do: @styles

  # --------------------------------------------------------
  @doc """
  Compile the data for this primitive into a mini script. This can be combined with others to
  generate a larger script and is called when a graph is compiled.
  """
  @spec compile(primitive :: Primitive.t(), styles :: Style.t()) :: Script.t()
  @impl Primitive
  def compile(%Primitive{module: __MODULE__, data: radius}, styles) do
    Script.draw_circle([], radius, Script.draw_flag(styles))
  end

  # ============================================================================
  # data verification and serialization

  # --------------------------------------------------------
  # Point-in-circle test, assuming the circle is centered at the origin.
  def contains_point?(radius, {xp, yp}) do
    # calc the distance squared from the point to the center
    d_sqr = xp * xp + yp * yp
    # test if less or equal to radius squared
    d_sqr <= radius * radius
  end
end
|
lib/scenic/primitive/circle.ex
| 0.938195 | 0.86988 |
circle.ex
|
starcoder
|
defmodule Bounds.Map do
import Bounds.Map.Records
alias Bounds.Map.Impl
defstruct [
size: 0,
priority_seq: 0,
offset: 0,
root: nil
]
# Returns an empty interval map.
def new, do: %__MODULE__{}
@doc false
# Internal: inserts a prebuilt `interval` record as-is.
def insert(%__MODULE__{root: tnode0, size: size0} = bset0, interval() = ival) do
  {tnode1, size1} = Impl.insert({tnode0, size0}, ival)
  %__MODULE__{bset0 | root: tnode1, size: size1}
end

# Inserts `value` under `boundable`, assigning an auto-generated priority
# ([:"$p" | seq]) from the monotonically increasing priority_seq counter,
# so later inserts win priority ties.
def insert(%__MODULE__{root: tnode0, priority_seq: pseq0, size: size0} = bset0, boundable, value) do
  priority = [:"$p" | pseq0]
  {%Bounds{lower: lower, upper: upper}, _} = Coerce.coerce(boundable, %Bounds{})
  {tnode1, size1} = Impl.insert({tnode0, size0}, interval(lower: lower, upper: upper, priority: priority, value: value))
  %__MODULE__{bset0 | root: tnode1, priority_seq: pseq0 + 1, size: size1}
end

# Inserts `value` under `boundable` with an explicit caller-supplied priority.
def insert(%__MODULE__{root: tnode0, size: size0} = bset0, boundable, priority, value) do
  {%Bounds{lower: lower, upper: upper}, _} = Coerce.coerce(boundable, %Bounds{})
  {tnode1, size1} = Impl.insert({tnode0, size0}, interval(lower: lower, upper: upper, priority: priority, value: value))
  %__MODULE__{bset0 | root: tnode1, size: size1}
end
# Query/delete wrappers around do_match/4 (defined further down): `all` and
# `highest` return matching triples, `layer`/`filter` build sub-maps, and
# the delete_* variants remove the matched intervals instead.
def all(%__MODULE__{} = bmap, boundable, selector_name \\ :coincidents), do:
  do_match(bmap, {selector_name, boundable}, :all, :triples)

def highest(%__MODULE__{} = bmap, boundable, selector_name \\ :coincidents), do:
  do_match(bmap, {selector_name, boundable}, :highest, :triples)

def layer(%__MODULE__{} = bmap, z), do:
  do_match(bmap, :all, {:priority, z}, :map)

def filter(%__MODULE__{} = bmap, pred), do:
  do_match(bmap, :all, {:predicate, pred}, :map)

def delete_all(%__MODULE__{} = bmap, boundable, selector_name \\ :coincidents), do:
  do_match(bmap, {selector_name, boundable}, :all, :delete)

def delete_highest(%__MODULE__{} = bmap, boundable, selector_name \\ :coincidents), do:
  do_match(bmap, {selector_name, boundable}, :highest, :delete)

def delete_layer(%__MODULE__{} = bmap, z), do:
  do_match(bmap, :all, {:priority, z}, :delete)
# Streams the bounds of every stored interval. With with_priorities: true
# (the default) each element is a {%Bounds{}, priority} pair; otherwise
# bare %Bounds{} structs are yielded.
def keys(%__MODULE__{root: tnode}, opts \\ []) do
  v_stream = Impl.stream_vertices(tnode)

  if Keyword.get(opts, :with_priorities, true) do
    Stream.map(v_stream, fn interval(lower: lower, upper: upper, priority: priority) ->
      {%Bounds{lower: lower, upper: upper}, priority}
    end)
  else
    Stream.map(v_stream, fn interval(lower: lower, upper: upper) ->
      %Bounds{lower: lower, upper: upper}
    end)
  end
end
def values(%__MODULE__{root: tnode}) do
Impl.stream_vertices(tnode)
|> Stream.map(fn interval(value: value) -> value end)
end
# Streams every interval as a {%Bounds{}, priority, value} triple.
def triples(%__MODULE__{root: tnode}) do
  tnode
  |> Impl.stream_vertices()
  |> Stream.map(fn interval(lower: lo, upper: hi, priority: pri, value: val) ->
    {%Bounds{lower: lo, upper: hi}, pri, val}
  end)
end
# A {bounds, priority, value} triple is a member iff some interval with the
# exact same bounds also carries the same priority and value.
def member?(%__MODULE__{root: tnode}, {%Bounds{lower: lo, upper: hi}, priority, value}) do
  tnode
  |> Impl.coincidents(interval(lower: lo, upper: hi))
  |> Enum.any?(&match?(interval(priority: ^priority, value: ^value), &1))
end

# Anything that is not a well-formed triple cannot be a member.
def member?(%__MODULE__{}, _), do: false
# Returns the smallest single %Bounds{} covering every interval in the map.
# NOTE(review): behavior on an empty tree depends on Impl.min_ival/1 /
# Impl.max_ival/1 — presumably raises; confirm.
def extent(%__MODULE__{root: tnode}) do
  interval(lower: lowest) = Impl.min_ival(tnode)
  interval(upper: highest) = Impl.max_ival(tnode)
  %Bounds{lower: lowest, upper: highest}
end
# Restricts the map to the window `mask_ival`, re-basing coordinates so the
# window's lower bound becomes 0; the shift is accumulated into the result's
# `:offset` field.
def slice(%__MODULE__{root: tnode0, offset: offset0}, interval(lower: mask_lower, upper: mask_upper) = mask_ival) do
  Impl.overlaps(tnode0, mask_ival)
  |> Stream.map(fn interval(lower: shape_lower, upper: shape_upper) = ival ->
    # Clamp each overlapping interval to the mask, then shift it so that
    # mask_lower maps to coordinate 0.
    slice_lower = :erlang.max(mask_lower, shape_lower) - mask_lower
    slice_upper = :erlang.min(mask_upper, shape_upper) - mask_lower
    interval(ival, lower: slice_lower, upper: slice_upper)
  end)
  |> Stream.filter(fn
    # Drop intervals that clamped down to zero width (lower == upper).
    interval(lower: common, upper: common) -> false
    _ -> true
  end)
  |> Enum.into(%__MODULE__{offset: offset0 + mask_lower})
end
# Convenience clause: coerce any boundable to a %Bounds{} and delegate to
# the interval-record clause above.
def slice(%__MODULE__{} = bmap, mask_boundable) do
  {%Bounds{lower: lo, upper: hi}, _} = Coerce.coerce(mask_boundable, %Bounds{})
  slice(bmap, interval(lower: lo, upper: hi))
end
# Drops every interval; all other struct fields are left untouched.
def clear(%__MODULE__{} = bmap0) do
  %__MODULE__{bmap0 | root: nil, size: 0}
end
# Flattens the map to its visible "surface": considering intervals from the
# highest priority downward, keeps only the fragments not already hidden by a
# higher-priority interval.
def surface(%__MODULE__{root: tnode} = bmap0) do
  {bmap, _mask} =
    Impl.stream_vertices(tnode)
    # Sort ascending by {priority, -lower, upper}, then reverse, so the
    # reduce visits the highest priority first.
    |> Enum.sort_by(fn interval(lower: lower, upper: upper, priority: priority) ->
      {priority, -lower, upper}
    end)
    |> Enum.reverse()
    # NOTE(review): the reducer's `bmap0` shadows the outer `bmap0`; the
    # accumulator starts from `clear(bmap0)`, not the original map.
    |> Enum.reduce({clear(bmap0), Bounds.Set.new()}, fn ival, {bmap0, mask0} = acc0 ->
      if Bounds.Set.covers?(mask0, ival) do
        # Entirely hidden by higher-priority intervals already placed.
        acc0
      else
        # Keep only the still-visible fragments of this interval...
        clipped_ival_parts = Bounds.Set.clip(mask0, ival, as: :negative)
        # ...and mark its full extent as occupied for lower priorities.
        mask1 = Bounds.Set.set(mask0, ival)
        # NOTE(review): relies on an `insert/2` clause not visible in this
        # chunk — presumably inserts an interval record directly; confirm.
        bmap1 = Enum.reduce(clipped_ival_parts, bmap0, fn part, acc ->
          insert(acc, part)
        end)
        {bmap1, mask1}
      end
    end)
  bmap
end
## helpers
# Three-stage match pipeline: select candidate intervals, filter them down,
# then reduce to the requested return shape.
def do_match(%__MODULE__{} = bmap, select_part, filter_part, return_part) do
  selected = do_match_select(select_part, bmap)
  filtered = do_match_filter(filter_part, selected)
  do_match_reduce(return_part, filtered, bmap)
end
# Selection stage. `:all` selects every interval in the tree.
defp do_match_select(:all, %__MODULE__{root: tnode}) do
  Impl.stream_vertices(tnode)
end

# A {selector, boundable} pair coerces the boundable to a query interval and
# applies the named tree query.
defp do_match_select({selector_name, boundable}, %__MODULE__{root: tnode}) do
  {%Bounds{lower: lo, upper: hi}, _} = Coerce.coerce(boundable, %Bounds{})
  do_match_select2(selector_name, tnode, interval(lower: lo, upper: hi))
end
# Dispatch table from selector name to the underlying tree query.
defp do_match_select2(:coincidents, tnode, select_ival) do
  Impl.coincidents(tnode, select_ival)
end

defp do_match_select2(:overlaps, tnode, select_ival) do
  Impl.overlaps(tnode, select_ival)
end

defp do_match_select2(:covers, tnode, select_ival) do
  Impl.covers(tnode, select_ival)
end

# Strict subsets: covering intervals minus the exact coincident matches.
defp do_match_select2(:strict_subsets, tnode, select_ival) do
  Impl.covers(tnode, select_ival) -- Impl.coincidents(tnode, select_ival)
end
# Filtering stage: narrows the selected interval set.
defp do_match_filter(:all, result_set), do:
  result_set
# Keep only intervals whose priority is exactly `n`.
defp do_match_filter({:priority, n}, result_set), do:
  Impl.with_priority(result_set, n)
# Keep only the intervals at the highest priority present in the set.
defp do_match_filter(:highest, result_set), do:
  Impl.highest_priority(result_set)
defp do_match_filter({:predicate, pred}, result_set), do:
  Enum.filter(result_set, pred)
# Keep only intervals not covered by another interval in the set: sort by
# {lower asc, upper desc} so wider intervals come first, then insert each
# interval only when nothing already inserted covers it.
defp do_match_filter(:outermost, result_set) do
  %__MODULE__{root: tnode} =
    Enum.sort_by(result_set, fn interval(lower: lower, upper: upper) -> {lower, -upper} end)
    |> Enum.reduce(%__MODULE__{}, fn ival, %__MODULE__{root: tnode} = bmap_acc ->
      # NOTE(review): relies on an `insert/2` clause not visible in this
      # chunk — presumably inserts the interval record as-is; confirm.
      case Impl.covered_by(tnode, ival) do
        [] -> insert(bmap_acc, ival)
        _ -> bmap_acc
      end
    end)
  Impl.stream_vertices(tnode)
end
# Reduce stage: shapes the filtered intervals into the caller's requested form.
defp do_match_reduce(:intervals, result_set, _orig_bmap) do
  result_set
end

defp do_match_reduce(:triples, result_set, _orig_bmap) do
  Enum.map(result_set, fn interval(lower: lo, upper: hi, priority: pri, value: val) ->
    {%Bounds{lower: lo, upper: hi}, pri, val}
  end)
end

defp do_match_reduce(:map, result_set, _orig_bmap) do
  Enum.into(result_set, new())
end

# Deleting an empty match set returns the original map untouched.
defp do_match_reduce(:delete, [], orig_bmap) do
  orig_bmap
end

defp do_match_reduce(:delete, result_set, %__MODULE__{root: root0, size: size0} = orig_bmap) do
  {root1, size1} = Impl.delete_matches({root0, size0}, result_set)
  %__MODULE__{orig_bmap | root: root1, size: size1}
end
end
defimpl Enumerable, for: Bounds.Map do
  alias Bounds.Map, as: BMap

  # Size is tracked on the struct, so counting is O(1).
  def count(%BMap{size: size}), do: {:ok, size}

  def member?(%BMap{} = bmap, triple), do: {:ok, BMap.member?(bmap, triple)}

  # Enumeration yields {%Bounds{}, priority, value} triples.
  def reduce(%BMap{} = bmap, acc, fun), do: Enumerable.reduce(BMap.triples(bmap), acc, fun)

  # No random-access slicing; `{:error, __MODULE__}` tells the protocol to
  # fall back to reduce-based slicing.
  def slice(%BMap{}), do: {:error, __MODULE__}
end
defimpl Collectable, for: Bounds.Map do
  alias Bounds.Map, as: BMap

  def into(%BMap{} = bmap), do: {bmap, &collector/2}

  # Accepts {bounds, priority, value} triples, {bounds, value} pairs
  # (auto-priority), or a single bare term.
  defp collector(acc, cmd)

  defp collector(bmap, {:cont, {bounds, priority, value}}) do
    BMap.insert(bmap, bounds, priority, value)
  end

  defp collector(bmap, {:cont, {bounds, value}}) do
    BMap.insert(bmap, bounds, value)
  end

  # NOTE(review): relies on a BMap.insert/2 clause defined outside this
  # chunk that accepts a bare interval term; confirm.
  defp collector(bmap, {:cont, ival}) do
    BMap.insert(bmap, ival)
  end

  defp collector(bmap, :done), do: bmap

  # On :halt nothing needs rolling back; the partial map is discarded.
  defp collector(_acc, :halt), do: :ok
end
|
lib/bounds/map.ex
| 0.649801 | 0.580501 |
map.ex
|
starcoder
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.