hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
26d226dce233ab9eb1b0b199e3bee698648f08a0 | 130 | class CategoriesController < ApplicationController
# GET /categories/:id — looks up the requested category and exposes its
# products to the view as @products.
def show
  category = Category.find(params[:id])
  @products = category.products
end
end
| 16.25 | 51 | 0.730769 |
110340506d3a5ad65f21200a4b5f1d43fd004c53 | 291 |
module Sprout
  # Rake file task used to model a Sprout tool invocation. Attribute access
  # is provided dynamically via DynamicAccessors.
  class ToolTaskModel < Rake::FileTask
    include DynamicAccessors

    # Define (or fetch) the task described by +args+ and yield it for
    # configuration.
    #
    # @param args [Array] task arguments as accepted by Rake's define_task.
    # @yield [t] the task instance, so callers can configure it.
    # @return [ToolTaskModel] the task itself.
    def self.define_task(args, &block)
      t = super
      yield t if block_given?
      # Fix: return the task object. Previously the method returned the
      # block's return value (or nil), so callers of the +tool_task_model+
      # helper never received the task — Rake's define_task contract is to
      # return the defined task.
      t
    end
  end
end
# Rake DSL helper: declares a Sprout::ToolTaskModel task.
#
# Note: the splatted +args+ are deliberately forwarded as a single Array,
# because Sprout::ToolTaskModel.define_task takes the argument list as one
# positional parameter.
def tool_task_model(*args, &block)
  Sprout::ToolTaskModel.define_task(args, &block)
end
| 14.55 | 49 | 0.670103 |
912add7017d7fc543e7428615df89635cc922b3b | 75 | require "selenium/webdriver"
# Run Capybara specs tagged as JavaScript-enabled in Chrome via Selenium.
Capybara.javascript_driver = :selenium_chrome
| 25 | 45 | 0.853333 |
3900e1d30d6050223a9ab5c3efec59de99ae29c7 | 1,320 | #
# Copyright 2011 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module Backstage
  class Application < Sinatra::Base
    # Declare RESTful routes for queues and topics, rendering views from the
    # `destinations` view directory, with extra pause/resume member actions.
    # NOTE(review): `resource` is a helper defined elsewhere in Backstage —
    # confirm the exact route set against that definition.
    resource :queue, :topic, :view_path => :destinations, :actions => [:pause, :resume]

    # List the messages currently on a queue. Renders HAML for HTML clients,
    # a JSON array of the queue's entries otherwise.
    get "/queue/:name/messages" do
      @destination = Queue.find( Util.decode_name( params[:name] ) )
      if html_requested?
        haml :'messages/index'
      else
        content_type :json
        collection_to_json( @destination.entries )
      end
    end

    # Show a single message, located on the queue by its JMS message id.
    # Queue and message ids arrive URL-encoded and are decoded via Util.
    get "/queue/:name/message/:id" do
      @destination = Queue.find( Util.decode_name( params[:name] ) )
      @object = @destination.find { |m| m.jms_id == Util.decode_name( params[:id] ) }
      if html_requested?
        haml :'messages/show'
      else
        object_to_json( @object )
      end
    end
  end
end
| 29.333333 | 87 | 0.67197 |
28c49715977d27ec96bd60ca594c9ce18b53658c | 163 | json.extract! model, :id, :name, :resolution, :screen, :weight, :dimensions, :batery, :brand_id, :created_at, :updated_at
json.url model_url(model, format: :json)
| 54.333333 | 121 | 0.730061 |
38ec094ccf0a03b4dfa1f60551988ec8c22c2761 | 289 | require_relative "../../integration_test_helper"
require_relative "../../helpers/fixture_flows_helper"
# Base integration test class that installs the fixture flows before each
# test and tears them down afterwards (helpers come from FixtureFlowsHelper).
class EngineIntegrationTest < ActionDispatch::IntegrationTest
  include FixtureFlowsHelper

  setup do
    setup_fixture_flows
  end

  teardown do
    teardown_fixture_flows
  end
end
| 19.266667 | 61 | 0.792388 |
ed602c4c9374c174055108defb7f5b3d20f05aba | 171 | # frozen_string_literal: true
# Renames actions.action_user_id to actions.member_id.
# `rename_column` is reversible, so `change` supports automatic rollback.
class RenameMemberForeignKeys < ActiveRecord::Migration[4.2]
  def change
    rename_column :actions, :action_user_id, :member_id
  end
end
| 21.375 | 60 | 0.783626 |
bbba60a3b5a565fb929295ee301b5a48e9f7c7c6 | 14,025 | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# EDITING INSTRUCTIONS
# This file was generated from the file
# https://github.com/googleapis/googleapis/blob/master/google/ads/google_ads/v0/services/google_ads_service.proto,
# and updates to that file get reflected here through a refresh process.
# For the short term, the refresh process will only be runnable by Google
# engineers.
require "json"
require "pathname"
require "google/gax"
require "google/ads/google_ads/v0/services/google_ads_service_pb"
require "google/ads/google_ads/v0/services/credentials"
module Google
  module Ads
    module GoogleAds
      module V0
        module Services
          # Service to fetch data and metrics across resources.
          #
          # @!attribute [r] google_ads_service_stub
          #   @return [Google::Ads::GoogleAds::V0::Services::GoogleAdsService::Stub]
          class GoogleAdsServiceClient
            attr_reader :google_ads_service_stub

            # The default address of the service.
            SERVICE_ADDRESS = "googleads.googleapis.com".freeze

            # The default port of the service.
            DEFAULT_SERVICE_PORT = 443

            # The default set of gRPC interceptors.
            GRPC_INTERCEPTORS = []

            # Default per-call timeout, in seconds.
            DEFAULT_TIMEOUT = 30

            # Describes how paged `search` responses are traversed by
            # Google::Gax::PagedEnumerable (token field names and the
            # repeated results field).
            PAGE_DESCRIPTORS = {
              "search" => Google::Gax::PageDescriptor.new(
                "page_token",
                "next_page_token",
                "results")
            }.freeze
            private_constant :PAGE_DESCRIPTORS

            # The scopes needed to make gRPC calls to all of the methods defined in
            # this service. (Empty for this service.)
            ALL_SCOPES = [
            ].freeze

            # @param credentials [Google::Auth::Credentials, String, Hash, GRPC::Core::Channel, GRPC::Core::ChannelCredentials, Proc]
            #   Provides the means for authenticating requests made by the client. This parameter can
            #   be many types.
            #   A `Google::Auth::Credentials` uses the properties of its represented keyfile for
            #   authenticating requests made by this client.
            #   A `String` will be treated as the path to the keyfile to be used for the construction of
            #   credentials for this client.
            #   A `Hash` will be treated as the contents of a keyfile to be used for the construction of
            #   credentials for this client.
            #   A `GRPC::Core::Channel` will be used to make calls through.
            #   A `GRPC::Core::ChannelCredentials` for setting up the RPC client. The channel credentials
            #   should already be composed with a `GRPC::Core::CallCredentials` object.
            #   A `Proc` will be used as an updater_proc for the Grpc channel. The proc transforms the
            #   metadata for requests, generally, to give OAuth credentials.
            # @param scopes [Array<String>]
            #   The OAuth scopes for this service. This parameter is ignored if
            #   an updater_proc is supplied.
            # @param client_config [Hash]
            #   A Hash for call options for each method. See
            #   Google::Gax#construct_settings for the structure of
            #   this data. Falls back to the default config if not specified
            #   or the specified config is missing data points.
            # @param timeout [Numeric]
            #   The default timeout, in seconds, for calls made through this client.
            # @param metadata [Hash]
            #   Default metadata to be sent with each request. This can be overridden on a per call basis.
            # @param exception_transformer [Proc]
            #   An optional proc that intercepts any exceptions raised during an API call to inject
            #   custom error handling.
            def initialize \
                credentials: nil,
                scopes: ALL_SCOPES,
                client_config: {},
                timeout: DEFAULT_TIMEOUT,
                metadata: nil,
                exception_transformer: nil,
                lib_name: nil,
                lib_version: ""
              # These require statements are intentionally placed here to initialize
              # the gRPC module only when it's required.
              # See https://github.com/googleapis/toolkit/issues/446
              require "google/gax/grpc"
              require "google/ads/google_ads/v0/services/google_ads_service_services_pb"

              credentials ||= Google::Ads::GoogleAds::V0::Services::Credentials.default

              # Normalize the accepted credential types into the
              # updater_proc / channel / chan_creds forms that
              # Google::Gax::Grpc.create_stub understands below.
              if credentials.is_a?(String) || credentials.is_a?(Hash)
                updater_proc = Google::Ads::GoogleAds::V0::Services::Credentials.new(credentials).updater_proc
              end
              if credentials.is_a?(GRPC::Core::Channel)
                channel = credentials
              end
              if credentials.is_a?(GRPC::Core::ChannelCredentials)
                chan_creds = credentials
              end
              if credentials.is_a?(Proc)
                updater_proc = credentials
              end
              if credentials.is_a?(Google::Auth::Credentials)
                updater_proc = credentials.updater_proc
              end

              # Compose the x-goog-api-client header value identifying the
              # runtime and library versions in use.
              package_version = Gem.loaded_specs['google-ads-googleads'].version.version

              google_api_client = "gl-ruby/#{RUBY_VERSION}"
              google_api_client << " #{lib_name}/#{lib_version}" if lib_name
              google_api_client << " gapic/#{package_version} gax/#{Google::Gax::VERSION}"
              google_api_client << " grpc/#{GRPC::VERSION}"
              google_api_client.freeze

              headers = { :"x-goog-api-client" => google_api_client }
              headers.merge!(metadata) unless metadata.nil?
              # Per-method call settings come from the JSON config shipped
              # alongside this file, merged with the caller's client_config.
              client_config_file = Pathname.new(__dir__).join(
                "google_ads_service_client_config.json"
              )
              defaults = client_config_file.open do |f|
                Google::Gax.construct_settings(
                  "google.ads.googleads.v0.services.GoogleAdsService",
                  JSON.parse(f.read),
                  client_config,
                  Google::Gax::Grpc::STATUS_CODE_NAMES,
                  timeout,
                  page_descriptors: PAGE_DESCRIPTORS,
                  errors: Google::Gax::Grpc::API_ERRORS,
                  metadata: headers
                )
              end

              # Allow overriding the service path/port in subclasses.
              service_path = self.class::SERVICE_ADDRESS
              port = self.class::DEFAULT_SERVICE_PORT
              interceptors = self.class::GRPC_INTERCEPTORS
              @google_ads_service_stub = Google::Gax::Grpc.create_stub(
                service_path,
                port,
                chan_creds: chan_creds,
                channel: channel,
                updater_proc: updater_proc,
                scopes: scopes,
                interceptors: interceptors,
                &Google::Ads::GoogleAds::V0::Services::GoogleAdsService::Stub.method(:new)
              )

              # Wrap each stub method with Gax call machinery (retries,
              # timeouts, paging) using the settings constructed above.
              @search = Google::Gax.create_api_call(
                @google_ads_service_stub.method(:search),
                defaults["search"],
                exception_transformer: exception_transformer
              )
              @mutate = Google::Gax.create_api_call(
                @google_ads_service_stub.method(:mutate),
                defaults["mutate"],
                exception_transformer: exception_transformer
              )
            end

            # Service calls

            # Returns all rows that match the search query.
            #
            # @param customer_id [String]
            #   The ID of the customer being queried.
            # @param query [String]
            #   The query string.
            # @param page_size [Integer]
            #   The maximum number of resources contained in the underlying API
            #   response. If page streaming is performed per-resource, this
            #   parameter does not affect the return value. If page streaming is
            #   performed per-page, this determines the maximum number of
            #   resources in a page.
            # @param validate_only [true, false]
            #   If true, the request is validated but not executed.
            # @param options [Google::Gax::CallOptions]
            #   Overrides the default settings for this call, e.g, timeout,
            #   retries, etc.
            # @yield [result, operation] Access the result along with the RPC operation
            # @yieldparam result [Google::Gax::PagedEnumerable<Google::Ads::GoogleAds::V0::Services::GoogleAdsRow>]
            # @yieldparam operation [GRPC::ActiveCall::Operation]
            # @return [Google::Gax::PagedEnumerable<Google::Ads::GoogleAds::V0::Services::GoogleAdsRow>]
            #   An enumerable of Google::Ads::GoogleAds::V0::Services::GoogleAdsRow instances.
            #   See Google::Gax::PagedEnumerable documentation for other
            #   operations such as per-page iteration or access to the response
            #   object.
            # @raise [Google::Gax::GaxError] if the RPC is aborted.
            # @example
            #   require "google/ads/google_ads"
            #
            #   google_ads_service_client = Google::Ads::GoogleAds::GoogleAds.new(version: :v0)
            #
            #   # TODO: Initialize `customer_id`:
            #   customer_id = ''
            #
            #   # TODO: Initialize `query`:
            #   query = ''
            #
            #   # Iterate over all results.
            #   google_ads_service_client.search(customer_id, query).each do |element|
            #     # Process element.
            #   end
            #
            #   # Or iterate over results one page at a time.
            #   google_ads_service_client.search(customer_id, query).each_page do |page|
            #     # Process each page at a time.
            #     page.each do |element|
            #       # Process element.
            #     end
            #   end
            def search \
                customer_id,
                query,
                page_size: nil,
                validate_only: nil,
                options: nil,
                &block
              # Drop nil-valued fields so protobuf defaults apply.
              req = {
                customer_id: customer_id,
                query: query,
                page_size: page_size,
                validate_only: validate_only
              }.delete_if { |_, v| v.nil? }
              req = Google::Gax::to_proto(req, Google::Ads::GoogleAds::V0::Services::SearchGoogleAdsRequest)
              @search.call(req, options, &block)
            end

            # Creates, updates, or removes resources. Operation statuses are returned.
            #
            # @param customer_id [String]
            #   The ID of the customer whose resources are being modified.
            # @param mutate_operations [Array<Google::Ads::GoogleAds::V0::Services::MutateOperation | Hash>]
            #   The list of operations to perform on individual resources.
            #   A hash of the same form as `Google::Ads::GoogleAds::V0::Services::MutateOperation`
            #   can also be provided.
            # @param partial_failure [true, false]
            #   If true, successful operations will be carried out and invalid
            #   operations will return errors. If false, all operations will be carried
            #   out in one transaction if and only if they are all valid.
            #   Default is false.
            # @param validate_only [true, false]
            #   If true, the request is validated but not executed. Only errors are
            #   returned, not results.
            # @param options [Google::Gax::CallOptions]
            #   Overrides the default settings for this call, e.g, timeout,
            #   retries, etc.
            # @yield [result, operation] Access the result along with the RPC operation
            # @yieldparam result [Google::Ads::GoogleAds::V0::Services::MutateGoogleAdsResponse]
            # @yieldparam operation [GRPC::ActiveCall::Operation]
            # @return [Google::Ads::GoogleAds::V0::Services::MutateGoogleAdsResponse]
            # @raise [Google::Gax::GaxError] if the RPC is aborted.
            # @example
            #   require "google/ads/google_ads"
            #
            #   google_ads_service_client = Google::Ads::GoogleAds::GoogleAds.new(version: :v0)
            #
            #   # TODO: Initialize `customer_id`:
            #   customer_id = ''
            #
            #   # TODO: Initialize `mutate_operations`:
            #   mutate_operations = []
            #   response = google_ads_service_client.mutate(customer_id, mutate_operations)
            def mutate \
                customer_id,
                mutate_operations,
                partial_failure: nil,
                validate_only: nil,
                options: nil,
                &block
              # Drop nil-valued fields so protobuf defaults apply.
              req = {
                customer_id: customer_id,
                mutate_operations: mutate_operations,
                partial_failure: partial_failure,
                validate_only: validate_only
              }.delete_if { |_, v| v.nil? }
              req = Google::Gax::to_proto(req, Google::Ads::GoogleAds::V0::Services::MutateGoogleAdsRequest)
              @mutate.call(req, options, &block)
            end
          end
        end
      end
    end
  end
end
| 45.241935 | 133 | 0.576827 |
4a8a57a8146789d3e9e011c792272fbabc0dee93 | 53 | module EnvisiaContacts
# Gem version string; the ".beta1" suffix marks a pre-release build.
VERSION = "0.1.0.beta1"
end
| 13.25 | 25 | 0.735849 |
bbed6930ce03f861b1944fc3bf7afd0b722a0951 | 37 | module Freee
# Gem version string.
VERSION = "0.3.1"
end
| 9.25 | 19 | 0.648649 |
ff3605893e1f08b37105b52c0f2e7df9875a5a91 | 212 | require 'serverspec'
require 'docker'
require 'spec_init'
# Serverspec examples run against the Docker image whose id is supplied via
# the DOCKERIMAGE_ID environment variable.
describe "Dockerfile" do
  before(:all) do
    set :docker_image, ENV['DOCKERIMAGE_ID']
  end

  # Shared example group ('collection::liquibase') is defined by the
  # spec_init / helper setup required at the top of this file.
  include_examples 'collection::liquibase'
end
| 16.307692 | 48 | 0.707547 |
611f0e2b407d153e02aa923189b271296a7c82f3 | 1,066 | require 'test_helper'
# Functional tests for the standard CRUD actions of MembershipsController.
# Classic Test::Unit-style Rails tests relying on the `memberships` fixtures
# and the (pre-Rails 5) `assigns` helper.
class MembershipsControllerTest < ActionController::TestCase
  def test_should_get_index
    get :index
    assert_response :success
    assert_not_nil assigns(:memberships)
  end

  def test_should_get_new
    get :new
    assert_response :success
  end

  def test_should_create_membership
    # NOTE(review): empty attribute hash implies Membership has no required
    # validations — confirm against the model.
    assert_difference('Membership.count') do
      post :create, :membership => { }
    end
    assert_redirected_to membership_path(assigns(:membership))
  end

  def test_should_show_membership
    get :show, :id => memberships(:one).id
    assert_response :success
  end

  def test_should_get_edit
    get :edit, :id => memberships(:one).id
    assert_response :success
  end

  def test_should_update_membership
    put :update, :id => memberships(:one).id, :membership => { }
    assert_redirected_to membership_path(assigns(:membership))
  end

  def test_should_destroy_membership
    assert_difference('Membership.count', -1) do
      delete :destroy, :id => memberships(:one).id
    end
    assert_redirected_to memberships_path
  end
end
| 23.173913 | 64 | 0.730769 |
33d6076a9acb1db970e3b172b3ee1cc63b3fcbe1 | 151,903 | # frozen_string_literal: true
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
require "google/cloud/errors"
require "google/spanner/admin/database/v1/spanner_database_admin_pb"
module Google
module Cloud
module Spanner
module Admin
module Database
module V1
module DatabaseAdmin
##
# Client for the DatabaseAdmin service.
#
# Cloud Spanner Database Admin API
#
# The Cloud Spanner Database Admin API can be used to:
# * create, drop, and list databases
# * update the schema of pre-existing databases
# * create, delete and list backups for a database
# * restore a database from an existing backup
#
class Client
include Paths
# @private
attr_reader :database_admin_stub
##
# Configure the DatabaseAdmin Client class.
#
# See {::Google::Cloud::Spanner::Admin::Database::V1::DatabaseAdmin::Client::Configuration}
# for a description of the configuration fields.
#
# @example
#
# # Modify the configuration for all DatabaseAdmin clients
# ::Google::Cloud::Spanner::Admin::Database::V1::DatabaseAdmin::Client.configure do |config|
# config.timeout = 10.0
# end
#
# @yield [config] Configure the Client client.
# @yieldparam config [Client::Configuration]
#
# @return [Client::Configuration]
#
def self.configure
@configure ||= begin
namespace = ["Google", "Cloud", "Spanner", "Admin", "Database", "V1"]
parent_config = while namespace.any?
parent_name = namespace.join "::"
parent_const = const_get parent_name
break parent_const.configure if parent_const.respond_to? :configure
namespace.pop
end
default_config = Client::Configuration.new parent_config
default_config.rpcs.list_databases.timeout = 3600.0
default_config.rpcs.list_databases.retry_policy = {
initial_delay: 1.0, max_delay: 32.0, multiplier: 1.3, retry_codes: [14, 4]
}
default_config.rpcs.create_database.timeout = 3600.0
default_config.rpcs.get_database.timeout = 3600.0
default_config.rpcs.get_database.retry_policy = {
initial_delay: 1.0, max_delay: 32.0, multiplier: 1.3, retry_codes: [14, 4]
}
default_config.rpcs.update_database_ddl.timeout = 3600.0
default_config.rpcs.update_database_ddl.retry_policy = {
initial_delay: 1.0, max_delay: 32.0, multiplier: 1.3, retry_codes: [14, 4]
}
default_config.rpcs.drop_database.timeout = 3600.0
default_config.rpcs.drop_database.retry_policy = {
initial_delay: 1.0, max_delay: 32.0, multiplier: 1.3, retry_codes: [14, 4]
}
default_config.rpcs.get_database_ddl.timeout = 3600.0
default_config.rpcs.get_database_ddl.retry_policy = {
initial_delay: 1.0, max_delay: 32.0, multiplier: 1.3, retry_codes: [14, 4]
}
default_config.rpcs.set_iam_policy.timeout = 30.0
default_config.rpcs.get_iam_policy.timeout = 30.0
default_config.rpcs.get_iam_policy.retry_policy = {
initial_delay: 1.0, max_delay: 32.0, multiplier: 1.3, retry_codes: [14, 4]
}
default_config.rpcs.test_iam_permissions.timeout = 30.0
default_config.rpcs.create_backup.timeout = 3600.0
default_config.rpcs.copy_backup.timeout = 3600.0
default_config.rpcs.get_backup.timeout = 3600.0
default_config.rpcs.get_backup.retry_policy = {
initial_delay: 1.0, max_delay: 32.0, multiplier: 1.3, retry_codes: [14, 4]
}
default_config.rpcs.update_backup.timeout = 3600.0
default_config.rpcs.update_backup.retry_policy = {
initial_delay: 1.0, max_delay: 32.0, multiplier: 1.3, retry_codes: [14, 4]
}
default_config.rpcs.delete_backup.timeout = 3600.0
default_config.rpcs.delete_backup.retry_policy = {
initial_delay: 1.0, max_delay: 32.0, multiplier: 1.3, retry_codes: [14, 4]
}
default_config.rpcs.list_backups.timeout = 3600.0
default_config.rpcs.list_backups.retry_policy = {
initial_delay: 1.0, max_delay: 32.0, multiplier: 1.3, retry_codes: [14, 4]
}
default_config.rpcs.restore_database.timeout = 3600.0
default_config.rpcs.list_database_operations.timeout = 3600.0
default_config.rpcs.list_database_operations.retry_policy = {
initial_delay: 1.0, max_delay: 32.0, multiplier: 1.3, retry_codes: [14, 4]
}
default_config.rpcs.list_backup_operations.timeout = 3600.0
default_config.rpcs.list_backup_operations.retry_policy = {
initial_delay: 1.0, max_delay: 32.0, multiplier: 1.3, retry_codes: [14, 4]
}
default_config
end
yield @configure if block_given?
@configure
end
##
# Configure the DatabaseAdmin Client instance.
#
# The configuration is set to the derived mode, meaning that values can be changed,
# but structural changes (adding new fields, etc.) are not allowed. Structural changes
# should be made on {Client.configure}.
#
# See {::Google::Cloud::Spanner::Admin::Database::V1::DatabaseAdmin::Client::Configuration}
# for a description of the configuration fields.
#
# @yield [config] Configure the Client client.
# @yieldparam config [Client::Configuration]
#
# @return [Client::Configuration]
#
def configure
yield @config if block_given?
@config
end
##
# Create a new DatabaseAdmin client object.
#
# @example
#
# # Create a client using the default configuration
# client = ::Google::Cloud::Spanner::Admin::Database::V1::DatabaseAdmin::Client.new
#
# # Create a client using a custom configuration
# client = ::Google::Cloud::Spanner::Admin::Database::V1::DatabaseAdmin::Client.new do |config|
# config.timeout = 10.0
# end
#
# @yield [config] Configure the DatabaseAdmin client.
# @yieldparam config [Client::Configuration]
#
def initialize
# These require statements are intentionally placed here to initialize
# the gRPC module only when it's required.
# See https://github.com/googleapis/toolkit/issues/446
require "gapic/grpc"
require "google/spanner/admin/database/v1/spanner_database_admin_services_pb"
# Create the configuration object
@config = Configuration.new Client.configure
# Yield the configuration if needed
yield @config if block_given?
# Create credentials
credentials = @config.credentials
# Use self-signed JWT if the endpoint is unchanged from default,
# but only if the default endpoint does not have a region prefix.
enable_self_signed_jwt = @config.endpoint == Client.configure.endpoint &&
[email protected](".").first.include?("-")
credentials ||= Credentials.default scope: @config.scope,
enable_self_signed_jwt: enable_self_signed_jwt
if credentials.is_a?(::String) || credentials.is_a?(::Hash)
credentials = Credentials.new credentials, scope: @config.scope
end
@quota_project_id = @config.quota_project
@quota_project_id ||= credentials.quota_project_id if credentials.respond_to? :quota_project_id
@operations_client = Operations.new do |config|
config.credentials = credentials
config.quota_project = @quota_project_id
config.endpoint = @config.endpoint
end
@database_admin_stub = ::Gapic::ServiceStub.new(
::Google::Cloud::Spanner::Admin::Database::V1::DatabaseAdmin::Stub,
credentials: credentials,
endpoint: @config.endpoint,
channel_args: @config.channel_args,
interceptors: @config.interceptors
)
end
##
# Get the associated client for long-running operations.
#
# @return [::Google::Cloud::Spanner::Admin::Database::V1::DatabaseAdmin::Operations]
#
attr_reader :operations_client
# Service calls
##
# Lists Cloud Spanner databases.
#
# @overload list_databases(request, options = nil)
# Pass arguments to `list_databases` via a request object, either of type
# {::Google::Cloud::Spanner::Admin::Database::V1::ListDatabasesRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Spanner::Admin::Database::V1::ListDatabasesRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
#
# @overload list_databases(parent: nil, page_size: nil, page_token: nil)
# Pass arguments to `list_databases` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param parent [::String]
# Required. The instance whose databases should be listed.
# Values are of the form `projects/<project>/instances/<instance>`.
# @param page_size [::Integer]
# Number of databases to be returned in the response. If 0 or less,
# defaults to the server's maximum allowed page size.
# @param page_token [::String]
# If non-empty, `page_token` should contain a
# {::Google::Cloud::Spanner::Admin::Database::V1::ListDatabasesResponse#next_page_token next_page_token} from a
# previous {::Google::Cloud::Spanner::Admin::Database::V1::ListDatabasesResponse ListDatabasesResponse}.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Gapic::PagedEnumerable<::Google::Cloud::Spanner::Admin::Database::V1::Database>]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Gapic::PagedEnumerable<::Google::Cloud::Spanner::Admin::Database::V1::Database>]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
# @example Basic example
# require "google/cloud/spanner/admin/database/v1"
#
# # Create a client object. The client can be reused for multiple calls.
# client = Google::Cloud::Spanner::Admin::Database::V1::DatabaseAdmin::Client.new
#
# # Create a request. To set request fields, pass in keyword arguments.
# request = Google::Cloud::Spanner::Admin::Database::V1::ListDatabasesRequest.new
#
# # Call the list_databases method.
# result = client.list_databases request
#
# # The returned object is of type Gapic::PagedEnumerable. You can
# # iterate over all elements by calling #each, and the enumerable
# # will lazily make API calls to fetch subsequent pages. Other
# # methods are also available for managing paging directly.
# result.each do |response|
# # Each element is of type ::Google::Cloud::Spanner::Admin::Database::V1::Database.
# p response
# end
#
def list_databases request, options = nil
raise ::ArgumentError, "request must be provided" if request.nil?
request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Spanner::Admin::Database::V1::ListDatabasesRequest
# Converts hash and nil to an options object
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
# Customize the options with defaults
metadata = @config.rpcs.list_databases.metadata.to_h
# Set x-goog-api-client and x-goog-user-project headers
metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
lib_name: @config.lib_name, lib_version: @config.lib_version,
gapic_version: ::Google::Cloud::Spanner::Admin::Database::V1::VERSION
metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
header_params = {}
if request.parent
header_params["parent"] = request.parent
end
request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
metadata[:"x-goog-request-params"] ||= request_params_header
options.apply_defaults timeout: @config.rpcs.list_databases.timeout,
metadata: metadata,
retry_policy: @config.rpcs.list_databases.retry_policy
options.apply_defaults timeout: @config.timeout,
metadata: @config.metadata,
retry_policy: @config.retry_policy
@database_admin_stub.call_rpc :list_databases, request, options: options do |response, operation|
response = ::Gapic::PagedEnumerable.new @database_admin_stub, :list_databases, request, response, operation, options
yield response, operation if block_given?
return response
end
rescue ::GRPC::BadStatus => e
raise ::Google::Cloud::Error.from_error(e)
end
##
# Creates a new Cloud Spanner database and starts to prepare it for serving.
# The returned {::Google::Longrunning::Operation long-running operation} will
# have a name of the format `<database_name>/operations/<operation_id>` and
# can be used to track preparation of the database. The
# {::Google::Longrunning::Operation#metadata metadata} field type is
# {::Google::Cloud::Spanner::Admin::Database::V1::CreateDatabaseMetadata CreateDatabaseMetadata}. The
# {::Google::Longrunning::Operation#response response} field type is
# {::Google::Cloud::Spanner::Admin::Database::V1::Database Database}, if successful.
#
# @overload create_database(request, options = nil)
# Pass arguments to `create_database` via a request object, either of type
# {::Google::Cloud::Spanner::Admin::Database::V1::CreateDatabaseRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Spanner::Admin::Database::V1::CreateDatabaseRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
#
# @overload create_database(parent: nil, create_statement: nil, extra_statements: nil, encryption_config: nil, database_dialect: nil)
# Pass arguments to `create_database` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param parent [::String]
# Required. The name of the instance that will serve the new database.
# Values are of the form `projects/<project>/instances/<instance>`.
# @param create_statement [::String]
# Required. A `CREATE DATABASE` statement, which specifies the ID of the
# new database. The database ID must conform to the regular expression
# `[a-z][a-z0-9_\-]*[a-z0-9]` and be between 2 and 30 characters in length.
# If the database ID is a reserved word or if it contains a hyphen, the
# database ID must be enclosed in backticks (`` ` ``).
# @param extra_statements [::Array<::String>]
# Optional. A list of DDL statements to run inside the newly created
# database. Statements can create tables, indexes, etc. These
# statements execute atomically with the creation of the database:
# if there is an error in any statement, the database is not created.
# @param encryption_config [::Google::Cloud::Spanner::Admin::Database::V1::EncryptionConfig, ::Hash]
# Optional. The encryption configuration for the database. If this field is not
# specified, Cloud Spanner will encrypt/decrypt all data at rest using
# Google default encryption.
# @param database_dialect [::Google::Cloud::Spanner::Admin::Database::V1::DatabaseDialect]
# Optional. The dialect of the Cloud Spanner Database.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Gapic::Operation]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Gapic::Operation]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
# @example Basic example
# require "google/cloud/spanner/admin/database/v1"
#
# # Create a client object. The client can be reused for multiple calls.
# client = Google::Cloud::Spanner::Admin::Database::V1::DatabaseAdmin::Client.new
#
# # Create a request. To set request fields, pass in keyword arguments.
# request = Google::Cloud::Spanner::Admin::Database::V1::CreateDatabaseRequest.new
#
# # Call the create_database method.
# result = client.create_database request
#
# # The returned object is of type Gapic::Operation. You can use this
# # object to check the status of an operation, cancel it, or wait
# # for results. Here is how to block until completion:
# result.wait_until_done! timeout: 60
# if result.response?
# p result.response
# else
# puts "Error!"
# end
#
# Starts the CreateDatabase long-running operation and wraps the raw
# longrunning result in a ::Gapic::Operation tied to @operations_client.
def create_database request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Spanner::Admin::Database::V1::CreateDatabaseRequest

  # Converts hash and nil to an options object
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Customize the options with defaults
  metadata = @config.rpcs.create_database.metadata.to_h

  # Set x-goog-api-client and x-goog-user-project headers
  metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Spanner::Admin::Database::V1::VERSION
  metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  # Build the routing header from the request field that drives routing.
  header_params = {}
  header_params["parent"] = request.parent if request.parent

  request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
  # Only attach the routing header when there is something to route on;
  # without this guard an empty string ("" is truthy in Ruby) would be sent.
  metadata[:"x-goog-request-params"] ||= request_params_header unless request_params_header.empty?

  # Per-RPC defaults take precedence over client-wide defaults.
  options.apply_defaults timeout:      @config.rpcs.create_database.timeout,
                         metadata:     metadata,
                         retry_policy: @config.rpcs.create_database.retry_policy

  options.apply_defaults timeout:      @config.timeout,
                         metadata:     @config.metadata,
                         retry_policy: @config.retry_policy

  @database_admin_stub.call_rpc :create_database, request, options: options do |response, operation|
    response = ::Gapic::Operation.new response, @operations_client, options: options
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => e
  raise ::Google::Cloud::Error.from_error(e)
end
##
# Gets the state of a Cloud Spanner database.
#
# @overload get_database(request, options = nil)
# Pass arguments to `get_database` via a request object, either of type
# {::Google::Cloud::Spanner::Admin::Database::V1::GetDatabaseRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Spanner::Admin::Database::V1::GetDatabaseRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
#
# @overload get_database(name: nil)
# Pass arguments to `get_database` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param name [::String]
# Required. The name of the requested database. Values are of the form
# `projects/<project>/instances/<instance>/databases/<database>`.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Google::Cloud::Spanner::Admin::Database::V1::Database]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Google::Cloud::Spanner::Admin::Database::V1::Database]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
# @example Basic example
# require "google/cloud/spanner/admin/database/v1"
#
# # Create a client object. The client can be reused for multiple calls.
# client = Google::Cloud::Spanner::Admin::Database::V1::DatabaseAdmin::Client.new
#
# # Create a request. To set request fields, pass in keyword arguments.
# request = Google::Cloud::Spanner::Admin::Database::V1::GetDatabaseRequest.new
#
# # Call the get_database method.
# result = client.get_database request
#
# # The returned object is of type Google::Cloud::Spanner::Admin::Database::V1::Database.
# p result
#
# Fetches the state of a Cloud Spanner database and returns the
# Database resource directly (no long-running operation involved).
def get_database request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Spanner::Admin::Database::V1::GetDatabaseRequest

  # Converts hash and nil to an options object
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Customize the options with defaults
  metadata = @config.rpcs.get_database.metadata.to_h

  # Set x-goog-api-client and x-goog-user-project headers
  metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Spanner::Admin::Database::V1::VERSION
  metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  # Build the routing header from the request field that drives routing.
  header_params = {}
  header_params["name"] = request.name if request.name

  request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
  # Only attach the routing header when there is something to route on;
  # without this guard an empty string ("" is truthy in Ruby) would be sent.
  metadata[:"x-goog-request-params"] ||= request_params_header unless request_params_header.empty?

  # Per-RPC defaults take precedence over client-wide defaults.
  options.apply_defaults timeout:      @config.rpcs.get_database.timeout,
                         metadata:     metadata,
                         retry_policy: @config.rpcs.get_database.retry_policy

  options.apply_defaults timeout:      @config.timeout,
                         metadata:     @config.metadata,
                         retry_policy: @config.retry_policy

  @database_admin_stub.call_rpc :get_database, request, options: options do |response, operation|
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => e
  raise ::Google::Cloud::Error.from_error(e)
end
##
# Updates the schema of a Cloud Spanner database by
# creating/altering/dropping tables, columns, indexes, etc. The returned
# {::Google::Longrunning::Operation long-running operation} will have a name of
# the format `<database_name>/operations/<operation_id>` and can be used to
# track execution of the schema change(s). The
# {::Google::Longrunning::Operation#metadata metadata} field type is
# {::Google::Cloud::Spanner::Admin::Database::V1::UpdateDatabaseDdlMetadata UpdateDatabaseDdlMetadata}. The operation has no response.
#
# @overload update_database_ddl(request, options = nil)
# Pass arguments to `update_database_ddl` via a request object, either of type
# {::Google::Cloud::Spanner::Admin::Database::V1::UpdateDatabaseDdlRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Spanner::Admin::Database::V1::UpdateDatabaseDdlRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
#
# @overload update_database_ddl(database: nil, statements: nil, operation_id: nil)
# Pass arguments to `update_database_ddl` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param database [::String]
# Required. The database to update.
# @param statements [::Array<::String>]
# Required. DDL statements to be applied to the database.
# @param operation_id [::String]
# If empty, the new update request is assigned an
# automatically-generated operation ID. Otherwise, `operation_id`
# is used to construct the name of the resulting
# {::Google::Longrunning::Operation Operation}.
#
# Specifying an explicit operation ID simplifies determining
# whether the statements were executed in the event that the
# {::Google::Cloud::Spanner::Admin::Database::V1::DatabaseAdmin::Client#update_database_ddl UpdateDatabaseDdl} call is replayed,
# or the return value is otherwise lost: the {::Google::Cloud::Spanner::Admin::Database::V1::UpdateDatabaseDdlRequest#database database} and
# `operation_id` fields can be combined to form the
# {::Google::Longrunning::Operation#name name} of the resulting
# {::Google::Longrunning::Operation longrunning.Operation}: `<database>/operations/<operation_id>`.
#
# `operation_id` should be unique within the database, and must be
# a valid identifier: `[a-z][a-z0-9_]*`. Note that
# automatically-generated operation IDs always begin with an
# underscore. If the named operation already exists,
# {::Google::Cloud::Spanner::Admin::Database::V1::DatabaseAdmin::Client#update_database_ddl UpdateDatabaseDdl} returns
# `ALREADY_EXISTS`.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Gapic::Operation]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Gapic::Operation]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
# @example Basic example
# require "google/cloud/spanner/admin/database/v1"
#
# # Create a client object. The client can be reused for multiple calls.
# client = Google::Cloud::Spanner::Admin::Database::V1::DatabaseAdmin::Client.new
#
# # Create a request. To set request fields, pass in keyword arguments.
# request = Google::Cloud::Spanner::Admin::Database::V1::UpdateDatabaseDdlRequest.new
#
# # Call the update_database_ddl method.
# result = client.update_database_ddl request
#
# # The returned object is of type Gapic::Operation. You can use this
# # object to check the status of an operation, cancel it, or wait
# # for results. Here is how to block until completion:
# result.wait_until_done! timeout: 60
# if result.response?
# p result.response
# else
# puts "Error!"
# end
#
# Starts the UpdateDatabaseDdl long-running operation and wraps the raw
# longrunning result in a ::Gapic::Operation tied to @operations_client.
def update_database_ddl request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Spanner::Admin::Database::V1::UpdateDatabaseDdlRequest

  # Converts hash and nil to an options object
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Customize the options with defaults
  metadata = @config.rpcs.update_database_ddl.metadata.to_h

  # Set x-goog-api-client and x-goog-user-project headers
  metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Spanner::Admin::Database::V1::VERSION
  metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  # Build the routing header from the request field that drives routing.
  header_params = {}
  header_params["database"] = request.database if request.database

  request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
  # Only attach the routing header when there is something to route on;
  # without this guard an empty string ("" is truthy in Ruby) would be sent.
  metadata[:"x-goog-request-params"] ||= request_params_header unless request_params_header.empty?

  # Per-RPC defaults take precedence over client-wide defaults.
  options.apply_defaults timeout:      @config.rpcs.update_database_ddl.timeout,
                         metadata:     metadata,
                         retry_policy: @config.rpcs.update_database_ddl.retry_policy

  options.apply_defaults timeout:      @config.timeout,
                         metadata:     @config.metadata,
                         retry_policy: @config.retry_policy

  @database_admin_stub.call_rpc :update_database_ddl, request, options: options do |response, operation|
    response = ::Gapic::Operation.new response, @operations_client, options: options
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => e
  raise ::Google::Cloud::Error.from_error(e)
end
##
# Drops (aka deletes) a Cloud Spanner database.
# Completed backups for the database will be retained according to their
# `expire_time`.
# Note: Cloud Spanner might continue to accept requests for a few seconds
# after the database has been deleted.
#
# @overload drop_database(request, options = nil)
# Pass arguments to `drop_database` via a request object, either of type
# {::Google::Cloud::Spanner::Admin::Database::V1::DropDatabaseRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Spanner::Admin::Database::V1::DropDatabaseRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
#
# @overload drop_database(database: nil)
# Pass arguments to `drop_database` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param database [::String]
# Required. The database to be dropped.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Google::Protobuf::Empty]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Google::Protobuf::Empty]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
# @example Basic example
# require "google/cloud/spanner/admin/database/v1"
#
# # Create a client object. The client can be reused for multiple calls.
# client = Google::Cloud::Spanner::Admin::Database::V1::DatabaseAdmin::Client.new
#
# # Create a request. To set request fields, pass in keyword arguments.
# request = Google::Cloud::Spanner::Admin::Database::V1::DropDatabaseRequest.new
#
# # Call the drop_database method.
# result = client.drop_database request
#
# # The returned object is of type Google::Protobuf::Empty.
# p result
#
# Drops (deletes) a Cloud Spanner database; returns ::Google::Protobuf::Empty.
def drop_database request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Spanner::Admin::Database::V1::DropDatabaseRequest

  # Converts hash and nil to an options object
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Customize the options with defaults
  metadata = @config.rpcs.drop_database.metadata.to_h

  # Set x-goog-api-client and x-goog-user-project headers
  metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Spanner::Admin::Database::V1::VERSION
  metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  # Build the routing header from the request field that drives routing.
  header_params = {}
  header_params["database"] = request.database if request.database

  request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
  # Only attach the routing header when there is something to route on;
  # without this guard an empty string ("" is truthy in Ruby) would be sent.
  metadata[:"x-goog-request-params"] ||= request_params_header unless request_params_header.empty?

  # Per-RPC defaults take precedence over client-wide defaults.
  options.apply_defaults timeout:      @config.rpcs.drop_database.timeout,
                         metadata:     metadata,
                         retry_policy: @config.rpcs.drop_database.retry_policy

  options.apply_defaults timeout:      @config.timeout,
                         metadata:     @config.metadata,
                         retry_policy: @config.retry_policy

  @database_admin_stub.call_rpc :drop_database, request, options: options do |response, operation|
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => e
  raise ::Google::Cloud::Error.from_error(e)
end
##
# Returns the schema of a Cloud Spanner database as a list of formatted
# DDL statements. This method does not show pending schema updates, those may
# be queried using the Operations API.
#
# @overload get_database_ddl(request, options = nil)
# Pass arguments to `get_database_ddl` via a request object, either of type
# {::Google::Cloud::Spanner::Admin::Database::V1::GetDatabaseDdlRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Spanner::Admin::Database::V1::GetDatabaseDdlRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
#
# @overload get_database_ddl(database: nil)
# Pass arguments to `get_database_ddl` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param database [::String]
# Required. The database whose schema we wish to get.
# Values are of the form
# `projects/<project>/instances/<instance>/databases/<database>`
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Google::Cloud::Spanner::Admin::Database::V1::GetDatabaseDdlResponse]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Google::Cloud::Spanner::Admin::Database::V1::GetDatabaseDdlResponse]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
# @example Basic example
# require "google/cloud/spanner/admin/database/v1"
#
# # Create a client object. The client can be reused for multiple calls.
# client = Google::Cloud::Spanner::Admin::Database::V1::DatabaseAdmin::Client.new
#
# # Create a request. To set request fields, pass in keyword arguments.
# request = Google::Cloud::Spanner::Admin::Database::V1::GetDatabaseDdlRequest.new
#
# # Call the get_database_ddl method.
# result = client.get_database_ddl request
#
# # The returned object is of type Google::Cloud::Spanner::Admin::Database::V1::GetDatabaseDdlResponse.
# p result
#
# Returns the schema of a database as formatted DDL statements
# (pending schema updates are not included).
def get_database_ddl request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Spanner::Admin::Database::V1::GetDatabaseDdlRequest

  # Converts hash and nil to an options object
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Customize the options with defaults
  metadata = @config.rpcs.get_database_ddl.metadata.to_h

  # Set x-goog-api-client and x-goog-user-project headers
  metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Spanner::Admin::Database::V1::VERSION
  metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  # Build the routing header from the request field that drives routing.
  header_params = {}
  header_params["database"] = request.database if request.database

  request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
  # Only attach the routing header when there is something to route on;
  # without this guard an empty string ("" is truthy in Ruby) would be sent.
  metadata[:"x-goog-request-params"] ||= request_params_header unless request_params_header.empty?

  # Per-RPC defaults take precedence over client-wide defaults.
  options.apply_defaults timeout:      @config.rpcs.get_database_ddl.timeout,
                         metadata:     metadata,
                         retry_policy: @config.rpcs.get_database_ddl.retry_policy

  options.apply_defaults timeout:      @config.timeout,
                         metadata:     @config.metadata,
                         retry_policy: @config.retry_policy

  @database_admin_stub.call_rpc :get_database_ddl, request, options: options do |response, operation|
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => e
  raise ::Google::Cloud::Error.from_error(e)
end
##
# Sets the access control policy on a database or backup resource.
# Replaces any existing policy.
#
# Authorization requires `spanner.databases.setIamPolicy`
# permission on {::Google::Iam::V1::SetIamPolicyRequest#resource resource}.
# For backups, authorization requires `spanner.backups.setIamPolicy`
# permission on {::Google::Iam::V1::SetIamPolicyRequest#resource resource}.
#
# @overload set_iam_policy(request, options = nil)
# Pass arguments to `set_iam_policy` via a request object, either of type
# {::Google::Iam::V1::SetIamPolicyRequest} or an equivalent Hash.
#
# @param request [::Google::Iam::V1::SetIamPolicyRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
#
# @overload set_iam_policy(resource: nil, policy: nil, update_mask: nil)
# Pass arguments to `set_iam_policy` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param resource [::String]
# REQUIRED: The resource for which the policy is being specified.
# See the operation documentation for the appropriate value for this field.
# @param policy [::Google::Iam::V1::Policy, ::Hash]
# REQUIRED: The complete policy to be applied to the `resource`. The size of
# the policy is limited to a few 10s of KB. An empty policy is a
# valid policy but certain Cloud Platform services (such as Projects)
# might reject them.
# @param update_mask [::Google::Protobuf::FieldMask, ::Hash]
# OPTIONAL: A FieldMask specifying which fields of the policy to modify. Only
# the fields in the mask will be modified. If no mask is provided, the
# following default mask is used:
#
# `paths: "bindings, etag"`
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Google::Iam::V1::Policy]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Google::Iam::V1::Policy]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
# @example Basic example
# require "google/cloud/spanner/admin/database/v1"
#
# # Create a client object. The client can be reused for multiple calls.
# client = Google::Cloud::Spanner::Admin::Database::V1::DatabaseAdmin::Client.new
#
# # Create a request. To set request fields, pass in keyword arguments.
# request = Google::Iam::V1::SetIamPolicyRequest.new
#
# # Call the set_iam_policy method.
# result = client.set_iam_policy request
#
# # The returned object is of type Google::Iam::V1::Policy.
# p result
#
# Replaces the IAM access control policy on a database or backup resource.
def set_iam_policy request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  request = ::Gapic::Protobuf.coerce request, to: ::Google::Iam::V1::SetIamPolicyRequest

  # Converts hash and nil to an options object
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Customize the options with defaults
  metadata = @config.rpcs.set_iam_policy.metadata.to_h

  # Set x-goog-api-client and x-goog-user-project headers
  metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Spanner::Admin::Database::V1::VERSION
  metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  # Build the routing header from the request field that drives routing.
  header_params = {}
  header_params["resource"] = request.resource if request.resource

  request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
  # Only attach the routing header when there is something to route on;
  # without this guard an empty string ("" is truthy in Ruby) would be sent.
  metadata[:"x-goog-request-params"] ||= request_params_header unless request_params_header.empty?

  # Per-RPC defaults take precedence over client-wide defaults.
  options.apply_defaults timeout:      @config.rpcs.set_iam_policy.timeout,
                         metadata:     metadata,
                         retry_policy: @config.rpcs.set_iam_policy.retry_policy

  options.apply_defaults timeout:      @config.timeout,
                         metadata:     @config.metadata,
                         retry_policy: @config.retry_policy

  @database_admin_stub.call_rpc :set_iam_policy, request, options: options do |response, operation|
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => e
  raise ::Google::Cloud::Error.from_error(e)
end
##
# Gets the access control policy for a database or backup resource.
# Returns an empty policy if a database or backup exists but does not have a
# policy set.
#
# Authorization requires `spanner.databases.getIamPolicy` permission on
# {::Google::Iam::V1::GetIamPolicyRequest#resource resource}.
# For backups, authorization requires `spanner.backups.getIamPolicy`
# permission on {::Google::Iam::V1::GetIamPolicyRequest#resource resource}.
#
# @overload get_iam_policy(request, options = nil)
# Pass arguments to `get_iam_policy` via a request object, either of type
# {::Google::Iam::V1::GetIamPolicyRequest} or an equivalent Hash.
#
# @param request [::Google::Iam::V1::GetIamPolicyRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
#
# @overload get_iam_policy(resource: nil, options: nil)
# Pass arguments to `get_iam_policy` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param resource [::String]
# REQUIRED: The resource for which the policy is being requested.
# See the operation documentation for the appropriate value for this field.
# @param options [::Google::Iam::V1::GetPolicyOptions, ::Hash]
# OPTIONAL: A `GetPolicyOptions` object for specifying options to
# `GetIamPolicy`.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Google::Iam::V1::Policy]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Google::Iam::V1::Policy]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
# @example Basic example
# require "google/cloud/spanner/admin/database/v1"
#
# # Create a client object. The client can be reused for multiple calls.
# client = Google::Cloud::Spanner::Admin::Database::V1::DatabaseAdmin::Client.new
#
# # Create a request. To set request fields, pass in keyword arguments.
# request = Google::Iam::V1::GetIamPolicyRequest.new
#
# # Call the get_iam_policy method.
# result = client.get_iam_policy request
#
# # The returned object is of type Google::Iam::V1::Policy.
# p result
#
# Gets the IAM access control policy for a database or backup resource.
def get_iam_policy request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  request = ::Gapic::Protobuf.coerce request, to: ::Google::Iam::V1::GetIamPolicyRequest

  # Converts hash and nil to an options object
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Customize the options with defaults
  metadata = @config.rpcs.get_iam_policy.metadata.to_h

  # Set x-goog-api-client and x-goog-user-project headers
  metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Spanner::Admin::Database::V1::VERSION
  metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  # Build the routing header from the request field that drives routing.
  header_params = {}
  header_params["resource"] = request.resource if request.resource

  request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
  # Only attach the routing header when there is something to route on;
  # without this guard an empty string ("" is truthy in Ruby) would be sent.
  metadata[:"x-goog-request-params"] ||= request_params_header unless request_params_header.empty?

  # Per-RPC defaults take precedence over client-wide defaults.
  options.apply_defaults timeout:      @config.rpcs.get_iam_policy.timeout,
                         metadata:     metadata,
                         retry_policy: @config.rpcs.get_iam_policy.retry_policy

  options.apply_defaults timeout:      @config.timeout,
                         metadata:     @config.metadata,
                         retry_policy: @config.retry_policy

  @database_admin_stub.call_rpc :get_iam_policy, request, options: options do |response, operation|
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => e
  raise ::Google::Cloud::Error.from_error(e)
end
##
# Returns permissions that the caller has on the specified database or backup
# resource.
#
# Attempting this RPC on a non-existent Cloud Spanner database will
# result in a NOT_FOUND error if the user has
# `spanner.databases.list` permission on the containing Cloud
# Spanner instance. Otherwise returns an empty set of permissions.
# Calling this method on a backup that does not exist will
# result in a NOT_FOUND error if the user has
# `spanner.backups.list` permission on the containing instance.
#
# @overload test_iam_permissions(request, options = nil)
# Pass arguments to `test_iam_permissions` via a request object, either of type
# {::Google::Iam::V1::TestIamPermissionsRequest} or an equivalent Hash.
#
# @param request [::Google::Iam::V1::TestIamPermissionsRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
#
# @overload test_iam_permissions(resource: nil, permissions: nil)
# Pass arguments to `test_iam_permissions` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param resource [::String]
# REQUIRED: The resource for which the policy detail is being requested.
# See the operation documentation for the appropriate value for this field.
# @param permissions [::Array<::String>]
# The set of permissions to check for the `resource`. Permissions with
# wildcards (such as '*' or 'storage.*') are not allowed. For more
# information see
# [IAM Overview](https://cloud.google.com/iam/docs/overview#permissions).
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Google::Iam::V1::TestIamPermissionsResponse]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Google::Iam::V1::TestIamPermissionsResponse]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
# @example Basic example
# require "google/cloud/spanner/admin/database/v1"
#
# # Create a client object. The client can be reused for multiple calls.
# client = Google::Cloud::Spanner::Admin::Database::V1::DatabaseAdmin::Client.new
#
# # Create a request. To set request fields, pass in keyword arguments.
# request = Google::Iam::V1::TestIamPermissionsRequest.new
#
# # Call the test_iam_permissions method.
# result = client.test_iam_permissions request
#
# # The returned object is of type Google::Iam::V1::TestIamPermissionsResponse.
# p result
#
# Returns the subset of the requested permissions that the caller holds
# on the specified database or backup resource.
def test_iam_permissions request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  request = ::Gapic::Protobuf.coerce request, to: ::Google::Iam::V1::TestIamPermissionsRequest

  # Converts hash and nil to an options object
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Customize the options with defaults
  metadata = @config.rpcs.test_iam_permissions.metadata.to_h

  # Set x-goog-api-client and x-goog-user-project headers
  metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Spanner::Admin::Database::V1::VERSION
  metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  # Build the routing header from the request field that drives routing.
  header_params = {}
  header_params["resource"] = request.resource if request.resource

  request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
  # Only attach the routing header when there is something to route on;
  # without this guard an empty string ("" is truthy in Ruby) would be sent.
  metadata[:"x-goog-request-params"] ||= request_params_header unless request_params_header.empty?

  # Per-RPC defaults take precedence over client-wide defaults.
  options.apply_defaults timeout:      @config.rpcs.test_iam_permissions.timeout,
                         metadata:     metadata,
                         retry_policy: @config.rpcs.test_iam_permissions.retry_policy

  options.apply_defaults timeout:      @config.timeout,
                         metadata:     @config.metadata,
                         retry_policy: @config.retry_policy

  @database_admin_stub.call_rpc :test_iam_permissions, request, options: options do |response, operation|
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => e
  raise ::Google::Cloud::Error.from_error(e)
end
##
# Starts creating a new Cloud Spanner Backup.
# The returned backup {::Google::Longrunning::Operation long-running operation}
# will have a name of the format
# `projects/<project>/instances/<instance>/backups/<backup>/operations/<operation_id>`
# and can be used to track creation of the backup. The
# {::Google::Longrunning::Operation#metadata metadata} field type is
# {::Google::Cloud::Spanner::Admin::Database::V1::CreateBackupMetadata CreateBackupMetadata}. The
# {::Google::Longrunning::Operation#response response} field type is
# {::Google::Cloud::Spanner::Admin::Database::V1::Backup Backup}, if successful. Cancelling the returned operation will stop the
# creation and delete the backup.
# There can be only one pending backup creation per database. Backup creation
# of different databases can run concurrently.
#
# @overload create_backup(request, options = nil)
# Pass arguments to `create_backup` via a request object, either of type
# {::Google::Cloud::Spanner::Admin::Database::V1::CreateBackupRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Spanner::Admin::Database::V1::CreateBackupRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
#
# @overload create_backup(parent: nil, backup_id: nil, backup: nil, encryption_config: nil)
# Pass arguments to `create_backup` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param parent [::String]
# Required. The name of the instance in which the backup will be
# created. This must be the same instance that contains the database the
# backup will be created from. The backup will be stored in the
# location(s) specified in the instance configuration of this
# instance. Values are of the form
# `projects/<project>/instances/<instance>`.
# @param backup_id [::String]
# Required. The id of the backup to be created. The `backup_id` appended to
# `parent` forms the full backup name of the form
# `projects/<project>/instances/<instance>/backups/<backup_id>`.
# @param backup [::Google::Cloud::Spanner::Admin::Database::V1::Backup, ::Hash]
# Required. The backup to create.
# @param encryption_config [::Google::Cloud::Spanner::Admin::Database::V1::CreateBackupEncryptionConfig, ::Hash]
# Optional. The encryption configuration used to encrypt the backup. If this field is
# not specified, the backup will use the same
# encryption configuration as the database by default, namely
# {::Google::Cloud::Spanner::Admin::Database::V1::CreateBackupEncryptionConfig#encryption_type encryption_type} =
# `USE_DATABASE_ENCRYPTION`.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Gapic::Operation]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Gapic::Operation]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
# @example Basic example
# require "google/cloud/spanner/admin/database/v1"
#
# # Create a client object. The client can be reused for multiple calls.
# client = Google::Cloud::Spanner::Admin::Database::V1::DatabaseAdmin::Client.new
#
# # Create a request. To set request fields, pass in keyword arguments.
# request = Google::Cloud::Spanner::Admin::Database::V1::CreateBackupRequest.new
#
# # Call the create_backup method.
# result = client.create_backup request
#
# # The returned object is of type Gapic::Operation. You can use this
# # object to check the status of an operation, cancel it, or wait
# # for results. Here is how to block until completion:
# result.wait_until_done! timeout: 60
# if result.response?
# p result.response
# else
# puts "Error!"
# end
#
def create_backup request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  # Normalize the request into the expected protobuf message type.
  request = ::Gapic::Protobuf.coerce request,
                                     to: ::Google::Cloud::Spanner::Admin::Database::V1::CreateBackupRequest

  # Accept a Hash (or nil) in place of a CallOptions object.
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Start from the per-RPC default metadata, then attach the standard
  # x-goog-api-client and x-goog-user-project headers.
  call_metadata = @config.rpcs.create_backup.metadata.to_h
  call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Spanner::Admin::Database::V1::VERSION
  call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  # Build the implicit routing header from the resource name field(s).
  routing_params = {}
  routing_params["parent"] = request.parent if request.parent
  routing_header = routing_params.map { |field, value| "#{field}=#{value}" }.join "&"
  call_metadata[:"x-goog-request-params"] ||= routing_header

  # Layer per-RPC defaults first, then client-wide defaults.
  options.apply_defaults timeout: @config.rpcs.create_backup.timeout,
                         metadata: call_metadata,
                         retry_policy: @config.rpcs.create_backup.retry_policy
  options.apply_defaults timeout: @config.timeout,
                         metadata: @config.metadata,
                         retry_policy: @config.retry_policy

  # Issue the RPC and wrap the raw long-running operation for the caller.
  @database_admin_stub.call_rpc :create_backup, request, options: options do |raw_response, grpc_operation|
    wrapped = ::Gapic::Operation.new raw_response, @operations_client, options: options
    yield wrapped, grpc_operation if block_given?
    return wrapped
  end
rescue ::GRPC::BadStatus => grpc_error
  raise ::Google::Cloud::Error.from_error(grpc_error)
end
##
# Starts copying a Cloud Spanner Backup.
# The returned backup {::Google::Longrunning::Operation long-running operation}
# will have a name of the format
# `projects/<project>/instances/<instance>/backups/<backup>/operations/<operation_id>`
# and can be used to track copying of the backup. The operation is associated
# with the destination backup.
# The {::Google::Longrunning::Operation#metadata metadata} field type is
# {::Google::Cloud::Spanner::Admin::Database::V1::CopyBackupMetadata CopyBackupMetadata}.
# The {::Google::Longrunning::Operation#response response} field type is
# {::Google::Cloud::Spanner::Admin::Database::V1::Backup Backup}, if successful. Cancelling the returned operation will stop the
# copying and delete the backup.
# Concurrent CopyBackup requests can run on the same source backup.
#
# @overload copy_backup(request, options = nil)
# Pass arguments to `copy_backup` via a request object, either of type
# {::Google::Cloud::Spanner::Admin::Database::V1::CopyBackupRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Spanner::Admin::Database::V1::CopyBackupRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
#
# @overload copy_backup(parent: nil, backup_id: nil, source_backup: nil, expire_time: nil, encryption_config: nil)
# Pass arguments to `copy_backup` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param parent [::String]
# Required. The name of the destination instance that will contain the backup copy.
# Values are of the form: `projects/<project>/instances/<instance>`.
# @param backup_id [::String]
# Required. The id of the backup copy.
# The `backup_id` appended to `parent` forms the full backup_uri of the form
# `projects/<project>/instances/<instance>/backups/<backup>`.
# @param source_backup [::String]
# Required. The source backup to be copied.
# The source backup needs to be in READY state for it to be copied.
# Once CopyBackup is in progress, the source backup cannot be deleted or
# cleaned up on expiration until CopyBackup is finished.
# Values are of the form:
# `projects/<project>/instances/<instance>/backups/<backup>`.
# @param expire_time [::Google::Protobuf::Timestamp, ::Hash]
# Required. The expiration time of the backup in microsecond granularity.
# The expiration time must be at least 6 hours and at most 366 days
# from the `create_time` of the source backup. Once the `expire_time` has
# passed, the backup is eligible to be automatically deleted by Cloud Spanner
# to free the resources used by the backup.
# @param encryption_config [::Google::Cloud::Spanner::Admin::Database::V1::CopyBackupEncryptionConfig, ::Hash]
# Optional. The encryption configuration used to encrypt the backup. If this field is
# not specified, the backup will use the same
# encryption configuration as the source backup by default, namely
# {::Google::Cloud::Spanner::Admin::Database::V1::CopyBackupEncryptionConfig#encryption_type encryption_type} =
# `USE_CONFIG_DEFAULT_OR_BACKUP_ENCRYPTION`.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Gapic::Operation]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Gapic::Operation]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
# @example Basic example
# require "google/cloud/spanner/admin/database/v1"
#
# # Create a client object. The client can be reused for multiple calls.
# client = Google::Cloud::Spanner::Admin::Database::V1::DatabaseAdmin::Client.new
#
# # Create a request. To set request fields, pass in keyword arguments.
# request = Google::Cloud::Spanner::Admin::Database::V1::CopyBackupRequest.new
#
# # Call the copy_backup method.
# result = client.copy_backup request
#
# # The returned object is of type Gapic::Operation. You can use this
# # object to check the status of an operation, cancel it, or wait
# # for results. Here is how to block until completion:
# result.wait_until_done! timeout: 60
# if result.response?
# p result.response
# else
# puts "Error!"
# end
#
def copy_backup request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  # Normalize the request into the expected protobuf message type.
  request = ::Gapic::Protobuf.coerce request,
                                     to: ::Google::Cloud::Spanner::Admin::Database::V1::CopyBackupRequest

  # Accept a Hash (or nil) in place of a CallOptions object.
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Start from the per-RPC default metadata, then attach the standard
  # x-goog-api-client and x-goog-user-project headers.
  call_metadata = @config.rpcs.copy_backup.metadata.to_h
  call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Spanner::Admin::Database::V1::VERSION
  call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  # Build the implicit routing header from the resource name field(s).
  routing_params = {}
  routing_params["parent"] = request.parent if request.parent
  routing_header = routing_params.map { |field, value| "#{field}=#{value}" }.join "&"
  call_metadata[:"x-goog-request-params"] ||= routing_header

  # Layer per-RPC defaults first, then client-wide defaults.
  options.apply_defaults timeout: @config.rpcs.copy_backup.timeout,
                         metadata: call_metadata,
                         retry_policy: @config.rpcs.copy_backup.retry_policy
  options.apply_defaults timeout: @config.timeout,
                         metadata: @config.metadata,
                         retry_policy: @config.retry_policy

  # Issue the RPC and wrap the raw long-running operation for the caller.
  @database_admin_stub.call_rpc :copy_backup, request, options: options do |raw_response, grpc_operation|
    wrapped = ::Gapic::Operation.new raw_response, @operations_client, options: options
    yield wrapped, grpc_operation if block_given?
    return wrapped
  end
rescue ::GRPC::BadStatus => grpc_error
  raise ::Google::Cloud::Error.from_error(grpc_error)
end
##
# Gets metadata on a pending or completed {::Google::Cloud::Spanner::Admin::Database::V1::Backup Backup}.
#
# @overload get_backup(request, options = nil)
# Pass arguments to `get_backup` via a request object, either of type
# {::Google::Cloud::Spanner::Admin::Database::V1::GetBackupRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Spanner::Admin::Database::V1::GetBackupRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
#
# @overload get_backup(name: nil)
# Pass arguments to `get_backup` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param name [::String]
# Required. Name of the backup.
# Values are of the form
# `projects/<project>/instances/<instance>/backups/<backup>`.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Google::Cloud::Spanner::Admin::Database::V1::Backup]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Google::Cloud::Spanner::Admin::Database::V1::Backup]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
# @example Basic example
# require "google/cloud/spanner/admin/database/v1"
#
# # Create a client object. The client can be reused for multiple calls.
# client = Google::Cloud::Spanner::Admin::Database::V1::DatabaseAdmin::Client.new
#
# # Create a request. To set request fields, pass in keyword arguments.
# request = Google::Cloud::Spanner::Admin::Database::V1::GetBackupRequest.new
#
# # Call the get_backup method.
# result = client.get_backup request
#
# # The returned object is of type Google::Cloud::Spanner::Admin::Database::V1::Backup.
# p result
#
def get_backup request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  # Normalize the request into the expected protobuf message type.
  request = ::Gapic::Protobuf.coerce request,
                                     to: ::Google::Cloud::Spanner::Admin::Database::V1::GetBackupRequest

  # Accept a Hash (or nil) in place of a CallOptions object.
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Start from the per-RPC default metadata, then attach the standard
  # x-goog-api-client and x-goog-user-project headers.
  call_metadata = @config.rpcs.get_backup.metadata.to_h
  call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Spanner::Admin::Database::V1::VERSION
  call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  # Build the implicit routing header from the resource name field(s).
  routing_params = {}
  routing_params["name"] = request.name if request.name
  routing_header = routing_params.map { |field, value| "#{field}=#{value}" }.join "&"
  call_metadata[:"x-goog-request-params"] ||= routing_header

  # Layer per-RPC defaults first, then client-wide defaults.
  options.apply_defaults timeout: @config.rpcs.get_backup.timeout,
                         metadata: call_metadata,
                         retry_policy: @config.rpcs.get_backup.retry_policy
  options.apply_defaults timeout: @config.timeout,
                         metadata: @config.metadata,
                         retry_policy: @config.retry_policy

  # Issue the RPC; the response is the Backup resource itself.
  @database_admin_stub.call_rpc :get_backup, request, options: options do |backup, grpc_operation|
    yield backup, grpc_operation if block_given?
    return backup
  end
rescue ::GRPC::BadStatus => grpc_error
  raise ::Google::Cloud::Error.from_error(grpc_error)
end
##
# Updates a pending or completed {::Google::Cloud::Spanner::Admin::Database::V1::Backup Backup}.
#
# @overload update_backup(request, options = nil)
# Pass arguments to `update_backup` via a request object, either of type
# {::Google::Cloud::Spanner::Admin::Database::V1::UpdateBackupRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Spanner::Admin::Database::V1::UpdateBackupRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
#
# @overload update_backup(backup: nil, update_mask: nil)
# Pass arguments to `update_backup` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param backup [::Google::Cloud::Spanner::Admin::Database::V1::Backup, ::Hash]
# Required. The backup to update. `backup.name`, and the fields to be updated
# as specified by `update_mask` are required. Other fields are ignored.
# Update is only supported for the following fields:
# * `backup.expire_time`.
# @param update_mask [::Google::Protobuf::FieldMask, ::Hash]
# Required. A mask specifying which fields (e.g. `expire_time`) in the
# Backup resource should be updated. This mask is relative to the Backup
# resource, not to the request message. The field mask must always be
# specified; this prevents any future fields from being erased accidentally
# by clients that do not know about them.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Google::Cloud::Spanner::Admin::Database::V1::Backup]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Google::Cloud::Spanner::Admin::Database::V1::Backup]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
# @example Basic example
# require "google/cloud/spanner/admin/database/v1"
#
# # Create a client object. The client can be reused for multiple calls.
# client = Google::Cloud::Spanner::Admin::Database::V1::DatabaseAdmin::Client.new
#
# # Create a request. To set request fields, pass in keyword arguments.
# request = Google::Cloud::Spanner::Admin::Database::V1::UpdateBackupRequest.new
#
# # Call the update_backup method.
# result = client.update_backup request
#
# # The returned object is of type Google::Cloud::Spanner::Admin::Database::V1::Backup.
# p result
#
def update_backup request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  # Normalize the request into the expected protobuf message type.
  request = ::Gapic::Protobuf.coerce request,
                                     to: ::Google::Cloud::Spanner::Admin::Database::V1::UpdateBackupRequest

  # Accept a Hash (or nil) in place of a CallOptions object.
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Start from the per-RPC default metadata, then attach the standard
  # x-goog-api-client and x-goog-user-project headers.
  call_metadata = @config.rpcs.update_backup.metadata.to_h
  call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Spanner::Admin::Database::V1::VERSION
  call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  # Build the implicit routing header; the routing field is nested, so use
  # safe navigation in case `backup` itself is unset.
  routing_params = {}
  backup_name = request.backup&.name
  routing_params["backup.name"] = backup_name if backup_name
  routing_header = routing_params.map { |field, value| "#{field}=#{value}" }.join "&"
  call_metadata[:"x-goog-request-params"] ||= routing_header

  # Layer per-RPC defaults first, then client-wide defaults.
  options.apply_defaults timeout: @config.rpcs.update_backup.timeout,
                         metadata: call_metadata,
                         retry_policy: @config.rpcs.update_backup.retry_policy
  options.apply_defaults timeout: @config.timeout,
                         metadata: @config.metadata,
                         retry_policy: @config.retry_policy

  # Issue the RPC; the response is the updated Backup resource.
  @database_admin_stub.call_rpc :update_backup, request, options: options do |backup, grpc_operation|
    yield backup, grpc_operation if block_given?
    return backup
  end
rescue ::GRPC::BadStatus => grpc_error
  raise ::Google::Cloud::Error.from_error(grpc_error)
end
##
# Deletes a pending or completed {::Google::Cloud::Spanner::Admin::Database::V1::Backup Backup}.
#
# @overload delete_backup(request, options = nil)
# Pass arguments to `delete_backup` via a request object, either of type
# {::Google::Cloud::Spanner::Admin::Database::V1::DeleteBackupRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Spanner::Admin::Database::V1::DeleteBackupRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
#
# @overload delete_backup(name: nil)
# Pass arguments to `delete_backup` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param name [::String]
# Required. Name of the backup to delete.
# Values are of the form
# `projects/<project>/instances/<instance>/backups/<backup>`.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Google::Protobuf::Empty]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Google::Protobuf::Empty]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
# @example Basic example
# require "google/cloud/spanner/admin/database/v1"
#
# # Create a client object. The client can be reused for multiple calls.
# client = Google::Cloud::Spanner::Admin::Database::V1::DatabaseAdmin::Client.new
#
# # Create a request. To set request fields, pass in keyword arguments.
# request = Google::Cloud::Spanner::Admin::Database::V1::DeleteBackupRequest.new
#
# # Call the delete_backup method.
# result = client.delete_backup request
#
# # The returned object is of type Google::Protobuf::Empty.
# p result
#
def delete_backup request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  # Normalize the request into the expected protobuf message type.
  request = ::Gapic::Protobuf.coerce request,
                                     to: ::Google::Cloud::Spanner::Admin::Database::V1::DeleteBackupRequest

  # Accept a Hash (or nil) in place of a CallOptions object.
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Start from the per-RPC default metadata, then attach the standard
  # x-goog-api-client and x-goog-user-project headers.
  call_metadata = @config.rpcs.delete_backup.metadata.to_h
  call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Spanner::Admin::Database::V1::VERSION
  call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  # Build the implicit routing header from the resource name field(s).
  routing_params = {}
  routing_params["name"] = request.name if request.name
  routing_header = routing_params.map { |field, value| "#{field}=#{value}" }.join "&"
  call_metadata[:"x-goog-request-params"] ||= routing_header

  # Layer per-RPC defaults first, then client-wide defaults.
  options.apply_defaults timeout: @config.rpcs.delete_backup.timeout,
                         metadata: call_metadata,
                         retry_policy: @config.rpcs.delete_backup.retry_policy
  options.apply_defaults timeout: @config.timeout,
                         metadata: @config.metadata,
                         retry_policy: @config.retry_policy

  # Issue the RPC; on success the response is google.protobuf.Empty.
  @database_admin_stub.call_rpc :delete_backup, request, options: options do |empty, grpc_operation|
    yield empty, grpc_operation if block_given?
    return empty
  end
rescue ::GRPC::BadStatus => grpc_error
  raise ::Google::Cloud::Error.from_error(grpc_error)
end
##
# Lists completed and pending backups.
# Backups returned are ordered by `create_time` in descending order,
# starting from the most recent `create_time`.
#
# @overload list_backups(request, options = nil)
# Pass arguments to `list_backups` via a request object, either of type
# {::Google::Cloud::Spanner::Admin::Database::V1::ListBackupsRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Spanner::Admin::Database::V1::ListBackupsRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
#
# @overload list_backups(parent: nil, filter: nil, page_size: nil, page_token: nil)
# Pass arguments to `list_backups` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param parent [::String]
# Required. The instance to list backups from. Values are of the
# form `projects/<project>/instances/<instance>`.
# @param filter [::String]
# An expression that filters the list of returned backups.
#
# A filter expression consists of a field name, a comparison operator, and a
# value for filtering.
# The value must be a string, a number, or a boolean. The comparison operator
# must be one of: `<`, `>`, `<=`, `>=`, `!=`, `=`, or `:`.
# Colon `:` is the contains operator. Filter rules are not case sensitive.
#
# The following fields in the {::Google::Cloud::Spanner::Admin::Database::V1::Backup Backup} are eligible for filtering:
#
# * `name`
# * `database`
# * `state`
# * `create_time` (and values are of the format YYYY-MM-DDTHH:MM:SSZ)
# * `expire_time` (and values are of the format YYYY-MM-DDTHH:MM:SSZ)
# * `version_time` (and values are of the format YYYY-MM-DDTHH:MM:SSZ)
# * `size_bytes`
#
# You can combine multiple expressions by enclosing each expression in
# parentheses. By default, expressions are combined with AND logic, but
# you can specify AND, OR, and NOT logic explicitly.
#
# Here are a few examples:
#
# * `name:Howl` - The backup's name contains the string "howl".
# * `database:prod`
# - The database's name contains the string "prod".
# * `state:CREATING` - The backup is pending creation.
# * `state:READY` - The backup is fully created and ready for use.
# * `(name:howl) AND (create_time < \"2018-03-28T14:50:00Z\")`
# - The backup name contains the string "howl" and `create_time`
# of the backup is before 2018-03-28T14:50:00Z.
# * `expire_time < \"2018-03-28T14:50:00Z\"`
# - The backup `expire_time` is before 2018-03-28T14:50:00Z.
# * `size_bytes > 10000000000` - The backup's size is greater than 10GB
# @param page_size [::Integer]
# Number of backups to be returned in the response. If 0 or
# less, defaults to the server's maximum allowed page size.
# @param page_token [::String]
# If non-empty, `page_token` should contain a
# {::Google::Cloud::Spanner::Admin::Database::V1::ListBackupsResponse#next_page_token next_page_token} from a
# previous {::Google::Cloud::Spanner::Admin::Database::V1::ListBackupsResponse ListBackupsResponse} to the same `parent` and with the same
# `filter`.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Gapic::PagedEnumerable<::Google::Cloud::Spanner::Admin::Database::V1::Backup>]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Gapic::PagedEnumerable<::Google::Cloud::Spanner::Admin::Database::V1::Backup>]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
# @example Basic example
# require "google/cloud/spanner/admin/database/v1"
#
# # Create a client object. The client can be reused for multiple calls.
# client = Google::Cloud::Spanner::Admin::Database::V1::DatabaseAdmin::Client.new
#
# # Create a request. To set request fields, pass in keyword arguments.
# request = Google::Cloud::Spanner::Admin::Database::V1::ListBackupsRequest.new
#
# # Call the list_backups method.
# result = client.list_backups request
#
# # The returned object is of type Gapic::PagedEnumerable. You can
# # iterate over all elements by calling #each, and the enumerable
# # will lazily make API calls to fetch subsequent pages. Other
# # methods are also available for managing paging directly.
# result.each do |response|
# # Each element is of type ::Google::Cloud::Spanner::Admin::Database::V1::Backup.
# p response
# end
#
def list_backups request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  # Normalize the request into the expected protobuf message type.
  request = ::Gapic::Protobuf.coerce request,
                                     to: ::Google::Cloud::Spanner::Admin::Database::V1::ListBackupsRequest

  # Accept a Hash (or nil) in place of a CallOptions object.
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Start from the per-RPC default metadata, then attach the standard
  # x-goog-api-client and x-goog-user-project headers.
  call_metadata = @config.rpcs.list_backups.metadata.to_h
  call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Spanner::Admin::Database::V1::VERSION
  call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  # Build the implicit routing header from the resource name field(s).
  routing_params = {}
  routing_params["parent"] = request.parent if request.parent
  routing_header = routing_params.map { |field, value| "#{field}=#{value}" }.join "&"
  call_metadata[:"x-goog-request-params"] ||= routing_header

  # Layer per-RPC defaults first, then client-wide defaults.
  options.apply_defaults timeout: @config.rpcs.list_backups.timeout,
                         metadata: call_metadata,
                         retry_policy: @config.rpcs.list_backups.retry_policy
  options.apply_defaults timeout: @config.timeout,
                         metadata: @config.metadata,
                         retry_policy: @config.retry_policy

  # Issue the RPC and expose the result as a lazily-paging enumerable.
  @database_admin_stub.call_rpc :list_backups, request, options: options do |raw_response, grpc_operation|
    paged = ::Gapic::PagedEnumerable.new @database_admin_stub, :list_backups, request,
                                         raw_response, grpc_operation, options
    yield paged, grpc_operation if block_given?
    return paged
  end
rescue ::GRPC::BadStatus => grpc_error
  raise ::Google::Cloud::Error.from_error(grpc_error)
end
##
# Create a new database by restoring from a completed backup. The new
# database must be in the same project and in an instance with the same
# instance configuration as the instance containing
# the backup. The returned database [long-running
# operation][google.longrunning.Operation] has a name of the format
# `projects/<project>/instances/<instance>/databases/<database>/operations/<operation_id>`,
# and can be used to track the progress of the operation, and to cancel it.
# The {::Google::Longrunning::Operation#metadata metadata} field type is
# {::Google::Cloud::Spanner::Admin::Database::V1::RestoreDatabaseMetadata RestoreDatabaseMetadata}.
# The {::Google::Longrunning::Operation#response response} type
# is {::Google::Cloud::Spanner::Admin::Database::V1::Database Database}, if
# successful. Cancelling the returned operation will stop the restore and
# delete the database.
# There can be only one database being restored into an instance at a time.
# Once the restore operation completes, a new restore operation can be
# initiated, without waiting for the optimize operation associated with the
# first restore to complete.
#
# @overload restore_database(request, options = nil)
# Pass arguments to `restore_database` via a request object, either of type
# {::Google::Cloud::Spanner::Admin::Database::V1::RestoreDatabaseRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Spanner::Admin::Database::V1::RestoreDatabaseRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
#
# @overload restore_database(parent: nil, database_id: nil, backup: nil, encryption_config: nil)
# Pass arguments to `restore_database` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param parent [::String]
# Required. The name of the instance in which to create the
# restored database. This instance must be in the same project and
# have the same instance configuration as the instance containing
# the source backup. Values are of the form
# `projects/<project>/instances/<instance>`.
# @param database_id [::String]
# Required. The id of the database to create and restore to. This
# database must not already exist. The `database_id` appended to
# `parent` forms the full database name of the form
# `projects/<project>/instances/<instance>/databases/<database_id>`.
# @param backup [::String]
# Name of the backup from which to restore. Values are of the form
# `projects/<project>/instances/<instance>/backups/<backup>`.
# @param encryption_config [::Google::Cloud::Spanner::Admin::Database::V1::RestoreDatabaseEncryptionConfig, ::Hash]
# Optional. An encryption configuration describing the encryption type and key
# resources in Cloud KMS used to encrypt/decrypt the database to restore to.
# If this field is not specified, the restored database will use
# the same encryption configuration as the backup by default, namely
# {::Google::Cloud::Spanner::Admin::Database::V1::RestoreDatabaseEncryptionConfig#encryption_type encryption_type} =
# `USE_CONFIG_DEFAULT_OR_BACKUP_ENCRYPTION`.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Gapic::Operation]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Gapic::Operation]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
# @example Basic example
# require "google/cloud/spanner/admin/database/v1"
#
# # Create a client object. The client can be reused for multiple calls.
# client = Google::Cloud::Spanner::Admin::Database::V1::DatabaseAdmin::Client.new
#
# # Create a request. To set request fields, pass in keyword arguments.
# request = Google::Cloud::Spanner::Admin::Database::V1::RestoreDatabaseRequest.new
#
# # Call the restore_database method.
# result = client.restore_database request
#
# # The returned object is of type Gapic::Operation. You can use this
# # object to check the status of an operation, cancel it, or wait
# # for results. Here is how to block until completion:
# result.wait_until_done! timeout: 60
# if result.response?
# p result.response
# else
# puts "Error!"
# end
#
def restore_database request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  # Normalize the request into the expected protobuf message type.
  request = ::Gapic::Protobuf.coerce request,
                                     to: ::Google::Cloud::Spanner::Admin::Database::V1::RestoreDatabaseRequest

  # Accept a Hash (or nil) in place of a CallOptions object.
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Start from the per-RPC default metadata, then attach the standard
  # x-goog-api-client and x-goog-user-project headers.
  call_metadata = @config.rpcs.restore_database.metadata.to_h
  call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Spanner::Admin::Database::V1::VERSION
  call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  # Build the implicit routing header from the resource name field(s).
  routing_params = {}
  routing_params["parent"] = request.parent if request.parent
  routing_header = routing_params.map { |field, value| "#{field}=#{value}" }.join "&"
  call_metadata[:"x-goog-request-params"] ||= routing_header

  # Layer per-RPC defaults first, then client-wide defaults.
  options.apply_defaults timeout: @config.rpcs.restore_database.timeout,
                         metadata: call_metadata,
                         retry_policy: @config.rpcs.restore_database.retry_policy
  options.apply_defaults timeout: @config.timeout,
                         metadata: @config.metadata,
                         retry_policy: @config.retry_policy

  # Issue the RPC and wrap the raw long-running operation for the caller.
  @database_admin_stub.call_rpc :restore_database, request, options: options do |raw_response, grpc_operation|
    wrapped = ::Gapic::Operation.new raw_response, @operations_client, options: options
    yield wrapped, grpc_operation if block_given?
    return wrapped
  end
rescue ::GRPC::BadStatus => grpc_error
  raise ::Google::Cloud::Error.from_error(grpc_error)
end
##
# Lists database {::Google::Longrunning::Operation longrunning-operations}.
# A database operation has a name of the form
# `projects/<project>/instances/<instance>/databases/<database>/operations/<operation>`.
# The long-running operation
# {::Google::Longrunning::Operation#metadata metadata} field type
# `metadata.type_url` describes the type of the metadata. Operations returned
# include those that have completed/failed/canceled within the last 7 days,
# and pending operations.
#
# @overload list_database_operations(request, options = nil)
# Pass arguments to `list_database_operations` via a request object, either of type
# {::Google::Cloud::Spanner::Admin::Database::V1::ListDatabaseOperationsRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Spanner::Admin::Database::V1::ListDatabaseOperationsRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
#     Overrides the default settings for this call, e.g., timeout, retries, etc. Optional.
#
# @overload list_database_operations(parent: nil, filter: nil, page_size: nil, page_token: nil)
# Pass arguments to `list_database_operations` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param parent [::String]
# Required. The instance of the database operations.
# Values are of the form `projects/<project>/instances/<instance>`.
# @param filter [::String]
# An expression that filters the list of returned operations.
#
# A filter expression consists of a field name, a
# comparison operator, and a value for filtering.
# The value must be a string, a number, or a boolean. The comparison operator
# must be one of: `<`, `>`, `<=`, `>=`, `!=`, `=`, or `:`.
# Colon `:` is the contains operator. Filter rules are not case sensitive.
#
# The following fields in the {::Google::Longrunning::Operation Operation}
# are eligible for filtering:
#
# * `name` - The name of the long-running operation
# * `done` - False if the operation is in progress, else true.
# * `metadata.@type` - the type of metadata. For example, the type string
# for {::Google::Cloud::Spanner::Admin::Database::V1::RestoreDatabaseMetadata RestoreDatabaseMetadata} is
# `type.googleapis.com/google.spanner.admin.database.v1.RestoreDatabaseMetadata`.
# * `metadata.<field_name>` - any field in metadata.value.
# `metadata.@type` must be specified first, if filtering on metadata
# fields.
# * `error` - Error associated with the long-running operation.
# * `response.@type` - the type of response.
# * `response.<field_name>` - any field in response.value.
#
# You can combine multiple expressions by enclosing each expression in
# parentheses. By default, expressions are combined with AND logic. However,
# you can specify AND, OR, and NOT logic explicitly.
#
# Here are a few examples:
#
# * `done:true` - The operation is complete.
# * `(metadata.@type=type.googleapis.com/google.spanner.admin.database.v1.RestoreDatabaseMetadata) AND` \
# `(metadata.source_type:BACKUP) AND` \
# `(metadata.backup_info.backup:backup_howl) AND` \
# `(metadata.name:restored_howl) AND` \
# `(metadata.progress.start_time < \"2018-03-28T14:50:00Z\") AND` \
# `(error:*)` - Return operations where:
# * The operation's metadata type is {::Google::Cloud::Spanner::Admin::Database::V1::RestoreDatabaseMetadata RestoreDatabaseMetadata}.
# * The database is restored from a backup.
# * The backup name contains "backup_howl".
# * The restored database's name contains "restored_howl".
# * The operation started before 2018-03-28T14:50:00Z.
# * The operation resulted in an error.
# @param page_size [::Integer]
# Number of operations to be returned in the response. If 0 or
# less, defaults to the server's maximum allowed page size.
# @param page_token [::String]
# If non-empty, `page_token` should contain a
# {::Google::Cloud::Spanner::Admin::Database::V1::ListDatabaseOperationsResponse#next_page_token next_page_token}
# from a previous {::Google::Cloud::Spanner::Admin::Database::V1::ListDatabaseOperationsResponse ListDatabaseOperationsResponse} to the
# same `parent` and with the same `filter`.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Gapic::PagedEnumerable<::Gapic::Operation>]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Gapic::PagedEnumerable<::Gapic::Operation>]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
# @example Basic example
# require "google/cloud/spanner/admin/database/v1"
#
# # Create a client object. The client can be reused for multiple calls.
# client = Google::Cloud::Spanner::Admin::Database::V1::DatabaseAdmin::Client.new
#
# # Create a request. To set request fields, pass in keyword arguments.
# request = Google::Cloud::Spanner::Admin::Database::V1::ListDatabaseOperationsRequest.new
#
# # Call the list_database_operations method.
# result = client.list_database_operations request
#
# # The returned object is of type Gapic::PagedEnumerable. You can
# # iterate over all elements by calling #each, and the enumerable
# # will lazily make API calls to fetch subsequent pages. Other
# # methods are also available for managing paging directly.
# result.each do |response|
# # Each element is of type ::Google::Longrunning::Operation.
# p response
# end
#
def list_database_operations request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Spanner::Admin::Database::V1::ListDatabaseOperationsRequest

  # A plain Hash (or nil) is also accepted in place of a CallOptions object.
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Build call metadata: per-RPC defaults plus the standard client headers.
  call_metadata = @config.rpcs.list_database_operations.metadata.to_h
  call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Spanner::Admin::Database::V1::VERSION
  call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  # Implicit routing header derived from the request's parent field.
  routing_params = {}
  routing_params["parent"] = request.parent if request.parent
  call_metadata[:"x-goog-request-params"] ||= routing_params.map { |field, value| "#{field}=#{value}" }.join("&")

  # Per-RPC defaults take precedence over client-wide defaults.
  options.apply_defaults timeout: @config.rpcs.list_database_operations.timeout,
                         metadata: call_metadata,
                         retry_policy: @config.rpcs.list_database_operations.retry_policy
  options.apply_defaults timeout: @config.timeout,
                         metadata: @config.metadata,
                         retry_policy: @config.retry_policy

  @database_admin_stub.call_rpc :list_database_operations, request, options: options do |raw_response, grpc_operation|
    # Each page element is a longrunning Operation proto; wrap it for the caller.
    wrap_lro = ->(op_response) { ::Gapic::Operation.new op_response, @operations_client }
    paged = ::Gapic::PagedEnumerable.new @database_admin_stub, :list_database_operations, request, raw_response,
                                         grpc_operation, options, format_resource: wrap_lro
    yield paged, grpc_operation if block_given?
    return paged
  end
rescue ::GRPC::BadStatus => e
  raise ::Google::Cloud::Error.from_error(e)
end
##
# Lists the backup {::Google::Longrunning::Operation long-running operations} in
# the given instance. A backup operation has a name of the form
# `projects/<project>/instances/<instance>/backups/<backup>/operations/<operation>`.
# The long-running operation
# {::Google::Longrunning::Operation#metadata metadata} field type
# `metadata.type_url` describes the type of the metadata. Operations returned
# include those that have completed/failed/canceled within the last 7 days,
# and pending operations. Operations returned are ordered by
# `operation.metadata.value.progress.start_time` in descending order starting
# from the most recently started operation.
#
# @overload list_backup_operations(request, options = nil)
# Pass arguments to `list_backup_operations` via a request object, either of type
# {::Google::Cloud::Spanner::Admin::Database::V1::ListBackupOperationsRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Spanner::Admin::Database::V1::ListBackupOperationsRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
#     Overrides the default settings for this call, e.g., timeout, retries, etc. Optional.
#
# @overload list_backup_operations(parent: nil, filter: nil, page_size: nil, page_token: nil)
# Pass arguments to `list_backup_operations` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param parent [::String]
# Required. The instance of the backup operations. Values are of
# the form `projects/<project>/instances/<instance>`.
# @param filter [::String]
# An expression that filters the list of returned backup operations.
#
# A filter expression consists of a field name, a
# comparison operator, and a value for filtering.
# The value must be a string, a number, or a boolean. The comparison operator
# must be one of: `<`, `>`, `<=`, `>=`, `!=`, `=`, or `:`.
# Colon `:` is the contains operator. Filter rules are not case sensitive.
#
# The following fields in the {::Google::Longrunning::Operation operation}
# are eligible for filtering:
#
# * `name` - The name of the long-running operation
# * `done` - False if the operation is in progress, else true.
# * `metadata.@type` - the type of metadata. For example, the type string
# for {::Google::Cloud::Spanner::Admin::Database::V1::CreateBackupMetadata CreateBackupMetadata} is
# `type.googleapis.com/google.spanner.admin.database.v1.CreateBackupMetadata`.
# * `metadata.<field_name>` - any field in metadata.value.
# `metadata.@type` must be specified first if filtering on metadata
# fields.
# * `error` - Error associated with the long-running operation.
# * `response.@type` - the type of response.
# * `response.<field_name>` - any field in response.value.
#
# You can combine multiple expressions by enclosing each expression in
# parentheses. By default, expressions are combined with AND logic, but
# you can specify AND, OR, and NOT logic explicitly.
#
# Here are a few examples:
#
# * `done:true` - The operation is complete.
# * `(metadata.@type=type.googleapis.com/google.spanner.admin.database.v1.CreateBackupMetadata) AND` \
# `metadata.database:prod` - Returns operations where:
# * The operation's metadata type is {::Google::Cloud::Spanner::Admin::Database::V1::CreateBackupMetadata CreateBackupMetadata}.
# * The database the backup was taken from has a name containing the
# string "prod".
# * `(metadata.@type=type.googleapis.com/google.spanner.admin.database.v1.CreateBackupMetadata) AND` \
# `(metadata.name:howl) AND` \
# `(metadata.progress.start_time < \"2018-03-28T14:50:00Z\") AND` \
# `(error:*)` - Returns operations where:
# * The operation's metadata type is {::Google::Cloud::Spanner::Admin::Database::V1::CreateBackupMetadata CreateBackupMetadata}.
# * The backup name contains the string "howl".
# * The operation started before 2018-03-28T14:50:00Z.
# * The operation resulted in an error.
# * `(metadata.@type=type.googleapis.com/google.spanner.admin.database.v1.CopyBackupMetadata) AND` \
# `(metadata.source_backup:test) AND` \
# `(metadata.progress.start_time < \"2022-01-18T14:50:00Z\") AND` \
# `(error:*)` - Returns operations where:
# * The operation's metadata type is {::Google::Cloud::Spanner::Admin::Database::V1::CopyBackupMetadata CopyBackupMetadata}.
# * The source backup of the copied backup name contains the string
# "test".
# * The operation started before 2022-01-18T14:50:00Z.
# * The operation resulted in an error.
# * `((metadata.@type=type.googleapis.com/google.spanner.admin.database.v1.CreateBackupMetadata) AND` \
# `(metadata.database:test_db)) OR` \
# `((metadata.@type=type.googleapis.com/google.spanner.admin.database.v1.CopyBackupMetadata)
# AND` \
# `(metadata.source_backup:test_bkp)) AND` \
# `(error:*)` - Returns operations where:
# * The operation's metadata matches either of criteria:
# * The operation's metadata type is {::Google::Cloud::Spanner::Admin::Database::V1::CreateBackupMetadata CreateBackupMetadata} AND the
# database the backup was taken from has name containing string
# "test_db"
# * The operation's metadata type is {::Google::Cloud::Spanner::Admin::Database::V1::CopyBackupMetadata CopyBackupMetadata} AND the
# backup the backup was copied from has name containing string
# "test_bkp"
# * The operation resulted in an error.
# @param page_size [::Integer]
# Number of operations to be returned in the response. If 0 or
# less, defaults to the server's maximum allowed page size.
# @param page_token [::String]
# If non-empty, `page_token` should contain a
# {::Google::Cloud::Spanner::Admin::Database::V1::ListBackupOperationsResponse#next_page_token next_page_token}
# from a previous {::Google::Cloud::Spanner::Admin::Database::V1::ListBackupOperationsResponse ListBackupOperationsResponse} to the
# same `parent` and with the same `filter`.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Gapic::PagedEnumerable<::Gapic::Operation>]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Gapic::PagedEnumerable<::Gapic::Operation>]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
# @example Basic example
# require "google/cloud/spanner/admin/database/v1"
#
# # Create a client object. The client can be reused for multiple calls.
# client = Google::Cloud::Spanner::Admin::Database::V1::DatabaseAdmin::Client.new
#
# # Create a request. To set request fields, pass in keyword arguments.
# request = Google::Cloud::Spanner::Admin::Database::V1::ListBackupOperationsRequest.new
#
# # Call the list_backup_operations method.
# result = client.list_backup_operations request
#
# # The returned object is of type Gapic::PagedEnumerable. You can
# # iterate over all elements by calling #each, and the enumerable
# # will lazily make API calls to fetch subsequent pages. Other
# # methods are also available for managing paging directly.
# result.each do |response|
# # Each element is of type ::Google::Longrunning::Operation.
# p response
# end
#
def list_backup_operations request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Spanner::Admin::Database::V1::ListBackupOperationsRequest

  # A plain Hash (or nil) is also accepted in place of a CallOptions object.
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Build call metadata: per-RPC defaults plus the standard client headers.
  call_metadata = @config.rpcs.list_backup_operations.metadata.to_h
  call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Spanner::Admin::Database::V1::VERSION
  call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  # Implicit routing header derived from the request's parent field.
  routing_params = {}
  routing_params["parent"] = request.parent if request.parent
  call_metadata[:"x-goog-request-params"] ||= routing_params.map { |field, value| "#{field}=#{value}" }.join("&")

  # Per-RPC defaults take precedence over client-wide defaults.
  options.apply_defaults timeout: @config.rpcs.list_backup_operations.timeout,
                         metadata: call_metadata,
                         retry_policy: @config.rpcs.list_backup_operations.retry_policy
  options.apply_defaults timeout: @config.timeout,
                         metadata: @config.metadata,
                         retry_policy: @config.retry_policy

  @database_admin_stub.call_rpc :list_backup_operations, request, options: options do |raw_response, grpc_operation|
    # Each page element is a longrunning Operation proto; wrap it for the caller.
    wrap_lro = ->(op_response) { ::Gapic::Operation.new op_response, @operations_client }
    paged = ::Gapic::PagedEnumerable.new @database_admin_stub, :list_backup_operations, request, raw_response,
                                         grpc_operation, options, format_resource: wrap_lro
    yield paged, grpc_operation if block_given?
    return paged
  end
rescue ::GRPC::BadStatus => e
  raise ::Google::Cloud::Error.from_error(e)
end
##
# Configuration class for the DatabaseAdmin API.
#
# This class represents the configuration for DatabaseAdmin,
# providing control over timeouts, retry behavior, logging, transport
# parameters, and other low-level controls. Certain parameters can also be
# applied individually to specific RPCs. See
# {::Google::Cloud::Spanner::Admin::Database::V1::DatabaseAdmin::Client::Configuration::Rpcs}
# for a list of RPCs that can be configured independently.
#
# Configuration can be applied globally to all clients, or to a single client
# on construction.
#
# @example
#
# # Modify the global config, setting the timeout for
# # list_databases to 20 seconds,
# # and all remaining timeouts to 10 seconds.
# ::Google::Cloud::Spanner::Admin::Database::V1::DatabaseAdmin::Client.configure do |config|
# config.timeout = 10.0
# config.rpcs.list_databases.timeout = 20.0
# end
#
# # Apply the above configuration only to a new client.
# client = ::Google::Cloud::Spanner::Admin::Database::V1::DatabaseAdmin::Client.new do |config|
# config.timeout = 10.0
# config.rpcs.list_databases.timeout = 20.0
# end
#
# @!attribute [rw] endpoint
# The hostname or hostname:port of the service endpoint.
# Defaults to `"spanner.googleapis.com"`.
# @return [::String]
# @!attribute [rw] credentials
# Credentials to send with calls. You may provide any of the following types:
# * (`String`) The path to a service account key file in JSON format
# * (`Hash`) A service account key as a Hash
# * (`Google::Auth::Credentials`) A googleauth credentials object
# (see the [googleauth docs](https://googleapis.dev/ruby/googleauth/latest/index.html))
# * (`Signet::OAuth2::Client`) A signet oauth2 client object
# (see the [signet docs](https://googleapis.dev/ruby/signet/latest/Signet/OAuth2/Client.html))
# * (`GRPC::Core::Channel`) a gRPC channel with included credentials
#     *  (`GRPC::Core::ChannelCredentials`) a gRPC credentials object
# * (`nil`) indicating no credentials
# @return [::Object]
# @!attribute [rw] scope
# The OAuth scopes
# @return [::Array<::String>]
# @!attribute [rw] lib_name
# The library name as recorded in instrumentation and logging
# @return [::String]
# @!attribute [rw] lib_version
# The library version as recorded in instrumentation and logging
# @return [::String]
# @!attribute [rw] channel_args
# Extra parameters passed to the gRPC channel. Note: this is ignored if a
# `GRPC::Core::Channel` object is provided as the credential.
# @return [::Hash]
# @!attribute [rw] interceptors
# An array of interceptors that are run before calls are executed.
# @return [::Array<::GRPC::ClientInterceptor>]
# @!attribute [rw] timeout
# The call timeout in seconds.
# @return [::Numeric]
# @!attribute [rw] metadata
# Additional gRPC headers to be sent with the call.
# @return [::Hash{::Symbol=>::String}]
# @!attribute [rw] retry_policy
# The retry policy. The value is a hash with the following keys:
# * `:initial_delay` (*type:* `Numeric`) - The initial delay in seconds.
# * `:max_delay` (*type:* `Numeric`) - The max delay in seconds.
# * `:multiplier` (*type:* `Numeric`) - The incremental backoff multiplier.
# * `:retry_codes` (*type:* `Array<String>`) - The error codes that should
# trigger a retry.
# @return [::Hash]
# @!attribute [rw] quota_project
# A separate project against which to charge quota.
# @return [::String]
#
class Configuration
  extend ::Gapic::Config

  # Service endpoint hostname (or hostname:port).
  config_attr :endpoint, "spanner.googleapis.com", ::String

  # Credentials validation: accepts the forms listed in the class docs;
  # raw gRPC channel/credential objects are allowed only when gRPC is loaded.
  config_attr :credentials, nil do |value|
    allowed = [::String, ::Hash, ::Proc, ::Symbol, ::Google::Auth::Credentials, ::Signet::OAuth2::Client, nil]
    allowed += [::GRPC::Core::Channel, ::GRPC::Core::ChannelCredentials] if defined? ::GRPC
    allowed.any? { |klass| klass === value }
  end

  config_attr :scope, nil, ::String, ::Array, nil
  config_attr :lib_name, nil, ::String, nil
  config_attr :lib_version, nil, ::String, nil
  # Default channel args disable service-config resolution via DNS.
  config_attr(:channel_args, { "grpc.service_config_disable_resolution" => 1 }, ::Hash, nil)
  config_attr :interceptors, nil, ::Array, nil
  config_attr :timeout, nil, ::Numeric, nil
  config_attr :metadata, nil, ::Hash, nil
  config_attr :retry_policy, nil, ::Hash, ::Proc, nil
  config_attr :quota_project, nil, ::String, nil

  # @private
  def initialize parent_config = nil
    # Values not set here fall back to the parent configuration, if any.
    @parent_config = parent_config unless parent_config.nil?

    yield self if block_given?
  end

  ##
  # Configurations for individual RPCs
  # @return [Rpcs]
  #
  def rpcs
    # Memoized; inherits per-RPC settings from the parent config's rpcs.
    @rpcs ||= begin
      parent_rpcs = nil
      parent_rpcs = @parent_config.rpcs if defined?(@parent_config) && @parent_config.respond_to?(:rpcs)
      Rpcs.new parent_rpcs
    end
  end

  ##
  # Configuration RPC class for the DatabaseAdmin API.
  #
  # Includes fields providing the configuration for each RPC in this service.
  # Each configuration object is of type `Gapic::Config::Method` and includes
  # the following configuration fields:
  #
  #  *  `timeout` (*type:* `Numeric`) - The call timeout in seconds
  #  *  `metadata` (*type:* `Hash{Symbol=>String}`) - Additional gRPC headers
  #  *  `retry_policy` (*type:* `Hash`) - The retry policy. The policy fields
  #     include the following keys:
  #      *  `:initial_delay` (*type:* `Numeric`) - The initial delay in seconds.
  #      *  `:max_delay` (*type:* `Numeric`) - The max delay in seconds.
  #      *  `:multiplier` (*type:* `Numeric`) - The incremental backoff multiplier.
  #      *  `:retry_codes` (*type:* `Array<String>`) - The error codes that should
  #         trigger a retry.
  #
  class Rpcs
    ##
    # RPC-specific configuration for `list_databases`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :list_databases
    ##
    # RPC-specific configuration for `create_database`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :create_database
    ##
    # RPC-specific configuration for `get_database`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :get_database
    ##
    # RPC-specific configuration for `update_database_ddl`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :update_database_ddl
    ##
    # RPC-specific configuration for `drop_database`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :drop_database
    ##
    # RPC-specific configuration for `get_database_ddl`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :get_database_ddl
    ##
    # RPC-specific configuration for `set_iam_policy`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :set_iam_policy
    ##
    # RPC-specific configuration for `get_iam_policy`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :get_iam_policy
    ##
    # RPC-specific configuration for `test_iam_permissions`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :test_iam_permissions
    ##
    # RPC-specific configuration for `create_backup`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :create_backup
    ##
    # RPC-specific configuration for `copy_backup`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :copy_backup
    ##
    # RPC-specific configuration for `get_backup`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :get_backup
    ##
    # RPC-specific configuration for `update_backup`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :update_backup
    ##
    # RPC-specific configuration for `delete_backup`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :delete_backup
    ##
    # RPC-specific configuration for `list_backups`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :list_backups
    ##
    # RPC-specific configuration for `restore_database`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :restore_database
    ##
    # RPC-specific configuration for `list_database_operations`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :list_database_operations
    ##
    # RPC-specific configuration for `list_backup_operations`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :list_backup_operations

    # @private
    # Each per-RPC config inherits from the corresponding entry of the
    # parent Rpcs object when one is supplied.
    def initialize parent_rpcs = nil
      list_databases_config = parent_rpcs.list_databases if parent_rpcs.respond_to? :list_databases
      @list_databases = ::Gapic::Config::Method.new list_databases_config
      create_database_config = parent_rpcs.create_database if parent_rpcs.respond_to? :create_database
      @create_database = ::Gapic::Config::Method.new create_database_config
      get_database_config = parent_rpcs.get_database if parent_rpcs.respond_to? :get_database
      @get_database = ::Gapic::Config::Method.new get_database_config
      update_database_ddl_config = parent_rpcs.update_database_ddl if parent_rpcs.respond_to? :update_database_ddl
      @update_database_ddl = ::Gapic::Config::Method.new update_database_ddl_config
      drop_database_config = parent_rpcs.drop_database if parent_rpcs.respond_to? :drop_database
      @drop_database = ::Gapic::Config::Method.new drop_database_config
      get_database_ddl_config = parent_rpcs.get_database_ddl if parent_rpcs.respond_to? :get_database_ddl
      @get_database_ddl = ::Gapic::Config::Method.new get_database_ddl_config
      set_iam_policy_config = parent_rpcs.set_iam_policy if parent_rpcs.respond_to? :set_iam_policy
      @set_iam_policy = ::Gapic::Config::Method.new set_iam_policy_config
      get_iam_policy_config = parent_rpcs.get_iam_policy if parent_rpcs.respond_to? :get_iam_policy
      @get_iam_policy = ::Gapic::Config::Method.new get_iam_policy_config
      test_iam_permissions_config = parent_rpcs.test_iam_permissions if parent_rpcs.respond_to? :test_iam_permissions
      @test_iam_permissions = ::Gapic::Config::Method.new test_iam_permissions_config
      create_backup_config = parent_rpcs.create_backup if parent_rpcs.respond_to? :create_backup
      @create_backup = ::Gapic::Config::Method.new create_backup_config
      copy_backup_config = parent_rpcs.copy_backup if parent_rpcs.respond_to? :copy_backup
      @copy_backup = ::Gapic::Config::Method.new copy_backup_config
      get_backup_config = parent_rpcs.get_backup if parent_rpcs.respond_to? :get_backup
      @get_backup = ::Gapic::Config::Method.new get_backup_config
      update_backup_config = parent_rpcs.update_backup if parent_rpcs.respond_to? :update_backup
      @update_backup = ::Gapic::Config::Method.new update_backup_config
      delete_backup_config = parent_rpcs.delete_backup if parent_rpcs.respond_to? :delete_backup
      @delete_backup = ::Gapic::Config::Method.new delete_backup_config
      list_backups_config = parent_rpcs.list_backups if parent_rpcs.respond_to? :list_backups
      @list_backups = ::Gapic::Config::Method.new list_backups_config
      restore_database_config = parent_rpcs.restore_database if parent_rpcs.respond_to? :restore_database
      @restore_database = ::Gapic::Config::Method.new restore_database_config
      list_database_operations_config = parent_rpcs.list_database_operations if parent_rpcs.respond_to? :list_database_operations
      @list_database_operations = ::Gapic::Config::Method.new list_database_operations_config
      list_backup_operations_config = parent_rpcs.list_backup_operations if parent_rpcs.respond_to? :list_backup_operations
      @list_backup_operations = ::Gapic::Config::Method.new list_backup_operations_config

      yield self if block_given?
    end
  end
end
end
end
end
end
end
end
end
end
| 59.969601 | 183 | 0.540318 |
1c8ef9bb6f0a31df877ac235936fe1f85135c6b0 | 1,689 | class SliceLoader # :nodoc:
# Discovers every slice directory and mixes each slice's extension
# modules into the relevant framework objects.
def self.load
  slice_locations.each { |slice_location| mixin_slice_extentions slice_location }
end
private
# Every directory that may hold an eschaton slice: the slices bundled
# with this plugin plus application-local ones under lib/eschaton_slices.
# NOTE(review): the `private` keyword above does not hide `self.` methods,
# so these class methods remain publicly callable.
def self.slice_locations
  roots = ["#{File.dirname(__FILE__)}/../../slices",
           "#{RAILS_ROOT}/lib/eschaton_slices"]
  roots.collect { |root| Dir["#{root}/*"] }.flatten
end
# Loads a single slice: logs it, registers its path with the dependency
# loader, requires its Ruby files, then mixes the slice's generator, view
# and controller extension modules into the corresponding classes.
def self.mixin_slice_extentions(location)
  _logger_info "loading slice '#{File.basename(location)}'"

  Eschaton.dependencies.load_paths << location
  Dir["#{location}/*.rb"].each { |file| Eschaton.dependencies.require_or_load file }

  # Generator extentions
  mixin_extentions :path => location, :pattern => /([a-z_\d]*_generator_ext).rb/,
                   :extend => ActionView::Helpers::PrototypeHelper::JavaScriptGenerator
  # View extentions
  mixin_extentions :path => location, :pattern => /([a-z_\d]*_view_ext).rb/,
                   :extend => ActionView::Base
  # Controller extentions
  mixin_extentions :path => location, :pattern => /([a-z_\d]*_controller_ext).rb/,
                   :extend => ActionController::Base
end
# Extends options[:extend] with every module under options[:path] whose
# file name matches options[:pattern]; the first capture group is the
# module's underscored name.
def self.mixin_extentions(options)
  Dir["#{options[:path]}/*.rb"].each do |file|
    match_data = options[:pattern].match(file)
    options[:extend].extend_with_slice match_data[1].camelize.constantize if match_data
  end
end
end | 33.117647 | 91 | 0.623446 |
2152b443ad51257de297d2590939a376a9192a90 | 6,942 | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe Admin::CategoriesController, type: :controller do
describe 'GET index' do
context 'when there are categories' do
before :each do
FactoryBot.create(:session, user: organizer)
login_as organizer
@challenge = FactoryBot.create(:challenge)
get :index
end
it 'returns success' do
expect(response).to be_successful
end
it 'renders index' do
expect(response).to render_template('index')
end
it 'list all the challenges' do
expect(assigns[:categories]).to eq([@challenge.category])
end
end
context 'when there are no categories' do
before :each do
FactoryBot.create(:session, user: organizer)
login_as organizer
get :index
end
it 'returns success' do
expect(response).to be_successful
end
it 'renders index' do
expect(response).to render_template('index')
end
it 'returns empty list for challenges and categories' do
expect(assigns[:categories]).to eq([])
end
end
end
describe 'GET show' do
context 'when the category exists' do
before :each do
FactoryBot.create(:session, user: organizer)
login_as organizer
@challenge = FactoryBot.create(:challenge)
get :show, params: { id: @challenge.category.id }
end
it 'returns success' do
expect(response).to be_successful
end
it 'renders index' do
expect(response).to render_template('show')
end
it 'list all the challenges for that category' do
expect(assigns[:challenges]).to eq([@challenge])
end
it 'includes all team submissions' do
expect(assigns[:team_submissions]).to eq({})
end
end
context 'when the category does not exists' do
before :each do
FactoryBot.create(:session, user: organizer)
login_as organizer
get :show, params: { id: 1337 }
end
it 'returns not found' do
expect(response).to have_http_status(:not_found)
end
end
end
# GET #new — only organizers may open the category creation form.
describe 'GET new' do
  context 'when the user is an organizer' do
    before :each do
      FactoryBot.create(:session, user: organizer)
      login_as organizer
      get :new
    end

    it 'returns success' do
      expect(response).to be_successful
    end

    it 'renders new' do
      expect(response).to render_template('new')
    end
  end

  context 'when the user is not an organizer' do
    before :each do
      FactoryBot.create(:session, user: user)
      login_as user
      get :new
    end

    it 'returns forbidden' do
      expect(response).to have_http_status(:forbidden)
    end
  end
end
# POST #create — only organizers may create categories.
describe 'POST new' do
  context 'when the user is an organizer' do
    before :each do
      FactoryBot.create(:session, user: organizer)
      login_as organizer
      @category_params = FactoryBot.attributes_for(:category)
      post :create, params: { category: @category_params }
    end

    it 'creates a challenge' do
      expect(Category.first&.name).to eq(@category_params[:name])
    end

    it 'redirects to the challenge' do
      category = Category.first
      expect(response).to redirect_to(admin_category_path(category))
    end
  end

  context 'when the user is not an organizer' do
    before :each do
      FactoryBot.create(:session, user: user)
      login_as user
      @category_params = FactoryBot.attributes_for(:category)
      # Fixed: post the attributes under the `category:` key (previously
      # `challenge:`, a copy-paste leftover from the challenges spec) so
      # this request exercises the same params shape as the organizer case.
      post :create, params: { category: @category_params }
    end

    it 'returns forbidden' do
      expect(response).to have_http_status(:forbidden)
    end
  end
end
describe 'GET edit' do
context 'when the user is an organizer' do
before :each do
FactoryBot.create(:session, user: organizer)
login_as organizer
category = FactoryBot.create(:category)
get :edit, params: { id: category.id }
end
it 'returns success' do
expect(response).to be_successful
end
end
context 'when the user is not an organizer' do
before :each do
FactoryBot.create(:session, user: user)
login_as user
challenge = FactoryBot.create(:challenge)
get :edit, params: { category_id: challenge.category_id, id: challenge.id }
end
it 'returns forbidden' do
expect(response).to have_http_status(:forbidden)
end
end
end
describe 'PATCH update' do
context 'when the user is an organizer' do
before :each do
FactoryBot.create(:session, user: organizer)
login_as organizer
@category = FactoryBot.create(:category)
@category_params = @category.attributes
@category_params['name'] = 'reverse engineering'
@category_params['description'] = 'New description here!'
patch :update, params: { id: @category.id, category: @category_params }
end
it 'redirects to category' do
expect(response).to redirect_to(admin_category_path(@category))
end
it 'updates the category record' do
expect(Category.first.name).to eq(@category_params['name'])
expect(Category.first.description).to eq(@category_params['description'])
end
end
context 'when the user is not an organizer' do
before :each do
FactoryBot.create(:session, user: user)
login_as user
@category = FactoryBot.create(:category)
@category_params = @category.attributes
@category_params[:name] = 'reverse engineering'
patch :update, params: { id: @category.id, category: @category_params }
end
it 'returns forbidden' do
expect(response).to have_http_status(:forbidden)
end
end
end
describe 'DELETE destroy' do
context 'when the user is an organizer' do
before :each do
FactoryBot.create(:session, user: organizer)
login_as organizer
category = FactoryBot.create(:category)
delete :destroy, params: { id: category.id }
end
it 'redirects to categories' do
expect(response).to redirect_to(admin_categories_path)
end
it 'deletes the category' do
expect(Category.count).to eq(0)
end
end
context 'when the user is not an organizer' do
before :each do
FactoryBot.create(:session, user: user)
login_as user
category = FactoryBot.create(:category)
delete :destroy, params: { id: category.id }
end
it 'returns forbidden' do
expect(response).to have_http_status(:forbidden)
end
end
end
private
def user
@user ||= FactoryBot.create(:user)
end
def organizer
@organizer ||= FactoryBot.create(:user, organizer: true)
end
end
| 26.7 | 83 | 0.629934 |
# Podspec for SipHash — Swift implementation of the SipHash hashing
# algorithm, published for iOS, macOS, tvOS and watchOS.
Pod::Spec.new do |s|
  s.name     = 'SipHash'
  s.version  = '1.2.0'
  s.summary  = 'Simple and secure hashing in Swift with the SipHash algorithm'
  s.author   = 'Károly Lőrentey'
  s.homepage = 'https://github.com/attaswift/SipHash'
  s.license  = { :type => 'MIT', :file => 'LICENSE.md' }
  s.source   = { :git => 'https://github.com/attaswift/SipHash.git', :tag => "v#{s.version}" }

  s.ios.deployment_target     = "8.0"
  s.osx.deployment_target     = "10.9"
  s.tvos.deployment_target    = "9.0"
  s.watchos.deployment_target = "2.0"

  s.source_files      = 'SipHash/*.swift'
  s.social_media_url  = 'https://twitter.com/lorentey'
  s.documentation_url = 'http://attaswift.github.io/SipHash/'
end
| 46.941176 | 114 | 0.632832 |
62bcb37f67c912cae4e412317cf304afbe4e750e | 659 | require_relative 'boot'
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
# Rails application definition for ItemCart.
module ItemCart
  class Application < Rails::Application
    # Initialize configuration defaults for originally generated Rails version.
    config.load_defaults 5.2

    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration can go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded after loading
    # the framework and any gems in your application.
  end
end
| 32.95 | 82 | 0.764795 |
b9ce33023cef62989075004919e1b92681e3b607 | 443 | # == Schema Information
#
# Table name: coupons
#
# id :integer not null, primary key
# code :string not null
# percents :integer not null
# used :boolean default(FALSE), not null
# created_at :datetime not null
# updated_at :datetime not null
#
require 'rails_helper'
# Model spec for Coupon; `percents` is NOT NULL in the schema, so presence
# must be validated.
RSpec.describe Coupon, type: :model do
  it { is_expected.to validate_presence_of(:percents) }
end
| 24.611111 | 56 | 0.600451 |
4a363215e5a5c0afef298a60512422d2d0f2d740 | 1,025 | #
# Be sure to run `pod lib lint SwiftyVISPER.podspec' to ensure this is a
# valid spec before submitting.
# Podspec for the presenter layer of the VISPER application framework.
Pod::Spec.new do |spec|
  spec.name    = 'VISPER-Presenter'
  spec.version = '4.0.1'
  spec.summary = 'Presenter layer of the VISPER Application Framework'
  spec.description = <<-DESC
Implementation of the presenter layer of the VISPER Application Framework.
(VISPER is a framework for building component based apps with the viper architecture).
                       DESC

  spec.homepage = 'https://github.com/barteljan/VISPER'
  spec.license  = { :type => 'MIT', :file => 'LICENSE' }
  spec.author   = { 'Jan Bartel' => '[email protected]' }
  spec.source   = { :git => 'https://github.com/barteljan/VISPER.git', :tag => "VISPER-Presenter-#{spec.version}" }
  spec.social_media_url = 'https://twitter.com/janbartel'

  spec.ios.deployment_target = '8.0'
  spec.swift_version = '4.2'
  spec.source_files  = 'VISPER-Presenter/Classes/**/*'
  spec.dependency 'VISPER-Core', '~> 4.0.0'
end
| 39.423077 | 123 | 0.628293 |
bf827fb3914888d96e4fb073cdaa6671cdd52aee | 3,055 | # frozen_string_literal: true
require 'spec_helper'
# Specs for Backup::Repository — backup/restore handling of project and
# wiki Git repositories.
describe Backup::Repository do
  let(:progress) { StringIO.new }
  let!(:project) { create(:project, :wiki_repo) }

  subject { described_class.new(progress) }

  before do
    # Silence progress output and stub filesystem moves so no real
    # directories are touched.
    allow(progress).to receive(:puts)
    allow(progress).to receive(:print)
    allow(FileUtils).to receive(:mv).and_return(true)
    allow_any_instance_of(described_class).to receive(:progress).and_return(progress)
  end

  describe '#dump' do
    describe 'repo failure' do
      before do
        allow(Gitlab::Popen).to receive(:popen).and_return(['normal output', 0])
      end

      it 'does not raise error' do
        expect { subject.dump }.not_to raise_error
      end
    end
  end

  describe '#restore' do
    let(:timestamp) { Time.utc(2017, 3, 22) }
    # Paths where restore parks the pre-existing repositories; cleaned up
    # in the after hook below.
    let(:temp_dirs) do
      Gitlab.config.repositories.storages.map do |name, storage|
        Gitlab::GitalyClient::StorageSettings.allow_disk_access do
          File.join(storage.legacy_disk_path, '..', 'repositories.old.' + timestamp.to_i.to_s)
        end
      end
    end

    # Freeze time so the temp-dir names (timestamp-suffixed) are predictable.
    around do |example|
      Timecop.freeze(timestamp) { example.run }
    end

    after do
      temp_dirs.each { |path| FileUtils.rm_rf(path) }
    end

    describe 'command failure' do
      before do
        allow_any_instance_of(Gitlab::Shell).to receive(:create_repository).and_return(false)
      end

      context 'hashed storage' do
        it 'shows the appropriate error' do
          subject.restore

          expect(progress).to have_received(:puts).with("[Failed] restoring #{project.full_path} repository")
        end
      end

      context 'legacy storage' do
        let!(:project) { create(:project, :legacy_storage) }

        it 'shows the appropriate error' do
          subject.restore

          expect(progress).to have_received(:puts).with("[Failed] restoring #{project.full_path} repository")
        end
      end
    end

    context 'restoring object pools' do
      it 'schedules restoring of the pool' do
        pool_repository = create(:pool_repository, :failed)
        pool_repository.delete_object_pool

        subject.restore

        pool_repository.reload

        expect(pool_repository).not_to be_failed
        expect(pool_repository.object_pool.exists?).to be(true)
      end
    end
  end

  describe '#empty_repo?' do
    context 'for a wiki' do
      let(:wiki) { create(:project_wiki) }

      it 'invalidates the emptiness cache' do
        expect(wiki.repository).to receive(:expire_emptiness_caches).once

        subject.send(:empty_repo?, wiki)
      end

      context 'wiki repo has content' do
        let!(:wiki_page) { create(:wiki_page, wiki: wiki) }

        it 'returns true, regardless of bad cache value' do
          expect(subject.send(:empty_repo?, wiki)).to be(false)
        end
      end

      context 'wiki repo does not have content' do
        it 'returns true, regardless of bad cache value' do
          expect(subject.send(:empty_repo?, wiki)).to be_truthy
        end
      end
    end
  end
end
| 27.276786 | 109 | 0.648118 |
268fb2727a8870d55411f016f67b94c574d60c29 | 535 | # frozen_string_literal: true
require_relative '../../../step/buy_train'
module Engine
  module Game
    module G1894
      module Step
        # 1894-specific train-buying step: after a train has been obtained
        # via an exchange action, discounted trains are no longer offered.
        class BuyTrain < Engine::Step::BuyTrain
          def setup
            super
            @exchanged = false
          end

          def buy_train_action(action, entity = nil)
            super
            # Latch to true once any exchange-based purchase happens.
            @exchanged ||= !!action.exchange
          end

          def discountable_trains_allowed?(_entity)
            !@exchanged
          end
        end
      end
    end
  end
end
| 19.107143 | 52 | 0.547664 |
118045e3af23787b9aff69ebf47a33f60247c273 | 6,675 | # frozen_string_literal: true
module API
  # Snippets API — REST endpoints for personal snippets (list, read, create,
  # update, delete, raw content, user-agent detail).
  class Snippets < Grape::API::Instance
    include PaginationParams

    # All endpoints below require an authenticated user.
    before { authenticate! }

    resource :snippets do
      helpers Helpers::SnippetsHelpers
      helpers do
        # Snippets authored by the current user.
        def snippets_for_current_user
          SnippetsFinder.new(current_user, author: current_user).execute
        end

        # Publicly visible personal snippets, newest first.
        def public_snippets
          Snippet.only_personal_snippets.are_public.fresh
        end

        # All snippets visible to the current user.
        def snippets
          SnippetsFinder.new(current_user).execute
        end
      end

      desc 'Get a snippets list for authenticated user' do
        detail 'This feature was introduced in GitLab 8.15.'
        success Entities::Snippet
      end
      params do
        use :pagination
      end
      get do
        present paginate(snippets_for_current_user), with: Entities::Snippet, current_user: current_user
      end

      desc 'List all public personal snippets current_user has access to' do
        detail 'This feature was introduced in GitLab 8.15.'
        success Entities::PersonalSnippet
      end
      params do
        use :pagination
      end
      get 'public' do
        present paginate(public_snippets), with: Entities::PersonalSnippet, current_user: current_user
      end

      desc 'Get a single snippet' do
        detail 'This feature was introduced in GitLab 8.15.'
        success Entities::PersonalSnippet
      end
      params do
        requires :id, type: Integer, desc: 'The ID of a snippet'
      end
      get ':id' do
        snippet = snippets.find_by_id(params[:id])
        break not_found!('Snippet') unless snippet

        present snippet, with: Entities::PersonalSnippet, current_user: current_user
      end

      desc 'Create new snippet' do
        detail 'This feature was introduced in GitLab 8.15.'
        success Entities::PersonalSnippet
      end
      params do
        requires :title, type: String, allow_blank: false, desc: 'The title of a snippet'
        requires :file_name, type: String, desc: 'The name of a snippet file'
        requires :content, type: String, allow_blank: false, desc: 'The content of a snippet'
        optional :description, type: String, desc: 'The description of a snippet'
        optional :visibility, type: String,
                              values: Gitlab::VisibilityLevel.string_values,
                              default: 'internal',
                              desc: 'The visibility of the snippet'
      end
      post do
        authorize! :create_snippet

        attrs = declared_params(include_missing: false).merge(request: request, api: true)
        service_response = ::Snippets::CreateService.new(nil, current_user, attrs).execute
        snippet = service_response.payload[:snippet]

        if service_response.success?
          present snippet, with: Entities::PersonalSnippet, current_user: current_user
        else
          # Spam detection takes precedence over the generic service error.
          render_spam_error! if snippet.spam?

          render_api_error!({ error: service_response.message }, service_response.http_status)
        end
      end

      desc 'Update an existing snippet' do
        detail 'This feature was introduced in GitLab 8.15.'
        success Entities::PersonalSnippet
      end
      params do
        requires :id, type: Integer, desc: 'The ID of a snippet'
        optional :title, type: String, allow_blank: false, desc: 'The title of a snippet'
        optional :file_name, type: String, desc: 'The name of a snippet file'
        optional :content, type: String, allow_blank: false, desc: 'The content of a snippet'
        optional :description, type: String, desc: 'The description of a snippet'
        optional :visibility, type: String,
                              values: Gitlab::VisibilityLevel.string_values,
                              desc: 'The visibility of the snippet'
        at_least_one_of :title, :file_name, :content, :visibility
      end
      put ':id' do
        # :id is deleted from params so it is not treated as an update attribute.
        snippet = snippets_for_current_user.find_by_id(params.delete(:id))
        break not_found!('Snippet') unless snippet

        authorize! :update_snippet, snippet

        attrs = declared_params(include_missing: false).merge(request: request, api: true)

        service_response = ::Snippets::UpdateService.new(nil, current_user, attrs).execute(snippet)
        snippet = service_response.payload[:snippet]

        if service_response.success?
          present snippet, with: Entities::PersonalSnippet, current_user: current_user
        else
          render_spam_error! if snippet.spam?

          render_api_error!({ error: service_response.message }, service_response.http_status)
        end
      end

      desc 'Remove snippet' do
        detail 'This feature was introduced in GitLab 8.15.'
        success Entities::PersonalSnippet
      end
      params do
        requires :id, type: Integer, desc: 'The ID of a snippet'
      end
      delete ':id' do
        snippet = snippets_for_current_user.find_by_id(params.delete(:id))
        break not_found!('Snippet') unless snippet

        authorize! :admin_snippet, snippet

        destroy_conditionally!(snippet) do |snippet|
          service = ::Snippets::DestroyService.new(current_user, snippet)
          response = service.execute

          if response.error?
            render_api_error!({ error: response.message }, response.http_status)
          end
        end
      end

      desc 'Get a raw snippet' do
        detail 'This feature was introduced in GitLab 8.15.'
      end
      params do
        requires :id, type: Integer, desc: 'The ID of a snippet'
      end
      get ":id/raw" do
        snippet = snippets.find_by_id(params.delete(:id))
        not_found!('Snippet') unless snippet

        present content_for(snippet)
      end

      desc 'Get raw snippet file contents from the repository'
      params do
        use :raw_file_params
      end
      get ":id/files/:ref/:file_path/raw", requirements: { file_path: API::NO_SLASH_URL_PART_REGEX } do
        snippet = snippets.find_by_id(params.delete(:id))
        not_found!('Snippet') unless snippet&.repo_exists?

        present file_content_for(snippet)
      end

      desc 'Get the user agent details for a snippet' do
        success Entities::UserAgentDetail
      end
      params do
        requires :id, type: Integer, desc: 'The ID of a snippet'
      end
      get ":id/user_agent_detail" do
        # Admin-only endpoint.
        authenticated_as_admin!
        snippet = Snippet.find_by_id!(params[:id])

        break not_found!('UserAgentDetail') unless snippet.user_agent_detail

        present snippet.user_agent_detail, with: Entities::UserAgentDetail
      end
    end
  end
end
| 34.765625 | 104 | 0.644644 |
module Api
  class ActivityReportsController
    # Aggregates counts of failed jobs (Event::JobClosed.failed) within the
    # report's [start_date, end_date] window.
    class JobsFailed < AbstractData
      # Total number of failed jobs in the window.
      def total
        collection.count
      end

      # { epoch-millis => count } bucketed per hour; hour timestamps are
      # normalized to UTC before conversion.
      def hourly_data
        series(collection.select_count.group_by_hour) { |event| event.date.in_time_zone("UTC").to_i }
      end

      # { epoch-millis => count } bucketed per day.
      def daily_data
        series(collection.select_count.group_by_day) { |event| event.date.to_time.to_i }
      end

      # { epoch-millis => count } bucketed per month.
      def monthly_data
        series(collection.select_count.group_by_month) { |event| event.date.to_time.to_i }
      end

      private

      # Failed job-closed events within the reporting window.
      def collection
        Event::JobClosed.failed.date_range(start_date, end_date)
      end

      # Builds the {timestamp_ms => count} hash shared by the *_data readers;
      # the block converts a grouped event row to epoch seconds.
      def series(grouped_events)
        grouped_events.map do |event|
          [yield(event) * 1000, event.count.to_i]
        end.to_h
      end
    end
  end
end
| 22 | 72 | 0.632576 |
bb044e47757d74d2a99cd9b053f2bfe31cbc60d9 | 1,348 | #Author: Piotr Wojcieszonek
#e-mail: [email protected]
# Copyright 21.02.2016 by Piotr Wojcieszonek
require_relative '../../test_helper'
require 'lib/dhcp'
# Unit tests for Lib::DHCP::Option46 (DHCP option code 46), covering both
# direct construction and unpacking from the packed wire format.
class Option46 < Minitest::Test

  # Called before every test method runs. Can be used
  # to set up fixture information.
  def setup
    # One option built directly, one unpacked from bytes [oid=46, len=1, value=112].
    @option = Lib::DHCP::Option46.new(112)
    @option_unpack = Lib::DHCP::Option.unpack([46,1,112].pack('C3'))
  end

  # Called after every test method runs. Can be used to tear
  # down fixture information.
  # (No teardown is needed for these tests.)

  def test_type
    assert_instance_of Lib::DHCP::Option46, @option
  end

  def test_to_string
    assert_equal '112', @option.payload.to_s
  end

  def test_to_integer
    assert_equal 112, @option.payload.to_i
  end

  def test_length
    assert_equal 1, @option.len
  end

  def test_oid
    assert_equal 46, @option.oid
  end

  def test_pack
    # Round trip back to the wire format used in setup.
    expected = [46,1,112].pack('C3')
    assert_equal expected, @option.pack
  end

  def test_type_unpack
    assert_instance_of Lib::DHCP::Option46, @option_unpack
  end

  def test_to_string_unpack
    assert_equal '112', @option_unpack.payload.to_s
  end

  def test_to_integer_unpack
    assert_equal 112 , @option_unpack.payload.to_i
  end

  def test_oid_unpack
    assert_equal 46, @option_unpack.oid
  end

  def test_length_unpack
    assert_equal 1, @option_unpack.len
  end
end
ab079ba13f77eddfa557f5150d9e3b52f0ead369 | 3,968 | # frozen_string_literal: true
require 'rails_helper'
require "#{Rails.root}/lib/wiki_edits"
# Wait one second after loading a path
# Allows React to properly load the page
# Remove this after implementing server-side rendering
# Visit `path` and give client-side JavaScript a moment to render before
# the example makes assertions (see the note above this helper).
def js_visit(path)
  visit path
  sleep 1
end
# Feature specs for a course's Students tab: student list, per-student
# revisions, and real-name visibility rules by viewer role.
describe 'Students Page', type: :feature, js: true do
  before do
    # NOTE(review): `include` inside a before hook is unusual — presumably
    # leftover setup; confirm these two lines are still needed.
    include type: :feature
    include Devise::TestHelpers
    page.current_window.resize_to(1920, 1080)

    allow_any_instance_of(WikiEdits).to receive(:oauth_credentials_valid?).and_return(true)
    @course = create(:course,
                     id: 10001,
                     title: 'This.course',
                     slug: 'This_university.foo/This.course_(term_2015)',
                     start: 3.months.ago,
                     end: 3.months.from_now,
                     school: 'This university.foo',
                     term: 'term 2015',
                     description: 'This is a great course')
    campaign = create(:campaign)
    @course.campaigns << campaign

    @user = create(:user, username: 'Mr_Tester',
                          real_name: 'Mr. Tester',
                          trained: true)
    create(:courses_user,
           id: 1,
           course_id: @course.id,
           user_id: @user.id,
           real_name: @user.real_name)

    article = create(:article,
                     id: 1,
                     title: 'Article_Title',
                     namespace: 0,
                     language: 'es',
                     rating: 'fa')
    create(:articles_course,
           article_id: article.id,
           course_id: @course.id)

    # One revision by the student so the contributions drill-down has data.
    create(:revision,
           id: 1,
           user_id: @user.id,
           article_id: article.id,
           date: Time.zone.today,
           characters: 2,
           views: 10,
           new_article: false)
  end

  it 'should display a list of students' do
    js_visit "/courses/#{@course.slug}/students"
    sleep 1 # Try to avoid issue where this test fails with 0 rows found.
    expect(page).to have_content @user.username
  end

  it 'should open a list of individual student revisions' do
    js_visit "/courses/#{@course.slug}/students"
    sleep 1 # Try to avoid issue where this test fails with 0 rows found.
    expect(page).not_to have_content 'Article Title'
    # Expanding a student row reveals their contributions.
    page.first('tr.students').click
    sleep 1
    within 'table.users' do
      expect(page).to have_content 'User Contributions'
      expect(page).to have_content 'Article Title'
    end
  end

  describe 'display of user name' do
    let(:user) { create(:user) }

    context 'logged out' do
      it 'does not display real name' do
        js_visit "/courses/#{@course.slug}/students"
        sleep 1 # Try to avoid issue where this test fails with 0 rows found.
        within 'table.users' do
          expect(page).not_to have_content @user.real_name
        end
      end
    end

    context 'logged in' do
      before do
        login_as user
        js_visit "/courses/#{@course.slug}/students"
        sleep 1 # Try to avoid issue where this test fails with 0 rows found.
      end

      after do
        logout user
      end

      context 'non-admin' do
        it 'does not display real name' do
          within 'table.users' do
            expect(page).not_to have_content @user.real_name
          end
        end
      end

      context 'admin' do
        let(:user) { create(:admin) }

        it 'displays real name' do
          within 'table.users' do
            expect(page).to have_content @user.real_name
          end
        end
      end

      context 'instructor' do
        # permissions: 1 marks the viewer as an instructor-level user.
        let(:user) { create(:user, permissions: 1) }
        let!(:courses_user) do
          create(:courses_user, course_id: @course.id, user_id: user.id, role: 1)
        end

        it 'displays real name' do
          within 'table.users' do
            expect(page).to have_content @user.real_name
          end
        end
      end
    end
  end
end
| 30.060606 | 91 | 0.576865 |
d5b6ede617587b3c37ee90d5bfd30acc3bd922b7 | 2,539 | # This file should contain all the record creation needed to seed the database with its default values.
# The data can then be loaded with the rake db:seed (or created alongside the db with db:setup).
#
# Examples:
#
# cities = City.create([{ name: 'Chicago' }, { name: 'Copenhagen' }])
# Mayor.create(name: 'Emanuel', city: cities.first)
# Landing page introducing the Mercury in-place editor demo.
Page.create! name: "Welcome", content: <<-HTML
<p>In this RailsCasts episode we are going to look at the <a href="http://jejacks0n.github.com/mercury/">Mercury Editor</a>. It allows you to edit a document in-place, right in the HTML. It works in the following browsers.</p>
<ul>
  <li>Firefox 4+</li>
  <li>Chrome 10+</li>
  <li>Safari 5+</li>
</ul>
<p>Try it out here by clicking on the <strong><em>Edit Page</em></strong> link below. There you will be able to change this page content and even the title above.</p>
HTML

# Placeholder page with lorem-ipsum copy.
Page.create! name: "Products", content: <<-HTML
<p>Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.</p>
<p>Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.</p>
HTML

# Placeholder page with lorem-ipsum copy.
Page.create! name: "About Us", content: <<-HTML
<p>Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.</p>
<p>Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.</p>
HTML
| 94.037037 | 453 | 0.774321 |
bff6cd34eea8e2eea5f39cf57936f597925bd468 | 2,180 | describe BooksController do
include_context 'with JSON API'
# While automatically inferred, they can be also manually specified:
base_path '/topics/{topic_id}/books'
resource_name 'book'
resource_summary 'Handles books organized by tags.'
resource_description <<~HEREDOC
This will fail if the topic does not exists.
The results are not paginated.
HEREDOC
resource_tags 'topics', 'list'
describe action 'GET #index' do
subject(:make_request) { get :index, params: params }
params_in :path do
topic_id schema: { type: 'integer' }, description: "The topic's id"
end
postman_for :action do
pre_request script: 'console.log("A");'
end
# This is also auto detected, but can be manually changed
action_summary 'List all books from a specific topic'
action_description 'By giving an specific topic, this endpoint retrieves all books from it'
path ''
deprecate!
action_docs url: 'http://docstoaction.com'
let(:params) { { topic_id: topic_id } }
let(:topic_id) { 1 }
describe example 'when a valid topic is given' do
before do
request.headers.merge!('X-Extra-Header': 'Some value')
make_request
end
default_example
it 'responds with ok status' do
expect(response).to have_http_status(:ok)
end
end
describe example 'when an invalid topic is given' do
let(:topic_id) { -1 }
include_examples 'not found examples'
end
end
describe action 'PATCH #promote' do
subject(:make_request) { patch :promote, params: params }
let(:params) { { id: book_id } }
let(:book_id) { 1 }
describe example 'when a valid book is given' do
include_context 'with account login'
before do
make_request
end
default_example
it 'responds with ok status' do
expect(response).to have_http_status(:ok)
end
end
describe example 'when an invalid book is promoted' do
include_context 'with account login'
let(:book_id) { -1 }
include_examples 'not found examples'
end
include_examples 'unauthorized when not logged in'
end
end
| 25.647059 | 95 | 0.666055 |
class Nextflow < Formula
  desc "Data-driven computational pipelines"
  homepage "https://www.nextflow.io/"
  # doi "10.1038/nbt.3820"
  # tag "bioinformatics"

  url "https://www.nextflow.io/releases/v0.25.7/nextflow"
  sha256 "79a1695bca00500ac4c3f7f2781835c7cc29913bf96da0b1594e0f5019cba7c0"
  head "https://github.com/nextflow-io/nextflow.git"

  # The release artifact is a single launcher script, so no bottle is needed.
  bottle :unneeded

  depends_on :java => "1.7+"

  def install
    bin.install "nextflow"
  end

  test do
    # -download fetches the runtime dependencies, then a trivial inline
    # pipeline is piped in on stdin ("-") and must print its output.
    system bin/"nextflow", "-download"
    output = pipe_output("#{bin}/nextflow -q run -", "println 'hello'").chomp
    assert_equal "hello", output
  end
end
class Texi2html < Formula
  desc "Convert TeXinfo files to HTML"
  homepage "https://www.nongnu.org/texi2html/"
  url "https://download.savannah.gnu.org/releases/texi2html/texi2html-5.0.tar.gz"
  sha256 "e60edd2a9b8399ca615c6e81e06fa61946ba2f2406c76cd63eb829c91d3a3d7d"
  license "GPL-2.0"

  livecheck do
    skip "No longer developed or maintained"
  end

  bottle do
    cellar :any_skip_relocation
    rebuild 2
    sha256 "4ad9c71802c3258a3c0c7ff8800ddd70cc230ddfecc095080d0144ba153bc2dc" => :big_sur
    sha256 "10f6d76de400799fb21dc900a2344ef444d43658dd502f0c040ad7c0a4bf0fbb" => :catalina
    sha256 "10f6d76de400799fb21dc900a2344ef444d43658dd502f0c040ad7c0a4bf0fbb" => :mojave
    sha256 "10f6d76de400799fb21dc900a2344ef444d43658dd502f0c040ad7c0a4bf0fbb" => :high_sierra
    sha256 "7f306764ce0a5c0d3db56c36806cf61b596bc762ba558108b568095329f0a1dd" => :x86_64_linux
  end

  depends_on "gettext"

  def install
    system "./configure", "--disable-dependency-tracking", "--prefix=#{prefix}",
                          "--mandir=#{man}", "--infodir=#{info}"
    # The tarball ships install-sh without the executable bit set.
    chmod 0755, "./install-sh"
    system "make", "install"
  end

  test do
    (testpath/"test.texinfo").write <<~EOS
      @ifnottex
      @node Top
      @top Hello World!
      @end ifnottex
      @bye
    EOS
    system "#{bin}/texi2html", "test.texinfo"
    # Parenthesized: a bare regexp literal as the first argument triggers
    # Ruby's "ambiguous first argument" warning.
    assert_match(/Hello World!/, File.read("test.html"))
  end
end
class Plowshare < Formula
  desc "Download/upload tool for popular file sharing websites"
  homepage "https://github.com/mcrapet/plowshare"
  url "https://github.com/mcrapet/plowshare/archive/v2.1.7.tar.gz"
  sha256 "c17d0cc1b3323f72b2c1a5b183a9fcef04e8bfc53c9679a4e1523642310d22ad"
  license "GPL-3.0"
  revision 1

  bottle do
    cellar :any_skip_relocation
    sha256 "c46e0ffe3e874eed0628aaf03ddd6711b9076e5ae0ba5c6f678c66babc2fa3f7" => :catalina
    sha256 "b9099a281503dde8b677db021242fa0e495d036ce2e9b13838badf8ff17caff9" => :mojave
    sha256 "fe966c7db7ceb08f5d9c522c68f332eb87927dccf71b53947824d541ae889904" => :high_sierra
    sha256 "fe966c7db7ceb08f5d9c522c68f332eb87927dccf71b53947824d541ae889904" => :sierra
    sha256 "fe966c7db7ceb08f5d9c522c68f332eb87927dccf71b53947824d541ae889904" => :el_capitan
  end

  depends_on "bash"
  depends_on "coreutils"
  depends_on "feh"
  depends_on "gnu-sed"
  depends_on "libcaca"
  depends_on "recode"
  depends_on "spidermonkey"

  def install
    # patch_gnused points the installed scripts at Homebrew's GNU sed (gsed)
    # instead of the system BSD sed.
    system "make", "install", "patch_gnused", "GNU_SED=#{Formula["gnu-sed"].opt_bin}/gsed", "PREFIX=#{prefix}"
  end
end
| 37.333333 | 110 | 0.7875 |
ab52179b61ce706e9a42576f3e61bf0c0475d3b5 | 1,755 | # Copyright © 2011 MUSC Foundation for Research Development
# All rights reserved.
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
# BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
# SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
module Audited
  module Auditor
    module AuditedInstanceMethods
      # All audit records for this model created within the given window.
      # The Range condition is inclusive at both ends, matching SQL BETWEEN.
      def audit_trail(start_date, end_date)
        audits.where(created_at: start_date..end_date)
      end
    end
  end
end
| 58.5 | 145 | 0.790313 |
class Neomutt < Formula
  desc "E-mail reader with support for Notmuch, NNTP and much more"
  homepage "https://neomutt.org/"
  url "https://github.com/neomutt/neomutt/archive/neomutt-20180716.tar.gz"
  sha256 "bd89826980b493ba312228c9c14ffe2403e268571aea6008c6dc7ed3848de200"
  revision 1
  head "https://github.com/neomutt/neomutt.git"

  bottle do
    sha256 "e4cf9f442f87c2092a5721d7d9e0f8832b645fda68c01a779a45f2bec6cd403c" => :mojave
    sha256 "757e4871abd3a6f474a84f8ea9c541a14557ad3263f597779b11caa428cfcd5d" => :high_sierra
    sha256 "0e0e9c5a2abb6c6e379ab2823180ed3eb695e24654d3ab368cf9c8c5524bb1bd" => :sierra
    sha256 "3400b46483f6b4e7d698c9838a8c151393b9e25896e457758c66d3c4b8958f59" => :x86_64_linux
  end

  depends_on "docbook-xsl" => :build
  depends_on "gettext"
  depends_on "gpgme"
  depends_on "libidn"
  depends_on "lmdb"
  depends_on "notmuch"
  depends_on "openssl"
  depends_on "tokyo-cabinet"
  unless OS.mac?
    depends_on "krb5"
    depends_on "libsasl2"
  end

  def install
    # Point the XSLT processor at Homebrew's DocBook catalog for the docs build.
    ENV["XML_CATALOG_FILES"] = "#{etc}/xml/catalog"
    system "./configure", "--prefix=#{prefix}",
                          "--enable-gpgme",
                          "--with-gpgme=#{Formula["gpgme"].opt_prefix}",
                          "--gss",
                          "--lmdb",
                          "--notmuch",
                          "--sasl",
                          "--tokyocabinet",
                          "--with-ssl=#{Formula["openssl"].opt_prefix}",
                          "--with-ui=ncurses"
    system "make", "install"
  end

  test do
    # Querying a config variable exercises the binary without any mailbox setup.
    output = shell_output("#{bin}/neomutt -F /dev/null -Q debug_level")
    assert_equal "debug_level=0", output.chomp
  end
end
| 34.897959 | 94 | 0.632164 |
01c00cf5b6535e9c746d71dfb959a4d11a4e7f00 | 2,174 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Compute::Mgmt::V2019_03_01
  module Models
    #
    # Describes a Encryption Settings for a Disk
    #
    # NOTE: AutoRest-generated model (see file header) — manual edits here
    # are lost when the client is regenerated.
    #
    class DiskEncryptionSettings

      include MsRestAzure

      # @return [KeyVaultSecretReference] Specifies the location of the disk
      # encryption key, which is a Key Vault Secret.
      attr_accessor :disk_encryption_key

      # @return [KeyVaultKeyReference] Specifies the location of the key
      # encryption key in Key Vault.
      attr_accessor :key_encryption_key

      # @return [Boolean] Specifies whether disk encryption should be enabled
      # on the virtual machine.
      attr_accessor :enabled


      #
      # Mapper for DiskEncryptionSettings class as Ruby Hash.
      # This will be used for serialization/deserialization.
      #
      def self.mapper()
        {
          client_side_validation: true,
          required: false,
          serialized_name: 'DiskEncryptionSettings',
          type: {
            name: 'Composite',
            class_name: 'DiskEncryptionSettings',
            model_properties: {
              disk_encryption_key: {
                client_side_validation: true,
                required: false,
                serialized_name: 'diskEncryptionKey',
                type: {
                  name: 'Composite',
                  class_name: 'KeyVaultSecretReference'
                }
              },
              key_encryption_key: {
                client_side_validation: true,
                required: false,
                serialized_name: 'keyEncryptionKey',
                type: {
                  name: 'Composite',
                  class_name: 'KeyVaultKeyReference'
                }
              },
              enabled: {
                client_side_validation: true,
                required: false,
                serialized_name: 'enabled',
                type: {
                  name: 'Boolean'
                }
              }
            }
          }
        }
      end
    end
  end
end
| 29.378378 | 77 | 0.550138 |
188713c38295e0c49a492af7db6cf738a3a91aaf | 237 | # frozen_string_literal: true
RSpec.describe PolyAnalyst6API do
  it 'has a version number' do
    expect(PolyAnalyst6API::VERSION).not_to be nil
  end

  # The gem scaffold ships this intentionally failing example as a reminder
  # to write real specs; marking it pending keeps the reminder visible
  # without leaving the suite permanently red.
  it 'does something useful' do
    pending 'placeholder from the gem scaffold — replace with a real spec'
    expect(false).to eq(true)
  end
end
| 19.75 | 51 | 0.700422 |
1c7320d9fb54f8b6aff2b882017c937d08f6ce51 | 5,639 | require 'stringio'
class WorkfileMigrator < AbstractMigrator
class FakeFileUpload < StringIO
attr_accessor :content_type, :original_filename
end
class LegacyFilePath
def initialize(*args)
@args = args
end
def path
File.join(@args)
end
end
class << self
def prerequisites
UserMigrator.migrate
WorkspaceMigrator.migrate
MembershipMigrator.migrate
end
def classes_to_validate
[Workfile, WorkfileVersion, WorkfileDraft]
end
def migrate(options = {})
raise RuntimeError, "Need to have workfile_path set to migrate workfiles" unless options['workfile_path']
prerequisites
Legacy.connection.exec_query("
INSERT INTO public.workfiles(
legacy_id,
workspace_id,
owner_id,
description,
created_at,
file_name,
updated_at,
deleted_at
)
SELECT
edc_work_file.id,
workspace.id,
owner.id,
description,
created_tx_stamp,
file_name,
last_updated_tx_stamp,
CASE is_deleted
WHEN 't' THEN last_updated_tx_stamp
ELSE null
END
FROM edc_work_file
INNER JOIN users owner
ON owner.username = edc_work_file.owner
INNER JOIN workspaces workspace
ON workspace.legacy_id = edc_work_file.workspace_id
WHERE edc_work_file.id NOT IN (SELECT legacy_id FROM workfiles);")
Legacy.connection.exec_query("
INSERT INTO public.workfile_versions(
legacy_id,
workfile_id,
version_num,
owner_id,
modifier_id,
created_at,
updated_at,
commit_message
)
SELECT
edc_workfile_version.id,
workfiles.id,
version_num,
owner.id,
modifier.id,
created_tx_stamp,
last_updated_tx_stamp,
commit_message
FROM edc_workfile_version
INNER JOIN users owner
ON owner.username = edc_workfile_version.version_owner
INNER JOIN users modifier
ON modifier.username = edc_workfile_version.modified_by
INNER JOIN workfiles
ON edc_workfile_version.workfile_id = workfiles.legacy_id
WHERE edc_workfile_version.id NOT IN (SELECT legacy_id FROM workfile_versions);")
Legacy.connection.exec_query("
INSERT INTO public.workfile_drafts(
legacy_id,
workfile_id,
base_version,
owner_id,
created_at,
updated_at
)
SELECT
edc_workfile_draft.id,
workfiles.id,
base_version_num,
owner.id,
created_tx_stamp,
last_updated_tx_stamp
FROM edc_workfile_draft
INNER JOIN users owner
ON owner.username = edc_workfile_draft.draft_owner
INNER JOIN workfiles
ON edc_workfile_draft.workfile_id = workfiles.legacy_id
WHERE is_deleted = 'f'
AND edc_workfile_draft.id NOT IN (SELECT legacy_id FROM workfile_drafts);")
Legacy.connection.exec_query("
UPDATE public.workfiles
SET latest_workfile_version_id = (SELECT public.workfile_versions.id
FROM public.workfile_versions
JOIN edc_workfile_version on public.workfile_versions.legacy_id = edc_workfile_version.id
JOIN edc_work_file on edc_work_file.latest_version_num = edc_workfile_version.version_num
AND edc_work_file.id = edc_workfile_version.workfile_id
WHERE edc_work_file.id = public.workfiles.legacy_id)")
silence_activerecord do
WorkfileVersion.where("contents_file_name IS NULL").each do |workfile_version|
row = Legacy.connection.exec_query("
SELECT
version_file_id,
workspace_id,
file_name,
mime_type
FROM edc_workfile_version
INNER JOIN
edc_work_file
ON edc_workfile_version.workfile_id = edc_work_file.id
WHERE edc_workfile_version.id = '#{workfile_version.legacy_id}';
").first
path = LegacyFilePath.new(options['workfile_path'], "workfile", row["workspace_id"], row["version_file_id"])
fake_file = FakeFileUpload.new(File.read(path.path))
fake_file.original_filename = row['file_name']
fake_file.content_type = row['mime_type']
fake_file.content_type = 'text/plain' if fake_file.size == 0 # workaround for empty images
workfile_version.contents = fake_file
workfile_version.save!
end
WorkfileDraft.where("content IS NULL").each do |workfile_draft|
row = Legacy.connection.exec_query("
SELECT
draft_file_id,
workspace_id
FROM edc_workfile_draft
INNER JOIN
edc_work_file
ON edc_workfile_draft.workfile_id = edc_work_file.id
WHERE edc_workfile_draft.id = '#{workfile_draft.legacy_id}';
").first
path = LegacyFilePath.new(options['workfile_path'], "workfile", row["workspace_id"], row["draft_file_id"])
workfile_draft.content = StringIO.new(File.read(path.path))
workfile_draft.save!
end
Workfile.unscoped.where(:content_type => nil).find_each do |wf|
wf.update_attributes({:content_type => wf.latest_workfile_version.file_type}, :without_protection => true)
end
end
end
end
end
| 32.784884 | 119 | 0.626352 |
7a6bf78eae5f56b0ca599b29094b7ecb7e23843f | 1,790 | class Gitbucket < Formula
  desc "Git platform powered by Scala offering"
  homepage "https://github.com/gitbucket/gitbucket"
  url "https://github.com/gitbucket/gitbucket/releases/download/4.35.0/gitbucket.war"
  sha256 "931da6b27f473f8df9d4c0fc573665f6565c298f0281e2d2eff7d16f98b7bb71"
  license "Apache-2.0"
  # HEAD builds compile the war from source with ant.
  head do
    url "https://github.com/gitbucket/gitbucket.git"
    depends_on "ant" => :build
  end
  # The release artifact is a prebuilt war, so no bottle is required.
  bottle :unneeded
  depends_on "openjdk"
  def install
    if build.head?
      system "ant"
      libexec.install "war/target/gitbucket.war", "."
    else
      libexec.install "gitbucket.war"
    end
  end
  def caveats
    <<~EOS
      Note: When using launchctl the port will be 8080.
    EOS
  end
  plist_options manual: "java -jar #{HOMEBREW_PREFIX}/opt/gitbucket/libexec/gitbucket.war"
  # launchd job definition: serves the war on 127.0.0.1:8080 with SMTP
  # STARTTLS enabled.
  def plist
    <<~EOS
      <?xml version="1.0" encoding="UTF-8"?>
      <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
      <plist version="1.0">
      <dict>
        <key>Label</key>
        <string>gitbucket</string>
        <key>ProgramArguments</key>
        <array>
          <string>#{Formula["openjdk"].opt_bin}/java</string>
          <string>-Dmail.smtp.starttls.enable=true</string>
          <string>-jar</string>
          <string>#{opt_libexec}/gitbucket.war</string>
          <string>--host=127.0.0.1</string>
          <string>--port=8080</string>
        </array>
        <key>RunAtLoad</key>
        <true/>
      </dict>
      </plist>
    EOS
  end
  test do
    java = Formula["openjdk"].opt_bin/"java"
    fork do
      exec "'#{java}' -jar #{libexec}/gitbucket.war --port=#{free_port} > output"
    end
    sleep 12
    # NOTE(review): the result of this match is never asserted, so the test
    # cannot fail here — wrap it (e.g. refute_match) for it to be meaningful.
    File.read("output") !~ /Exception/
  end
end
| 26.716418 | 108 | 0.612291 |
6a499366444ab74f50591abb8571b61167cf0c08 | 1,022 | require 'active_support/notifications'
require 'active_record/explain_registry'
module ActiveRecord
  # Subscribes to "sql.active_record" notifications and records the SQL and
  # binds of explainable queries into ExplainRegistry while collection is
  # active.
  class ExplainSubscriber # :nodoc:
    def start(name, id, payload)
      # unused
    end
    # Captures the payload's :sql and :binds when the registry is collecting
    # and the query is one we can/should EXPLAIN.
    def finish(name, id, payload)
      if ExplainRegistry.collect? && !ignore_payload?(payload)
        ExplainRegistry.queries << payload.values_at(:sql, :binds)
      end
    end
    # SCHEMA queries cannot be EXPLAINed, also we do not want to run EXPLAIN on
    # our own EXPLAINs no matter how loopingly beautiful that would be.
    #
    # On the other hand, we want to monitor the performance of our real database
    # queries, not the performance of the access to the query cache.
    IGNORED_PAYLOADS = %w(SCHEMA EXPLAIN CACHE)
    EXPLAINED_SQLS = /\A\s*(select|update|delete|insert)\b/i
    # True when the query raised, belongs to an ignored payload category, or
    # is not a SELECT/UPDATE/DELETE/INSERT statement.
    def ignore_payload?(payload)
      payload[:exception] || IGNORED_PAYLOADS.include?(payload[:name]) || payload[:sql] !~ EXPLAINED_SQLS
    end
    # Register one shared subscriber instance at load time.
    ActiveSupport::Notifications.subscribe("sql.active_record", new)
  end
end
| 34.066667 | 105 | 0.710372 |
6a3ac300c90e48d114661d3baee81591bb5d8c48 | 263 | module Rulers
class Application
def get_controller_and_action(env)
_, cont, action, after =
env["PATH_INFO"].split('/', 4)
cont = cont.capitalize
cont += "Controller"
[Object.const_get(cont), action]
end
end
end
| 20.230769 | 40 | 0.604563 |
21ae47a892b133737e0d2832db6dae4003a51371 | 1,559 | # encoding: utf-8
require "logstash/codecs/base"
require "logstash/util/charset"
# Line-oriented text data.
#
# Decoding behavior: Only whole line events will be emitted.
#
# Encoding behavior: Each event will be emitted with a trailing newline.
class LogStash::Codecs::Line < LogStash::Codecs::Base
  config_name "line"
  milestone 3
  # Set the desired text format for encoding.
  config :format, :validate => :string
  # The character encoding used in this input. Examples include "UTF-8"
  # and "cp1252"
  #
  # This setting is useful if your log files are in Latin-1 (aka cp1252)
  # or in another character set other than UTF-8.
  #
  # This only affects "plain" format logs since json is UTF-8 already.
  config :charset, :validate => ::Encoding.name_list, :default => "UTF-8"
  public
  # Sets up the line tokenizer and the charset converter used by #decode.
  def register
    require "logstash/util/buftok"
    @buffer = FileWatch::BufferedTokenizer.new
    @converter = LogStash::Util::Charset.new(@charset)
    @converter.logger = @logger
  end
  public
  # Buffers incoming data and yields one event per complete line; a partial
  # trailing line stays in the buffer until more data (or #flush) arrives.
  def decode(data)
    @buffer.extract(data).each do |line|
      yield LogStash::Event.new("message" => @converter.convert(line))
    end
  end # def decode
  public
  # Emits whatever partial line remains in the buffer as a final event.
  def flush(&block)
    remainder = @buffer.flush
    if !remainder.empty?
      block.call(LogStash::Event.new({"message" => remainder}))
    end
  end
  public
  # Emits the event formatted with @format when one is configured, otherwise
  # its string form; a trailing newline is always appended.
  def encode(data)
    if data.is_a? LogStash::Event and @format
      @on_event.call(data.sprintf(@format) + "\n")
    else
      @on_event.call(data.to_s + "\n")
    end
  end # def encode
end # class LogStash::Codecs::Line
d5e829ce142d49963f781f44b53479fc7d5e5020 | 1,722 | class AliyunCli < Formula
  desc "Universal Command-Line Interface for Alibaba Cloud"
  homepage "https://github.com/aliyun/aliyun-cli"
  url "https://github.com/aliyun/aliyun-cli/archive/v3.0.49.tar.gz"
  sha256 "44b0788c975d519d70fd0f746ed98767d74bbffa0f1317ec1ce4442cad4aa6c4"
  bottle do
    cellar :any_skip_relocation
    sha256 "7d3a55efa0636f84b0d0baf9fe309621690e942d82d825928d24d5b4f6432259" => :catalina
    sha256 "77423cbab96b7c081cba834235e8636314078647d5552714c15515e59719f3a9" => :mojave
    sha256 "97cb6c13f9593dc0d9f79d6f8c65160069337c3b4bcae040834f0871b2d963d9" => :high_sierra
  end
  depends_on "go" => :build
  # Builds with a legacy GOPATH layout (modules off) and bakes the version
  # into the binary via -ldflags.
  def install
    ENV["GO111MODULE"] = "off"
    ENV["GOPATH"] = buildpath
    ENV["PATH"] = "#{ENV["PATH"]}:#{buildpath}/bin"
    (buildpath/"src/github.com/aliyun/aliyun-cli").install buildpath.children
    cd "src/github.com/aliyun/aliyun-cli" do
      system "make", "metas"
      system "go", "build", "-o", bin/"aliyun", "-ldflags",
             "-X 'github.com/aliyun/aliyun-cli/cli.Version=#{version}'", "main/main.go"
      prefix.install_metafiles
    end
  end
  test do
    version_out = shell_output("#{bin}/aliyun version")
    assert_match version.to_s, version_out
    help_out = shell_output("#{bin}/aliyun --help")
    assert_match "Alibaba Cloud Command Line Interface Version #{version}", help_out
    # NOTE(review): asserting a match on "" always passes — drop this line or
    # replace it with a real expectation.
    assert_match "", help_out
    assert_match "Usage:", help_out
    assert_match "aliyun <product> <operation> [--parameter1 value1 --parameter2 value2 ...]", help_out
    oss_out = shell_output("#{bin}/aliyun oss")
    assert_match "Object Storage Service", oss_out
    assert_match "aliyun oss [command] [args...] [options...]", oss_out
  end
end
| 39.136364 | 103 | 0.708479 |
edb3fc403fd24edffa55b884c5d429f32d1dae81 | 705 | module Inciweb
class Incident
def initialize(id = nil)
@id = id
end
def all
fetch_incidents
end
def find
fetch_incident
end
def self.all
new.all
end
def self.find(incident_id)
new(incident_id).find
end
def self.find_by_link(link)
incident_id = link.to_s.scan(/\d+/)
new(incident_id).find
end
private
attr_reader :id
def fetch_incidents
Inciweb::Response.from_xml(
Inciweb::Request.new("feeds/rss/incidents/").run
)
end
def fetch_incident
Inciweb::Response.from_html(
Inciweb::Request.new(["incident", id, ""].join("/")).run
)
end
end
end
| 15.666667 | 64 | 0.587234 |
03a8bcbfb28d3782e147ac7e9151a956ffa3effc | 332 | require "#{File.dirname(__FILE__)}/test_helper"
# Unit tests for Stammer::TagList, the collection wrapper around the 'tags'
# payload returned by the API.
class TagListTest < Test::Unit::TestCase
  # A TagList should behave like (be a kind of) Array.
  def test_tag_list_is_array
    m = Stammer::TagList.new([])
    assert m.is_a?(Array)
  end
  # Each element of the raw 'tags' payload should be wrapped in a Stammer::Tag.
  def test_taglist_elements_become_tags
    m = Stammer::TagList.new({'tags' => [{'id' => 1}]})
    assert m.first.is_a?(Stammer::Tag)
  end
end
| 23.714286 | 55 | 0.674699 |
7aaeffecccd5a0a482f36faf5411c938ca6d84e7 | 82 | class Gluttonberg::Application < Merb::Controller
controller_for_slice
end | 16.4 | 49 | 0.780488 |
d5fd8ac9bb7c6186d522a926c895df0f7dcefded | 125 | class ApplicationController < ActionController::Base
  # Reject non-GET requests that lack a valid CSRF token.
  protect_from_forgery with: :exception
  # NOTE(review): dead commented-out code below — remove it, or load sections
  # in a before_action if it is still needed.
  # @sections = Section.all
end
| 25 | 52 | 0.8 |
ff44848208fae9d481f42ba992e3ac8cb1b5cf86 | 1,586 | # frozen_string_literal: true
require 'octokit'
require 'faraday-http-cache'
# we don't want to forever pay the price of redirects, but make users fix them
# https://github.com/octokit/octokit.rb/issues/771
#
# to reproduce/remove: rename a repository and try to create a diff with the old name
# it should return a NullComparison and not a broken Changeset with nil commits
# tested via test/models/changeset_test.rb
class Octokit::RedirectAsError < Faraday::Response::Middleware
  private
  # Turn 301/302 responses into an Octokit::RepositoryUnavailable so renamed
  # repositories surface as errors instead of silently following redirects
  # (see the rationale in the comments above).
  def on_complete(response)
    if [301, 302].include?(response[:status].to_i)
      raise Octokit::RepositoryUnavailable, response
    end
  end
end
# Faraday middleware stack for all Octokit requests: HTTP caching backed by
# Rails.cache, request logging, error raising, and the redirect-as-error
# middleware defined above.
Octokit.middleware = Faraday::RackBuilder.new do |builder|
  builder.use Faraday::HttpCache, shared_cache: false, store: Rails.cache, serializer: Marshal
  builder.response :logger, Rails.logger
  builder.use Octokit::Response::RaiseError
  builder.use Octokit::RedirectAsError
  builder.adapter Faraday.default_adapter
end
Octokit.connection_options[:request] = { open_timeout: 2 }
token = ENV['GITHUB_TOKEN']
# Outside of tests and asset precompilation a GitHub token is mandatory.
unless Rails.env.test? || ENV['PRECOMPILE']
  raise "No GitHub token available" if token.blank?
end
Octokit.api_endpoint = Rails.application.config.samson.github.api_url
Octokit.web_endpoint = Rails.application.config.samson.github.web_url
GITHUB = Octokit::Client.new(access_token: token)
# Log github request timing so it is more obvious what we spent our time on
Sawyer::Response.prepend(Module.new do
  def initialize(*)
    super
    Rails.logger.info("GITHUB #{@env.method.upcase} (#{timing}s) #{@env.url}")
  end
end)
| 32.367347 | 94 | 0.764817 |
7a76a2db45f52cd2a4231230cf2f3ce31ece64ce | 4,940 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# This file is the source Rails uses to define your schema when running `bin/rails
# db:schema:load`. When creating a new database, `bin/rails db:schema:load` tends to
# be faster and is potentially less error prone than running all of your
# migrations from scratch. Old migrations may fail to apply correctly if those
# migrations use external dependencies or application code.
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 2020_03_16_070737) do
  create_table "clusters", force: :cascade do |t|
    t.string "title"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
  end
  create_table "collections", force: :cascade do |t|
    t.string "title"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
  end
  create_table "episodes", force: :cascade do |t|
    t.string "episode"
    t.integer "podcast_id"
    t.string "title"
    t.text "description"
    t.boolean "published"
    t.string "episode_number"
    t.text "streaming_url"
    t.date "published_at"
    t.text "tags"
    t.integer "tier_required"
    t.text "guid"
    t.index ["podcast_id"], name: "index_episodes_on_podcast_id"
  end
  create_table "genres", force: :cascade do |t|
    t.integer "user_id"
    t.integer "podcast_id"
    t.string "title"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
    t.index ["podcast_id"], name: "index_genres_on_podcast_id"
    t.index ["user_id"], name: "index_genres_on_user_id"
  end
  create_table "groups", force: :cascade do |t|
    t.string "title"
    t.text "description"
    t.integer "podcast_id"
    t.integer "genre_id"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
    t.index ["genre_id"], name: "index_groups_on_genre_id"
    t.index ["podcast_id"], name: "index_groups_on_podcast_id"
  end
  create_table "likes", force: :cascade do |t|
    t.integer "update_id"
    t.integer "user_id"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
    t.index ["update_id"], name: "index_likes_on_update_id"
    t.index ["user_id"], name: "index_likes_on_user_id"
  end
  create_table "networks", force: :cascade do |t|
    t.string "title"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
  end
  create_table "podcasts", force: :cascade do |t|
    t.integer "network_id"
    t.integer "cluster_id"
    t.string "title"
    t.string "itunes_url"
    t.string "feed_url"
    t.integer "user_id"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
    t.string "logo_url"
    t.integer "ranking"
    t.text "bio"
    t.text "genre"
    t.text "logo_url_large"
    t.integer "collection_id"
    t.boolean "xml_valid"
    t.date "last_fetched_at"
    t.index ["cluster_id"], name: "index_podcasts_on_cluster_id"
    t.index ["network_id"], name: "index_podcasts_on_network_id"
  end
  create_table "updates", force: :cascade do |t|
    t.integer "user_id"
    t.string "title"
    t.text "body"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
    t.integer "podcast_id"
    t.index ["podcast_id"], name: "index_updates_on_podcast_id"
    t.index ["user_id"], name: "index_updates_on_user_id"
  end
  create_table "user_podcast_statuses", force: :cascade do |t|
    t.integer "user_id"
    t.integer "podcast_id"
    t.text "status"
    t.index ["podcast_id"], name: "index_user_podcast_statuses_on_podcast_id"
    t.index ["user_id"], name: "index_user_podcast_statuses_on_user_id"
  end
  # Devise-managed authentication table.
  create_table "users", force: :cascade do |t|
    t.string "email", default: "", null: false
    t.string "encrypted_password", default: "", null: false
    t.string "reset_password_token"
    t.datetime "reset_password_sent_at"
    t.datetime "remember_created_at"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
    t.index ["email"], name: "index_users_on_email", unique: true
    t.index ["reset_password_token"], name: "index_users_on_reset_password_token", unique: true
  end
  # Referential-integrity constraints between the tables defined above.
  add_foreign_key "episodes", "podcasts"
  add_foreign_key "genres", "podcasts"
  add_foreign_key "genres", "users"
  add_foreign_key "groups", "genres"
  add_foreign_key "groups", "podcasts"
  add_foreign_key "likes", "updates"
  add_foreign_key "likes", "users"
  add_foreign_key "podcasts", "clusters"
  add_foreign_key "podcasts", "networks"
  add_foreign_key "updates", "podcasts"
  add_foreign_key "updates", "users"
  add_foreign_key "user_podcast_statuses", "podcasts"
  add_foreign_key "user_podcast_statuses", "users"
end
| 34.305556 | 95 | 0.705263 |
f7630ec5eda1af328f954d3495dbf2be64ebc004 | 333 | module Auth
class SignOutAPI < Grape::API
before do
authenticate!
end
resource :sign_out do
desc 'Sign out' do
headers ApiDescHelper.with_common_headers
end
delete do
current_authentication_token.expire!
status 200
{ success: true }
end
end
end
end
| 16.65 | 49 | 0.60961 |
e24b5f05847e887e07be94f489aae8884575eadc | 1,935 | # Alias import callbacks under the /users/auth endpoint so that
# the OAuth2 callback URL can be restricted under http://example.com/users/auth
# instead of http://example.com.
# One aliased OAuth callback route per OmniAuth provider (except LDAP, which
# has no import flow) — see the rationale in the comments above.
Devise.omniauth_providers.map(&:downcase).each do |provider|
  next if provider == 'ldapmain'
  get "/users/auth/-/import/#{provider}/callback", to: "import/#{provider}#callback", as: "users_import_#{provider}_callback"
end
# Importer endpoints, one singular resource per supported source.
namespace :import do
  resource :github, only: [:create, :new], controller: :github do
    post :personal_access_token
    get :status
    get :callback
    get :realtime_changes
  end
  resource :gitea, only: [:create, :new], controller: :gitea do
    post :personal_access_token
    get :status
    get :realtime_changes
  end
  resource :gitlab, only: [:create], controller: :gitlab do
    get :status
    get :callback
    get :jobs
  end
  resource :bitbucket, only: [:create], controller: :bitbucket do
    get :status
    get :callback
    get :jobs
  end
  resource :bitbucket_server, only: [:create, :new], controller: :bitbucket_server do
    post :configure
    get :status
    get :callback
    get :jobs
  end
  resource :google_code, only: [:create, :new], controller: :google_code do
    get :status
    post :callback
    get :jobs
    get :new_user_map, path: :user_map
    post :create_user_map, path: :user_map
  end
  resource :fogbugz, only: [:create, :new], controller: :fogbugz do
    get :status
    post :callback
    get :jobs
    get :new_user_map, path: :user_map
    post :create_user_map, path: :user_map
  end
  resource :gitlab_project, only: [:create, :new] do
    post :create
    post :authorize
  end
  resource :gitlab_group, only: [:create] do
    post :authorize
  end
  resource :manifest, only: [:create, :new], controller: :manifest do
    get :status
    get :jobs
    post :upload
  end
  resource :phabricator, only: [:create, :new], controller: :phabricator
end
| 24.807692 | 125 | 0.678036 |
39de20dad59abb07b787c21863750792713626fd | 3,409 | module Locomotive
module Steam
module Liquid
module Drops
class ContentEntry < I18nBase
delegate :_slug, :_translated, :seo_title, :meta_keywords, :meta_description, :created_at, :updated_at, to: :@_source
alias :_permalink :_slug
def _id
@_source._id.to_s
end
def _label
@_label ||= @_source._label
end
# Returns the next content for the parent content type.
# If no content is found, nil is returned.
#
# Usage:
#
# {% if article.next %}
# <a href="{% path_to article.next %}">Read next article</a>
# {% endif %}
#
def next
@next ||= repository(@_source).next(@_source).to_liquid
end
# Returns the previous content for the parent content type.
# If no content is found, nil is returned.
#
# Usage:
#
# {% if article.previous %}
# <a href="{% path_to article.previous %}">Read previous article</a>
# {% endif %}
#
def previous
@previous ||= repository(@_source).previous(@_source).to_liquid
end
def errors
if @_source.errors.blank?
false
else
@_source.errors.messages.to_hash.stringify_keys
end
end
def before_method(meth)
return '' if @_source.nil?
if not @@forbidden_attributes.include?(meth.to_s)
repository(@_source).value_for(@_source, meth, conditions_for(meth))
else
nil
end
end
def to_hash
@_source.to_hash.tap do |hash|
hash['id'] = hash['_id']
@_source.content_type.fields_by_name.each do |name, field|
case field.type
when :belongs_to
hash[name] = liquify_entry(@_source.send(name))._slug if hash["#{name}_id"].present?
when :many_to_many
hash[name] = (@_source.send(name) || []).all.map { |e| liquify_entry(e)._slug }.compact
when :file
hash[name] = hash["#{name}_url"] = file_field_to_url(hash[name.to_s]) if hash[name.to_s].present?
when :select
hash[name] = @_source.send(name) if hash["#{name}_id"].present?
end
end
end
end
def as_json(options = nil)
self.to_hash.as_json(options)
end
protected
def liquify_entry(entry)
self.class.new(entry).tap { |drop| drop.context = @context }
end
def file_field_to_url(field)
field.to_liquid.tap { |drop| drop.context = @context }.url
end
def repository(entry)
repository = @context.registers[:services].repositories.content_entry
repository.with(entry.content_type)
end
def conditions_for(name)
# note: treat conditions only they apply to the content type (if it's a has_many/many_to_many relationships)
_name = @context['with_scope_content_type']
!_name || _name == name ? @context['with_scope'] : nil
end
end
end
end
end
end
| 30.4375 | 127 | 0.519801 |
ff47639ca1633071bb7d2ec1b4767f696e299185 | 384 | class ShellLogger < Formula
  desc "Logger for shell script."
  homepage "https://github.com/rcmdnk/shell-logger/"
  url "https://github.com/rcmdnk/shell-logger/archive/v0.2.0.tar.gz"
  sha256 "59897974a8d12d20f3cdbc780c6afb5a4fe9599e82a1d94952b7f9b6738d15a1"
  head "https://github.com/rcmdnk/shell-logger.git"
  # The project ships a single sourceable script; install it under etc.
  def install
    (prefix/"etc").install "etc/shell-logger"
  end
end
| 29.538462 | 75 | 0.755208 |
d5cd0e740d1827ab884ba71030d8c775fc3f2c56 | 169 | class AddImageUrlColumn < ActiveRecord::Migration[5.0]
  # Adds recipes.image_url with a placeholder image as the default.
  def change
    add_column :recipes, :image_url, :string, :default => 'http://i.imgur.com/VDI7NXx.jpg'
  end
end
| 28.166667 | 90 | 0.721893 |
6225d02092b266f7c7351ca2d9d626c7b18d39ba | 143 | require 'spec_helper'
# View spec for the shared empty-collection RABL template.
# NOTE(review): uses the legacy `.should` syntax — consider migrating to
# `expect(rendered).to eq("[]")` when the suite moves to modern RSpec.
describe "v1/default/empty.rabl" do
  it "renders empty JSON array" do
    render
    rendered.should == "[]"
  end
end
| 15.888889 | 35 | 0.678322 |
18438e24adc606ad2d97b474ad24dd5bb7c0ab94 | 1,230 | require 'test_helper'
# Scaffold-generated integration tests for PipelineMembersController.
# NOTE(review): the create/update requests still post the scaffold's empty
# attribute hash (`pipeline_member: { }`) — fill in real attributes so these
# tests exercise actual model validation.
class PipelineMembersControllerTest < ActionDispatch::IntegrationTest
  setup do
    @pipeline_member = pipeline_members(:one)
  end
  test "should get index" do
    get pipeline_members_url
    assert_response :success
  end
  test "should get new" do
    get new_pipeline_member_url
    assert_response :success
  end
  test "should create pipeline_member" do
    assert_difference('PipelineMember.count') do
      post pipeline_members_url, params: { pipeline_member: { } }
    end
    assert_redirected_to pipeline_member_url(PipelineMember.last)
  end
  test "should show pipeline_member" do
    get pipeline_member_url(@pipeline_member)
    assert_response :success
  end
  test "should get edit" do
    get edit_pipeline_member_url(@pipeline_member)
    assert_response :success
  end
  test "should update pipeline_member" do
    patch pipeline_member_url(@pipeline_member), params: { pipeline_member: { } }
    assert_redirected_to pipeline_member_url(@pipeline_member)
  end
  test "should destroy pipeline_member" do
    assert_difference('PipelineMember.count', -1) do
      delete pipeline_member_url(@pipeline_member)
    end
    assert_redirected_to pipeline_members_url
  end
end
| 25.102041 | 82 | 0.757724 |
abaa94c807804ea259ebf094ca34b03a6f1f8901 | 645 | require 'aasm/version'
require 'aasm/errors'
require 'aasm/configuration'
require 'aasm/base'
require 'aasm/dsl_helper'
require 'aasm/instance_base'
require 'aasm/core/transition'
require 'aasm/core/event'
require 'aasm/core/state'
require 'aasm/core/invoker'
require 'aasm/core/invokers/base_invoker'
require 'aasm/core/invokers/class_invoker'
require 'aasm/core/invokers/literal_invoker'
require 'aasm/core/invokers/proc_invoker'
require 'aasm/localizer'
require 'aasm/state_machine_store'
require 'aasm/state_machine'
require 'aasm/persistence'
require 'aasm/persistence/base'
require 'aasm/persistence/plain_persistence'
require 'aasm/aasm'
| 29.318182 | 44 | 0.815504 |
5d1f38547410fd4c87755ccbfe9f15412fa801f9 | 196 | class AddUserToAttendances < ActiveRecord::Migration[5.2]
  # Links attendances to both users and events with indexed, foreign-keyed
  # reference columns.
  def change
    add_reference :attendances, :user, foreign_key: true
    add_reference :attendances, :event, foreign_key: true
  end
end
| 28 | 57 | 0.765306 |
ed957dc704fcf1355ad8128009298fbb78d76e31 | 115 | require_relative 'formwandler/version'
require_relative 'formwandler/form'
require_relative 'formwandler/railtie'
| 23 | 38 | 0.86087 |
bf1875e8dfbb533d95337eca0c0d554e7b377d97 | 760 | require 'sinatra'
require 'csv'
# Mark every response as publicly cacheable for up to 60 seconds.
before do
  cache_control :public, max_age: 60
end
# Builds the JSON payload for the chart: every row of the cleaned training
# CSV, reduced to the columns the front end plots. The first column of each
# row (its id) is dropped before the selection is applied.
def chart_data
  # Keep handles on the raw file and its CSV wrapper for this request.
  @data = File.new("./public/data/clean_train.csv", "r")
  @csv = CSV.new(@data, :headers => true, :converters => :all)
  wanted = ["Overall Qual", "Year Built", "Overall Cond", "SalePrice", "Year Remod/Add", "Yr Sold"]
  records = @csv.to_a.map do |row|
    record = row.to_hash
    record.delete(record.keys.first)
    record.slice(*wanted)
  end
  records.to_json
end
# Home page: expose the chart payload to the ERB template and render it.
get '/' do
  @chart_data = chart_data
  erb :"index"
end | 28.148148 | 174 | 0.705263 |
62579a5f6af5c5eb6708eef264691e92c8b21c1c | 210 | class ReAddHasLeftToGroupMemberships < ActiveRecord::Migration[5.2]
  # Re-introduces the has_left flag and backfills every existing membership
  # to false (still active).
  def change
    add_column :group_memberships, :has_left, :boolean, default: false
    GroupMembership.update_all(has_left: false)
  end
end
| 30 | 70 | 0.785714 |
1c6c02ffcc62361ce9fea138e448b30126271ec1 | 515 | class FontexplorerXPro < Cask
  version '4.2.1'
  sha256 '9fd225ff73804231d094f16bdb435355a3b7557d74ec1aeb9d89e925f0673350'
  url "http://fast.fontexplorerx.com/FontExplorerXPro#{version.gsub('.','')}.dmg"
  homepage 'http://www.fontexplorerx.com/'
  license :unknown
  app 'FontExplorer X Pro.app'
  # Files removed by `brew cask zap`.
  zap :delete => [
    # todo: is this user-created content?
    # '~/FontExplorer X',
    '~/Library/Application Support/Linotype/FontExplorer X',
  ]
end
| 30.294118 | 81 | 0.636893 |
7aaf0060a83bf055ff1781e8212e6524ed8d5a02 | 2,036 | require 'spec_helper'
# rspec-puppet specs for the validate_ipv6_address parser function: valid
# addresses (with or without CIDR suffix) pass, while non-strings and
# malformed/IPv4 inputs raise Puppet::ParseError.
describe 'validate_ipv6_address' do
  describe 'signature validation' do
    it { is_expected.not_to eq(nil) }
    it { is_expected.to run.with_params().and_raise_error(Puppet::ParseError, /wrong number of arguments/i) }
    describe 'valid inputs' do
      it { is_expected.to run.with_params('3ffe:0505:0002::') }
      it { is_expected.to run.with_params('3ffe:0505:0002::', '3ffe:0505:0002::2') }
      it { is_expected.to run.with_params('::1/64') }
      it { is_expected.to run.with_params('fe80::a00:27ff:fe94:44d6/64') }
    end
    describe 'invalid inputs' do
      it { is_expected.to run.with_params({}).and_raise_error(Puppet::ParseError, /is not a string/) }
      it { is_expected.to run.with_params(true).and_raise_error(Puppet::ParseError, /is not a string/) }
      it { is_expected.to run.with_params('one').and_raise_error(Puppet::ParseError, /is not a valid IPv6/) }
      it { is_expected.to run.with_params('0.0.0').and_raise_error(Puppet::ParseError, /is not a valid IPv6/) }
      it { is_expected.to run.with_params('0.0.0.256').and_raise_error(Puppet::ParseError, /is not a valid IPv6/) }
      it { is_expected.to run.with_params('0.0.0.0.0').and_raise_error(Puppet::ParseError, /is not a valid IPv6/) }
      it { is_expected.to run.with_params('affe:beef').and_raise_error(Puppet::ParseError, /is not a valid IPv6/) }
      it { is_expected.to run.with_params('::1', {}).and_raise_error(Puppet::ParseError, /is not a string/) }
      it { is_expected.to run.with_params('::1', true).and_raise_error(Puppet::ParseError, /is not a string/) }
      it { is_expected.to run.with_params('::1', 'one').and_raise_error(Puppet::ParseError, /is not a valid IPv6/) }
      # Ruby 1.8.7 coerces differently, so the numeric cases are skipped there.
      context 'unless running on ruby 1.8.7', :if => RUBY_VERSION != '1.8.7' do
        it { is_expected.to run.with_params(1).and_raise_error(Puppet::ParseError, /is not a string/) }
        it { is_expected.to run.with_params('::1', 1).and_raise_error(Puppet::ParseError, /is not a string/) }
      end
    end
  end
end
| 61.69697 | 116 | 0.684185 |
e93e44048ede6506801d5c2c5c08706abd7c835d | 13,301 | require 'zip'
require 'nokogiri'
require 'octokit'
require 'pathname'
# Recursively expands AsciiDoc "include::<file>[]" directives in +content+.
#
# +path+ is the file the content came from; include targets are resolved
# relative to its directory (targets in the root document are used as-is).
# The required block maps a resolved filename to that file's raw content, or
# nil when it cannot be found — an unresolvable directive expands to an empty
# string after a warning is printed to stdout.
def expand(content, path, &get_content)
  content.gsub(/include::(\S+)\[\]/) do
    target = Regexp.last_match(1)
    resolved =
      if File.dirname(path) == "."
        target
      else
        (Pathname.new(path).dirname + Pathname.new(target)).cleanpath.to_s
      end
    included = get_content.call(resolved)
    if included
      # Strip any UTF-8 byte-order mark, then expand nested includes.
      cleaned = included.gsub("\xEF\xBB\xBF".force_encoding("UTF-8"), '')
      expand(cleaned, resolved) { |f| get_content.call(f) }
    else
      puts "#{resolved} could not be resolved for expansion"
      ""
    end
  end
end
desc "Reset book html to trigger re-build"
# Clears the stored HEAD sha on every edition-2 book so the next
# remote_genbook2 run sees them as stale and rebuilds them.
task :reset_book2 => :environment do
  Book.where(:edition => 2).each do |book|
    book.ebook_html = '0000000000000000000000000000000000000000'
    book.save
  end
end
desc "Generate book html directly from git repo"
# Builds the book pages straight from each translation's GitHub repo:
# reads atlas.json, expands asciidoc includes via the GitHub blob API,
# renders with Asciidoctor, then slices the HTML into chapters/sections and
# stores them (plus xref anchors) on the Book/Chapter/Section/Xref models.
task :remote_genbook2 => :environment do
  template_dir = File.join(Rails.root, 'templates')
  nav = '<div id="nav"><a href="[[nav-prev]]">prev</a> | <a href="[[nav-next]]">next</a></div>'
  @octokit = Octokit::Client.new(:login => ENV['API_USER'], :password => ENV['API_PASS'])
  # language code => GitHub repo holding that translation
  all_books = {
    "be" => "progit/progit2-be",
    "cs" => "progit-cs/progit2-cs",
    "en" => "progit/progit2",
    "es" => "progit/progit2-es",
    "fr" => "progit/progit2-fr",
    "gr" => "progit2-gr/progit2",
    "id" => "progit/progit2-id",
    "it" => "progit/progit2-it",
    "ja" => "progit/progit2-ja",
    "ko" => "progit/progit2-ko",
    "nl" => "progit/progit2-nl",
    "ru" => "progit/progit2-ru",
    "sl" => "progit/progit2-sl",
    "sr" => "progit/progit2-sr",
    "tr" => "progit/progit2-tr",
    "uk" => "progit/progit2-uk",
    "uz" => "progit/progit2-uz",
    "zh" => "progit/progit2-zh",
    "zh-tw" => "progit/progit2-zh-tw"
  }
  if ENV['GENLANG']
    books = all_books.select { |code, repo| code == ENV['GENLANG']}
  else
    # only rebuild books whose repo HEAD differs from the last generated SHA
    books = all_books.select do |code, repo|
      repo_head = @octokit.ref(repo, "heads/master").object[:sha]
      book = Book.where(:edition => 2, :code => code).first_or_create
      repo_head != book.ebook_html
    end
  end
  books.each do |code, repo|
    book = Book.where(:edition => 2, :code => code).first_or_create
    # lazily fetch and memoize blob contents by SHA, decoded as UTF-8
    blob_content = Hash.new do |blobs, sha|
      content = Base64.decode64( @octokit.blob(repo, sha, :encoding => 'base64' ).content )
      blobs[sha] = content.force_encoding('UTF-8')
    end
    begin # this section is protected against exceptions
      repo_tree = @octokit.tree(repo, "HEAD", :recursive => true)
      atlas = JSON.parse(blob_content[repo_tree.tree.detect { |node| node[:path]=="atlas.json"}[:sha]])
      chapters = {}
      appnumber = 0
      chnumber = 0
      secnumber = 0
      ids = {}
      # number chapters (book/N/1-*.asc) and appendices (book/A-C*.asc)
      # in the order atlas.json lists them
      atlas['files'].each_with_index do |filename, index|
        if filename =~ /book\/[0-9].*\/1-[^\/]*\.asc/
          chnumber += 1
          chapters ["ch#{secnumber}"] = ['chapter', chnumber, filename]
          secnumber += 1
        end
        if filename =~ /book\/[A-C].*\.asc/
          appnumber += 1
          chapters ["ch#{secnumber}"] = ['appendix', appnumber, filename]
          secnumber += 1
        end
      end
      chapter_list = atlas['files'].select {|filename| filename =~ /book\/[0-9A-C].*\/1-[^\/]*\.asc/}
      # stitch every chapter file into one synthetic document, then expand
      # nested includes by resolving each filename through the repo tree
      initial_content = "include::" + chapter_list.join("[]\n\ninclude::") + "[]\n"
      content = expand(initial_content, "root.asc") do |filename|
        file_handle = repo_tree.tree.detect { |tree| tree[:path] == filename }
        if file_handle
          blob_content[file_handle[:sha]]
        end
      end
      asciidoc = Asciidoctor::Document.new(content,template_dir: template_dir, attributes: { 'compat-mode' => true})
      html = asciidoc.render
      alldoc = Nokogiri::HTML(html)
      number = 1
      # each top-level sect1 div corresponds to one chapter/appendix, in order
      alldoc.xpath("//div[@class='sect1']").each_with_index do |entry, index |
        chapter_type, chapter_number, filename = chapters ["ch#{index}"]
        chapter = entry
        chapter_title = entry.at("h2").content
        next if !chapter_title
        next if !chapter_number
        number = chapter_number
        if chapter_type == 'appendix'
          # appendices are stored after all chapters via a 100 offset
          number = 100 + chapter_number
        end
        pretext = entry.search("div[@class=sectionbody]/div/p").to_html
        id_xref = chapter.at("h2").attribute('id').to_s
        schapter = book.chapters.where(:number => number).first_or_create
        schapter.title = chapter_title.to_s
        schapter.chapter_type = chapter_type
        schapter.chapter_number = chapter_number
        schapter.sha = book.ebook_html
        schapter.save
        # create xref
        csection = schapter.sections.where(:number => 1).first_or_create
        xref = Xref.where(:book_id => book.id, :name => id_xref).first_or_create
        xref.section = csection
        xref.save
        section = 1
        chapter.search("div[@class=sect2]").each do |sec|
          id_xref = sec.at("h3").attribute('id').to_s
          section_title = sec.at("h3").content
          html = sec.inner_html.to_s + nav
          # promote headings one level (h3->h2, h4->h3, h5->h4) for the site layout
          html.gsub!('<h3', '<h2')
          html.gsub!(/\/h3>/, '/h2>')
          html.gsub!('<h4', '<h3')
          html.gsub!(/\/h4>/, '/h3>')
          html.gsub!('<h5', '<h4')
          html.gsub!(/\/h5>/, '/h4>')
          # rewrite intra-book links onto the site's ch00/<anchor> scheme;
          # `rescue nil` tolerates anchors that form invalid regexps
          if xlink = html.scan(/href=\"1-.*?\.html\#(.*?)\"/)
            xlink.each do |link|
              xref = link.first
              html.gsub!(/href=\"1-.*?\.html\##{xref}\"/, "href=\"ch00/#{xref}\"") rescue nil
            end
          end
          if xlink = html.scan(/href=\"\#(.*?)\"/)
            xlink.each do |link|
              xref = link.first
              html.gsub!(/href=\"\##{xref}\"/, "href=\"ch00/#{xref}\"") rescue nil
            end
          end
          # point image sources at the hosted book asset path
          if subsec = html.scan(/<img src="(.*?)"/)
            subsec.each do |sub|
              sub = sub.first
              html.gsub!(/<img src="#{sub}"/, "<img src=\"/book/en/v2/#{sub}\"") rescue nil
            end
          end
          puts "\t\t#{chapter_type} #{chapter_number}.#{section} : #{chapter_title} . #{section_title} - #{html.size}"
          csection = schapter.sections.where(:number => section).first_or_create
          csection.title = section_title.to_s
          csection.html = pretext + html
          csection.save
          xref = Xref.where(:book_id => book.id, :name => id_xref).first_or_create
          xref.section = csection
          xref.save
          # record all the xrefs
          (sec.search(".//*[@id]")).each do |id|
            id_xref = id.attribute('id').to_s
            xref = Xref.where(:book_id => book.id, :name => id_xref).first_or_create
            xref.section = csection
            xref.save
          end
          section += 1
          pretext = ""
        end
      end
      book.sections.each do |section|
        section.set_slug
        section.save
      end
      # remember which SHA was generated so unchanged repos are skipped next run
      repo_head = @octokit.ref(repo, "heads/master").object[:sha]
      book.ebook_html = repo_head
      book.save
    rescue Exception => msg
      # NOTE(review): rescuing Exception also swallows SignalException/SystemExit;
      # StandardError would be safer — confirm before changing.
      puts msg
    end
  end
end
desc "Generate the book html for the sites (by downloading from atlas)"
# Processes books already rendered by Atlas: downloads the zipped HTML build,
# walks build.json's navigation to number chapters/appendices, then splits
# each chapter into sections stored on the Book/Chapter/Section/Xref models.
task :genbook2 => :environment do
  if ENV['GENLANG']
    books = Book.where(:edition => 2, :code => ENV['GENLANG'])
  else
    books = Book.where(:edition => 2, :processed => false)
  end
  nav = '<div id="nav"><a href="[[nav-prev]]">prev</a> | <a href="[[nav-next]]">next</a></div>'
  books.each do |book|
    html_file = download(book.ebook_html) # download processed html ebook
    Zip::File.open(html_file) do |zip_file|
      # Handle entries one by one
      max_chapter = 0
      chapters = {}
      appnumber = 0
      chnumber = 0
      ids = {}
      toc = JSON.parse(zip_file.find_entry("build.json").get_input_stream.read)
      navi = nil
      # build.json layout differs between atlas versions
      if toc['navigations']
        navi = toc['navigations']['navigation']
      elsif toc['navigation']
        navi = toc['navigation']['navigation']
      end
      # collect chapter/appendix entries plus every child anchor's title
      navi.each_with_index do |chthing, index|
        if chthing['type'] == 'appendix'
          appnumber += 1
          chapters["xapp#{index}"] = ['appendix', appnumber, chthing['href'], chthing['label']]
        end
        if chthing['type'] == 'chapter'
          chnumber += 1
          chapters["ch#{index}"] = ['chapter', chnumber, chthing['href'], chthing['label']]
        end
        chthing['children'].each do |child|
          ids[child['id']] = child['label']
        end
      end
      # sort and create the numbers in order
      number = 0
      chapters.sort.each_with_index do |entry, index|
        p entry
        chapter_type, chapter_number, file, title = entry[1]
        p file
        content = zip_file.find_entry(file).get_input_stream.read
        doc = Nokogiri::HTML(content)
        chapter = doc.at("section[@data-type=#{chapter_type}]")
        chapter_title = title
        next if !chapter_title
        next if !chapter_number
        puts chapter_title
        puts chapter_number
        number = chapter_number
        if chapter_type == 'appendix'
          # appendices are stored after all chapters via a 100 offset
          number = 100 + chapter_number
        end
        id_xref = chapter.attribute('id').to_s
        pretext = "<a id=\"#{id_xref}\"></a>"
        pretext += doc.search("section[@data-type=#{chapter_type}] > p").to_html
        schapter = book.chapters.where(:number => number).first_or_create
        schapter.title = chapter_title.to_s
        schapter.chapter_type = chapter_type
        schapter.chapter_number = chapter_number
        schapter.sha = book.ebook_html
        schapter.save
        # create xref
        csection = schapter.sections.where(:number => 1).first_or_create
        xref = Xref.where(:book_id => book.id, :name => id_xref).first_or_create
        xref.section = csection
        xref.save
        section = 1
        chapter.search("section[@data-type=sect1]").each do |sec|
          id_xref = sec.attribute('id').to_s
          section_title = ids[id_xref]
          pretext += "<a id=\"#{id_xref}\"></a>"
          html = pretext + sec.inner_html.to_s + nav
          # demote headings one level (h1->h2, h2->h3, h3->h4), deepest
          # first so already-shifted headings are not shifted twice
          html.gsub!('<h3', '<h4')
          html.gsub!(/\/h3>/, '/h4>')
          html.gsub!('<h2', '<h3')
          html.gsub!(/\/h2>/, '/h3>')
          html.gsub!('<h1', '<h2')
          html.gsub!(/\/h1>/, '/h2>')
          # rewrite intra-book links onto the site's ch00/<anchor> scheme;
          # `rescue nil` tolerates anchors that form invalid regexps
          if xlink = html.scan(/href=\"1-.*?\.html\#(.*?)\"/)
            xlink.each do |link|
              xref = link.first
              html.gsub!(/href=\"1-.*?\.html\##{xref}\"/, "href=\"ch00/#{xref}\"") rescue nil
            end
          end
          if xlink = html.scan(/href=\"\#(.*?)\"/)
            xlink.each do |link|
              xref = link.first
              html.gsub!(/href=\"\##{xref}\"/, "href=\"ch00/#{xref}\"") rescue nil
            end
          end
          # repair entities that came through double-escaped by the build
          html.gsub!(%r{&(gt|lt|amp);}, '&\1;')
          html.gsub!(%r{&</code>(<code class="n">)?(gt|lt|amp)(</code>)?<code class=".">;}, '&\2;')
          # give every sub-heading a self-linking anchor derived from its text
          if subsec = html.scan(/<h3>(.*?)<\/h3>/)
            subsec.each do |sub|
              sub = sub.first
              id = sub.gsub(' ', '-')
              html.gsub!(/<h3>#{sub}<\/h3>/, "<h3 id=\"#{id}\"><a href=\"##{id}\">#{sub}</a></h3>") rescue nil
            end
          end
          # point image sources at the hosted book asset path
          if subsec = html.scan(/<img src="(.*?)"/)
            subsec.each do |sub|
              sub = sub.first
              html.gsub!(/<img src="#{sub}"/, "<img src=\"/book/en/v2/#{sub}\"") rescue nil
            end
          end
          puts "\t\t#{chapter_type} #{chapter_number}.#{section} : #{chapter_title} . #{section_title} - #{html.size}"
          csection = schapter.sections.where(:number => section).first_or_create
          csection.title = section_title.to_s
          csection.html = html
          csection.save
          xref = Xref.where(:book_id => book.id, :name => id_xref).first_or_create
          xref.section = csection
          xref.save
          # record all the xrefs
          (sec.search("section[@id]")+sec.search("figure[@id]")+sec.search("table[@id]")).each do |id|
            id_xref = id.attribute('id').to_s
            # skip auto-generated 'idp...' anchors
            if id_xref[0,3] != 'idp'
              xref = Xref.where(:book_id => book.id, :name => id_xref).first_or_create
              xref.section = csection
              xref.save
            end
          end
          section += 1
          pretext = ""
        end # loop through sections
        #extra = schapter.sections.where("number >= #{section}")
        #extra.delete_all
      end # if it's a chapter
      #extra = book.chapters.where("number > #{number}")
      #extra.delete_all
    end
    book.processed = true
    book.save
    book.sections.each do |section|
      section.set_slug
      section.save
    end
  end
end
# Download +url+ into a uniquely named file under tmp/ and return its path.
#
# NOTE(review): the HTTP response status is never checked, so an error page
# would be saved as if it were the ebook archive — confirm upstream always
# serves 200 for these URLs.
def self.download(url)
  puts "downloading #{url}"
  #return "/Users/schacon/github/progit/gitscm2/ugh/progit-en.661.zip" # for testing
  # timestamp + random suffix keeps concurrent runs from clobbering each other
  file = File.new("#{Rails.root}/tmp/download" + Time.now.to_i.to_s + Random.new.rand(100).to_s, 'wb')
  begin
    uri = URI.parse(url)
    # stream the body straight to disk instead of buffering it in memory
    Net::HTTP.start(uri.host,uri.port, :use_ssl => uri.scheme == 'https') do |http|
      http.request_get(uri.path) do |resp|
        resp.read_body do |segment|
          file.write(segment)
        end
      end
    end
    puts "Done."
  ensure
    file.close
  end
  file.path
end
| 32.283981 | 118 | 0.553643 |
e82368192279cc7e975e322b88ac15fdf0ed694b | 5,070 | require 'puppet/util/network_device'
require 'puppet/util/network_device/sorter'
# DSL mixed into network-device "model" classes: declares the params a device
# exposes, evaluates them against transport output, and folds their values
# into a flat hash (see #retrieve).
module Puppet::Util::NetworkDevice::Dsl
  # Register one or more params of class +klass+ (defaults to param_class).
  def register_param(params, klass = nil, &block)
    # Make it so that we can register multiple Params at the same time
    # and assign every Param an index number that must match the Regex
    klass ||= param_class
    @params ||= {}
    [params].flatten.each_with_index do |param, idx|
      @params[param] = klass.new(param, transport, facts, idx, &block)
    end
  end
  # Register params that live inside a scoped chunk of output/config
  # (+scope_match+ selects the chunk; the current name is carried along).
  def register_scoped(params, scope_match, klass = nil, &block)
    int_name = name
    register_param(params, klass) do
      raise "no name set" if name.nil?
      scope scope_match
      scope_name int_name
      # Pass the Block to a Helper Method so we are in the right Scope
      # when evaluating the block
      evaluate &block
    end
  end
  # Collapse all evaluated params into one hash; Hash-valued params are merged
  # in, nil/empty values and params listed in skip_params_to_hash are dropped.
  def params_to_hash
    @params.inject({}) {|res, data|
      unless respond_to?(:skip_params_to_hash) && skip_params_to_hash.include?(data[0])
        unless data[1].value.nil? || data[1].value.to_s.empty?
          if data[1].value.is_a?(Hash)
            res.merge!(data[1].value)
          else
            res[data[0]] = data[1].value
          end
        end
      end
      res
    }
  end
  def register_module_after(param, mod, path_addition = "", &block)
    # Register a new Module after the required Fact has been evaluated
    # Pass a Block that must evaluate to true or false to make sure we dont
    # include Modules by accident
    @after_hooks ||= {}
    @after_hooks[param] ||= []
    @after_hooks[param] << {:mod => mod, :path_addition => path_addition, :block => block}
  end
  # Autoload and register +mod+ exactly once; +path_addition+ selects a
  # sub-namespace below mod_const_base.
  def register_new_module(mod, path_addition = "")
    @included_modules ||= []
    unless @included_modules.include?(mod)
      Puppet::Util::Autoload.new(self, File.join(mod_path_base, path_addition), :wrap => false).load(mod)
      if path_addition.empty?
        mod_const_base.const_get(mod.to_s.capitalize).register(self)
        @included_modules << mod
      else
        mod_const_base.const_get(path_addition.to_s.capitalize).const_get(mod.to_s.capitalize).register(self)
        @included_modules << mod
      end
    end
  end
  # Evaluate params in dependency order: params with a cmd run it over the
  # transport (cached), the others parse values of the params they depend on.
  # Re-runs itself until no unevaluated params remain, because after-hooks may
  # register new modules (and therefore new params) mid-pass.
  def evaluate_new_params
    Puppet::Util::NetworkDevice::Sorter.new(@params).tsort.each do |param|
      #Skip if the param has already been evaluated
      next if param.evaluated
      if param.cmd != false
        # Let the Transport Cache the Command for us since we are only dealing here with 'show' type commands
        out = @transport.command(param.cmd, :cache => true, :noop => false)
        # This is here for the Specs
        # FIXME
        if out.nil?
          param.evaluated = true
          next
        end
        param.parse(out)
      elsif param.match_param.is_a? Array
        param.parse([param.match_param].flatten.collect{|parameter|@params[parameter].value})
      else
        param.parse(@params[param.match_param].value)
      end
      @after_hooks ||= {}
      if @after_hooks[param.name]
        @after_hooks[param.name].each do |mod|
          register_new_module(mod[:mod], mod[:path_addition]) if mod[:block].call
        end
      end
    end
    evaluate_new_params unless @params.each_value.select {|param| param.evaluated == false}.empty?
  end
  # Entry point: load the base module, evaluate everything, return the hash.
  def retrieve
    register_new_module(:base)
    evaluate_new_params
    params_to_hash
  end
  # register a simple param using the specified regexp and commands
  def register_simple(param, match_re, fetch_cmd, cmd)
    register_param param do
      match match_re
      cmd fetch_cmd
      add do |transport, value|
        transport.command("#{cmd} #{value}")
      end
      remove do |transport, old_value|
        transport.command("no #{cmd} #{old_value}")
      end
    end
  end
  # register a model based param
  def register_model(param, klass, match_re, fetch_cmd)
    register_param param, Puppet::Util::NetworkDevice::Dell_ftos::Model::ModelValue do
      model klass
      match match_re
      cmd fetch_cmd
    end
  end
  # register a simple yes/no param. the regexp must match if the param is present
  def register_bool(param, match_re, fetch_cmd, cmd)
    register_param param do
      match do |txt|
        if !!txt.match(match_re)
          :present
        else
          :absent
        end
      end
      cmd fetch_cmd
      add do |transport, _|
        transport.command(cmd)
      end
      remove do |transport, _|
        transport.command("no #{cmd}")
      end
    end
  end
  # register a simple array-valued param
  # transform the array using a block if necessary
  def register_array(param, match_re, fetch_cmd, cmd, &block)
    register_param param do
      match do |txt|
        result = txt.scan(match_re).flatten
        if block_given?
          yield result
        else
          result
        end
      end
      cmd fetch_cmd
      add do |transport, value|
        transport.command("#{cmd} #{value}")
      end
      remove do |transport, old_value|
        transport.command("no #{cmd} #{old_value}")
      end
    end
  end
end
| 30.727273 | 109 | 0.641617 |
87c62de3f2992469c919749aea1ce6ecb4f63774 | 620 |
Pod::Spec.new do |spec|
spec.name = "XHMainProject-User"
spec.version = "0.0.1"
spec.summary = "XHMainProject"
spec.description = "XHMainProject第一个"
spec.homepage = "https://github.com/GSmallSea/XHMainProject-User.git"
spec.license = { :type => "MIT", :file => "FILE_LICENSE" }
spec.author = { "GSmallSea" => "[email protected]" }
spec.source = { :git => "https://github.com/GSmallSea/XHMainProject-User.git", :tag => "#{spec.version}" }
spec.source_files = "Pod/Classes", "Pod/Classes/**/*.{h,m}"
spec.exclude_files = "Classes/Exclude"
end
| 31 | 114 | 0.604839 |
01739498a22a8285eeb4eef0de34b3f766e6fdd7 | 2,239 | module GOVUKDesignSystemFormBuilder
module Traits
module Input
def initialize(builder, object_name, attribute_name, hint_text:, label:, width:, **extra_args, &block)
super(builder, object_name, attribute_name, &block)
@width = width
@extra_args = extra_args
@label = label
@hint_text = hint_text
end
def html
Containers::FormGroup.new(@builder, @object_name, @attribute_name).html do
safe_join(
[
label_element.html,
hint_element.html,
supplemental_content.html,
error_element.html,
@builder.send(
builder_method,
@attribute_name,
id: field_id(link_errors: true),
class: input_classes,
aria: {
describedby: described_by(
hint_id,
error_id,
supplemental_id
)
},
**@extra_args
)
]
)
end
end
private
def input_classes
%w(govuk-input).push(width_classes, error_classes).compact
end
def error_classes
'govuk-input--error' if has_errors?
end
def width_classes
return if @width.blank?
case @width
when nil then nil
# fixed (character) widths
when 20 then 'govuk-input--width-20'
when 10 then 'govuk-input--width-10'
when 5 then 'govuk-input--width-5'
when 4 then 'govuk-input--width-4'
when 3 then 'govuk-input--width-3'
when 2 then 'govuk-input--width-2'
# fluid widths
when 'full' then 'govuk-!-width-full'
when 'three-quarters' then 'govuk-!-width-three-quarters'
when 'two-thirds' then 'govuk-!-width-two-thirds'
when 'one-half' then 'govuk-!-width-one-half'
when 'one-third' then 'govuk-!-width-one-third'
when 'one-quarter' then 'govuk-!-width-one-quarter'
else fail(ArgumentError, "invalid width '#{@width}'")
end
end
end
end
end
| 28.705128 | 108 | 0.522108 |
389a7e0217ee4322520b1e0b91ca751c9ac54d2e | 197 | require 'active_support/concern'
# Concern that hides the including model from the rails_admin interface.
module ThecoreUiRailsAdminUsedToken
  extend ActiveSupport::Concern
  included do
    rails_admin do
      # never expose this model in the admin UI
      visible false
    end
  end
end | 17.909091 | 35 | 0.670051 |
1d52b4ba36aade565631276d964891e37cc91aa3 | 3,127 | =begin
Copyright (c) 2009-2012 Christian Nau
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
=end
require_relative '../spec_helper'
# Steps driving GameObjectLoader: a class definition and an object instance
# are served from mocked GameObjects storage (no database), then the generated
# Ruby structure (superclass, mixins, accessors, computed methods) is checked.
Given /^a mock class setup$/ do
  obj_hash = {:game_object_id => 'test_class_1',
              :super => 'BasicGameObject',
              :mixins => 'Logging',
              :properties => 'game_object_id,foo,bar,foo_text,foo_obj',
              :foo_bar => 'self.foo + self.bar',
              :foo_log => 'log.info "some info"'}#,
              #:foo_m(param) => 'log.info param'}
  # setup mock game object to prevent database hit
  GameObjects.expects(:get).with('test_class_1').returns(obj_hash)
end
When /^I load test class object$/ do
  @obj_c = GameObjectLoader.load_object 'test_class_1'
  @obj_c.should_not be_nil
end
# The loaded class should expose accessors for each declared property plus the
# computed foo_bar/foo_log methods, and mix in the Logging module.
Then /^I verify class structure$/ do
  @obj_c.superclass.name.should eql 'BasicGameObject'
  @obj_c.public_instance_methods.should include :foo
  @obj_c.public_instance_methods.should include :bar
  @obj_c.public_instance_methods.should include :game_object_id
  @obj_c.public_instance_methods.should include :foo_bar
  @obj_c.public_instance_methods.should include :foo_log
  @obj_c.public_instance_methods.should include :foo_obj
  @obj_c.included_modules.should include Logging
end
And /^a mock object setup$/ do
  obj_hash = {:game_object_id => 'test_object_1',
              :parent => 'test_class_1',
              :foo => '1',
              :bar => '2',
              :foo_text => 'some text',
              :foo_obj => '$${BasicGameObject}.new'}
  # setup mock game object to prevent database hit
  GameObjects.expects(:get).with('test_object_1').returns(obj_hash)
end
And /^I load test object$/ do
  @obj = GameObjectLoader.load_object 'test_object_1'
  @obj.should_not be_nil
end
# The loader coerces numeric property strings to Integers and evaluates the
# $${...} expression into a BasicGameObject instance (asserted below).
Then /^I verify object structure$/ do
  @obj.game_object_id.should eql 'test_object_1'
  @obj.should be_an_instance_of @obj_c
  @obj.foo.should eql 1
  @obj.bar.should eql 2
  @obj.foo_bar.should eql 3
  @obj.foo_text.should eql 'some text'
  @obj.foo_obj.should be_an_instance_of BasicGameObject
end
end | 39.0875 | 75 | 0.707067 |
bb9f72860cfd974327a583cd7a3cf8865d13053f | 338 | module Queries
class MemberByHbxIdQuery
def initialize(dcas_no)
@dcas_no = dcas_no
end
def execute
person = Person.unscoped.where("members.hbx_member_id" => @dcas_no).first
return(nil) if person.blank?
person.nil? ? nil : (person.members.detect { |m| m.hbx_member_id == @dcas_no})
end
end
end
| 24.142857 | 84 | 0.662722 |
ed06533bb99244a68daaa13914baf99d760e3a91 | 3,638 | require 'database_cleaner'
# This file is copied to spec/ when you run 'rails generate rspec:install'
require 'spec_helper'
ENV['RAILS_ENV'] ||= 'test'
require File.expand_path('../../config/environment', __FILE__)
# Prevent database truncation if the environment is production
abort("The Rails environment is running in production mode!") if Rails.env.production?
require 'rspec/rails'
# Add additional requires below this line. Rails is not loaded until this point!
# Requires supporting ruby files with custom matchers and macros, etc, in
# spec/support/ and its subdirectories. Files matching `spec/**/*_spec.rb` are
# run as spec files by default. This means that files in spec/support that end
# in _spec.rb will both be required and run as specs, causing the specs to be
# run twice. It is recommended that you do not name files matching this glob to
# end with _spec.rb. You can configure this pattern with the --pattern
# option on the command line or in ~/.rspec, .rspec or `.rspec-local`.
#
# The following line is provided for convenience purposes. It has the downside
# of increasing the boot-up time by auto-requiring all files in the support
# directory. Alternatively, in the individual `*_spec.rb` files, manually
# require only the support files necessary.
#
# Dir[Rails.root.join('spec', 'support', '**', '*.rb')].each { |f| require f }
Dir[Rails.root.join('spec/support/**/*.rb')].each { |f| require f }
# Checks for pending migrations and applies them before tests are run.
# If you are not using ActiveRecord, you can remove these lines.
begin
  ActiveRecord::Migration.maintain_test_schema!
rescue ActiveRecord::PendingMigrationError => e
  puts e.to_s.strip
  exit 1
end
# configure shoulda matchers to use rspec as the test framework and full matcher libraries for rails
Shoulda::Matchers.configure do |config|
  config.integrate do |with|
    with.test_framework :rspec
    with.library :rails
  end
end
RSpec.configure do |config|
  # Remove this line if you're not using ActiveRecord or ActiveRecord fixtures
  config.fixture_path = "#{::Rails.root}/spec/fixtures"
  # DatabaseCleaner manages the database here (see the bottom of this file),
  # and its documentation requires RSpec's own transaction wrapping to be
  # disabled — running both nests transactions and can hide records from the
  # cleaning strategies.
  config.use_transactional_fixtures = false
  # RSpec Rails can automatically mix in different behaviours to your tests
  # based on their file location, for example enabling you to call `get` and
  # `post` in specs under `spec/controllers`.
  #
  # You can disable this behaviour by removing the line below, and instead
  # explicitly tag your specs with their type, e.g.:
  #
  #     RSpec.describe UsersController, :type => :controller do
  #       # ...
  #     end
  #
  # The different available types are documented in the features, such as in
  # https://relishapp.com/rspec/rspec-rails/docs
  config.infer_spec_type_from_file_location!
  # Filter lines from Rails gems in backtraces.
  config.filter_rails_from_backtrace!
  # arbitrary gems may also be filtered via:
  # config.filter_gems_from_backtrace("gem name")
  # add `FactoryBot` methods
  config.include FactoryBot::Syntax::Methods
  # Support requests
  config.include RequestSpecHelper, type: :request
  # start by truncating all the tables but then use the faster transaction strategy the rest of the time.
  config.before(:suite) do
    DatabaseCleaner.clean_with(:truncation)
    DatabaseCleaner.strategy = :transaction
  end
  # start the transaction strategy as examples are run
  config.around(:each) do |example|
    DatabaseCleaner.cleaning do
      example.run
    end
  end
end
| 39.543478 | 105 | 0.747114 |
e21f0356699f48e2d9467ff31be6a57aa6f8e76f | 196 | class AddCustodyNumberToDefenceRequest < ActiveRecord::Migration
def change
add_column :defence_requests, :custody_number, :string
add_index :defence_requests, :custody_number
end
end
| 28 | 64 | 0.806122 |
e9c49882d75e3bca685c694f6d27618e6b64511f | 644 | #
# Copyright 2015, Noah Kantrowitz
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'poise_boiler/spec_helper'
require 'poise_application'
| 33.894737 | 74 | 0.771739 |
f8d6426ad94fb5a8b239a1554490afffd887aafd | 785 | PRINT_OPERATION_FORMATTER = '%{operator} %{a} %{b}'.freeze
# Prints a one-line trace of the arithmetic operation being performed.
def log_operation(operator, a, b)
  puts format(PRINT_OPERATION_FORMATTER, operator: operator, a: a, b: b)
end

# Each operation logs itself and yields its result via implicit return
# (the trailing expression) instead of the non-idiomatic explicit `return`.
def add(a, b)
  log_operation('ADDING', a, b)
  a + b
end

def subtract(a, b)
  log_operation('SUBTRACTING', a, b)
  a - b
end

def multiply(a, b)
  log_operation('MULTIPLYING', a, b)
  a * b
end

# NOTE: integer division truncates for odd inputs and raises
# ZeroDivisionError when b is 0 (both calls below are exact).
def divide(a, b)
  log_operation('DIVIDING', a, b)
  a / b
end

puts "Let's do some math with just functions!"
age = add(30, 5)
height = subtract(78, 4)
weight = multiply(30, 2)
iq = divide(200, 2)
puts "Age: #{age}, Height: #{height}, Weight: #{weight}, IQ: #{iq}"
# evaluated inside-out: divide, multiply, subtract, then add
what = add(age, subtract(height, multiply(weight, divide(iq, 2))))
puts "That becomes: #{what}. Can you do it by hand?" | 20.657895 | 72 | 0.66242 |
bbdb1e7dd52493290828e820ba87d4c129e51949 | 2,512 | require 'wukong/encoding'
module Wuclan
  module JsonModel
    #
    # The JSON tweet records come off the wire a bit more heavyweight than we'd like.
    #
    # A sample JSON record, reformatted for clarity:
    #
    #   {
    #     "id"                    : 1012519767,
    #     "created_at"            : "Wed Nov 19 07:16:58 +0000 2008",
    #     "favorited"             : false,
    #     "truncated"             : false,
    #     "in_reply_to_user_id"   : null,
    #     "in_reply_to_status_id" : null,
    #     "text"                  : "[Our lander (RIP) had the best name. The next rover to Mars, @MarsScienceLab, needs a name. A contest for kids: http:\/\/is.gd\/85rQ ]",
    #     "source"                : "web"
    #   }
    #
    class JsonTweet < GenericJsonParser
      # the decoded JSON hash backing this tweet
      attr_accessor :raw
      # Wrap a decoded tweet hash. The owning user's id is taken from the
      # explicit +twitter_user_id+ argument when given, otherwise from the
      # embedded 'user' record; the hash is then normalized via #fix_raw!.
      def initialize raw, twitter_user_id = nil
        self.raw = raw; return unless healthy?
        if twitter_user_id
          raw['twitter_user_id'] = twitter_user_id
        elsif raw['user'] && raw['user']['id']
          raw['twitter_user_id'] = raw['user']['id']
        end
        self.fix_raw!
      end
      # Usable only when raw is a Hash; guards every other method.
      def healthy?() raw && raw.is_a?(Hash) end
      #
      #
      # Make the data easier for batch flat-record processing
      #
      # Mutates raw in place: ids are zero-padded, the created_at date is
      # flattened, booleans are flattened, and the 'text' field is encoded
      # (semantics of the ModelCommon helpers are presumed from their names —
      # confirm against ModelCommon if exact formats matter).
      def fix_raw!
        raw['id'] = ModelCommon.zeropad_id( raw['id'])
        raw['created_at'] = ModelCommon.flatten_date(raw['created_at'])
        raw['favorited'] = ModelCommon.unbooleanize(raw['favorited'])
        raw['truncated'] = ModelCommon.unbooleanize(raw['truncated'])
        raw['twitter_user_id'] = ModelCommon.zeropad_id(raw['twitter_user_id'] )
        raw['in_reply_to_user_id'] = ModelCommon.zeropad_id(raw['in_reply_to_user_id']) unless raw['in_reply_to_user_id'].blank? || (raw['in_reply_to_user_id'].to_i == 0)
        raw['in_reply_to_status_id'] = ModelCommon.zeropad_id(raw['in_reply_to_status_id']) unless raw['in_reply_to_status_id'].blank? || (raw['in_reply_to_status_id'].to_i == 0)
        Wukong.encode_components raw, 'text'
      end
      # Build a Tweet model from the normalized hash (nil when unhealthy).
      def generate_tweet
        return unless healthy?
        Tweet.from_hash(raw)
      end
      #
      # produce a partial user record from the tweet's embedded 'user' hash
      #
      def generate_user_partial
        raw_user = raw['user'] or return
        JsonTwitterUser.new(raw_user, raw['created_at']).generate_user_partial
      end
    end
  end
end
| 39.25 | 179 | 0.566083 |
ffec8ed77f6838b1245bd99a84e41cb63b0845d4 | 2,123 | # frozen_string_literal: true
# Helper method to add files to the work
#
# @return [void]
def add_files
  click_link "Files" # switch tab
  expect(page).to have_content "Add files"
  expect(page).to have_content "Add folder"
  within("span#addfiles") do
    # fixture image plus its FITS metadata, attached via the hidden file input
    attach_file("files[]", Rails.root.join("spec", "fixtures", "hyrax", "image.jp2"), visible: false)
    attach_file("files[]", Rails.root.join("spec", "fixtures", "hyrax", "jp2_fits.xml"), visible: false)
  end
end
# Helper method to set the work form visibility
#
# @param visibility [Symbol] the work visibility, :open, :restricted, :lease, :authenticated, :embargo
# @return [void]
# Click the body, then the last li on the page, which will be the last in the visibility options, to help Capybara find it
def add_visibility(visibility = :open)
  # NOTE(review): these two clicks appear to dismiss an open dropdown/overlay
  # so the radio becomes clickable — confirm; `find_all("li").last` is fragile.
  find("body").click
  find_all("li").last.click
  choose("#{work_type}_visibility_#{visibility}")
end
# Helper method to check the agreement checkbox
#
# @return [void]
def add_agreement
  within(".panel-footer") do
    # Fix: a bare `have_selector(...)` builds a matcher but never applies it,
    # so it neither waited nor asserted; apply it so Capybara waits up to 5s
    # for the checkbox before checking it.
    expect(page).to have_selector("#agreement", wait: 5)
    find("#agreement").check
    ss
  end
end
# Helper method to submit the form
#
# @return [void]
def submit
  within(".panel-footer") do
    # Fix: a bare `have_selector(...)` builds a matcher but never applies it,
    # so it neither waited nor asserted; apply it so Capybara waits up to 5s
    # for the save button before clicking it.
    expect(page).to have_selector("[name=save_with_files]", wait: 5)
    find("[name=save_with_files]").click
    ss
  end
end
# Get the actual work from the URL param
def work_uuid_from_url
  # the work's UUID is the final segment of the current URL's path
  URI.parse(page.current_url).path.split("/").last
end
# Helper method to save a screenshot. Screenshots will be saved to spec/internal_test_hyku/tmp/capybara
#
# @return [void]
def ss
  # shorthand used throughout these helpers after notable UI interactions
  page.save_screenshot
end
# Scroll the page by a given amount
#
# @param by [Integer] the amount to scroll by
# @return [void]
def scroll(by = 1000)
  # positive `by` scrolls the window down, negative scrolls up
  page.execute_script "window.scrollBy(0,#{by})"
end
# Convert from zero-prefixed to non-zero-prefixed date parts
#
# @param value [Array, Hash] the Hash or Array of hashes to be converted
# @return [Array]
def normalize_date(value)
  # Wrap a single Hash explicitly rather than via ActiveSupport's Array.wrap:
  # plain Array() would wrongly explode a Hash into key/value pairs.
  dates = value.nil? ? [] : (value.is_a?(Array) ? value : [value])
  dates.map do |date|
    # hash values are ordered year, month, day; to_i drops any zero padding
    date = Date.new(*date.values.map(&:to_i))
    [date.year, date.month, date.day].join("-")
  end
end
| 25.890244 | 122 | 0.708902 |
26fdeca8a3dd2c227b9974416099df588ffaf83c | 542 | =begin
* Created by PSU Beeminder Capstone Team on 3/12/2017.
* Copyright (c) 2017 PSU Beeminder Capstone Team
* This code is available under the "MIT License".
* Please see the file LICENSE in this distribution for license terms.
=end
# Metric: how many accounts this user follows (their "followings" count).
PROVIDERS.fetch(:twitter).register_metric :total_followings do |metric|
  metric.description = "The number of users this account is following (AKA their “followings”)."
  metric.title = "Followings Count"
  # invoked with the provider adapter at fetch time; wraps the current count
  metric.block = proc do |adapter|
    Datapoint.new(value: adapter.fetch_friends)
  end
end
| 30.111111 | 96 | 0.750923 |
61218a95add0faf60a8de05944d6155cdd6433c9 | 5,214 | # frozen_string_literal: true
# Devise registrations controller with GitLab-specific sign-up behavior:
# reCAPTCHA verification, pending-invitation acceptance, terms-of-service
# enforcement, admin-approval gating for new accounts, and a confirmed
# two-step account deletion.
class RegistrationsController < Devise::RegistrationsController
  include Recaptcha::Verify
  include AcceptsPendingInvitations
  include RecaptchaHelper
  include InvisibleCaptchaOnSignup
  layout 'devise'
  # check_captcha is prepended so it runs before Devise's own create chain.
  prepend_before_action :check_captcha, only: :create
  before_action :ensure_destroy_prerequisites_met, only: [:destroy]
  before_action :load_recaptcha, only: :new
  before_action :set_invite_params, only: :new
  feature_category :authentication_and_authorization
  def new
    @resource = build_resource
  end
  # Signs the user up. Extra steps (terms, role requirement, admin-approval
  # notification, EE hook) run inside Devise's #create via the block so they
  # only touch the newly built resource.
  def create
    set_user_state
    accept_pending_invitations
    super do |new_user|
      persist_accepted_terms_if_required(new_user)
      set_role_required(new_user)
      if pending_approval?
        NotificationService.new.new_instance_access_request(new_user)
      end
      after_request_hook(new_user)
      yield new_user if block_given?
    end
    # Devise sets a flash message on both successful & failed signups,
    # but we only want to show a message if the resource is blocked by a pending approval.
    flash[:notice] = nil unless resource.blocked_pending_approval?
  rescue Gitlab::Access::AccessDeniedError
    redirect_to(new_user_session_path)
  end
  # Schedules asynchronous deletion of the current user once the deletion
  # confirmation (password or typed username) has been validated.
  def destroy
    if destroy_confirmation_valid?
      current_user.delete_async(deleted_by: current_user)
      session.try(:destroy)
      redirect_to new_user_session_path, status: :see_other, notice: s_('Profiles|Account scheduled for removal.')
    else
      redirect_to profile_account_path, status: :see_other, alert: destroy_confirmation_failure_message
    end
  end
  protected
  # Records acceptance of the latest terms for a persisted user when the
  # instance enforces terms of service.
  def persist_accepted_terms_if_required(new_user)
    return unless new_user.persisted?
    return unless Gitlab::CurrentSettings.current_application_settings.enforce_terms?
    terms = ApplicationSetting::Term.latest
    Users::RespondToTermsService.new(new_user, terms).execute(accepted: true)
  end
  def set_role_required(new_user)
    new_user.set_role_required! if new_user.persisted?
  end
  # Deletion is confirmed either by password or by re-typing the username,
  # depending on how the account authenticates.
  def destroy_confirmation_valid?
    if current_user.confirm_deletion_with_password?
      current_user.valid_password?(params[:password])
    else
      current_user.username == params[:username]
    end
  end
  def destroy_confirmation_failure_message
    if current_user.confirm_deletion_with_password?
      s_('Profiles|Invalid password')
    else
      s_('Profiles|Invalid username')
    end
  end
  def build_resource(hash = nil)
    super
  end
  # No-op hook; overridden by the EE module prepended at the bottom of the file.
  def after_request_hook(user)
    # overridden by EE module
  end
  def after_sign_up_path_for(user)
    Gitlab::AppLogger.info(user_created_message(confirmed: user.confirmed?))
    users_sign_up_welcome_path
  end
  def after_inactive_sign_up_path_for(resource)
    Gitlab::AppLogger.info(user_created_message)
    return new_user_session_path(anchor: 'login-pane') if resource.blocked_pending_approval?
    Feature.enabled?(:soft_email_confirmation) ? dashboard_projects_path : users_almost_there_path
  end
  private
  # A user who is the sole owner of any group must transfer or delete those
  # groups before the account can be removed.
  def ensure_destroy_prerequisites_met
    if current_user.solo_owned_groups.present?
      redirect_to profile_account_path,
                  status: :see_other,
                  alert: s_('Profiles|You must transfer ownership or delete groups you are an owner of before you can delete your account')
    end
  end
  def user_created_message(confirmed: false)
    "User Created: username=#{resource.username} email=#{resource.email} ip=#{request.remote_ip} confirmed:#{confirmed}"
  end
  def ensure_correct_params!
    # To avoid duplicate form fields on the login page, the registration form
    # names fields using `new_user`, but Devise still wants the params in
    # `user`.
    if params["new_#{resource_name}"].present? && params[resource_name].blank?
      params[resource_name] = params.delete(:"new_#{resource_name}")
    end
  end
  # Re-renders the signup form when reCAPTCHA verification fails.
  def check_captcha
    ensure_correct_params!
    return unless show_recaptcha_sign_up?
    return unless Gitlab::Recaptcha.load_configurations!
    return if verify_recaptcha
    flash[:alert] = _('There was an error with the reCAPTCHA. Please solve the reCAPTCHA again.')
    flash.delete :recaptcha_error
    render action: 'new'
  end
  def pending_approval?
    return false unless Gitlab::CurrentSettings.require_admin_approval_after_user_signup
    resource.persisted? && resource.blocked_pending_approval?
  end
  def sign_up_params
    params.require(:user).permit(:username, :email, :name, :first_name, :last_name, :password)
  end
  def resource_name
    :user
  end
  def resource
    @resource ||= Users::BuildService.new(current_user, sign_up_params).execute
  end
  def devise_mapping
    @devise_mapping ||= Devise.mappings[:user]
  end
  def load_recaptcha
    Gitlab::Recaptcha.load_configurations!
  end
  # New signups start in the blocked state when admin approval is required.
  def set_user_state
    return unless set_blocked_pending_approval?
    resource.state = User::BLOCKED_PENDING_APPROVAL_STATE
  end
  def set_blocked_pending_approval?
    Gitlab::CurrentSettings.require_admin_approval_after_user_signup
  end
  def set_invite_params
    @invite_email = ActionController::Base.helpers.sanitize(params[:invite_email])
  end
end
RegistrationsController.prepend_if_ee('EE::RegistrationsController')
| 28.183784 | 129 | 0.761795 |
b92847fbef2ead10a25369403c91c1507b96b109 | 796 | require 'utils/rust'
module SortingAlgorithms
  ##
  # Quicksort implemented in Rust and exposed to Ruby through FFI.
  # Mix into an Array-like class; #rustsort returns a new sorted Array.
  module Rustsort
    include Utils::Rust

    ##
    # Bind the exported Rust symbol `rustsort` as the Ruby method
    # `rust_sort(pointer, length) -> RustArray`.
    attach_function :rust_sort,          # Ruby-side name
                    :rustsort,           # exported Rust symbol
                    [:pointer, :size_t], # argument types
                    RustArray.by_value   # return type

    ##
    # Copies the receiver's elements into a native int32 buffer, hands the
    # buffer to Rust for sorting, and converts the result back to an Array.
    def rustsort
      elements = dup
      native = FFI::MemoryPointer.new(:int32, elements.size)
      native.write_array_of_int32(elements)
      rust_sort(native, elements.size).to_a
    end
  end
end
| 29.481481 | 64 | 0.613065 |
ff821ccb320be68708e6d1d180f6d0b19c31d0da | 6,509 | require File.join(File.dirname(__FILE__), '..', 'vcsrepo')
Puppet::Type.type(:vcsrepo).provide(:p4, parent: Puppet::Provider::Vcsrepo) do
desc 'Supports Perforce depots'
has_features :filesystem_types, :reference_tracking, :p4config
def create
check_force
# create or update client
create_client(client_name)
# if source provided, sync client
source = @resource.value(:source)
if source
revision = @resource.value(:revision)
sync_client(source, revision)
end
update_owner
end
def working_copy_exists?
# Check if the server is there, or raise error
p4(['info'], marshal: false)
# Check if workspace is setup
args = ['where']
args.push(@resource.value(:path) + '/...')
hash = p4(args, raise: false)
(hash['code'] != 'error')
end
def exists?
working_copy_exists?
end
def destroy
args = ['client']
args.push('-d', '-f')
args.push(client_name)
p4(args)
FileUtils.rm_rf(@resource.value(:path))
end
def latest?
rev = revision
if rev
(rev >= latest)
else
true
end
end
def latest
args = ['changes']
args.push('-m1', @resource.value(:source))
hash = p4(args)
hash['change'].to_i
end
def revision
args = ['cstat']
args.push(@resource.value(:source))
hash = p4(args, marshal: false)
hash = marshal_cstat(hash)
revision = 0
if hash && hash['code'] != 'error'
hash['data'].each do |c|
if c['status'] == 'have'
change = c['change'].to_i
revision = change if change > revision
end
end
end
revision
end
def revision=(desired)
sync_client(@resource.value(:source), desired)
update_owner
end
def source
args = ['where']
args.push(@resource.value(:path) + '/...')
hash = p4(args, raise: false)
hash['depotFile']
end
def source=(_desired)
create # recreate
end
private
def update_owner
set_ownership if @resource.value(:owner) || @resource.value(:group)
end
# Sync the client workspace files to head or specified revision.
# Params:
# +source+:: Depot path to sync
# +revision+:: Perforce change list to sync to (optional)
def sync_client(source, revision)
Puppet.debug "Syncing: #{source}"
args = ['sync']
if revision
args.push(source + "@#{revision}")
else
args.push(source)
end
p4(args)
end
# Returns the name of the Perforce client workspace
def client_name
p4config = @resource.value(:p4config)
# default (generated) client name
path = @resource.value(:path)
host = Facter.value('hostname')
default = 'puppet-' + Digest::MD5.hexdigest(path + host)
# check config for client name
set_client = nil
if p4config && File.file?(p4config)
open(p4config) do |f|
m = f.grep(%r{^P4CLIENT=}).pop
p = %r{^P4CLIENT=(.*)$}
set_client = p.match(m)[1] if m
end
end
set_client || ENV['P4CLIENT'] || default
end
# Create (or update) a client workspace spec.
# If a client name is not provided then a hash based on the path is used.
# Params:
# +client+:: Name of client workspace
# +path+:: The Root location of the Perforce client workspace
def create_client(client)
Puppet.debug "Creating client: #{client}"
# fetch client spec
hash = parse_client(client)
hash['Root'] = @resource.value(:path)
hash['Description'] = 'Generated by Puppet VCSrepo'
# check is source is a Stream
source = @resource.value(:source)
if source
parts = source.split(%r{/})
if parts && parts.length >= 4
source = '//' + parts[2] + '/' + parts[3]
streams = p4(['streams', source], raise: false)
if streams['code'] == 'stat'
hash['Stream'] = streams['Stream']
notice 'Streams' + streams['Stream'].inspect
end
end
end
# save client spec
save_client(hash)
end
# Fetches a client workspace spec from Perforce and returns a hash map representation.
# Params:
# +client+:: name of the client workspace
def parse_client(client)
args = ['client']
args.push('-o', client)
hash = p4(args)
hash
end
# Saves the client workspace spec from the given hash
# Params:
# +hash+:: hash map of client spec
def save_client(hash)
spec = ''
view = "\nView:\n"
hash.keys.sort.each do |k|
v = hash[k]
next if k == 'code'
if k.to_s =~ %r{View}
view += "\t#{v}\n"
else
spec += "#{k}: #{v}\n"
end
end
spec += view
args = ['client']
args.push('-i')
p4(args, input: spec, marshal: false)
end
# Sets Perforce Configuration environment.
# P4CLIENT generated, but overwitten if defined in config.
def config
p4config = @resource.value(:p4config)
cfg = {}
cfg.store 'P4CONFIG', p4config if p4config
cfg.store 'P4CLIENT', client_name
cfg
end
def p4(args, options = {})
# Merge custom options with defaults
opts = {
raise: true, # Raise errors
marshal: true, # Marshal output
}.merge(options)
cmd = ['p4']
cmd.push '-R' if opts[:marshal]
cmd.push args
cmd_str = cmd.respond_to?(:join) ? cmd.join(' ') : cmd
Puppet.debug "environment: #{config}"
Puppet.debug "command: #{cmd_str}"
hash = {}
Open3.popen3(config, cmd_str) do |i, o, e, t|
# Send input stream if provided
if opts[:input]
Puppet.debug "input:\n" + opts[:input]
i.write opts[:input]
i.close
end
if opts[:marshal]
hash = Marshal.dump(o)
else
hash['data'] = o.read
end
# Raise errors, Perforce or Exec
if opts[:raise] && !e.eof && t.value != 0
raise Puppet::Error, "\nP4: #{e.read}"
end
if opts[:raise] && hash['code'] == 'error' && t.value != 0
raise Puppet::Error, "\nP4: #{hash['data']}"
end
end
Puppet.debug "hash: #{hash}\n"
hash
end
# helper method as cstat does not Marshal
def marshal_cstat(hash)
data = hash['data']
code = 'error'
list = []
change = {}
data.each_line do |l|
p = %r{^\.\.\. (.*) (.*)$}
m = p.match(l)
next unless m
change[m[1]] = m[2]
next unless m[1] == 'status'
code = 'stat'
list.push change
change = {}
end
hash = {}
hash.store 'code', code
hash.store 'data', list
hash
end
end
| 22.838596 | 88 | 0.587494 |
1db4a5e923cdd5870477baea56a514af37a786c4 | 270 | rule "FC095", "Cookbook uses deprecated cloud_v2 ohai plugin data" do
tags %w{deprecated chef14}
recipe do |ast|
ast.xpath('//aref[vcall/ident/@value="node"]
/args_add_block/args_add/string_literal/string_add/tstring_content[@value="cloud_v2"]')
end
end
| 33.75 | 93 | 0.733333 |
113502b4e3c2025c9a1c1f0f02ad8c7e905d15bc | 3,481 | # frozen_string_literal: true
module Gitlab
module ImportExport
module Project
class TreeRestorer
include Gitlab::Utils::StrongMemoize
attr_reader :user
attr_reader :shared
attr_reader :project
def initialize(user:, shared:, project:)
@user = user
@shared = shared
@project = project
end
def restore
unless relation_reader
raise Gitlab::ImportExport::Error, 'invalid import format'
end
@project_attributes = relation_reader.consume_attributes(importable_path)
@project_members = relation_reader.consume_relation(importable_path, 'project_members')
.map(&:first)
# ensure users are mapped before tree restoration
# so that even if there is no content to associate
# users with, they are still added to the project
members_mapper.map
if relation_tree_restorer.restore
import_failure_service.with_retry(action: 'set_latest_merge_request_diff_ids!') do
@project.merge_requests.set_latest_merge_request_diff_ids!
end
true
else
false
end
rescue StandardError => e
@shared.error(e)
false
end
private
def relation_reader
strong_memoize(:relation_reader) do
[ndjson_relation_reader, legacy_relation_reader]
.compact.find(&:exist?)
end
end
def ndjson_relation_reader
return unless Feature.enabled?(:project_import_ndjson, project.namespace, default_enabled: true)
ImportExport::JSON::NdjsonReader.new(
File.join(shared.export_path, 'tree')
)
end
def legacy_relation_reader
ImportExport::JSON::LegacyReader::File.new(
File.join(shared.export_path, 'project.json'),
relation_names: reader.project_relation_names,
allowed_path: importable_path
)
end
def relation_tree_restorer
@relation_tree_restorer ||= relation_tree_restorer_class.new(
user: @user,
shared: @shared,
relation_reader: relation_reader,
object_builder: object_builder,
members_mapper: members_mapper,
relation_factory: relation_factory,
reader: reader,
importable: @project,
importable_attributes: @project_attributes,
importable_path: importable_path
)
end
def relation_tree_restorer_class
RelationTreeRestorer
end
def members_mapper
@members_mapper ||= Gitlab::ImportExport::MembersMapper.new(exported_members: @project_members,
user: @user,
importable: @project)
end
def object_builder
Project::ObjectBuilder
end
def relation_factory
Project::RelationFactory
end
def reader
@reader ||= Gitlab::ImportExport::Reader.new(shared: @shared)
end
def import_failure_service
@import_failure_service ||= ImportFailureService.new(@project)
end
def importable_path
"project"
end
end
end
end
end
| 29.008333 | 106 | 0.58374 |
bfe711ed513bc909f614e9918a808d4820df6579 | 220 | class ChangeSyncSourceIdOnGpCalendarEvents < ActiveRecord::Migration[4.2]
def up
change_column :gp_calendar_events, :sync_source_id, :string
end
def down
fail ActiveRecord::IrreversibleMigration
end
end
| 22 | 73 | 0.786364 |
28c94455c2a024800e0f6d6d74e32e13dcc7a0b7 | 5,177 | require_relative '../zabbix'
Puppet::Type.type(:zabbix_host).provide(:ruby, parent: Puppet::Provider::Zabbix) do
confine feature: :zabbixapi
def self.instances
proxies = zbx.proxies.all
api_hosts = zbx.query(
method: 'host.get',
params: {
selectParentTemplates: ['host'],
selectInterfaces: %w[interfaceid type main ip port useip],
selectGroups: ['name'],
selectMacros: %w[macro value],
output: %w[host proxy_hostid]
}
)
api_hosts.map do |h|
interface = h['interfaces'].select { |i| i['type'].to_i == 1 && i['main'].to_i == 1 }.first
use_ip = !interface['useip'].to_i.zero?
new(
ensure: :present,
id: h['hostid'].to_i,
name: h['host'],
interfaceid: interface['interfaceid'].to_i,
ipaddress: interface['ip'],
use_ip: use_ip,
port: interface['port'].to_i,
groups: h['groups'].map { |g| g['name'] },
group_create: nil,
templates: h['parentTemplates'].map { |x| x['host'] },
macros: h['macros'].map { |macro| { macro['macro'] => macro['value'] } },
proxy: proxies.select { |_name, id| id == h['proxy_hostid'] }.keys.first
)
end
end
def self.prefetch(resources)
instances.each do |prov|
if (resource = resources[prov.name])
resource.provider = prov
end
end
end
def create
template_ids = get_templateids(@resource[:templates])
templates = transform_to_array_hash('templateid', template_ids)
gids = get_groupids(@resource[:groups], @resource[:group_create])
groups = transform_to_array_hash('groupid', gids)
proxy_hostid = @resource[:proxy].nil? || @resource[:proxy].empty? ? nil : zbx.proxies.get_id(host: @resource[:proxy])
# Now we create the host
zbx.hosts.create(
host: @resource[:hostname],
proxy_hostid: proxy_hostid,
interfaces: [
{
type: 1,
main: 1,
ip: @resource[:ipaddress],
dns: @resource[:hostname],
port: @resource[:port],
useip: @resource[:use_ip] ? 1 : 0
}
],
templates: templates,
groups: groups
)
end
def exists?
@property_hash[:ensure] == :present
end
def destroy
zbx.hosts.delete(zbx.hosts.get_id(host: @resource[:hostname]))
end
#
# Helper methods
#
def get_groupids(group_array, create)
groupids = []
group_array.each do |g|
id = zbx.hostgroups.get_id(name: g)
if id.nil?
raise Puppet::Error, 'The hostgroup (' + g + ') does not exist in zabbix. Please use the correct one or set group_create => true.' unless create
groupids << zbx.hostgroups.create(name: g)
else
groupids << id
end
end
groupids
end
def get_templateids(template_array)
templateids = []
template_array.each do |t|
template_id = zbx.templates.get_id(host: t)
raise Puppet::Error, "The template #{t} does not exist in Zabbix. Please use a correct one." if template_id.nil?
templateids << template_id
end
templateids
end
#
# zabbix_host properties
#
mk_resource_methods
def ipaddress=(string)
zbx.query(
method: 'hostinterface.update',
params: {
interfaceid: @property_hash[:interfaceid],
ip: string
}
)
end
def use_ip=(boolean)
zbx.query(
method: 'hostinterface.update',
params: {
interfaceid: @property_hash[:interfaceid],
useip: boolean ? 1 : 0,
dns: @resource[:hostname]
}
)
end
def port=(int)
zbx.query(
method: 'hostinterface.update',
params: {
interfaceid: @property_hash[:interfaceid],
port: int
}
)
end
def groups=(hostgroups)
gids = get_groupids(hostgroups, @resource[:group_create])
groups = transform_to_array_hash('groupid', gids)
zbx.hosts.create_or_update(
host: @resource[:hostname],
groups: groups
)
end
def templates=(array)
should_template_ids = get_templateids(array)
# Get templates we have to clear. Unlinking only isn't really helpful.
is_template_ids = zbx.query(
method: 'host.get',
params: {
hostids: @property_hash[:id],
selectParentTemplates: ['templateid'],
output: ['host']
}
).first['parentTemplates'].map { |t| t['templateid'].to_i }
templates_clear = is_template_ids - should_template_ids
zbx.query(
method: 'host.update',
params: {
hostid: @property_hash[:id],
templates: transform_to_array_hash('templateid', should_template_ids),
templates_clear: transform_to_array_hash('templateid', templates_clear)
}
)
end
def macros=(array)
macroarray = array.map { |macro| { 'macro' => macro.first[0], 'value' => macro.first[1] } }
zbx.query(
method: 'host.update',
params: {
hostid: @property_hash[:id],
macros: macroarray
}
)
end
def proxy=(string)
zbx.hosts.create_or_update(
host: @resource[:hostname],
proxy_hostid: zbx.proxies.get_id(host: string)
)
end
end
| 26.279188 | 152 | 0.603825 |
08d27079db746a638d4333ff9613eb7cbf503068 | 805 | # frozen_string_literal: true
Rails.application.routes.draw do
  get 'attendances/event_schedule'

  get '/signup', to: 'users#new'
  post '/signup', to: 'users#create'
  get '/profile', to: 'users#show'

  root 'static_pages#home'

  get '/events', to: 'events#show'
  get '/create', to: 'events#new'
  post '/create', to: 'events#create'
  get '/index', to: 'events#index'

  # Standard RESTful event routes plus custom member actions for
  # scheduling/unscheduling attendance on a single event.
  resources :events do
    member do
      post :schedule_event
      post :remove_event
    end
  end

  get '/login', to: 'sessions#new'
  post '/login', to: 'sessions#create'
  delete '/logout', to: 'sessions#destroy'

  resources :users
  # NOTE: a second, redundant `resources :events` declaration was removed;
  # the block above already defines every standard event route and Rails
  # matches routes in file order, so the duplicate could never win.
  resources :attendances, except: %i[create new]
  # For details on the DSL available within this file, see http://guides.rubyonrails.org/routing.html
end
| 23.676471 | 101 | 0.667081 |
1cacde14a6e4b789e4cb12ea31eff7bb31d93720 | 1,222 | require 'spec_helper'
require 'webmock_helper'
describe Nem::Endpoint::Local::Account do
shared_examples 'call endpoint methods' do
let(:webmock) { true }
let(:node) { Nem::Node.new }
let(:endpoint) { described_class.new(node) }
before { WebMock.disable! unless webmock }
after { WebMock.enable! }
describe '#transfers_incoming' do
subject { endpoint.transfers_incoming(
'68e4f79f886927de698df4f857de2aada41ccca6617e56bb0d61623b35b08cc0'
) }
it { expect(subject).to be_a Array }
end
describe '#transfers_outgoing' do
subject { endpoint.transfers_outgoing(
'68e4f79f886927de698df4f857de2aada41ccca6617e56bb0d61623b35b08cc0'
) }
it { expect(subject).to be_a Array }
end
describe '#transfers_all' do
subject { endpoint.transfers_all(
'68e4f79f886927de698df4f857de2aada41ccca6617e56bb0d61623b35b08cc0'
) }
it { expect(subject).to be_a Array }
end
end
context 'webmock' do
it_behaves_like 'call endpoint methods'
end
if ENV['remote'] == 'enable'
context 'remote node' do
it_behaves_like 'call endpoint methods' do
let(:webmock) { false }
end
end
end
end
| 26 | 74 | 0.679214 |
39920901a21dd20bbe65a1646073c529b2e44cd1 | 952 | cask 'displaycal' do
version '3.2.2.0'
sha256 '9544f33d698747c385cd9d0a6eb69a2ec7e498de33ef9ee2075c8235bdada93c'
# sourceforge.net/dispcalgui was verified as official when first introduced to the cask
url "https://downloads.sourceforge.net/dispcalgui/release/#{version}/DisplayCAL-#{version}.dmg"
appcast 'https://sourceforge.net/projects/dispcalgui/rss?path=/release',
checkpoint: '5417daad12c7f1e2e710524ef6755a30262abc17ade327f620846b0d2bf3ffaf'
name 'DisplayCAL'
homepage 'https://displaycal.net/'
depends_on formula: 'argyll-cms'
suite 'DisplayCAL'
preflight do
# There is no sub-folder in the DMG; the root *is* the folder
FileUtils.mv(staged_path.children, staged_path.join('DisplayCAL').tap(&:mkpath))
end
zap delete: [
'~/Library/Application Support/dispcalGUI',
'~/Library/Logs/dispcalGUI',
'~/Library/Preferences/dispcalGUI',
]
end
| 35.259259 | 97 | 0.710084 |
e9785c5f7d9896ec5a0c7c6388d17d62257c3b23 | 1,101 | require_relative '../lib/responder'
# Responder that triggers a GitHub Actions workflow when the bot receives
# the configured command in an issue comment.
class GithubActionResponder < Responder
  keyname :github_action

  # Listens for newly created issue comments matching "@<bot> <command>".
  def define_listening
    required_params :workflow_repo, :workflow_name, :command
    @event_action = "issue_comment.created"
    @event_regex = /\A@#{bot_name} #{command}\.?\s*\z/i
  end

  # Assembles the workflow inputs from three sources — static :inputs,
  # values extracted from the issue (:data_from_issue), and renamed issue
  # values (:mapping) — then dispatches the workflow on :ref (default
  # "main") and optionally posts a confirmation message.
  def process_message(message)
    static_inputs = params[:inputs] || {}
    issue_fields = params[:data_from_issue] || []
    key_renames = params[:mapping] || {}
    target_ref = params[:ref] || "main"

    issue_inputs = issue_fields.each_with_object({}) do |field, collected|
      collected[field] = locals[field].to_s
    end

    key_renames.each_pair do |new_key, old_key|
      issue_inputs[new_key] = locals[old_key].to_s
      issue_inputs.delete(old_key)
    end

    workflow_inputs = static_inputs.merge(issue_inputs)

    if trigger_workflow(workflow_repo, workflow_name, workflow_inputs, target_ref)
      respond(params[:message]) if params[:message]
    end
  end

  def description
    params[:description] || "Runs a GitHub workflow"
  end

  def example_invocation
    "@#{bot_name} #{command}"
  end
end
| 24.466667 | 73 | 0.687557 |
ffb2557b01d982797c8803b284fdb12a24c76f40 | 1,348 | # frozen_string_literal: true
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
gem "google-cloud-core"
require "google/cloud" unless defined? Google::Cloud.new
require "google/cloud/config"
require "googleauth"
# Set the default configuration
Google::Cloud.configure.add_config! :language do |config|
config.add_field! :credentials, nil, match: [String, Hash, Google::Auth::Credentials]
config.add_field! :lib_name, nil, match: String
config.add_field! :lib_version, nil, match: String
config.add_field! :interceptors, nil, match: Array
config.add_field! :timeout, nil, match: Numeric
config.add_field! :metadata, nil, match: Hash
config.add_field! :retry_policy, nil, match: [Hash, Proc]
end
require "google/cloud/language/version"
| 37.444444 | 88 | 0.752967 |
f7b5455d657c8e4b5f4f072001101529a0396ac1 | 42 | module DoViewTool
VERSION = "0.1.0"
end
| 10.5 | 19 | 0.690476 |
333184d80ca50f8578d8d0469b581e9ccdc394c2 | 4,579 | #-- encoding: UTF-8
#-- copyright
# OpenProject is an open source project management software.
# Copyright (C) 2012-2021 the OpenProject GmbH
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2013 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See docs/COPYRIGHT.rdoc for more details.
#++
module ::TypesHelper
def types_tabs
[
{
name: 'settings',
partial: 'types/form/settings',
path: edit_type_tab_path(id: @type.id, tab: :settings),
label: 'types.edit.settings'
},
{
name: 'form_configuration',
partial: 'types/form/form_configuration',
path: edit_type_tab_path(id: @type.id, tab: :form_configuration),
label: 'types.edit.form_configuration'
},
{
name: 'projects',
partial: 'types/form/projects',
path: edit_type_tab_path(id: @type.id, tab: :projects),
label: 'types.edit.projects'
}
]
end
def icon_for_type(type)
return unless type
css_class = if type.is_milestone?
'color--milestone-icon'
else
'color--phase-icon'
end
color = if type.color.present?
type.color.hexcode
else
'#CCC'
end
content_tag(:span, ' ',
class: css_class,
style: "background-color: #{color}")
end
##
# Collect active and inactive form configuration groups for editing.
def form_configuration_groups(type)
available = type.work_package_attributes
# First we create a complete list of all attributes.
# Later we will remove those that are members of an attribute group.
# This way attributes that were created after the las group definitions
# will fall back into the inactives group.
inactive = available.clone
active_form = get_active_groups(type, available, inactive)
inactive_form = inactive
.map { |key, attribute| attr_form_map(key, attribute) }
.sort_by { |attr| attr[:translation] }
{
actives: active_form,
inactives: inactive_form
}
end
def active_group_attributes_map(group, available, inactive)
return nil unless group.group_type == :attribute
group.attributes
.select { |key| inactive.delete(key) }
.map! { |key| attr_form_map(key, available[key]) }
end
def query_to_query_props(group)
return nil unless group.group_type == :query
# Modify the hash to match Rails array based +to_query+ transforms:
# e.g., { columns: [1,2] }.to_query == "columns[]=1&columns[]=2" (unescaped)
# The frontend will do that IFF the hash key is an array
::API::V3::Queries::QueryParamsRepresenter.new(group.attributes).to_json
end
private
##
# Collect active attributes from the current form configuration.
# Using the available attributes from +work_package_attributes+,
# determines which attributes are not used
def get_active_groups(type, available, inactive)
type.attribute_groups.map do |group|
{
type: group.group_type,
name: group.translated_key,
attributes: active_group_attributes_map(group, available, inactive),
query: query_to_query_props(group)
}.tap do |group_obj|
group_obj[:key] = group.key if group.internal_key?
end
end
end
def attr_form_map(key, represented)
{
key: key,
is_cf: CustomField.custom_field_attribute?(key),
is_required: represented[:required] && !represented[:has_default],
translation: Type.translated_attribute_name(key, represented)
}
end
end
| 32.475177 | 91 | 0.668705 |
6aac015ddc7d44de3500531423ad34ded070c4fd | 1,286 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20161020061951) do
create_table "posts", force: :cascade, options: "ENGINE=InnoDB DEFAULT CHARSET=utf8mb4" do |t|
t.integer "user_id"
t.string "title"
t.text "content"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "users", force: :cascade, options: "ENGINE=InnoDB DEFAULT CHARSET=utf8mb4" do |t|
t.string "name"
t.integer "gender"
t.string "phone"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
end
| 40.1875 | 96 | 0.747278 |
2674cd8e6a08dba61c5b7f91183312602c69a5c1 | 2,555 | require 'test_helper'
# Verifies InstanceAgent::Config default values, its sharing of state with
# ProcessManager::Config, and the validation of the :children setting.
class InstanceAgentConfigTest < InstanceAgentTestCase
  context 'The instance agent configuration' do
    should 'have a default configuration' do
      InstanceAgent::Config.init
      # NOTE: the original expectation listed :wait_between_runs twice; a
      # duplicated literal hash key triggers a Ruby warning and the second
      # entry silently overwrites the first, so it appears only once here.
      assert_equal({
        :wait_between_spawning_children => 1,
        :log_dir => nil,
        :pid_dir => nil,
        :shared_dir => nil,
        :user => nil,
        :group => nil,
        :program_name => "codedeploy-agent",
        :wait_after_throttle_error => 60,
        :wait_between_runs => 30,
        :verbose => false,
        :config_file => nil,
        :wait_after_connection_problem => 5,
        :children => 1,
        :max_runs_per_worker => 0,
        :http_read_timeout => 80,
        :instance_service_region => nil,
        :instance_service_endpoint => nil,
        :instance_service_port => nil,
        :wait_after_error => 30,
        :codedeploy_test_profile => 'prod',
        :on_premises_config_file => '/etc/codedeploy-agent/conf/codedeploy.onpremises.yml',
        :proxy_uri => nil,
        :enable_deployments_log => true
      }, InstanceAgent::Config.config)
    end

    should 'be the same as the ProcessManager configuration for the current object' do
      config = InstanceAgent::Config.instance_variable_get(:@config)
      assert_equal config, ProcessManager::Config.instance_variable_get(:@config)
    end

    should 'execute all available validation methods' do
      validations = sequence('validation')
      err = []
      InstanceAgent::Config.any_instance.expects(:validate_children).with(err).in_sequence(validations)
      InstanceAgent::Config.validate_config
    end

    context 'validate configuration' do
      setup do
        InstanceAgent::Config.config[:instance_service_region] = 'eu-west-1'
        InstanceAgent::Config.config[:instance_service_endpoint] = 'api-endpoint.example.com'
        InstanceAgent::Config.config[:instance_service_port] = 123
      end

      should 'validate the children setting' do
        InstanceAgent::Config.config[:children] = nil
        puts InstanceAgent::Config.config.inspect
        assert_equal 'children can only be set to 1', InstanceAgent::Config.validate_config.pop
        InstanceAgent::Config.config[:children] = 2
        assert_equal 'children can only be set to 1', InstanceAgent::Config.validate_config.pop
        InstanceAgent::Config.config[:children] = 1
        assert InstanceAgent::Config.validate_config.empty?, InstanceAgent::Config.validate_config.inspect
      end
    end
  end
end
| 37.573529 | 106 | 0.673581 |
1c168ea262d5011356312abe44afdd3349a245c8 | 1,908 | # This file should contain all the record creation needed to seed the database with its default values.
# The data can then be loaded with the rails db:seed command (or created alongside the database with db:setup).
#
# Examples:
#
# movies = Movie.create([{ name: 'Star Wars' }, { name: 'Lord of the Rings' }])
# Character.create(name: 'Luke', movie: movies.first)
seeded_users = [
{
name: 'admin',
email: '[email protected]',
password: 'admin_password',
password_confirmation: 'admin_password',
activated: true,
activated_at: Time.zone.now
}
]
seeded_sampa = {
name: 'Default',
phonemes: "a ai ai: au au: a: c c_h ei ei: f h i i: j k k_h l l_0 m m_0 n n_0 ou ou: p p_h r r_0 s t t_h u u: v x C D N N_0 9 9Y 9Y: 9: O Oi O: E E: G I I: J J_0 Y Yi Y: T".split.uniq.join(' ')
}
#
# Create models
#
seeded_users.each do |a_user|
user = User.find_by(email: a_user[:email])
unless user
user = User.create!(name: a_user[:name],
email: a_user[:email],
password: a_user[:password],
password_digest: User.digest(a_user[:password]),
activated: a_user[:activated],
activated_at: a_user[:activated_at])
end
Current.user = user
end
sampa = Sampa.find_by(name: seeded_sampa[:name])
unless sampa
sampa = Sampa.create!(name: seeded_sampa[:name],
phonemes: seeded_sampa[:phonemes])
end
test_dict = Rails.root.join('sample-data', 'pedi_extended_test.csv')
dict_name = 'TestDict'
a_dict = Dictionary.find_by(name: dict_name)
unless a_dict
uf = ActionDispatch::Http::UploadedFile.new(filename: 'pedi_extended_test.csv', type: 'text/plain', tempfile: test_dict)
Dictionary.create!(name: dict_name, sampa_id: sampa.id, import_data: uf)
end
| 34.690909 | 252 | 0.616876 |
bfef87e2bed857b36fa663833bf2b4dcfeb6619c | 246 | class AddAttachmentVideoAttachmentToLessons < ActiveRecord::Migration
def self.up
change_table :lessons do |t|
t.attachment :video_attachment
end
end
def self.down
drop_attached_file :lessons, :video_attachment
end
end
| 20.5 | 69 | 0.752033 |
616ab537431c305b0ed5ed4b37539be9010338fd | 521 | module Scan
class TestResultParser
def parse_result(output)
# e.g. ...<testsuites tests='2' failures='1'>...
matched = output.match(%r{\<testsuites tests='(\d+)' failures='(\d+)'/?\>})
if matched and matched.length == 3
tests = matched[1].to_i
failures = matched[2].to_i
return {
tests: tests,
failures: failures
}
else
UI.error("Couldn't parse the number of tests from the output")
return {}
end
end
end
end
| 23.681818 | 81 | 0.552783 |
d5bebdf1cfb612aafea0edec3c654ad552618d25 | 1,500 | require "spec_helper"
# Specs for NodeList: a typed collection attribute that coerces raw hashes
# into instances of a given JsonModel subclass and is omitted from JSON when
# empty.
describe AdaptivePayments::NodeList do
  # Anonymous element model with a single String attribute.
  let(:child_model) do
    Class.new(AdaptivePayments::JsonModel) do
      attribute :example, String
    end
  end
  # Anonymous parent model holding a NodeList of the element model above.
  # (klass is captured in a local because the Class.new block has its own
  # scope and cannot see the outer let directly.)
  let(:model) do
    klass = child_model
    Class.new(AdaptivePayments::JsonModel) do
      attribute :children, AdaptivePayments::NodeList[klass]
    end
  end
  describe "default type" do
    it "is a kind of Array" do
      expect(model.new.children).to be_a_kind_of(Array)
    end
  end
  # Coercion must happen both for << and for wholesale assignment.
  describe "coercion" do
    context "when appending to" do
      let(:object) { model.new.tap { |o| o.children << { :example => "anything" } } }
      it "coerces hash to instances of the given type" do
        expect(object.children.first).to be_an_instance_of(child_model)
      end
    end
    context "when overwriting" do
      let(:object) { model.new.tap { |o| o.children = [{ :example => "anything" }] } }
      it "coerces each member to instances of the given type" do
        expect(object.children.first).to be_an_instance_of(child_model)
      end
    end
  end
  describe "#to_json" do
    context "when not empty" do
      let(:json) { model.new(:children => [{ :example => "whatever" }]).to_json }
      it "is present as a child in the output" do
        expect(json).to eq('{"children":[{"example":"whatever"}]}')
      end
    end
    # An empty list must be dropped entirely, not serialized as [].
    context "when empty" do
      let(:json) { model.new.to_json }
      it "is omitted from the output" do
        expect(json).to eq('{}')
      end
    end
  end
end
| 25.423729 | 86 | 0.631333 |
6a30555fb6fe7e5251eb6ee040247f61ccc10025 | 5,126 | #!/opt/puppetlabs/puppet/bin/ruby
require 'json'
require 'puppet'
# Puppet task entry: fetches a packet capture resource from the Azure ARM
# Network Watcher REST API.
#
# args[0] is a JSON string of task parameters (backslashes stripped first).
# Returns the Net::HTTPResponse, or nil when authentication fails.
def packet_captures_get(*args)
  header_params = {}
  argstring = args[0].delete('\\')
  arg_hash = JSON.parse(argstring)
  # Remove task name from arguments - should contain all necessary parameters for URI
  arg_hash.delete('_task')
  # This task only performs GET; the Put/Delete branches below are shared
  # boilerplate from the generator.
  operation_verb = 'Get'
  query_params, body_params, path_params = format_params(arg_hash)
  # NOTE(review): the '//subscriptions' double slash looks like a template
  # artifact; Azure appears to tolerate it — confirm before changing.
  uri_string = "https://management.azure.com//subscriptions/%{subscription_id}/resourceGroups/%{resource_group_name}/providers/Microsoft.Network/networkWatchers/%{network_watcher_name}/packetCaptures/%{packet_capture_name}" % path_params
  unless query_params.empty?
    uri_string = uri_string + '?' + to_query(query_params)
  end
  header_params['Content-Type'] = 'application/json' # first of #{parent_consumes}
  # authenticate mutates header_params, adding the Bearer token on success.
  return nil unless authenticate(header_params) == true
  uri = URI(uri_string)
  Net::HTTP.start(uri.host, uri.port, use_ssl: uri.scheme == 'https') do |http|
    if operation_verb == 'Get'
      req = Net::HTTP::Get.new(uri)
    elsif operation_verb == 'Put'
      req = Net::HTTP::Put.new(uri)
    elsif operation_verb == 'Delete'
      req = Net::HTTP::Delete.new(uri)
    end
    header_params.each { |x, v| req[x] = v } unless header_params.empty?
    unless body_params.empty?
      req.body=body_params.to_json
    end
    Puppet.debug("URI is (#{operation_verb}) #{uri} headers are #{header_params}")
    response = http.request req # Net::HTTPResponse object
    Puppet.debug("Called (#{operation_verb}) endpoint at #{uri}")
    Puppet.debug("response code is #{response.code} and body is #{response.body}")
    # The block's value (the response) is what Net::HTTP.start returns.
    response
  end
end
# Serializes a parameter Hash into a query-string fragment ("k1=v1&k2=v2").
#
# NOTE: keys and values are interpolated verbatim — they are NOT URL-encoded,
# matching the original behaviour (callers only pass simple API parameters
# such as api-version).
#
# @param hash [Hash, nil] parameter name/value pairs
# @return [String] the joined pairs, or '' for a nil or empty input
def to_query(hash)
  return '' unless hash
  # join('&') yields '' for an empty hash, so no nil-guard dance is needed
  # (the original used reduce, which returns nil on empty collections).
  hash.map { |name, value| "#{name}=#{value}" }.join('&')
end
# Builds the descriptor Hash for a single operation parameter: its wire name,
# location (query/path/body), alias, and snake_case key.
def op_param(name, inquery, paramalias, namesnake)
  { name: name, location: inquery, paramalias: paramalias, namesnake: namesnake }
end
# Splits the flat task-argument Hash into query, body and path parameter
# Hashes according to each parameter's declared location.
#
# @param key_values [Hash{String=>Object}] raw task arguments (snake_case keys)
# @return [Array(Hash, Hash, Hash)] query_params, body_params, path_params
def format_params(key_values)
  query_params = {}
  body_params = {}
  path_params = {}
  # Values containing '{' are assumed to be embedded JSON objects (with
  # single quotes) and are parsed in place.
  key_values.each do |key,value|
    if value.include? '{'
      key_values[key]=JSON.parse(value.gsub("\'","\""))
    end
  end
  # Declarative list of every parameter this operation understands.
  op_params = [
    op_param('api-version', 'query', 'api_version', 'api_version'),
    op_param('networkWatcherName', 'path', 'network_watcher_name', 'network_watcher_name'),
    op_param('packetCaptureName', 'path', 'packet_capture_name', 'packet_capture_name'),
    op_param('properties', 'body', 'properties', 'properties'),
    op_param('resourceGroupName', 'path', 'resource_group_name', 'resource_group_name'),
    op_param('subscriptionId', 'path', 'subscription_id', 'subscription_id'),
  ]
  op_params.each do |i|
    location = i[:location]
    name = i[:name]
    # NOTE(review): paramalias is extracted but never used below.
    paramalias = i[:paramalias]
    name_snake = i[:namesnake]
    # NOTE(review): the ENV guards test "<no value>_..." — a code-generation
    # template artifact (Go-template missing value) — so they are always nil
    # and the ENV override branches are dead code. The intended guard was
    # presumably "azure_#{name_snake}"; also note the inconsistent azure__ /
    # azure_ prefixes on the assignment side. Confirm against the generator
    # before fixing.
    if location == 'query'
      query_params[name] = key_values[name_snake] unless key_values[name_snake].nil?
      query_params[name] = ENV["azure__#{name_snake}"] unless ENV["<no value>_#{name_snake}"].nil?
    elsif location == 'body'
      body_params[name] = key_values[name_snake] unless key_values[name_snake].nil?
      body_params[name] = ENV["azure_#{name_snake}"] unless ENV["<no value>_#{name_snake}"].nil?
    else
      # Everything else is a path parameter; symbol keys feed String#% later.
      path_params[name_snake.to_sym] = key_values[name_snake] unless key_values[name_snake].nil?
      path_params[name_snake.to_sym] = ENV["azure__#{name_snake}"] unless ENV["<no value>_#{name_snake}"].nil?
    end
  end
  return query_params,body_params,path_params
end
# Obtains an OAuth2 access token from Azure AD using the client-credentials
# grant. Credentials are read from the azure_client_id / azure_client_secret /
# azure_tenant_id environment variables.
#
# @return [String] the access token
# @raise [Puppet::Error] when the token endpoint does not return success
def fetch_oauth2_token
  Puppet.debug('Getting oauth2 token')
  @client_id = ENV['azure_client_id']
  @client_secret = ENV['azure_client_secret']
  @tenant_id = ENV['azure_tenant_id']
  uri = URI("https://login.microsoftonline.com/#{@tenant_id}/oauth2/token")
  response = Net::HTTP.post_form(uri,
                                 'grant_type' => 'client_credentials',
                                 'client_id' => @client_id,
                                 'client_secret' => @client_secret,
                                 'resource' => 'https://management.azure.com/')
  # WARNING: this logs the full response body, which contains the token.
  Puppet.debug("get oauth2 token response code is #{response.code} and body is #{response.body}")
  success = response.is_a? Net::HTTPSuccess
  if success
    return JSON[response.body]["access_token"]
  else
    raise Puppet::Error, "Unable to get oauth2 token - response is #{response} and body is #{response.body}"
  end
end
# Fetches an OAuth2 token and injects it into +header_params+ as a Bearer
# Authorization header. Mutates the Hash in place.
#
# @param header_params [Hash] request headers to augment
# @return [Boolean] true when a token was obtained, false otherwise
def authenticate(header_params)
  token = fetch_oauth2_token
  return false unless token
  header_params['Authorization'] = "Bearer #{token}"
  true
end
# Task driver: reads the JSON parameter blob from STDIN, runs the GET
# operation, and prints the response body. On any StandardError it emits a
# Bolt-style _error structure and exits non-zero.
def task
  # Get operation parameters from an input JSON
  params = STDIN.read
  result = packet_captures_get(params)
  if result.is_a? Net::HTTPSuccess
    puts result.body
  else
    # Non-2xx (or nil on auth failure): surface the body via the rescue below.
    raise result.body
  end
rescue StandardError => e
  result = {}
  result[:_error] = {
    msg: e.message,
    kind: 'puppetlabs-azure_arm/error',
    details: { class: e.class.to_s },
  }
  # NOTE(review): this prints the Hash's Ruby inspect form, not JSON —
  # Bolt expects JSON on stdout; confirm whether result.to_json was intended.
  puts result
  exit 1
end
task | 32.858974 | 237 | 0.672454 |
bf0ce95b4eafb8e7b207b4c6f22f66c902e70c09 | 281 | class CreateLineItems < ActiveRecord::Migration[5.2]
def change
create_table :line_items do |t|
t.datetime :post_date
t.text :description
t.decimal :amount, precision: 15, scale: 2
t.timestamps
end
add_index :line_items, :post_date
end
end
| 21.615385 | 52 | 0.676157 |
91bf25326fc145f8cdeecac7071b9ec50adca3c4 | 943 | # -*- encoding: utf-8 -*-
# Make lib/ loadable so the version constant can be read at build time.
$:.push File.expand_path("../lib", __FILE__)
require "zeal/version"
Gem::Specification.new do |s|
  s.name        = "zeal"
  s.version     = Zeal::VERSION
  s.authors     = ["Ryan Fitzgerald"]
  s.email       = ["[email protected]"]
  s.homepage    = "https://github.com/academia-edu/zeal"
  s.summary     = %q{eager loading (but not too eager) for ActiveRecord collections}
  s.description = %q{Zeal allows you to eager-load associations on ActiveRecord objects that have already been loaded from the database.}
  s.rubyforge_project = "zeal"
  # File lists are driven by git so nothing untracked ships in the gem.
  s.files         = `git ls-files`.split("\n")
  s.test_files    = `git ls-files -- {test,spec,features}/*`.split("\n")
  s.executables   = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
  s.require_paths = ["lib"]
  # specify any dependencies here; for example:
  # s.add_development_dependency "rspec"
  # s.add_runtime_dependency "rest-client"
end
| 37.72 | 137 | 0.656416 |
ac478cbb019632b60033284220be517a3635b84b | 57,931 | require "cases/helper"
require 'models/post'
require 'models/tagging'
require 'models/tag'
require 'models/comment'
require 'models/author'
require 'models/essay'
require 'models/category'
require 'models/company'
require 'models/person'
require 'models/reader'
require 'models/owner'
require 'models/pet'
require 'models/reference'
require 'models/job'
require 'models/subscriber'
require 'models/subscription'
require 'models/book'
require 'models/developer'
require 'models/computer'
require 'models/project'
require 'models/member'
require 'models/membership'
require 'models/club'
require 'models/categorization'
require 'models/sponsor'
require 'models/mentor'
require 'models/contract'
class EagerAssociationTest < ActiveRecord::TestCase
fixtures :posts, :comments, :authors, :essays, :author_addresses, :categories, :categories_posts,
:companies, :accounts, :tags, :taggings, :people, :readers, :categorizations,
:owners, :pets, :author_favorites, :jobs, :references, :subscribers, :subscriptions, :books,
:developers, :projects, :developers_projects, :members, :memberships, :clubs, :sponsors
def test_eager_with_has_one_through_join_model_with_conditions_on_the_through
member = Member.all.merge!(:includes => :favourite_club).find(members(:some_other_guy).id)
assert_nil member.favourite_club
end
def test_loading_with_one_association
posts = Post.all.merge!(:includes => :comments).to_a
post = posts.find { |p| p.id == 1 }
assert_equal 2, post.comments.size
assert post.comments.include?(comments(:greetings))
post = Post.all.merge!(:includes => :comments, :where => "posts.title = 'Welcome to the weblog'").first
assert_equal 2, post.comments.size
assert post.comments.include?(comments(:greetings))
posts = Post.all.merge!(:includes => :last_comment).to_a
post = posts.find { |p| p.id == 1 }
assert_equal Post.find(1).last_comment, post.last_comment
end
def test_loading_with_one_association_with_non_preload
posts = Post.all.merge!(:includes => :last_comment, :order => 'comments.id DESC').to_a
post = posts.find { |p| p.id == 1 }
assert_equal Post.find(1).last_comment, post.last_comment
end
def test_loading_conditions_with_or
posts = authors(:david).posts.references(:comments).merge(
:includes => :comments,
:where => "comments.body like 'Normal%' OR comments.#{QUOTED_TYPE} = 'SpecialComment'"
).to_a
assert_nil posts.detect { |p| p.author_id != authors(:david).id },
"expected to find only david's posts"
end
def test_with_ordering
list = Post.all.merge!(:includes => :comments, :order => "posts.id DESC").to_a
[:other_by_mary, :other_by_bob, :misc_by_mary, :misc_by_bob, :eager_other,
:sti_habtm, :sti_post_and_comments, :sti_comments, :authorless, :thinking, :welcome
].each_with_index do |post, index|
assert_equal posts(post), list[index]
end
end
def test_has_many_through_with_order
authors = Author.includes(:favorite_authors).to_a
assert authors.count > 0
assert_no_queries { authors.map(&:favorite_authors) }
end
def test_eager_loaded_has_one_association_with_references_does_not_run_additional_queries
Post.update_all(author_id: nil)
authors = Author.includes(:post).references(:post).to_a
assert authors.count > 0
assert_no_queries { authors.map(&:post) }
end
def test_with_two_tables_in_from_without_getting_double_quoted
posts = Post.select("posts.*").from("authors, posts").eager_load(:comments).where("posts.author_id = authors.id").order("posts.id").to_a
assert_equal 2, posts.first.comments.size
end
def test_loading_with_multiple_associations
posts = Post.all.merge!(:includes => [ :comments, :author, :categories ], :order => "posts.id").to_a
assert_equal 2, posts.first.comments.size
assert_equal 2, posts.first.categories.size
assert posts.first.comments.include?(comments(:greetings))
end
def test_duplicate_middle_objects
comments = Comment.all.merge!(:where => 'post_id = 1', :includes => [:post => :author]).to_a
assert_no_queries do
comments.each {|comment| comment.post.author.name}
end
end
def test_preloading_has_many_in_multiple_queries_with_more_ids_than_database_can_handle
assert_called(Comment.connection, :in_clause_length, returns: 5) do
posts = Post.all.merge!(:includes=>:comments).to_a
assert_equal 11, posts.size
end
end
def test_preloading_has_many_in_one_queries_when_database_has_no_limit_on_ids_it_can_handle
assert_called(Comment.connection, :in_clause_length, returns: nil) do
posts = Post.all.merge!(:includes=>:comments).to_a
assert_equal 11, posts.size
end
end
def test_preloading_habtm_in_multiple_queries_with_more_ids_than_database_can_handle
assert_called(Comment.connection, :in_clause_length, times: 2, returns: 5) do
posts = Post.all.merge!(:includes=>:categories).to_a
assert_equal 11, posts.size
end
end
def test_preloading_habtm_in_one_queries_when_database_has_no_limit_on_ids_it_can_handle
assert_called(Comment.connection, :in_clause_length, times: 2, returns: nil) do
posts = Post.all.merge!(:includes=>:categories).to_a
assert_equal 11, posts.size
end
end
def test_load_associated_records_in_one_query_when_adapter_has_no_limit
assert_called(Comment.connection, :in_clause_length, returns: nil) do
post = posts(:welcome)
assert_queries(2) do
Post.includes(:comments).where(:id => post.id).to_a
end
end
end
def test_load_associated_records_in_several_queries_when_many_ids_passed
assert_called(Comment.connection, :in_clause_length, returns: 1) do
post1, post2 = posts(:welcome), posts(:thinking)
assert_queries(3) do
Post.includes(:comments).where(:id => [post1.id, post2.id]).to_a
end
end
end
def test_load_associated_records_in_one_query_when_a_few_ids_passed
assert_called(Comment.connection, :in_clause_length, returns: 3) do
post = posts(:welcome)
assert_queries(2) do
Post.includes(:comments).where(:id => post.id).to_a
end
end
end
def test_including_duplicate_objects_from_belongs_to
popular_post = Post.create!(:title => 'foo', :body => "I like cars!")
comment = popular_post.comments.create!(:body => "lol")
popular_post.readers.create!(:person => people(:michael))
popular_post.readers.create!(:person => people(:david))
readers = Reader.all.merge!(:where => ["post_id = ?", popular_post.id],
:includes => {:post => :comments}).to_a
readers.each do |reader|
assert_equal [comment], reader.post.comments
end
end
def test_including_duplicate_objects_from_has_many
car_post = Post.create!(:title => 'foo', :body => "I like cars!")
car_post.categories << categories(:general)
car_post.categories << categories(:technology)
comment = car_post.comments.create!(:body => "hmm")
categories = Category.all.merge!(:where => { 'posts.id' => car_post.id },
:includes => {:posts => :comments}).to_a
categories.each do |category|
assert_equal [comment], category.posts[0].comments
end
end
def test_associations_loaded_for_all_records
post = Post.create!(:title => 'foo', :body => "I like cars!")
SpecialComment.create!(:body => 'Come on!', :post => post)
first_category = Category.create! :name => 'First!', :posts => [post]
second_category = Category.create! :name => 'Second!', :posts => [post]
categories = Category.where(:id => [first_category.id, second_category.id]).includes(:posts => :special_comments)
assert_equal categories.map { |category| category.posts.first.special_comments.loaded? }, [true, true]
end
def test_finding_with_includes_on_has_many_association_with_same_include_includes_only_once
author_id = authors(:david).id
author = assert_queries(3) { Author.all.merge!(:includes => {:posts_with_comments => :comments}).find(author_id) } # find the author, then find the posts, then find the comments
author.posts_with_comments.each do |post_with_comments|
assert_equal post_with_comments.comments.length, post_with_comments.comments.count
assert_nil post_with_comments.comments.to_a.uniq!
end
end
def test_finding_with_includes_on_has_one_association_with_same_include_includes_only_once
author = authors(:david)
post = author.post_about_thinking_with_last_comment
last_comment = post.last_comment
author = assert_queries(3) { Author.all.merge!(:includes => {:post_about_thinking_with_last_comment => :last_comment}).find(author.id)} # find the author, then find the posts, then find the comments
assert_no_queries do
assert_equal post, author.post_about_thinking_with_last_comment
assert_equal last_comment, author.post_about_thinking_with_last_comment.last_comment
end
end
def test_finding_with_includes_on_belongs_to_association_with_same_include_includes_only_once
post = posts(:welcome)
author = post.author
author_address = author.author_address
post = assert_queries(3) { Post.all.merge!(:includes => {:author_with_address => :author_address}).find(post.id) } # find the post, then find the author, then find the address
assert_no_queries do
assert_equal author, post.author_with_address
assert_equal author_address, post.author_with_address.author_address
end
end
def test_finding_with_includes_on_null_belongs_to_association_with_same_include_includes_only_once
post = posts(:welcome)
post.update!(author: nil)
post = assert_queries(1) { Post.all.merge!(includes: {author_with_address: :author_address}).find(post.id) }
# find the post, then find the author which is null so no query for the author or address
assert_no_queries do
assert_equal nil, post.author_with_address
end
end
def test_finding_with_includes_on_null_belongs_to_polymorphic_association
sponsor = sponsors(:moustache_club_sponsor_for_groucho)
sponsor.update!(sponsorable: nil)
sponsor = assert_queries(1) { Sponsor.all.merge!(:includes => :sponsorable).find(sponsor.id) }
assert_no_queries do
assert_equal nil, sponsor.sponsorable
end
end
def test_finding_with_includes_on_empty_polymorphic_type_column
sponsor = sponsors(:moustache_club_sponsor_for_groucho)
sponsor.update!(sponsorable_type: '', sponsorable_id: nil) # sponsorable_type column might be declared NOT NULL
sponsor = assert_queries(1) do
assert_nothing_raised { Sponsor.all.merge!(:includes => :sponsorable).find(sponsor.id) }
end
assert_no_queries do
assert_equal nil, sponsor.sponsorable
end
end
def test_loading_from_an_association
posts = authors(:david).posts.merge(:includes => :comments, :order => "posts.id").to_a
assert_equal 2, posts.first.comments.size
end
def test_loading_from_an_association_that_has_a_hash_of_conditions
assert_nothing_raised do
Author.all.merge!(:includes => :hello_posts_with_hash_conditions).to_a
end
assert !Author.all.merge!(:includes => :hello_posts_with_hash_conditions).find(authors(:david).id).hello_posts.empty?
end
def test_loading_with_no_associations
assert_nil Post.all.merge!(:includes => :author).find(posts(:authorless).id).author
end
# Regression test for 21c75e5
def test_nested_loading_does_not_raise_exception_when_association_does_not_exist
assert_nothing_raised do
Post.all.merge!(:includes => {:author => :author_addresss}).find(posts(:authorless).id)
end
end
def test_three_level_nested_preloading_does_not_raise_exception_when_association_does_not_exist
post_id = Comment.where(author_id: nil).where.not(post_id: nil).first.post_id
assert_nothing_raised do
Post.preload(:comments => [{:author => :essays}]).find(post_id)
end
end
def test_nested_loading_through_has_one_association
aa = AuthorAddress.all.merge!(:includes => {:author => :posts}).find(author_addresses(:david_address).id)
assert_equal aa.author.posts.count, aa.author.posts.length
end
def test_nested_loading_through_has_one_association_with_order
aa = AuthorAddress.all.merge!(:includes => {:author => :posts}, :order => 'author_addresses.id').find(author_addresses(:david_address).id)
assert_equal aa.author.posts.count, aa.author.posts.length
end
def test_nested_loading_through_has_one_association_with_order_on_association
aa = AuthorAddress.all.merge!(:includes => {:author => :posts}, :order => 'authors.id').find(author_addresses(:david_address).id)
assert_equal aa.author.posts.count, aa.author.posts.length
end
def test_nested_loading_through_has_one_association_with_order_on_nested_association
aa = AuthorAddress.all.merge!(:includes => {:author => :posts}, :order => 'posts.id').find(author_addresses(:david_address).id)
assert_equal aa.author.posts.count, aa.author.posts.length
end
def test_nested_loading_through_has_one_association_with_conditions
aa = AuthorAddress.references(:author_addresses).merge(
:includes => {:author => :posts},
:where => "author_addresses.id > 0"
).find author_addresses(:david_address).id
assert_equal aa.author.posts.count, aa.author.posts.length
end
def test_nested_loading_through_has_one_association_with_conditions_on_association
aa = AuthorAddress.references(:authors).merge(
:includes => {:author => :posts},
:where => "authors.id > 0"
).find author_addresses(:david_address).id
assert_equal aa.author.posts.count, aa.author.posts.length
end
def test_nested_loading_through_has_one_association_with_conditions_on_nested_association
aa = AuthorAddress.references(:posts).merge(
:includes => {:author => :posts},
:where => "posts.id > 0"
).find author_addresses(:david_address).id
assert_equal aa.author.posts.count, aa.author.posts.length
end
def test_eager_association_loading_with_belongs_to_and_foreign_keys
pets = Pet.all.merge!(:includes => :owner).to_a
assert_equal 4, pets.length
end
def test_eager_association_loading_with_belongs_to
comments = Comment.all.merge!(:includes => :post).to_a
assert_equal 11, comments.length
titles = comments.map { |c| c.post.title }
assert titles.include?(posts(:welcome).title)
assert titles.include?(posts(:sti_post_and_comments).title)
end
def test_eager_association_loading_with_belongs_to_and_limit
comments = Comment.all.merge!(:includes => :post, :limit => 5, :order => 'comments.id').to_a
assert_equal 5, comments.length
assert_equal [1,2,3,5,6], comments.collect(&:id)
end
def test_eager_association_loading_with_belongs_to_and_limit_and_conditions
comments = Comment.all.merge!(:includes => :post, :where => 'post_id = 4', :limit => 3, :order => 'comments.id').to_a
assert_equal 3, comments.length
assert_equal [5,6,7], comments.collect(&:id)
end
def test_eager_association_loading_with_belongs_to_and_limit_and_offset
comments = Comment.all.merge!(:includes => :post, :limit => 3, :offset => 2, :order => 'comments.id').to_a
assert_equal 3, comments.length
assert_equal [3,5,6], comments.collect(&:id)
end
def test_eager_association_loading_with_belongs_to_and_limit_and_offset_and_conditions
comments = Comment.all.merge!(:includes => :post, :where => 'post_id = 4', :limit => 3, :offset => 1, :order => 'comments.id').to_a
assert_equal 3, comments.length
assert_equal [6,7,8], comments.collect(&:id)
end
def test_eager_association_loading_with_belongs_to_and_limit_and_offset_and_conditions_array
comments = Comment.all.merge!(:includes => :post, :where => ['post_id = ?',4], :limit => 3, :offset => 1, :order => 'comments.id').to_a
assert_equal 3, comments.length
assert_equal [6,7,8], comments.collect(&:id)
end
def test_eager_association_loading_with_belongs_to_and_conditions_string_with_unquoted_table_name
assert_nothing_raised do
Comment.includes(:post).references(:posts).where('posts.id = ?', 4)
end
end
def test_eager_association_loading_with_belongs_to_and_conditions_hash
comments = []
assert_nothing_raised do
comments = Comment.all.merge!(:includes => :post, :where => {:posts => {:id => 4}}, :limit => 3, :order => 'comments.id').to_a
end
assert_equal 3, comments.length
assert_equal [5,6,7], comments.collect(&:id)
assert_no_queries do
comments.first.post
end
end
def test_eager_association_loading_with_belongs_to_and_conditions_string_with_quoted_table_name
quoted_posts_id= Comment.connection.quote_table_name('posts') + '.' + Comment.connection.quote_column_name('id')
assert_nothing_raised do
Comment.includes(:post).references(:posts).where("#{quoted_posts_id} = ?", 4)
end
end
def test_eager_association_loading_with_belongs_to_and_order_string_with_unquoted_table_name
assert_nothing_raised do
Comment.all.merge!(:includes => :post, :order => 'posts.id').to_a
end
end
def test_eager_association_loading_with_belongs_to_and_order_string_with_quoted_table_name
quoted_posts_id= Comment.connection.quote_table_name('posts') + '.' + Comment.connection.quote_column_name('id')
assert_nothing_raised do
Comment.includes(:post).references(:posts).order(quoted_posts_id)
end
end
def test_eager_association_loading_with_belongs_to_and_limit_and_multiple_associations
posts = Post.all.merge!(:includes => [:author, :very_special_comment], :limit => 1, :order => 'posts.id').to_a
assert_equal 1, posts.length
assert_equal [1], posts.collect(&:id)
end
def test_eager_association_loading_with_belongs_to_and_limit_and_offset_and_multiple_associations
posts = Post.all.merge!(:includes => [:author, :very_special_comment], :limit => 1, :offset => 1, :order => 'posts.id').to_a
assert_equal 1, posts.length
assert_equal [2], posts.collect(&:id)
end
def test_eager_association_loading_with_belongs_to_inferred_foreign_key_from_association_name
author_favorite = AuthorFavorite.all.merge!(:includes => :favorite_author).first
assert_equal authors(:mary), assert_no_queries { author_favorite.favorite_author }
end
def test_eager_load_belongs_to_quotes_table_and_column_names
job = Job.includes(:ideal_reference).find jobs(:unicyclist).id
references(:michael_unicyclist)
assert_no_queries{ assert_equal references(:michael_unicyclist), job.ideal_reference}
end
def test_eager_load_has_one_quotes_table_and_column_names
michael = Person.all.merge!(:includes => :favourite_reference).find(people(:michael).id)
references(:michael_unicyclist)
assert_no_queries{ assert_equal references(:michael_unicyclist), michael.favourite_reference}
end
def test_eager_load_has_many_quotes_table_and_column_names
michael = Person.all.merge!(:includes => :references).find(people(:michael).id)
references(:michael_magician,:michael_unicyclist)
assert_no_queries{ assert_equal references(:michael_magician,:michael_unicyclist), michael.references.sort_by(&:id) }
end
def test_eager_load_has_many_through_quotes_table_and_column_names
michael = Person.all.merge!(:includes => :jobs).find(people(:michael).id)
jobs(:magician, :unicyclist)
assert_no_queries{ assert_equal jobs(:unicyclist, :magician), michael.jobs.sort_by(&:id) }
end
def test_eager_load_has_many_with_string_keys
subscriptions = subscriptions(:webster_awdr, :webster_rfr)
subscriber =Subscriber.all.merge!(:includes => :subscriptions).find(subscribers(:second).id)
assert_equal subscriptions, subscriber.subscriptions.sort_by(&:id)
end
def test_string_id_column_joins
s = Subscriber.create! do |c|
c.id = "PL"
end
b = Book.create!
Subscription.create!(:subscriber_id => "PL", :book_id => b.id)
s.reload
s.book_ids = s.book_ids
end
def test_eager_load_has_many_through_with_string_keys
books = books(:awdr, :rfr)
subscriber = Subscriber.all.merge!(:includes => :books).find(subscribers(:second).id)
assert_equal books, subscriber.books.sort_by(&:id)
end
def test_eager_load_belongs_to_with_string_keys
subscriber = subscribers(:second)
subscription = Subscription.all.merge!(:includes => :subscriber).find(subscriptions(:webster_awdr).id)
assert_equal subscriber, subscription.subscriber
end
def test_eager_association_loading_with_explicit_join
posts = Post.all.merge!(:includes => :comments, :joins => "INNER JOIN authors ON posts.author_id = authors.id AND authors.name = 'Mary'", :limit => 1, :order => 'author_id').to_a
assert_equal 1, posts.length
end
def test_eager_with_has_many_through
posts_with_comments = people(:michael).posts.merge(:includes => :comments, :order => 'posts.id').to_a
posts_with_author = people(:michael).posts.merge(:includes => :author, :order => 'posts.id').to_a
posts_with_comments_and_author = people(:michael).posts.merge(:includes => [ :comments, :author ], :order => 'posts.id').to_a
assert_equal 2, posts_with_comments.inject(0) { |sum, post| sum + post.comments.size }
assert_equal authors(:david), assert_no_queries { posts_with_author.first.author }
assert_equal authors(:david), assert_no_queries { posts_with_comments_and_author.first.author }
end
def test_eager_with_has_many_through_a_belongs_to_association
author = authors(:mary)
Post.create!(:author => author, :title => "TITLE", :body => "BODY")
author.author_favorites.create(:favorite_author_id => 1)
author.author_favorites.create(:favorite_author_id => 2)
posts_with_author_favorites = author.posts.merge(:includes => :author_favorites).to_a
assert_no_queries { posts_with_author_favorites.first.author_favorites.first.author_id }
end
def test_eager_with_has_many_through_an_sti_join_model
author = Author.all.merge!(:includes => :special_post_comments, :order => 'authors.id').first
assert_equal [comments(:does_it_hurt)], assert_no_queries { author.special_post_comments }
end
def test_eager_with_has_many_through_an_sti_join_model_with_conditions_on_both
author = Author.all.merge!(:includes => :special_nonexistent_post_comments, :order => 'authors.id').first
assert_equal [], author.special_nonexistent_post_comments
end
def test_eager_with_has_many_through_join_model_with_conditions
assert_equal Author.all.merge!(:includes => :hello_post_comments,
:order => 'authors.id').first.hello_post_comments.sort_by(&:id),
Author.all.merge!(:order => 'authors.id').first.hello_post_comments.sort_by(&:id)
end
def test_eager_with_has_many_through_join_model_with_conditions_on_top_level
assert_equal comments(:more_greetings), Author.all.merge!(:includes => :comments_with_order_and_conditions).find(authors(:david).id).comments_with_order_and_conditions.first
end
def test_eager_with_has_many_through_join_model_with_include
author_comments = Author.all.merge!(:includes => :comments_with_include).find(authors(:david).id).comments_with_include.to_a
assert_no_queries do
author_comments.first.post.title
end
end
def test_eager_with_has_many_through_with_conditions_join_model_with_include
post_tags = Post.find(posts(:welcome).id).misc_tags
eager_post_tags = Post.all.merge!(:includes => :misc_tags).find(1).misc_tags
assert_equal post_tags, eager_post_tags
end
def test_eager_with_has_many_through_join_model_ignores_default_includes
assert_nothing_raised do
authors(:david).comments_on_posts_with_default_include.to_a
end
end
def test_eager_with_has_many_and_limit
posts = Post.all.merge!(:order => 'posts.id asc', :includes => [ :author, :comments ], :limit => 2).to_a
assert_equal 2, posts.size
assert_equal 3, posts.inject(0) { |sum, post| sum + post.comments.size }
end
def test_eager_with_has_many_and_limit_and_conditions
posts = Post.all.merge!(:includes => [ :author, :comments ], :limit => 2, :where => "posts.body = 'hello'", :order => "posts.id").to_a
assert_equal 2, posts.size
assert_equal [4,5], posts.collect(&:id)
end
def test_eager_with_has_many_and_limit_and_conditions_array
posts = Post.all.merge!(:includes => [ :author, :comments ], :limit => 2, :where => [ "posts.body = ?", 'hello' ], :order => "posts.id").to_a
assert_equal 2, posts.size
assert_equal [4,5], posts.collect(&:id)
end
def test_eager_with_has_many_and_limit_and_conditions_array_on_the_eagers
posts = Post.includes(:author, :comments).limit(2).references(:author).where("authors.name = ?", 'David')
assert_equal 2, posts.size
count = Post.includes(:author, :comments).limit(2).references(:author).where("authors.name = ?", 'David').count
assert_equal posts.size, count
end
def test_eager_with_has_many_and_limit_and_high_offset
posts = Post.all.merge!(:includes => [ :author, :comments ], :limit => 2, :offset => 10, :where => { 'authors.name' => 'David' }).to_a
assert_equal 0, posts.size
end
def test_eager_with_has_many_and_limit_and_high_offset_and_multiple_array_conditions
assert_queries(1) do
posts = Post.references(:authors, :comments).
merge(:includes => [ :author, :comments ], :limit => 2, :offset => 10,
:where => [ "authors.name = ? and comments.body = ?", 'David', 'go crazy' ]).to_a
assert_equal 0, posts.size
end
end
def test_eager_with_has_many_and_limit_and_high_offset_and_multiple_hash_conditions
assert_queries(1) do
posts = Post.all.merge!(:includes => [ :author, :comments ], :limit => 2, :offset => 10,
:where => { 'authors.name' => 'David', 'comments.body' => 'go crazy' }).to_a
assert_equal 0, posts.size
end
end
def test_count_eager_with_has_many_and_limit_and_high_offset
posts = Post.all.merge!(:includes => [ :author, :comments ], :limit => 2, :offset => 10, :where => { 'authors.name' => 'David' }).count(:all)
assert_equal 0, posts
end
def test_eager_with_has_many_and_limit_with_no_results
posts = Post.all.merge!(:includes => [ :author, :comments ], :limit => 2, :where => "posts.title = 'magic forest'").to_a
assert_equal 0, posts.size
end
def test_eager_count_performed_on_a_has_many_association_with_multi_table_conditional
author = authors(:david)
author_posts_without_comments = author.posts.select { |post| post.comments.blank? }
assert_equal author_posts_without_comments.size, author.posts.includes(:comments).where('comments.id is null').references(:comments).count
end
def test_eager_count_performed_on_a_has_many_through_association_with_multi_table_conditional
person = people(:michael)
person_posts_without_comments = person.posts.select { |post| post.comments.blank? }
assert_equal person_posts_without_comments.size, person.posts_with_no_comments.count
end
def test_eager_with_has_and_belongs_to_many_and_limit
posts = Post.all.merge!(:includes => :categories, :order => "posts.id", :limit => 3).to_a
assert_equal 3, posts.size
assert_equal 2, posts[0].categories.size
assert_equal 1, posts[1].categories.size
assert_equal 0, posts[2].categories.size
assert posts[0].categories.include?(categories(:technology))
assert posts[1].categories.include?(categories(:general))
end
# Since the preloader for habtm gets raw row hashes from the database and then
# instantiates them, this test ensures that it only instantiates one actual
# object per record from the database.
def test_has_and_belongs_to_many_should_not_instantiate_same_records_multiple_times
welcome = posts(:welcome)
categories = Category.includes(:posts)
general = categories.find { |c| c == categories(:general) }
technology = categories.find { |c| c == categories(:technology) }
post1 = general.posts.to_a.find { |p| p == welcome }
post2 = technology.posts.to_a.find { |p| p == welcome }
assert_equal post1.object_id, post2.object_id
end
def test_eager_with_has_many_and_limit_and_conditions_on_the_eagers
posts =
authors(:david).posts
.includes(:comments)
.where("comments.body like 'Normal%' OR comments.#{QUOTED_TYPE}= 'SpecialComment'")
.references(:comments)
.limit(2)
.to_a
assert_equal 2, posts.size
count =
Post.includes(:comments, :author)
.where("authors.name = 'David' AND (comments.body like 'Normal%' OR comments.#{QUOTED_TYPE}= 'SpecialComment')")
.references(:authors, :comments)
.limit(2)
.count
assert_equal count, posts.size
end
def test_eager_with_has_many_and_limit_and_scoped_conditions_on_the_eagers
posts = nil
Post.includes(:comments)
.where("comments.body like 'Normal%' OR comments.#{QUOTED_TYPE}= 'SpecialComment'")
.references(:comments)
.scoping do
posts = authors(:david).posts.limit(2).to_a
assert_equal 2, posts.size
end
Post.includes(:comments, :author)
.where("authors.name = 'David' AND (comments.body like 'Normal%' OR comments.#{QUOTED_TYPE}= 'SpecialComment')")
.references(:authors, :comments)
.scoping do
count = Post.limit(2).count
assert_equal count, posts.size
end
end
def test_eager_association_loading_with_habtm
posts = Post.all.merge!(:includes => :categories, :order => "posts.id").to_a
assert_equal 2, posts[0].categories.size
assert_equal 1, posts[1].categories.size
assert_equal 0, posts[2].categories.size
assert posts[0].categories.include?(categories(:technology))
assert posts[1].categories.include?(categories(:general))
end
def test_eager_with_inheritance
SpecialPost.all.merge!(:includes => [ :comments ]).to_a
end
def test_eager_has_one_with_association_inheritance
post = Post.all.merge!(:includes => [ :very_special_comment ]).find(4)
assert_equal "VerySpecialComment", post.very_special_comment.class.to_s
end
def test_eager_has_many_with_association_inheritance
post = Post.all.merge!(:includes => [ :special_comments ]).find(4)
post.special_comments.each do |special_comment|
assert special_comment.is_a?(SpecialComment)
end
end
def test_eager_habtm_with_association_inheritance
post = Post.all.merge!(:includes => [ :special_categories ]).find(6)
assert_equal 1, post.special_categories.size
post.special_categories.each do |special_category|
assert_equal "SpecialCategory", special_category.class.to_s
end
end
def test_eager_with_has_one_dependent_does_not_destroy_dependent
assert_not_nil companies(:first_firm).account
f = Firm.all.merge!(:includes => :account,
:where => ["companies.name = ?", "37signals"]).first
assert_not_nil f.account
assert_equal companies(:first_firm, :reload).account, f.account
end
def test_eager_with_multi_table_conditional_properly_counts_the_records_when_using_size
author = authors(:david)
posts_with_no_comments = author.posts.select { |post| post.comments.blank? }
assert_equal posts_with_no_comments.size, author.posts_with_no_comments.size
assert_equal posts_with_no_comments, author.posts_with_no_comments
end
def test_eager_with_invalid_association_reference
assert_raise(ActiveRecord::AssociationNotFoundError, "Association was not found; perhaps you misspelled it? You specified :include => :monkeys") {
Post.all.merge!(:includes=> :monkeys ).find(6)
}
assert_raise(ActiveRecord::AssociationNotFoundError, "Association was not found; perhaps you misspelled it? You specified :include => :monkeys") {
Post.all.merge!(:includes=>[ :monkeys ]).find(6)
}
assert_raise(ActiveRecord::AssociationNotFoundError, "Association was not found; perhaps you misspelled it? You specified :include => :monkeys") {
Post.all.merge!(:includes=>[ 'monkeys' ]).find(6)
}
assert_raise(ActiveRecord::AssociationNotFoundError, "Association was not found; perhaps you misspelled it? You specified :include => :monkeys, :elephants") {
Post.all.merge!(:includes=>[ :monkeys, :elephants ]).find(6)
}
end
def test_eager_with_default_scope
developer = EagerDeveloperWithDefaultScope.where(:name => 'David').first
projects = Project.order(:id).to_a
assert_no_queries do
assert_equal(projects, developer.projects)
end
end
def test_eager_with_default_scope_as_class_method
developer = EagerDeveloperWithClassMethodDefaultScope.where(:name => 'David').first
projects = Project.order(:id).to_a
assert_no_queries do
assert_equal(projects, developer.projects)
end
end
def test_eager_with_default_scope_as_class_method_using_find_method
david = developers(:david)
developer = EagerDeveloperWithClassMethodDefaultScope.find(david.id)
projects = Project.order(:id).to_a
assert_no_queries do
assert_equal(projects, developer.projects)
end
end
def test_eager_with_default_scope_as_class_method_using_find_by_method
developer = EagerDeveloperWithClassMethodDefaultScope.find_by(name: 'David')
projects = Project.order(:id).to_a
assert_no_queries do
assert_equal(projects, developer.projects)
end
end
def test_eager_with_default_scope_as_lambda
developer = EagerDeveloperWithLambdaDefaultScope.where(:name => 'David').first
projects = Project.order(:id).to_a
assert_no_queries do
assert_equal(projects, developer.projects)
end
end
def test_eager_with_default_scope_as_block
# warm up the habtm cache
EagerDeveloperWithBlockDefaultScope.where(:name => 'David').first.projects
developer = EagerDeveloperWithBlockDefaultScope.where(:name => 'David').first
projects = Project.order(:id).to_a
assert_no_queries do
assert_equal(projects, developer.projects)
end
end
def test_eager_with_default_scope_as_callable
developer = EagerDeveloperWithCallableDefaultScope.where(:name => 'David').first
projects = Project.order(:id).to_a
assert_no_queries do
assert_equal(projects, developer.projects)
end
end
# Test helper: loads every record of +className+ ordered by its primary key,
# optionally eager-loading the association(s) named by +include+.
# Returns the materialized Array of records.
def find_all_ordered(className, include = nil)
  order_clause = "#{className.table_name}.#{className.primary_key}"
  className.all.merge!(order: order_clause, includes: include).to_a
end
def test_limited_eager_with_order
assert_equal(
posts(:thinking, :sti_comments),
Post.all.merge!(
:includes => [:author, :comments], :where => { 'authors.name' => 'David' },
:order => 'UPPER(posts.title)', :limit => 2, :offset => 1
).to_a
)
assert_equal(
posts(:sti_post_and_comments, :sti_comments),
Post.all.merge!(
:includes => [:author, :comments], :where => { 'authors.name' => 'David' },
:order => 'UPPER(posts.title) DESC', :limit => 2, :offset => 1
).to_a
)
end
def test_limited_eager_with_multiple_order_columns
assert_equal(
posts(:thinking, :sti_comments),
Post.all.merge!(
:includes => [:author, :comments], :where => { 'authors.name' => 'David' },
:order => ['UPPER(posts.title)', 'posts.id'], :limit => 2, :offset => 1
).to_a
)
assert_equal(
posts(:sti_post_and_comments, :sti_comments),
Post.all.merge!(
:includes => [:author, :comments], :where => { 'authors.name' => 'David' },
:order => ['UPPER(posts.title) DESC', 'posts.id'], :limit => 2, :offset => 1
).to_a
)
end
def test_limited_eager_with_numeric_in_association
assert_equal(
people(:david, :susan),
Person.references(:number1_fans_people).merge(
:includes => [:readers, :primary_contact, :number1_fan],
:where => "number1_fans_people.first_name like 'M%'",
:order => 'people.id', :limit => 2, :offset => 0
).to_a
)
end
def test_polymorphic_type_condition
post = Post.all.merge!(:includes => :taggings).find(posts(:thinking).id)
assert post.taggings.include?(taggings(:thinking_general))
post = SpecialPost.all.merge!(:includes => :taggings).find(posts(:thinking).id)
assert post.taggings.include?(taggings(:thinking_general))
end
def test_eager_with_multiple_associations_with_same_table_has_many_and_habtm
# Eager includes of has many and habtm associations aren't necessarily sorted in the same way
# NOTE: this nested `def` defines assert_equal_after_sort as an instance
# method on the test class at runtime (re-defined on every run of this test),
# which is why it is available to the assertions below.
def assert_equal_after_sort(item1, item2, item3 = nil)
assert_equal(item1.sort{|a,b| a.id <=> b.id}, item2.sort{|a,b| a.id <=> b.id})
assert_equal(item3.sort{|a,b| a.id <=> b.id}, item2.sort{|a,b| a.id <=> b.id}) if item3
end
# Test regular association, association with conditions, association with
# STI, and association with conditions assured not to be true
post_types = [:posts, :other_posts, :special_posts]
# test both has_many and has_and_belongs_to_many
[Author, Category].each do |className|
# d1: baseline, no eager loading; each subsequent dN adds eager includes and
# must produce the same owners with id-equal association contents.
d1 = find_all_ordered(className)
# test including all post types at once
d2 = find_all_ordered(className, post_types)
d1.each_index do |i|
assert_equal(d1[i], d2[i])
assert_equal_after_sort(d1[i].posts, d2[i].posts)
post_types[1..-1].each do |post_type|
# test including post_types together
d3 = find_all_ordered(className, [:posts, post_type])
assert_equal(d1[i], d3[i])
assert_equal_after_sort(d1[i].posts, d3[i].posts)
assert_equal_after_sort(d1[i].send(post_type), d2[i].send(post_type), d3[i].send(post_type))
end
end
end
end
def test_eager_with_multiple_associations_with_same_table_has_one
d1 = find_all_ordered(Firm)
d2 = find_all_ordered(Firm, :account)
d1.each_index do |i|
assert_equal(d1[i], d2[i])
assert_equal(d1[i].account, d2[i].account)
end
end
def test_eager_with_multiple_associations_with_same_table_belongs_to
firm_types = [:firm, :firm_with_basic_id, :firm_with_other_name, :firm_with_condition]
d1 = find_all_ordered(Client)
d2 = find_all_ordered(Client, firm_types)
d1.each_index do |i|
assert_equal(d1[i], d2[i])
firm_types.each { |type| assert_equal(d1[i].send(type), d2[i].send(type)) }
end
end
def test_eager_with_valid_association_as_string_not_symbol
assert_nothing_raised { Post.all.merge!(:includes => 'comments').to_a }
end
def test_eager_with_floating_point_numbers
assert_queries(2) do
# Before changes, the floating point numbers will be interpreted as table names and will cause this to run in one query
Comment.all.merge!(:where => "123.456 = 123.456", :includes => :post).to_a
end
end
def test_preconfigured_includes_with_belongs_to
author = posts(:welcome).author_with_posts
assert_no_queries {assert_equal 5, author.posts.size}
end
def test_preconfigured_includes_with_has_one
comment = posts(:sti_comments).very_special_comment_with_post
assert_no_queries {assert_equal posts(:sti_comments), comment.post}
end
def test_eager_association_with_scope_with_joins
assert_nothing_raised do
Post.includes(:very_special_comment_with_post_with_joins).to_a
end
end
def test_preconfigured_includes_with_has_many
posts = authors(:david).posts_with_comments
one = posts.detect { |p| p.id == 1 }
assert_no_queries do
assert_equal 5, posts.size
assert_equal 2, one.comments.size
end
end
def test_preconfigured_includes_with_habtm
posts = authors(:david).posts_with_categories
one = posts.detect { |p| p.id == 1 }
assert_no_queries do
assert_equal 5, posts.size
assert_equal 2, one.categories.size
end
end
def test_preconfigured_includes_with_has_many_and_habtm
posts = authors(:david).posts_with_comments_and_categories
one = posts.detect { |p| p.id == 1 }
assert_no_queries do
assert_equal 5, posts.size
assert_equal 2, one.comments.size
assert_equal 2, one.categories.size
end
end
def test_count_with_include
assert_equal 3, authors(:david).posts_with_comments.where("length(comments.body) > 15").references(:comments).count
end
def test_association_loading_notification
notifications = messages_for('instantiation.active_record') do
Developer.all.merge!(:includes => 'projects', :where => { 'developers_projects.access_level' => 1 }, :limit => 5).to_a.size
end
message = notifications.first
payload = message.last
count = Developer.all.merge!(:includes => 'projects', :where => { 'developers_projects.access_level' => 1 }, :limit => 5).to_a.size
# eagerloaded row count should be greater than just developer count
assert_operator payload[:record_count], :>, count
assert_equal Developer.name, payload[:class_name]
end
def test_base_messages
notifications = messages_for('instantiation.active_record') do
Developer.all.to_a
end
message = notifications.first
payload = message.last
assert_equal Developer.all.to_a.count, payload[:record_count]
assert_equal Developer.name, payload[:class_name]
end
# Test helper: records every ActiveSupport notification published under
# +name+ while the given block runs.
#
# Yields once, then returns the Array of captured notification argument
# arrays (one entry per event).
#
# Fix: unsubscribe the specific subscriber object returned by +subscribe+
# rather than calling unsubscribe(name), which detaches *every* subscriber
# registered for that event name — including ones this helper did not create.
def messages_for(name)
notifications = []
subscriber = ActiveSupport::Notifications.subscribe(name) do |*args|
notifications << args
end
yield
notifications
ensure
# Guard: +subscriber+ is nil if subscribe itself raised.
ActiveSupport::Notifications.unsubscribe(subscriber) if subscriber
end
def test_load_with_sti_sharing_association
assert_queries(2) do #should not do 1 query per subclass
Comment.includes(:post).to_a
end
end
def test_conditions_on_join_table_with_include_and_limit
assert_equal 3, Developer.all.merge!(:includes => 'projects', :where => { 'developers_projects.access_level' => 1 }, :limit => 5).to_a.size
end
# Guards against the preloader building throwaway AR objects: the number of
# Developer instances actually instantiated (tracked via the test-only
# Developer.instance_count counter, reset here) must equal the number of
# records ultimately returned to the caller.
def test_dont_create_temporary_active_record_instances
Developer.instance_count = 0
developers = Developer.all.merge!(:includes => 'projects', :where => { 'developers_projects.access_level' => 1 }, :limit => 5).to_a
assert_equal developers.count, Developer.instance_count
end
def test_order_on_join_table_with_include_and_limit
assert_equal 5, Developer.all.merge!(:includes => 'projects', :order => 'developers_projects.joined_on DESC', :limit => 5).to_a.size
end
def test_eager_loading_with_order_on_joined_table_preloads
posts = assert_queries(2) do
Post.all.merge!(:joins => :comments, :includes => :author, :order => 'comments.id DESC').to_a
end
assert_equal posts(:eager_other), posts[1]
assert_equal authors(:mary), assert_no_queries { posts[1].author}
end
def test_eager_loading_with_conditions_on_joined_table_preloads
posts = assert_queries(2) do
Post.all.merge!(:select => 'distinct posts.*', :includes => :author, :joins => [:comments], :where => "comments.body like 'Thank you%'", :order => 'posts.id').to_a
end
assert_equal [posts(:welcome)], posts
assert_equal authors(:david), assert_no_queries { posts[0].author}
posts = assert_queries(2) do
Post.all.merge!(:select => 'distinct posts.*', :includes => :author, :joins => [:comments], :where => "comments.body like 'Thank you%'", :order => 'posts.id').to_a
end
assert_equal [posts(:welcome)], posts
assert_equal authors(:david), assert_no_queries { posts[0].author}
posts = assert_queries(2) do
Post.all.merge!(:includes => :author, :joins => {:taggings => :tag}, :where => "tags.name = 'General'", :order => 'posts.id').to_a
end
assert_equal posts(:welcome, :thinking), posts
posts = assert_queries(2) do
Post.all.merge!(:includes => :author, :joins => {:taggings => {:tag => :taggings}}, :where => "taggings_tags.super_tag_id=2", :order => 'posts.id').to_a
end
assert_equal posts(:welcome, :thinking), posts
end
def test_preload_has_many_with_association_condition_and_default_scope
post = Post.create!(:title => 'Beaches', :body => "I like beaches!")
Reader.create! :person => people(:david), :post => post
LazyReader.create! :person => people(:susan), :post => post
assert_equal 1, post.lazy_readers.to_a.size
assert_equal 2, post.lazy_readers_skimmers_or_not.to_a.size
post_with_readers = Post.includes(:lazy_readers_skimmers_or_not).find(post.id)
assert_equal 2, post_with_readers.lazy_readers_skimmers_or_not.to_a.size
end
def test_eager_loading_with_conditions_on_string_joined_table_preloads
posts = assert_queries(2) do
Post.all.merge!(:select => 'distinct posts.*', :includes => :author, :joins => "INNER JOIN comments on comments.post_id = posts.id", :where => "comments.body like 'Thank you%'", :order => 'posts.id').to_a
end
assert_equal [posts(:welcome)], posts
assert_equal authors(:david), assert_no_queries { posts[0].author}
posts = assert_queries(2) do
Post.all.merge!(:select => 'distinct posts.*', :includes => :author, :joins => ["INNER JOIN comments on comments.post_id = posts.id"], :where => "comments.body like 'Thank you%'", :order => 'posts.id').to_a
end
assert_equal [posts(:welcome)], posts
assert_equal authors(:david), assert_no_queries { posts[0].author}
end
def test_eager_loading_with_select_on_joined_table_preloads
posts = assert_queries(2) do
Post.all.merge!(:select => 'posts.*, authors.name as author_name', :includes => :comments, :joins => :author, :order => 'posts.id').to_a
end
assert_equal 'David', posts[0].author_name
assert_equal posts(:welcome).comments, assert_no_queries { posts[0].comments}
end
def test_eager_loading_with_conditions_on_join_model_preloads
authors = assert_queries(2) do
Author.all.merge!(:includes => :author_address, :joins => :comments, :where => "posts.title like 'Welcome%'").to_a
end
assert_equal authors(:david), authors[0]
assert_equal author_addresses(:david_address), authors[0].author_address
end
def test_preload_belongs_to_uses_exclusive_scope
people = Person.males.merge(:includes => :primary_contact).to_a
assert_not_equal people.length, 0
people.each do |person|
assert_no_queries {assert_not_nil person.primary_contact}
assert_equal Person.find(person.id).primary_contact, person.primary_contact
end
end
def test_preload_has_many_uses_exclusive_scope
people = Person.males.includes(:agents).to_a
people.each do |person|
assert_equal Person.find(person.id).agents, person.agents
end
end
def test_preload_has_many_using_primary_key
expected = Firm.first.clients_using_primary_key.to_a
firm = Firm.includes(:clients_using_primary_key).first
assert_no_queries do
assert_equal expected, firm.clients_using_primary_key
end
end
def test_include_has_many_using_primary_key
expected = Firm.find(1).clients_using_primary_key.sort_by(&:name)
# Oracle adapter truncates alias to 30 characters
if current_adapter?(:OracleAdapter)
firm = Firm.all.merge!(:includes => :clients_using_primary_key, :order => 'clients_using_primary_keys_companies'[0,30]+'.name').find(1)
else
firm = Firm.all.merge!(:includes => :clients_using_primary_key, :order => 'clients_using_primary_keys_companies.name').find(1)
end
assert_no_queries do
assert_equal expected, firm.clients_using_primary_key
end
end
def test_preload_has_one_using_primary_key
expected = accounts(:signals37)
firm = Firm.all.merge!(:includes => :account_using_primary_key, :order => 'companies.id').first
assert_no_queries do
assert_equal expected, firm.account_using_primary_key
end
end
def test_include_has_one_using_primary_key
expected = accounts(:signals37)
firm = Firm.all.merge!(:includes => :account_using_primary_key, :order => 'accounts.id').to_a.detect {|f| f.id == 1}
assert_no_queries do
assert_equal expected, firm.account_using_primary_key
end
end
def test_preloading_empty_belongs_to
c = Client.create!(:name => 'Foo', :client_of => Company.maximum(:id) + 1)
client = assert_queries(2) { Client.preload(:firm).find(c.id) }
assert_no_queries { assert_nil client.firm }
end
def test_preloading_empty_belongs_to_polymorphic
t = Tagging.create!(:taggable_type => 'Post', :taggable_id => Post.maximum(:id) + 1, :tag => tags(:general))
tagging = assert_queries(2) { Tagging.preload(:taggable).find(t.id) }
assert_no_queries { assert_nil tagging.taggable }
end
def test_preloading_through_empty_belongs_to
c = Client.create!(:name => 'Foo', :client_of => Company.maximum(:id) + 1)
client = assert_queries(2) { Client.preload(:accounts).find(c.id) }
assert_no_queries { assert client.accounts.empty? }
end
def test_preloading_has_many_through_with_distinct
mary = Author.includes(:unique_categorized_posts).where(:id => authors(:mary).id).first
assert_equal 1, mary.unique_categorized_posts.length
assert_equal 1, mary.unique_categorized_post_ids.length
end
def test_preloading_has_one_using_reorder
# Anonymous AR subclass (with a stubbed .name) so these has_one definitions
# don't leak into other tests that share the authors table.
# NOTE(review): "reorderd" looks like a typo for "reordered", but it is used
# consistently within this test, so behavior is unaffected.
klass = Class.new(ActiveRecord::Base) do
def self.name; "TempAuthor"; end
self.table_name = "authors"
has_one :post, class_name: "PostWithDefaultScope", foreign_key: :author_id
# reorder must replace (not append to) the default scope's ordering.
has_one :reorderd_post, -> { reorder(title: :desc) }, class_name: "PostWithDefaultScope", foreign_key: :author_id
end
author = klass.first
# PRECONDITION: make sure ordering results in different results
assert_not_equal author.post, author.reorderd_post
preloaded_reorderd_post = klass.preload(:reorderd_post).first.reorderd_post
assert_equal author.reorderd_post, preloaded_reorderd_post
assert_equal Post.order(title: :desc).first.title, preloaded_reorderd_post.title
end
def test_preloading_polymorphic_with_custom_foreign_type
sponsor = sponsors(:moustache_club_sponsor_for_groucho)
groucho = members(:groucho)
sponsor = assert_queries(2) {
Sponsor.includes(:thing).where(:id => sponsor.id).first
}
assert_no_queries { assert_equal groucho, sponsor.thing }
end
def test_joins_with_includes_should_preload_via_joins
post = assert_queries(1) { Post.includes(:comments).joins(:comments).order('posts.id desc').to_a.first }
assert_queries(0) do
assert_not_equal 0, post.comments.to_a.count
end
end
def test_join_eager_with_empty_order_should_generate_valid_sql
assert_nothing_raised do
Post.includes(:comments).order("").where(:comments => {:body => "Thank you for the welcome"}).first
end
end
def test_deep_including_through_habtm
# warm up habtm cache
posts = Post.all.merge!(:includes => {:categories => :categorizations}, :order => "posts.id").to_a
posts[0].categories[0].categorizations.length
posts = Post.all.merge!(:includes => {:categories => :categorizations}, :order => "posts.id").to_a
assert_no_queries { assert_equal 2, posts[0].categories[0].categorizations.length }
assert_no_queries { assert_equal 1, posts[0].categories[1].categorizations.length }
assert_no_queries { assert_equal 2, posts[1].categories[0].categorizations.length }
end
def test_eager_load_multiple_associations_with_references
mentor = Mentor.create!(name: "Barış Can DAYLIK")
developer = Developer.create!(name: "Mehmet Emin İNAÇ", mentor: mentor)
Contract.create!(developer: developer)
project = Project.create!(name: "VNGRS", mentor: mentor)
project.developers << developer
projects = Project.references(:mentors).includes(mentor: { developers: :contracts }, developers: :contracts)
assert_equal projects.last.mentor.developers.first.contracts, projects.last.developers.last.contracts
end
test "scoping with a circular preload" do
assert_equal Comment.find(1), Comment.preload(:post => :comments).scoping { Comment.find(1) }
end
test "circular preload does not modify unscoped" do
expected = FirstPost.unscoped.find(2)
FirstPost.preload(:comments => :first_post).find(1)
assert_equal expected, FirstPost.unscoped.find(2)
end
test "preload ignores the scoping" do
assert_equal(
Comment.find(1).post,
Post.where('1 = 0').scoping { Comment.preload(:post).find(1).post }
)
end
test "deep preload" do
post = Post.preload(author: :posts, comments: :post).first
assert_predicate post.author.association(:posts), :loaded?
assert_predicate post.comments.first.association(:post), :loaded?
end
test "preloading does not cache has many association subset when preloaded with a through association" do
author = Author.includes(:comments_with_order_and_conditions, :posts).first
assert_no_queries { assert_equal 2, author.comments_with_order_and_conditions.size }
assert_no_queries { assert_equal 5, author.posts.size, "should not cache a subset of the association" }
end
test "preloading a through association twice does not reset it" do
members = Member.includes(current_membership: :club).includes(:club).to_a
assert_no_queries {
assert_equal 3, members.map(&:current_membership).map(&:club).size
}
end
test "works in combination with order(:symbol) and reorder(:symbol)" do
author = Author.includes(:posts).references(:posts).order(:name).find_by('posts.title IS NOT NULL')
assert_equal authors(:bob), author
author = Author.includes(:posts).references(:posts).reorder(:name).find_by('posts.title IS NOT NULL')
assert_equal authors(:bob), author
end
test "preloading with a polymorphic association and using the existential predicate but also using a select" do
assert_equal authors(:david), authors(:david).essays.includes(:writer).first.writer
assert_nothing_raised do
authors(:david).essays.includes(:writer).select(:name).any?
end
end
test "preloading the same association twice works" do
Member.create!
members = Member.preload(:current_membership).includes(current_membership: :club).all.to_a
assert_no_queries {
members_with_membership = members.select(&:current_membership)
assert_equal 3, members_with_membership.map(&:current_membership).map(&:club).size
}
end
test "preloading with a polymorphic association and using the existential predicate" do
assert_equal authors(:david), authors(:david).essays.includes(:writer).first.writer
assert_nothing_raised do
authors(:david).essays.includes(:writer).any?
end
end
test "preloading associations with string joins and order references" do
author = assert_queries(2) {
Author.includes(:posts).joins("LEFT JOIN posts ON posts.author_id = authors.id").order("posts.title DESC").first
}
assert_no_queries {
assert_equal 5, author.posts.size
}
end
test "including associations with where.not adds implicit references" do
author = assert_queries(2) {
Author.includes(:posts).where.not(posts: { title: 'Welcome to the weblog'} ).last
}
assert_no_queries {
assert_equal 2, author.posts.size
}
end
test "including association based on sql condition and no database column" do
assert_equal pets(:parrot), Owner.including_last_pet.first.last_pet
end
test "preloading and eager loading of instance dependent associations is not supported" do
message = "association scope 'posts_with_signature' is"
error = assert_raises(ArgumentError) do
Author.includes(:posts_with_signature).to_a
end
assert_match message, error.message
error = assert_raises(ArgumentError) do
Author.preload(:posts_with_signature).to_a
end
assert_match message, error.message
error = assert_raises(ArgumentError) do
Author.eager_load(:posts_with_signature).to_a
end
assert_match message, error.message
end
test "preload with invalid argument" do
exception = assert_raises(ArgumentError) do
Author.preload(10).to_a
end
assert_equal('10 was not recognized for preload', exception.message)
end
test "preloading readonly association" do
# has-one
firm = Firm.where(id: "1").preload(:readonly_account).first!
assert firm.readonly_account.readonly?
# has_and_belongs_to_many
project = Project.where(id: "2").preload(:readonly_developers).first!
assert project.readonly_developers.first.readonly?
# has-many :through
david = Author.where(id: "1").preload(:readonly_comments).first!
assert david.readonly_comments.first.readonly?
end
test "eager-loading readonly association" do
# has-one
firm = Firm.where(id: "1").eager_load(:readonly_account).first!
assert firm.readonly_account.readonly?
# has_and_belongs_to_many
project = Project.where(id: "2").eager_load(:readonly_developers).first!
assert project.readonly_developers.first.readonly?
# has-many :through
david = Author.where(id: "1").eager_load(:readonly_comments).first!
assert david.readonly_comments.first.readonly?
# belongs_to
post = Post.where(id: "1").eager_load(:author).first!
assert post.author.readonly?
end
test "preloading a polymorphic association with references to the associated table" do
post = Post.includes(:tags).references(:tags).where('tags.name = ?', 'General').first
assert_equal posts(:welcome), post
end
test "eager-loading a polymorphic association with references to the associated table" do
post = Post.eager_load(:tags).where('tags.name = ?', 'General').first
assert_equal posts(:welcome), post
end
# CollectionProxy#reader is expensive, so the preloader avoids calling it.
test "preloading has_many_through association avoids calling association.reader" do
# Mocha any_instance expectation: the test fails if any HasManyAssociation
# instance receives #reader during the preload below.
ActiveRecord::Associations::HasManyAssociation.any_instance.expects(:reader).never
Author.preload(:readonly_comments).first!
end
end
| 41.02762 | 212 | 0.72973 |
ab0e277249d2a5f00ce4c3ce91d1a0e55d3fe24b | 5,335 | require "active_support/core_ext/integer/time"
Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Ensures that a master key has been made available in either ENV["RAILS_MASTER_KEY"]
# or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
# config.require_master_key = true
# NOTE(review): this is the body of a `Rails.application.configure do` block
# (production environment); the opening line is outside this chunk.
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Compress CSS using a preprocessor.
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Store uploaded files on the local file system (see config/storage.yml for options).
# NOTE(review): :local storage on a multi-server production deploy loses files
# across hosts — confirm this is intentional or switch to a cloud service.
config.active_storage.service = :local
# Mount Action Cable outside main process or domain.
# config.action_cable.mount_path = nil
# config.action_cable.url = 'wss://example.com/cable'
# config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# NOTE(review): force_ssl is commented out — confirm TLS is terminated upstream.
# config.force_ssl = true
# Include generic and useful information about system operation, but avoid logging too much
# information to avoid inadvertent exposure of personally identifiable information (PII).
config.log_level = :info
# Prepend all log lines with the following tags.
config.log_tags = [ :request_id ]
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Use a real queuing backend for Active Job (and separate queues per environment).
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "crisalix_production"
config.action_mailer.perform_caching = false
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Log disallowed deprecations.
config.active_support.disallowed_deprecation = :log
# Tell Active Support which deprecation messages to disallow.
config.active_support.disallowed_deprecation_warnings = []
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require "syslog/logger"
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
# When RAILS_LOG_TO_STDOUT is set (e.g. in containerized deploys), replace the
# file logger with a tagged STDOUT logger using the formatter configured above.
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
# Inserts middleware to perform automatic connection switching.
# The `database_selector` hash is used to pass options to the DatabaseSelector
# middleware. The `delay` is used to determine how long to wait after a write
# to send a subsequent read to the primary.
#
# The `database_resolver` class is used by the middleware to determine which
# database is appropriate to use based on the time delay.
#
# The `database_resolver_context` class is used by the middleware to set
# timestamps for the last write to the primary. The resolver uses the context
# class timestamps to determine how long to wait before reading from the
# replica.
#
# By default Rails will store a last write timestamp in the session. The
# DatabaseSelector middleware is designed as such you can define your own
# strategy for connection switching and pass that into the middleware through
# these configuration options.
# config.active_record.database_selector = { delay: 2.seconds }
# config.active_record.database_resolver = ActiveRecord::Middleware::DatabaseSelector::Resolver
# config.active_record.database_resolver_context = ActiveRecord::Middleware::DatabaseSelector::Resolver::Session
end
| 44.090909 | 114 | 0.765323 |
1a6e65d506b52f1cd9d6a1a18b7c35772a5bd28a | 99 | class Post < ApplicationRecord
has_many :comments
accepts_nested_attributes_for :comments
end
| 16.5 | 41 | 0.828283 |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.