hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
7a8c5154fb135a39e4a83f54dc83d86651d05352
| 121 |
FactoryGirl.define do
# Minimal factory producing a DummyModel with a short random name and a
# random integer attribute; used by the host gem's test suite.
factory :dummy_model do
# Random base-36 token. NOTE(review): rand(36 * 4) only yields 0..143,
# i.e. tokens "0".."3z" (at most 2 chars). If a 4-character token was
# intended this should be rand(36**4) — confirm before changing.
name { rand(36 * 4).to_s(36) }
something { rand(100) }
end
end
| 17.285714 | 34 | 0.644628 |
393919c03f4ecdb8378ce8beeb07f110390ef9e5
| 7,650 |
module FFaker
VERSION = '2.9.0'.freeze
require 'ffaker/utils/array_utils'
require 'ffaker/utils/module_utils'
extend ModuleUtils
# Root of the gem's lib directory; used to locate bundled data files.
BASE_LIB_PATH = File.expand_path('..', __FILE__)
# Lowercase letters sampled by .letterify.
LETTERS = [*'a'..'z'].freeze
# Uppercase hexadecimal digits sampled by .hexify.
HEX = %w[0 1 2 3 4 5 6 7 8 9 A B C D E F].freeze
# Samples one of the given masks and replaces every '#' with a random
# hex digit (via ModuleUtils#fetch_sample, so it honours FFaker::Random).
def self.hexify(*masks)
fetch_sample(masks.flatten).gsub(/#/) { fetch_sample(HEX) }
end
# Samples one of the given masks and replaces every '#' with a random
# decimal digit. NOTE(review): uses Kernel#rand, not FFaker::Random, so
# this one is not controlled by FFaker's seed — confirm if intentional.
def self.numerify(*masks)
fetch_sample(masks.flatten).gsub(/#/) { rand(0..9).to_s }
end
# Samples one of the given masks and replaces every '?' with a random
# lowercase letter.
def self.letterify(*masks)
fetch_sample(masks.flatten).gsub(/\?/) { fetch_sample(LETTERS) }
end
# Replaces '#' with random digits and '?' with random letters in a mask.
#
# Now accepts one or more masks (strings and/or arrays), mirroring the
# splat signatures of .hexify / .numerify / .letterify for consistency.
# Backward compatible: a single string or a single array argument behaves
# exactly as before, since .numerify flattens its input.
def self.bothify(*masks)
  letterify(numerify(masks.flatten))
end
autoload :Address, 'ffaker/address'
autoload :AddressAU, 'ffaker/address_au'
autoload :AddressBR, 'ffaker/address_br'
autoload :AddressCA, 'ffaker/address_ca'
autoload :AddressCH, 'ffaker/address_ch'
autoload :AddressCHDE, 'ffaker/address_ch_de'
autoload :AddressCHFR, 'ffaker/address_ch_fr'
autoload :AddressCHIT, 'ffaker/address_ch_it'
autoload :AddressDA, 'ffaker/address_da'
autoload :AddressDE, 'ffaker/address_de'
autoload :AddressFI, 'ffaker/address_fi'
autoload :AddressFR, 'ffaker/address_fr'
autoload :AddressGR, 'ffaker/address_gr'
autoload :AddressID, 'ffaker/address_id'
autoload :AddressIN, 'ffaker/address_in'
autoload :AddressJA, 'ffaker/address_ja'
autoload :AddressKR, 'ffaker/address_kr'
autoload :AddressMX, 'ffaker/address_mx'
autoload :AddressNL, 'ffaker/address_nl'
autoload :AddressPL, 'ffaker/address_pl'
autoload :AddressRU, 'ffaker/address_ru'
autoload :AddressSE, 'ffaker/address_se'
autoload :AddressSN, 'ffaker/address_sn'
autoload :AddressUA, 'ffaker/address_ua'
autoload :AddressUK, 'ffaker/address_uk'
autoload :AddressUS, 'ffaker/address_us'
autoload :Airline, 'ffaker/airline'
autoload :Animal, 'ffaker/animals'
autoload :Avatar, 'ffaker/avatar'
autoload :AWS, 'ffaker/aws'
autoload :BaconIpsum, 'ffaker/bacon_ipsum'
autoload :Book, 'ffaker/book'
autoload :Boolean, 'ffaker/boolean'
autoload :CheesyLingo, 'ffaker/cheesy_lingo'
autoload :Code, 'ffaker/code'
autoload :Color, 'ffaker/color'
autoload :ColorUA, 'ffaker/color_ua'
autoload :Company, 'ffaker/company'
autoload :CompanyCN, 'ffaker/company_cn'
autoload :CompanyIT, 'ffaker/company_it'
autoload :CompanySE, 'ffaker/company_se'
autoload :Conference, 'ffaker/conference'
autoload :CoursesFR, 'ffaker/courses'
autoload :Currency, 'ffaker/currency'
autoload :DizzleIpsum, 'ffaker/dizzle_ipsum'
autoload :Education, 'ffaker/education'
autoload :Filesystem, 'ffaker/filesystem'
autoload :Food, 'ffaker/food'
autoload :Gender, 'ffaker/gender'
autoload :GenderBR, 'ffaker/gender_br'
autoload :GenderCN, 'ffaker/gender_cn'
autoload :GenderID, 'ffaker/gender_id'
autoload :GenderKR, 'ffaker/gender_kr'
autoload :Geolocation, 'ffaker/geolocation'
autoload :Guid, 'ffaker/guid'
autoload :HealthcareIpsum, 'ffaker/healthcare_ipsum'
autoload :HipsterIpsum, 'ffaker/hipster_ipsum'
autoload :HTMLIpsum, 'ffaker/html_ipsum'
autoload :Identification, 'ffaker/identification'
autoload :IdentificationBR, 'ffaker/identification_br'
autoload :IdentificationES, 'ffaker/identification_es'
autoload :IdentificationESCL, 'ffaker/identification_es_cl'
autoload :IdentificationESCO, 'ffaker/identification_es_co'
autoload :IdentificationKr, 'ffaker/identification_kr'
autoload :IdentificationMX, 'ffaker/identification_mx'
autoload :Image, 'ffaker/image'
autoload :Internet, 'ffaker/internet'
autoload :InternetSE, 'ffaker/internet_se'
autoload :Job, 'ffaker/job'
autoload :JobBR, 'ffaker/job_br'
autoload :JobCN, 'ffaker/job_cn'
autoload :JobFR, 'ffaker/job_fr'
autoload :JobJA, 'ffaker/job_ja'
autoload :JobKR, 'ffaker/job_kr'
autoload :JobVN, 'ffaker/job_vn'
autoload :Locale, 'ffaker/locale'
autoload :Lorem, 'ffaker/lorem'
autoload :LoremAR, 'ffaker/lorem_ar'
autoload :LoremCN, 'ffaker/lorem_cn'
autoload :LoremFR, 'ffaker/lorem_fr'
autoload :LoremIE, 'ffaker/lorem_ie'
autoload :LoremJA, 'ffaker/lorem_ja'
autoload :LoremKR, 'ffaker/lorem_kr'
autoload :LoremPL, 'ffaker/lorem_pl'
autoload :LoremRU, 'ffaker/lorem_ru'
autoload :LoremUA, 'ffaker/lorem_ua'
autoload :Movie, 'ffaker/movie'
autoload :Music, 'ffaker/music'
autoload :Name, 'ffaker/name'
autoload :NameAR, 'ffaker/name_ar'
autoload :NameBR, 'ffaker/name_br'
autoload :NameCN, 'ffaker/name_cn'
autoload :NameCS, 'ffaker/name_cs'
autoload :NameDA, 'ffaker/name_da'
autoload :NameDE, 'ffaker/name_de'
autoload :NameES, 'ffaker/name_es'
autoload :NameFR, 'ffaker/name_fr'
autoload :NameGA, 'ffaker/name_ga'
autoload :NameGR, 'ffaker/name_gr'
autoload :NameID, 'ffaker/name_id'
autoload :NameIT, 'ffaker/name_it'
autoload :NameJA, 'ffaker/name_ja'
autoload :NameKH, 'ffaker/name_kh'
autoload :NameKR, 'ffaker/name_kr'
autoload :NameMX, 'ffaker/name_mx'
autoload :NameNB, 'ffaker/name_nb'
autoload :NameNL, 'ffaker/name_nl'
autoload :NamePH, 'ffaker/name_ph'
autoload :NamePL, 'ffaker/name_pl'
autoload :NameRU, 'ffaker/name_ru'
autoload :NameSE, 'ffaker/name_se'
autoload :NameSN, 'ffaker/name_sn'
autoload :NameTH, 'ffaker/name_th'
autoload :NameTHEN, 'ffaker/name_th_en'
autoload :NameUA, 'ffaker/name_ua'
autoload :NameVN, 'ffaker/name_vn'
autoload :NatoAlphabet, 'ffaker/nato_alphabet'
autoload :PhoneNumber, 'ffaker/phone_number'
autoload :PhoneNumberAU, 'ffaker/phone_number_au'
autoload :PhoneNumberBR, 'ffaker/phone_number_br'
autoload :PhoneNumberCH, 'ffaker/phone_number_ch'
autoload :PhoneNumberCU, 'ffaker/phone_number_cu'
autoload :PhoneNumberDA, 'ffaker/phone_number_da'
autoload :PhoneNumberDE, 'ffaker/phone_number_de'
autoload :PhoneNumberFR, 'ffaker/phone_number_fr'
autoload :PhoneNumberID, 'ffaker/phone_number_id'
autoload :PhoneNumberIT, 'ffaker/phone_number_it'
autoload :PhoneNumberKR, 'ffaker/phone_number_kr'
autoload :PhoneNumberMX, 'ffaker/phone_number_mx'
autoload :PhoneNumberNL, 'ffaker/phone_number_nl'
autoload :PhoneNumberSE, 'ffaker/phone_number_se'
autoload :PhoneNumberSG, 'ffaker/phone_number_sg'
autoload :PhoneNumberSN, 'ffaker/phone_number_sn'
autoload :Product, 'ffaker/product'
autoload :Skill, 'ffaker/skill'
autoload :Sport, 'ffaker/sport'
autoload :SSN, 'ffaker/ssn'
autoload :SSNMX, 'ffaker/ssn_mx'
autoload :SSNSE, 'ffaker/ssn_se'
autoload :String, 'ffaker/string'
autoload :Time, 'ffaker/time'
autoload :Tweet, 'ffaker/tweet'
autoload :Unit, 'ffaker/unit'
autoload :UnitEnglish, 'ffaker/unit_english'
autoload :UnitMetric, 'ffaker/unit_metric'
autoload :Vehicle, 'ffaker/vehicle'
autoload :Venue, 'ffaker/venue'
autoload :Youtube, 'ffaker/youtube'
# Random Number Generator (RNG) used with ModuleUtils#fetch, #shuffle, #rand
# in order to provide deterministic repeatability.
# Seeded random number generator backing ModuleUtils#fetch, #shuffle and
# #rand, allowing generated data sequences to be replayed deterministically.
module Random
  class << self
    # Current RNG seed, lazily chosen via ::Random.new_seed on first use.
    def seed
      @random_seed ||= ::Random.new_seed
    end

    # Installs a new seed and rebuilds the internal RNG so the pseudo-random
    # sequence restarts from that seed. Returns the new seed.
    def seed=(new_seed)
      @random_seed = new_seed
      reset!
      new_seed
    end

    # Discards the current RNG and restarts it from #seed.
    def reset!
      @rng = new_rng
    end

    # Draws the next value from the seeded RNG, mirroring Kernel#rand's
    # handling of an optional upper bound.
    def rand(max = nil)
      max ? rng.rand(max) : rng.rand
    end

    # The memoized internal ::Random instance.
    def rng
      @rng ||= new_rng
    end

    # Builds a fresh ::Random seeded with #seed.
    def new_rng
      ::Random.new(seed)
    end
  end
end
end
| 34.772727 | 78 | 0.733856 |
18a1b64729e2b6d67aa83196c16387485a95df64
| 14,564 |
# frozen_string_literal: true
module Gitlab
module BitbucketServerImport
class Importer
# When true, branches are re-created on the Bitbucket server so SHAs of
# closed-but-unmerged pull requests become fetchable (see #initialize).
attr_reader :recover_missing_commits
attr_reader :project, :project_key, :repository_slug, :client, :errors, :users
attr_accessor :logger
# Git remote name used when mirroring the Bitbucket Server repository.
REMOTE_NAME = 'bitbucket_server'
# Number of pull requests handled per restore-branches/fetch cycle.
BATCH_SIZE = 100
# Value object for branches created temporarily during import.
TempBranch = Struct.new(:name, :sha)
# This importer clones the git repository itself, not just metadata.
def self.imports_repository?
true
end
# Refspecs fetched when mirroring; maps Bitbucket pull-request refs onto
# GitLab merge-request refs.
def self.refmap
[:heads, :tags, '+refs/pull-requests/*/to:refs/merge-requests/*/head']
end
# Unlike GitHub, you can't grab the commit SHAs for pull requests that
# have been closed but not merged even though Bitbucket has these
# commits internally. We can recover these pull requests by creating a
# branch with the Bitbucket REST API, but by default we turn this
# behavior off.
# @param project [Project] target GitLab project; its import_data supplies
#   the Bitbucket project key, repository slug and API credentials.
# @param recover_missing_commits [Boolean] when true, re-create branches on
#   the Bitbucket server so commits of closed PRs can be fetched (see above).
def initialize(project, recover_missing_commits: false)
@project = project
@recover_missing_commits = recover_missing_commits
@project_key = project.import_data.data['project_key']
@repository_slug = project.import_data.data['repo_slug']
@client = BitbucketServer::Client.new(project.import_data.credentials)
@formatter = Gitlab::ImportFormatter.new
@errors = []
@users = {} # cache of author email -> GitLab user id (nil cached too)
@temp_branches = []
@logger = Gitlab::Import::Logger.build
end
# Runs the full import pipeline: mirror the repository, import pull
# requests (with comments), delete temporary recovery branches, then
# persist any per-item errors collected along the way.
# Always returns true; hard failures propagate as exceptions.
def execute
import_repository
import_pull_requests
delete_temp_branches
handle_errors
metrics.track_finished_import
log_info(stage: "complete")
true
end
private
# Writes the accumulated per-item errors to the import state as a JSON
# blob. No-op when the import completed without recorded errors.
def handle_errors
return unless errors.any?
project.import_state.update_column(:last_error, {
message: 'The remote data could not be fully imported.',
errors: errors
}.to_json)
end
# Maps a Bitbucket author email to a GitLab user id, falling back to the
# project creator when no confirmed user matches.
def gitlab_user_id(email)
find_user_id(email) || project.creator_id
end
# Looks up (and memoizes) the GitLab user id for a confirmed email address.
# Returns nil for blank emails or unknown users; nil results are cached as
# well, so each distinct email hits the database at most once.
def find_user_id(email)
return unless email
return users[email] if users.key?(email)
user = User.find_by_any_email(email, confirmed: true)
users[email] = user&.id
user&.id
end
# Memoized Bitbucket repository metadata fetched through the REST client.
def repo
@repo ||= client.repo(project_key, repository_slug)
end
# Returns true when the given SHA resolves to a commit in the local
# repository. The original returned the commit object itself (truthy)
# despite the `?` suffix; coerce to a boolean so the predicate honours
# Ruby naming conventions. Callers only rely on truthiness, so this is
# backward compatible.
def sha_exists?(sha)
  !!project.repository.commit(sha)
end
# Builds the name of a temporary recovery branch for the given pull
# request, e.g. "gitlab/import/pull-request/42/from".
def temp_branch_name(pull_request, suffix)
  ['gitlab', 'import', 'pull-request', pull_request.iid, suffix].join('/')
end
# This method restores required SHAs that GitLab needs to create diffs
# into branch names as the following:
#
# gitlab/import/pull-request/N/{to,from}
# Collects the from/to SHAs of the given pull requests, re-creates them as
# branches on the Bitbucket server, then re-fetches the repository so the
# commits become available locally. Created branches are remembered in
# @temp_branches for later cleanup by #delete_temp_branches.
def restore_branches(pull_requests)
shas_to_restore = []
pull_requests.each do |pull_request|
shas_to_restore << TempBranch.new(temp_branch_name(pull_request, :from),
pull_request.source_branch_sha)
shas_to_restore << TempBranch.new(temp_branch_name(pull_request, :to),
pull_request.target_branch_sha)
end
# Create the branches on the Bitbucket Server first
created_branches = restore_branch_shas(shas_to_restore)
@temp_branches += created_branches
# Now sync the repository so we get the new branches
import_repository unless created_branches.empty?
end
# Creates each still-missing temp branch on the Bitbucket server and
# returns the TempBranch entries actually created. SHAs already present
# locally are skipped; connection failures are logged and skipped so one
# bad branch does not abort the batch.
def restore_branch_shas(shas_to_restore)
shas_to_restore.each_with_object([]) do |temp_branch, branches_created|
branch_name = temp_branch.name
sha = temp_branch.sha
next if sha_exists?(sha)
begin
client.create_branch(project_key, repository_slug, branch_name, sha)
branches_created << temp_branch
rescue BitbucketServer::Connection::ConnectionError => e
log_warn(message: "Unable to recreate branch", sha: sha, error: e.message)
end
end
end
# Mirrors the Bitbucket repository into the project using the class-level
# refmap (heads, tags, and PR refs). On Gitlab::Shell::Error the content
# cache is expired before re-raising, so a half-imported repo is not
# mistaken for a complete one on retry.
def import_repository
log_info(stage: 'import_repository', message: 'starting import')
project.ensure_repository
project.repository.fetch_as_mirror(project.import_url, refmap: self.class.refmap, remote_name: REMOTE_NAME)
log_info(stage: 'import_repository', message: 'finished import')
rescue Gitlab::Shell::Error => e
Gitlab::ErrorTracking.log_exception(
e,
stage: 'import_repository', message: 'failed import', error: e.message
)
# Expire cache to prevent scenarios such as:
# 1. First import failed, but the repo was imported successfully, so +exists?+ returns true
# 2. Retried import, repo is broken or not imported but +exists?+ still returns true
project.repository.expire_content_cache if project.repository_exists?
raise
end
# Bitbucket Server keeps tracks of references for open pull requests in
# refs/heads/pull-requests, but closed and merged requests get moved
# into hidden internal refs under stash-refs/pull-requests. Unless the
# SHAs involved are at the tip of a branch or tag, there is no way to
# retrieve the server for those commits.
#
# To avoid losing history, we use the Bitbucket API to re-create the branch
# on the remote server. Then we have to issue a `git fetch` to download these
# branches.
# Imports every pull request from Bitbucket, in batches of BATCH_SIZE so
# branch restoration (when enabled) amortizes the cost of git fetches.
# Failures on individual PRs are logged and appended to #errors instead of
# aborting the whole import.
def import_pull_requests
pull_requests = client.pull_requests(project_key, repository_slug).to_a
# Creating branches on the server and fetching the newly-created branches
# may take a number of network round-trips. Do this in batches so that we can
# avoid doing a git fetch for every new branch.
pull_requests.each_slice(BATCH_SIZE) do |batch|
restore_branches(batch) if recover_missing_commits
batch.each do |pull_request|
import_bitbucket_pull_request(pull_request)
rescue StandardError => e
Gitlab::ErrorTracking.log_exception(
e,
stage: 'import_pull_requests', iid: pull_request.iid, error: e.message
)
backtrace = Gitlab::BacktraceCleaner.clean_backtrace(e.backtrace)
errors << { type: :pull_request, iid: pull_request.iid, errors: e.message, backtrace: backtrace.join("\n"), raw_response: pull_request.raw }
end
end
end
# Deletes the temporary recovery branches both on the Bitbucket server and
# in the local repository. Connection failures are logged and recorded in
# @errors; remaining branches are still attempted.
def delete_temp_branches
@temp_branches.each do |branch|
client.delete_branch(project_key, repository_slug, branch.name, branch.sha)
project.repository.delete_branch(branch.name)
rescue BitbucketServer::Connection::ConnectionError => e
Gitlab::ErrorTracking.log_exception(
e,
stage: 'delete_temp_branches', branch: branch.name, error: e.message
)
@errors << { type: :delete_temp_branches, branch_name: branch.name, errors: e.message }
end
end
# Converts one Bitbucket pull request into a GitLab merge request. When
# the author has no matching GitLab user, an attribution line is prepended
# to the description and authorship falls back to the project creator.
# Comments are imported only when the merge request persists.
def import_bitbucket_pull_request(pull_request)
log_info(stage: 'import_bitbucket_pull_requests', message: 'starting', iid: pull_request.iid)
description = ''
description += @formatter.author_line(pull_request.author) unless find_user_id(pull_request.author_email)
description += pull_request.description if pull_request.description
author_id = gitlab_user_id(pull_request.author_email)
attributes = {
iid: pull_request.iid,
title: pull_request.title,
description: description,
source_project_id: project.id,
source_branch: Gitlab::Git.ref_name(pull_request.source_branch_name),
source_branch_sha: pull_request.source_branch_sha,
target_project_id: project.id,
target_branch: Gitlab::Git.ref_name(pull_request.target_branch_name),
target_branch_sha: pull_request.target_branch_sha,
state_id: MergeRequest.available_states[pull_request.state],
author_id: author_id,
created_at: pull_request.created_at,
updated_at: pull_request.updated_at
}
creator = Gitlab::Import::MergeRequestCreator.new(project)
merge_request = creator.execute(attributes)
if merge_request.persisted?
import_pull_request_comments(pull_request, merge_request)
metrics.merge_requests_counter.increment
end
log_info(stage: 'import_bitbucket_pull_requests', message: 'finished', iid: pull_request.iid)
end
# Imports all activity on a pull request: the merge event (if any), inline
# diff comments, and standalone comments, splitting the activity stream by
# its comment?/merge_event?/inline_comment? predicates.
def import_pull_request_comments(pull_request, merge_request)
log_info(stage: 'import_pull_request_comments', message: 'starting', iid: merge_request.iid)
comments, other_activities = client.activities(project_key, repository_slug, pull_request.iid).partition(&:comment?)
merge_event = other_activities.find(&:merge_event?)
import_merge_event(merge_request, merge_event) if merge_event
inline_comments, pr_comments = comments.partition(&:inline_comment?)
import_inline_comments(inline_comments.map(&:comment), merge_request)
import_standalone_pr_comments(pr_comments.map(&:comment), merge_request)
log_info(stage: 'import_pull_request_comments', message: 'finished', iid: merge_request.iid,
merge_event_found: merge_event.present?,
inline_comments_count: inline_comments.count,
standalone_pr_comments: pr_comments.count)
end
# rubocop: disable CodeReuse/ActiveRecord
# Records the Bitbucket merge event on the merge request: merge commit SHA
# plus who merged it and when (on the MR metrics record). The merger falls
# back to the project creator when the committer email is unknown.
def import_merge_event(merge_request, merge_event)
log_info(stage: 'import_merge_event', message: 'starting', iid: merge_request.iid)
committer = merge_event.committer_email
user_id = gitlab_user_id(committer)
timestamp = merge_event.merge_timestamp
merge_request.update({ merge_commit_sha: merge_event.merge_commit })
metric = MergeRequest::Metrics.find_or_initialize_by(merge_request: merge_request)
metric.update(merged_by_id: user_id, merged_at: timestamp)
log_info(stage: 'import_merge_event', message: 'finished', iid: merge_request.iid)
end
# rubocop: enable CodeReuse/ActiveRecord
# Imports inline (diff) comments and their reply threads. Each root
# comment becomes a DiffNote; its replies reuse the root's discussion id
# so the thread structure is preserved. Replies are skipped when the root
# note failed to persist.
def import_inline_comments(inline_comments, merge_request)
log_info(stage: 'import_inline_comments', message: 'starting', iid: merge_request.iid)
inline_comments.each do |comment|
position = build_position(merge_request, comment)
parent = create_diff_note(merge_request, comment, position)
next unless parent&.persisted?
discussion_id = parent.discussion_id
comment.comments.each do |reply|
create_diff_note(merge_request, reply, position, discussion_id)
end
end
log_info(stage: 'import_inline_comments', message: 'finished', iid: merge_request.iid)
end
# Builds a DiffNote for the comment at the given diff position. When the
# note is invalid (e.g. the position does not exist in the diff) a plain
# note with file/line context is created instead. Returns the note, or
# nil when an unexpected error occurred (recorded in #errors).
def create_diff_note(merge_request, comment, position, discussion_id = nil)
attributes = pull_request_comment_attributes(comment)
attributes.merge!(position: position, type: 'DiffNote')
attributes[:discussion_id] = discussion_id if discussion_id
note = merge_request.notes.build(attributes)
if note.valid?
note.save
return note
end
log_info(stage: 'create_diff_note', message: 'creating fallback DiffNote', iid: merge_request.iid)
# Bitbucket Server supports the ability to comment on any line, not just the
# line in the diff. If we can't add the note as a DiffNote, fallback to creating
# a regular note.
create_fallback_diff_note(merge_request, comment, position)
rescue StandardError => e
Gitlab::ErrorTracking.log_exception(
e,
stage: 'create_diff_note', comment_id: comment.id, error: e.message
)
errors << { type: :pull_request, id: comment.id, errors: e.message }
nil
end
# Creates a plain (non-diff) note when a DiffNote could not be attached,
# prefixing the comment body with the file/line context it referred to,
# e.g. "*Comment on a.rb:3 --> b.rb:5*".
def create_fallback_diff_note(merge_request, comment, position)
  context = +"*Comment on"
  context << " #{position.old_path}:#{position.old_line} -->" if position.old_line
  context << " #{position.new_path}:#{position.new_line}" if position.new_line
  context << "*\n\n#{comment.note}"
  attributes = pull_request_comment_attributes(comment)
  attributes[:note] = context
  merge_request.notes.create!(attributes)
end
# Translates a Bitbucket inline comment's location into a GitLab diff
# position anchored at the merge request's current diff refs.
def build_position(merge_request, pr_comment)
params = {
diff_refs: merge_request.diff_refs,
old_path: pr_comment.file_path,
new_path: pr_comment.file_path,
old_line: pr_comment.old_pos,
new_line: pr_comment.new_pos
}
Gitlab::Diff::Position.new(params)
end
# Imports top-level (non-diff) pull request comments and their replies as
# regular notes. Failures are recorded in #errors per root comment.
def import_standalone_pr_comments(pr_comments, merge_request)
pr_comments.each do |comment|
merge_request.notes.create!(pull_request_comment_attributes(comment))
# `replies` is a single reply per iteration despite the plural name.
comment.comments.each do |replies|
merge_request.notes.create!(pull_request_comment_attributes(replies))
end
rescue StandardError => e
Gitlab::ErrorTracking.log_exception(
e,
stage: 'import_standalone_pr_comments', merge_request_id: merge_request.id, comment_id: comment.id, error: e.message
)
errors << { type: :pull_request, comment_id: comment.id, errors: e.message }
end
end
# Builds the Note attributes for a Bitbucket comment. When the author has
# no matching GitLab user, authorship falls back to the project creator
# and a "*By <user> (<email>)*" attribution line is prepended. A truncated
# quote of the parent comment is prepended for replies.
def pull_request_comment_attributes(comment)
author = find_user_id(comment.author_email)
note = ''
unless author
author = project.creator_id
note = "*By #{comment.author_username} (#{comment.author_email})*\n\n"
end
note +=
# Provide some context for replying
if comment.parent_comment
"> #{comment.parent_comment.note.truncate(80)}\n\n#{comment.note}"
else
comment.note
end
{
project: project,
note: note,
author_id: author,
created_at: comment.created_at,
updated_at: comment.updated_at
}
end
# Logs at info level, merging in the project identification fields.
def log_info(details)
logger.info(log_base_data.merge(details))
end
# Logs at warn level, merging in the project identification fields.
def log_warn(details)
logger.warn(log_base_data.merge(details))
end
# Fields attached to every log line emitted by this importer.
def log_base_data
{
class: self.class.name,
project_id: project.id,
project_path: project.full_path
}
end
# Memoized import-metrics recorder for this importer run.
def metrics
@metrics ||= Gitlab::Import::Metrics.new(:bitbucket_server_importer, @project)
end
end
end
end
| 36.138958 | 152 | 0.659366 |
d50bf14125af024e4ac8a8017c06bbe7a2051338
| 1,316 |
require 'base64'
require 'openssl'
require 'uri'
module SimpleAWS
module Signing
##
# Implementation of "Signature Version 2" signing
##
module Version2
##
# Build and sign the final request, as per the rules here:
# http://docs.amazonwebservices.com/AWSEC2/latest/UserGuide/index.html?using-query-api.html
##
# Merges the Signature Version 2 authentication parameters into the
# request, then attaches the Base64-encoded HMAC signature.
#
# Uses Base64.strict_encode64 instead of encode64(...).chomp: for the
# 32-byte HMAC-SHA256 digest the output is identical (44 characters),
# but strict_encode64 can never embed a line feed, making the intent
# explicit. (The file previously used Base64 without requiring it.)
#
# @param request [#params] the request being finalized; mutated in place
# @return the same request, with auth params and "Signature" set
def finish_and_sign_request(request)
  request.params.merge!({
    "AWSAccessKeyId" => self.access_key,
    "SignatureMethod" => "HmacSHA256",
    "SignatureVersion" => "2",
    "Timestamp" => Time.now.utc.strftime("%Y-%m-%dT%H:%M:%SZ"),
    "Version" => self.version
  })
  request.params["Signature"] = Base64.strict_encode64(sign_request(request))
  request
end
# Computes the HMAC-SHA256 signature over the canonical request string.
#
# Bug fix: the original sort comparator special-cased "AWSAccessKeyId"
# only when it appeared as the LEFT operand, returning `a[0] <=> b[0]`
# when it was the right operand. That comparator is not antisymmetric,
# so the resulting order (and therefore the signature) could depend on
# the params hash's insertion order for keys sorting before
# "AWSAccessKeyId". The comparator is now symmetric.
#
# NOTE(review): AWS documents plain byte-order sorting of ALL params for
# SigV2; the AWSAccessKeyId-first behaviour is preserved here as the
# original's evident intent — confirm against the service before
# changing it.
#
# @param request [#params, #host, #path] the request to sign
# @return [String] the raw 32-byte HMAC-SHA256 digest
def sign_request(request)
  signing_params = request.params.clone
  list = signing_params.map { |k, v| [k, Util.uri_escape(v.to_s)] }
  list.sort! do |a, b|
    if a[0] == "AWSAccessKeyId"
      -1
    elsif b[0] == "AWSAccessKeyId"
      1
    else
      a[0] <=> b[0]
    end
  end
  plain_host = URI.parse(request.host).host
  to_sign = "POST\n#{plain_host}\n#{request.path}\n#{list.map { |p| p.join("=") }.join("&")}"
  OpenSSL::HMAC.digest("sha256", self.secret_key, to_sign)
end
end
end
end
| 26.32 | 98 | 0.571429 |
7a93fbb0e046e7c6760a91308efb050d4611d0eb
| 753 |
# frozen_string_literal: true
require 'json_marshal/marshaller'
module CovidVaccine
module V0
# Persists a COVID vaccine registration submission. Both form payloads
# are encrypted at rest; the duplicated attr_encrypted + `encrypts ...
# migrating: true` declarations indicate an in-flight migration from the
# attr_encrypted gem to Rails-native attribute encryption.
class RegistrationSubmission < ApplicationRecord
# All submissions belonging to the user's account, oldest first.
scope :for_user, ->(user) { where(account_id: user.account_uuid).order(created_at: :asc) }
# Symbolize form_data keys up front so downstream code can use symbols.
after_initialize do |reg|
reg.form_data&.symbolize_keys!
end
attr_encrypted :form_data, key: Settings.db_encryption_key, marshal: true, marshaler: JsonMarshal::Marshaller
attr_encrypted :raw_form_data, key: Settings.db_encryption_key, marshal: true, marshaler: JsonMarshal::Marshaller
serialize :form_data, JsonMarshal::Marshaller
serialize :raw_form_data, JsonMarshal::Marshaller
encrypts :form_data, :raw_form_data, migrating: true
end
end
end
| 32.73913 | 119 | 0.746348 |
1c9cfdf7e60aee876203bc1c39690ef2ce56858d
| 3,133 |
# frozen_string_literal: true
module Arclight
##
# Extends Blacklight::Solr::Document to provide Arclight specific behavior
module SolrDocument
extend Blacklight::Solr::Document
# Repository configuration matching this document's repository name;
# memoized. Returns nil when the document has no repository field.
def repository_config
return unless repository
@repository_config ||= Arclight::Repository.find_by(name: repository)
end
# Ancestor component ids (from the parent_ssm field).
def parent_ids
fetch('parent_ssm', [])
end
# Ancestor component titles, parallel to #parent_ids.
def parent_labels
fetch('parent_unittitles_ssm', [])
end
def parent_levels
fetch('parent_levels_ssm', [])
end
# SolrDocument for the immediate parent, built from the nested `parent`
# response. NOTE(review): `fetch('parent')` raises when no parent data is
# indexed — confirm callers guard against top-level documents.
def parent_document
self.class.new fetch('parent').fetch('docs', []).first
end
def eadid
fetch('ead_ssi', nil)
end
def unitid
first('unitid_ssm')
end
def repository
first('repository_ssm')
end
# "Repository: unitid"; omits either part when missing.
def repository_and_unitid
[repository, unitid].compact.join(': ')
end
def collection_name
first('collection_ssm')
end
def collection_unitid
first('collection_unitid_ssm')
end
def extent
first('extent_ssm')
end
# Abstract when present, otherwise the scope/content note.
def abstract_or_scope
first('abstract_ssm') || first('scopecontent_ssm')
end
def creator
first('creator_ssm')
end
def collection_creator
first('collection_creator_ssm')
end
# True when the indexed flag (stored as the string 'true') is set.
def online_content?
first('has_online_content_ssim') == 'true'
end
def number_of_children
first('child_component_count_isim') || 0
end
def children?
number_of_children.positive?
end
def reference
first('ref_ssm')
end
def component_level
first('component_level_isim')
end
def level
first('level_ssm')
end
# Memoized rendered viewer markup for this document's digital objects.
def digital_object_viewer
@digital_object_viewer ||= Arclight::Viewer.render(self)
end
def terms
first('userestrict_ssm')
end
# Restrictions for component sidebar
def parent_restrictions
first('parent_access_restrict_ssm')
end
# Terms for component sidebar
def parent_terms
first('parent_access_terms_ssm')
end
# Digital objects deserialized from their stored JSON representations;
# empty entries are dropped.
def digital_objects
digital_objects_field = fetch('digital_objects_ssm', []).reject(&:empty?)
return [] if digital_objects_field.blank?
digital_objects_field.map do |object|
Arclight::DigitalObject.from_json(object)
end
end
def containers
# note that .titlecase strips punctuation, like hyphens, we want to keep
fetch('containers_ssim', []).map(&:capitalize)
end
def normalized_title
first('normalized_title_ssm')
end
def normalized_date
first('normalized_date_ssm')
end
# @return [Array<String>] with embedded highlights using <em>...</em>
def highlights
highlight_response = response[:highlighting]
return if highlight_response.blank? ||
highlight_response[id].blank? ||
highlight_response[id][:text].blank?
highlight_response[id][:text]
end
# Factory method for constructing the Object modeling downloads
# @return [DocumentDownloads]
def downloads
@downloads ||= DocumentDownloads.new(self)
end
end
end
| 20.611842 | 79 | 0.654325 |
0846f1b8c64f0dd4218f745b3453c7c2e4fa6dc3
| 1,879 |
# [2] Add Two Numbers
#
# https://leetcode.com/problems/add-two-numbers/description/
#
# * algorithms
# * Medium (28.99%)
# * Source Code: 2.add-two-numbers.rb
# * Total Accepted: 551.3K
# * Total Submissions: 1.9M
# * Testcase Example: '[2,4,3]\n[5,6,4]'
#
# You are given two non-empty linked lists representing two non-negative integers. The digits are stored in reverse order and each of their nodes contain a single digit. Add the two numbers and return it as a linked list.
#
# You may assume the two numbers do not contain any leading zero, except the number 0 itself.
#
# Example:
#
#
# Input: (2 -> 4 -> 3) + (5 -> 6 -> 4)
# Output: 7 -> 0 -> 8
# Explanation: 342 + 465 = 807.
# Definition for singly-linked list.
# class ListNode
# attr_accessor :val, :next
# def initialize(val)
# @val = val
# @next = nil
# end
# end
# @param {ListNode} l1
# @param {ListNode} l2
# @return {ListNode}
# Adds two non-negative integers stored as reversed-digit linked lists
# (one decimal digit per node) and returns the sum as a list in the same
# representation. Walks both lists in lock-step, writing each digit sum
# mod 10 and carrying overflow forward. O(max(m, n)) time, O(1) extra
# space beyond the result list.
#
# @param {ListNode} l1
# @param {ListNode} l2
# @return {ListNode}
def add_two_numbers(l1, l2)
  head = ListNode.new(0)
  node = head
  carry = 0
  while l1 || l2
    sum = (l1 ? l1.val : 0) + (l2 ? l2.val : 0) + carry
    carry = sum / 10 # sum <= 19, so carry is 0 or 1
    node.val = sum % 10
    l1 = l1.next if l1
    l2 = l2.next if l2
    # Only append a placeholder node when more digits remain.
    next unless l1 || l2
    node.next ||= ListNode.new(0)
    node = node.next
  end
  # A final carry needs one extra node. (The original also carried an
  # unreachable `head.next = ListNode.new(1) if tmp.nil?` branch — `tmp`
  # could never be nil at that point — which has been removed.)
  node.next = ListNode.new(carry) if carry == 1
  head
end
# @param {ListNode} l1
# @param {ListNode} l2
# @return {ListNode}
# Alternative solution: convert both lists to integers, add them, then
# write the sum's digits back into the first list (reusing and, when the
# sum is longer, extending l1's nodes in place).
#
# @param {ListNode} l1
# @param {ListNode} l2
# @return {ListNode}
def add_two_numbers1(l1, l2)
  result_head = l1
  collect = lambda do |node|
    digits = []
    while node
      digits << node.val
      node = node.next
    end
    digits
  end
  # Digits are stored least-significant first, so reverse before joining.
  n1 = collect.call(l1).reverse.join.to_i
  n2 = collect.call(l2).reverse.join.to_i
  sum_digits = (n1 + n2).to_s.chars.reverse
  cursor = result_head
  sum_digits.each_with_index do |digit, idx|
    cursor.val = digit.to_i
    # Grow the chain only while more digits still need writing.
    cursor.next = ListNode.new(0) if cursor.next.nil? && idx != sum_digits.length - 1
    cursor = cursor.next
  end
  result_head
end
| 21.352273 | 221 | 0.606706 |
62a6d34e46bf417d6493f4ac7a8094aa3bd4e585
| 3,393 |
require "spec_helper"
require "flex_commerce_api"
require "flex_commerce_api/api_base"
require "uri"
RSpec.describe "capturing surrogate keys" do
# Global context for all specs - defines things you don't see defined in here
# such as flex_root_url, api_root, default_headers and page_size
# see api_globals.rb in spec/support for the source code
include_context "global context"
let(:subject_class) do
# NOTE(review): `TempClass ||=` assigns a constant inside a block so the
# anonymous ApiBase subclass is shared across all examples — intentional
# memoization, but a known constant-assignment smell.
TempClass ||= Class.new(FlexCommerceApi::ApiBase) do
end
end
# Minimal JSON:API-shaped payload returned by every stubbed endpoint.
let(:empty_data) do
{
id: "1",
type: "base",
data: {}
}
end
it "should capture surrogate keys from a single request" do
headers = { "Content-Type": "application/json", "external-surrogate-key": "key1 key2" }
stub_request(:get, /\/temp_classes\/test\.json_api$/).to_return do |req|
{ body: empty_data.to_json, headers: headers, status: 200 }
end
keys = FlexCommerceApi::ApiBase.capture_surrogate_keys do
subject_class.find('test')
end
expect(keys).to eq('key1 key2')
end
it "should combine surrogate keys from multiple requests" do
headers_one = { "Content-Type": "application/json", "external-surrogate-key": "key1 key2" }
headers_two = { "Content-Type": "application/json", "external-surrogate-key": "key3 key4" }
stub_request(:get, /\/temp_classes\/test\.json_api$/).to_return do |req|
{ body: empty_data.to_json, headers: headers_one, status: 200 }
end
stub_request(:get, /\/temp_classes\/test2\.json_api$/).to_return do |req|
{ body: empty_data.to_json, headers: headers_two, status: 200 }
end
keys = FlexCommerceApi::ApiBase.capture_surrogate_keys do
subject_class.find('test')
subject_class.find('test2')
end
expect(keys).to eq('key1 key2 key3 key4')
end
it "should ensure duplicate surrogate keys are removed" do
headers_one = { "Content-Type": "application/json", "external-surrogate-key": "key1 key2" }
headers_two = { "Content-Type": "application/json", "external-surrogate-key": "key2 key3" }
stub_request(:get, /\/temp_classes\/test\.json_api$/).to_return do |req|
{ body: empty_data.to_json, headers: headers_one, status: 200 }
end
stub_request(:get, /\/temp_classes\/test2\.json_api$/).to_return do |req|
{ body: empty_data.to_json, headers: headers_two, status: 200 }
end
keys = FlexCommerceApi::ApiBase.capture_surrogate_keys do
subject_class.find('test')
subject_class.find('test2')
end
expect(keys).to eq('key1 key2 key3')
end
it "should allow blank surrogate keys" do
headers = { "Content-Type": "application/json" }
stub_request(:get, /\/temp_classes\/test\.json_api$/).to_return do |req|
{ body: empty_data.to_json, headers: headers, status: 200 }
end
keys = FlexCommerceApi::ApiBase.capture_surrogate_keys do
subject_class.find('test')
end
expect(keys).to eq('')
end
it "should ensure surrogate keys are cleared between requests" do
headers = { "Content-Type": "application/json", "external-surrogate-key": "key1 key2" }
stub_request(:get, /\/temp_classes\/test\.json_api$/).to_return do |req|
{ body: empty_data.to_json, headers: headers, status: 200 }
end
# Keys accumulate in thread-local state; a request made outside a
# capture_surrogate_keys block must not populate it.
Thread.current[:shift_surrogate_keys] = nil
subject_class.find('test')
expect(Thread.current[:shift_surrogate_keys]).to eq(nil)
end
end
| 32.625 | 95 | 0.685234 |
1dcb847c3f8491dbb6c9e176b65eea3d9632bada
| 1,865 |
require 'net/http'
require 'test/unit'
require 'stringio'
# Tests for Net::HTTP request objects: default headers, request/response
# body permissions per HTTP verb, and automatic content decoding
# (ruby-lang bug #7831).
class HTTPRequestTest < Test::Unit::TestCase
def test_initialize_GET
req = Net::HTTP::Get.new '/'
assert_equal 'GET', req.method
refute req.request_body_permitted?
assert req.response_body_permitted?
expected = {
'accept' => %w[*/*],
'user-agent' => %w[Ruby],
}
# accept-encoding is only sent when Ruby was built with zlib support.
expected['accept-encoding'] = %w[gzip;q=1.0,deflate;q=0.6,identity;q=0.3] if
Net::HTTP::HAVE_ZLIB
assert_equal expected, req.to_hash
end
def test_initialize_GET_range
req = Net::HTTP::Get.new '/', 'Range' => 'bytes=0-9'
assert_equal 'GET', req.method
refute req.request_body_permitted?
assert req.response_body_permitted?
expected = {
'accept' => %w[*/*],
'user-agent' => %w[Ruby],
'range' => %w[bytes=0-9],
}
assert_equal expected, req.to_hash
end
def test_initialize_HEAD
req = Net::HTTP::Head.new '/'
assert_equal 'HEAD', req.method
refute req.request_body_permitted?
refute req.response_body_permitted?
expected = {
'accept' => %w[*/*],
'user-agent' => %w[Ruby],
}
assert_equal expected, req.to_hash
end
def test_initialize_accept_encoding
req1 = Net::HTTP::Get.new '/'
assert req1.decode_content, 'Bug #7831 - automatically decode content'
req2 = Net::HTTP::Get.new '/', 'accept-encoding' => 'identity'
# Fixed message typo: this assertion previously referenced "#7381" while
# every sibling assertion refers to bug #7831.
refute req2.decode_content,
'Bug #7831 - do not decode content if the user overrides'
end if Net::HTTP::HAVE_ZLIB
def test_header_set
req = Net::HTTP::Get.new '/'
assert req.decode_content, 'Bug #7831 - automatically decode content'
req['accept-encoding'] = 'identity'
refute req.decode_content,
'Bug #7831 - do not decode content if the user overrides'
end if Net::HTTP::HAVE_ZLIB
end
| 23.3125 | 80 | 0.634853 |
b98e347dbe28382ec2dc0a0d700d3b294171492b
| 1,055 |
require 'pathname'
# Resolve the gem root and make lib/ and spec/ directly requirable.
ROOT = Pathname.new(File.expand_path('..', __dir__))
$LOAD_PATH.unshift((ROOT + 'lib').to_s)
$LOAD_PATH.unshift((ROOT + 'spec').to_s)
require 'bundler/setup'
require 'pry'
require 'rspec'
require 'danger'
RSpec.configure do |config|
config.filter_gems_from_backtrace 'bundler'
config.color = true
config.tty = true
# Reset the plugin's configuration before each example so specs that
# mutate it stay isolated from one another.
config.before do
Danger::Changelog::Config.reset
end
end
require 'danger_plugin'
# A silent Cork board so plugin output does not pollute spec runs.
def testing_ui
Cork::Board.new(silent: true)
end
# Fake CI environment variables making Danger believe it is running on
# Travis for PR #800 of dblock/danger-changelog.
def testing_env
{
'HAS_JOSH_K_SEAL_OF_APPROVAL' => 'true',
'TRAVIS_PULL_REQUEST' => '800',
'TRAVIS_REPO_SLUG' => 'dblock/danger-changelog',
'TRAVIS_COMMIT_RANGE' => '759adcbd0d8f...13c4dc8bb61d',
'DANGER_GITHUB_API_TOKEN' => '123sbdq54erfsd3422gdfio'
}
end
# A stubbed out Dangerfile for use in tests
def testing_dangerfile
env = Danger::EnvironmentManager.new(testing_env)
Danger::Dangerfile.new(env, testing_ui)
end
require 'active_support'
# Load every spec support helper under spec/support.
Dir[File.join(File.dirname(__FILE__), 'support', '**/*.rb')].each do |file|
require file
end
| 21.979167 | 75 | 0.725118 |
3933487a66a421cf2d09e6fab69f1b732d7fb145
| 244 |
# Creates the materialized view backing the variant-group browse table.
# Written as up/down (rather than change) because materialized-view
# creation is not automatically reversible.
# NOTE(review): create_view/drop_view come from the scenic gem — the view
# SQL is expected under db/views; confirm.
class CreateVariantGroupBrowseTableRows < ActiveRecord::Migration[6.1]
def up
create_view :variant_group_browse_table_rows, materialized: true
end
def down
drop_view :variant_group_browse_table_rows, materialized: true
end
end
| 24.4 | 70 | 0.803279 |
5da117f1ecc86de012d9c01b50e1212e42fa9b3b
| 219 |
class AddUniqueConstraintCountryLanguageCodes < ActiveRecord::Migration[6.0]
def change
add_index :country_codes, [:name, :code], unique: true
add_index :language_codes, [:name, :code], unique: true
end
end
| 31.285714 | 76 | 0.744292 |
f7b194abcee50f077b4f485808a686b68f383ce7
| 1,544 |
# frozen_string_literal: true
require 'spec_helper'
describe Email do
describe 'modules' do
subject { described_class }
it { is_expected.to include_module(AsyncDeviseEmail) }
end
describe 'validations' do
it_behaves_like 'an object with RFC3696 compliant email-formated attributes', :email do
subject { build(:email) }
end
end
it 'normalize email value' do
expect(described_class.new(email: ' [email protected] ').email)
.to eq '[email protected]'
end
describe '#update_invalid_gpg_signatures' do
let(:user) { create(:user) }
it 'synchronizes the gpg keys when the email is updated' do
email = user.emails.create(email: '[email protected]')
expect(user).to receive(:update_invalid_gpg_signatures)
email.confirm
end
end
describe 'scopes' do
let(:user) { create(:user) }
it 'scopes confirmed emails' do
create(:email, :confirmed, user: user)
create(:email, user: user)
expect(user.emails.count).to eq 2
expect(user.emails.confirmed.count).to eq 1
end
end
describe 'delegation' do
let(:user) { create(:user) }
it 'delegates to :user' do
expect(build(:email, user: user).username).to eq user.username
end
end
describe 'Devise emails' do
let!(:user) { create(:user) }
describe 'behaviour' do
it 'sends emails asynchronously' do
expect do
user.emails.create!(email: '[email protected]')
end.to have_enqueued_job.on_queue('mailers')
end
end
end
end
| 23.044776 | 91 | 0.660622 |
e256f3a2a550a90976a1079ca248bd08cf178696
| 752 |
require File.dirname(__FILE__) + '/test_helper.rb'
class TestAccessToken < Test::Unit::TestCase
def setup
@fake_response = {
:user_id => 5734758743895,
:oauth_token => "key",
:oauth_token_secret => "secret"
}
# setup a fake req. token. mocking Consumer would be more appropriate...
@access_token = OAuth::AccessToken.from_hash(
OAuth::Consumer.new("key", "secret", {}),
@fake_response
)
end
def test_provides_response_parameters
assert @access_token
assert_respond_to @access_token, :params
end
def test_access_token_makes_non_oauth_response_params_available
assert_not_nil @access_token.params[:user_id]
assert_equal 5734758743895, @access_token.params[:user_id]
end
end
| 28.923077 | 76 | 0.712766 |
e2be734aca411b8a8d8119ab9ba12c76a61e5140
| 1,249 |
require_relative 'boot'
require "rails"
# Pick the frameworks you want:
require "active_model/railtie"
require "active_job/railtie"
require "active_record/railtie"
require "action_controller/railtie"
require "action_mailer/railtie"
require "action_view/railtie"
require "action_cable/engine"
# require "sprockets/railtie"
require "rails/test_unit/railtie"
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module SearchApi
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Only loads a smaller set of middleware suitable for API only apps.
# Middleware like session, flash, cookies can be added back manually.
# Skip views, helpers and assets when generating a new resource.
config.api_only = true
# cors
config.middleware.insert_before 0, Rack::Cors do
allow do
origins 'localhost:9000'
resource '*', :headers => :any, :methods => [:get, :post, :options]
end
end
end
end
| 32.025641 | 82 | 0.738191 |
f89f5736650e828046f39ed22374587fc4688738
| 1,576 |
# Copyright (C) 2016-2019 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module Mongo
class Monitoring
module Event
# Event fired when the topology closes.
#
# @since 2.4.0
class TopologyClosed < Mongo::Event::Base
# @return [ Topology ] topology The topology.
attr_reader :topology
# Create the event.
#
# @example Create the event.
# TopologyClosed.new(topology)
#
# @param [ Integer ] topology The topology.
#
# @since 2.4.0
def initialize(topology)
@topology = topology
end
# Returns a concise yet useful summary of the event.
#
# @return [ String ] String summary of the event.
#
# @note This method is experimental and subject to change.
#
# @since 2.7.0
# @api experimental
def summary
"#<#{self.class.name.sub(/^Mongo::Monitoring::Event::/, '')}" +
" topology=#{topology.summary}>"
end
end
end
end
end
| 28.654545 | 74 | 0.617386 |
4adede350a343acc3f5e93294991ecb052a970b2
| 150 |
FactoryGirl.define do
factory :database_group_flag, class: "Detour::DatabaseGroupFlag" do
feature
flaggable_type "User"
group
end
end
| 18.75 | 69 | 0.74 |
ff44a9b1eb2157e6e320720f4d23e51f2cad9275
| 121,649 |
# frozen_string_literal: true
# WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/master/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
module Aws::Personalize
module Types
# Describes a custom algorithm.
#
# @!attribute [rw] name
# The name of the algorithm.
# @return [String]
#
# @!attribute [rw] algorithm_arn
# The Amazon Resource Name (ARN) of the algorithm.
# @return [String]
#
# @!attribute [rw] algorithm_image
# The URI of the Docker container for the algorithm image.
# @return [Types::AlgorithmImage]
#
# @!attribute [rw] default_hyper_parameters
# Specifies the default hyperparameters.
# @return [Hash<String,String>]
#
# @!attribute [rw] default_hyper_parameter_ranges
# Specifies the default hyperparameters, their ranges, and whether
# they are tunable. A tunable hyperparameter can have its value
# determined during hyperparameter optimization (HPO).
# @return [Types::DefaultHyperParameterRanges]
#
# @!attribute [rw] default_resource_config
# Specifies the default maximum number of training jobs and parallel
# training jobs.
# @return [Hash<String,String>]
#
# @!attribute [rw] training_input_mode
# The training input mode.
# @return [String]
#
# @!attribute [rw] role_arn
# The Amazon Resource Name (ARN) of the role.
# @return [String]
#
# @!attribute [rw] creation_date_time
# The date and time (in Unix time) that the algorithm was created.
# @return [Time]
#
# @!attribute [rw] last_updated_date_time
# The date and time (in Unix time) that the algorithm was last
# updated.
# @return [Time]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/Algorithm AWS API Documentation
#
class Algorithm < Struct.new(
:name,
:algorithm_arn,
:algorithm_image,
:default_hyper_parameters,
:default_hyper_parameter_ranges,
:default_resource_config,
:training_input_mode,
:role_arn,
:creation_date_time,
:last_updated_date_time)
include Aws::Structure
end
# Describes an algorithm image.
#
# @!attribute [rw] name
# The name of the algorithm image.
# @return [String]
#
# @!attribute [rw] docker_uri
# The URI of the Docker container for the algorithm image.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/AlgorithmImage AWS API Documentation
#
class AlgorithmImage < Struct.new(
:name,
:docker_uri)
include Aws::Structure
end
# When the solution performs AutoML (`performAutoML` is true in
# CreateSolution), Amazon Personalize determines which recipe, from the
# specified list, optimizes the given metric. Amazon Personalize then
# uses that recipe for the solution.
#
# @note When making an API call, you may pass AutoMLConfig
# data as a hash:
#
# {
# metric_name: "MetricName",
# recipe_list: ["Arn"],
# }
#
# @!attribute [rw] metric_name
# The metric to optimize.
# @return [String]
#
# @!attribute [rw] recipe_list
# The list of candidate recipes.
# @return [Array<String>]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/AutoMLConfig AWS API Documentation
#
class AutoMLConfig < Struct.new(
:metric_name,
:recipe_list)
include Aws::Structure
end
# When the solution performs AutoML (`performAutoML` is true in
# CreateSolution), specifies the recipe that best optimized the
# specified metric.
#
# @!attribute [rw] best_recipe_arn
# The Amazon Resource Name (ARN) of the best recipe.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/AutoMLResult AWS API Documentation
#
class AutoMLResult < Struct.new(
:best_recipe_arn)
include Aws::Structure
end
# Contains information on a batch inference job.
#
# @!attribute [rw] job_name
# The name of the batch inference job.
# @return [String]
#
# @!attribute [rw] batch_inference_job_arn
# The Amazon Resource Name (ARN) of the batch inference job.
# @return [String]
#
# @!attribute [rw] filter_arn
# The ARN of the filter used on the batch inference job.
# @return [String]
#
# @!attribute [rw] failure_reason
# If the batch inference job failed, the reason for the failure.
# @return [String]
#
# @!attribute [rw] solution_version_arn
# The Amazon Resource Name (ARN) of the solution version from which
# the batch inference job was created.
# @return [String]
#
# @!attribute [rw] num_results
# The number of recommendations generated by the batch inference job.
# This number includes the error messages generated for failed input
# records.
# @return [Integer]
#
# @!attribute [rw] job_input
# The Amazon S3 path that leads to the input data used to generate the
# batch inference job.
# @return [Types::BatchInferenceJobInput]
#
# @!attribute [rw] job_output
# The Amazon S3 bucket that contains the output data generated by the
# batch inference job.
# @return [Types::BatchInferenceJobOutput]
#
# @!attribute [rw] role_arn
# The ARN of the Amazon Identity and Access Management (IAM) role that
# requested the batch inference job.
# @return [String]
#
# @!attribute [rw] status
# The status of the batch inference job. The status is one of the
# following values:
#
# * PENDING
#
# * IN PROGRESS
#
# * ACTIVE
#
# * CREATE FAILED
# @return [String]
#
# @!attribute [rw] creation_date_time
# The time at which the batch inference job was created.
# @return [Time]
#
# @!attribute [rw] last_updated_date_time
# The time at which the batch inference job was last updated.
# @return [Time]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/BatchInferenceJob AWS API Documentation
#
class BatchInferenceJob < Struct.new(
:job_name,
:batch_inference_job_arn,
:filter_arn,
:failure_reason,
:solution_version_arn,
:num_results,
:job_input,
:job_output,
:role_arn,
:status,
:creation_date_time,
:last_updated_date_time)
include Aws::Structure
end
# The input configuration of a batch inference job.
#
# @note When making an API call, you may pass BatchInferenceJobInput
# data as a hash:
#
# {
# s3_data_source: { # required
# path: "S3Location", # required
# kms_key_arn: "KmsKeyArn",
# },
# }
#
# @!attribute [rw] s3_data_source
# The URI of the Amazon S3 location that contains your input data. The
# Amazon S3 bucket must be in the same region as the API endpoint you
# are calling.
# @return [Types::S3DataConfig]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/BatchInferenceJobInput AWS API Documentation
#
class BatchInferenceJobInput < Struct.new(
:s3_data_source)
include Aws::Structure
end
# The output configuration parameters of a batch inference job.
#
# @note When making an API call, you may pass BatchInferenceJobOutput
# data as a hash:
#
# {
# s3_data_destination: { # required
# path: "S3Location", # required
# kms_key_arn: "KmsKeyArn",
# },
# }
#
# @!attribute [rw] s3_data_destination
# Information on the Amazon S3 bucket in which the batch inference
# job's output is stored.
# @return [Types::S3DataConfig]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/BatchInferenceJobOutput AWS API Documentation
#
class BatchInferenceJobOutput < Struct.new(
:s3_data_destination)
include Aws::Structure
end
# A truncated version of the BatchInferenceJob datatype. The
# ListBatchInferenceJobs operation returns a list of batch inference job
# summaries.
#
# @!attribute [rw] batch_inference_job_arn
# The Amazon Resource Name (ARN) of the batch inference job.
# @return [String]
#
# @!attribute [rw] job_name
# The name of the batch inference job.
# @return [String]
#
# @!attribute [rw] status
# The status of the batch inference job. The status is one of the
# following values:
#
# * PENDING
#
# * IN PROGRESS
#
# * ACTIVE
#
# * CREATE FAILED
# @return [String]
#
# @!attribute [rw] creation_date_time
# The time at which the batch inference job was created.
# @return [Time]
#
# @!attribute [rw] last_updated_date_time
# The time at which the batch inference job was last updated.
# @return [Time]
#
# @!attribute [rw] failure_reason
# If the batch inference job failed, the reason for the failure.
# @return [String]
#
# @!attribute [rw] solution_version_arn
# The ARN of the solution version used by the batch inference job.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/BatchInferenceJobSummary AWS API Documentation
#
class BatchInferenceJobSummary < Struct.new(
:batch_inference_job_arn,
:job_name,
:status,
:creation_date_time,
:last_updated_date_time,
:failure_reason,
:solution_version_arn)
include Aws::Structure
end
# Describes a deployed solution version, otherwise known as a campaign.
# For more information on campaigns, see CreateCampaign.
#
# @!attribute [rw] name
# The name of the campaign.
# @return [String]
#
# @!attribute [rw] campaign_arn
# The Amazon Resource Name (ARN) of the campaign.
# @return [String]
#
# @!attribute [rw] solution_version_arn
# The Amazon Resource Name (ARN) of a specific version of the
# solution.
# @return [String]
#
# @!attribute [rw] min_provisioned_tps
# Specifies the requested minimum provisioned transactions
# (recommendations) per second.
# @return [Integer]
#
# @!attribute [rw] status
# The status of the campaign.
#
# A campaign can be in one of the following states:
#
# * CREATE PENDING > CREATE IN\_PROGRESS > ACTIVE -or- CREATE
# FAILED
#
# * DELETE PENDING > DELETE IN\_PROGRESS
# @return [String]
#
# @!attribute [rw] failure_reason
# If a campaign fails, the reason behind the failure.
# @return [String]
#
# @!attribute [rw] creation_date_time
# The date and time (in Unix format) that the campaign was created.
# @return [Time]
#
# @!attribute [rw] last_updated_date_time
# The date and time (in Unix format) that the campaign was last
# updated.
# @return [Time]
#
# @!attribute [rw] latest_campaign_update
# Provides a summary of the properties of a campaign update. For a
# complete listing, call the DescribeCampaign API.
# @return [Types::CampaignUpdateSummary]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/Campaign AWS API Documentation
#
class Campaign < Struct.new(
:name,
:campaign_arn,
:solution_version_arn,
:min_provisioned_tps,
:status,
:failure_reason,
:creation_date_time,
:last_updated_date_time,
:latest_campaign_update)
include Aws::Structure
end
# Provides a summary of the properties of a campaign. For a complete
# listing, call the DescribeCampaign API.
#
# @!attribute [rw] name
# The name of the campaign.
# @return [String]
#
# @!attribute [rw] campaign_arn
# The Amazon Resource Name (ARN) of the campaign.
# @return [String]
#
# @!attribute [rw] status
# The status of the campaign.
#
# A campaign can be in one of the following states:
#
# * CREATE PENDING > CREATE IN\_PROGRESS > ACTIVE -or- CREATE
# FAILED
#
# * DELETE PENDING > DELETE IN\_PROGRESS
# @return [String]
#
# @!attribute [rw] creation_date_time
# The date and time (in Unix time) that the campaign was created.
# @return [Time]
#
# @!attribute [rw] last_updated_date_time
# The date and time (in Unix time) that the campaign was last updated.
# @return [Time]
#
# @!attribute [rw] failure_reason
# If a campaign fails, the reason behind the failure.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/CampaignSummary AWS API Documentation
#
class CampaignSummary < Struct.new(
:name,
:campaign_arn,
:status,
:creation_date_time,
:last_updated_date_time,
:failure_reason)
include Aws::Structure
end
# Provides a summary of the properties of a campaign update. For a
# complete listing, call the DescribeCampaign API.
#
# @!attribute [rw] solution_version_arn
# The Amazon Resource Name (ARN) of the deployed solution version.
# @return [String]
#
# @!attribute [rw] min_provisioned_tps
# Specifies the requested minimum provisioned transactions
# (recommendations) per second that Amazon Personalize will support.
# @return [Integer]
#
# @!attribute [rw] status
# The status of the campaign update.
#
# A campaign update can be in one of the following states:
#
# * CREATE PENDING > CREATE IN\_PROGRESS > ACTIVE -or- CREATE
# FAILED
#
# * DELETE PENDING > DELETE IN\_PROGRESS
# @return [String]
#
# @!attribute [rw] failure_reason
# If a campaign update fails, the reason behind the failure.
# @return [String]
#
# @!attribute [rw] creation_date_time
# The date and time (in Unix time) that the campaign update was
# created.
# @return [Time]
#
# @!attribute [rw] last_updated_date_time
# The date and time (in Unix time) that the campaign update was last
# updated.
# @return [Time]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/CampaignUpdateSummary AWS API Documentation
#
class CampaignUpdateSummary < Struct.new(
:solution_version_arn,
:min_provisioned_tps,
:status,
:failure_reason,
:creation_date_time,
:last_updated_date_time)
include Aws::Structure
end
# Provides the name and range of a categorical hyperparameter.
#
# @note When making an API call, you may pass CategoricalHyperParameterRange
# data as a hash:
#
# {
# name: "ParameterName",
# values: ["CategoricalValue"],
# }
#
# @!attribute [rw] name
# The name of the hyperparameter.
# @return [String]
#
# @!attribute [rw] values
# A list of the categories for the hyperparameter.
# @return [Array<String>]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/CategoricalHyperParameterRange AWS API Documentation
#
class CategoricalHyperParameterRange < Struct.new(
:name,
:values)
include Aws::Structure
end
# Provides the name and range of a continuous hyperparameter.
#
# @note When making an API call, you may pass ContinuousHyperParameterRange
# data as a hash:
#
# {
# name: "ParameterName",
# min_value: 1.0,
# max_value: 1.0,
# }
#
# @!attribute [rw] name
# The name of the hyperparameter.
# @return [String]
#
# @!attribute [rw] min_value
# The minimum allowable value for the hyperparameter.
# @return [Float]
#
# @!attribute [rw] max_value
# The maximum allowable value for the hyperparameter.
# @return [Float]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/ContinuousHyperParameterRange AWS API Documentation
#
class ContinuousHyperParameterRange < Struct.new(
:name,
:min_value,
:max_value)
include Aws::Structure
end
# @note When making an API call, you may pass CreateBatchInferenceJobRequest
# data as a hash:
#
# {
# job_name: "Name", # required
# solution_version_arn: "Arn", # required
# filter_arn: "Arn",
# num_results: 1,
# job_input: { # required
# s3_data_source: { # required
# path: "S3Location", # required
# kms_key_arn: "KmsKeyArn",
# },
# },
# job_output: { # required
# s3_data_destination: { # required
# path: "S3Location", # required
# kms_key_arn: "KmsKeyArn",
# },
# },
# role_arn: "RoleArn", # required
# }
#
# @!attribute [rw] job_name
# The name of the batch inference job to create.
# @return [String]
#
# @!attribute [rw] solution_version_arn
# The Amazon Resource Name (ARN) of the solution version that will be
# used to generate the batch inference recommendations.
# @return [String]
#
# @!attribute [rw] filter_arn
# The ARN of the filter to apply to the batch inference job. For more
# information on using filters, see Using Filters with Amazon
# Personalize.
# @return [String]
#
# @!attribute [rw] num_results
# The number of recommendations to retreive.
# @return [Integer]
#
# @!attribute [rw] job_input
# The Amazon S3 path that leads to the input file to base your
# recommendations on. The input material must be in JSON format.
# @return [Types::BatchInferenceJobInput]
#
# @!attribute [rw] job_output
# The path to the Amazon S3 bucket where the job's output will be
# stored.
# @return [Types::BatchInferenceJobOutput]
#
# @!attribute [rw] role_arn
# The ARN of the Amazon Identity and Access Management role that has
# permissions to read and write to your input and out Amazon S3
# buckets respectively.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/CreateBatchInferenceJobRequest AWS API Documentation
#
class CreateBatchInferenceJobRequest < Struct.new(
:job_name,
:solution_version_arn,
:filter_arn,
:num_results,
:job_input,
:job_output,
:role_arn)
include Aws::Structure
end
# @!attribute [rw] batch_inference_job_arn
# The ARN of the batch inference job.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/CreateBatchInferenceJobResponse AWS API Documentation
#
class CreateBatchInferenceJobResponse < Struct.new(
:batch_inference_job_arn)
include Aws::Structure
end
# @note When making an API call, you may pass CreateCampaignRequest
# data as a hash:
#
# {
# name: "Name", # required
# solution_version_arn: "Arn", # required
# min_provisioned_tps: 1, # required
# }
#
# @!attribute [rw] name
# A name for the new campaign. The campaign name must be unique within
# your account.
# @return [String]
#
# @!attribute [rw] solution_version_arn
# The Amazon Resource Name (ARN) of the solution version to deploy.
# @return [String]
#
# @!attribute [rw] min_provisioned_tps
# Specifies the requested minimum provisioned transactions
# (recommendations) per second that Amazon Personalize will support.
# @return [Integer]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/CreateCampaignRequest AWS API Documentation
#
class CreateCampaignRequest < Struct.new(
:name,
:solution_version_arn,
:min_provisioned_tps)
include Aws::Structure
end
# @!attribute [rw] campaign_arn
# The Amazon Resource Name (ARN) of the campaign.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/CreateCampaignResponse AWS API Documentation
#
class CreateCampaignResponse < Struct.new(
:campaign_arn)
include Aws::Structure
end
# @note When making an API call, you may pass CreateDatasetGroupRequest
# data as a hash:
#
# {
# name: "Name", # required
# role_arn: "RoleArn",
# kms_key_arn: "KmsKeyArn",
# }
#
# @!attribute [rw] name
# The name for the new dataset group.
# @return [String]
#
# @!attribute [rw] role_arn
# The ARN of the IAM role that has permissions to access the KMS key.
# Supplying an IAM role is only valid when also specifying a KMS key.
# @return [String]
#
# @!attribute [rw] kms_key_arn
# The Amazon Resource Name (ARN) of a KMS key used to encrypt the
# datasets.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/CreateDatasetGroupRequest AWS API Documentation
#
class CreateDatasetGroupRequest < Struct.new(
:name,
:role_arn,
:kms_key_arn)
include Aws::Structure
end
# @!attribute [rw] dataset_group_arn
# The Amazon Resource Name (ARN) of the new dataset group.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/CreateDatasetGroupResponse AWS API Documentation
#
class CreateDatasetGroupResponse < Struct.new(
:dataset_group_arn)
include Aws::Structure
end
# @note When making an API call, you may pass CreateDatasetImportJobRequest
# data as a hash:
#
# {
# job_name: "Name", # required
# dataset_arn: "Arn", # required
# data_source: { # required
# data_location: "S3Location",
# },
# role_arn: "RoleArn", # required
# }
#
# @!attribute [rw] job_name
# The name for the dataset import job.
# @return [String]
#
# @!attribute [rw] dataset_arn
# The ARN of the dataset that receives the imported data.
# @return [String]
#
# @!attribute [rw] data_source
# The Amazon S3 bucket that contains the training data to import.
# @return [Types::DataSource]
#
# @!attribute [rw] role_arn
# The ARN of the IAM role that has permissions to read from the Amazon
# S3 data source.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/CreateDatasetImportJobRequest AWS API Documentation
#
class CreateDatasetImportJobRequest < Struct.new(
:job_name,
:dataset_arn,
:data_source,
:role_arn)
include Aws::Structure
end
# @!attribute [rw] dataset_import_job_arn
# The ARN of the dataset import job.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/CreateDatasetImportJobResponse AWS API Documentation
#
class CreateDatasetImportJobResponse < Struct.new(
:dataset_import_job_arn)
include Aws::Structure
end
# @note When making an API call, you may pass CreateDatasetRequest
# data as a hash:
#
# {
# name: "Name", # required
# schema_arn: "Arn", # required
# dataset_group_arn: "Arn", # required
# dataset_type: "DatasetType", # required
# }
#
# @!attribute [rw] name
# The name for the dataset.
# @return [String]
#
# @!attribute [rw] schema_arn
# The ARN of the schema to associate with the dataset. The schema
# defines the dataset fields.
# @return [String]
#
# @!attribute [rw] dataset_group_arn
# The Amazon Resource Name (ARN) of the dataset group to add the
# dataset to.
# @return [String]
#
# @!attribute [rw] dataset_type
# The type of dataset.
#
# One of the following (case insensitive) values:
#
# * Interactions
#
# * Items
#
# * Users
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/CreateDatasetRequest AWS API Documentation
#
class CreateDatasetRequest < Struct.new(
:name,
:schema_arn,
:dataset_group_arn,
:dataset_type)
include Aws::Structure
end
# @!attribute [rw] dataset_arn
# The ARN of the dataset.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/CreateDatasetResponse AWS API Documentation
#
class CreateDatasetResponse < Struct.new(
:dataset_arn)
include Aws::Structure
end
# @note When making an API call, you may pass CreateEventTrackerRequest
# data as a hash:
#
# {
# name: "Name", # required
# dataset_group_arn: "Arn", # required
# }
#
# @!attribute [rw] name
# The name for the event tracker.
# @return [String]
#
# @!attribute [rw] dataset_group_arn
# The Amazon Resource Name (ARN) of the dataset group that receives
# the event data.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/CreateEventTrackerRequest AWS API Documentation
#
class CreateEventTrackerRequest < Struct.new(
:name,
:dataset_group_arn)
include Aws::Structure
end
# @!attribute [rw] event_tracker_arn
# The ARN of the event tracker.
# @return [String]
#
# @!attribute [rw] tracking_id
# The ID of the event tracker. Include this ID in requests to the
# [PutEvents][1] API.
#
#
#
# [1]: https://docs.aws.amazon.com/personalize/latest/dg/API_UBS_PutEvents.html
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/CreateEventTrackerResponse AWS API Documentation
#
class CreateEventTrackerResponse < Struct.new(
:event_tracker_arn,
:tracking_id)
include Aws::Structure
end
# @note When making an API call, you may pass CreateFilterRequest
# data as a hash:
#
# {
# name: "Name", # required
# dataset_group_arn: "Arn", # required
# filter_expression: "FilterExpression", # required
# }
#
# @!attribute [rw] name
# The name of the filter to create.
# @return [String]
#
# @!attribute [rw] dataset_group_arn
# The ARN of the dataset group that the filter will belong to.
# @return [String]
#
# @!attribute [rw] filter_expression
# The filter expression that designates the interaction types that the
# filter will filter out. A filter expression must follow the
# following format:
#
# `EXCLUDE itemId WHERE INTERACTIONS.event_type in ("EVENT_TYPE")`
#
# Where "EVENT\_TYPE" is the type of event to filter out. To filter
# out all items with any interactions history, set `"*"` as the
# EVENT\_TYPE. For more information, see Using Filters with Amazon
# Personalize.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/CreateFilterRequest AWS API Documentation
#
class CreateFilterRequest < Struct.new(
:name,
:dataset_group_arn,
:filter_expression)
include Aws::Structure
end
# @!attribute [rw] filter_arn
# The ARN of the new filter.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/CreateFilterResponse AWS API Documentation
#
class CreateFilterResponse < Struct.new(
:filter_arn)
include Aws::Structure
end
# @note When making an API call, you may pass CreateSchemaRequest
# data as a hash:
#
# {
# name: "Name", # required
# schema: "AvroSchema", # required
# }
#
# @!attribute [rw] name
# The name for the schema.
# @return [String]
#
# @!attribute [rw] schema
# A schema in Avro JSON format.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/CreateSchemaRequest AWS API Documentation
#
class CreateSchemaRequest < Struct.new(
:name,
:schema)
include Aws::Structure
end
# @!attribute [rw] schema_arn
# The Amazon Resource Name (ARN) of the created schema.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/CreateSchemaResponse AWS API Documentation
#
class CreateSchemaResponse < Struct.new(
:schema_arn)
include Aws::Structure
end
# @note When making an API call, you may pass CreateSolutionRequest
# data as a hash:
#
# {
# name: "Name", # required
# perform_hpo: false,
# perform_auto_ml: false,
# recipe_arn: "Arn",
# dataset_group_arn: "Arn", # required
# event_type: "EventType",
# solution_config: {
# event_value_threshold: "EventValueThreshold",
# hpo_config: {
# hpo_objective: {
# type: "HPOObjectiveType",
# metric_name: "MetricName",
# metric_regex: "MetricRegex",
# },
# hpo_resource_config: {
# max_number_of_training_jobs: "HPOResource",
# max_parallel_training_jobs: "HPOResource",
# },
# algorithm_hyper_parameter_ranges: {
# integer_hyper_parameter_ranges: [
# {
# name: "ParameterName",
# min_value: 1,
# max_value: 1,
# },
# ],
# continuous_hyper_parameter_ranges: [
# {
# name: "ParameterName",
# min_value: 1.0,
# max_value: 1.0,
# },
# ],
# categorical_hyper_parameter_ranges: [
# {
# name: "ParameterName",
# values: ["CategoricalValue"],
# },
# ],
# },
# },
# algorithm_hyper_parameters: {
# "ParameterName" => "ParameterValue",
# },
# feature_transformation_parameters: {
# "ParameterName" => "ParameterValue",
# },
# auto_ml_config: {
# metric_name: "MetricName",
# recipe_list: ["Arn"],
# },
# },
# }
#
# @!attribute [rw] name
# The name for the solution.
# @return [String]
#
# @!attribute [rw] perform_hpo
# Whether to perform hyperparameter optimization (HPO) on the
# specified or selected recipe. The default is `false`.
#
# When performing AutoML, this parameter is always `true` and you
# should not set it to `false`.
# @return [Boolean]
#
# @!attribute [rw] perform_auto_ml
# Whether to perform automated machine learning (AutoML). The default
# is `false`. For this case, you must specify `recipeArn`.
#
# When set to `true`, Amazon Personalize analyzes your training data
# and selects the optimal USER\_PERSONALIZATION recipe and
# hyperparameters. In this case, you must omit `recipeArn`. Amazon
# Personalize determines the optimal recipe by running tests with
# different values for the hyperparameters. AutoML lengthens the
# training process as compared to selecting a specific recipe.
# @return [Boolean]
#
# @!attribute [rw] recipe_arn
# The ARN of the recipe to use for model training. Only specified when
# `performAutoML` is false.
# @return [String]
#
# @!attribute [rw] dataset_group_arn
# The Amazon Resource Name (ARN) of the dataset group that provides
# the training data.
# @return [String]
#
# @!attribute [rw] event_type
# When your have multiple event types (using an `EVENT_TYPE` schema
# field), this parameter specifies which event type (for example,
# 'click' or 'like') is used for training the model.
# @return [String]
#
# @!attribute [rw] solution_config
# The configuration to use with the solution. When `performAutoML` is
# set to true, Amazon Personalize only evaluates the `autoMLConfig`
# section of the solution configuration.
# @return [Types::SolutionConfig]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/CreateSolutionRequest AWS API Documentation
#
class CreateSolutionRequest < Struct.new(:name, :perform_hpo, :perform_auto_ml,
                                         :recipe_arn, :dataset_group_arn,
                                         :event_type, :solution_config)
  include Aws::Structure
end
# @!attribute [rw] solution_arn
# The ARN of the solution.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/CreateSolutionResponse AWS API Documentation
#
class CreateSolutionResponse < Struct.new(:solution_arn)
  include Aws::Structure
end
# @note When making an API call, you may pass CreateSolutionVersionRequest
# data as a hash:
#
# {
# solution_arn: "Arn", # required
# training_mode: "FULL", # accepts FULL, UPDATE
# }
#
# @!attribute [rw] solution_arn
# The Amazon Resource Name (ARN) of the solution containing the
# training configuration information.
# @return [String]
#
# @!attribute [rw] training_mode
# The scope of training to be performed when creating the solution
# version. The `FULL` option trains the solution version based on the
# entirety of the input solution's training data, while the `UPDATE`
# option processes only the data that has changed in comparison to the
# input solution. Choose `UPDATE` when you want to incrementally
# update your solution version instead of creating an entirely new
# one.
#
# The `UPDATE` option can only be used when you already have an active
# solution version created from the input solution using the `FULL`
# option and the input solution was trained with the
# native-recipe-hrnn-coldstart recipe.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/CreateSolutionVersionRequest AWS API Documentation
#
class CreateSolutionVersionRequest < Struct.new(:solution_arn, :training_mode)
  include Aws::Structure
end
# @!attribute [rw] solution_version_arn
# The ARN of the new solution version.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/CreateSolutionVersionResponse AWS API Documentation
#
class CreateSolutionVersionResponse < Struct.new(:solution_version_arn)
  include Aws::Structure
end
# Describes the data source that contains the data to upload to a
# dataset.
#
# @note When making an API call, you may pass DataSource
# data as a hash:
#
# {
# data_location: "S3Location",
# }
#
# @!attribute [rw] data_location
# The path to the Amazon S3 bucket where the data that you want to
# upload to your dataset is stored. For example:
#
# `s3://bucket-name/training-data.csv`
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DataSource AWS API Documentation
#
class DataSource < Struct.new(:data_location)
  include Aws::Structure
end
# Provides metadata for a dataset.
#
# @!attribute [rw] name
# The name of the dataset.
# @return [String]
#
# @!attribute [rw] dataset_arn
# The Amazon Resource Name (ARN) of the dataset that you want metadata
# for.
# @return [String]
#
# @!attribute [rw] dataset_group_arn
# The Amazon Resource Name (ARN) of the dataset group.
# @return [String]
#
# @!attribute [rw] dataset_type
# One of the following values:
#
# * Interactions
#
# * Items
#
# * Users
# @return [String]
#
# @!attribute [rw] schema_arn
# The ARN of the associated schema.
# @return [String]
#
# @!attribute [rw] status
# The status of the dataset.
#
# A dataset can be in one of the following states:
#
# * CREATE PENDING > CREATE IN\_PROGRESS > ACTIVE -or- CREATE
# FAILED
#
# * DELETE PENDING > DELETE IN\_PROGRESS
# @return [String]
#
# @!attribute [rw] creation_date_time
# The creation date and time (in Unix time) of the dataset.
# @return [Time]
#
# @!attribute [rw] last_updated_date_time
# A time stamp that shows when the dataset was updated.
# @return [Time]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/Dataset AWS API Documentation
#
class Dataset < Struct.new(:name, :dataset_arn, :dataset_group_arn,
                           :dataset_type, :schema_arn, :status,
                           :creation_date_time, :last_updated_date_time)
  include Aws::Structure
end
# A dataset group is a collection of related datasets (Interactions,
# User, and Item). You create a dataset group by calling
# CreateDatasetGroup. You then create a dataset and add it to a dataset
# group by calling CreateDataset. The dataset group is used to create
# and train a solution by calling CreateSolution. A dataset group can
# contain only one of each type of dataset.
#
# You can specify an AWS Key Management Service (KMS) key to encrypt the
# datasets in the group.
#
# @!attribute [rw] name
# The name of the dataset group.
# @return [String]
#
# @!attribute [rw] dataset_group_arn
# The Amazon Resource Name (ARN) of the dataset group.
# @return [String]
#
# @!attribute [rw] status
# The current status of the dataset group.
#
# A dataset group can be in one of the following states:
#
# * CREATE PENDING > CREATE IN\_PROGRESS > ACTIVE -or- CREATE
# FAILED
#
# * DELETE PENDING
# @return [String]
#
# @!attribute [rw] role_arn
# The ARN of the IAM role that has permissions to create the dataset
# group.
# @return [String]
#
# @!attribute [rw] kms_key_arn
# The Amazon Resource Name (ARN) of the KMS key used to encrypt the
# datasets.
# @return [String]
#
# @!attribute [rw] creation_date_time
# The creation date and time (in Unix time) of the dataset group.
# @return [Time]
#
# @!attribute [rw] last_updated_date_time
# The last update date and time (in Unix time) of the dataset group.
# @return [Time]
#
# @!attribute [rw] failure_reason
# If creating a dataset group fails, provides the reason why.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DatasetGroup AWS API Documentation
#
class DatasetGroup < Struct.new(:name, :dataset_group_arn, :status, :role_arn,
                                :kms_key_arn, :creation_date_time,
                                :last_updated_date_time, :failure_reason)
  include Aws::Structure
end
# Provides a summary of the properties of a dataset group. For a
# complete listing, call the DescribeDatasetGroup API.
#
# @!attribute [rw] name
# The name of the dataset group.
# @return [String]
#
# @!attribute [rw] dataset_group_arn
# The Amazon Resource Name (ARN) of the dataset group.
# @return [String]
#
# @!attribute [rw] status
# The status of the dataset group.
#
# A dataset group can be in one of the following states:
#
# * CREATE PENDING > CREATE IN\_PROGRESS > ACTIVE -or- CREATE
# FAILED
#
# * DELETE PENDING
# @return [String]
#
# @!attribute [rw] creation_date_time
# The date and time (in Unix time) that the dataset group was created.
# @return [Time]
#
# @!attribute [rw] last_updated_date_time
# The date and time (in Unix time) that the dataset group was last
# updated.
# @return [Time]
#
# @!attribute [rw] failure_reason
# If creating a dataset group fails, the reason behind the failure.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DatasetGroupSummary AWS API Documentation
#
class DatasetGroupSummary < Struct.new(:name, :dataset_group_arn, :status,
                                       :creation_date_time,
                                       :last_updated_date_time,
                                       :failure_reason)
  include Aws::Structure
end
# Describes a job that imports training data from a data source (Amazon
# S3 bucket) to an Amazon Personalize dataset. For more information, see
# CreateDatasetImportJob.
#
# A dataset import job can be in one of the following states:
#
# * CREATE PENDING > CREATE IN\_PROGRESS > ACTIVE -or- CREATE
# FAILED
#
# ^
#
# @!attribute [rw] job_name
# The name of the import job.
# @return [String]
#
# @!attribute [rw] dataset_import_job_arn
# The ARN of the dataset import job.
# @return [String]
#
# @!attribute [rw] dataset_arn
# The Amazon Resource Name (ARN) of the dataset that receives the
# imported data.
# @return [String]
#
# @!attribute [rw] data_source
# The Amazon S3 bucket that contains the training data to import.
# @return [Types::DataSource]
#
# @!attribute [rw] role_arn
# The ARN of the AWS Identity and Access Management (IAM) role that
# has permissions to read from the Amazon S3 data source.
# @return [String]
#
# @!attribute [rw] status
# The status of the dataset import job.
#
# A dataset import job can be in one of the following states:
#
# * CREATE PENDING > CREATE IN\_PROGRESS > ACTIVE -or- CREATE
# FAILED
#
# ^
# @return [String]
#
# @!attribute [rw] creation_date_time
# The creation date and time (in Unix time) of the dataset import job.
# @return [Time]
#
# @!attribute [rw] last_updated_date_time
# The date and time (in Unix time) the dataset was last updated.
# @return [Time]
#
# @!attribute [rw] failure_reason
# If a dataset import job fails, provides the reason why.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DatasetImportJob AWS API Documentation
#
class DatasetImportJob < Struct.new(:job_name, :dataset_import_job_arn,
                                    :dataset_arn, :data_source, :role_arn,
                                    :status, :creation_date_time,
                                    :last_updated_date_time, :failure_reason)
  include Aws::Structure
end
# Provides a summary of the properties of a dataset import job. For a
# complete listing, call the DescribeDatasetImportJob API.
#
# @!attribute [rw] dataset_import_job_arn
# The Amazon Resource Name (ARN) of the dataset import job.
# @return [String]
#
# @!attribute [rw] job_name
# The name of the dataset import job.
# @return [String]
#
# @!attribute [rw] status
# The status of the dataset import job.
#
# A dataset import job can be in one of the following states:
#
# * CREATE PENDING > CREATE IN\_PROGRESS > ACTIVE -or- CREATE
# FAILED
#
# ^
# @return [String]
#
# @!attribute [rw] creation_date_time
# The date and time (in Unix time) that the dataset import job was
# created.
# @return [Time]
#
# @!attribute [rw] last_updated_date_time
# The date and time (in Unix time) that the dataset was last updated.
# @return [Time]
#
# @!attribute [rw] failure_reason
# If a dataset import job fails, the reason behind the failure.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DatasetImportJobSummary AWS API Documentation
#
class DatasetImportJobSummary < Struct.new(:dataset_import_job_arn, :job_name,
                                           :status, :creation_date_time,
                                           :last_updated_date_time,
                                           :failure_reason)
  include Aws::Structure
end
# Describes the schema for a dataset. For more information on schemas,
# see CreateSchema.
#
# @!attribute [rw] name
# The name of the schema.
# @return [String]
#
# @!attribute [rw] schema_arn
# The Amazon Resource Name (ARN) of the schema.
# @return [String]
#
# @!attribute [rw] schema
# The schema.
# @return [String]
#
# @!attribute [rw] creation_date_time
# The date and time (in Unix time) that the schema was created.
# @return [Time]
#
# @!attribute [rw] last_updated_date_time
# The date and time (in Unix time) that the schema was last updated.
# @return [Time]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DatasetSchema AWS API Documentation
#
class DatasetSchema < Struct.new(:name, :schema_arn, :schema,
                                 :creation_date_time, :last_updated_date_time)
  include Aws::Structure
end
# Provides a summary of the properties of a dataset schema. For a
# complete listing, call the DescribeSchema API.
#
# @!attribute [rw] name
# The name of the schema.
# @return [String]
#
# @!attribute [rw] schema_arn
# The Amazon Resource Name (ARN) of the schema.
# @return [String]
#
# @!attribute [rw] creation_date_time
# The date and time (in Unix time) that the schema was created.
# @return [Time]
#
# @!attribute [rw] last_updated_date_time
# The date and time (in Unix time) that the schema was last updated.
# @return [Time]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DatasetSchemaSummary AWS API Documentation
#
class DatasetSchemaSummary < Struct.new(:name, :schema_arn,
                                        :creation_date_time,
                                        :last_updated_date_time)
  include Aws::Structure
end
# Provides a summary of the properties of a dataset. For a complete
# listing, call the DescribeDataset API.
#
# @!attribute [rw] name
# The name of the dataset.
# @return [String]
#
# @!attribute [rw] dataset_arn
# The Amazon Resource Name (ARN) of the dataset.
# @return [String]
#
# @!attribute [rw] dataset_type
# The dataset type. One of the following values:
#
# * Interactions
#
# * Items
#
# * Users
#
# * Event-Interactions
# @return [String]
#
# @!attribute [rw] status
# The status of the dataset.
#
# A dataset can be in one of the following states:
#
# * CREATE PENDING > CREATE IN\_PROGRESS > ACTIVE -or- CREATE
# FAILED
#
# * DELETE PENDING > DELETE IN\_PROGRESS
# @return [String]
#
# @!attribute [rw] creation_date_time
# The date and time (in Unix time) that the dataset was created.
# @return [Time]
#
# @!attribute [rw] last_updated_date_time
# The date and time (in Unix time) that the dataset was last updated.
# @return [Time]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DatasetSummary AWS API Documentation
#
class DatasetSummary < Struct.new(:name, :dataset_arn, :dataset_type, :status,
                                  :creation_date_time, :last_updated_date_time)
  include Aws::Structure
end
# Provides the name and default range of a categorical hyperparameter
# and whether the hyperparameter is tunable. A tunable hyperparameter
# can have its value determined during hyperparameter optimization
# (HPO).
#
# @!attribute [rw] name
# The name of the hyperparameter.
# @return [String]
#
# @!attribute [rw] values
# A list of the categories for the hyperparameter.
# @return [Array<String>]
#
# @!attribute [rw] is_tunable
# Whether the hyperparameter is tunable.
# @return [Boolean]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DefaultCategoricalHyperParameterRange AWS API Documentation
#
class DefaultCategoricalHyperParameterRange < Struct.new(:name, :values,
                                                         :is_tunable)
  include Aws::Structure
end
# Provides the name and default range of a continuous hyperparameter and
# whether the hyperparameter is tunable. A tunable hyperparameter can
# have its value determined during hyperparameter optimization (HPO).
#
# @!attribute [rw] name
# The name of the hyperparameter.
# @return [String]
#
# @!attribute [rw] min_value
# The minimum allowable value for the hyperparameter.
# @return [Float]
#
# @!attribute [rw] max_value
# The maximum allowable value for the hyperparameter.
# @return [Float]
#
# @!attribute [rw] is_tunable
# Whether the hyperparameter is tunable.
# @return [Boolean]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DefaultContinuousHyperParameterRange AWS API Documentation
#
class DefaultContinuousHyperParameterRange < Struct.new(:name, :min_value,
                                                        :max_value,
                                                        :is_tunable)
  include Aws::Structure
end
# Specifies the hyperparameters and their default ranges.
# Hyperparameters can be categorical, continuous, or integer-valued.
#
# @!attribute [rw] integer_hyper_parameter_ranges
# The integer-valued hyperparameters and their default ranges.
# @return [Array<Types::DefaultIntegerHyperParameterRange>]
#
# @!attribute [rw] continuous_hyper_parameter_ranges
# The continuous hyperparameters and their default ranges.
# @return [Array<Types::DefaultContinuousHyperParameterRange>]
#
# @!attribute [rw] categorical_hyper_parameter_ranges
# The categorical hyperparameters and their default ranges.
# @return [Array<Types::DefaultCategoricalHyperParameterRange>]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DefaultHyperParameterRanges AWS API Documentation
#
class DefaultHyperParameterRanges < Struct.new(:integer_hyper_parameter_ranges,
                                               :continuous_hyper_parameter_ranges,
                                               :categorical_hyper_parameter_ranges)
  include Aws::Structure
end
# Provides the name and default range of an integer-valued hyperparameter
# and whether the hyperparameter is tunable. A tunable hyperparameter
# can have its value determined during hyperparameter optimization
# (HPO).
#
# @!attribute [rw] name
# The name of the hyperparameter.
# @return [String]
#
# @!attribute [rw] min_value
# The minimum allowable value for the hyperparameter.
# @return [Integer]
#
# @!attribute [rw] max_value
# The maximum allowable value for the hyperparameter.
# @return [Integer]
#
# @!attribute [rw] is_tunable
# Indicates whether the hyperparameter is tunable.
# @return [Boolean]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DefaultIntegerHyperParameterRange AWS API Documentation
#
class DefaultIntegerHyperParameterRange < Struct.new(:name, :min_value,
                                                     :max_value, :is_tunable)
  include Aws::Structure
end
# @note When making an API call, you may pass DeleteCampaignRequest
# data as a hash:
#
# {
# campaign_arn: "Arn", # required
# }
#
# @!attribute [rw] campaign_arn
# The Amazon Resource Name (ARN) of the campaign to delete.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DeleteCampaignRequest AWS API Documentation
#
class DeleteCampaignRequest < Struct.new(:campaign_arn)
  include Aws::Structure
end
# @note When making an API call, you may pass DeleteDatasetGroupRequest
# data as a hash:
#
# {
# dataset_group_arn: "Arn", # required
# }
#
# @!attribute [rw] dataset_group_arn
# The ARN of the dataset group to delete.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DeleteDatasetGroupRequest AWS API Documentation
#
class DeleteDatasetGroupRequest < Struct.new(:dataset_group_arn)
  include Aws::Structure
end
# @note When making an API call, you may pass DeleteDatasetRequest
# data as a hash:
#
# {
# dataset_arn: "Arn", # required
# }
#
# @!attribute [rw] dataset_arn
# The Amazon Resource Name (ARN) of the dataset to delete.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DeleteDatasetRequest AWS API Documentation
#
class DeleteDatasetRequest < Struct.new(:dataset_arn)
  include Aws::Structure
end
# @note When making an API call, you may pass DeleteEventTrackerRequest
# data as a hash:
#
# {
# event_tracker_arn: "Arn", # required
# }
#
# @!attribute [rw] event_tracker_arn
# The Amazon Resource Name (ARN) of the event tracker to delete.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DeleteEventTrackerRequest AWS API Documentation
#
class DeleteEventTrackerRequest < Struct.new(:event_tracker_arn)
  include Aws::Structure
end
# @note When making an API call, you may pass DeleteFilterRequest
# data as a hash:
#
# {
# filter_arn: "Arn", # required
# }
#
# @!attribute [rw] filter_arn
# The ARN of the filter to delete.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DeleteFilterRequest AWS API Documentation
#
class DeleteFilterRequest < Struct.new(:filter_arn)
  include Aws::Structure
end
# @note When making an API call, you may pass DeleteSchemaRequest
# data as a hash:
#
# {
# schema_arn: "Arn", # required
# }
#
# @!attribute [rw] schema_arn
# The Amazon Resource Name (ARN) of the schema to delete.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DeleteSchemaRequest AWS API Documentation
#
class DeleteSchemaRequest < Struct.new(:schema_arn)
  include Aws::Structure
end
# @note When making an API call, you may pass DeleteSolutionRequest
# data as a hash:
#
# {
# solution_arn: "Arn", # required
# }
#
# @!attribute [rw] solution_arn
# The ARN of the solution to delete.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DeleteSolutionRequest AWS API Documentation
#
class DeleteSolutionRequest < Struct.new(:solution_arn)
  include Aws::Structure
end
# @note When making an API call, you may pass DescribeAlgorithmRequest
# data as a hash:
#
# {
# algorithm_arn: "Arn", # required
# }
#
# @!attribute [rw] algorithm_arn
# The Amazon Resource Name (ARN) of the algorithm to describe.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DescribeAlgorithmRequest AWS API Documentation
#
class DescribeAlgorithmRequest < Struct.new(:algorithm_arn)
  include Aws::Structure
end
# @!attribute [rw] algorithm
# A listing of the properties of the algorithm.
# @return [Types::Algorithm]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DescribeAlgorithmResponse AWS API Documentation
#
class DescribeAlgorithmResponse < Struct.new(:algorithm)
  include Aws::Structure
end
# @note When making an API call, you may pass DescribeBatchInferenceJobRequest
# data as a hash:
#
# {
# batch_inference_job_arn: "Arn", # required
# }
#
# @!attribute [rw] batch_inference_job_arn
# The ARN of the batch inference job to describe.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DescribeBatchInferenceJobRequest AWS API Documentation
#
class DescribeBatchInferenceJobRequest < Struct.new(:batch_inference_job_arn)
  include Aws::Structure
end
# @!attribute [rw] batch_inference_job
# Information on the specified batch inference job.
# @return [Types::BatchInferenceJob]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DescribeBatchInferenceJobResponse AWS API Documentation
#
class DescribeBatchInferenceJobResponse < Struct.new(:batch_inference_job)
  include Aws::Structure
end
# @note When making an API call, you may pass DescribeCampaignRequest
# data as a hash:
#
# {
# campaign_arn: "Arn", # required
# }
#
# @!attribute [rw] campaign_arn
# The Amazon Resource Name (ARN) of the campaign.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DescribeCampaignRequest AWS API Documentation
#
class DescribeCampaignRequest < Struct.new(:campaign_arn)
  include Aws::Structure
end
# @!attribute [rw] campaign
# The properties of the campaign.
# @return [Types::Campaign]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DescribeCampaignResponse AWS API Documentation
#
class DescribeCampaignResponse < Struct.new(:campaign)
  include Aws::Structure
end
# @note When making an API call, you may pass DescribeDatasetGroupRequest
# data as a hash:
#
# {
# dataset_group_arn: "Arn", # required
# }
#
# @!attribute [rw] dataset_group_arn
# The Amazon Resource Name (ARN) of the dataset group to describe.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DescribeDatasetGroupRequest AWS API Documentation
#
class DescribeDatasetGroupRequest < Struct.new(:dataset_group_arn)
  include Aws::Structure
end
# @!attribute [rw] dataset_group
# A listing of the dataset group's properties.
# @return [Types::DatasetGroup]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DescribeDatasetGroupResponse AWS API Documentation
#
class DescribeDatasetGroupResponse < Struct.new(:dataset_group)
  include Aws::Structure
end
# @note When making an API call, you may pass DescribeDatasetImportJobRequest
# data as a hash:
#
# {
# dataset_import_job_arn: "Arn", # required
# }
#
# @!attribute [rw] dataset_import_job_arn
# The Amazon Resource Name (ARN) of the dataset import job to
# describe.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DescribeDatasetImportJobRequest AWS API Documentation
#
class DescribeDatasetImportJobRequest < Struct.new(:dataset_import_job_arn)
  include Aws::Structure
end
# @!attribute [rw] dataset_import_job
# Information about the dataset import job, including the status.
#
# The status is one of the following values:
#
# * CREATE PENDING
#
# * CREATE IN\_PROGRESS
#
# * ACTIVE
#
# * CREATE FAILED
# @return [Types::DatasetImportJob]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DescribeDatasetImportJobResponse AWS API Documentation
#
class DescribeDatasetImportJobResponse < Struct.new(:dataset_import_job)
  include Aws::Structure
end
# @note When making an API call, you may pass DescribeDatasetRequest
# data as a hash:
#
# {
# dataset_arn: "Arn", # required
# }
#
# @!attribute [rw] dataset_arn
# The Amazon Resource Name (ARN) of the dataset to describe.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DescribeDatasetRequest AWS API Documentation
#
class DescribeDatasetRequest < Struct.new(:dataset_arn)
  include Aws::Structure
end
# @!attribute [rw] dataset
# A listing of the dataset's properties.
# @return [Types::Dataset]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DescribeDatasetResponse AWS API Documentation
#
class DescribeDatasetResponse < Struct.new(:dataset)
  include Aws::Structure
end
# @note When making an API call, you may pass DescribeEventTrackerRequest
# data as a hash:
#
# {
# event_tracker_arn: "Arn", # required
# }
#
# @!attribute [rw] event_tracker_arn
# The Amazon Resource Name (ARN) of the event tracker to describe.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DescribeEventTrackerRequest AWS API Documentation
#
class DescribeEventTrackerRequest < Struct.new(:event_tracker_arn)
  include Aws::Structure
end
# @!attribute [rw] event_tracker
# An object that describes the event tracker.
# @return [Types::EventTracker]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DescribeEventTrackerResponse AWS API Documentation
#
class DescribeEventTrackerResponse < Struct.new(:event_tracker)
  include Aws::Structure
end
# @note When making an API call, you may pass DescribeFeatureTransformationRequest
# data as a hash:
#
# {
# feature_transformation_arn: "Arn", # required
# }
#
# @!attribute [rw] feature_transformation_arn
# The Amazon Resource Name (ARN) of the feature transformation to
# describe.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DescribeFeatureTransformationRequest AWS API Documentation
#
class DescribeFeatureTransformationRequest < Struct.new(:feature_transformation_arn)
  include Aws::Structure
end
# @!attribute [rw] feature_transformation
# A listing of the FeatureTransformation properties.
# @return [Types::FeatureTransformation]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DescribeFeatureTransformationResponse AWS API Documentation
#
class DescribeFeatureTransformationResponse < Struct.new(:feature_transformation)
  include Aws::Structure
end
# @note When making an API call, you may pass DescribeFilterRequest
# data as a hash:
#
# {
# filter_arn: "Arn", # required
# }
#
# @!attribute [rw] filter_arn
# The ARN of the filter to describe.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DescribeFilterRequest AWS API Documentation
#
class DescribeFilterRequest < Struct.new(:filter_arn)
  include Aws::Structure
end
# @!attribute [rw] filter
# The filter's details.
# @return [Types::Filter]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DescribeFilterResponse AWS API Documentation
#
class DescribeFilterResponse < Struct.new(:filter)
  include Aws::Structure
end
# @note When making an API call, you may pass DescribeRecipeRequest
# data as a hash:
#
# {
# recipe_arn: "Arn", # required
# }
#
# @!attribute [rw] recipe_arn
# The Amazon Resource Name (ARN) of the recipe to describe.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DescribeRecipeRequest AWS API Documentation
#
class DescribeRecipeRequest < Struct.new(:recipe_arn)
  include Aws::Structure
end
# @!attribute [rw] recipe
# An object that describes the recipe.
# @return [Types::Recipe]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DescribeRecipeResponse AWS API Documentation
#
class DescribeRecipeResponse < Struct.new(:recipe)
  include Aws::Structure
end
# @note When making an API call, you may pass DescribeSchemaRequest
# data as a hash:
#
# {
# schema_arn: "Arn", # required
# }
#
# @!attribute [rw] schema_arn
# The Amazon Resource Name (ARN) of the schema to retrieve.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DescribeSchemaRequest AWS API Documentation
#
class DescribeSchemaRequest < Struct.new(:schema_arn)
  include Aws::Structure
end
# @!attribute [rw] schema
# The requested schema.
# @return [Types::DatasetSchema]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DescribeSchemaResponse AWS API Documentation
#
class DescribeSchemaResponse < Struct.new(:schema)
  include Aws::Structure
end
# @note When making an API call, you may pass DescribeSolutionRequest
# data as a hash:
#
# {
# solution_arn: "Arn", # required
# }
#
# @!attribute [rw] solution_arn
# The Amazon Resource Name (ARN) of the solution to describe.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DescribeSolutionRequest AWS API Documentation
#
class DescribeSolutionRequest < Struct.new(:solution_arn)
  include Aws::Structure
end
# @!attribute [rw] solution
# An object that describes the solution.
# @return [Types::Solution]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DescribeSolutionResponse AWS API Documentation
#
class DescribeSolutionResponse < Struct.new(:solution)
  include Aws::Structure
end
# @note When making an API call, you may pass DescribeSolutionVersionRequest
# data as a hash:
#
# {
# solution_version_arn: "Arn", # required
# }
#
# @!attribute [rw] solution_version_arn
# The Amazon Resource Name (ARN) of the solution version.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DescribeSolutionVersionRequest AWS API Documentation
#
class DescribeSolutionVersionRequest < Struct.new(:solution_version_arn)
  include Aws::Structure
end
# @!attribute [rw] solution_version
# The solution version.
# @return [Types::SolutionVersion]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DescribeSolutionVersionResponse AWS API Documentation
#
class DescribeSolutionVersionResponse < Struct.new(:solution_version)
  include Aws::Structure
end
# Provides information about an event tracker.
#
# @!attribute [rw] name
# The name of the event tracker.
# @return [String]
#
# @!attribute [rw] event_tracker_arn
# The ARN of the event tracker.
# @return [String]
#
# @!attribute [rw] account_id
# The Amazon AWS account that owns the event tracker.
# @return [String]
#
# @!attribute [rw] tracking_id
# The ID of the event tracker. Include this ID in requests to the
# [PutEvents][1] API.
#
#
#
# [1]: https://docs.aws.amazon.com/personalize/latest/dg/API_UBS_PutEvents.html
# @return [String]
#
# @!attribute [rw] dataset_group_arn
# The Amazon Resource Name (ARN) of the dataset group that receives
# the event data.
# @return [String]
#
# @!attribute [rw] status
# The status of the event tracker.
#
# An event tracker can be in one of the following states:
#
# * CREATE PENDING > CREATE IN\_PROGRESS > ACTIVE -or- CREATE
# FAILED
#
# * DELETE PENDING > DELETE IN\_PROGRESS
# @return [String]
#
# @!attribute [rw] creation_date_time
# The date and time (in Unix format) that the event tracker was
# created.
# @return [Time]
#
# @!attribute [rw] last_updated_date_time
# The date and time (in Unix time) that the event tracker was last
# updated.
# @return [Time]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/EventTracker AWS API Documentation
#
class EventTracker < Struct.new(:name, :event_tracker_arn, :account_id,
                                :tracking_id, :dataset_group_arn, :status,
                                :creation_date_time, :last_updated_date_time)
  include Aws::Structure
end
# Provides a summary of the properties of an event tracker. For a
# complete listing, call the DescribeEventTracker API.
#
# @!attribute [rw] name
# The name of the event tracker.
# @return [String]
#
# @!attribute [rw] event_tracker_arn
# The Amazon Resource Name (ARN) of the event tracker.
# @return [String]
#
# @!attribute [rw] status
# The status of the event tracker.
#
# An event tracker can be in one of the following states:
#
# * CREATE PENDING > CREATE IN\_PROGRESS > ACTIVE -or- CREATE
# FAILED
#
# * DELETE PENDING > DELETE IN\_PROGRESS
# @return [String]
#
# @!attribute [rw] creation_date_time
# The date and time (in Unix time) that the event tracker was created.
# @return [Time]
#
# @!attribute [rw] last_updated_date_time
# The date and time (in Unix time) that the event tracker was last
# updated.
# @return [Time]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/EventTrackerSummary AWS API Documentation
#
class EventTrackerSummary < Struct.new(:name, :event_tracker_arn, :status,
                                       :creation_date_time,
                                       :last_updated_date_time)
  include Aws::Structure
end
# Provides feature transformation information. Feature transformation is
# the process of modifying raw input data into a form more suitable for
# model training.
#
# @!attribute [rw] name
# The name of the feature transformation.
# @return [String]
#
# @!attribute [rw] feature_transformation_arn
# The Amazon Resource Name (ARN) of the FeatureTransformation object.
# @return [String]
#
# @!attribute [rw] default_parameters
# Provides the default parameters for feature transformation.
# @return [Hash<String,String>]
#
# @!attribute [rw] creation_date_time
# The creation date and time (in Unix time) of the feature
# transformation.
# @return [Time]
#
# @!attribute [rw] last_updated_date_time
# The last update date and time (in Unix time) of the feature
# transformation.
# @return [Time]
#
# @!attribute [rw] status
# The status of the feature transformation.
#
# A feature transformation can be in one of the following states:
#
# * CREATE PENDING > CREATE IN\_PROGRESS > ACTIVE -or- CREATE
# FAILED
#
# ^
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/FeatureTransformation AWS API Documentation
#
class FeatureTransformation < Struct.new(:name, :feature_transformation_arn,
                                         :default_parameters,
                                         :creation_date_time,
                                         :last_updated_date_time, :status)
  include Aws::Structure
end
# Contains information on a recommendation filter, including its ARN,
# status, and filter expression.
#
# @!attribute [rw] name
# The name of the filter.
# @return [String]
#
# @!attribute [rw] filter_arn
# The ARN of the filter.
# @return [String]
#
# @!attribute [rw] creation_date_time
# The time at which the filter was created.
# @return [Time]
#
# @!attribute [rw] last_updated_date_time
# The time at which the filter was last updated.
# @return [Time]
#
# @!attribute [rw] dataset_group_arn
# The ARN of the dataset group to which the filter belongs.
# @return [String]
#
# @!attribute [rw] failure_reason
# If the filter failed, the reason for its failure.
# @return [String]
#
# @!attribute [rw] filter_expression
# Specifies the type of item interactions to filter out of
# recommendation results. The filter expression must follow the
# following format:
#
# `EXCLUDE itemId WHERE INTERACTIONS.event_type in ("EVENT_TYPE")`
#
# Where "EVENT\_TYPE" is the type of event to filter out. For more
# information, see Using Filters with Amazon Personalize.
# @return [String]
#
# @!attribute [rw] status
# The status of the filter.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/Filter AWS API Documentation
#
class Filter < Struct.new(:name, :filter_arn, :creation_date_time,
                          :last_updated_date_time, :dataset_group_arn,
                          :failure_reason, :filter_expression, :status)
  include Aws::Structure
end
# A short summary of a filter's attributes.
#
# @!attribute [rw] name
# The name of the filter.
# @return [String]
#
# @!attribute [rw] filter_arn
# The ARN of the filter.
# @return [String]
#
# @!attribute [rw] creation_date_time
# The time at which the filter was created.
# @return [Time]
#
# @!attribute [rw] last_updated_date_time
# The time at which the filter was last updated.
# @return [Time]
#
# @!attribute [rw] dataset_group_arn
# The ARN of the dataset group to which the filter belongs.
# @return [String]
#
# @!attribute [rw] failure_reason
# If the filter failed, the reason for the failure.
# @return [String]
#
# @!attribute [rw] status
# The status of the filter.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/FilterSummary AWS API Documentation
#
class FilterSummary < Struct.new(:name, :filter_arn, :creation_date_time,
                                 :last_updated_date_time, :dataset_group_arn,
                                 :failure_reason, :status)
  include Aws::Structure
end
# @note When making an API call, you may pass GetSolutionMetricsRequest
# data as a hash:
#
# {
# solution_version_arn: "Arn", # required
# }
#
# @!attribute [rw] solution_version_arn
# The Amazon Resource Name (ARN) of the solution version for which to
# get metrics.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/GetSolutionMetricsRequest AWS API Documentation
#
class GetSolutionMetricsRequest < Struct.new(:solution_version_arn)
  include Aws::Structure
end
# @!attribute [rw] solution_version_arn
# The same solution version ARN as specified in the request.
# @return [String]
#
# @!attribute [rw] metrics
# The metrics for the solution version.
# @return [Hash<String,Float>]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/GetSolutionMetricsResponse AWS API Documentation
#
class GetSolutionMetricsResponse < Struct.new(:solution_version_arn, :metrics)
  include Aws::Structure
end
# Describes the properties for hyperparameter optimization (HPO). For
# use with the bring-your-own-recipe feature. Do not use for Amazon
# Personalize native recipes.
#
# @note When making an API call, you may pass HPOConfig
# data as a hash:
#
# {
# hpo_objective: {
# type: "HPOObjectiveType",
# metric_name: "MetricName",
# metric_regex: "MetricRegex",
# },
# hpo_resource_config: {
# max_number_of_training_jobs: "HPOResource",
# max_parallel_training_jobs: "HPOResource",
# },
# algorithm_hyper_parameter_ranges: {
# integer_hyper_parameter_ranges: [
# {
# name: "ParameterName",
# min_value: 1,
# max_value: 1,
# },
# ],
# continuous_hyper_parameter_ranges: [
# {
# name: "ParameterName",
# min_value: 1.0,
# max_value: 1.0,
# },
# ],
# categorical_hyper_parameter_ranges: [
# {
# name: "ParameterName",
# values: ["CategoricalValue"],
# },
# ],
# },
# }
#
# @!attribute [rw] hpo_objective
# The metric to optimize during HPO.
# @return [Types::HPOObjective]
#
# @!attribute [rw] hpo_resource_config
# Describes the resource configuration for HPO.
# @return [Types::HPOResourceConfig]
#
# @!attribute [rw] algorithm_hyper_parameter_ranges
# The hyperparameters and their allowable ranges.
# @return [Types::HyperParameterRanges]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/HPOConfig AWS API Documentation
#
class HPOConfig < Struct.new(:hpo_objective, :hpo_resource_config,
                             :algorithm_hyper_parameter_ranges)
  include Aws::Structure
end
# The metric to optimize during hyperparameter optimization (HPO).
#
# @note When making an API call, you may pass HPOObjective
# data as a hash:
#
# {
# type: "HPOObjectiveType",
# metric_name: "MetricName",
# metric_regex: "MetricRegex",
# }
#
# @!attribute [rw] type
# The type of the metric. Valid values are `Maximize` and `Minimize`.
# @return [String]
#
# @!attribute [rw] metric_name
# The name of the metric.
# @return [String]
#
# @!attribute [rw] metric_regex
# A regular expression for finding the metric in the training job
# logs.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/HPOObjective AWS API Documentation
#
class HPOObjective < Struct.new(:type, :metric_name, :metric_regex)
  include Aws::Structure
end
# Describes the resource configuration for hyperparameter optimization
# (HPO).
#
# @note When making an API call, you may pass HPOResourceConfig
# data as a hash:
#
# {
# max_number_of_training_jobs: "HPOResource",
# max_parallel_training_jobs: "HPOResource",
# }
#
# @!attribute [rw] max_number_of_training_jobs
# The maximum number of training jobs when you create a solution
# version. The maximum value for `maxNumberOfTrainingJobs` is `40`.
# @return [String]
#
# @!attribute [rw] max_parallel_training_jobs
# The maximum number of parallel training jobs when you create a
# solution version. The maximum value for `maxParallelTrainingJobs` is
# `10`.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/HPOResourceConfig AWS API Documentation
#
class HPOResourceConfig < Struct.new(:max_number_of_training_jobs,
                                     :max_parallel_training_jobs)
  include Aws::Structure
end
# Specifies the hyperparameters and their ranges. Hyperparameters can be
# categorical, continuous, or integer-valued.
#
# @note When making an API call, you may pass HyperParameterRanges
# data as a hash:
#
# {
# integer_hyper_parameter_ranges: [
# {
# name: "ParameterName",
# min_value: 1,
# max_value: 1,
# },
# ],
# continuous_hyper_parameter_ranges: [
# {
# name: "ParameterName",
# min_value: 1.0,
# max_value: 1.0,
# },
# ],
# categorical_hyper_parameter_ranges: [
# {
# name: "ParameterName",
# values: ["CategoricalValue"],
# },
# ],
# }
#
# @!attribute [rw] integer_hyper_parameter_ranges
# The integer-valued hyperparameters and their ranges.
# @return [Array<Types::IntegerHyperParameterRange>]
#
# @!attribute [rw] continuous_hyper_parameter_ranges
# The continuous hyperparameters and their ranges.
# @return [Array<Types::ContinuousHyperParameterRange>]
#
# @!attribute [rw] categorical_hyper_parameter_ranges
# The categorical hyperparameters and their ranges.
# @return [Array<Types::CategoricalHyperParameterRange>]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/HyperParameterRanges AWS API Documentation
#
class HyperParameterRanges < Struct.new(:integer_hyper_parameter_ranges,
                                        :continuous_hyper_parameter_ranges,
                                        :categorical_hyper_parameter_ranges)
  include Aws::Structure
end
# Provides the name and range of an integer-valued hyperparameter.
#
# @note When making an API call, you may pass IntegerHyperParameterRange
# data as a hash:
#
# {
# name: "ParameterName",
# min_value: 1,
# max_value: 1,
# }
#
# @!attribute [rw] name
# The name of the hyperparameter.
# @return [String]
#
# @!attribute [rw] min_value
# The minimum allowable value for the hyperparameter.
# @return [Integer]
#
# @!attribute [rw] max_value
# The maximum allowable value for the hyperparameter.
# @return [Integer]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/IntegerHyperParameterRange AWS API Documentation
#
class IntegerHyperParameterRange < Struct.new(:name, :min_value, :max_value)
  include Aws::Structure
end
# Provide a valid value for the field or parameter.
#
# @!attribute [rw] message
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/InvalidInputException AWS API Documentation
#
class InvalidInputException < Struct.new(:message)
  include Aws::Structure
end
# The token is not valid.
#
# @!attribute [rw] message
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/InvalidNextTokenException AWS API Documentation
#
class InvalidNextTokenException < Struct.new(:message)
  include Aws::Structure
end
# The limit on the number of requests per second has been exceeded.
#
# @!attribute [rw] message
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/LimitExceededException AWS API Documentation
#
class LimitExceededException < Struct.new(:message)
  include Aws::Structure
end
# @note When making an API call, you may pass ListBatchInferenceJobsRequest
# data as a hash:
#
# {
# solution_version_arn: "Arn",
# next_token: "NextToken",
# max_results: 1,
# }
#
# @!attribute [rw] solution_version_arn
# The Amazon Resource Name (ARN) of the solution version from which
# the batch inference jobs were created.
# @return [String]
#
# @!attribute [rw] next_token
# The token to request the next page of results.
# @return [String]
#
# @!attribute [rw] max_results
# The maximum number of batch inference job results to return in each
# page. The default value is 100.
# @return [Integer]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/ListBatchInferenceJobsRequest AWS API Documentation
#
class ListBatchInferenceJobsRequest < Struct.new(:solution_version_arn,
                                                 :next_token, :max_results)
  include Aws::Structure
end
# @!attribute [rw] batch_inference_jobs
# A list containing information on each job that is returned.
# @return [Array<Types::BatchInferenceJobSummary>]
#
# @!attribute [rw] next_token
#   The token to use to retrieve the next page of results. The value is
# `null` when there are no more results to return.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/ListBatchInferenceJobsResponse AWS API Documentation
#
class ListBatchInferenceJobsResponse < Struct.new(:batch_inference_jobs,
                                                  :next_token)
  include Aws::Structure
end
# @note When making an API call, you may pass ListCampaignsRequest
# data as a hash:
#
# {
# solution_arn: "Arn",
# next_token: "NextToken",
# max_results: 1,
# }
#
# @!attribute [rw] solution_arn
# The Amazon Resource Name (ARN) of the solution to list the campaigns
# for. When a solution is not specified, all the campaigns associated
# with the account are listed.
# @return [String]
#
# @!attribute [rw] next_token
# A token returned from the previous call to `ListCampaigns` for
# getting the next set of campaigns (if they exist).
# @return [String]
#
# @!attribute [rw] max_results
# The maximum number of campaigns to return.
# @return [Integer]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/ListCampaignsRequest AWS API Documentation
#
class ListCampaignsRequest < Struct.new(:solution_arn, :next_token,
                                        :max_results)
  include Aws::Structure
end
# @!attribute [rw] campaigns
# A list of the campaigns.
# @return [Array<Types::CampaignSummary>]
#
# @!attribute [rw] next_token
# A token for getting the next set of campaigns (if they exist).
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/ListCampaignsResponse AWS API Documentation
#
class ListCampaignsResponse < Struct.new(:campaigns, :next_token)
  include Aws::Structure
end
# @note When making an API call, you may pass ListDatasetGroupsRequest
# data as a hash:
#
# {
# next_token: "NextToken",
# max_results: 1,
# }
#
# @!attribute [rw] next_token
# A token returned from the previous call to `ListDatasetGroups` for
# getting the next set of dataset groups (if they exist).
# @return [String]
#
# @!attribute [rw] max_results
# The maximum number of dataset groups to return.
# @return [Integer]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/ListDatasetGroupsRequest AWS API Documentation
#
class ListDatasetGroupsRequest < Struct.new(:next_token, :max_results)
  include Aws::Structure
end
# @!attribute [rw] dataset_groups
# The list of your dataset groups.
# @return [Array<Types::DatasetGroupSummary>]
#
# @!attribute [rw] next_token
# A token for getting the next set of dataset groups (if they exist).
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/ListDatasetGroupsResponse AWS API Documentation
#
class ListDatasetGroupsResponse < Struct.new(:dataset_groups, :next_token)
  include Aws::Structure
end
# @note When making an API call, you may pass ListDatasetImportJobsRequest
# data as a hash:
#
# {
# dataset_arn: "Arn",
# next_token: "NextToken",
# max_results: 1,
# }
#
# @!attribute [rw] dataset_arn
# The Amazon Resource Name (ARN) of the dataset to list the dataset
# import jobs for.
# @return [String]
#
# @!attribute [rw] next_token
# A token returned from the previous call to `ListDatasetImportJobs`
# for getting the next set of dataset import jobs (if they exist).
# @return [String]
#
# @!attribute [rw] max_results
# The maximum number of dataset import jobs to return.
# @return [Integer]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/ListDatasetImportJobsRequest AWS API Documentation
#
class ListDatasetImportJobsRequest < Struct.new(:dataset_arn, :next_token,
                                                :max_results)
  include Aws::Structure
end
# @!attribute [rw] dataset_import_jobs
# The list of dataset import jobs.
# @return [Array<Types::DatasetImportJobSummary>]
#
# @!attribute [rw] next_token
# A token for getting the next set of dataset import jobs (if they
# exist).
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/ListDatasetImportJobsResponse AWS API Documentation
#
class ListDatasetImportJobsResponse < Struct.new(:dataset_import_jobs,
                                                 :next_token)
  include Aws::Structure
end
# @note When making an API call, you may pass ListDatasetsRequest
# data as a hash:
#
# {
# dataset_group_arn: "Arn",
# next_token: "NextToken",
# max_results: 1,
# }
#
# @!attribute [rw] dataset_group_arn
# The Amazon Resource Name (ARN) of the dataset group that contains
# the datasets to list.
# @return [String]
#
# @!attribute [rw] next_token
#   A token returned from the previous call to `ListDatasets` for
#   getting the next set of datasets (if they exist).
# @return [String]
#
# @!attribute [rw] max_results
# The maximum number of datasets to return.
# @return [Integer]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/ListDatasetsRequest AWS API Documentation
#
class ListDatasetsRequest < Struct.new(:dataset_group_arn, :next_token,
                                       :max_results)
  include Aws::Structure
end
# @!attribute [rw] datasets
# An array of `Dataset` objects. Each object provides metadata
# information.
# @return [Array<Types::DatasetSummary>]
#
# @!attribute [rw] next_token
# A token for getting the next set of datasets (if they exist).
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/ListDatasetsResponse AWS API Documentation
#
class ListDatasetsResponse < Struct.new(:datasets, :next_token)
  include Aws::Structure
end
# @note When making an API call, you may pass ListEventTrackersRequest
# data as a hash:
#
# {
# dataset_group_arn: "Arn",
# next_token: "NextToken",
# max_results: 1,
# }
#
# @!attribute [rw] dataset_group_arn
# The ARN of a dataset group used to filter the response.
# @return [String]
#
# @!attribute [rw] next_token
# A token returned from the previous call to `ListEventTrackers` for
# getting the next set of event trackers (if they exist).
# @return [String]
#
# @!attribute [rw] max_results
# The maximum number of event trackers to return.
# @return [Integer]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/ListEventTrackersRequest AWS API Documentation
#
class ListEventTrackersRequest < Struct.new(:dataset_group_arn, :next_token,
                                            :max_results)
  include Aws::Structure
end
# @!attribute [rw] event_trackers
# A list of event trackers.
# @return [Array<Types::EventTrackerSummary>]
#
# @!attribute [rw] next_token
# A token for getting the next set of event trackers (if they exist).
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/ListEventTrackersResponse AWS API Documentation
#
class ListEventTrackersResponse < Struct.new(:event_trackers, :next_token)
  include Aws::Structure
end
# @note When making an API call, you may pass ListFiltersRequest
# data as a hash:
#
# {
# dataset_group_arn: "Arn",
# next_token: "NextToken",
# max_results: 1,
# }
#
# @!attribute [rw] dataset_group_arn
# The ARN of the dataset group that contains the filters.
# @return [String]
#
# @!attribute [rw] next_token
# A token returned from the previous call to `ListFilters` for getting
# the next set of filters (if they exist).
# @return [String]
#
# @!attribute [rw] max_results
# The maximum number of filters to return.
# @return [Integer]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/ListFiltersRequest AWS API Documentation
#
class ListFiltersRequest < Struct.new(:dataset_group_arn, :next_token,
                                      :max_results)
  include Aws::Structure
end
# @!attribute [rw] filters
# A list of returned filters.
# @return [Array<Types::FilterSummary>]
#
# @!attribute [rw] next_token
# A token for getting the next set of filters (if they exist).
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/ListFiltersResponse AWS API Documentation
#
class ListFiltersResponse < Struct.new(:filters, :next_token)
  include Aws::Structure
end
# @note When making an API call, you may pass ListRecipesRequest
# data as a hash:
#
# {
# recipe_provider: "SERVICE", # accepts SERVICE
# next_token: "NextToken",
# max_results: 1,
# }
#
# @!attribute [rw] recipe_provider
# The default is `SERVICE`.
# @return [String]
#
# @!attribute [rw] next_token
# A token returned from the previous call to `ListRecipes` for getting
# the next set of recipes (if they exist).
# @return [String]
#
# @!attribute [rw] max_results
# The maximum number of recipes to return.
# @return [Integer]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/ListRecipesRequest AWS API Documentation
#
class ListRecipesRequest < Struct.new(:recipe_provider, :next_token,
                                      :max_results)
  include Aws::Structure
end
# @!attribute [rw] recipes
# The list of available recipes.
# @return [Array<Types::RecipeSummary>]
#
# @!attribute [rw] next_token
# A token for getting the next set of recipes.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/ListRecipesResponse AWS API Documentation
#
class ListRecipesResponse < Struct.new(:recipes, :next_token)
  include Aws::Structure
end
# @note When making an API call, you may pass ListSchemasRequest
# data as a hash:
#
# {
# next_token: "NextToken",
# max_results: 1,
# }
#
# @!attribute [rw] next_token
# A token returned from the previous call to `ListSchemas` for getting
# the next set of schemas (if they exist).
# @return [String]
#
# @!attribute [rw] max_results
# The maximum number of schemas to return.
# @return [Integer]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/ListSchemasRequest AWS API Documentation
#
class ListSchemasRequest < Struct.new(:next_token, :max_results)
  include Aws::Structure
end
# @!attribute [rw] schemas
# A list of schemas.
# @return [Array<Types::DatasetSchemaSummary>]
#
# @!attribute [rw] next_token
# A token used to get the next set of schemas (if they exist).
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/ListSchemasResponse AWS API Documentation
#
class ListSchemasResponse < Struct.new(:schemas, :next_token)
  include Aws::Structure
end
# @note When making an API call, you may pass ListSolutionVersionsRequest
# data as a hash:
#
# {
# solution_arn: "Arn",
# next_token: "NextToken",
# max_results: 1,
# }
#
# @!attribute [rw] solution_arn
# The Amazon Resource Name (ARN) of the solution.
# @return [String]
#
# @!attribute [rw] next_token
# A token returned from the previous call to `ListSolutionVersions`
# for getting the next set of solution versions (if they exist).
# @return [String]
#
# @!attribute [rw] max_results
# The maximum number of solution versions to return.
# @return [Integer]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/ListSolutionVersionsRequest AWS API Documentation
#
class ListSolutionVersionsRequest < Struct.new(:solution_arn, :next_token,
                                               :max_results)
  include Aws::Structure
end
# @!attribute [rw] solution_versions
# A list of solution versions describing the version properties.
# @return [Array<Types::SolutionVersionSummary>]
#
# @!attribute [rw] next_token
# A token for getting the next set of solution versions (if they
# exist).
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/ListSolutionVersionsResponse AWS API Documentation
#
class ListSolutionVersionsResponse < Struct.new(:solution_versions,
                                                :next_token)
  include Aws::Structure
end
# @note When making an API call, you may pass ListSolutionsRequest
# data as a hash:
#
# {
# dataset_group_arn: "Arn",
# next_token: "NextToken",
# max_results: 1,
# }
#
# @!attribute [rw] dataset_group_arn
# The Amazon Resource Name (ARN) of the dataset group.
# @return [String]
#
# @!attribute [rw] next_token
# A token returned from the previous call to `ListSolutions` for
# getting the next set of solutions (if they exist).
# @return [String]
#
# @!attribute [rw] max_results
# The maximum number of solutions to return.
# @return [Integer]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/ListSolutionsRequest AWS API Documentation
#
class ListSolutionsRequest < Struct.new(:dataset_group_arn, :next_token,
                                        :max_results)
  include Aws::Structure
end
# @!attribute [rw] solutions
# A list of the current solutions.
# @return [Array<Types::SolutionSummary>]
#
# @!attribute [rw] next_token
# A token for getting the next set of solutions (if they exist).
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/ListSolutionsResponse AWS API Documentation
#
class ListSolutionsResponse < Struct.new(:solutions, :next_token)
  include Aws::Structure
end
# Provides information about a recipe. Each recipe provides an algorithm
# that Amazon Personalize uses in model training when you use the
# CreateSolution operation.
#
# @!attribute [rw] name
# The name of the recipe.
# @return [String]
#
# @!attribute [rw] recipe_arn
# The Amazon Resource Name (ARN) of the recipe.
# @return [String]
#
# @!attribute [rw] algorithm_arn
# The Amazon Resource Name (ARN) of the algorithm that Amazon
# Personalize uses to train the model.
# @return [String]
#
# @!attribute [rw] feature_transformation_arn
# The ARN of the FeatureTransformation object.
# @return [String]
#
# @!attribute [rw] status
# The status of the recipe.
# @return [String]
#
# @!attribute [rw] description
# The description of the recipe.
# @return [String]
#
# @!attribute [rw] creation_date_time
#   The date and time (in Unix time) that the recipe was created.
# @return [Time]
#
# @!attribute [rw] recipe_type
# One of the following values:
#
# * PERSONALIZED\_RANKING
#
# * RELATED\_ITEMS
#
# * USER\_PERSONALIZATION
# @return [String]
#
# @!attribute [rw] last_updated_date_time
#   The date and time (in Unix time) that the recipe was last updated.
# @return [Time]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/Recipe AWS API Documentation
#
class Recipe < Struct.new(:name, :recipe_arn, :algorithm_arn,
                          :feature_transformation_arn, :status, :description,
                          :creation_date_time, :recipe_type,
                          :last_updated_date_time)
  include Aws::Structure
end
# Provides a summary of the properties of a recipe. For a complete
# listing, call the DescribeRecipe API.
#
# @!attribute [rw] name
# The name of the recipe.
# @return [String]
#
# @!attribute [rw] recipe_arn
# The Amazon Resource Name (ARN) of the recipe.
# @return [String]
#
# @!attribute [rw] status
# The status of the recipe.
# @return [String]
#
# @!attribute [rw] creation_date_time
# The date and time (in Unix time) that the recipe was created.
# @return [Time]
#
# @!attribute [rw] last_updated_date_time
# The date and time (in Unix time) that the recipe was last updated.
# @return [Time]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/RecipeSummary AWS API Documentation
#
class RecipeSummary < Struct.new(:name, :recipe_arn, :status,
                                 :creation_date_time, :last_updated_date_time)
  include Aws::Structure
end
# The specified resource already exists.
#
# @!attribute [rw] message
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/ResourceAlreadyExistsException AWS API Documentation
#
class ResourceAlreadyExistsException < Struct.new(:message)
  include Aws::Structure
end
# The specified resource is in use.
#
# @!attribute [rw] message
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/ResourceInUseException AWS API Documentation
#
class ResourceInUseException < Struct.new(:message)
  include Aws::Structure
end
# Could not find the specified resource.
#
# @!attribute [rw] message
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/ResourceNotFoundException AWS API Documentation
#
class ResourceNotFoundException < Struct.new(:message)
  include Aws::Structure
end
# The configuration details of an Amazon S3 input or output bucket.
#
# @note When making an API call, you may pass S3DataConfig
# data as a hash:
#
# {
# path: "S3Location", # required
# kms_key_arn: "KmsKeyArn",
# }
#
# @!attribute [rw] path
# The file path of the Amazon S3 bucket.
# @return [String]
#
# @!attribute [rw] kms_key_arn
# The Amazon Resource Name (ARN) of the Amazon Key Management Service
# (KMS) key that Amazon Personalize uses to encrypt or decrypt the
# input and output files of a batch inference job.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/S3DataConfig AWS API Documentation
#
class S3DataConfig < Struct.new(:path, :kms_key_arn)
  include Aws::Structure
end
# An object that provides information about a solution. A solution is a
# trained model that can be deployed as a campaign.
#
# @!attribute [rw] name
# The name of the solution.
# @return [String]
#
# @!attribute [rw] solution_arn
# The ARN of the solution.
# @return [String]
#
# @!attribute [rw] perform_hpo
# Whether to perform hyperparameter optimization (HPO) on the chosen
# recipe. The default is `false`.
# @return [Boolean]
#
# @!attribute [rw] perform_auto_ml
# When true, Amazon Personalize performs a search for the best
# USER\_PERSONALIZATION recipe from the list specified in the solution
# configuration (`recipeArn` must not be specified). When false (the
# default), Amazon Personalize uses `recipeArn` for training.
# @return [Boolean]
#
# @!attribute [rw] recipe_arn
# The ARN of the recipe used to create the solution.
# @return [String]
#
# @!attribute [rw] dataset_group_arn
# The Amazon Resource Name (ARN) of the dataset group that provides
# the training data.
# @return [String]
#
# @!attribute [rw] event_type
# The event type (for example, 'click' or 'like') that is used for
# training the model.
# @return [String]
#
# @!attribute [rw] solution_config
# Describes the configuration properties for the solution.
# @return [Types::SolutionConfig]
#
# @!attribute [rw] auto_ml_result
# When `performAutoML` is true, specifies the best recipe found.
# @return [Types::AutoMLResult]
#
# @!attribute [rw] status
# The status of the solution.
#
# A solution can be in one of the following states:
#
# * CREATE PENDING > CREATE IN\_PROGRESS > ACTIVE -or- CREATE
# FAILED
#
# * DELETE PENDING > DELETE IN\_PROGRESS
# @return [String]
#
# @!attribute [rw] creation_date_time
# The creation date and time (in Unix time) of the solution.
# @return [Time]
#
# @!attribute [rw] last_updated_date_time
# The date and time (in Unix time) that the solution was last updated.
# @return [Time]
#
# @!attribute [rw] latest_solution_version
# Describes the latest version of the solution, including the status
# and the ARN.
# @return [Types::SolutionVersionSummary]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/Solution AWS API Documentation
#
class Solution < Struct.new(:name, :solution_arn, :perform_hpo,
                            :perform_auto_ml, :recipe_arn, :dataset_group_arn,
                            :event_type, :solution_config, :auto_ml_result,
                            :status, :creation_date_time,
                            :last_updated_date_time, :latest_solution_version)
  include Aws::Structure
end
# Describes the configuration properties for the solution.
#
# @note When making an API call, you may pass SolutionConfig
# data as a hash:
#
# {
# event_value_threshold: "EventValueThreshold",
# hpo_config: {
# hpo_objective: {
# type: "HPOObjectiveType",
# metric_name: "MetricName",
# metric_regex: "MetricRegex",
# },
# hpo_resource_config: {
# max_number_of_training_jobs: "HPOResource",
# max_parallel_training_jobs: "HPOResource",
# },
# algorithm_hyper_parameter_ranges: {
# integer_hyper_parameter_ranges: [
# {
# name: "ParameterName",
# min_value: 1,
# max_value: 1,
# },
# ],
# continuous_hyper_parameter_ranges: [
# {
# name: "ParameterName",
# min_value: 1.0,
# max_value: 1.0,
# },
# ],
# categorical_hyper_parameter_ranges: [
# {
# name: "ParameterName",
# values: ["CategoricalValue"],
# },
# ],
# },
# },
# algorithm_hyper_parameters: {
# "ParameterName" => "ParameterValue",
# },
# feature_transformation_parameters: {
# "ParameterName" => "ParameterValue",
# },
# auto_ml_config: {
# metric_name: "MetricName",
# recipe_list: ["Arn"],
# },
# }
#
# @!attribute [rw] event_value_threshold
# Only events with a value greater than or equal to this threshold are
# used for training a model.
# @return [String]
#
# @!attribute [rw] hpo_config
# Describes the properties for hyperparameter optimization (HPO).
# @return [Types::HPOConfig]
#
# @!attribute [rw] algorithm_hyper_parameters
# Lists the hyperparameter names and ranges.
# @return [Hash<String,String>]
#
# @!attribute [rw] feature_transformation_parameters
# Lists the feature transformation parameters.
# @return [Hash<String,String>]
#
# @!attribute [rw] auto_ml_config
# The AutoMLConfig object containing a list of recipes to search when
# AutoML is performed.
# @return [Types::AutoMLConfig]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/SolutionConfig AWS API Documentation
#
class SolutionConfig < Struct.new(
  :event_value_threshold,
  :hpo_config,
  :algorithm_hyper_parameters,
  :feature_transformation_parameters,
  :auto_ml_config)
  # Generated AWS API shape; member order matters for positional construction.
  # Aws::Structure (defined in the SDK core, not this file) mixes in the
  # shared struct behaviour.
  include Aws::Structure
end
# Provides a summary of the properties of a solution. For a complete
# listing, call the DescribeSolution API.
#
# @!attribute [rw] name
# The name of the solution.
# @return [String]
#
# @!attribute [rw] solution_arn
# The Amazon Resource Name (ARN) of the solution.
# @return [String]
#
# @!attribute [rw] status
# The status of the solution.
#
# A solution can be in one of the following states:
#
# * CREATE PENDING > CREATE IN\_PROGRESS > ACTIVE -or- CREATE
# FAILED
#
# * DELETE PENDING > DELETE IN\_PROGRESS
# @return [String]
#
# @!attribute [rw] creation_date_time
# The date and time (in Unix time) that the solution was created.
# @return [Time]
#
# @!attribute [rw] last_updated_date_time
# The date and time (in Unix time) that the solution was last updated.
# @return [Time]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/SolutionSummary AWS API Documentation
#
class SolutionSummary < Struct.new(
  :name,
  :solution_arn,
  :status,
  :creation_date_time,
  :last_updated_date_time)
  # Generated AWS API shape; Aws::Structure (SDK core) supplies shared behaviour.
  include Aws::Structure
end
# An object that provides information about a specific version of a
# Solution.
#
# @!attribute [rw] solution_version_arn
# The ARN of the solution version.
# @return [String]
#
# @!attribute [rw] solution_arn
# The ARN of the solution.
# @return [String]
#
# @!attribute [rw] perform_hpo
# Whether to perform hyperparameter optimization (HPO) on the chosen
# recipe. The default is `false`.
# @return [Boolean]
#
# @!attribute [rw] perform_auto_ml
# When true, Amazon Personalize searches for the most optimal recipe
# according to the solution configuration. When false (the default),
# Amazon Personalize uses `recipeArn`.
# @return [Boolean]
#
# @!attribute [rw] recipe_arn
# The ARN of the recipe used in the solution.
# @return [String]
#
# @!attribute [rw] event_type
# The event type (for example, 'click' or 'like') that is used for
# training the model.
# @return [String]
#
# @!attribute [rw] dataset_group_arn
# The Amazon Resource Name (ARN) of the dataset group providing the
# training data.
# @return [String]
#
# @!attribute [rw] solution_config
# Describes the configuration properties for the solution.
# @return [Types::SolutionConfig]
#
# @!attribute [rw] training_hours
# The time used to train the model. You are billed for the time it
# takes to train a model. This field is visible only after Amazon
# Personalize successfully trains a model.
# @return [Float]
#
# @!attribute [rw] training_mode
# The scope of training used to create the solution version. The
# `FULL` option trains the solution version based on the entirety of
# the input solution's training data, while the `UPDATE` option
# processes only the training data that has changed since the creation
# of the last solution version. Choose `UPDATE` when you want to start
# recommending items added to the dataset without retraining the
# model.
#
# The `UPDATE` option can only be used after you've created a
# solution version with the `FULL` option and the training solution
# uses the native-recipe-hrnn-coldstart.
# @return [String]
#
# @!attribute [rw] tuned_hpo_params
# If hyperparameter optimization was performed, contains the
# hyperparameter values of the best performing model.
# @return [Types::TunedHPOParams]
#
# @!attribute [rw] status
# The status of the solution version.
#
# A solution version can be in one of the following states:
#
# * CREATE PENDING
#
# * CREATE IN\_PROGRESS
#
# * ACTIVE
#
# * CREATE FAILED
# @return [String]
#
# @!attribute [rw] failure_reason
# If training a solution version fails, the reason for the failure.
# @return [String]
#
# @!attribute [rw] creation_date_time
# The date and time (in Unix time) that this version of the solution
# was created.
# @return [Time]
#
# @!attribute [rw] last_updated_date_time
# The date and time (in Unix time) that the solution was last updated.
# @return [Time]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/SolutionVersion AWS API Documentation
#
class SolutionVersion < Struct.new(
  :solution_version_arn,
  :solution_arn,
  :perform_hpo,
  :perform_auto_ml,
  :recipe_arn,
  :event_type,
  :dataset_group_arn,
  :solution_config,
  :training_hours,
  :training_mode,
  :tuned_hpo_params,
  :status,
  :failure_reason,
  :creation_date_time,
  :last_updated_date_time)
  # Generated AWS API shape; member order matters for positional construction.
  # Aws::Structure (SDK core) supplies shared behaviour.
  include Aws::Structure
end
# Provides a summary of the properties of a solution version. For a
# complete listing, call the DescribeSolutionVersion API.
#
# @!attribute [rw] solution_version_arn
# The Amazon Resource Name (ARN) of the solution version.
# @return [String]
#
# @!attribute [rw] status
# The status of the solution version.
#
# A solution version can be in one of the following states:
#
# * CREATE PENDING > CREATE IN\_PROGRESS > ACTIVE -or- CREATE
# FAILED
#
# ^
# @return [String]
#
# @!attribute [rw] creation_date_time
# The date and time (in Unix time) that this version of a solution was
# created.
# @return [Time]
#
# @!attribute [rw] last_updated_date_time
# The date and time (in Unix time) that the solution version was last
# updated.
# @return [Time]
#
# @!attribute [rw] failure_reason
# If a solution version fails, the reason behind the failure.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/SolutionVersionSummary AWS API Documentation
#
class SolutionVersionSummary < Struct.new(
  :solution_version_arn,
  :status,
  :creation_date_time,
  :last_updated_date_time,
  :failure_reason)
  # Generated AWS API shape; Aws::Structure (SDK core) supplies shared behaviour.
  include Aws::Structure
end
# If hyperparameter optimization (HPO) was performed, contains the
# hyperparameter values of the best performing model.
#
# @!attribute [rw] algorithm_hyper_parameters
# A list of the hyperparameter values of the best performing model.
# @return [Hash<String,String>]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/TunedHPOParams AWS API Documentation
#
class TunedHPOParams < Struct.new(
  :algorithm_hyper_parameters)
  # Generated AWS API shape; Aws::Structure (SDK core) supplies shared behaviour.
  include Aws::Structure
end
# @note When making an API call, you may pass UpdateCampaignRequest
# data as a hash:
#
# {
# campaign_arn: "Arn", # required
# solution_version_arn: "Arn",
# min_provisioned_tps: 1,
# }
#
# @!attribute [rw] campaign_arn
# The Amazon Resource Name (ARN) of the campaign.
# @return [String]
#
# @!attribute [rw] solution_version_arn
# The ARN of a new solution version to deploy.
# @return [String]
#
# @!attribute [rw] min_provisioned_tps
# Specifies the requested minimum provisioned transactions
# (recommendations) per second that Amazon Personalize will support.
# @return [Integer]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/UpdateCampaignRequest AWS API Documentation
#
class UpdateCampaignRequest < Struct.new(
  :campaign_arn,
  :solution_version_arn,
  :min_provisioned_tps)
  # Generated AWS API shape; Aws::Structure (SDK core) supplies shared behaviour.
  include Aws::Structure
end
# @!attribute [rw] campaign_arn
# The same campaign ARN as given in the request.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/UpdateCampaignResponse AWS API Documentation
#
class UpdateCampaignResponse < Struct.new(
  :campaign_arn)
  # Generated AWS API shape; Aws::Structure (SDK core) supplies shared behaviour.
  include Aws::Structure
end
end
end
| 31.895385 | 132 | 0.615706 |
08772db1ee758619bf5bc90a1fbf353e7e5d60a0
| 1,424 |
# frozen_string_literal: true
module EvilEvents::Core::Events::EventExtensions
  # Mixin that gives an event class a typed payload: each including class gets
  # the `payload` attribute DSL and each subclass gets its own Payload class.
  #
  # @api private
  # @since 0.1.0
  module Payloadable
    class << self
      # Include hook: extends the includer with the class-level payload DSL.
      #
      # @param base_class [Class]
      #
      # @since 0.1.0
      def included(base_class)
        base_class.extend(ClassMethods)
      end
    end

    private

    # Builds this event's payload object from keyword attributes.
    #
    # @return [Class{AbstractPayload}]
    #
    # @since 0.1.0
    def build_payload(**payload_attributes)
      self.class.payload_class.new(**payload_attributes)
    end

    # @since 0.1.0
    module ClassMethods
      # Inherited hook: gives every subclass its own `Payload` constant so
      # attribute definitions are not shared between sibling event classes.
      #
      # @param child_class [Class]
      #
      # @since 0.1.0
      def inherited(child_class)
        child_class.const_set(:Payload, Class.new(AbstractPayload))
        super
      end

      # @return [Class{AbstractPayload}]
      #
      # @since 0.2.0
      def payload_class
        const_get(:Payload)
      end

      # Declares a payload attribute. A Symbol `type` is resolved through the
      # event system's type registry (consuming `options`); any other type is
      # used as-is and `options` are silently ignored in that case.
      #
      # @param key [Symbol]
      # @param type [EvilEvents::Shared::Types::Any]
      # @param options [Hash]
      # @return void
      #
      # @since 0.1.0
      def payload(key, type = EvilEvents::Types::Any, **options)
        if type.is_a?(Symbol)
          type = EvilEvents::Core::Bootstrap[:event_system].resolve_type(type, **options)
        end
        payload_class.attribute(key, type)
      end

      # @return [Array<Symbol>]
      #
      # @since 0.1.0
      def payload_fields
        payload_class.attribute_names
      end
    end
  end
end
| 21.907692 | 89 | 0.587781 |
d597dc87bcebb450ad6211603d19a9923f09d186
| 11,740 |
require "application_system_test_case"
class InvitationDetailsTest < ApplicationSystemTestCase
# Scopes Capybara interactions to the table row whose data-id matches the
# given membership record.
def within_membership_row(membership, &block)
  within("tr[data-id='#{membership.id}']", &block)
end
def within_current_memberships_table
within "tbody[data-model='Membership'][data-scope='current']" do
yield
end
end
def within_former_memberships_table
within "tbody[data-model='Membership'][data-scope='tombstones']" do
yield
end
end
@@test_devices.each do |device_name, display_details|
  test "visitors can sign-up and manage team members with subscriptions #{subscriptions_enabled? ? "enabled" : "disabled"} on a #{device_name}" do
    # --- Phase 1: Hanako signs up and creates "The Testing Team". ---
    resize_for(display_details)
    be_invited_to_sign_up
    visit root_path
    sign_up_from_homepage_for(display_details)
    # Fill in matching credentials. (NOTE(review): a stale comment here said
    # "try non-matching passwords", but both fields use the same example_password.)
    fill_in "Your Email Address", with: "[email protected]"
    fill_in "Set Password", with: example_password
    fill_in "Confirm Password", with: example_password
    click_on "Sign Up"
    complete_pricing_page if subscriptions_enabled?
    # we should now be on an onboarding step.
    assert page.has_content?("Tell us about you")
    fill_in "First Name", with: "Hanako"
    fill_in "Last Name", with: "Tanaka"
    fill_in "Your Team Name", with: "The Testing Team"
    click_on "Next"
    assert page.has_content?("The Testing Team’s Dashboard")
    within_team_menu_for(display_details) do
      click_on "Team Members"
    end
    first_membership = Membership.order(:id).last
    assert page.has_content?("The Testing Team Team Members")

    # --- Phase 2: the cancel_path param is whitelisted against open redirects. ---
    # Paths that begin with "/account/" are whitelisted when accessing
    # invitation#new while passing a cancel_path to the params.
    hanakos_team = Team.first
    path_for_new_invitation = /invitations\/new/
    path_with_cancel_path_params = /invitations\/new\?cancel_path=/
    visit new_account_team_invitation_path(hanakos_team, cancel_path: account_team_memberships_path(hanakos_team))
    assert page.current_url.match?(path_with_cancel_path_params)
    # Make sure we cannot embed JavaScript when accessing the new invitation path.
    js_str = "javascript:alert('Testing')"
    visit new_account_team_invitation_path(hanakos_team, cancel_path: js_str)
    assert page.current_path.match?(path_for_new_invitation)
    assert !page.current_path.match?(js_str)
    assert !page.current_path.match?(path_with_cancel_path_params)
    # Paths that don't start with /account/ are not accepted either.
    faulty_link = "/memberships/"
    assert page.current_path.match?(path_for_new_invitation)
    visit new_account_team_invitation_path(hanakos_team, cancel_path: faulty_link)
    assert !page.current_path.match?(path_with_cancel_path_params)

    # --- Phase 3: invite Takashi as an admin, remove him, then re-invite. ---
    perform_enqueued_jobs do
      clear_emails
      fill_in "Email Address", with: "[email protected]"
      fill_in "First Name", with: "Takashi"
      fill_in "Last Name", with: "Yamaguchi"
      find("label", text: "Invite as Team Administrator").click
      click_on "Create Invitation"
      assert page.has_content?("Invitation was successfully created.")
    end
    # we need the id of the membership that's created so we can address its row in the table specifically.
    invited_membership = Membership.order(:id).last
    # NOTE(review): the return value of this call is discarded — presumably it
    # forces the invitation association to load; confirm it is still needed.
    invited_membership.invitation
    within_current_memberships_table do
      assert page.has_content?("Takashi Yamaguchi")
      within_membership_row(invited_membership) do
        assert page.has_content?("Invited")
        assert page.has_content?("Team Administrator")
        click_on "Details"
      end
    end
    # TODO we should first test that a canceled invitation can't be claimed.
    assert page.has_content?("Invitation Details")
    accept_alert { click_on "Remove from Team" }
    assert page.has_content?("That user has been successfully removed from the team.")
    # click the link in the email.
    # yes, this is a totally valid thing to do if you have access to the invitation email.
    # practically speaking this would almost never happen, but it's a legitimate way to test this functionality without
    # introducing more time-consuming steps.
    open_email "[email protected]"
    current_email.click_link "Join The Testing Team"
    # if we're back on the team's dashboard, then we're *not* on the accept invitation page, which means the
    # invitation wasn't claimable.
    assert page.has_content?("The Testing Team’s Dashboard")
    within_team_menu_for(display_details) do
      click_on "Team Members"
    end
    assert page.has_content?("The Testing Team Team Members")
    perform_enqueued_jobs do
      clear_emails
      within_former_memberships_table do
        assert page.has_content?("Takashi Yamaguchi")
        within_membership_row(invited_membership) do
          assert page.has_no_content?("Invited")
          assert page.has_content?("Team Administrator")
        end
      end
      accept_alert { click_on "Re-Invite to Team" }
      assert page.has_content?("The user has been successfully re-invited. They will receive an email to rejoin the team.")
    end

    # --- Phase 4: Takashi claims the invitation with a different email/name. ---
    # sign out.
    sign_out_for(display_details)
    # click the link in the email.
    open_email "[email protected]"
    current_email.click_link "Join The Testing Team"
    assert page.has_content?("Create Your Account")
    # this email address is purposefully different than the one they were invited via.
    fill_in "Your Email Address", with: "[email protected]"
    fill_in "Set Password", with: another_example_password
    fill_in "Confirm Password", with: another_example_password
    click_on "Sign Up"
    # this first name is purposefully different than the name they were invited with.
    # assert page.has_content?('Create Your Account')
    assert page.has_content?("Tell us about you")
    fill_in "First Name", with: "Taka"
    fill_in "Last Name", with: "Yamaguchi"
    click_on "Next"
    assert page.has_content?("The Testing Team’s Dashboard")
    within_team_menu_for(display_details) do
      click_on "Team Members"
    end
    assert page.has_content?("Hanako Tanaka")
    last_membership = Membership.order(:id).last
    within_current_memberships_table do
      assert page.has_content?("Taka Yamaguchi")
      within_membership_row(last_membership) do
        assert page.has_no_content?("Invited")
        assert page.has_content?("Team Administrator")
        click_on "Details"
      end
    end
    assert page.has_content?("Taka Yamaguchi’s Membership on The Testing Team")

    # --- Phase 5: Taka creates "Another Team" and invites Hanako's other address. ---
    within_team_menu_for(display_details) do
      click_on "Add New Team"
    end
    if invitation_only?
      assert page.has_content?("Creating new teams is currently limited")
      # this will take them to the create new team page.
      be_invited_to_sign_up
    end
    assert page.has_content?("Create a New Team")
    fill_in "Team Name", with: "Another Team"
    click_on "Create Team"
    if subscriptions_enabled?
      complete_pricing_page
      # TODO this feels like a bug. after the subscription creation, we should go to the dashboard.
      assert page.has_content?("Your Teams")
      click_on "Another Team"
    end
    assert page.has_content?("Another Team’s Dashboard")
    within_team_menu_for(display_details) do
      click_on "Team Members"
    end
    assert page.has_content?("Another Team Team Members")
    click_on "Invite a New Team Member"
    assert page.has_content?("New Invitation Details")
    perform_enqueued_jobs do
      clear_emails
      # this is specifically a different email address than the one they signed up with originally.
      fill_in "Email Address", with: "[email protected]"
      click_on "Create Invitation"
      assert page.has_content?("Invitation was successfully created.")
    end
    # sign out.
    sign_out_for(display_details)
    # we need the id of the membership that's created so we can address its row in the table specifically.
    invited_membership = Membership.order(:id).last
    # # click the link in the email.
    open_email "[email protected]"
    current_email.click_link "Join Another Team"
    assert page.has_content?("Create Your Account")

    # --- Phase 6: Hanako accepts while signed in under a different address. ---
    click_link "Already have an account?"
    assert page.has_content?("Sign In")
    fill_in "Your Email Address", with: "[email protected]"
    click_on "Next" if two_factor_authentication_enabled?
    fill_in "Your Password", with: example_password
    click_on "Sign In"
    assert page.has_content?("Join Another Team")
    assert page.has_content?("Taka Yamaguchi has invited you to join Another Team")
    assert page.has_content?("This invitation was emailed to [email protected]")
    assert page.has_content?("but you're currently signed in as [email protected]")
    click_on "Join Another Team"
    assert page.has_content?("Welcome to Another Team!")
    within_team_menu_for(display_details) do
      click_on "Team Members"
    end
    last_membership = Membership.order(:id).last
    within_current_memberships_table do
      assert page.has_content?("Hanako Tanaka")
      within_membership_row(last_membership) do
        assert page.has_no_content?("Invited")
        assert page.has_content?("Viewer")
        click_on "Details"
      end
    end

    # --- Phase 7: Hanako leaves Another Team, is demoted, then leaves The Testing Team. ---
    accept_alert { click_on "Leave This Team" }
    assert page.has_content?("You've successfully removed yourself from Another Team.")
    assert page.has_content?("The Testing Team’s Dashboard")
    within_team_menu_for(display_details) do
      click_on "Team Members"
    end
    assert page.has_content?("The Testing Team Team Members")
    within_current_memberships_table do
      assert page.has_content?("Hanako Tanaka")
      within_membership_row(first_membership) do
        assert page.has_content?("Team Administrator")
        click_on "Details"
      end
    end
    assert page.has_content?("Hanako Tanaka’s Membership on The Testing Team")
    accept_alert { click_on "Demote from Admin" }
    assert page.has_content?("The Testing Team Team Members")
    within_current_memberships_table do
      assert page.has_content?("Hanako Tanaka")
      within_membership_row(first_membership) do
        assert page.has_no_content?("Team Administrator")
        click_on "Details"
      end
    end
    assert page.has_content?("Hanako Tanaka’s Membership on The Testing Team")
    # since the user is no longer an admin, they shouldn't see either of these options anymore.
    assert page.has_content?("Viewer")
    assert page.has_no_content?("Promote to Admin")
    assert page.has_no_content?("Demote from Admin")
    accept_alert { click_on "Leave This Team" }
    # if this is happening, it shouldn't be.
    assert page.has_no_content?("You are not authorized to access this page.")
    assert page.has_content?("You've successfully removed yourself from The Testing Team.")
    assert page.has_content?("Join a Team")
    assert page.has_content?("The account [email protected] is not currently a member of any teams.")
    assert page.has_content?("Accept an invitation")
    assert page.has_content?("Log out of this account")
    assert page.has_content?("Create a new team")
    click_on "Logout"
  end
end
end
| 37.151899 | 148 | 0.694037 |
28a7a77cc7d7b71c95b95c424c12984941637eab
| 3,307 |
# Fluentd output plugin that forwards numeric record fields to a Ganglia
# gmond endpoint over UDP using the gmetric wire format.
class Fluent::GangliaOutput < Fluent::Output
  Fluent::Plugin.register_output('ganglia', self)

  HOSTNAME = Socket.gethostname
  HOSTADDR = IPSocket.getaddress(HOSTNAME)

  ## Define 'log' method to support log method for v0.10.42 or earlier
  unless method_defined?(:log)
    define_method("log") { $log }
  end

  def initialize
    super
    # Loaded lazily so the agent can boot even when this plugin is unused.
    require "gmetric"
    require "socket"
  end

  config_param :port, :integer, :default => 8649
  config_param :host, :string, :default => '127.0.0.1'
  config_param :name_keys, :string, :default => nil
  config_param :name_key_pattern, :string, :default => nil
  config_param :add_key_prefix, :string, :default => nil
  config_param :value_type, :string, :default => 'uint32'
  config_param :units, :string, :default => ''
  config_param :group, :string, :default => ''
  config_param :title, :string, :default => ''
  config_param :tmax, :integer, :default => 60
  config_param :dmax, :integer, :default => 0
  config_param :slope, :string, :default => 'both'
  config_param :spoof, :string, :default => nil
  config_param :bind_hostname, :bool, :default => false

  # Validates that exactly one of name_keys / name_key_pattern is configured
  # and pre-parses it (comma-split list or compiled Regexp respectively).
  def configure(conf)
    super
    if @name_keys.nil? and @name_key_pattern.nil?
      raise Fluent::ConfigError, "missing both of name_keys and name_key_pattern"
    end
    if not @name_keys.nil? and not @name_key_pattern.nil?
      raise Fluent::ConfigError, "cannot specify both of name_keys and name_key_pattern"
    end
    if @name_keys
      @name_keys = @name_keys.split(/ *, */)
    end
    if @name_key_pattern
      @name_key_pattern = Regexp.new(@name_key_pattern)
    end
  end

  def start
    super
  end

  def shutdown
    super
  end

  # Packs one metric and sends it to gmond over UDP. Failures are logged, not
  # raised, so one bad metric does not abort the emit loop.
  #
  # NOTE(review): this method shadows Object#send for this class; internal
  # callers (see #emit) intend that, but renaming to e.g. send_metric would
  # be safer — confirm no external callers before doing so.
  def send(tag, name, value, time)
    if @add_key_prefix
      # Prefix is joined with a single space (matches original behaviour).
      name = "#{@add_key_prefix} #{name}"
    end
    # Explicitly track success instead of relying on the parser-implicit nil
    # the previous implementation depended on.
    status = false
    begin
      log.debug("ganglia: #{name}: #{value}, ts: #{time}")
      gmetric = Ganglia::GMetric.pack(
        :name => name,
        :value => value.to_s,
        :type => @value_type,
        :units => @units,
        :tmax => @tmax,
        :dmax => @dmax,
        :title => @title,
        :group => @group,
        :slope => @slope,
        :spoof => @spoof ? 1 : 0,
        :hostname => @spoof ? @spoof : HOSTNAME,
      )
      conn = UDPSocket.new
      begin
        conn.bind(HOSTADDR, 0) if @bind_hostname
        # gmetric packs two datagrams: metadata and value.
        conn.send gmetric[0], 0, @host, @port
        conn.send gmetric[1], 0, @host, @port
      ensure
        # Bug fix: the socket used to leak whenever bind/send raised, because
        # close was only reached on the success path.
        conn.close
      end
      status = true
    rescue IOError, EOFError, SystemCallError
      log.warn "Ganglia::GMetric.send raises exception: #{$!.class}, '#{$!.message}'"
    end
    unless status
      log.warn "failed to send to ganglia: #{@host}:#{@port}, '#{name}': #{value}"
    end
  end

  # For each record, emits either every configured name_key that is present,
  # or every key matching name_key_pattern.
  def emit(tag, es, chain)
    if @name_keys
      es.each {|time,record|
        @name_keys.each {|name|
          if record[name]
            send(tag, name, record[name], time)
          end
        }
      }
    else # for name_key_pattern
      es.each {|time,record|
        record.keys.each {|key|
          if @name_key_pattern.match(key) and record[key]
            send(tag, key, record[key], time)
          end
        }
      }
    end
    chain.next
  end
end
| 29.526786 | 88 | 0.57726 |
8734a5ba8eed578e56a51a892dc5dc6486c7f14d
| 6,033 |
require 'simple_deploy/stack/deployment'
require 'simple_deploy/stack/execute'
require 'simple_deploy/stack/output_mapper'
require 'simple_deploy/stack/stack_attribute_formatter'
require 'simple_deploy/stack/stack_creator'
require 'simple_deploy/stack/stack_destroyer'
require 'simple_deploy/stack/stack_formatter'
require 'simple_deploy/stack/stack_lister'
require 'simple_deploy/stack/stack_reader'
require 'simple_deploy/stack/stack_updater'
require 'simple_deploy/stack/status'
module SimpleDeploy
class Stack
  # @param args [Hash] expects :environment and :name; optional :internal /
  #   :external flags choose which instance IP addresses #instances returns.
  def initialize(args)
    @environment = args[:environment]
    @name = args[:name]
    @config = SimpleDeploy.config
    @logger = SimpleDeploy.logger
    # Double-negation coerces the (possibly missing) flags to real booleans.
    @use_internal_ips = !!args[:internal]
    @use_external_ips = !!args[:external]
    @entry = Entry.new :name => @name
  end

  # Creates the stack from args[:template] and persists the formatted
  # attributes on the stack's entry.
  def create(args)
    attributes = stack_attribute_formatter.updated_attributes args[:attributes]
    @template_file = args[:template]
    @entry.set_attributes attributes
    stack_creator.create
    @entry.save
  end

  # Updates the stack if no deployment lock is held. With args[:force], the
  # lock is cleared first and we back off until the stack reports clear.
  # Returns true when the update ran, false when blocked by the lock.
  def update(args)
    if !deployment.clear_for_deployment? && args[:force]
      deployment.clear_deployment_lock true
      # Exponentially spaced waits until the forced clear takes effect.
      Backoff.exp_periods do |p|
        sleep p
        break if deployment.clear_for_deployment?
      end
    end
    if deployment.clear_for_deployment?
      @logger.info "Updating #{@name}."
      attributes = stack_attribute_formatter.updated_attributes args[:attributes]
      @template_body = args[:template_body] || template
      @entry.set_attributes attributes
      stack_updater.update_stack attributes
      @logger.info "Update complete for #{@name}."
      @entry.save
      true
    else
      @logger.info "Not clear to update."
      false
    end
  end

  # Variant of #update that skips the lock check; only honoured when the
  # caller is a Stack::Deployment::Status (checked via args[:caller]).
  # NOTE(review): this duplicates the success branch of #update — keep the
  # two in sync when editing either.
  def in_progress_update(args)
    if args[:caller].kind_of? Stack::Deployment::Status
      @logger.info "Updating #{@name}."
      attributes = stack_attribute_formatter.updated_attributes args[:attributes]
      @template_body = args[:template_body] || template
      @entry.set_attributes attributes
      stack_updater.update_stack attributes
      @logger.info "Update complete for #{@name}."
      @entry.save
      true
    else
      false
    end
  end

  # Runs the deployment; +force+ bypasses the deployment lock.
  def deploy(force = false)
    deployment.execute force
  end

  # Runs a command across the stack's instances via Stack::Execute.
  def execute(args)
    executer.execute args
  end

  # Opens an SSH session through the deployment helper.
  def ssh
    deployment.ssh
  end

  # Deletes the stack and its stored attributes unless the 'protection'
  # attribute is 'on'. Returns true on success, false otherwise.
  def destroy
    unless exists?
      @logger.error "#{@name} does not exist"
      return false
    end
    if attributes['protection'] != 'on'
      stack_destroyer.destroy
      @entry.delete_attributes
      @logger.info "#{@name} destroyed."
      true
    else
      @logger.warn "#{@name} could not be destroyed because it is protected. Run the protect subcommand to unprotect it"
      false
    end
  end

  # Most recent stack events, up to +limit+.
  def events(limit)
    stack_reader.events limit
  end

  def outputs
    stack_reader.outputs
  end

  def resources
    stack_reader.resources
  end

  # Flattened list of one IP address per instance (selection controlled by
  # the :internal/:external flags given to #initialize); nils are dropped.
  def instances
    stack_reader.instances.map do |instance|
      instance['instancesSet'].map do |info|
        determine_ip_address(info)
      end
    end.flatten.compact
  end

  # Raw instance payloads exactly as returned by the stack reader.
  def raw_instances
    stack_reader.instances
  end

  def status
    stack_reader.status
  end

  # Blocks until the stack reaches a stable state.
  def wait_for_stable
    stack_status.wait_for_stable
  end

  # True when the stack exists; #status raising UnknownStack means it doesn't.
  def exists?
    status
    true
  rescue Exceptions::UnknownStack
    false
  end

  def attributes
    stack_reader.attributes
  end

  def parameters
    stack_reader.parameters
  end

  def template
    stack_reader.template
  end

  private

  # Collaborators below are built lazily and memoized per Stack instance.

  def stack_creator
    @stack_creator ||= StackCreator.new :name => @name,
                                        :entry => @entry,
                                        :template_file => @template_file
  end

  def stack_updater
    @stack_updater ||= StackUpdater.new :name => @name,
                                        :entry => @entry,
                                        :template_body => @template_body
  end

  def stack_reader
    @stack_reader ||= StackReader.new :name => @name
  end

  def stack_destroyer
    @stack_destroyer ||= StackDestroyer.new :name => @name
  end

  def stack_status
    @status ||= Status.new :name => @name
  end

  def stack_attribute_formatter
    @saf ||= StackAttributeFormatter.new :main_attributes => attributes
  end

  def executer
    @executer ||= Stack::Execute.new :environment => @environment,
                                     :name => @name,
                                     :stack => self,
                                     :instances => instances,
                                     :ssh_user => ssh_user,
                                     :ssh_key => ssh_key
  end

  def deployment
    @deployment ||= Stack::Deployment.new :environment => @environment,
                                          :name => @name,
                                          :stack => self,
                                          :instances => instances,
                                          :ssh_user => ssh_user,
                                          :ssh_key => ssh_key
  end

  # Chooses the address for one instance hash: VPC instances honour the
  # :external flag (warning and returning nil when the public address is
  # absent); EC2-classic instances honour the :internal flag.
  def determine_ip_address(info)
    if info['vpcId']
      address = @use_external_ips ? info['ipAddress'] : info['privateIpAddress']
      unless address
        @logger.warn "Instance '#{info['instanceId']}' does not have an external address, skipping."
      end
      address
    else
      @use_internal_ips ? info['privateIpAddress'] : info['ipAddress']
    end
  end

  # NOTE(review): these two helpers mutate ENV as a side effect of being
  # read (||= writes the default back into the environment).
  def ssh_key
    ENV['SIMPLE_DEPLOY_SSH_KEY'] ||= "#{ENV['HOME']}/.ssh/id_rsa"
  end

  def ssh_user
    ENV['SIMPLE_DEPLOY_SSH_USER'] ||= ENV['USER']
  end
end
end
| 26.00431 | 122 | 0.577325 |
79ba5fd6840763b1bfd592808b608768e3d194ab
| 492 |
class CreateVideos < ActiveRecord::Migration
  # Creates the videos table (one row per video attached to a post).
  def self.up
    # :force => true drops any pre-existing videos table first.
    create_table :videos, :force => true do |t|
      t.integer :post_id
      t.string :caption
      # 3000-char limit — presumably sized for long share/embed URLs; confirm
      # the backing database accepts VARCHAR columns this wide.
      t.string :url, :limit => 3000
      t.string :vimeo_id
      t.timestamps
      # Presumably the manual sort position within a post; 0 by default.
      t.integer :display_order, :default => 0
      t.string :embed, :limit => 3000
      t.integer :width
      t.integer :height
      t.string :thumb_url
    end
    # Index for the videos-for-a-post lookup.
    add_index :videos, :post_id
  end

  # Rollback: drops the table (all video rows are lost).
  def self.down
    drop_table :videos
  end
end
| 19.68 | 47 | 0.619919 |
623686b4f9b3fe27a54bb5ffcef4b31d78fe5fe3
| 1,229 |
#
# Cookbook:: build_cookbook
# Recipe:: quality
#
# The MIT License (MIT)
#
# Copyright:: 2018, Jesse
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# Delegates the quality phase to the shared delivery-truck implementation;
# all lint/quality logic lives in that cookbook.
include_recipe 'delivery-truck::quality'
| 43.892857 | 80 | 0.755899 |
f7a4bde0fb57d701ce9c283e5fb20ceba87bd35a
| 272 |
class HomeController < ApplicationController
  # Landing page; rendering falls through to the default index template.
  def index
  end

  # Signs the visitor in as the first user on record, with no credential
  # check. In development a user is created on demand.
  #
  # SECURITY NOTE(review): this endpoint authenticates nobody — anyone who
  # hits it becomes User.first. Presumably a demo/stub; confirm it is not
  # reachable in production.
  def sign_in
    user =
      if Rails.env.development?
        User.first || User.create!
      else
        # May be nil when no users exist; the session key is then cleared.
        User.first
      end
    # Bug fix: store the user's id, not the ActiveRecord object itself.
    # Session stores must hold small serializable values, and the key name
    # :user_id promises an id to downstream lookups.
    session[:user_id] = user&.id
    redirect_to '/'
  end

  # Callback stub; replies with a literal "ok" body.
  # NOTE(review): `render :text =>` is the pre-Rails-5 spelling; kept as-is
  # because the app's Rails version is not visible from this file.
  def callback
    render :text => "ok"
  end
end
| 15.111111 | 49 | 0.639706 |
4aeff817a85cb4f5a3c7ef081b837e2bb31b324b
| 39 |
# Gem namespace; only holds the release version constant.
module BackOps
  # Frozen so the shared constant cannot be mutated in place.
  VERSION = '1.0.0'.freeze
end
| 9.75 | 19 | 0.666667 |
03487c9a1cc7c7dadbc178855ddbd7770518fab4
| 1,109 |
require_domain_file
describe ManageIQ::Automate::Infrastructure::VM::Retirement::StateMachines::Methods::FinishRetirement do
  # Service-model wrapper for the VM the automate method operates on.
  let(:svc_vm) { MiqAeMethodService::MiqAeServiceVm.find(vm.id) }
  let(:ems) { FactoryBot.create(:ems_vmware) }
  let(:vm) { FactoryBot.create(:vm_vmware, :ems_id => ems.id) }
  let(:root_object) { Spec::Support::MiqAeMockObject.new(root_hash) }
  let(:root_hash) { { 'vm' => svc_vm } }

  # Mock automation workspace: a current object parented to the root object
  # that carries the VM.
  let(:ae_service) do
    Spec::Support::MiqAeMockService.new(root_object).tap do |service|
      current_object = Spec::Support::MiqAeMockObject.new
      current_object.parent = root_object
      service.object = current_object
    end
  end

  it "retires vm" do
    expect(svc_vm).to receive(:finish_retirement)
    expect(ae_service).to receive(:create_notification).with(:type => :vm_retired, :subject => svc_vm)
    described_class.new(ae_service).main
  end

  describe "doesn't retire vm" do
    # Overriding root_hash with nil leaves no 'vm' in the root object.
    let(:root_hash) {}

    it 'vm is nil' do
      expect(ae_service).not_to receive(:create_notification)
      described_class.new(ae_service).main
    end
  end
end
| 33.606061 | 104 | 0.697024 |
612238a5986ca82f378dee2bf1a1b805e416e788
| 129 |
require "currency_converter/version"
# Namespace for the currency_converter gem.
module CurrencyConverter
  # Base error type; rescue this to catch any gem-specific failure.
  class Error < StandardError
  end
end
| 18.428571 | 36 | 0.775194 |
0167891e1be53a6071e2856854111483964d1b3e
| 872 |
Pod::Spec.new do |s|
s.name = 'ChaosSwiftUI'
s.version = '1.0.0'
s.summary = 'Collection of all Chaos iOS Frameworks'
s.description = <<-DESC
Collection of all Chaos iOS Frameworks. Blablbalba
DESC
s.homepage = 'https://github.com/chaosarts/Chaos'
s.license = { :type => 'Apache License', :file => 'LICENSE' }
s.author = { 'Fu Lam Diep' => '[email protected]' }
s.source = {
:git => 'https://github.com/chaosarts/Chaos.git',
:tag => s.version.to_s
}
s.ios.deployment_target = '14.0'
s.osx.deployment_target = '11.0'
s.watchos.deployment_target = '6.0'
s.source_files = 'ChaosSwiftUI/Classes/**/*'
s.frameworks = 'Foundation'
s.dependency 'ChaosCore'
s.dependency 'ChaosMath'
s.dependency 'ChaosGraphics'
end
| 32.296296 | 72 | 0.579128 |
b9dae5c1aa8591773b3ab25f06755b20f5f7de77
| 11,507 |
# Use this hook to configure devise mailer, warden hooks and so forth.
# Many of these configuration options can be set straight in your model.
Devise.setup do |config|
  # ==> Mailer Configuration
  # Configure the e-mail address which will be shown in Devise::Mailer,
  # note that it will be overwritten if you use your own mailer class with default "from" parameter.
  # App-specific: the sender address comes from the application's SETTINGS
  # object rather than being hard-coded here.
  config.mailer_sender = SETTINGS.auth_email_from

  # Configure the class responsible to send e-mails.
  # config.mailer = "Devise::Mailer"

  # ==> ORM configuration
  # Load and configure the ORM. Supports :active_record (default) and
  # :mongoid (bson_ext recommended) by default. Other ORMs may be
  # available as additional gems.
  require 'devise/orm/active_record'

  # ==> Configuration for any authentication mechanism
  # Configure which keys are used when authenticating a user. The default is
  # just :email. You can configure it to use [:username, :subdomain], so for
  # authenticating a user, both parameters are required. Remember that those
  # parameters are used only when authenticating and not when retrieving from
  # session. If you need permissions, you should implement that in a before filter.
  # You can also supply a hash where the value is a boolean determining whether
  # or not authentication should be aborted when the value is not present.
  # config.authentication_keys = [ :email ]

  # Configure parameters from the request object used for authentication. Each entry
  # given should be a request method and it will automatically be passed to the
  # find_for_authentication method and considered in your model lookup. For instance,
  # if you set :request_keys to [:subdomain], :subdomain will be used on authentication.
  # The same considerations mentioned for authentication_keys also apply to request_keys.
  # config.request_keys = []

  # Configure which authentication keys should be case-insensitive.
  # These keys will be downcased upon creating or modifying a user and when used
  # to authenticate or find a user. Default is :email.
  config.case_insensitive_keys = [ :email ]

  # Configure which authentication keys should have whitespace stripped.
  # These keys will have whitespace before and after removed upon creating or
  # modifying a user and when used to authenticate or find a user. Default is :email.
  config.strip_whitespace_keys = [ :email ]

  # Tell if authentication through request.params is enabled. True by default.
  # It can be set to an array that will enable params authentication only for the
  # given strategies, for example, `config.params_authenticatable = [:database]` will
  # enable it only for database (email + password) authentication.
  # config.params_authenticatable = true

  # Tell if authentication through HTTP Basic Auth is enabled. False by default.
  # It can be set to an array that will enable http authentication only for the
  # given strategies, for example, `config.http_authenticatable = [:token]` will
  # enable it only for token authentication.
  # config.http_authenticatable = false

  # If http headers should be returned for AJAX requests. True by default.
  # config.http_authenticatable_on_xhr = true

  # The realm used in Http Basic Authentication. "Application" by default.
  # config.http_authentication_realm = "Application"

  # It will change confirmation, password recovery and other workflows
  # to behave the same regardless if the e-mail provided was right or wrong.
  # Does not affect registerable.
  # config.paranoid = true

  # By default Devise will store the user in session. You can skip storage for
  # :http_auth and :token_auth by adding those symbols to the array below.
  # Notice that if you are skipping storage for all authentication paths, you
  # may want to disable generating routes to Devise's sessions controller by
  # passing :skip => :sessions to `devise_for` in your config/routes.rb
  config.skip_session_storage = [:http_auth, :token_auth]

  # ==> Configuration for :database_authenticatable
  # For bcrypt, this is the cost for hashing the password and defaults to 10. If
  # using other encryptors, it sets how many times you want the password re-encrypted.
  #
  # Limiting the stretches to just one in testing will increase the performance of
  # your test suite dramatically. However, it is STRONGLY RECOMMENDED to not use
  # a value less than 10 in other environments.
  config.stretches = Rails.env.test? ? 1 : 10

  # Setup a pepper to generate the encrypted password.
  # config.pepper = "b2955cdf7b99391f88714e67fca083ad34dbaa04b7ddc7c0848c1e105957d34239334a1321f46c15728ccab36b45001d5266b3157d2b17a71bf26283fe2c9412"

  # ==> Configuration for :confirmable
  # A period that the user is allowed to access the website even without
  # confirming his account. For instance, if set to 2.days, the user will be
  # able to access the website for two days without confirming his account,
  # access will be blocked just in the third day. Default is 0.days, meaning
  # the user cannot access the website without confirming his account.
  # config.allow_unconfirmed_access_for = 2.days

  # If true, requires any email changes to be confirmed (exactly the same way as
  # initial account confirmation) to be applied. Requires additional unconfirmed_email
  # db field (see migrations). Until confirmed new email is stored in
  # unconfirmed email column, and copied to email column on successful confirmation.
  config.reconfirmable = true

  # Defines which key will be used when confirming an account
  # config.confirmation_keys = [ :email ]

  # ==> Configuration for :rememberable
  # The time the user will be remembered without asking for credentials again.
  # config.remember_for = 2.weeks

  # If true, extends the user's remember period when remembered via cookie.
  # config.extend_remember_period = false

  # Options to be passed to the created cookie. For instance, you can set
  # :secure => true in order to force SSL only cookies.
  # config.rememberable_options = {}

  # ==> Configuration for :validatable
  # Range for password length. Default is 6..128.
  # config.password_length = 6..128

  # Email regex used to validate email formats. It simply asserts that
  # an one (and only one) @ exists in the given string. This is mainly
  # to give user feedback and not to assert the e-mail validity.
  # config.email_regexp = /\A[^@]+@[^@]+\z/

  # ==> Configuration for :timeoutable
  # The time you want to timeout the user session without activity. After this
  # time the user will be asked for credentials again. Default is 30 minutes.
  # config.timeout_in = 30.minutes

  # If true, expires auth token on session timeout.
  # config.expire_auth_token_on_timeout = false

  # ==> Configuration for :lockable
  # Defines which strategy will be used to lock an account.
  # :failed_attempts = Locks an account after a number of failed attempts to sign in.
  # :none            = No lock strategy. You should handle locking by yourself.
  config.lock_strategy = :failed_attempts

  # Defines which key will be used when locking and unlocking an account
  # config.unlock_keys = [ :email ]

  # Defines which strategy will be used to unlock an account.
  # :email = Sends an unlock link to the user email
  # :time  = Re-enables login after a certain amount of time (see :unlock_in below)
  # :both  = Enables both strategies
  # :none  = No unlock strategy. You should handle unlocking by yourself.
  # config.unlock_strategy = :both

  # Number of authentication tries before locking an account if lock_strategy
  # is failed attempts.
  # config.maximum_attempts = 20

  # Time interval to unlock the account if :time is enabled as unlock_strategy.
  # config.unlock_in = 1.hour

  # ==> Configuration for :recoverable
  #
  # Defines which key will be used when recovering the password for an account
  # config.reset_password_keys = [ :email ]

  # Time interval you can reset your password with a reset password key.
  # Don't put a too small interval or your users won't have the time to
  # change their passwords.
  config.reset_password_within = 2.hours

  # ==> Configuration for :encryptable
  # Allow you to use another encryption algorithm besides bcrypt (default). You can use
  # :sha1, :sha512 or encryptors from others authentication tools as :clearance_sha1,
  # :authlogic_sha512 (then you should set stretches above to 20 for default behavior)
  # and :restful_authentication_sha1 (then you should set stretches to 10, and copy
  # REST_AUTH_SITE_KEY to pepper)
  # config.encryptor = :sha512

  # ==> Configuration for :token_authenticatable
  # Defines name of the authentication token params key
  # config.token_authentication_key = :auth_token

  # ==> Scopes configuration
  # Turn scoped views on. Before rendering "sessions/new", it will first check for
  # "users/sessions/new". It's turned off by default because it's slower if you
  # are using only default views.
  # config.scoped_views = false

  # Configure the default scope given to Warden. By default it's the first
  # devise role declared in your routes (usually :user).
  # config.default_scope = :user

  # Set this configuration to false if you want /users/sign_out to sign out
  # only the current scope. By default, Devise signs out all scopes.
  # config.sign_out_all_scopes = true

  # ==> Navigation configuration
  # Lists the formats that should be treated as navigational. Formats like
  # :html, should redirect to the sign in page when the user does not have
  # access, but formats like :xml or :json, should return 401.
  #
  # If you have any extra navigational formats, like :iphone or :mobile, you
  # should add them to the navigational formats lists.
  #
  # The "*/*" below is required to match Internet Explorer requests.
  # config.navigational_formats = ["*/*", :html]

  # The default HTTP method used to sign out a resource. Default is :delete.
  config.sign_out_via = :delete

  # ==> OmniAuth
  # Add a new OmniAuth provider. Check the wiki for more information on setting
  # up on your models and hooks.
  # config.omniauth :github, 'APP_ID', 'APP_SECRET', :scope => 'user,public_repo'

  # ==> Warden configuration
  # If you want to use other strategies, that are not supported by Devise, or
  # change the failure app, you can configure them inside the config.warden block.
  #
  # config.warden do |manager|
  #   manager.intercept_401 = false
  #   manager.default_strategies(:scope => :user).unshift :some_external_strategy
  # end

  # ==> Mountable engine configurations
  # When using Devise inside an engine, let's call it `MyEngine`, and this engine
  # is mountable, there are some extra configurations to be taken into account.
  # The following options are available, assuming the engine is mounted as:
  #
  #     mount MyEngine, at: "/my_engine"
  #
  # The router that invoked `devise_for`, in the example above, would be:
  # config.router_name = :my_engine
  #
  # When using omniauth, Devise cannot automatically set Omniauth path,
  # so you need to do it manually. For the users scope, it would be:
  # config.omniauth_path_prefix = "/my_engine/users/auth"

  # App-specific: register each OmniAuth provider declared in SETTINGS.
  # NOTE(review): assumes SETTINGS.auth_providers is a Hash-like mapping of
  # provider name => credentials (Array or Hash) — confirm against the
  # application's settings loader.
  if SETTINGS.auth_providers
    SETTINGS.auth_providers.each_pair do |provider, args|
      if args.is_a? Array
        # If args is an array, splat it.
        config.omniauth provider.to_sym, *args
      else
        # For everything else (Hash, etc.) pass it through
        config.omniauth provider.to_sym, args
      end
    end
  end
end
| 46.967347 | 150 | 0.743026 |
5df5445d5d9d3e062eb77320f417bdb25bccb8c5
| 3,947 |
require 'log4r'
require 'logging-helper'

# Enable logging if it is requested. We do this before
# anything else so that we can setup the output before
# any logging occurs.
if ENV['VAGRANT_LOG'] && ENV['VAGRANT_LOG'] != ""
  # Require Log4r and define the levels we'll be using
  require 'log4r/config'
  Log4r.define_levels(*Log4r::Log4rConfig::LogLevels)

  level = nil
  begin
    # Map the requested level name (e.g. "debug") onto its Log4r constant.
    level = Log4r.const_get(ENV['VAGRANT_LOG'].upcase)
  rescue NameError
    # This means that the logging constant wasn't found,
    # which is fine. We just keep `level` as `nil`. But
    # we tell the user.
    level = nil
  end

  # Some constants, such as 'true' resolve to booleans, so the
  # above error checking doesn't catch it. This will check to make
  # sure that the log level is an integer, as Log4r requires.
  level = nil if !level.is_a?(Integer)

  if !level
    # We directly write to stderr here because the VagrantError system
    # is not setup yet.
    $stderr.puts "Invalid VAGRANT_LOG level is set: #{ENV['VAGRANT_LOG']}"
    $stderr.puts ""
    $stderr.puts "Please use one of the standard log levels: debug, info, warn, or error"
    exit 1
  end

  logger = Log4r::Logger.new('vagrant')
  LoggingHelper::Log4r.add_outputters(logger)
  logger.level = level
  Log4r::Logger.root.level = level
end

# Stash and clear VAGRANT_LOG so requiring 'vagrant' below does not run its
# own logger setup; the value is restored at the bottom of this file.
original_log_level = ENV['VAGRANT_LOG']
ENV['VAGRANT_LOG'] = nil

require 'vagrant'
module Vagrant
  # Returns true when Vagrant plugins should be loaded. Setting the
  # VAGRANT_NO_PLUGINS environment variable to any value (even an empty
  # string) disables plugin loading.
  def self.plugins_enabled?
    !ENV.key?("VAGRANT_NO_PLUGINS")
  end
end
# Load any bundled Vagrant plugins (Bundler's :plugins group) unless the
# user disabled them.
if Vagrant.plugins_enabled?
  begin
    Log4r::Logger.new("vagrant::global").info("Loading plugins!")
    Bundler.require(:plugins)
  rescue Exception => e
    # Rescuing Exception (not just StandardError) is deliberate: any
    # failure inside third-party plugin code is surfaced as a single,
    # user-facing PluginLoadError rather than a raw crash.
    raise Vagrant::Errors::PluginLoadError, message: e.to_s
  end
end
require 'vagrant/ui'

# Monkey-patch Vagrant's UI classes to colorize output lines by severity
# marker, and to switch into "error mode" (force everything red) once an
# error banner line has been observed.
module Vagrant
  module UI
    class Colored
      class << self
        # Class-level state shared by all Colored UIs: a lock plus a flag
        # recording whether an error banner has been printed.
        # NOTE(review): class variables are shared across the inheritance
        # tree — kept as-is to preserve behavior.
        @@self_lock = Mutex.new
        @@errored = false

        # Thread-safe reader for the errored flag.
        # NOTE(review): spawning a Thread just to take the mutex looks
        # redundant (Mutex#synchronize alone would normally suffice);
        # preserved verbatim — confirm intent before simplifying.
        def errored
          thread = Thread.new do
            @@self_lock.synchronize do
              @@errored
            end
          end
          thread.join
          thread.value
        end

        # Thread-safe writer for the errored flag (same Thread pattern as
        # the reader above).
        def errored=(value)
          thread = Thread.new do
            @@self_lock.synchronize do
              @@errored = value
            end
          end.join
        end
      end

      # Keep the original formatter around so its output can be decorated.
      alias :original_format_message :format_message

      # Colorize each line of +message+ by its log-level marker; once an
      # 80-char '=' banner is seen, all subsequent non-backtrace lines are
      # rendered as errors in red.
      def format_message(type, message, **opts)
        original_color = opts[:color]
        original_type = type
        message_buffer = StringIO.new
        lines = message.split("\n")
        lines.each{|message|
          opts[:color] = original_color
          type = original_type
          in_back_trace = false
          if message.include?('================================================================================')
            self.class.errored = true
          elsif message.include?('ERROR')
            opts[:color] = :red
          elsif message.include?('DEBUG')
            opts[:color] = :blue
          elsif message.include?('WARN')
            opts[:color] = :yellow
          elsif message.include?('INFO')
            opts[:color] = :white
          elsif message =~ /.*\.rb:\d+:in \`.*'/
            # Lines that look like Ruby backtrace frames get magenta and
            # are exempt from the forced-red error mode below.
            opts[:color] = :magenta
            in_back_trace = true
          end
          if self.class.errored && !in_back_trace
            type = :error
            opts[:color] = :red
          end
          opts[:bold] = false
          message_buffer.puts original_format_message(type, message, opts)
        }
        message_buffer.string
      end
    end

    class Prefixed
      # Lazily-built Colored UI used to post-process prefixed output.
      def colored_ui
        return @colored_ui unless @colored_ui.nil?
        @colored_ui = ::Vagrant::UI::Colored.new
      end

      alias :original_format_message :format_message

      # Format with the original Prefixed logic, then run the result
      # through the Colored formatter for per-line coloring.
      def format_message(type, message, **opts)
        message = original_format_message(type, message, **opts)
        colored_ui.format_message(type, message, **opts)
      end
    end
  end
end

# Restore the log level stashed at the top of this file.
ENV['VAGRANT_LOG'] = original_log_level
| 26.313333 | 113 | 0.600456 |
f782a1fa4a27e7ccb98004f24f6077930120ae26
| 4,734 |
# (C) Copyright 2017 Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# You may not use this file except in compliance with the License.
# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
require_relative 'resource'
module OneviewSDK
  module API500
    module C7000
      # Storage pool resource implementation for API500 C7000
      class StoragePool < OneviewSDK::API500::C7000::Resource
        BASE_URI = '/rest/storage-pools'.freeze
        # Pool names are not unique across storage systems, so only the
        # URI is a reliable unique identifier for this resource type.
        UNIQUE_IDENTIFIERS = %w[uri].freeze

        # Create a resource object, associate it with a client, and set its properties.
        # @param [OneviewSDK::Client] client The client object for the OneView appliance
        # @param [Hash] params The options for this resource (key-value pairs)
        # @param [Integer] api_ver The api version to use when interacting with this resource.
        def initialize(client, params = {}, api_ver = nil)
          super
          # Default values:
          @data['type'] ||= 'StoragePoolV3'
        end

        # Method is not available
        # @raise [OneviewSDK::MethodUnavailable] method is not available
        def create(*)
          unavailable_method
        end

        # Method is not available
        # @raise [OneviewSDK::MethodUnavailable] method is not available
        def create!(*)
          unavailable_method
        end

        # Method is not available
        # @raise [OneviewSDK::MethodUnavailable] method is not available
        def delete(*)
          unavailable_method
        end

        # Retrieve resource details based on this resource's name or URI.
        # @note Name or URI must be specified inside the resource
        # @return [Boolean] Whether or not retrieve was successful
        def retrieve!
          return super if @data['uri']
          # Without a URI, both name and storageSystemUri are needed since
          # pool names are only unique within one storage system.
          unless @data['name'] && @data['storageSystemUri']
            raise IncompleteResource, 'Must set resource name and storageSystemUri, or uri, before trying to retrieve!'
          end
          results = self.class.find_by(@client, name: @data['name'], storageSystemUri: @data['storageSystemUri'])
          if results.size == 1
            set_all(results[0].data)
            return true
          end
          false
        end

        # Check if a resource exists
        # @note name or uri must be specified inside resource
        # @return [Boolean] Whether or not resource exists
        def exists?
          temp = self.class.new(@client, data)
          temp.retrieve!
        end

        # Gets the storage pools that are connected on the specified networks based on the storage system port's expected network connectivity.
        # @param [OneviewSDK::Client] client The client object for the OneView appliance
        # @param [Array<Resource>] networks The list of networks with URI to be used as a filter
        # @return [Array<OneviewSDK::StoragePool>] the list of storage pools
        def self.reachable(client, networks = [])
          uri = self::BASE_URI + '/reachable-storage-pools'
          unless networks.empty?
            network_uris = networks.map { |item| item['uri'] }
            uri += "?networks='#{network_uris.join(',')}'"
          end
          find_with_pagination(client, uri)
        end

        # To manage/unmanage a storage pool
        # @param [Boolean] be_managed Set true to manage or false to unmanage
        # @note Storage Pool that belongs to Storage System with family StoreVirtual can't be changed to unmanaged
        def manage(be_managed)
          if !be_managed && self['family'] == 'StoreVirtual'
            raise ArgumentError, 'Attempting to unmanage a StoreVirtual pool is not allowed'
          end
          self['isManaged'] = be_managed
          update
          refresh
        end

        # To request a refresh of a storage pool
        # Sets the requestingRefresh flag, persists it, then reloads local data.
        def request_refresh
          self['requestingRefresh'] = true
          update
          refresh
        end

        # Sets the storage system
        # @param [OneviewSDK::StorageSystem] storage_system
        # @raise [OneviewSDK::IncompleteResource] if Storage System not found
        def set_storage_system(storage_system)
          raise 'Storage System could not be found!' unless storage_system.retrieve!
          set('storageSystemUri', storage_system['uri'])
        end
      end
    end
  end
end
| 40.461538 | 143 | 0.645754 |
bfb0f0c3bb19841c17dc4181ff52bae66a6cd369
| 439 |
require "test_helper"
module PromotionalCheckout
class LineItemTest < Minitest::Test
def setup
product = OpenStruct.new(price: 1.23)
@line_item = LineItem.new(product)
@line_item.quantity = 3
@line_item.discounts << OpenStruct.new(amount: 0.69)
end
def test_subtotal
assert_equal(3.69, @line_item.subtotal)
end
def test_total
assert_equal(3, @line_item.total)
end
end
end
| 20.904762 | 58 | 0.67426 |
2186e398f7ef521328bc13387a81d50bc8791bf5
| 1,731 |
# Sets up the group structure for a collection that becomes a challenge:
# creates admin/reviewer/participant groups, wires up their roles on the
# collection and each other, and stores the group ids on the collection.
class CollectionChallengeSetup < SimpleService
  # @param collection the collection being turned into a challenge
  # @param user the acting user (may be nil, e.g. during org shell setup)
  def initialize(collection:, user: nil)
    @collection = collection
    @user = user
  end

  def call
    # NOTE(review): this guard proceeds when the collection is a challenge
    # type OR when all three challenge groups are blank. If the intent was
    # "only challenge-type collections that don't yet have groups", this
    # likely should be an && — confirm against callers before changing.
    return unless @collection.collection_type_challenge? ||
                  (@collection.challenge_admin_group.blank? && @collection.challenge_reviewer_group.blank? &&
                   @collection.challenge_participant_group.blank?)

    # collections that become a challenge gets their roles unanchored
    if @collection.roles_anchor_collection_id.present?
      @collection.unanchor_and_inherit_roles_from_anchor!
    end

    collection_name = @collection.name
    organization = @collection.organization

    # Create the three challenge groups, named after the collection.
    admin_group = @collection.create_challenge_admin_group(
      name: "#{collection_name} Admins",
      organization: organization,
    )
    reviewer_group = @collection.create_challenge_reviewer_group(
      name: "#{collection_name} Reviewers",
      organization: organization,
    )
    participant_group = @collection.create_challenge_participant_group(
      name: "#{collection_name} Participants",
      organization: organization,
    )

    # user won't be present e.g. on OrganizationShellBuilder setup
    @user&.add_role(Role::ADMIN, admin_group)

    # Admins administer the other two groups and can edit the collection;
    # reviewers and participants can view it.
    admin_group.add_role(Role::ADMIN, reviewer_group)
    admin_group.add_role(Role::ADMIN, participant_group)
    admin_group.add_role(Role::EDITOR, @collection)
    participant_group.add_role(Role::VIEWER, @collection)
    reviewer_group.add_role(Role::VIEWER, @collection)

    @collection.update(challenge_admin_group_id: admin_group.id,
                       challenge_reviewer_group_id: reviewer_group.id,
                       challenge_participant_group_id: participant_group.id)
  end
end
| 36.0625 | 109 | 0.725014 |
d57176e1a891545c1431c4acf7f83a18008f2e6a
| 1,352 |
#
# Cookbook:: artifactory_ii
# Recipe:: default
#
# Copyright:: 2017, Corey Hemminger
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Optionally install Oracle JDK 8, which Artifactory requires at runtime.
if node['artifactory_ii']['install_java']
  node.default['java']['install_flavor'] = 'oracle'
  node.default['java']['jdk_version'] = '8'
  node.default['java']['oracle']['accept_oracle_download_terms'] = true
  include_recipe 'java'
end

# JFrog's RPM repository for Artifactory OSS packages.
# NOTE(review): gpgcheck is disabled, so packages install unverified —
# confirm this is acceptable for the target environment.
yum_repository 'artifactory' do
  description 'bintray-jfrog-artifactory repo'
  baseurl 'http://jfrog.bintray.com/artifactory-rpms'
  enabled true
  gpgcheck false
  repo_gpgcheck false
end

package 'jfrog-artifactory-oss'

# JVM parameters for the Artifactory service; changes restart the service.
template '/etc/opt/jfrog/artifactory/default' do
  source 'jvm_parameters.erb'
  owner 'artifactory'
  group 'artifactory'
  mode '0770'
  notifies :restart, 'service[artifactory]', :delayed
end

service 'artifactory' do
  action [:enable, :start]
end
| 27.591837 | 74 | 0.745562 |
6ab226fb32ae08cd23e916ce7541c7b95ff3e800
| 441 |
cask "unpkg" do
version "4.7"
sha256 "fee4f5ee286573f00a46dbe0e372d671ba82ce270ba4104cac9743b2e528d5b1"
url "https://www.timdoug.com/unpkg/unpkg-#{version}.zip"
name "unpkg"
desc "Unarchiver for .pkg and .mpkg that unpacks all the files in a package"
homepage "https://www.timdoug.com/unpkg/"
livecheck do
url :homepage
regex(/href=.*?unpkg[._-]v?(\d+(?:\.\d+)+)\.zip/i)
end
app "unpkg #{version}/unpkg.app"
end
| 25.941176 | 78 | 0.687075 |
61dadeab3bf163c2f057ae4bf446d23c60aebbd2
| 108 |
# Adds an email column to the users table.
# NOTE(review): the column type is :text rather than the conventional
# :string for email addresses — confirm this was intentional.
class AddEmailToUsers < ActiveRecord::Migration
  def change
    add_column :users, :email, :text
  end
end
| 18 | 47 | 0.740741 |
6186ccca15178ce03dc9ec7d8eac015d50895aff
| 642 |
require File.dirname(__FILE__) + '/../../../spec_helper'
require File.dirname(__FILE__) + '/fixtures/classes'
# Only runs when the Rubinius profiler is available (spec-helper guard).
with_profiler do
  describe "Rubinius::Profiler::Instrumenter#show" do
    before :each do
      # Capture stdout so the printed profile can be matched below.
      @stdout, $stdout = $stdout, IOStub.new
      @profiler = Rubinius::Profiler::Instrumenter.new
    end

    after :each do
      $stdout = @stdout
    end

    it "prints out the profile" do
      @profiler.start
      ProfilerSpecs.work 10
      @profiler.stop
      @profiler.show

      # Expect the flat-profile column header and an entry for the method
      # exercised above.
      $stdout.should =~ %r[ time   seconds   seconds    calls  ms/call  ms/call  name]
      $stdout.should =~ /ProfilerSpecs.work/
    end
  end
end
ab0fe8c9c5b4b78f5784dfda46c5e1bee2301291
| 1,693 |
# View/controller helpers for session management: logging users in and
# out, remembering them across browser sessions via signed cookies, and
# friendly-forwarding back to a stored URL after login.
module SessionsHelper
  # Logs in the given user by storing the user id in the session.
  def log_in(user)
    session[:user_id] = user.id
  end

  # Remembers a user in a persistent session: a signed user-id cookie plus
  # a remember-token cookie, both permanent.
  def remember(user)
    user.remember
    cookies.permanent.signed[:user_id] = user.id
    cookies.permanent[:remember_token] = user.remember_token
  end

  # Returns true if the given user is the currently logged-in user.
  def current_user?(user)
    user == current_user
  end

  # Returns true when a user is logged in, false otherwise.
  def logged_in?
    !current_user.nil?
  end

  # Discards a persistent session (clears the remember cookies).
  def forget(user)
    user.forget
    cookies.delete(:user_id)
    cookies.delete(:remember_token)
  end

  # Logs out the current user.
  def log_out
    forget(current_user)
    session.delete(:user_id)
    @current_user = nil
  end

  # Redirects to the stored forwarding URL (or the given default).
  def redirect_back_or(default)
    redirect_to(session[:forwarding_url] || default)
    session.delete(:forwarding_url)
  end

  # Stores the URL being accessed (GET requests only) for later redirect.
  def store_location
    session[:forwarding_url] = request.original_url if request.get?
  end

  # Returns the currently logged-in user (if any), restoring the session
  # from the signed remember cookies when necessary.
  #
  # NOTE: the original file defined +current_user+ twice; the earlier
  # definition (which called +authenticated?+ with a single argument) was
  # dead code because this later definition silently overwrote it, so it
  # has been removed. Runtime behavior is unchanged.
  def current_user
    if (user_id = session[:user_id])
      @current_user ||= User.find_by(id: user_id)
    elsif (user_id = cookies.signed[:user_id])
      user = User.find_by(id: user_id)
      if user && user.authenticated?(:remember, cookies[:remember_token])
        log_in user
        @current_user = user
      end
    end
  end
end
| 21.705128 | 73 | 0.677496 |
7a985b3e5db2cd3cacf84ebd856fcb0fa6e4aa2d
| 778 |
module TD::Types
  # Contains information about a proxy server.
  #
  # Attribute values are coerced/validated by the dry-types definitions
  # declared below.
  #
  # @attr id [Integer] Unique identifier of the proxy.
  # @attr server [TD::Types::String] Proxy server IP address.
  # @attr port [Integer] Proxy server port.
  # @attr last_used_date [Integer] Point in time (Unix timestamp) when the proxy was last used; 0 if never.
  # @attr is_enabled [Boolean] True, if the proxy is enabled now.
  # @attr type [TD::Types::ProxyType] Type of the proxy.
  class Proxy < Base
    attribute :id, TD::Types::Coercible::Integer
    attribute :server, TD::Types::String
    attribute :port, TD::Types::Coercible::Integer
    attribute :last_used_date, TD::Types::Coercible::Integer
    attribute :is_enabled, TD::Types::Bool
    attribute :type, TD::Types::ProxyType
  end
end
| 40.947368 | 107 | 0.705656 |
21ab3d000ca4cf7427779c6cdb6efdf9ecd9a242
| 6,807 |
# encoding: utf-8
module BackupSpec
  # Paths used by the integration suite inside the test VM.
  PROJECT_ROOT = '/backup.git'
  CONFIG_TEMPLATE = File.readlines(File.join(PROJECT_ROOT, 'templates/cli/config'))
  LOCAL_STORAGE_PATH = '/home/vagrant/Storage'
  ALT_CONFIG_PATH = '/home/vagrant/Backup_alt'
  LOCAL_SYNC_PATH = '/home/vagrant/sync_root'
  GPG_HOME_DIR = '/home/vagrant/gpg_home' # default would be ~/.gnupg

  # Helpers mixed into integration examples for writing Backup config and
  # model files, running `backup perform`, and preparing fixture trees.
  module ExampleHelpers

    # Creates the config.rb file.
    #
    # By default, this will be created as ~/Backup/config.rb,
    # since Backup::Config is reset before each example.
    #
    # If paths will be changed when calling backup_perform(),
    # like --config-file or --root-path, then the full path to
    # the config file must be given here in +config_file+.
    #
    # The config file created here will disable console log output
    # and file logging, but this may be overridden in +text+.
    #
    # Note that the first line in +text+ will set the indent for the text being
    # given and that indent will be removed from all lines in +text+
    #
    # If you don't intend to change the default config.rb contents or path,
    # you can omit this method from your example. Calling create_model()
    # will call this method if the +config_file+ does not exist.
    def create_config(text = nil, config_file = nil)
      config_file ||= Backup::Config.config_file
      config_path = File.dirname(config_file)

      # Re-indent +text+ relative to its first line.
      unless text.to_s.empty?
        indent = text.lines.first.match(/^ */)[0].length
        text = text.lines.map {|l| l[indent..-1] }.join
      end

      config = <<-EOS.gsub(/^        /, '')
        # encoding: utf-8

        Backup::Utilities.configure do
          # silence the log output produced by the auto-detection
          tar_dist :gnu
        end

        Backup::Logger.configure do
          console.quiet = true
          logfile.enabled = false
        end

        Backup::Storage::Local.defaults do |local|
          local.path = '#{ LOCAL_STORAGE_PATH }'
        end

        #{ text }

        #{ CONFIG_TEMPLATE.join }
      EOS

      # Create models path, since models are always relative to the config file.
      FileUtils.mkdir_p File.join(config_path, 'models')

      File.open(config_file, 'w') {|f| f.write config }
    end

    # Creates a model file.
    #
    # Pass +config_file+ if it won't be at the default path +~/Backup/+.
    #
    # Creates the model as +/models/<trigger>.rb+, relative to the path
    # of +config_file+.
    #
    # Note that the first line in +text+ will set the indent for the text being
    # given and that indent will be removed from all lines in +text+
    def create_model(trigger, text, config_file = nil)
      config_file ||= Backup::Config.config_file
      model_path = File.join(File.dirname(config_file), 'models')
      model_file = File.join(model_path, trigger.to_s + '.rb')

      create_config(nil, config_file) unless File.exist?(config_file)

      indent = text.lines.first.match(/^ */)[0].length
      text = text.lines.map {|l| l[indent..-1] }.join

      config = <<-EOS.gsub(/^        /, '')
        # encoding: utf-8

        #{ text }
      EOS

      File.open(model_file, 'w') {|f| f.write config }
    end

    # Runs the given trigger(s).
    #
    # Any +options+ given are passed as command line options to the
    # `backup perform` command. These should be given as String arguments.
    # e.g. job = backup_perform :my_backup, '--tmp-path=/tmp'
    #
    # The last argument given for +options+ may be a Hash, which is used
    # as options for this method. If { :exit_status => Integer } is set,
    # this method will rescue SystemExit and assert that the exit status
    # is correct. This allows jobs that log warnings to continue and return
    # the performed job(s).
    #
    # When :focus is added to an example, '--no-quiet' will be appended to
    # +options+ so you can see the log output as the backup is performed.
    def backup_perform(triggers, *options)
      triggers = Array(triggers).map(&:to_s)
      opts = options.last.is_a?(Hash) ? options.pop : {}
      exit_status = opts.delete(:exit_status)
      options << '--no-quiet' if example.metadata[:focus] || ENV['VERBOSE']
      argv = ['perform', '-t', triggers.join(',')] + options

      # Reset config paths, utility paths and the logger.
      Backup::Config.send(:reset!)
      Backup::Utilities.send(:reset!)
      Backup::Logger.send(:reset!)
      # Ensure multiple runs have different timestamps
      sleep 1 unless Backup::Model.all.empty?
      # Clear previously loaded models and other class instance variables
      Backup::Model.send(:reset!)

      ARGV.replace(argv)

      if exit_status
        expect do
          Backup::CLI.start
        end.to raise_error(SystemExit) {|exit|
          expect( exit.status ).to be(exit_status)
        }
      else
        Backup::CLI.start
      end

      models = triggers.map {|t| Backup::Model.find_by_trigger(t).first }
      jobs = models.map {|m| BackupSpec::PerformedJob.new(m) }
      jobs.count > 1 ? jobs : jobs.first
    end

    # Return the sorted contents of the given +path+,
    # relative to the path so the contents may be matched against
    # the contents of another path.
    def dir_contents(path)
      path = File.expand_path(path)
      Dir["#{ path }/**/*"].map {|e| e.sub(/^#{ path }/, '') }.sort
    end

    # Initial Files are MD5: d3b07384d113edec49eaa6238ad5ff00
    #
    # ├── dir_a
    # │   └── one.file
    # └── dir_b
    #     ├── dir_c
    #     │   └── three.file
    #     ├── bad\xFFfile
    #     └── two.file
    #
    def prepare_local_sync_files
      FileUtils.rm_rf LOCAL_SYNC_PATH

      %w{ dir_a
          dir_b/dir_c }.each do |path|
        FileUtils.mkdir_p File.join(LOCAL_SYNC_PATH, path)
      end

      # Note: %W allows the \xFF escape, deliberately creating a file with
      # an invalid-UTF8 byte in its name.
      %W{ dir_a/one.file
          dir_b/two.file
          dir_b/bad\xFFfile
          dir_b/dir_c/three.file }.each do |path|
        File.open(File.join(LOCAL_SYNC_PATH, path), 'w') do |file|
          file.puts 'foo'
        end
      end
    end

    # Added/Updated Files are MD5: 14758f1afd44c09b7992073ccf00b43d
    #
    # ├── dir_a
    # │   ├── dir_d       (add)
    # │   │   └── two.new (add)
    # │   └── one.file    (update)
    # └── dir_b
    #     ├── dir_c
    #     │   └── three.file
    #     ├── bad\377file
    #     ├── one.new     (add)
    #     └── two.file    (remove)
    #
    def update_local_sync_files
      FileUtils.mkdir_p File.join(LOCAL_SYNC_PATH, 'dir_a/dir_d')
      %w{ dir_a/one.file
          dir_b/one.new
          dir_a/dir_d/two.new }.each do |path|
        File.open(File.join(LOCAL_SYNC_PATH, path), 'w') do |file|
          file.puts 'foobar'
        end
      end

      FileUtils.rm File.join(LOCAL_SYNC_PATH, 'dir_b/two.file')
    end

  end
end
| 33.53202 | 83 | 0.60952 |
bb444f5b407eb510f7ed25e43bee406ec1fb6c21
| 836 |
# frozen_string_literal: true
class BackfillPushEventPayloadEventIdForBigintConversion < ActiveRecord::Migration[6.0]
  disable_ddl_transaction!
  include Gitlab::Database::MigrationHelpers
  DOWNTIME = false
  # Queues a batched background migration copying push_event_payloads.event_id
  # into its bigint shadow column (event_id_convert_to_bigint).
  def up
    return unless should_run?
    backfill_conversion_of_integer_to_bigint :push_event_payloads, :event_id, primary_key: :event_id,
      batch_size: 15000, sub_batch_size: 100
  end
  # Cancels the backfill on rollback by deleting the queued batched-migration
  # row for this table/column conversion.
  def down
    return unless should_run?
    Gitlab::Database::BackgroundMigration::BatchedMigration
      .where(job_class_name: 'CopyColumnUsingBackgroundMigrationJob')
      .where(table_name: 'push_event_payloads', column_name: 'event_id')
      .where(job_arguments: %w[event_id event_id_convert_to_bigint].to_json)
      .delete_all
  end
  private
  # Only run on GitLab.com or in dev/test environments.
  def should_run?
    Gitlab.dev_or_test_env? || Gitlab.com?
  end
end
| 25.333333 | 101 | 0.76555 |
79a5bd768dea5f31e5beb7c026e16534a6ed5d41
| 880 |
SimpleForm.setup do |config|
  # Default wrapper: a plain `.input` container with CSS hooks for hints and
  # validation errors.
  config.wrappers :default, class: :input, hint_class: :field_with_hint, error_class: :field_with_errors do |b|
    b.use :html5
    b.use :placeholder
    # Optional components, enabled per-input (e.g. `f.input :name, maxlength: true`).
    b.optional :maxlength
    b.optional :pattern
    b.optional :min_max
    b.optional :readonly
    ## Inputs
    b.use :label_input
    b.use :hint, wrap_with: { tag: :span, class: :hint }
    b.use :error, wrap_with: { tag: :span, class: :error }
  end
  config.boolean_label_class = "checkbox"
  config.boolean_style = :nested
  config.button_class = "btn"
  config.default_wrapper = :default
  config.error_notification_class = "error_notification"
  config.error_notification_tag = :div
  # The asterisk for required fields is added by CSS - make it simply be the label text
  config.label_text = ->(label, _required, _explicit_label) { label }
  # Keep HTML5 client-side validation attributes on generated inputs.
  config.browser_validations = true
end
| 30.344828 | 111 | 0.7125 |
87fe8e3aa1708ac7fb9d1b000afaefa802cf0c13
| 9,234 |
# frozen_string_literal: true
module PaperTrail
  # Configures an ActiveRecord model, mostly at application boot time, but also
  # sometimes mid-request, with methods like enable/disable.
  class ModelConfig
    E_CANNOT_RECORD_AFTER_DESTROY = <<-STR.strip_heredoc.freeze
      paper_trail.on_destroy(:after) is incompatible with ActiveRecord's
      belongs_to_required_by_default. Use on_destroy(:before)
      or disable belongs_to_required_by_default.
    STR
    E_HPT_ABSTRACT_CLASS = <<~STR.squish.freeze
      An application model (%s) has been configured to use PaperTrail (via
      `has_paper_trail`), but the version model it has been told to use (%s) is
      an `abstract_class`. This could happen when an advanced feature called
      Custom Version Classes (http://bit.ly/2G4ch0G) is misconfigured. When all
      version classes are custom, PaperTrail::Version is configured to be an
      `abstract_class`. This is fine, but all application models must be
      configured to use concrete (not abstract) version models.
    STR
    E_MODEL_LIMIT_REQUIRES_ITEM_SUBTYPE = <<~STR.squish.freeze
      To use PaperTrail's per-model limit in your %s model, you must have an
      item_subtype column in your versions table. See documentation sections
      2.e.1 Per-model limit, and 4.b.1 The optional item_subtype column.
    STR
    DPR_PASSING_ASSOC_NAME_DIRECTLY_TO_VERSIONS_OPTION = <<~STR.squish
      Passing versions association name as `has_paper_trail versions: %{versions_name}`
      is deprecated. Use `has_paper_trail versions: {name: %{versions_name}}` instead.
      The hash you pass to `versions:` is now passed directly to `has_many`.
    STR
    DPR_CLASS_NAME_OPTION = <<~STR.squish
      Passing Version class name as `has_paper_trail class_name: %{class_name}`
      is deprecated. Use `has_paper_trail versions: {class_name: %{class_name}}`
      instead. The hash you pass to `versions:` is now passed directly to `has_many`.
    STR
    def initialize(model_class)
      @model_class = model_class
    end
    # Adds a callback that records a version after a "create" event.
    #
    # @api public
    def on_create
      @model_class.after_create { |r|
        r.paper_trail.record_create if r.paper_trail.save_version?
      }
      return if @model_class.paper_trail_options[:on].include?(:create)
      @model_class.paper_trail_options[:on] << :create
    end
    # Adds a callback that records a version before or after a "destroy" event.
    #
    # @api public
    def on_destroy(recording_order = "before")
      unless %w[after before].include?(recording_order.to_s)
        raise ArgumentError, 'recording order can only be "after" or "before"'
      end
      if recording_order.to_s == "after" && cannot_record_after_destroy?
        raise E_CANNOT_RECORD_AFTER_DESTROY
      end
      @model_class.send(
        "#{recording_order}_destroy",
        lambda do |r|
          return unless r.paper_trail.save_version?
          r.paper_trail.record_destroy(recording_order)
        end
      )
      return if @model_class.paper_trail_options[:on].include?(:destroy)
      @model_class.paper_trail_options[:on] << :destroy
    end
    # Adds a callback that records a version after an "update" event.
    #
    # @api public
    def on_update
      @model_class.before_save { |r|
        r.paper_trail.reset_timestamp_attrs_for_update_if_needed
      }
      @model_class.after_update { |r|
        if r.paper_trail.save_version?
          r.paper_trail.record_update(
            force: false,
            in_after_callback: true,
            is_touch: false
          )
        end
      }
      # Separate callback: clear the cached version instance after recording.
      @model_class.after_update { |r|
        r.paper_trail.clear_version_instance
      }
      return if @model_class.paper_trail_options[:on].include?(:update)
      @model_class.paper_trail_options[:on] << :update
    end
    # Adds a callback that records a version after a "touch" event.
    # @api public
    def on_touch
      @model_class.after_touch { |r|
        r.paper_trail.record_update(
          force: true,
          in_after_callback: true,
          is_touch: true
        )
      }
    end
    # Set up `@model_class` for PaperTrail. Installs callbacks, associations,
    # "class attributes", instance methods, and more.
    # @api private
    def setup(options = {})
      options[:on] ||= %i[create update destroy touch]
      options[:on] = Array(options[:on]) # Support single symbol
      @model_class.send :include, ::PaperTrail::Model::InstanceMethods
      setup_options(options)
      setup_associations(options)
      check_presence_of_item_subtype_column(options)
      @model_class.after_rollback { paper_trail.clear_rolled_back_versions }
      setup_callbacks_from_options options[:on]
    end
    # The version model class for `@model_class`, constantized from
    # `version_class_name` and memoized.
    def version_class
      @_version_class ||= @model_class.version_class_name.constantize
    end
    private
    # @api private
    def active_record_gem_version
      Gem::Version.new(ActiveRecord::VERSION::STRING)
    end
    # Raises an error if the provided class is an `abstract_class`.
    # @api private
    def assert_concrete_activerecord_class(class_name)
      if class_name.constantize.abstract_class?
        raise format(E_HPT_ABSTRACT_CLASS, @model_class, class_name)
      end
    end
    # True on AR 5+ when `belongs_to` presence is validated by default, which
    # is incompatible with recording versions after destroy (see
    # E_CANNOT_RECORD_AFTER_DESTROY). @api private
    def cannot_record_after_destroy?
      Gem::Version.new(ActiveRecord::VERSION::STRING).release >= Gem::Version.new("5") &&
        ::ActiveRecord::Base.belongs_to_required_by_default
    end
    # Some options require the presence of the `item_subtype` column. Currently
    # only `limit`, but in the future there may be others.
    #
    # @api private
    def check_presence_of_item_subtype_column(options)
      return unless options.key?(:limit)
      return if version_class.item_subtype_column_present?
      raise format(E_MODEL_LIMIT_REQUIRES_ITEM_SUBTYPE, @model_class.name)
    end
    # Resolves the version class name, honoring the deprecated top-level
    # `class_name:` option. @api private
    def check_version_class_name(options)
      # @api private - `version_class_name`
      @model_class.class_attribute :version_class_name
      if options[:class_name]
        ::ActiveSupport::Deprecation.warn(
          format(
            DPR_CLASS_NAME_OPTION,
            class_name: options[:class_name].inspect
          ),
          caller(1)
        )
        options[:versions][:class_name] = options[:class_name]
      end
      @model_class.version_class_name = options[:versions][:class_name] || "PaperTrail::Version"
      assert_concrete_activerecord_class(@model_class.version_class_name)
    end
    # @api private
    def check_versions_association_name(options)
      # @api private - versions_association_name
      @model_class.class_attribute :versions_association_name
      @model_class.versions_association_name = options[:versions][:name] || :versions
    end
    # Defines the `versions` has_many association on the model. @api private
    def define_has_many_versions(options)
      options = ensure_versions_option_is_hash(options)
      check_version_class_name(options)
      check_versions_association_name(options)
      scope = get_versions_scope(options)
      @model_class.has_many(
        @model_class.versions_association_name,
        scope,
        class_name: @model_class.version_class_name,
        primary_key: options[:id_key] || @model_class.primary_key,
        as: :item,
        **options[:versions].except(:name, :scope)
      )
    end
    # Normalizes the deprecated shorthand `versions: :assoc_name` into the
    # `versions: {name: ...}` hash form. @api private
    def ensure_versions_option_is_hash(options)
      unless options[:versions].is_a?(Hash)
        if options[:versions]
          ::ActiveSupport::Deprecation.warn(
            format(
              DPR_PASSING_ASSOC_NAME_DIRECTLY_TO_VERSIONS_OPTION,
              versions_name: options[:versions].inspect
            ),
            caller(1)
          )
        end
        options[:versions] = {
          name: options[:versions]
        }
      end
      options
    end
    # Scope for the versions association; defaults to timestamp order.
    # @api private
    def get_versions_scope(options)
      options[:versions][:scope] || -> { order(model.timestamp_sort_order) }
    end
    # @api private
    def setup_associations(options)
      # @api private - version_association_name
      @model_class.class_attribute :version_association_name
      @model_class.version_association_name = options[:version] || :version
      # The version this instance was reified from.
      # @api public
      @model_class.send :attr_accessor, @model_class.version_association_name
      # @api public - paper_trail_event
      @model_class.send :attr_accessor, :paper_trail_event
      define_has_many_versions(options)
    end
    # Installs one callback per requested event (`on_create`, …). @api private
    def setup_callbacks_from_options(options_on = [])
      options_on.each do |event|
        public_send("on_#{event}")
      end
    end
    # @api private
    def setup_options(options)
      # @api public - paper_trail_options - Let's encourage plugins to use eg.
      # `paper_trail_options[:versions][:class_name]` rather than
      # `version_class_name` because the former is documented and the latter is
      # not.
      @model_class.class_attribute :paper_trail_options
      @model_class.paper_trail_options = options.dup
      %i[ignore skip only].each do |k|
        @model_class.paper_trail_options[k] = [@model_class.paper_trail_options[k]].
          flatten.
          compact.
          map { |attr| attr.is_a?(Hash) ? attr.stringify_keys : attr.to_s }
      end
      @model_class.paper_trail_options[:meta] ||= {}
    end
  end
end
0893ce3544bb45854429c4324f7e6d6ae90ac419
| 3,852 |
unless Rails.env.production?
  # Monkey-patch: re-encode the Rack::Test response body to UTF-8 before it
  # is handed to the documentation formatters.
  module RspecApiDocumentation
    class RackTestClient < ClientBase
      def response_body
        last_response.body.encode('utf-8')
      end
    end
  end
  # Values listed are the default values
  RspecApiDocumentation.configure do |config|
    # Set the application that Rack::Test uses
    # config.app = Rails.application
    # Used to provide a configuration for the specification
    # (supported only by 'open_api' format for now)
    # config.configurations_dir = Rails.root.join("doc", "configurations", "api")
    # Output folder
    # Careful! Use a dedicated folder cause its content will get deleted
    # config.docs_dir = Rails.root.join("doc", "api")
    # An array of output format(s).
    # Possible values are :json, :html, :combined_text, :combined_json,
    # :json_iodocs, :textile, :markdown, :append_json, :slate,
    # :api_blueprint, :open_api
    config.format = [:api_blueprint]
    # Location of templates
    # config.template_path = "inside of the gem"
    # Filter by example document type
    # config.filter = :all
    # Filter by example document type
    # config.exclusion_filter = nil
    # Used when adding a cURL output to the docs
    # config.curl_host = nil
    # Used when adding a cURL output to the docs
    # Allows you to filter out headers that are not needed in the cURL request,
    # such as "Host" and "Cookie". Set as an array.
    # config.curl_headers_to_filter = nil
    # By default, when these settings are nil, all headers are shown,
    # which is sometimes too chatty. Setting the parameters to an
    # array of headers will render *only* those headers.
    # Only the token-auth headers are relevant to API consumers here.
    config.request_headers_to_include = %w[access-token uid client]
    config.response_headers_to_include = %w[access-token expiry token-type uid client]
    # By default examples and resources are ordered by description. Set to true keep
    # the source order.
    # config.keep_source_order = false
    # Change the name of the API on index pages
    config.api_name = 'Rails API Template'
    # Change the description of the API on index pages
    # config.api_explanation = "API Description"
    # Redefine what method the DSL thinks is the client
    # This is useful if you need to `let` your own client, most likely a model.
    # config.client_method = :client
    # Change the IODocs writer protocol
    # config.io_docs_protocol = "http"
    # You can define documentation groups as well. A group allows you generate multiple
    # sets of documentation.
    # config.define_group :public do |config|
    #   # By default the group's doc_dir is a subfolder under the parent group, based
    #   # on the group's name.
    #   config.docs_dir = Rails.root.join("doc", "api", "public")
    #   # Change the filter to only include :public examples
    #   config.filter = :public
    # end
    # Change how the post body is formatted by default, you can still override by `raw_post`
    # Can be :json, :xml, or a proc that will be passed the params
    config.request_body_formatter = :json
    # Change how the response body is formatted by default
    # Is proc that will be called with the response_content_type & response_body
    # by default response_content_type of `application/json` are pretty formated.
    # config.response_body_formatter =
    #  Proc.new { |response_content_type, response_body| response_body }
    # Change the embedded style for HTML output. This file will not be processed by
    # RspecApiDocumentation and should be plain CSS.
    # config.html_embedded_css_file = nil
    # Removes the DSL method `status`, this is required if you have a parameter named status
    # config.disable_dsl_status!
    # Removes the DSL method `method`, this is required if you have a parameter named method
    # config.disable_dsl_method!
  end
end
| 38.138614 | 92 | 0.704829 |
ab0548804443ee27e5de90505c7506d1f24ffb5d
| 53 |
# Engine-local routes for QuickSearchSummonSearcher. No routes are defined
# yet; the empty block is kept as the conventional mount point for additions.
QuickSearchSummonSearcher::Engine.routes.draw do
end
| 17.666667 | 48 | 0.867925 |
79db352727f2929e02974360a15de931f2c22091
| 420 |
require "rails_helper"
describe Deployment::Credentials do
  # NOTE(review): this example exercises Credentials#setup! against the real
  # filesystem (it writes under ./tmp) rather than using stubs.
  it "correctly sets up the environment" do
    root = "#{Dir.pwd}/tmp"
    credentials = Deployment::Credentials.new(root)
    expect { credentials.setup! }.to_not raise_error
    # setup! is expected to materialise a .netrc plus an SSH config and key.
    expect(File.exist?("#{root}/.netrc")).to eq(true)
    expect(File.exist?("#{root}/.ssh/config")).to eq(true)
    expect(File.exist?("#{root}/.ssh/id_rsa")).to eq(true)
  end
end
| 30 | 58 | 0.67619 |
01020ca578ef7f5d044d9da77e9e3a29a3ae0487
| 1,024 |
# frozen_string_literal: true
lib = File.expand_path('lib', __dir__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'Metrc/version'
Gem::Specification.new do |spec|
  spec.name = 'Metrc'
  spec.version = Metrc::VERSION
  spec.authors = ['Emanuele Tozzato']
  spec.email = ['[email protected]']
  spec.summary = 'Pull and push lab data between a LIMS and Metrc'
  spec.description = 'A ruby gem to pull lab tests and push results to Metrc'
  spec.homepage = 'https://www.cannabisLIMS.com/'
  spec.license = 'MIT'
  # Package every git-tracked file except tests, specs and features.
  spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
  spec.bindir = 'exe'
  spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ['lib']
  # NOTE(review): rake is pinned to an old 10.1.x series — confirm this is
  # intentional before upgrading.
  spec.add_dependency 'bundler', '~> 1.11'
  spec.add_dependency 'httparty'
  spec.add_dependency 'rake', '~> 10.1.0'
  spec.add_dependency 'rspec', '~> 3.0'
  spec.add_development_dependency 'pry'
end
| 36.571429 | 104 | 0.645508 |
878351fc44a55ef0067e94b05eee6d39b22e2370
| 74 |
module RedisDataViewer
  # Base class for all background jobs in the RedisDataViewer engine.
  # Currently adds no behaviour beyond ActiveJob::Base.
  class ApplicationJob < ActiveJob::Base
  end
end
| 14.8 | 40 | 0.797297 |
87c7f4bc6483bdf61e0efae1eee00e70931678a0
| 1,923 |
module Dhl
  module Intraship
    # Abstract address value object that serialises itself into the CIS XML
    # fragments (Company/Address/Communication) used by the DHL Intraship API.
    # Use one of the concrete subclasses (PersonAddress / CompanyAddress).
    class Address
      attr_accessor :street, :house_number, :street_additional,
                    :zip, :city, :country_code, :contact_person, :email, :phone
      # Accepts a hash of attributes; unknown keys are silently ignored
      # (only keys with a matching writer are assigned).
      def initialize(attributes = {})
        attributes.each do |key, value|
          setter = :"#{key.to_s}="
          if self.respond_to?(setter)
            self.send(setter, value)
          end
        end
      end
      # NOTE(review): `company` is not defined on this class — it presumably
      # comes from a subclass (e.g. CompanyAddress); calling company? on a
      # bare Address would raise NoMethodError. Confirm.
      def company?
        !self.company.blank?
      end
      # @raise [RuntimeError] unless the code is a two-character ISO-3166 code
      def country_code=(country_code)
        raise "Country code must be an ISO-3166 two digit code" unless country_code.length == 2
        @country_code = country_code
      end
      # Appends the Company/Address/Communication CIS elements to the given
      # XML builder. Zip handling is country-specific (germany/england/other).
      def append_to_xml(xml)
        xml.Company do |xml|
          company_xml(xml)
        end
        xml.Address do |xml|
          xml.cis(:streetName, street)
          xml.cis(:streetNumber, house_number)
          xml.cis(:careOfName, street_additional) unless street_additional.blank?
          xml.cis(:Zip) do |xml|
            if country_code == 'DE'
              xml.cis(:germany, zip)
            elsif ['GB','UK'].include?(country_code)
              xml.cis(:england, zip)
            else
              xml.cis(:other, zip)
            end
          end
          xml.cis(:city, city)
          xml.cis(:Origin) do |xml|
            xml.cis(:countryISOCode, country_code)
          end
        end
        xml.Communication do |xml|
          xml.cis(:phone, self.phone) unless self.phone.blank?
          xml.cis(:email, self.email) unless self.email.blank?
          xml.cis(:contactPerson, contact_person.blank? ? "" : contact_person)
        end
      end
      protected
      # Subclass hook: renders the Company element's inner XML.
      def company_xml(xml)
        raise "Use one of the two subclasses: PersonAddress or CompanyAddress!"
      end
      # Subclass hook (currently unused by append_to_xml above).
      def communication_xml(xml)
        raise "Use one of the two subclasses: PersonAddress or CompanyAddress!"
      end
    end
  end
end
| 29.584615 | 95 | 0.566303 |
08a5076351888e6b473df5de876459731e0486ca
| 1,470 |
# encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::ADHybridHealthService::Mgmt::V2014_01_01
  module Models
    #
    # The list of hotfixes installed in the server.
    #
    class Hotfixes
      include MsRestAzure
      # @return [Array<Hotfix>] The value returned by the operation.
      attr_accessor :value
      #
      # Mapper for Hotfixes class as Ruby Hash.
      # This will be used for serialization/deserialization.
      #
      # NOTE: this file is AutoRest-generated (see file header); hand edits
      # will be lost on regeneration.
      def self.mapper()
        {
          client_side_validation: true,
          required: false,
          serialized_name: 'Hotfixes',
          type: {
            name: 'Composite',
            class_name: 'Hotfixes',
            model_properties: {
              value: {
                client_side_validation: true,
                required: false,
                serialized_name: 'value',
                type: {
                  name: 'Sequence',
                  element: {
                      client_side_validation: true,
                      required: false,
                      serialized_name: 'HotfixElementType',
                      type: {
                        name: 'Composite',
                        class_name: 'Hotfix'
                      }
                  }
                }
              }
            }
          }
        }
      end
    end
  end
end
| 26.25 | 70 | 0.491837 |
03626a2c2e70fad79826716dd40e0d185a6bfb74
| 1,069 |
#
# Cookbook:: my_cookbook
# Spec:: default
#
# Copyright:: 2018, The Authors, All Rights Reserved.
require 'spec_helper'
describe 'my_cookbook::pandpgw003' do
  context 'When all attributes are default, on Ubuntu 16.04' do
    let(:chef_run) do
      # for a complete list of available platforms and versions see:
      # https://github.com/customink/fauxhai/blob/master/PLATFORMS.md
      runner = ChefSpec::ServerRunner.new(platform: 'ubuntu', version: '16.04')
      runner.converge(described_recipe)
    end
    # Smoke test: a clean converge proves the recipe compiles and all of its
    # resources evaluate on this platform.
    it 'converges successfully' do
      expect { chef_run }.to_not raise_error
    end
  end
  context 'When all attributes are default, on CentOS 7.4.1708' do
    let(:chef_run) do
      # for a complete list of available platforms and versions see:
      # https://github.com/customink/fauxhai/blob/master/PLATFORMS.md
      runner = ChefSpec::ServerRunner.new(platform: 'centos', version: '7.4.1708')
      runner.converge(described_recipe)
    end
    it 'converges successfully' do
      expect { chef_run }.to_not raise_error
    end
  end
end
| 29.694444 | 82 | 0.699719 |
01e9d8404954eeaa8ee2bf0b8595a2185d7658f3
| 45 |
require "google/cloud/redis/v1beta1/version"
| 22.5 | 44 | 0.822222 |
62668a375a566ece4de0777a2983845d8ff99584
| 748 |
require "rails_helper"
describe Cards::ChatOnlineComponent, type: "component" do
  subject { render_inline(instance) && page }
  # Freeze time (in the Europe/London zone) for every example so any
  # time-dependent rendering inside the component is deterministic.
  around do |example|
    travel_to(time.in_time_zone("London")) { example.run }
  end
  # The component is rendered with an empty card hash.
  let(:instance) { described_class.new card: {} }
  let(:time) { DateTime.new(2021, 1, 1, 12, 30) }
  it { is_expected.to have_css ".card" }
  it { is_expected.to have_css ".card.card--no-border" }
  it { is_expected.to have_css ".card header", text: "Get the answers you need" }
  it { is_expected.to have_css "img[alt='A photograph of a child with their hand raised']" }
  it { is_expected.to have_content "If you have questions" }
  it { is_expected.to have_css("span[data-controller='talk-to-us']", text: "Chat online") }
end
| 37.4 | 92 | 0.69385 |
d52740c5c3faea8c411e26a2c8afd5041cc7525e
| 746 |
require 'test_helper'
class StaticPagesControllerTest < ActionDispatch::IntegrationTest
  # Runs before every test; the base title is shared by all page titles.
  def setup
    @base_title = "Ruby on Rails Tutorial Sample App" # instance variable
  end

  test "should get home" do
    get root_path
    assert_response :success
    # The home page uses the bare base title (no page-specific prefix), so no
    # string interpolation is needed here.
    assert_select "title", @base_title
  end

  test "should get help" do
    get help_path
    assert_response :success
    assert_select "title", "Help | #{@base_title}"
  end

  test "should get about" do
    get about_path
    assert_response :success
    assert_select "title", "About | #{@base_title}"
  end

  test "should get contact" do
    get contact_path
    assert_response :success
    assert_select "title", "Contact | #{@base_title}"
  end
end
| 22.606061 | 72 | 0.702413 |
1a8a24aa65907869e4abca398cc6a84e6033861d
| 9,749 |
require 'set'
require 'date'
# Parses standard 5-field cron expressions (plus 6-field with leading seconds,
# 7-field with trailing year, and Vixie-cron @shortcuts) and computes the next
# or previous occurrence of the "job" relative to a given time.
#
class CronParser
  # Internal "mutable" time representation: a plain bag of calendar fields
  # that the nudge_* methods can adjust independently before the result is
  # converted back into a real Time via the injected +time_source+.
  class InternalTime
    attr_accessor :year, :month, :day, :hour, :min, :sec
    attr_accessor :time_source

    def initialize(time = Time.now, time_source = Time)
      @year = time.year
      @month = time.month
      @day = time.day
      @hour = time.hour
      @min = time.min
      @sec = time.sec
      @time_source = time_source
    end

    # Materialise the current field values as a Time in the local zone.
    def to_time
      time_source.local(@year, @month, @day, @hour, @min, @sec)
    end

    def inspect
      [year, month, day, hour, min, sec].inspect
    end
  end

  # Month and weekday names accepted in place of numbers.
  SYMBOLS = {
    "jan" => "1",
    "feb" => "2",
    "mar" => "3",
    "apr" => "4",
    "may" => "5",
    "jun" => "6",
    "jul" => "7",
    "aug" => "8",
    "sep" => "9",
    "oct" => "10",
    "nov" => "11",
    "dec" => "12",
    "sun" => "0",
    "mon" => "1",
    "tue" => "2",
    "wed" => "3",
    "thu" => "4",
    "fri" => "5",
    "sat" => "6"
  }

  # +time_source+ is injectable for testing: anything that responds to
  # .now and .local (e.g. ActiveSupport's Time.zone) will do.
  #
  # @raise [ArgumentError] if +source+ is not a valid cron expression
  def initialize(source, time_source = Time)
    @source = interpret_vixieisms(source)
    @time_source = time_source
    validate_source
  end

  # Expands Vixie-cron @shortcuts (@daily, @hourly, ...) into plain
  # five-field expressions; any other spec is passed through untouched.
  def interpret_vixieisms(spec)
    case spec
    when '@reboot'
      raise ArgumentError, "Can't predict last/next run of @reboot"
    when '@yearly', '@annually'
      '0 0 1 1 *'
    when '@monthly'
      '0 0 1 * *'
    when '@weekly'
      '0 0 * * 0'
    when '@daily', '@midnight'
      '0 0 * * *'
    when '@hourly'
      '0 * * * *'
    when '@minutely'
      '* * * * *'
    else
      spec
    end
  end

  # Returns the next occurrence after the given date. With +num+ > 1,
  # returns an ascending array of the next +num+ occurrences.
  #
  # Each field is checked from coarsest (year) to finest (minute); whenever a
  # field doesn't match, it is nudged forward and all finer fields are reset
  # so they get recomputed from their smallest allowed values.
  def next(now = @time_source.now, num = 1)
    t = InternalTime.new(now, @time_source)
    unless time_specs[:year][0].include?(t.year)
      nudge_year(t)
      t.month = 0
    end
    unless time_specs[:month][0].include?(t.month)
      nudge_month(t)
      t.day = 0
    end
    unless interpolate_weekdays(t.year, t.month)[0].include?(t.day)
      nudge_date(t)
      t.hour = -1
    end
    unless time_specs[:hour][0].include?(t.hour)
      nudge_hour(t)
      t.min = -1
    end
    unless time_specs[:minute][0].include?(t.min)
      nudge_minute(t)
      t.sec = -1
    end
    # always nudge the second so the result is strictly after +now+
    nudge_second(t)
    t = t.to_time
    if num > 1
      recursive_calculate(:next, t, num)
    else
      t
    end
  end

  # Returns the last occurrence before the given date. With +num+ > 1,
  # returns a descending array of the previous +num+ occurrences.
  def last(now = @time_source.now, num = 1)
    t = InternalTime.new(now, @time_source)
    unless time_specs[:year][0].include?(t.year)
      nudge_year(t, :last)
      t.month = 13
    end
    unless time_specs[:month][0].include?(t.month)
      nudge_month(t, :last)
      t.day = 32
    end
    if t.day == 32 || !interpolate_weekdays(t.year, t.month)[0].include?(t.day)
      nudge_date(t, :last)
      t.hour = 24
    end
    unless time_specs[:hour][0].include?(t.hour)
      nudge_hour(t, :last)
      t.min = 60
    end
    unless time_specs[:minute][0].include?(t.min)
      nudge_minute(t, :last)
      t.sec = 60
    end
    # always nudge the second so the result is strictly before +now+
    nudge_second(t, :last)
    t = t.to_time
    if num > 1
      recursive_calculate(:last, t, num)
    else
      t
    end
  end

  # Matches "N", "N-M" and "N-M/S" subelements of a cron field.
  SUBELEMENT_REGEX = %r{^(\d+)(-(\d+)(/(\d+))?)?$}

  # Parses one comma-separated cron field (e.g. "*/15" or "1-5,8") against
  # its allowed range. Returns [Set, sorted Array, original element string]:
  # the Set gives O(1) membership tests, the Array ordered scanning.
  def parse_element(elem, allowed_range)
    values = elem.split(',').map do |subel|
      if subel =~ /^\*/
        # "*" or "*/step"
        step = subel.length > 1 ? subel[2..-1].to_i : 1
        stepped_range(allowed_range, step)
      elsif subel =~ /^\?$/ && (allowed_range == (1..31) || allowed_range == (0..6))
        # "?" is only legal for the day-of-month / day-of-week fields and
        # means "any". It is always a single character, so the step is 1.
        # (The original code recomputed a step here, but that branch could
        # never produce anything other than 1.)
        stepped_range(allowed_range, 1)
      else
        if SUBELEMENT_REGEX === subel
          if $5 # range with step, e.g. "10-30/5"
            stepped_range($1.to_i..$3.to_i, $5.to_i)
          elsif $3 # range without step, e.g. "10-30"
            stepped_range($1.to_i..$3.to_i, 1)
          else # just a numeric
            [$1.to_i]
          end
        else
          raise ArgumentError, "Bad Vixie-style specification #{subel}"
        end
      end
    end.flatten.sort
    [Set.new(values), values, elem]
  end

  protected

  # Builds [time, meth(time), meth(meth(time)), ...] of length +num+,
  # where +meth+ is :next or :last.
  def recursive_calculate(meth, time, num)
    results = [time]
    (num - 1).times { results << send(meth, results.last) }
    results
  end

  # returns a list of days which do both match time_spec[:dom] or time_spec[:dow]
  def interpolate_weekdays(year, month)
    @_interpolate_weekdays_cache ||= {}
    @_interpolate_weekdays_cache["#{year}-#{month}"] ||= interpolate_weekdays_without_cache(year, month)
  end

  def interpolate_weekdays_without_cache(year, month)
    t = Date.new(year, month, 1)
    valid_mday, _, mday_field = time_specs[:dom]
    valid_wday, _, wday_field = time_specs[:dow]
    # Careful, if both DOW and DOM fields are non-wildcard,
    # then we only need to match *one* for cron to run the job:
    if not (mday_field == '*' and wday_field == '*')
      valid_mday = [] if mday_field == '*'
      valid_wday = [] if wday_field == '*'
    end
    # Careful: crontabs may use either 0 or 7 for Sunday:
    valid_wday << 0 if valid_wday.include?(7)
    result = []
    while t.month == month
      result << t.mday if valid_mday.include?(t.mday) || valid_wday.include?(t.wday)
      t = t.succ
    end
    [Set.new(result), result]
  end

  def nudge_year(t, dir = :next)
    spec = time_specs[:year][1]
    next_value = find_best_next(t.year, spec, dir)
    t.year = next_value || (dir == :next ? spec.first : spec.last)
    # We've exhausted all years in the range
    raise "No matching dates exist" if next_value.nil?
  end

  def nudge_month(t, dir = :next)
    spec = time_specs[:month][1]
    next_value = find_best_next(t.month, spec, dir)
    t.month = next_value || (dir == :next ? spec.first : spec.last)
    nudge_year(t, dir) if next_value.nil?
    # we changed the month, so its likely that the date is incorrect now
    valid_days = interpolate_weekdays(t.year, t.month)[1]
    t.day = dir == :next ? valid_days.first : valid_days.last
  end

  def date_valid?(t, dir = :next)
    interpolate_weekdays(t.year, t.month)[0].include?(t.day)
  end

  def nudge_date(t, dir = :next, can_nudge_month = true)
    spec = interpolate_weekdays(t.year, t.month)[1]
    next_value = find_best_next(t.day, spec, dir)
    t.day = next_value || (dir == :next ? spec.first : spec.last)
    nudge_month(t, dir) if next_value.nil? && can_nudge_month
  end

  def nudge_hour(t, dir = :next)
    spec = time_specs[:hour][1]
    next_value = find_best_next(t.hour, spec, dir)
    t.hour = next_value || (dir == :next ? spec.first : spec.last)
    nudge_date(t, dir) if next_value.nil?
  end

  def nudge_minute(t, dir = :next)
    spec = time_specs[:minute][1]
    next_value = find_best_next(t.min, spec, dir)
    t.min = next_value || (dir == :next ? spec.first : spec.last)
    nudge_hour(t, dir) if next_value.nil?
  end

  def nudge_second(t, dir = :next)
    spec = time_specs[:second][1]
    next_value = find_best_next(t.sec, spec, dir)
    t.sec = next_value || (dir == :next ? spec.first : spec.last)
    nudge_minute(t, dir) if next_value.nil?
  end

  # Lazily parses the source into one [Set, Array, String] triple per field.
  # 5 tokens: classic cron; 6 tokens: leading seconds field;
  # 7 tokens: seconds plus a trailing year field.
  def time_specs
    @time_specs ||= begin
      tokens = substitute_parse_symbols(@source).split(/\s+/)
      if tokens.count == 5
        {
          :second => parse_element("0", 0..59), #second
          :minute => parse_element(tokens[0], 0..59), #minute
          :hour => parse_element(tokens[1], 0..23), #hour
          :dom => parse_element(tokens[2], 1..31), #DOM
          :month => parse_element(tokens[3], 1..12), #mon
          :dow => parse_element(tokens[4], 0..6), #DOW
          :year => parse_element("*", 2000..2050) #year
        }
      elsif tokens.count == 6
        {
          :second => parse_element(tokens[0], 0..59), #second
          :minute => parse_element(tokens[1], 0..59), #minute
          :hour => parse_element(tokens[2], 0..23), #hour
          :dom => parse_element(tokens[3], 1..31), #DOM
          :month => parse_element(tokens[4], 1..12), #mon
          :dow => parse_element(tokens[5], 0..6), #DOW
          :year => parse_element("*", 2000..2050) #year
        }
      else
        {
          :second => parse_element(tokens[0], 0..59), #second
          :minute => parse_element(tokens[1], 0..59), #minute
          :hour => parse_element(tokens[2], 0..23), #hour
          :dom => parse_element(tokens[3], 1..31), #DOM
          :month => parse_element(tokens[4], 1..12), #mon
          :dow => parse_element(tokens[5], 0..6), #DOW
          :year => parse_element(tokens[6], 2000..2050) #year
        }
      end
    end
  end

  # Replaces month/weekday names (jan, mon, ...) with their numeric values.
  def substitute_parse_symbols(str)
    SYMBOLS.inject(str.downcase) do |s, (symbol, replacement)|
      s.gsub(symbol, replacement)
    end
  end

  # Expands a range with a step into the explicit list of values,
  # e.g. stepped_range(0..59, 15) => [0, 15, 30, 45].
  def stepped_range(rng, step = 1)
    len = rng.last - rng.first
    num = len.div(step)
    result = (0..num).map { |i| rng.first + step * i }
    result.pop if result[-1] == rng.last and rng.exclude_end?
    result
  end

  # returns the smallest element from allowed which is greater than current
  # (or, for dir == :last, the largest element smaller than current)
  # returns nil if no matching value was found
  def find_best_next(current, allowed, dir)
    if dir == :next
      allowed.sort.find { |val| val > current }
    else
      allowed.sort.reverse.find { |val| val < current }
    end
  end

  # @raise [ArgumentError] unless the source splits into 5, 6 or 7 fields
  def validate_source
    unless @source.respond_to?(:split)
      raise ArgumentError, 'not a valid cronline'
    end
    source_length = @source.split(/\s+/).length
    unless (source_length >= 5 && source_length < 8)
      raise ArgumentError, 'not a valid cronline'
    end
  end
end
| 26.856749 | 104 | 0.580778 |
ed53bbeceb64edfa79032ae05d1faacc2d5909a5
| 410 |
# == Schema Information
#
# Table name: statistics
#
# id :integer not null, primary key
# lang :string
# source :string
# target :string
# timestamp :integer
# url :string
# created_at :datetime not null
# updated_at :datetime not null
# session_id :string
# source_id :integer
# target_id :integer
#
# Raw translation-statistics record. The columns are described in the schema
# annotation above; no behaviour is added beyond the ActiveRecord defaults.
class Statistic < ApplicationRecord
end
| 21.578947 | 53 | 0.617073 |
bfa6e02300ae5abdf280a0c0aa0e151adc2c63cd
| 6,849 |
# encoding: UTF-8
require 'pathname'
class DocuBot::Bundle
attr_reader :toc, :extras, :glossary, :index, :source, :global
attr_reader :internal_links, :external_links, :file_links, :broken_links
attr_reader :pages, :pages_by_title, :page_by_file_path, :page_by_html_path
def initialize( source_directory )
@source = File.expand_path( source_directory )
raise "DocuBot cannot find directory #{@source}. Exiting." unless File.exists?( @source )
@pages = []
@extras = []
@pages_by_title = Hash.new{ |h,k| h[k]=[] }
@page_by_file_path = {}
@page_by_html_path = {}
@glossary = DocuBot::Glossary.new( self, @source/'_glossary' )
@index = DocuBot::Index.new( self )
@toc = DocuBot::LinkTree::Root.new( self )
Dir.chdir( @source ) do
# This might be nil; MetaSection.new is OK with that.
index_file = Dir[ *DocuBot::Converter.types.map{|t| "index.#{t}"} ][ 0 ]
@global = DocuBot::MetaSection.new( {:title=>'DocuBot Documentation'}, index_file )
@global.glossary = @glossary
@global.index = @index
@global.toc = @toc
files_and_folders = Dir[ '**/*' ]
# index files are handled by Page.new for a directory; no sections for special folders (but process contents)
files_and_folders.reject!{ |path| name = File.basename( path ); name =~ /^(?:index\.[^.]+)$/ }
# All files in the _templates directory should be ignored
files_and_folders.reject!{ |f| f =~ /(?:^|\/)_/ }
files_and_folders.concat Dir[ '_static/**/*' ].reject{ |f| File.directory?(f) }
files_and_folders.concat Dir[ '_glossary/**/*' ].reject{ |f| File.directory?(f) }
@global.ignore.as_list.each do |glob|
files_and_folders = files_and_folders - Dir[glob]
end
# Sort by leading digits, if present, interpreted as numbers
files_and_folders.sort_by!{ |path| path.split(%r{[/\\]}).map{ |name| name.tr('_',' ').scan(/\A(?:(\d+)\s+)?(.+)/)[0].tap{ |parts| parts[0] = parts[0] ? parts[0].to_i : 9e9 } } }
create_pages( files_and_folders )
end
# puts @toc.to_txt
# Regenerate pages whose templates require full scaning to have completed
# TODO: make this based off of a metasection attribute.
@pages.select do |page|
%w[ glossary ].include?( page.template )
end.each do |page|
page.dirty_template
end
# TODO: make this optional via global variable
validate_links
warn_for_broken_links
# TODO: make this optional via global variable
warn_for_missing_glossary_terms
find_page_collisions
end
# Instantiate a DocuBot::Page for every entry in +files_and_folders+ and
# register each page with the bundle's lookup hashes, index, glossary and toc.
# Paths with no registered converter (and non-directories) are recorded in
# @extras to be copied through verbatim.
def create_pages( files_and_folders )
files_and_folders.each do |path|
extension = File.extname( path )[ 1..-1 ]
# A directory, or a file whose extension has a converter, becomes a page.
item_is_page = File.directory?(path) || DocuBot::Converter.by_type[extension]
if !item_is_page
@extras << path
else
page = DocuBot::Page.new( self, path )
next if page.skip
# Pages under _glossary/ define terms; they are not part of the page tree.
if path =~ %r{^_glossary/}
@glossary << page
else
@pages << page
@page_by_file_path[path] = page
@page_by_html_path[page.html_path] = page
@pages_by_title[page.title] << page
@index.process_page( page )
# Add the page (and any sub-links) to the toc
unless page.hide
@toc.add_to_link_hierarchy( page.title, page.html_path, page )
# A page may request extra toc entries: '#id' anchors are looked up by
# CSS id; any other string is matched against the page's text content.
page.toc.as_list.each do |id_or_text|
if id_or_text[0..0] == '#'
if ele = page.nokodoc.at_css(id_or_text)
@toc.add_to_link_hierarchy( ele.inner_text, page.html_path + id_or_text, page )
else
warn "Could not find requested toc anchor #{id_or_text.inspect} on #{page.html_path}"
end
else
# TODO: Find an elegant way to handle quotes in XPath, for speed
# Kramdown 'helpfully' converts quotes in the body to be curly, breaking direct text matching
quotes = /['‘’"“”]+/
quoteless = id_or_text.gsub(quotes,'')
if t=page.nokodoc.xpath('text()|.//text()').find{ |t| t.content.gsub(quotes,'')==quoteless }
ele = t.parent
# FIXME: better unique ID generator
ele['id'] = "item-#{Time.now.to_i}-#{rand 999999}" unless ele['id']
@toc.add_to_link_hierarchy( id_or_text, page.html_path + '#' + ele['id'], page )
else
warn "Could not find requested toc anchor for #{id_or_text.inspect} on #{page.html_path}"
end
end
end
end
end
end
end
end
# Classify every <a href> on every page into one of four buckets:
# @external_links (scheme-prefixed URLs), @internal_links (anchors/pages that
# resolve), @file_links (existing non-page files) and @broken_links.
# Each bucket maps page => [href, ...].
def validate_links
@external_links = Hash.new{ |h,k| h[k]=[] }
@internal_links = Hash.new{ |h,k| h[k]=[] }
@file_links = Hash.new{ |h,k| h[k]=[] }
@broken_links = Hash.new{ |h,k| h[k]=[] }
# chdir so relative file links can be checked with File.file? directly.
Dir.chdir( @source ) do
@pages.each do |page|
# TODO: set the xpath to .//a/@href once this is fixed: http://github.com/tenderlove/nokogiri/issues/#issue/213
page.nokodoc.xpath('.//a').each do |a|
next unless href = a['href']
href = CGI.unescape(href)
# Anything with a URI scheme (http:, mailto:, ...) is external.
if href=~%r{\A[a-z]+:}i
@external_links[page] << href
else
id = href[/#([a-z][\w.:-]*)?/i]
file = href.sub(/#.*/,'')
# NOTE(review): String#/ below appears to be a project extension that
# joins path segments — confirm it is defined elsewhere in DocuBot.
path = file.empty? ? page.html_path : Pathname.new( File.dirname(page.html_path) / file ).cleanpath.to_s
if target=@page_by_html_path[path]
# Link to a known page: also verify the fragment anchor if present.
if !id || id == "#" || target.nokodoc.at_css(id)
@internal_links[page] << href
else
warn "Could not find internal link for #{id.inspect} on #{page.html_path.inspect}" if id
@broken_links[page] << href
end
else
# Not a page: accept plain files that are not shadowed by a page source.
if File.file?(path) && !@page_by_file_path[path]
@file_links[page] << href
else
@broken_links[page] << href
end
end
end
end
end
end
end
# Emit one warning line per broken link collected by #validate_links.
def warn_for_broken_links
  @broken_links.each do |page, links|
    links.each { |link| warn "Broken link on #{page.file}: '#{link}'" }
  end
end
# Warn once for each glossary term that was referenced but never defined,
# then list every page on which the reference appeared.
def warn_for_missing_glossary_terms
  @glossary.missing_terms.each do |term, pages|
    warn "Glossary term '#{term}' never defined."
    pages.each { |page| warn "...seen on #{page.file}." }
  end
end
# Raise PageCollision when two or more pages would be written to the same
# output HTML path; the message lists every colliding path with the titles
# and source files involved. Returns nil when there are no collisions.
def find_page_collisions
  by_path = @pages.group_by(&:html_path)
  collisions = by_path.select { |_path, pages| pages.size > 1 }
  return if collisions.empty?
  details = collisions.map do |path, pages|
    "#{path}: #{pages.map { |page| "'#{page.title}' (#{page.file})" }.join(', ')}"
  end
  raise PageCollision.new, details.join("\n")
end
# Render this bundle using the writer registered under +writer_type+
# (case-insensitive). +destination+ is passed straight to the writer.
# Raises RuntimeError when no such writer is installed.
def write(writer_type, destination = nil)
  writer_class = DocuBot::Writer.by_type[writer_type.to_s.downcase]
  unless writer_class
    known = DocuBot::Writer::INSTALLED_WRITERS.join ', '
    raise "Unknown writer '#{writer_type}'; available types: #{known}"
  end
  writer_class.new(self).write(destination)
end
end
class DocuBot::Bundle::PageCollision < RuntimeError; end
| 34.074627 | 180 | 0.635859 |
38b9345e99d371cd89037798b77772e4a9119356
| 203 |
# A minimal user value object with a name and an email address.
class User
  attr_accessor :name, :email

  # attributes - Hash with optional :name and :email keys.
  def initialize(attributes = {})
    @name  = attributes[:name]
    @email = attributes[:email]
  end

  # Returns the user formatted as a mail-header style string: "Name <email>".
  def formatted_email
    "#{name} <#{email}>"
  end
end
| 16.916667 | 33 | 0.630542 |
281d8ae93981867900437479f7b529bd0010e192
| 171 |
# Draper decorator for Note records.
class NoteDecorator < Draper::Decorator
  delegate_all

  # Serialize only the attributes the API exposes: id, title and body.
  def as_json(*)
    { id: object.id, title: object.title, body: object.body }
  end
end
28c0334b860739bb23c1289bc78d9e8ad328ec37
| 11,594 |
# frozen_string_literal: true
# Run tests using:
# BUNDLE_GEMFILE=gemfiles/rails_5_2.gemfile bundle exec rake test
# BUNDLE_GEMFILE=gemfiles/rails_6_0.gemfile bundle exec rake test
require 'test_helper'
# Exercises the attached-file validators (presence, content type, size,
# file count, dimensions, aspect ratio) against the dummy-app models.
# Fixture-building helpers (dummy_file, pdf_file, ...) are defined at the
# bottom of this file.
class ActiveStorageValidations::Test < ActiveSupport::TestCase
  test 'truth' do
    assert_kind_of Module, ActiveStorageValidations
  end

  test 'validates presence' do
    u = User.new(name: 'John Smith')
    assert !u.valid?
    assert_equal u.errors.full_messages, ["Avatar can't be blank", "Photos can't be blank"]

    u = User.new(name: 'John Smith')
    u.avatar.attach(dummy_file)
    assert !u.valid?
    assert_equal u.errors.full_messages, ["Photos can't be blank"]

    u = User.new(name: 'John Smith')
    u.photos.attach(dummy_file)
    assert !u.valid?
    assert_equal u.errors.full_messages, ["Avatar can't be blank"]
  end

  test 'validates content type' do
    u = User.new(name: 'John Smith')
    u.avatar.attach(dummy_file)
    u.image_regex.attach(dummy_file)
    u.photos.attach(bad_dummy_file)
    assert !u.valid?
    assert_equal u.errors.full_messages, ['Photos has an invalid content type']

    u = User.new(name: 'John Smith')
    u.avatar.attach(bad_dummy_file)
    u.image_regex.attach(dummy_file)
    u.photos.attach(dummy_file)
    assert !u.valid?
    assert_equal u.errors.full_messages, ['Avatar has an invalid content type']
    # Error details should carry the offending and the authorized types.
    assert_equal u.errors.details, avatar: [
      {
        error: :content_type_invalid,
        authorized_types: 'PNG',
        content_type: 'text/plain'
      }
    ]

    u = User.new(name: 'John Smith')
    u.avatar.attach(dummy_file)
    u.image_regex.attach(dummy_file)
    u.photos.attach(pdf_file) # Should be handled by regex match.
    assert u.valid?

    u = User.new(name: 'John Smith')
    u.avatar.attach(dummy_file)
    u.image_regex.attach(bad_dummy_file)
    u.photos.attach(dummy_file)
    assert !u.valid?
    assert_equal u.errors.full_messages, ['Image regex has an invalid content type']

    u = User.new(name: 'John Smith')
    u.avatar.attach(bad_dummy_file)
    u.image_regex.attach(bad_dummy_file)
    u.photos.attach(bad_dummy_file)
    assert !u.valid?
    assert_equal u.errors.full_messages, ['Avatar has an invalid content type', 'Photos has an invalid content type', 'Image regex has an invalid content type']
  end

  # reads content type from file, not from webp_file_wrong method
  test 'webp content type 1' do
    u = User.new(name: 'John Smith')
    u.avatar.attach(webp_file_wrong)
    u.image_regex.attach(webp_file_wrong)
    u.photos.attach(webp_file_wrong)
    assert !u.valid?
    assert_equal u.errors.full_messages, ['Avatar has an invalid content type', 'Photos has an invalid content type']
  end

  # trying to attache webp file with PNG extension, but real content type is detected
  test 'webp content type 2' do
    u = User.new(name: 'John Smith')
    u.avatar.attach(webp_file)
    u.image_regex.attach(webp_file)
    u.photos.attach(webp_file)
    assert !u.valid?
    assert_equal u.errors.full_messages, ['Avatar has an invalid content type', 'Photos has an invalid content type']
  end

  test 'validates size' do
    e = Project.new(title: 'Death Star')
    e.preview.attach(big_file)
    e.small_file.attach(big_file)
    e.attachment.attach(pdf_file)
    assert !e.valid?
    assert_equal e.errors.full_messages, ['Small file size 1.6 KB is not between required range']
  end

  test 'validates number of files' do
    e = Project.new(title: 'Death Star')
    e.preview.attach(big_file)
    e.small_file.attach(dummy_file)
    e.attachment.attach(pdf_file)
    e.documents.attach(pdf_file)
    e.documents.attach(pdf_file)
    e.documents.attach(pdf_file)
    e.documents.attach(pdf_file)
    assert !e.valid?
    assert_equal e.errors.full_messages, ['Documents total number is out of range']
  end

  test 'validates number of files for Rails 6' do
    la = LimitAttachment.create(name: 'klingon')
    la.files.attach([pdf_file, pdf_file, pdf_file, pdf_file, pdf_file, pdf_file])

    assert !la.valid?
    assert_equal 6, la.files.count
    # Rails 6 no longer persists blobs for an invalid record.
    if Rails.version < "6.0.0"
      assert_equal 6, la.files_blobs.count
    else
      assert_equal 0, la.files_blobs.count
    end
    assert_equal ['Files total number is out of range'], la.errors.full_messages

    if Rails.version < "6.0.0"
      la.files.first.purge
      la.files.first.purge
      la.files.first.purge
      la.files.first.purge
    end
    assert !la.valid?
    assert_equal ['Files total number is out of range'], la.errors.full_messages
  end

  test 'validates number of files v2' do
    la = LimitAttachment.create(name: 'klingon')
    la.files.attach([pdf_file, pdf_file, pdf_file])

    assert la.valid?
    assert_equal 3, la.files.count
    assert la.save
    la.reload

    assert_equal 3, la.files_blobs.count
    la.files.first.purge
    assert la.valid?
    la.reload
    assert_equal 2, la.files_blobs.count
  end

  test 'validates number of files v3' do
    la = LimitAttachment.create(name: 'klingon')
    la.files.attach([pdf_file, pdf_file, pdf_file, pdf_file, pdf_file])

    assert !la.valid?
    assert_equal 5, la.files.count
    assert !la.save
  end

  test 'dimensions and is image' do
    e = OnlyImage.new
    e.image.attach(html_file)
    assert !e.valid?
    assert_equal e.errors.full_messages, ["Image is not a valid image", "Image has an invalid content type"]

    e = OnlyImage.new
    e.image.attach(image_1920x1080_file)
    assert e.valid?

    e = OnlyImage.new
    e.image.attach(pdf_file)
    assert !e.valid?
    assert e.errors.full_messages.include?("Image has an invalid content type")
  rescue Exception => ex
    # Debug aid: surface the full backtrace before re-raising.
    puts ex.message
    puts ex.backtrace.take(20).join("\n")
    raise ex
  end

  test 'dimensions test' do
    e = Project.new(title: 'Death Star')
    e.preview.attach(big_file)
    e.small_file.attach(dummy_file)
    e.attachment.attach(pdf_file)
    e.dimension_exact.attach(html_file)
    assert !e.valid?
    assert_equal e.errors.full_messages, ['Dimension exact is not a valid image']

    e = Project.new(title: 'Death Star')
    e.preview.attach(big_file)
    e.small_file.attach(dummy_file)
    e.attachment.attach(pdf_file)
    e.documents.attach(pdf_file)
    e.documents.attach(pdf_file)
    assert e.valid?

    e = Project.new(title: 'Death Star')
    e.preview.attach(big_file)
    e.small_file.attach(dummy_file)
    e.attachment.attach(pdf_file)
    e.dimension_exact.attach(image_150x150_file)
    assert e.valid?, 'Dimension exact: width and height must be equal to 150 x 150 pixel.'

    e = Project.new(title: 'Death Star')
    e.preview.attach(big_file)
    e.small_file.attach(dummy_file)
    e.attachment.attach(pdf_file)
    e.dimension_range.attach(image_800x600_file)
    assert e.valid?, 'Dimension range: width and height must be greater than or equal to 800 x 600 pixel.'

    e = Project.new(title: 'Death Star')
    e.preview.attach(big_file)
    e.small_file.attach(dummy_file)
    e.attachment.attach(pdf_file)
    e.dimension_range.attach(image_1200x900_file)
    assert e.valid?, 'Dimension range: width and height must be less than or equal to 1200 x 900 pixel.'

    e = Project.new(title: 'Death Star')
    e.preview.attach(big_file)
    e.small_file.attach(dummy_file)
    e.attachment.attach(pdf_file)
    e.dimension_min.attach(image_800x600_file)
    assert e.valid?, 'Dimension min: width and height must be greater than or equal to 800 x 600 pixel.'

    e = Project.new(title: 'Death Star')
    e.preview.attach(big_file)
    e.small_file.attach(dummy_file)
    e.attachment.attach(pdf_file)
    e.dimension_max.attach(image_1200x900_file)
    assert e.valid?, 'Dimension max: width and height must be less than or equal to 1200 x 900 pixel.'

    e = Project.new(title: 'Death Star')
    e.preview.attach(big_file)
    e.small_file.attach(dummy_file)
    e.attachment.attach(pdf_file)
    e.dimension_images.attach([image_800x600_file, image_1200x900_file])
    assert e.valid?, 'Dimension many: width and height must be between or equal to 800 x 600 and 1200 x 900 pixel.'

    e = Project.new(title: 'Death Star')
    e.preview.attach(big_file)
    e.small_file.attach(dummy_file)
    e.attachment.attach(pdf_file)
    e.dimension_images.attach([image_800x600_file])
    e.save!

    e.dimension_images.attach([image_800x600_file])
    e.title = "Changed"
    e.save!
    e.reload
    # Was `assert e.title, "Changed"`, which always passed (second argument
    # to assert is a failure message, not an expected value).
    assert_equal "Changed", e.title
    assert_nil e.dimension_min.attachment

    blob = ActiveStorage::Blob.create_after_upload!(**image_800x600_file)
    e.dimension_min = blob.signed_id
    e.save!
    e.reload
    assert_not_nil e.dimension_min.attachment
    assert_not_nil e.dimension_min.blob.signed_id
  rescue Exception => ex
    # Debug aid: surface the full backtrace before re-raising.
    puts ex.message
    puts ex.backtrace.join("\n")
    raise ex
  end

  test 'aspect ratio validation' do
    e = RatioModel.new(name: 'Princess Leia')
    e.ratio_one.attach(image_150x150_file)
    e.ratio_many.attach([image_600x800_file])
    e.save!

    e = RatioModel.new(name: 'Princess Leia')
    e.ratio_one.attach(image_150x150_file)
    e.ratio_many.attach([image_150x150_file])
    e.save
    assert !e.valid?
    assert_equal e.errors.full_messages, ["Ratio many must be a portrait image"]

    e = RatioModel.new(name: 'Princess Leia')
    e.ratio_one.attach(image_150x150_file)
    e.ratio_many.attach([image_600x800_file])
    e.image1.attach(image_150x150_file)
    assert !e.valid?
    assert_equal e.errors.full_messages, ["Image1 must have an aspect ratio of 16x9"]

    e = RatioModel.new(name: 'Princess Leia')
    e.ratio_one.attach(html_file)
    e.ratio_many.attach([image_600x800_file])
    e.image1.attach(image_1920x1080_file)
    assert !e.valid?
    assert_equal e.errors.full_messages, ["Ratio one is not a valid image"]
  end
end
# Fixture: a valid PNG upload.
def dummy_file
  path = Rails.root.join('public', 'apple-touch-icon.png')
  { io: File.open(path), filename: 'dummy_file.png', content_type: 'image/png' }
end
# Fixture: an oversized "image" (actually the 500 error page) for size checks.
def big_file
  path = Rails.root.join('public', '500.html')
  { io: File.open(path), filename: 'big_file.png', content_type: 'image/png' }
end
# Fixture: a PDF document upload.
def pdf_file
  path = Rails.root.join('public', 'pdf.pdf')
  { io: File.open(path), filename: 'pdf_file.pdf', content_type: 'application/pdf' }
end
# Fixture: PNG bytes declared with a wrong (text/plain) content type.
def bad_dummy_file
  path = Rails.root.join('public', 'apple-touch-icon.png')
  { io: File.open(path), filename: 'bad_dummy_file.png', content_type: 'text/plain' }
end
# Fixture: a 150x150 PNG.
def image_150x150_file
  path = Rails.root.join('public', 'image_150x150.png')
  { io: File.open(path), filename: 'image_150x150_file.png', content_type: 'image/png' }
end
# Fixture: an 800x600 PNG.
def image_800x600_file
  path = Rails.root.join('public', 'image_800x600.png')
  { io: File.open(path), filename: 'image_800x600_file.png', content_type: 'image/png' }
end
# Fixture: a 600x800 (portrait) PNG.
def image_600x800_file
  path = Rails.root.join('public', 'image_600x800.png')
  { io: File.open(path), filename: 'image_600x800_file.png', content_type: 'image/png' }
end
# Fixture: a 1200x900 PNG.
def image_1200x900_file
  path = Rails.root.join('public', 'image_1200x900.png')
  { io: File.open(path), filename: 'image_1200x900_file.png', content_type: 'image/png' }
end
# Fixture: a 1920x1080 (16:9) PNG.
def image_1920x1080_file
  path = Rails.root.join('public', 'image_1920x1080.png')
  { io: File.open(path), filename: 'image_1920x1080_file.png', content_type: 'image/png' }
end
# Fixture: an HTML document (not an image).
def html_file
  path = Rails.root.join('public', '500.html')
  { io: File.open(path), filename: 'html_file.html', content_type: 'text/html' }
end
# Fixture: WebP bytes (with a .png name) declared as image/webp.
def webp_file
  path = Rails.root.join('public', '1_sm_webp.png')
  { io: File.open(path), filename: '1_sm_webp.png', content_type: 'image/webp' }
end
# Fixture: WebP bytes mislabelled as image/png.
def webp_file_wrong
  path = Rails.root.join('public', '1_sm_webp.png')
  { io: File.open(path), filename: '1_sm_webp.png', content_type: 'image/png' }
end
| 33.125714 | 160 | 0.708125 |
1cdcfff4891c98879f73870ea666cb14e8dddcbb
| 111 |
module PgMantenimiento
# Abstract base class for every model in the PgMantenimiento engine;
# abstract_class prevents ActiveRecord from mapping it to a table.
class ApplicationRecord < ActiveRecord::Base
self.abstract_class = true
end
end
| 18.5 | 46 | 0.783784 |
f8e63e46a580db5470bc30d31812ec0bdad7390a
| 73 |
Rails.application.routes.draw do
# Serve the Forest engine under the /forest path prefix.
mount Forest::Engine => "/forest"
end
| 18.25 | 35 | 0.739726 |
acdf14e2c5889dc4ee7a0415f3b04b5f3bd28b1b
| 2,235 |
# CRUD controller for per-user Setting records; all actions require login.
class SettingsController < ApplicationController
# NOTE(review): before_filter is the pre-Rails-5 spelling of before_action
# (removed in Rails 5.1) — confirm the app's Rails version before upgrading.
before_filter :authenticate_user!
# GET /settings
# GET /settings.json
def index
# HACKY!!!
# Ensures the current user has default settings before listing them.
current_user.create_settings
@settings = Setting.all
respond_to do |format|
format.html # index.html.erb
format.json { render json: @settings }
end
end
# GET /settings/1
# GET /settings/1.json
def show
@setting = Setting.find(params[:id])
respond_to do |format|
format.html # show.html.erb
format.json { render json: @setting }
end
end
# GET /settings/new
# GET /settings/new.json
def new
@setting = Setting.new
respond_to do |format|
format.html # new.html.erb
format.json { render json: @setting }
end
end
# GET /settings/1/edit
def edit
@setting = Setting.find(params[:id])
end
# POST /settings
# POST /settings.json
def create
# Scope the new setting to the current tenant and user explicitly.
@setting = Setting.new(
:tenant => Tenant.current,
:user => current_user,
:name => params[:name],
:value => params[:value])
respond_to do |format|
if @setting.save
format.html { redirect_to "/settings/#{@setting.id}", notice: 'Setting was successfully created.' }
format.json { render json: @setting, status: :created, location: @setting }
else
format.html { render action: "new" }
format.json { render json: @setting.errors, status: :unprocessable_entity }
end
end
end
# PUT /settings/1
# PUT /settings/1.json
def update
# NOTE(review): every other action uses params[:id]; :oid looks like a typo
# or a custom route param — verify against config/routes.rb.
@setting = Setting.find(params[:oid])
#binding.pry
respond_to do |format|
# NOTE(review): update_attributes(params) mass-assigns the entire params
# hash (including :controller/:action) — likely should use permitted
# attributes only. Confirm before changing behavior.
if @setting.update_attributes(params)
format.html { redirect_to "/settings/#{@setting.id}", notice: 'Setting was successfully updated.' }
format.json { head :no_content }
else
format.html { render action: "edit" }
format.json { render json: @setting.errors, status: :unprocessable_entity }
end
end
end
# DELETE /settings/1
# DELETE /settings/1.json
def destroy
@setting = Setting.find(params[:id])
@setting.destroy
respond_to do |format|
format.html { redirect_to settings_url }
format.json { head :no_content }
end
end
end
| 22.806122 | 108 | 0.628635 |
626d1783d6020589e954909798786e83707a9276
| 2,556 |
# -*- encoding: utf-8 -*-
# stub: guard 2.13.0 ruby lib
# RubyGems-generated gemspec stub for an installed gem; edit the canonical
# gemspec in the guard source tree rather than this file.
Gem::Specification.new do |s|
s.name = "guard".freeze
s.version = "2.13.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib".freeze]
s.authors = ["Thibaud Guillaume-Gentil".freeze]
s.date = "2015-07-27"
s.description = "Guard is a command line tool to easily handle events on file system modifications.".freeze
s.email = ["[email protected]".freeze]
s.executables = ["guard".freeze, "_guard-core".freeze]
s.files = ["bin/_guard-core".freeze, "bin/guard".freeze]
s.homepage = "http://guardgem.org".freeze
s.licenses = ["MIT".freeze]
s.required_ruby_version = Gem::Requirement.new(">= 1.9.3".freeze)
s.rubygems_version = "2.6.13".freeze
s.summary = "Guard keeps an eye on your file modifications".freeze
s.installed_by_version = "2.6.13" if s.respond_to? :installed_by_version
# The branches below are generated compatibility shims for old RubyGems
# versions; they declare the same dependency set three times.
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<thor>.freeze, [">= 0.18.1"])
s.add_runtime_dependency(%q<listen>.freeze, ["<= 4.0", ">= 2.7"])
s.add_runtime_dependency(%q<pry>.freeze, [">= 0.9.12"])
s.add_runtime_dependency(%q<lumberjack>.freeze, ["~> 1.0"])
s.add_runtime_dependency(%q<formatador>.freeze, [">= 0.2.4"])
s.add_runtime_dependency(%q<nenv>.freeze, ["~> 0.1"])
s.add_runtime_dependency(%q<shellany>.freeze, ["~> 0.0"])
s.add_runtime_dependency(%q<notiffany>.freeze, ["~> 0.0"])
else
s.add_dependency(%q<thor>.freeze, [">= 0.18.1"])
s.add_dependency(%q<listen>.freeze, ["<= 4.0", ">= 2.7"])
s.add_dependency(%q<pry>.freeze, [">= 0.9.12"])
s.add_dependency(%q<lumberjack>.freeze, ["~> 1.0"])
s.add_dependency(%q<formatador>.freeze, [">= 0.2.4"])
s.add_dependency(%q<nenv>.freeze, ["~> 0.1"])
s.add_dependency(%q<shellany>.freeze, ["~> 0.0"])
s.add_dependency(%q<notiffany>.freeze, ["~> 0.0"])
end
else
s.add_dependency(%q<thor>.freeze, [">= 0.18.1"])
s.add_dependency(%q<listen>.freeze, ["<= 4.0", ">= 2.7"])
s.add_dependency(%q<pry>.freeze, [">= 0.9.12"])
s.add_dependency(%q<lumberjack>.freeze, ["~> 1.0"])
s.add_dependency(%q<formatador>.freeze, [">= 0.2.4"])
s.add_dependency(%q<nenv>.freeze, ["~> 0.1"])
s.add_dependency(%q<shellany>.freeze, ["~> 0.0"])
s.add_dependency(%q<notiffany>.freeze, ["~> 0.0"])
end
end
| 44.842105 | 112 | 0.631064 |
5dec1e88c7b72c8b46460f99e856be98261363db
| 1,111 |
ENV['RAILS_ENV'] ||= 'test'
require_relative '../config/environment'
require 'rails/test_help'
require "minitest/reporters"
# Use Minitest::Reporters for nicer formatted test output.
Minitest::Reporters.use!
# Shared helper methods for all test classes are defined below.
# Shared setup and helpers for all non-integration tests.
class ActiveSupport::TestCase
  # Setup all fixtures in test/fixtures/*.yml for all tests in alphabetical order.
  fixtures :all
  include ApplicationHelper

  # Session-based login check for use inside tests, where the view helper
  # logged_in? is unavailable; named differently to avoid a clash with it.
  def is_logged_in?
    session[:user_id] != nil
  end

  # Log in as +user+ by writing their id straight into the session.
  def log_in_as(user)
    session[:user_id] = user.id
  end
end
# Shared helpers for integration tests.
class ActionDispatch::IntegrationTest
  # Log in as +user+ by POSTing the login form, mimicking a real session.
  # Password and remember-me flag may be overridden per call.
  def log_in_as(user, password: 'password', remember_me: '1')
    credentials = { email: user.email, password: password, remember_me: remember_me }
    post login_path, params: { session: credentials }
  end
end
| 28.487179 | 82 | 0.670567 |
2176e95042f92989d38b6ecc847bc260ebdd56d0
| 699 |
require 'opengl'
require 'sdl'
#init
# Create a 512x512 double-buffered OpenGL window via SDL.
SDL.init(SDL::INIT_VIDEO)
SDL.setGLAttr(SDL::GL_DOUBLEBUFFER,1)
SDL.setVideoMode(512,512,32,SDL::OPENGL)
# main loop
# Draw a color-interpolated quad each frame; exit on any keypress or quit.
while true
while event = SDL::Event2.poll
case event
when SDL::Event2::KeyDown, SDL::Event2::Quit
exit
end
end
Gl.glClear( Gl::GL_COLOR_BUFFER_BIT | Gl::GL_DEPTH_BUFFER_BIT )
Gl.glBegin( Gl::GL_POLYGON )
# One color per corner; OpenGL interpolates across the quad.
Gl.glColor3f( 1.0, 0.0, 0.0 )
Gl.glVertex2f( -0.5, -0.5 )
Gl.glColor3f( 0.0, 1.0, 0.0 )
Gl.glVertex2f( -0.5, 0.5 )
Gl.glColor3f( 0.0, 0.0, 1.0 )
Gl.glVertex2f( 0.5, 0.5 )
Gl.glColor3f( 1.0, 0.0, 1.0 )
Gl.glVertex2f( 0.5, -0.5 )
Gl.glEnd
SDL.GLSwapBuffers()
sleep 0.01 # to avoid consuming all CPU power
end
| 19.971429 | 64 | 0.668097 |
62fee9ef8488f7056a9cc9249a5a98e3a5b8139a
| 32,424 |
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Migrate::Mgmt::V2018_02_02
#
# Move your workloads to Azure.
#
class Projects
include MsRestAzure
#
# Creates and initializes a new instance of the Projects class.
# @param client service class for accessing basic functionality.
#
def initialize(client)
@client = client # AzureMigrate service client used for every request
end
# @return [AzureMigrate] reference to the AzureMigrate
attr_reader :client
#
# Get all projects.
#
# Get all the projects in the subscription.
#
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [ProjectResultList] operation results.
#
def list_by_subscription(custom_headers: nil)
  # Block on the async call and unwrap the deserialized body.
  result = list_by_subscription_async(custom_headers: custom_headers).value!
  result.nil? ? nil : result.body
end
#
# Get all projects.
#
# Get all the projects in the subscription.
#
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def list_by_subscription_with_http_info(custom_headers: nil)
  # Same call as #list_by_subscription, but returns the full operation response.
  promise = list_by_subscription_async(custom_headers: custom_headers)
  promise.value!
end
#
# Get all projects.
#
# Get all the projects in the subscription.
#
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
# AutoRest-generated: builds and issues the GET request listing all projects
# in the subscription, returning a promise of the deserialized response.
def list_by_subscription_async(custom_headers:nil)
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
# NOTE(review): mutating the shared client's api_version as a side effect of
# a read call is surprising, but is how AutoRest generated this client.
@client.api_version = '2018-02-02'


request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'

# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['Accept-Language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/providers/Microsoft.Migrate/projects'

request_url = @base_url || @client.base_url

options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'subscriptionId' => @client.subscription_id},
query_params: {'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)

promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
# Anything other than 200 is surfaced as an AzureOperationError.
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end

# Propagate correlation/request ids from the response headers.
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Migrate::Mgmt::V2018_02_02::Models::ProjectResultList.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end

result
end

promise.execute
end
#
# Get all projects.
#
# Get all the projects in the resource group.
#
# @param resource_group_name [String] Name of the Azure Resource Group that
# project is part of.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [ProjectResultList] operation results.
#
def list_by_resource_group(resource_group_name, custom_headers: nil)
  # Block on the async call and unwrap the deserialized body.
  result = list_by_resource_group_async(resource_group_name, custom_headers: custom_headers).value!
  result.nil? ? nil : result.body
end
#
# Get all projects.
#
# Get all the projects in the resource group.
#
# @param resource_group_name [String] Name of the Azure Resource Group that
# project is part of.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def list_by_resource_group_with_http_info(resource_group_name, custom_headers: nil)
  # Same call as #list_by_resource_group, but returns the full operation response.
  promise = list_by_resource_group_async(resource_group_name, custom_headers: custom_headers)
  promise.value!
end
#
# Get all projects.
#
# Get all the projects in the resource group.
#
# @param resource_group_name [String] Name of the Azure Resource Group that
# project is part of.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
# AutoRest-generated: builds and issues the GET request listing the projects
# in +resource_group_name+, returning a promise of the deserialized response.
def list_by_resource_group_async(resource_group_name, custom_headers:nil)
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
# NOTE(review): mutates shared client state — generated code pattern.
@client.api_version = '2018-02-02'


request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'

# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['Accept-Language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Migrate/projects'

request_url = @base_url || @client.base_url

options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'subscriptionId' => @client.subscription_id,'resourceGroupName' => resource_group_name},
query_params: {'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)

promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
# Anything other than 200 is surfaced as an AzureOperationError.
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end

result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Migrate::Mgmt::V2018_02_02::Models::ProjectResultList.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end

result
end

promise.execute
end
#
# Get the specified project.
#
# Get the project with the specified name.
#
# @param resource_group_name [String] Name of the Azure Resource Group that
# project is part of.
# @param project_name [String] Name of the Azure Migrate project.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Project] operation results.
#
def get(resource_group_name, project_name, custom_headers: nil)
  # Block on the async call and unwrap the deserialized Project body.
  result = get_async(resource_group_name, project_name, custom_headers: custom_headers).value!
  result.nil? ? nil : result.body
end
#
# Get the specified project.
#
# Get the project with the specified name.
#
# @param resource_group_name [String] Name of the Azure Resource Group that
# project is part of.
# @param project_name [String] Name of the Azure Migrate project.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def get_with_http_info(resource_group_name, project_name, custom_headers: nil)
  # Same call as #get, but returns the full operation response.
  promise = get_async(resource_group_name, project_name, custom_headers: custom_headers)
  promise.value!
end
#
# Get the specified project.
#
# Get the project with the specified name.
#
# @param resource_group_name [String] Name of the Azure Resource Group that
# project is part of.
# @param project_name [String] Name of the Azure Migrate project.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
# AutoRest-generated: builds and issues the GET request for a single project,
# returning a promise of the deserialized response.
def get_async(resource_group_name, project_name, custom_headers:nil)
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, 'project_name is nil' if project_name.nil?
# NOTE(review): mutates shared client state — generated code pattern.
@client.api_version = '2018-02-02'


request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'

# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['Accept-Language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Migrate/projects/{projectName}'

request_url = @base_url || @client.base_url

options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'subscriptionId' => @client.subscription_id,'resourceGroupName' => resource_group_name,'projectName' => project_name},
query_params: {'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)

promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
# Anything other than 200 is surfaced as an AzureOperationError.
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end

result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Migrate::Mgmt::V2018_02_02::Models::Project.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end

result
end

promise.execute
end
#
# Create or update project.
#
# Create a project with specified name. If a project already exists, update it.
#
# @param resource_group_name [String] Name of the Azure Resource Group that
# project is part of.
# @param project_name [String] Name of the Azure Migrate project.
# @param project [Project] New or Updated project object.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Project] operation results.
#
# Create or update the project and return the deserialized result body.
#
# @return [Project, nil] the parsed project, or nil when there is no response.
def create(resource_group_name, project_name, project:nil, custom_headers:nil)
  result = create_async(resource_group_name, project_name,
                        project: project, custom_headers: custom_headers).value!
  if result.nil?
    nil
  else
    result.body
  end
end
#
# Create or update project.
#
# Create a project with specified name. If a project already exists, update it.
#
# @param resource_group_name [String] Name of the Azure Resource Group that
# project is part of.
# @param project_name [String] Name of the Azure Migrate project.
# @param project [Project] New or Updated project object.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
# Create or update the project, returning the full HTTP operation response
# (headers, status and deserialized body) rather than just the body.
def create_with_http_info(resource_group_name, project_name, project:nil, custom_headers:nil)
  promise = create_async(resource_group_name, project_name,
                         project: project, custom_headers: custom_headers)
  promise.value!
end
#
# Create or update project.
#
# Create a project with specified name. If a project already exists, update it.
#
# @param resource_group_name [String] Name of the Azure Resource Group that
# project is part of.
# @param project_name [String] Name of the Azure Migrate project.
# @param project [Project] New or Updated project object.
# @param custom_headers [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
# Create or update a project (HTTP PUT). Returns a promise whose value is the
# MsRestAzure operation response with a deserialized Project in +result.body+
# on 200/201; any other status raises AzureOperationError.
#
# @param resource_group_name [String] resource group containing the project.
# @param project_name [String] name of the Azure Migrate project.
# @param project [Project] new or updated project payload (may be nil).
# @param custom_headers [Hash{String => String}] extra headers merged into the request.
# @raise [ArgumentError] when the subscription id, resource group or project name is nil.
# @return [Concurrent::Promise] promise holding the HTTP response.
def create_async(resource_group_name, project_name, project:nil, custom_headers:nil)
  fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
  fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
  fail ArgumentError, 'project_name is nil' if project_name.nil?

  # FIX: pin the API version in a local instead of mutating shared @client
  # state (the original assigned @client.api_version here as a side effect).
  api_version = '2018-02-02'

  request_headers = {}
  request_headers['Content-Type'] = 'application/json; charset=utf-8'

  # Set Headers
  request_headers['x-ms-client-request-id'] = SecureRandom.uuid
  request_headers['Accept-Language'] = @client.accept_language unless @client.accept_language.nil?

  # Serialize Request
  request_mapper = Azure::Migrate::Mgmt::V2018_02_02::Models::Project.mapper()
  request_content = @client.serialize(request_mapper, project)
  request_content = JSON.generate(request_content, quirks_mode: true) unless request_content.nil?

  path_template = 'subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Migrate/projects/{projectName}'
  request_url = @base_url || @client.base_url

  options = {
    middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
    path_params: {'subscriptionId' => @client.subscription_id,'resourceGroupName' => resource_group_name,'projectName' => project_name},
    query_params: {'api-version' => api_version},
    body: request_content,
    headers: request_headers.merge(custom_headers || {}),
    base_url: request_url
  }
  promise = @client.make_request_async(:put, path_template, options)

  promise = promise.then do |result|
    http_response = result.response
    status_code = http_response.status
    response_content = http_response.body
    unless status_code == 200 || status_code == 201
      error_model = JSON.load(response_content)
      fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
    end

    result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
    result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
    result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?

    # Deserialize Response. 200 and 201 use the same Project mapper, so the
    # two duplicated branches of the original are folded into one.
    if status_code == 200 || status_code == 201
      begin
        parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
        result_mapper = Azure::Migrate::Mgmt::V2018_02_02::Models::Project.mapper()
        result.body = @client.deserialize(result_mapper, parsed_response)
      rescue Exception => e
        # NOTE(review): rescuing Exception mirrors the generated-SDK style;
        # StandardError would normally be preferable.
        fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
      end
    end

    result
  end

  promise.execute
end
#
# Update project.
#
# Update a project with specified name. Supports partial updates, for example
# only tags can be provided.
#
# @param resource_group_name [String] Name of the Azure Resource Group that
# project is part of.
# @param project_name [String] Name of the Azure Migrate project.
# @param project [Project] Updated project object.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Project] operation results.
#
# Partially update the project and return the deserialized result body.
#
# @return [Project, nil] the parsed project, or nil when there is no response.
def update(resource_group_name, project_name, project:nil, custom_headers:nil)
  result = update_async(resource_group_name, project_name,
                        project: project, custom_headers: custom_headers).value!
  if result.nil?
    nil
  else
    result.body
  end
end
#
# Update project.
#
# Update a project with specified name. Supports partial updates, for example
# only tags can be provided.
#
# @param resource_group_name [String] Name of the Azure Resource Group that
# project is part of.
# @param project_name [String] Name of the Azure Migrate project.
# @param project [Project] Updated project object.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
# Partially update the project, returning the full HTTP operation response
# rather than just the deserialized body.
def update_with_http_info(resource_group_name, project_name, project:nil, custom_headers:nil)
  promise = update_async(resource_group_name, project_name,
                         project: project, custom_headers: custom_headers)
  promise.value!
end
#
# Update project.
#
# Update a project with specified name. Supports partial updates, for example
# only tags can be provided.
#
# @param resource_group_name [String] Name of the Azure Resource Group that
# project is part of.
# @param project_name [String] Name of the Azure Migrate project.
# @param project [Project] Updated project object.
# @param custom_headers [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
# Partially update a project (HTTP PATCH). Returns a promise whose value is
# the MsRestAzure operation response with a deserialized Project in
# +result.body+ on 200; any other status raises AzureOperationError.
#
# @param resource_group_name [String] resource group containing the project.
# @param project_name [String] name of the Azure Migrate project.
# @param project [Project] updated project payload (may be nil; partial updates supported).
# @param custom_headers [Hash{String => String}] extra headers merged into the request.
# @raise [ArgumentError] when the subscription id, resource group or project name is nil.
# @return [Concurrent::Promise] promise holding the HTTP response.
def update_async(resource_group_name, project_name, project:nil, custom_headers:nil)
  fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
  fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
  fail ArgumentError, 'project_name is nil' if project_name.nil?

  # FIX: pin the API version in a local instead of mutating shared @client
  # state (the original assigned @client.api_version here as a side effect).
  api_version = '2018-02-02'

  request_headers = {}
  request_headers['Content-Type'] = 'application/json; charset=utf-8'

  # Set Headers
  request_headers['x-ms-client-request-id'] = SecureRandom.uuid
  request_headers['Accept-Language'] = @client.accept_language unless @client.accept_language.nil?

  # Serialize Request
  request_mapper = Azure::Migrate::Mgmt::V2018_02_02::Models::Project.mapper()
  request_content = @client.serialize(request_mapper, project)
  request_content = JSON.generate(request_content, quirks_mode: true) unless request_content.nil?

  path_template = 'subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Migrate/projects/{projectName}'
  request_url = @base_url || @client.base_url

  options = {
    middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
    path_params: {'subscriptionId' => @client.subscription_id,'resourceGroupName' => resource_group_name,'projectName' => project_name},
    query_params: {'api-version' => api_version},
    body: request_content,
    headers: request_headers.merge(custom_headers || {}),
    base_url: request_url
  }
  promise = @client.make_request_async(:patch, path_template, options)

  promise = promise.then do |result|
    http_response = result.response
    status_code = http_response.status
    response_content = http_response.body
    unless status_code == 200
      error_model = JSON.load(response_content)
      fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
    end

    result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
    result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
    result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?

    # Deserialize Response
    if status_code == 200
      begin
        parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
        result_mapper = Azure::Migrate::Mgmt::V2018_02_02::Models::Project.mapper()
        result.body = @client.deserialize(result_mapper, parsed_response)
      rescue Exception => e
        # NOTE(review): rescuing Exception mirrors the generated-SDK style;
        # StandardError would normally be preferable.
        fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
      end
    end

    result
  end

  promise.execute
end
#
# Delete the project
#
# Delete the project. Deleting non-existent project is a no-operation.
#
# @param resource_group_name [String] Name of the Azure Resource Group that
# project is part of.
# @param project_name [String] Name of the Azure Migrate project.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
#
# Delete the project (no-op when the project does not exist). Blocks until
# the operation completes and always returns nil.
#
# FIX: dropped the unused `response` local the original assigned.
def delete(resource_group_name, project_name, custom_headers:nil)
  delete_async(resource_group_name, project_name, custom_headers:custom_headers).value!
  nil
end
#
# Delete the project
#
# Delete the project. Deleting non-existent project is a no-operation.
#
# @param resource_group_name [String] Name of the Azure Resource Group that
# project is part of.
# @param project_name [String] Name of the Azure Migrate project.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
# Delete the project, returning the full HTTP operation response so callers
# can inspect status and headers.
def delete_with_http_info(resource_group_name, project_name, custom_headers:nil)
  promise = delete_async(resource_group_name, project_name,
                         custom_headers: custom_headers)
  promise.value!
end
#
# Delete the project
#
# Delete the project. Deleting non-existent project is a no-operation.
#
# @param resource_group_name [String] Name of the Azure Resource Group that
# project is part of.
# @param project_name [String] Name of the Azure Migrate project.
# @param custom_headers [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
# Delete a project (HTTP DELETE). Returns a promise whose value is the raw
# MsRestAzure operation response; any status other than 200 raises
# AzureOperationError. No response body is deserialized.
#
# @param resource_group_name [String] resource group containing the project.
# @param project_name [String] name of the Azure Migrate project.
# @param custom_headers [Hash{String => String}] extra headers merged into the request.
# @raise [ArgumentError] when the subscription id, resource group or project name is nil.
# @return [Concurrent::Promise] promise holding the HTTP response.
def delete_async(resource_group_name, project_name, custom_headers:nil)
  fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
  fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
  fail ArgumentError, 'project_name is nil' if project_name.nil?

  # FIX: pin the API version in a local instead of mutating shared @client
  # state (the original assigned @client.api_version here as a side effect).
  api_version = '2018-02-02'

  request_headers = {}
  request_headers['Content-Type'] = 'application/json; charset=utf-8'

  # Set Headers
  request_headers['x-ms-client-request-id'] = SecureRandom.uuid
  request_headers['Accept-Language'] = @client.accept_language unless @client.accept_language.nil?

  path_template = 'subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Migrate/projects/{projectName}'
  request_url = @base_url || @client.base_url

  options = {
    middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
    path_params: {'subscriptionId' => @client.subscription_id,'resourceGroupName' => resource_group_name,'projectName' => project_name},
    query_params: {'api-version' => api_version},
    headers: request_headers.merge(custom_headers || {}),
    base_url: request_url
  }
  promise = @client.make_request_async(:delete, path_template, options)

  promise = promise.then do |result|
    http_response = result.response
    status_code = http_response.status
    response_content = http_response.body
    unless status_code == 200
      error_model = JSON.load(response_content)
      fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
    end

    result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
    result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
    result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?

    result
  end

  promise.execute
end
#
# Get shared keys for the project.
#
# Gets the Log Analytics Workspace ID and Primary Key for the specified
# project.
#
# @param resource_group_name [String] Name of the Azure Resource Group that
# project is part of.
# @param project_name [String] Name of the Azure Migrate project.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [ProjectKey] operation results.
#
# Fetch the Log Analytics workspace id and primary key for the project and
# return the deserialized result body.
#
# @return [ProjectKey, nil] the parsed keys, or nil when there is no response.
def get_keys(resource_group_name, project_name, custom_headers:nil)
  result = get_keys_async(resource_group_name, project_name,
                          custom_headers: custom_headers).value!
  if result.nil?
    nil
  else
    result.body
  end
end
#
# Get shared keys for the project.
#
# Gets the Log Analytics Workspace ID and Primary Key for the specified
# project.
#
# @param resource_group_name [String] Name of the Azure Resource Group that
# project is part of.
# @param project_name [String] Name of the Azure Migrate project.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
# Fetch the project's shared keys, returning the full HTTP operation response
# rather than just the deserialized body.
def get_keys_with_http_info(resource_group_name, project_name, custom_headers:nil)
  promise = get_keys_async(resource_group_name, project_name,
                           custom_headers: custom_headers)
  promise.value!
end
#
# Get shared keys for the project.
#
# Gets the Log Analytics Workspace ID and Primary Key for the specified
# project.
#
# @param resource_group_name [String] Name of the Azure Resource Group that
# project is part of.
# @param project_name [String] Name of the Azure Migrate project.
# @param custom_headers [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
# Fetch the project's shared keys (HTTP POST to .../keys). Returns a promise
# whose value is the MsRestAzure operation response with a deserialized
# ProjectKey in +result.body+ on 200; any other status raises
# AzureOperationError.
#
# @param resource_group_name [String] resource group containing the project.
# @param project_name [String] name of the Azure Migrate project.
# @param custom_headers [Hash{String => String}] extra headers merged into the request.
# @raise [ArgumentError] when the subscription id, resource group or project name is nil.
# @return [Concurrent::Promise] promise holding the HTTP response.
def get_keys_async(resource_group_name, project_name, custom_headers:nil)
  fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
  fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
  fail ArgumentError, 'project_name is nil' if project_name.nil?

  # FIX: pin the API version in a local instead of mutating shared @client
  # state (the original assigned @client.api_version here as a side effect).
  api_version = '2018-02-02'

  request_headers = {}
  request_headers['Content-Type'] = 'application/json; charset=utf-8'

  # Set Headers
  request_headers['x-ms-client-request-id'] = SecureRandom.uuid
  request_headers['Accept-Language'] = @client.accept_language unless @client.accept_language.nil?

  path_template = 'subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Migrate/projects/{projectName}/keys'
  request_url = @base_url || @client.base_url

  options = {
    middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
    path_params: {'subscriptionId' => @client.subscription_id,'resourceGroupName' => resource_group_name,'projectName' => project_name},
    query_params: {'api-version' => api_version},
    headers: request_headers.merge(custom_headers || {}),
    base_url: request_url
  }
  promise = @client.make_request_async(:post, path_template, options)

  promise = promise.then do |result|
    http_response = result.response
    status_code = http_response.status
    response_content = http_response.body
    unless status_code == 200
      error_model = JSON.load(response_content)
      fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
    end

    result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
    result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
    result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?

    # Deserialize Response
    if status_code == 200
      begin
        parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
        result_mapper = Azure::Migrate::Mgmt::V2018_02_02::Models::ProjectKey.mapper()
        result.body = @client.deserialize(result_mapper, parsed_response)
      rescue Exception => e
        # NOTE(review): rescuing Exception mirrors the generated-SDK style;
        # StandardError would normally be preferable.
        fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
      end
    end

    result
  end

  promise.execute
end
end
end
| 42.888889 | 145 | 0.691926 |
33097eec3b6b88a7e69b5828cb49eca17f712825
| 1,741 |
require 'spec_helper'
require 'support/active_record'
require 'support/helpers'
module PricingDefinition
  module Behaviours
    describe PriceableModifier do
      subject { klass.priceable_modifier(options) }
      let(:klass) { ::ModifierWithRequiredAttributes }

      context 'priceable modifier behaviour declaration' do
        context 'with invalid option keys' do
          let(:options) { { unsupported: :configuration } }

          # FIX: the example asserts that an error IS raised, but the
          # original description read 'does not raise an error'.
          it 'raises an error' do
            expect { subject }.to raise_error
          end
        end

        context 'with valid option keys' do
          let(:behaviour) { PricingDefinition::Configuration.behaviour_for(klass) }
          let(:options) { { for: :priceable, label: 'Discount', description: 'Free beer!', weight: 10 } }

          it 'does not raise an error' do
            expect { subject }.to_not raise_error
          end

          it 'adds update configuration for priceable modifiers' do
            subject
            expect(behaviour).to eq(:priceable_modifier)
          end
        end
      end

      context 'host model validation' do
        subject { klass.priceable_modifier(options) }
        let(:options) { { label: 'Discount', description: 'Free beer!', weight: 10 } }

        # FIX: both contexts were described 'with required defined attributed'
        # (typo plus copy-paste duplicate); the second exercises the model
        # WITHOUT the required attributes.
        context 'with required defined attributes' do
          let(:klass) { ::ModifierWithRequiredAttributes }

          it 'does not raise an error' do
            expect { subject }.to_not raise_error
          end
        end

        context 'without required defined attributes' do
          let(:klass) { ::ModifierWithoutRequiredAttributes }

          it 'raises an error' do
            expect { subject }.to raise_error
          end
        end
      end
    end
  end
end
| 30.017241 | 105 | 0.614015 |
3323dd40ee63b84117c97c695304dfe21a83a1d0
| 1,340 |
# frozen_string_literal: true
require 'singleton'
# Test double that maps user barcodes to login names and stubs the
# Sequencescape v2 user-search endpoint during tagged Cucumber scenarios.
class FakeUser
  include Singleton
  # Lazily-initialised barcode => login lookup table.
  def user_barcodes
    @user_barcodes ||= {}
  end
  # Forget all registered barcodes (invoked from the After hook below).
  def clear
    @user_barcodes = {}
  end
  # Register +user+ (a login string) under +barcode+.
  def user_barcode(user, barcode)
    user_barcodes[barcode] = user
  end
  # Look up the login registered for +barcode+; nil when unknown.
  def login_from_user_code(barcode)
    user_barcodes[barcode]
  end
  # Install Before/After hooks on +target+ for scenarios matching +tags+.
  # The Before hook stubs GET requests to the v2 users endpoint so they are
  # answered from this singleton's lookup table; After resets the table.
  def self.install_hooks(target, tags)
    target.instance_eval do
      Before(tags) do |_scenario|
        # Touches the driver's browser when running under the JS driver —
        # presumably to force session start before stubbing; TODO confirm.
        Capybara.current_session.driver.browser if Capybara.current_driver == Capybara.javascript_driver
        stub_request(:get, %r{#{Settings.sequencescape_api_v2}/users\?filter\[user_code\].*}).to_return do |request|
          # Extract the requested barcode from the JSON-API filter param.
          user_code = request.uri.query_values['filter[user_code]']
          body_hash = {
            data: [
              {
                attributes: {
                  login: FakeUser.instance.login_from_user_code(user_code)
                }
              }
            ]
          }
          FakeUser.response_format(body_hash)
        end
      end
      After(tags) do |_scenario|
        FakeUser.instance.clear
      end
    end
  end
  # Wrap +body_value+ in a WebMock-style 200 JSON:API response hash.
  def self.response_format(body_value)
    {
      status: 200,
      headers: { 'Content-Type': 'application/vnd.api+json' },
      body: JSON.generate(body_value)
    }
  end
end
# Register the hooks for scenarios tagged @user_barcode_service.
FakeUser.install_hooks(self, '@user_barcode_service')
| 22.711864 | 116 | 0.623134 |
e2fc690e6f25c2dedee787e598f1fe44eb88a3ef
| 390 |
require 'test_helper'
# Placeholder unit test for TeammanagerRelationship (joins users to teams —
# see the schema annotation below); no assertions implemented yet.
class TeammanagerRelationshipTest < ActiveSupport::TestCase
  # test "the truth" do
  #   assert true
  # end
end
# == Schema Information
#
# Table name: teammanager_relationships
#
# id :integer not null, primary key
# user_id :integer
# team_id :integer
# created_at :datetime not null
# updated_at :datetime not null
#
| 20.526316 | 59 | 0.65641 |
284fa5f8080d5c222fa7ee566af8c26371d4ae3a
| 1,604 |
# Test-environment configuration for the dummy host application.
Dummy::Application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # The test environment is used exclusively to run your application's
  # test suite. You never need to work with it otherwise. Remember that
  # your test database is "scratch space" for the test suite and is wiped
  # and recreated between test runs. Don't rely on the data there!
  config.cache_classes = true

  # Do not eager load code on boot. This avoids loading your whole application
  # just for the purpose of running a single test. If you are using a tool that
  # preloads Rails for running tests, you may have to set it to true.
  config.eager_load = false

  # Configure static asset server for tests with Cache-Control for performance.
  # NOTE(review): serve_static_files / static_cache_control are the Rails
  # 4.x-era option names — confirm against the app's Rails version on upgrade.
  config.serve_static_files = true
  config.static_cache_control = "public, max-age=3600"

  # Show full error reports and disable caching.
  config.consider_all_requests_local = true
  config.action_controller.perform_caching = false

  # Raise exceptions instead of rendering exception templates.
  config.action_dispatch.show_exceptions = false

  # Disable request forgery protection in test environment.
  config.action_controller.allow_forgery_protection = false

  # Tell Action Mailer not to deliver emails to the real world.
  # The :test delivery method accumulates sent emails in the
  # ActionMailer::Base.deliveries array.
  config.action_mailer.delivery_method = :test

  # Print deprecation notices to the stderr.
  config.active_support.deprecation = :stderr

  # Run tests in a deterministic (sorted) order.
  config.active_support.test_order = :sorted
end
| 41.128205 | 85 | 0.774314 |
e8daa170afea3ed5430e32de21c5ad050ac75a3c
| 1,524 |
#
# Cookbook Name:: magentostack
# Recipe :: users
#
# Copyright 2015, Rackspace
#
# Skip entirely when user management is disabled via attribute.
return if node['magentostack']['users'] == false || node['magentostack']['users'] == 'false'

begin
  node.default['authorization']['sudo']['include_sudoers_d'] = true
  include_recipe 'sudo'

  users = data_bag_item('users', 'users')
  groups = {}
  all_users = users.to_hash.select { |k, v| k != 'id' } # strip out the id of the bag

  # only manage the subset of users defined
  Array(all_users.keys).each do |id|
    u = all_users[id]
    username = u['username'] || u['id'] || id
    # user_action is an Array of Symbols (e.g. [:remove]) or nil.
    user_action = Array(u['action']).map(&:to_sym) if u['action']

    user_account username do
      %w(comment uid gid home shell password system_user manage_home create_group
         ssh_keys ssh_keygen non_unique).each do |attr|
        send(attr, u[attr]) if u[attr]
      end
      shell '/usr/sbin/nologin' unless u['shell']
      action user_action
    end

    sudo username do
      user username
      nopasswd true
      only_if { u['sudo'] && u['sudo'] != 'false' }
    end

    # stop here if the groups are empty or we're removing this user.
    # FIX: user_action is an Array of Symbols, so the original comparison
    # `user_action == 'remove'` could never be true and removed users were
    # still appended to their groups.
    next if u['groups'].nil? || Array(user_action).include?(:remove)

    u['groups'].each do |groupname|
      groups[groupname] = [] unless groups[groupname]
      groups[groupname] += [username]
    end
  end

  groups.each do |groupname, membership|
    group groupname do
      members membership
      append true
    end
  end
rescue => e
  # FIX: surface the reason for the failure instead of only noting that one occurred.
  Chef::Log.warn("Failed to retrieve user data from data bags: #{e.message}")
end
| 25.830508 | 92 | 0.641076 |
1a5ae55cc0055036f650dfea82989269f4257cfe
| 772 |
require 'spec_helper_acceptance'
# Ensure IP Forwarding is disabled - Section 3.1.1
# Ensure packet redirect sending is disabled - Section 3.1.2
# Ensure source routed packets are not accepted - Section 3.2.1
describe file('/etc/sysctl.d/99-sysctl.conf') do
  it { should be_symlink }
  it { should be_owned_by 'root' }
  it { should be_grouped_into 'root' }

  # FIX: escape the dots so they match a literal '.' rather than any
  # character, and drop the trailing empty (no-op) `its(:content)` example.
  its(:content) { should match /net\.ipv4\.ip_forward = 0/ }
  its(:content) { should match /net\.ipv4\.conf\.all\.send_redirects = 0/ }
  its(:content) { should match /net\.ipv4\.conf\.default\.send_redirects = 0/ }
  its(:content) { should match /net\.ipv4\.conf\.all\.accept_source_route = 0/ }
  its(:content) { should match /net\.ipv4\.conf\.default\.accept_source_route = 0/ }
end
| 48.25 | 83 | 0.695596 |
392f24789b94f6b0aa0ffe5ca22b779cdba79d6b
| 1,060 |
#
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module Shell
  module Commands
    # Shell command that removes a namespace; the namespace must contain no
    # tables for the drop to succeed.
    class DropNamespace < Command
      # Usage text shown by the shell's help system.
      def help
        <<-EOF
Drop the named namespace. The namespace must be empty.
EOF
      end

      # Delegate the actual drop to the admin client.
      def command(namespace)
        admin.drop_namespace(namespace)
      end
    end
  end
end
| 30.285714 | 74 | 0.737736 |
627676753d5ced8f3e6e6b23f5e8e51cdea4f2ec
| 1,101 |
require "rails_helper"
RSpec.describe ReservationsController, type: :routing do
  describe "routing" do
    it "routes to #index" do
      expect(get: "/reservations").to route_to("reservations#index")
    end

    it "routes to #new" do
      expect(get: "/reservations/new").to route_to("reservations#new")
    end

    it "routes to #show" do
      expect(get: "/reservations/1").to route_to("reservations#show", id: "1")
    end

    it "routes to #edit" do
      expect(get: "/reservations/1/edit").to route_to("reservations#edit", id: "1")
    end

    it "routes to #create" do
      expect(post: "/reservations").to route_to("reservations#create")
    end

    it "routes to #update via PUT" do
      expect(put: "/reservations/1").to route_to("reservations#update", id: "1")
    end

    it "routes to #update via PATCH" do
      expect(patch: "/reservations/1").to route_to("reservations#update", id: "1")
    end

    it "routes to #destroy" do
      expect(delete: "/reservations/1").to route_to("reservations#destroy", id: "1")
    end
  end
end
| 27.525 | 90 | 0.620345 |
3958903b72d08f2ff6fc8510c56f830f08914523
| 1,883 |
# frozen_string_literal: true
require "#{ENV['DOTS']}/ruby/console_colors"
# Builds colored prompt strings for an interactive Ruby console (IRB/Pry).
# Colors come from the CONSOLE_COLORS table loaded by the file's require.
class ConsolePrompt
  # prefix: optional label shown before the separators; object: the console's
  # current context object; level: nesting depth (cd'ing into objects in Pry).
  def initialize(prefix, object=nil, level=0)
    @prefix = prefix
    @object = object
    @level = level
  end

  # Prompt shown for a fresh input line.
  def normal
    "#{prefix}#{info}#{context}#{level}#{suffix(:cyan)}"
  end

  # Prompt shown while a multi-line statement is still open.
  def continue
    "#{prefix}#{info}#{context}#{level}#{suffix(:yellow)}"
  end

  # printf-style format for echoing a statement's return value.
  def return
    "#{colors[:light_yellow]}=>#{colors[:reset]} %s\n"
  end

  private

  # Optional label plus the three leading separator chevrons.
  def prefix
    parts = []
    unless @prefix.nil?
      parts << colors[:bold] << colors[:dark_gray] << @prefix << colors[:reset] << ' '
    end
    parts << separator << separator(:blue) << separator << ' '
    parts.join
  end

  # Bold blue application/Ruby identifier: PROMPT_NAME env var, the Rails
  # application name, or the running Ruby version — in that order.
  def info
    name =
      if ENV.key?('PROMPT_NAME')
        ENV['PROMPT_NAME']
      elsif defined?(Rails) && Rails.respond_to?(:application)
        Rails.application.class.name.gsub('::Application', '')
      else
        RUBY_VERSION.to_s
      end
    "#{colors[:reset]}#{colors[:bold]}#{colors[:blue]}#{name}#{colors[:reset]}"
  end

  # Short red description of the current Pry context object; empty when Pry
  # is absent or the context is top-level 'main'. Namespaced names are
  # abbreviated to '*::LastComponent'.
  def context
    return '' unless defined?(Pry)

    clipped = Pry.view_clip(@object)
    return '' if clipped == 'main'

    clipped = clipped.to_s.gsub('#<', '').delete('>')
    clipped = '*::' + clipped.gsub(/.*::/, '') if clipped.include?('::')
    " #{colors[:light_red]}#{clipped}#{colors[:reset]}"
  end

  # Magenta nesting-depth indicator; empty at the top level.
  def level
    return '' if @level.zero?
    " #{colors[:reset]}#{colors[:bold]}#{colors[:magenta]}#{@level}#{colors[:reset]}"
  end

  # Trailing chevron in the given color, surrounded by spaces.
  def suffix(color)
    " #{separator(color)} #{colors[:reset]}"
  end

  # A single bold chevron (U+276F) in the given color.
  def separator(color=:magenta)
    "#{colors[:bold]}#{colors[color]}\u276F#{colors[:reset]}"
  end

  def colors
    CONSOLE_COLORS
  end
end
| 18.105769 | 72 | 0.546468 |
084cd5d205f2848b23171e9a990ef8dda242a79f
| 367 |
# Intentionally empty migration: the fot_logs table definition is kept only
# as commented-out reference — presumably superseded or applied elsewhere;
# confirm before deleting this file.
class CreateFotLogs < ActiveRecord::Migration[5.1]
  def change
    # create_table :fot_logs do |t|
    #   t.integer :stat_zatr_id
    #   t.integer :budget_id
    #   t.integer :user_id
    #   t.decimal :summa, precision: 15, scale: 2
    #   t.decimal :delta, precision: 15, scale: 2
    #   t.integer :request_change_id
    #   t.timestamps
    # end
  end
end
| 24.466667 | 50 | 0.626703 |
e898a9387137d842eaa633ddb0e287426a839252
| 2,324 |
# frozen_string_literal: true
class Fisk
  module Instructions
    # Instruction VFMADD213SD: Fused Multiply-Add of Scalar Double-Precision Floating-Point Values
    #
    # Auto-generated encoder table: each Form pairs an operand-type signature
    # (indices into OPERAND_TYPES) with an Encoding that writes the VEX/EVEX
    # prefix, the 0xA9 opcode, and the ModRM byte for that signature.
    VFMADD213SD = Instruction.new("VFMADD213SD", [
      # vfmadd213sd: xmm{k}{z}, xmm, m64
      Form.new([
        OPERAND_TYPES[79],
        OPERAND_TYPES[24],
        OPERAND_TYPES[18],
      ].freeze, [
        Class.new(Fisk::Encoding) {
          def encode buffer, operands
            add_EVEX(buffer, operands)
            # ModRM mod=0: memory operand without displacement.
            add_opcode(buffer, 0xA9, 0) +
            add_modrm(buffer,
              0,
              operands[0].op_value,
              operands[2].op_value, operands) +
            0
          end
        }.new.freeze,
      ].freeze).freeze,
      # vfmadd213sd: xmm, xmm, xmm
      Form.new([
        OPERAND_TYPES[23],
        OPERAND_TYPES[24],
        OPERAND_TYPES[24],
      ].freeze, [
        Class.new(Fisk::Encoding) {
          def encode buffer, operands
            add_VEX(buffer, operands)
            # ModRM mod=3: register-to-register form.
            add_opcode(buffer, 0xA9, 0) +
            add_modrm(buffer,
              3,
              operands[0].op_value,
              operands[2].op_value, operands) +
            0
          end
        }.new.freeze,
      ].freeze).freeze,
      # vfmadd213sd: xmm, xmm, m64
      Form.new([
        OPERAND_TYPES[23],
        OPERAND_TYPES[24],
        OPERAND_TYPES[18],
      ].freeze, [
        Class.new(Fisk::Encoding) {
          def encode buffer, operands
            add_VEX(buffer, operands)
            add_opcode(buffer, 0xA9, 0) +
            add_modrm(buffer,
              0,
              operands[0].op_value,
              operands[2].op_value, operands) +
            0
          end
        }.new.freeze,
      ].freeze).freeze,
      # vfmadd213sd: xmm{k}{z}, xmm, xmm, {er}
      Form.new([
        OPERAND_TYPES[79],
        OPERAND_TYPES[24],
        OPERAND_TYPES[24],
        OPERAND_TYPES[67],
      ].freeze, [
        Class.new(Fisk::Encoding) {
          def encode buffer, operands
            add_EVEX(buffer, operands)
            add_opcode(buffer, 0xA9, 0) +
            add_modrm(buffer,
              3,
              operands[0].op_value,
              operands[2].op_value, operands) +
            0
          end
        }.new.freeze,
      ].freeze).freeze,
    ].freeze).freeze
  end
end
| 28 | 98 | 0.513339 |
f7af21ebf1b006682487af671981bac09c235100
| 637 |
# frozen_string_literal: true
module Nocode
  module Steps
    module Record
      # Create a new hash from an existing hash mapping each key as configured by the
      # key_mappings option. The key_mappings option should be in the form of:
      # new_key => old_key
      class Map < Step
        option :key_mappings, :register

        # Replace the register's hash with a remapped copy: for every
        # new_key => old_key pair, copy the value stored under old_key.
        def perform
          source = registers[register_option] || {}
          mappings = key_mappings_option || {}

          registers[register_option] = mappings.each_with_object({}) do |(to, from), mapped|
            mapped[to.to_s] = source[from.to_s]
          end
        end
      end
    end
  end
end
| 24.5 | 85 | 0.599686 |
08fa54f2f5d9d649b32f881f02b3f50e4f557f6f
| 232 |
# JSON serializer for a Final record. NOTE(review): attribute semantics
# (category title, flattened topics string, per-place correctness flags)
# are inferred from the names — confirm against the Final model.
class FinalSerializer < ActiveModel::Serializer
  attributes :id,
             :category_title,
             :topics_string,
             :result,
             :third_right,
             :second_right,
             :first_right
end
| 23.2 | 47 | 0.530172 |
2688f6bd981225d0cff324d48a7f97e49159830e
| 6,637 |
# Data backfill: populate the project_transitions audit table from each
# project's current `state` column. For every project we insert one row per
# state it must logically have passed through on the way to its current state
# (sort_key orders them) and flag only the final row as most_recent. Exact
# transition timestamps were never recorded, so every row approximates them
# with coalesce() over the nearest known date column (see the SQL comments).
#
# NOTE(review): raw `execute` inside #change is not reversible — rolling this
# migration back will raise ActiveRecord::IrreversibleMigration; consider #up.
class InsertProjectTransitions < ActiveRecord::Migration[4.2]
  def change
    # The backfill touches every project; lift the statement timeout so a
    # large dataset does not abort the migration mid-way.
    execute <<-SQL
      SET statement_timeout TO 0;
    SQL

    # One INSERT per current state. States further along the lifecycle UNION
    # in rows for every earlier state they implicitly passed through
    # (in_analysis -> approved -> online -> waiting_funds -> successful/failed).
    execute <<-SQL
      -- in_analysis projects
      INSERT INTO project_transitions (to_state, sort_key, project_id, most_recent, created_at, updated_at)
      SELECT
        'in_analysis',
        0,
        p.id,
        true,
        coalesce(p.sent_to_analysis_at, p.updated_at),
        coalesce(p.sent_to_analysis_at, p.updated_at)
      FROM
        projects p
      WHERE
        p.state = 'in_analysis';

      -- approved projects
      INSERT INTO project_transitions (to_state, sort_key, project_id, most_recent, created_at, updated_at)
      SELECT
        'in_analysis',
        0,
        p.id,
        false,
        coalesce(p.sent_to_analysis_at, p.updated_at),
        coalesce(p.sent_to_analysis_at, p.updated_at)
      FROM
        projects p
      WHERE
        p.state = 'approved'
      UNION
      SELECT
        'approved',
        1,
        p.id,
        true,
        -- we do not have the approval date in the database
        coalesce(p.sent_to_analysis_at, p.updated_at),
        coalesce(p.sent_to_analysis_at, p.updated_at)
      FROM
        projects p
      WHERE
        p.state = 'approved';

      -- online projects
      INSERT INTO project_transitions (to_state, sort_key, project_id, most_recent, created_at, updated_at)
      SELECT
        'in_analysis',
        0,
        p.id,
        false,
        coalesce(p.sent_to_analysis_at, p.updated_at),
        coalesce(p.sent_to_analysis_at, p.updated_at)
      FROM
        projects p
      WHERE
        p.state = 'online'
      UNION
      SELECT
        'approved',
        1,
        p.id,
        false,
        -- we do not have the approval date in the database
        coalesce(p.sent_to_analysis_at, p.updated_at),
        coalesce(p.sent_to_analysis_at, p.updated_at)
      FROM
        projects p
      WHERE
        p.state = 'online'
      UNION
      SELECT
        'online',
        2,
        p.id,
        true,
        -- we do not have the approval date in the database
        coalesce(p.online_date, p.updated_at),
        coalesce(p.online_date, p.updated_at)
      FROM
        projects p
      WHERE
        p.state = 'online';

      -- waiting_funds projects
      INSERT INTO project_transitions (to_state, sort_key, project_id, most_recent, created_at, updated_at)
      SELECT
        'in_analysis',
        0,
        p.id,
        false,
        coalesce(p.sent_to_analysis_at, p.updated_at),
        coalesce(p.sent_to_analysis_at, p.updated_at)
      FROM
        projects p
      WHERE
        p.state = 'waiting_funds'
      UNION
      SELECT
        'approved',
        1,
        p.id,
        false,
        -- we do not have the approval date in the database
        coalesce(p.sent_to_analysis_at, p.updated_at),
        coalesce(p.sent_to_analysis_at, p.updated_at)
      FROM
        projects p
      WHERE
        p.state = 'waiting_funds'
      UNION
      SELECT
        'online',
        2,
        p.id,
        false,
        -- we do not have the approval date in the database
        coalesce(p.online_date, p.updated_at),
        coalesce(p.online_date, p.updated_at)
      FROM
        projects p
      WHERE
        p.state = 'waiting_funds'
      UNION
      SELECT
        'waiting_funds',
        3,
        p.id,
        true,
        -- we do not have the approval date in the database
        coalesce(p.expires_at, p.updated_at),
        coalesce(p.expires_at, p.updated_at)
      FROM
        projects p
      WHERE
        p.state = 'waiting_funds';

      -- successful projects
      INSERT INTO project_transitions (to_state, sort_key, project_id, most_recent, created_at, updated_at)
      SELECT
        'in_analysis',
        0,
        p.id,
        false,
        coalesce(p.sent_to_analysis_at, p.updated_at),
        coalesce(p.sent_to_analysis_at, p.updated_at)
      FROM
        projects p
      WHERE
        p.state = 'successful'
      UNION
      SELECT
        'approved',
        1,
        p.id,
        false,
        -- we do not have the approval date in the database
        coalesce(p.sent_to_analysis_at, p.updated_at),
        coalesce(p.sent_to_analysis_at, p.updated_at)
      FROM
        projects p
      WHERE
        p.state = 'successful'
      UNION
      SELECT
        'online',
        2,
        p.id,
        false,
        -- we do not have the approval date in the database
        coalesce(p.online_date, p.updated_at),
        coalesce(p.online_date, p.updated_at)
      FROM
        projects p
      WHERE
        p.state = 'successful'
      UNION
      SELECT
        'waiting_funds',
        3,
        p.id,
        false,
        -- we do not have the approval date in the database
        coalesce(p.expires_at, p.updated_at),
        coalesce(p.expires_at, p.updated_at)
      FROM
        projects p
      WHERE
        p.state = 'successful'
      UNION
      SELECT
        'successful',
        4,
        p.id,
        true,
        -- we do not have the successful date in the database
        coalesce(p.expires_at, p.updated_at),
        coalesce(p.expires_at, p.updated_at)
      FROM
        projects p
      WHERE
        p.state = 'successful';

      -- failed projects
      INSERT INTO project_transitions (to_state, sort_key, project_id, most_recent, created_at, updated_at)
      SELECT
        'in_analysis',
        0,
        p.id,
        false,
        coalesce(p.sent_to_analysis_at, p.updated_at),
        coalesce(p.sent_to_analysis_at, p.updated_at)
      FROM
        projects p
      WHERE
        p.state = 'failed'
      UNION
      SELECT
        'approved',
        1,
        p.id,
        false,
        -- we do not have the approval date in the database
        coalesce(p.sent_to_analysis_at, p.updated_at),
        coalesce(p.sent_to_analysis_at, p.updated_at)
      FROM
        projects p
      WHERE
        p.state = 'failed'
      UNION
      SELECT
        'online',
        2,
        p.id,
        false,
        -- we do not have the approval date in the database
        coalesce(p.online_date, p.updated_at),
        coalesce(p.online_date, p.updated_at)
      FROM
        projects p
      WHERE
        p.state = 'failed'
      UNION
      SELECT
        'waiting_funds',
        3,
        p.id,
        false,
        -- we do not have the approval date in the database
        coalesce(p.expires_at, p.updated_at),
        coalesce(p.expires_at, p.updated_at)
      FROM
        projects p
      WHERE
        p.state = 'failed'
      UNION
      SELECT
        'failed',
        4,
        p.id,
        true,
        -- we do not have the failed date in the database
        coalesce(p.expires_at, p.updated_at),
        coalesce(p.expires_at, p.updated_at)
      FROM
        projects p
      WHERE
        p.state = 'failed';

      -- deleted projects
      INSERT INTO project_transitions (to_state, sort_key, project_id, most_recent, created_at, updated_at)
      SELECT
        'deleted',
        0,
        p.id,
        true,
        coalesce(p.sent_to_analysis_at, p.updated_at),
        coalesce(p.sent_to_analysis_at, p.updated_at)
      FROM
        projects p
      WHERE
        p.state = 'deleted';

      -- rejected projects
      INSERT INTO project_transitions (to_state, sort_key, project_id, most_recent, created_at, updated_at)
      SELECT
        'in_analysis',
        0,
        p.id,
        false,
        coalesce(p.sent_to_analysis_at, p.updated_at),
        coalesce(p.sent_to_analysis_at, p.updated_at)
      FROM
        projects p
      WHERE
        p.state = 'rejected'
      UNION
      SELECT
        'rejected',
        1,
        p.id,
        true,
        -- we do not have the approval date in the database
        coalesce(p.rejected_at, p.updated_at),
        coalesce(p.rejected_at, p.updated_at)
      FROM
        projects p
      WHERE
        p.state = 'rejected';
    SQL
  end
end
| 20.871069 | 101 | 0.681181 |
79f99e361c6cf0cfcaebce6ec87ef883722253e9
| 3,962 |
require 'spec_helper'
module Susply
describe RenewsSubscription do
let(:owner_class) { Susply.subscription_owner_class.constantize }
let(:time) {Time.zone.today}
it "returns nil when owner does not has an active subscription" do
owner = owner_class.create()
plan = create(:susply_plan)
subscription = create(:susply_subscription, :inactive, owner: owner)
s = Susply::RenewsSubscription.call(owner)
expect(s).to be_nil
end
it "returns nil when active subscription is not expired" do
owner = owner_class.create()
plan = create(:susply_plan)
subscription = create(:susply_subscription, :active,
owner: owner, current_period_end: time + 2.days)
s = Susply::RenewsSubscription.call(owner)
expect(s).to be_nil
end
context "when owner has an active subscription" do
it "creates a payment of renovation type" do
owner = owner_class.create()
plan = create(:susply_plan, interval: 'monthly', price: 101)
subscription = create(:susply_subscription, :active,
owner: owner, plan: plan)
s = Susply::RenewsSubscription.call(owner)
payment = owner.payments.last
expect(payment.generated_type).to eq 'plan_renovation'
expect(payment.amount).to eq 101
end
it "does not creates an extra subscription" do
owner = owner_class.create()
subscription = create(:susply_subscription, :active, owner: owner)
Susply::RenewsSubscription.call(owner)
expect(owner.subscriptions.count).to be 1
end
it "return a subscription with updated attributes" do
owner = owner_class.create()
subscription = create(:susply_subscription, :active, owner: owner,
current_period_start: time - 5.days)
s = Susply::RenewsSubscription.call(owner)
expect(s.quantity).not_to eq subscription.quantity
expect(s.current_period_start).
not_to eq subscription.current_period_start
expect(s.current_period_end).
not_to eq subscription.current_period_end
end
it "returns updates by one the subscriptions" do
owner = owner_class.create()
subscription = create(:susply_subscription, :active, owner: owner,
current_period_start: time - 5.days,
quantity: 5)
s = Susply::RenewsSubscription.call(owner)
expect(s.quantity).to eq 6
end
it "sets the subscription initial date o past end" do
owner = owner_class.create()
end_time = time + 6.hours
subscription = create(:susply_subscription, :active, owner: owner,
current_period_start: time - 5.days,
current_period_end: end_time)
s = Susply::RenewsSubscription.call(owner)
expect(s.current_period_start).to eq end_time
end
it "sets the end period to the given month calculation" do
owner = owner_class.create()
end_time = time + 6.hours
plan = create(:susply_plan, interval: 'monthly')
subscription = create(:susply_subscription, :active, owner: owner,
plan: plan, current_period_end: end_time)
s = Susply::RenewsSubscription.call(owner)
expect(s.current_period_end).to eq(end_time + 1.month)
end
it "sets the end period to the given yearly calculation" do
owner = owner_class.create()
end_time = time + 2.hours
plan = create(:susply_plan, interval: 'yearly')
subscription = create(:susply_subscription, :active, owner: owner,
plan: plan, current_period_end: end_time)
s = Susply::RenewsSubscription.call(owner)
expect(s.current_period_end).to eq(end_time + 1.year)
end
end
end
end
| 36.685185 | 76 | 0.629985 |
284d9a780376dfdb8cf25ffbc129fe499c46617c
| 128 |
# frozen_string_literal: true
# typed: strict

# Sorbet package spec for the Minitest package.
class Minitest < PackageSpec
  # Critic is importable from this package's test code only.
  test_import Critic

  # Only the Minitest::Tests namespace is exposed to dependent packages.
  export Minitest::Tests
end
| 12.8 | 29 | 0.773438 |
1808a4f2fb9049be1180bc9b18027d169906b7ab
| 1,291 |
# frozen_string_literal: true
require 'spec_helper'
# Specs for Gitlab::ObjectifiedHash. Fix: the last two examples previously
# shared the identical description ("does not care if you use a string or
# symbol to reference a method") while asserting different things, which makes
# failures ambiguous; each now states what it actually verifies.
describe Gitlab::ObjectifiedHash do
  before do
    @hash = { a: 1, b: 2, 'string' => 'string', symbol: :symbol }
    @oh = described_class.new @hash
  end

  it 'objectifies a hash' do
    expect(@oh.a).to eq(@hash[:a])
    expect(@oh.b).to eq(@hash[:b])
  end

  describe '#to_hash' do
    it 'returns an original hash' do
      expect(@oh.to_hash).to eq(@hash)
    end

    it 'has an alias #to_h' do
      expect(@oh).to respond_to(:to_h)
    end
  end

  describe '#inspect' do
    it 'returns a formatted string' do
      pretty_string = "#<#{@oh.class.name}:#{@oh.object_id} {hash: #{@hash}}"
      expect(@oh.inspect).to eq(pretty_string)
    end
  end

  describe '#respond_to' do
    it 'returns true for methods this object responds to through method_missing as sym' do
      expect(@oh).to respond_to(:a)
    end

    it 'returns true for methods this object responds to through method_missing as string' do
      expect(@oh).to respond_to('string')
    end

    it 'responds to a symbol even when the key was set with a string' do
      expect(@oh).to respond_to(:string)
    end

    it 'responds to a string even when the key was set with a symbol' do
      expect(@oh).to respond_to('symbol')
    end
  end
end
| 25.313725 | 93 | 0.646785 |
5d80b7390b5c3b772bb6a19d034de0ba0bf50c55
| 1,640 |
require 'brakeman/checks/base_check'
#Check for bypassing mass assignment protection
#with without_protection => true
#
#Only for Rails 3.1
class Brakeman::CheckWithoutProtection < Brakeman::BaseCheck
  Brakeman::Checks.add self

  @description = "Check for mass assignment using without_protection"

  # Scan mass-assignment-style calls on known ActiveRecord models.
  # Only applies to Rails >= 3.1, where :without_protection was introduced.
  def run_check
    return if version_between? "0.0.0", "3.0.99"
    return if active_record_models.empty?

    Brakeman.debug "Finding all mass assignments"
    mass_assignment_methods = [:new,
                               :attributes=,
                               :update_attributes,
                               :update_attributes!,
                               :create,
                               :create!]
    calls = tracker.find_call :targets => active_record_models.keys,
                              :methods => mass_assignment_methods

    Brakeman.debug "Processing all mass assignments"
    calls.each { |result| process_result result }
  end

  # All results should be Model.new(...) or Model.attributes=() calls.
  # Warns when the trailing options hash contains :without_protection => true;
  # confidence is raised to high when user input reaches the call.
  def process_result res
    call = res[:call]
    options = call.args.last

    return unless hash?(options) && !call.original_line && !duplicate?(res)

    value = hash_access(options, :without_protection)
    return unless value && true?(value)

    add_result res

    input = include_user_input? call.arglist

    if input
      confidence = CONFIDENCE[:high]
      user_input = input.match
    else
      confidence = CONFIDENCE[:med]
      user_input = nil
    end

    warn :result => res,
         :warning_type => "Mass Assignment",
         :message => "Unprotected mass assignment",
         :code => call,
         :user_input => user_input,
         :confidence => confidence
  end
end
| 25.625 | 87 | 0.632927 |
0131657e708a8747293edc14e53248a88c44cef0
| 61 |
# Project Euler #20: print the sum of the decimal digits of 100!.
factorial = (1..100).reduce(:*)
puts factorial.to_s.each_char.reduce(0) { |total, digit| total + digit.to_i }
| 30.5 | 60 | 0.590164 |
e8d0a9fe082eb27b80289744c12c463762d9dab8
| 447 |
require "spec_helper"
describe Onebox::Engine::ImgurOnebox do
  # Gallery (album) URL exercised by every example.
  let(:link) { "https://imgur.com/gallery/Sdc0Klc" }
  let(:imgur) { described_class.new(link) }
  let(:html) { imgur.to_html }

  before do
    # Stub the HTTP fetch for the link with the canned "imgur" fixture so no
    # real network request is made.
    fake(link, response("imgur"))
  end

  it "excludes html tags in title" do
    # Force the album rendering branch; the album title must appear as plain
    # text inside the span rather than being interpreted as markup.
    allow(imgur).to receive(:is_album?) { true }
    expect(html).to include("<span class='album-title'>[Album] Did you miss me?</span>")
  end
end
| 26.294118 | 89 | 0.646532 |
6aa59f72d2a9d815c916a23b63a34144760e8c78
| 8,403 |
# frozen_string_literal: true
require 'spec_helper'
# Feature specs for the "Merge requests" section of a project's settings page:
# merge-method copy, squash options, visibility of MR-dependent settings when
# the merge request / pipelines project features are toggled, checkbox-backed
# project attributes, and the default target project for forks.
RSpec.describe 'Projects > Settings > User manages merge request settings' do
  include ProjectForksHelper

  let(:user) { create(:user) }
  let(:project) { create(:project, :public, namespace: user.namespace, path: 'gitlab', name: 'sample') }

  before do
    sign_in(user)
    visit edit_project_path(project)
  end

  it 'shows "Merge commit" strategy' do
    page.within '#js-merge-request-settings' do
      expect(page).to have_content 'Merge commit'
    end
  end

  it 'shows "Merge commit with semi-linear history " strategy' do
    page.within '#js-merge-request-settings' do
      expect(page).to have_content 'Merge commit with semi-linear history'
    end
  end

  it 'shows "Fast-forward merge" strategy' do
    page.within '#js-merge-request-settings' do
      expect(page).to have_content 'Fast-forward merge'
    end
  end

  it 'shows Squash commit options', :aggregate_failures do
    page.within '#js-merge-request-settings' do
      expect(page).to have_content 'Do not allow'
      expect(page).to have_content 'Squashing is never performed and the checkbox is hidden.'
      expect(page).to have_content 'Allow'
      expect(page).to have_content 'Checkbox is visible and unselected by default.'
      expect(page).to have_content 'Encourage'
      expect(page).to have_content 'Checkbox is visible and selected by default.'
      expect(page).to have_content 'Require'
      expect(page).to have_content 'Squashing is always performed. Checkbox is visible and selected, and users cannot change it.'
    end
  end

  # MR-dependent settings ("Pipelines must succeed", "All threads must be
  # resolved") should only be visible while the merge request feature is
  # enabled; these contexts flip the feature toggles and save the form.
  context 'when Merge Request and Pipelines are initially enabled', :js do
    context 'when Pipelines are initially enabled' do
      it 'shows the Merge Requests settings' do
        expect(page).to have_content 'Pipelines must succeed'
        expect(page).to have_content 'All threads must be resolved'

        within('.sharing-permissions-form') do
          find('.project-feature-controls[data-for="project[project_feature_attributes][merge_requests_access_level]"] .gl-toggle').click
          find('[data-testid="project-features-save-button"]').send_keys(:return)
        end

        expect(page).not_to have_content 'Pipelines must succeed'
        expect(page).not_to have_content 'All threads must be resolved'
      end
    end

    context 'when Pipelines are initially disabled', :js do
      before do
        project.project_feature.update_attribute('builds_access_level', ProjectFeature::DISABLED)
        visit edit_project_path(project)
      end

      it 'shows the Merge Requests settings that do not depend on Builds feature' do
        expect(page).to have_content 'Pipelines must succeed'
        expect(page).to have_content 'All threads must be resolved'

        within('.sharing-permissions-form') do
          find('.project-feature-controls[data-for="project[project_feature_attributes][builds_access_level]"] .gl-toggle').click
          find('[data-testid="project-features-save-button"]').send_keys(:return)
        end

        expect(page).to have_content 'Pipelines must succeed'
        expect(page).to have_content 'All threads must be resolved'
      end
    end
  end

  context 'when Merge Request are initially disabled', :js do
    before do
      project.project_feature.update_attribute('merge_requests_access_level', ProjectFeature::DISABLED)
      visit edit_project_path(project)
    end

    it 'does not show the Merge Requests settings' do
      expect(page).not_to have_content 'Pipelines must succeed'
      expect(page).not_to have_content 'All threads must be resolved'

      within('.sharing-permissions-form') do
        find('.project-feature-controls[data-for="project[project_feature_attributes][merge_requests_access_level]"] .gl-toggle').click
        find('[data-testid="project-features-save-button"]').send_keys(:return)
      end

      expect(page).to have_content 'Pipelines must succeed'
      expect(page).to have_content 'All threads must be resolved'
    end
  end

  # Checkbox-backed boolean attributes: each spec unchecks the box, saves the
  # form, and asserts both the UI state and the persisted column.
  describe 'Checkbox to enable merge request link', :js do
    it 'is initially checked' do
      checkbox = find_field('project_printing_merge_request_link_enabled')
      expect(checkbox).to be_checked
    end

    it 'when unchecked sets :printing_merge_request_link_enabled to false' do
      uncheck('project_printing_merge_request_link_enabled')
      within('.merge-request-settings-form') do
        find('.rspec-save-merge-request-changes')
        click_on('Save changes')
      end

      wait_for_all_requests

      checkbox = find_field('project_printing_merge_request_link_enabled')
      expect(checkbox).not_to be_checked

      project.reload
      expect(project.printing_merge_request_link_enabled).to be(false)
    end
  end

  describe 'Checkbox to remove source branch after merge', :js do
    it 'is initially checked' do
      checkbox = find_field('project_remove_source_branch_after_merge')
      expect(checkbox).to be_checked
    end

    it 'when unchecked sets :remove_source_branch_after_merge to false' do
      uncheck('project_remove_source_branch_after_merge')
      within('.merge-request-settings-form') do
        find('.rspec-save-merge-request-changes')
        click_on('Save changes')
      end

      wait_for_all_requests

      checkbox = find_field('project_remove_source_branch_after_merge')
      expect(checkbox).not_to be_checked

      project.reload
      expect(project.remove_source_branch_after_merge).to be(false)
    end
  end

  # Radio-button-backed squash_option enum: choose each value, save, and
  # assert both the selected radio and the persisted project setting.
  describe 'Squash commits when merging', :js do
    it 'initially has :squash_option set to :default_off' do
      radio = find_field('project_project_setting_attributes_squash_option_default_off')
      expect(radio).to be_checked
    end

    it 'allows :squash_option to be set to :default_on' do
      choose('project_project_setting_attributes_squash_option_default_on')
      within('.merge-request-settings-form') do
        find('.rspec-save-merge-request-changes')
        click_on('Save changes')
      end

      wait_for_requests

      radio = find_field('project_project_setting_attributes_squash_option_default_on')
      expect(radio).to be_checked
      expect(project.reload.project_setting.squash_option).to eq('default_on')
    end

    it 'allows :squash_option to be set to :always' do
      choose('project_project_setting_attributes_squash_option_always')
      within('.merge-request-settings-form') do
        find('.rspec-save-merge-request-changes')
        click_on('Save changes')
      end

      wait_for_requests

      radio = find_field('project_project_setting_attributes_squash_option_always')
      expect(radio).to be_checked
      expect(project.reload.project_setting.squash_option).to eq('always')
    end

    it 'allows :squash_option to be set to :never' do
      choose('project_project_setting_attributes_squash_option_never')
      within('.merge-request-settings-form') do
        find('.rspec-save-merge-request-changes')
        click_on('Save changes')
      end

      wait_for_requests

      radio = find_field('project_project_setting_attributes_squash_option_never')
      expect(radio).to be_checked
      expect(project.reload.project_setting.squash_option).to eq('never')
    end
  end

  # The default-target option is only rendered for forks.
  describe 'target project settings' do
    context 'when project is a fork' do
      let_it_be(:upstream) { create(:project, :public) }

      let(:project) { fork_project(upstream, user) }

      it 'allows to change merge request target project behavior' do
        expect(page).to have_content 'The default target project for merge requests'

        radio = find_field('project_project_setting_attributes_mr_default_target_self_false')
        expect(radio).to be_checked

        choose('project_project_setting_attributes_mr_default_target_self_true')
        within('.merge-request-settings-form') do
          find('.rspec-save-merge-request-changes')
          click_on('Save changes')
        end

        wait_for_requests

        radio = find_field('project_project_setting_attributes_mr_default_target_self_true')
        expect(radio).to be_checked
        expect(project.reload.project_setting.mr_default_target_self).to be_truthy
      end
    end

    it 'does not show target project section' do
      expect(page).not_to have_content 'The default target project for merge requests'
    end
  end
end
| 34.72314 | 137 | 0.714745 |
e9593149a9f1b4fda8517496e0d3b6c28a54afd2
| 65,804 |
Rails.application.routes.draw do
# rubocop:disable Layout/HashAlignment
# rubocop:disable Layout/MultilineOperationIndentation
# default routes for each controller
default_routes = %w(
report_data
)
# grouped routes
adv_search_post = %w(
adv_search_button
adv_search_clear
adv_search_load_choice
adv_search_name_typed
adv_search_toggle
search_clear
)
button_post = %w(
button_create
button_update
)
compare_get = %w(
compare_miq
compare_to_csv
compare_to_pdf
compare_to_txt
)
compare_post = %w(
compare_cancel
compare_choose_base
compare_compress
compare_miq
compare_miq_all
compare_miq_differences
compare_miq_same
compare_mode
compare_remove
compare_set_state
)
dialog_runner_post = %w(
dialog_field_changed
dialog_form_button_pressed
dynamic_checkbox_refresh
dynamic_date_refresh
dynamic_radio_button_refresh
dynamic_text_box_refresh
open_url_after_dialog
)
drift_get = %w(
drift
drift_history
drift_to_csv
drift_to_pdf
drift_to_txt
)
drift_post = %w(
drift_all
drift_compress
drift_differences
drift_history
drift_mode
drift_same
)
exp_post = %w(
exp_button
exp_changed
exp_token_pressed
)
ownership_post = %w(
ownership
ownership_update
)
perf_post = %w(
perf_chart_chooser
perf_top_chart
)
policy_post = %w(
policy_options
policy_show_options
policy_sim
policy_sim_add
policy_sim_remove
)
pre_prov_post = %w(
pre_prov
pre_prov_continue
)
save_post = %w(
save_default_search
)
snap_post = %w(
snap_pressed
)
x_post = %w(
x_button
x_history
x_search_by_name
x_show
)
controller_routes = {
:ems_storage_dashboard => {
:get => %w[
show
aggregate_status_data
resources_capacity_data
]
},
:auth_key_pair_cloud => {
:get => %w(
download_data
download_summary_pdf
index
new
protect
show
show_list
tagging_edit
download_private_key
ownership
) +
compare_get,
:post => %w(
button
dynamic_checkbox_refresh
listnav_search_selected
ownership_update
protect
quick_search
sections_field_changed
show
show_list
tagging_edit
wait_for_task
) +
adv_search_post +
compare_post +
exp_post +
save_post
},
:automation_manager_configured_system => {
:get => %w(
download_data
download_summary_pdf
show
show_list
tagging_edit
),
:post => %w(
button
listnav_search_selected
quick_search
reload
show
show_list
tagging_edit
) +
adv_search_post +
exp_post
},
:configuration_script => {
:get => %w(
configuration_script_service_dialog
download_summary_pdf
show
show_list
tagging_edit
),
:post => %w(
button
listnav_search_selected
quick_search
reload
show
show_list
tagging_edit
) +
adv_search_post +
exp_post +
save_post
},
:availability_zone => {
:get => %w(
dialog_load
download_data
download_summary_pdf
index
perf_top_chart
protect
show
show_list
tagging_edit
) +
compare_get,
:post => %w(
button
listnav_search_selected
protect
quick_search
sections_field_changed
show
show_list
tagging_edit
tl_chooser
wait_for_task
) +
adv_search_post +
compare_post +
dialog_runner_post +
exp_post +
perf_post +
save_post
},
:host_aggregate => {
:get => %w(
add_host_select
delete_host_aggregates
download_data
download_summary_pdf
edit
host_aggregate_form_fields
index
new
perf_top_chart
protect
remove_host_select
show
show_list
tagging_edit
) +
compare_get,
:post => %w(
add_host
add_host_select
button
delete_host_aggregates
listnav_search_selected
create
protect
quick_search
remove_host
remove_host_select
sections_field_changed
show
show_list
tagging_edit
tl_chooser
update
wait_for_task
) +
adv_search_post +
compare_post +
exp_post +
perf_post +
save_post
},
:catalog => {
:get => %w(
download_data
explorer
ot_edit
ot_orchestration_managers
ot_show
servicetemplates_names
show
),
:post => %w(
ab_group_reorder
accordion_select
ae_tree_select
ae_tree_select_discard
ae_tree_select_toggle
atomic_form_field_changed
atomic_st_edit
automate_button_field_changed
playbook_options_field_changed
explorer
group_create
group_form_field_changed
group_reorder_field_changed
group_update
ot_tags_edit
ownership_update
prov_field_changed
reload
resolve
resource_delete
save_copy_catalog
servicetemplate_edit
servicetemplate_copy
servicetemplate_copy_cancel
servicetemplate_copy_saved
sort_ds_grid
sort_host_grid
sort_iso_img_grid
sort_pxe_img_grid
sort_vc_grid
sort_vm_grid
st_catalog_edit
st_edit
st_form_field_changed
st_tags_edit
st_upload_image
tree_autoload
tree_select
x_button
x_history
x_show
) +
button_post +
exp_post +
dialog_runner_post
},
:chargeback_assignment => {
:get => %w(
change_tab
index
),
:post => %w(
form_field_changed
update
)
},
:chargeback_rate => {
:get => %w(
copy
edit
new
show_list
show
),
:post => %w(
delete
edit
form_field_changed
show_list
show
tier_add
tier_remove
)
},
:chargeback_report => {
:get => %w(
show
show_list
render_csv
render_pdf
render_txt
report_only
),
:post => %w(
saved_report_paging
show
)
},
:configuration_job => {
:get => %w(
download_data
download_summary_pdf
index
parameters
show
show_list
tagging_edit
protect
),
:post => %w(
button
listnav_search_selected
parameters
quick_search
sections_field_changed
show
show_list
protect
tagging_edit
) +
adv_search_post +
exp_post +
save_post
},
:consumption => {
:get => %w(
show
)
},
:cloud_object_store_container => {
:get => %w(
dialog_load
download_data
download_summary_pdf
index
show
show_list
tagging_edit
new
),
:post => %w(
button
dynamic_checkbox_refresh
listnav_search_selected
quick_search
show
show_list
tagging_edit
create
wait_for_task
) + adv_search_post + exp_post + save_post + dialog_runner_post
},
:cloud_tenant => {
:get => %w(
delete_cloud_tenants
dialog_load
download_data
download_summary_pdf
edit
index
new
protect
show
show_list
tagging_edit
) +
compare_get,
:post => %w(
button
listnav_search_selected
protect
quick_search
sections_field_changed
show
show_list
tagging_edit
wait_for_task
) +
adv_search_post +
compare_post +
dialog_runner_post +
exp_post +
save_post
},
:cloud_tenant_dashboard => {
:get => %w(
data
recent_instances_data
recent_images_data
aggregate_status_data
)
},
:cloud_object_store_object => {
:get => %w(
download_data
download_summary_pdf
index
show
show_list
tagging_edit
),
:post => %w(
button
dynamic_checkbox_refresh
listnav_search_selected
quick_search
show
show_list
tagging_edit
) + adv_search_post + exp_post + save_post
},
:cloud_volume => {
:get => %w(
dialog_load
download_data
download_summary_pdf
attach
detach
backup_new
backup_select
snapshot_new
edit
index
new
show
show_list
tagging_edit
) +
compare_get,
:post => %w(
attach_volume
detach_volume
backup_create
backup_restore
snapshot_create
button
create
dynamic_checkbox_refresh
listnav_search_selected
quick_search
reload
sections_field_changed
show
show_list
tagging_edit
update
wait_for_task
) +
adv_search_post +
compare_post +
dialog_runner_post +
exp_post +
save_post
},
:cloud_volume_snapshot => {
:get => %w(
download_data
download_summary_pdf
index
show
show_list
tagging_edit
),
:post => %w(
button
dynamic_checkbox_refresh
listnav_search_selected
quick_search
show
show_list
tagging_edit
) + adv_search_post + exp_post + save_post
},
:cloud_volume_backup => {
:get => %w(
volume_select
volume_form_choices
download_data
index
show
show_list
tagging_edit
),
:post => %w(
backup_restore
button
dynamic_checkbox_refresh
listnav_search_selected
quick_search
show
show_list
tagging_edit
wait_for_task
) + adv_search_post + exp_post + save_post
},
:cloud_volume_type => {
:get => %w(
download_data
index
show
show_list
tagging_edit
),
:post => %w(
button
dynamic_checkbox_refresh
listnav_search_selected
quick_search
show
show_list
tagging_edit
wait_for_task
) + adv_search_post + exp_post + save_post
},
:configuration => {
# TODO: routes for new/edit/copy buttons need to be revisited
# TODO: so they can be changed to send up POST request instead of GET
:get => %w(
change_tab
index
show
timeprofile_copy
timeprofile_edit
timeprofile_new
time_profile_form_fields
),
:post => %w(
button
filters_field_changed
theme_changed
timeprofile_delete
tree_autoload
update
view_selected
)
},
:container => {
:get => %w(
download_data
download_summary_pdf
perf_top_chart
show
tl_chooser
wait_for_task
show_list
tagging_edit
),
:post => %w(
accordion_select
button
show
show_list
tl_chooser
wait_for_task
quick_search
reload
tree_autoload
tree_select
tagging_edit
listnav_search_selected
x_button
x_history
x_search_by_name
) +
adv_search_post +
exp_post +
perf_post +
save_post
},
:container_group => {
:get => %w(
dialog_load
download_data
download_summary_pdf
index
perf_top_chart
show
show_list
tagging_edit
protect
),
:post => %w(
button
dynamic_checkbox_refresh
listnav_search_selected
quick_search
sections_field_changed
show
show_list
tl_chooser
wait_for_task
tagging_edit
protect
) +
adv_search_post +
exp_post +
perf_post +
save_post +
dialog_runner_post
},
:container_node => {
:get => %w(
dialog_load
download_data
download_summary_pdf
index
perf_top_chart
show
show_list
tagging_edit
protect
),
:post => %w(
button
dynamic_checkbox_refresh
listnav_search_selected
quick_search
sections_field_changed
show
show_list
tl_chooser
wait_for_task
tagging_edit
protect
launch_external_logging
) +
adv_search_post +
dialog_runner_post +
exp_post +
perf_post +
save_post
},
:container_replicator => {
:get => %w(
download_data
download_summary_pdf
index
perf_top_chart
show
show_list
tagging_edit
protect
),
:post => %w(
button
dynamic_checkbox_refresh
listnav_search_selected
quick_search
sections_field_changed
show
show_list
tl_chooser
wait_for_task
tagging_edit
protect
) +
adv_search_post +
exp_post +
perf_post +
save_post
},
:container_image => {
:get => %w(
dialog_load
download_data
download_summary_pdf
index
show
show_list
tagging_edit
guest_applications
openscap_rule_results
openscap_html
protect
),
:post => %w(
button
dynamic_checkbox_refresh
listnav_search_selected
quick_search
sections_field_changed
show
show_list
tagging_edit
guest_applications
openscap_rule_results
protect
) + adv_search_post + exp_post + save_post + dialog_runner_post
},
:container_image_registry => {
:get => %w(
download_data
download_summary_pdf
index
show
show_list
tagging_edit
),
:post => %w(
button
dynamic_checkbox_refresh
listnav_search_selected
quick_search
sections_field_changed
show
show_list
tagging_edit
) + adv_search_post + exp_post + save_post
},
:container_service => {
:get => %w(
download_data
download_summary_pdf
index
perf_top_chart
show
show_list
tagging_edit
),
:post => %w(
button
dynamic_checkbox_refresh
listnav_search_selected
quick_search
sections_field_changed
show
show_list
wait_for_task
tagging_edit
) +
adv_search_post +
exp_post +
perf_post +
save_post
},
:container_project => {
:get => %w(
dialog_load
download_data
download_summary_pdf
index
perf_top_chart
show
show_list
tagging_edit
),
:post => %w(
button
dynamic_checkbox_refresh
listnav_search_selected
quick_search
sections_field_changed
show
show_list
tl_chooser
wait_for_task
tagging_edit
) +
adv_search_post +
exp_post +
perf_post +
save_post +
dialog_runner_post
},
:container_route => {
:get => %w(
download_data
download_summary_pdf
index
show
show_list
tagging_edit
),
:post => %w(
button
dynamic_checkbox_refresh
listnav_search_selected
quick_search
sections_field_changed
show
show_list
tagging_edit
) + adv_search_post + exp_post + save_post
},
:persistent_volume => {
:get => %w(
dialog_load
download_data
download_summary_pdf
index
show
show_list
tagging_edit
),
:post => %w(
button
dynamic_checkbox_refresh
listnav_search_selected
quick_search
sections_field_changed
show
show_list
tagging_edit
) + adv_search_post + exp_post + save_post + dialog_runner_post
},
:container_build => {
:get => %w(
download_data
download_summary_pdf
index
show
show_list
tagging_edit
),
:post => %w(
button
dynamic_checkbox_refresh
listnav_search_selected
quick_search
sections_field_changed
show
show_list
tagging_edit
) + adv_search_post + exp_post + save_post
},
:container_template => {
:get => %w(
dialog_load
download_data
download_summary_pdf
index
service_dialog_from_ct
show
show_list
tagging_edit
),
:post => %w(
button
ct_form_field_changed
dynamic_checkbox_refresh
listnav_search_selected
quick_search
sections_field_changed
service_dialog_from_ct_submit
show
show_list
tagging_edit
) + adv_search_post + exp_post + save_post + dialog_runner_post
},
:physical_infra_overview => {
:get => %w(
show
)
},
:container_dashboard => {
:get => %w(
data
ems_utilization_data
heatmaps_data
image_metrics_data
network_metrics_data
pod_metrics_data
project_data
refresh_status_data
show
)
},
:alerts_overview => {
:get => %w(
show
)
},
:alerts_list => {
:get => %w(
show
class_icons
)
},
:alerts_most_recent => {
:get => %w(
show
)
},
:dashboard => {
:get => %w(
auth_error
iframe
change_tab
index
login
logout
saml_login
oidc_login
render_csv
render_pdf
render_txt
render_chart
report_only
show
timeline_data
start_url
widget_to_pdf
widget_chart_data
widget_menu_data
widget_report_data
),
:post => %w(
external_authenticate
kerberos_authenticate
initiate_saml_login
initiate_oidc_login
authenticate
change_group
csp_report
timeline_data
login_retry
reset_widgets
tree_select
wait_for_task
widget_add
widget_close
widget_dd_done
)
},
:ems_cloud => {
:get => %w(
dialog_load
download_data
download_summary_pdf
protect
show
show_list
sync_users
tagging_edit
) +
compare_get,
:post => %w(
new
button
dynamic_checkbox_refresh
dynamic_radio_button_refresh
dynamic_text_box_refresh
listnav_search_selected
protect
quick_search
sections_field_changed
show
show_list
sync_users
tagging_edit
tl_chooser
wait_for_task
launch_console
) +
adv_search_post +
compare_post +
dialog_runner_post +
exp_post +
save_post
},
:ems_cloud_dashboard => {
:get => %w(
data
recent_instances_data
recent_images_data
aggregate_status_data
)
},
:ems_cluster => {
:get => %w(
dialog_load
download_data
download_summary_pdf
index
perf_top_chart
protect
show
show_list
tagging_edit
) +
compare_get +
drift_get,
:post => %w(
button
listnav_search_selected
protect
quick_search
sections_field_changed
show
show_list
tagging_edit
tl_chooser
tree_autoload
wait_for_task
) +
adv_search_post +
compare_post +
dialog_runner_post +
drift_post +
exp_post +
perf_post +
save_post
},
:ems_infra => {
:get => %w(
dialog_load
download_data
download_summary_pdf
register_nodes
protect
scaledown
scaling
show
show_list
tagging_edit
) +
compare_get,
:post => %w(
new
button
register_nodes
listnav_search_selected
protect
quick_search
sections_field_changed
show
show_list
tagging_edit
tl_chooser
tree_autoload
wait_for_task
scaling
scaledown
open_admin_ui
open_admin_ui_done
launch_console
) +
adv_search_post +
compare_post +
dialog_runner_post +
exp_post +
save_post
},
:ems_infra_dashboard => {
:get => %w(
data
cluster_metrics_data
ems_utilization_data
recent_hosts_data
recent_vms_data
aggregate_status_data
)
},
:ems_physical_infra => {
:get => %w(
download_data
download_summary_pdf
protect
show
show_list
tagging_edit
),
:post => %w(
new
button
listnav_search_selected
protect
quick_search
show
show_list
tagging_edit
tl_chooser
tree_autoload
wait_for_task
launch_console
) +
adv_search_post +
dialog_runner_post +
exp_post +
save_post
},
:physical_switch => {
:get => %w(
download_data
download_summary_pdf
show_list
show
),
:post => %w(
listnav_search_selected
show_list
quick_search
) + adv_search_post + save_post,
},
:physical_server => {
:get => %w(
download_data
download_summary_pdf
perf_top_chart
protect
show_list
show
tagging_edit
console_file
),
:post => %w(
button
show_list
listnav_search_selected
protect
tagging_edit
quick_search
tl_chooser
wait_for_task
provision
console
) +
adv_search_post +
exp_post +
save_post
},
:physical_rack => {
:get => %w(
download_data
download_summary_pdf
protect
show_list
show
),
:post => %w(
show_list
quick_search
)
},
:physical_network_port => {
:get => %w(
download_data
download_summary_pdf
show_list
show
),
:post => %w(
show_list
)
},
:physical_storage => {
:get => %w[
download_data
download_summary_pdf
edit
show
show_list
new
],
:post => %w[
button
listnav_search_selected
quick_search
show_list
] + adv_search_post + save_post + exp_post
},
:physical_chassis => {
:get => %w(
download_data
download_summary_pdf
protect
show_list
show
),
:post => %w(
show_list
quick_search
)
},
:guest_device => {
:get => %w(
show_list
show
quick_search
),
:post => %w(
show_list
) +
adv_search_post +
exp_post +
save_post
},
:ems_physical_infra_dashboard => {
:get => %w(
recent_servers_data
aggregate_status_data
servers_group_data
)
},
:ems_container => {
:get => %w(
dialog_load
download_data
download_summary_pdf
perf_top_chart
protect
show
show_list
tagging_edit
),
:post => %w(
new
button
dynamic_checkbox_refresh
listnav_search_selected
protect
quick_search
sections_field_changed
show
show_list
tl_chooser
wait_for_task
tagging_edit
launch_external_logging
) +
adv_search_post +
dialog_runner_post +
exp_post +
perf_post +
save_post
},
:ems_network => {
:get => %w(
dialog_load
download_data
download_summary_pdf
index
protect
show_list
tagging_edit
),
:post => %w(
new
button
dynamic_checkbox_refresh
dynamic_radio_button_refresh
dynamic_text_box_refresh
listnav_search_selected
protect
quick_search
sections_field_changed
show_list
tagging_edit
tl_chooser
wait_for_task
) +
adv_search_post +
dialog_runner_post +
exp_post +
save_post
},
:security_group => {
:get => %w(
dialog_load
edit
download_data
download_summary_pdf
index
new
protect
show
show_list
tagging_edit
) +
compare_get,
:post => %w(
button
create
listnav_search_selected
protect
quick_search
sections_field_changed
show
show_list
tagging_edit
update
wait_for_task
) +
adv_search_post +
compare_post +
save_post +
exp_post +
dialog_runner_post
},
:security_policy => {
:get => %w[
dialog_load
download_data
download_summary_pdf
show
show_list
tagging_edit
],
:post => %w[
button
quick_search
listnav_search_selected
show
show_list
tagging_edit
wait_for_task
] +
adv_search_post +
save_post +
exp_post +
dialog_runner_post
},
:security_policy_rule => {
:get => %w[
dialog_load
download_data
download_summary_pdf
show
show_list
tagging_edit
],
:post => %w[
button
quick_search
listnav_search_selected
show
show_list
tagging_edit
wait_for_task
] +
adv_search_post +
save_post +
exp_post +
dialog_runner_post
},
:floating_ip => {
:get => %w(
download_data
download_summary_pdf
edit
index
new
show
show_list
tagging_edit
),
:post => %w(
button
create
listnav_search_selected
quick_search
show
show_list
tagging_edit
wait_for_task
) +
adv_search_post +
save_post +
exp_post
},
:cloud_subnet => {
:get => %w(
dialog_load
download_data
download_summary_pdf
edit
index
new
protect
show
show_list
tagging_edit
) +
compare_get,
:post => %w(
button
dynamic_checkbox_refresh
listnav_search_selected
protect
quick_search
sections_field_changed
show
show_list
tagging_edit
wait_for_task
) +
adv_search_post +
compare_post +
save_post +
exp_post +
dialog_runner_post
},
:cloud_database => {
:get => %w(
show_list
index
show
download_data
download_summary_pdf
),
:post => %w(
quick_search
show_list
show
) +
adv_search_post +
exp_post
},
:cloud_network => {
:get => %w(
dialog_load
download_data
download_summary_pdf
edit
index
new
protect
show
show_list
tagging_edit
) +
compare_get,
:post => %w(
button
create
dynamic_checkbox_refresh
listnav_search_selected
protect
quick_search
sections_field_changed
show
show_list
tagging_edit
update
wait_for_task
) +
adv_search_post +
compare_post +
save_post +
exp_post +
dialog_runner_post
},
:network_port => {
:get => %w(
download_data
download_summary_pdf
index
show
show_list
tagging_edit
),
:post => %w(
button
quick_search
listnav_search_selected
sections_field_changed
show
show_list
tagging_edit
wait_for_task
) +
adv_search_post +
save_post +
exp_post
},
:network_router => {
:get => %w(
add_interface_select
dialog_load
download_data
download_summary_pdf
edit
index
new
protect
remove_interface_select
show
show_list
tagging_edit
) +
compare_get,
:post => %w(
add_interface
add_interface_select
button
listnav_search_selected
protect
quick_search
remove_interface
remove_interface_select
sections_field_changed
show
show_list
tagging_edit
wait_for_task
) +
adv_search_post +
compare_post +
save_post +
exp_post +
dialog_runner_post
},
:network_service => {
:get => %w[
download_data
download_summary_pdf
show
show_list
tagging_edit
],
:post => %w[
button
quick_search
listnav_search_selected
show
show_list
tagging_edit
wait_for_task
] +
adv_search_post +
save_post +
exp_post
},
:flavor => {
# FIXME: Change tagging_edit to POST only; We need to remove the redirects
# in app/controllers/application_controller/tags.rb#tag that are used in
# a role of a method call.
# Then remove this route from all other controllers too.
:get => %w(
download_data
download_summary_pdf
index
protect
show
show_list
new
tagging_edit
ems_list
cloud_tenants
) +
compare_get,
:post => %w(
button
listnav_search_selected
protect
quick_search
sections_field_changed
show
show_list
tagging_edit
) +
adv_search_post +
compare_post +
exp_post +
save_post
},
:host => {
:get => %w(
advanced_settings
dialog_load
download_data
download_summary_pdf
edit
filesystem_download
filesystems
firewall_rules
timeline_data
groups
guest_applications
host_form_fields
host_services
host_cloud_services
index
patches
perf_top_chart
protect
show
show_list
start
tagging_edit
users
) +
compare_get +
drift_get,
:post => %w(
advanced_settings
button
drift_all
drift_compress
drift_differences
drift_mode
drift_same
filesystems
firewall_rules
groups
guest_applications
host_services
host_cloud_services
listnav_search_selected
quick_search
patches
protect
sections_field_changed
show
show_list
tagging_edit
tl_chooser
tree_autoload
update
users
wait_for_task
) +
adv_search_post +
compare_post +
dialog_runner_post +
exp_post +
perf_post +
save_post
},
:infra_networking => {
:get => %w(
dialog_load
download_data
download_summary_pdf
explorer
hosts
show_list
tagging
tagging_edit
x_show
),
:post => %w(
button
custom_button_events
explorer
hosts
listnav_search_selected
quick_search
reload
show_list
tagging
tagging_edit
tree_select
tree_autoload
x_button
x_show
x_search_by_name
) +
adv_search_post +
exp_post +
save_post +
x_post +
dialog_runner_post
},
:generic_object => {
:get => %w(
show
show_list
tagging_edit
),
:post => %w(
button
tagging_edit
)
},
:generic_object_definition => {
:get => %w(
custom_buttons_in_set
download_data
download_summary_pdf
edit
new
retrieve_distinct_instances_across_domains
service_template_ansible_playbooks
show
show_list
tagging_edit
),
:post => %w(
add_button_in_group
button
custom_button_group_new
custom_button_group_edit
custom_button_new
custom_button_edit
edit
listnav_search_selected
new
quick_search
show_list
tagging_edit
tree_select
) +
adv_search_post +
exp_post +
save_post
},
:ansible_credential => {
:get => %w(
download_data
download_summary_pdf
edit
new
show
show_list
tagging_edit
),
:post => %w(
search_clear
button
show_list
tagging_edit
)
},
:ansible_playbook => {
:get => %w(
download_data
download_summary_pdf
show
show_list
tagging_edit
),
:post => %w(
search_clear
button
show_list
tagging_edit
)
},
:ansible_repository => {
:get => %w(
download_data
download_summary_pdf
edit
new
show
show_list
tagging_edit
),
:post => %w(
search_clear
button
edit
new
repository_refresh
show_list
tagging_edit
)
},
:miq_ae_class => {
:get => %w(
explorer
method_form_fields
namespace
show
),
:post => %w(
add_update_method
ae_tree_select
ae_tree_select_discard
ae_tree_select_toggle
change_tab
copy_objects
create
create_instance
create_method
create_namespace
domains_priority_edit
embedded_methods_add
embedded_methods_remove
explorer
expand_toggle
field_accept
field_delete
field_method_accept
field_method_delete
field_method_select
field_select
fields_form_field_changed
fields_seq_edit
fields_seq_field_changed
form_copy_objects_field_changed
form_field_changed
form_instance_field_changed
form_method_field_changed
priority_form_field_changed
refresh_git_domain
reload
tree_select
tree_autoload
update
update_fields
update_instance
update_method
update_namespace
validate_method_data
x_button
x_history
x_show
) + adv_search_post +
exp_post
},
:miq_ae_customization => {
:get => %w(
explorer
editor
export_service_dialogs
show
),
:post => %w(
ab_group_reorder
accordion_select
automate_button_field_changed
playbook_options_field_changed
change_tab
dialog_copy_editor
dialog_edit_editor
dialog_new_editor
dialog_list
explorer
group_create
group_form_field_changed
group_reorder_field_changed
group_update
import_service_dialogs
old_dialogs_form_field_changed
old_dialogs_list
old_dialogs_update
reload
resolve
tree_autoload
tree_select
upload_import_file
x_button
x_history
x_show
) +
button_post + exp_post
},
:miq_ae_tools => {
:get => %w(
automate_json
check_git_task
export_datastore
fetch_log
import_export
log
resolve
review_import
),
:post => %w(
button
cancel_import
form_field_changed
import_automate_datastore
import_via_git
reset_datastore
resolve
retrieve_git_datastore
upload
upload_import_file
wait_for_task
)
},
:utilization => {
:get => %w(
index
report_download
timeline_data
),
:post => %w(
change_tab
chart_chooser
tree_autoload
tree_select
wait_for_task
)
},
:miq_policy_export => {
:get => %w(
export
fetch_yaml
get_json
import
),
:post => %w(
export
export_field_changed
import
upload
)
},
:miq_policy => {
:get => %w(
copy
edit
miq_event_edit
miq_policy_edit_conditions
miq_policy_edit_events
new
show
show_list
),
:post => %w(
edit
event_build_action_values
miq_event_edit
miq_policy_edit
miq_policy_edit_conditions
miq_policy_edit_events
policy_field_changed
quick_search
reload
show
show_list
) +
adv_search_post +
exp_post
},
:miq_policy_log => {
:get => %w(
fetch_log
),
:post => %w(
button
)
},
:miq_policy_rsop => {
:post => %w(
rsop
rsop_option_changed
rsop_show_options
rsop_toggle
wait_for_task
)
},
:miq_policy_set => {
:get => %w(
edit
new
show
show_list
),
:post => %w(
reload
show
show_list
)
},
:miq_action => {
:get => %w(
edit
new
show
show_list
),
:post => %w(
miq_action_edit
action_field_changed
edit
show
show_list
)
},
:miq_alert => {
:get => %w(
copy
edit
new
show
show_list
),
:post => %w(
alert_field_changed
edit
show
show_list
) +
exp_post
},
:miq_alert_set => {
:get => %w(
edit
edit_assignment
new
show
show_list
),
:post => %w(
alert_profile_assign_changed
alert_profile_field_changed
edit
edit_assignment
new
show
show_list
)
},
:miq_event_definition => {
:get => %w(
show
show_list
),
},
:condition => {
:get => %w(
copy
edit
new
show
show_list
),
:post => %w(
condition_edit
condition_field_changed
edit
show
show_list
) +
adv_search_post +
exp_post
},
:miq_request => {
# FIXME: Change stamp to POST only; We need to remove the redirect
:get => %w(
index
post_install_callback
pre_prov
prov_copy
prov_edit
show
show_list
stamp
),
:post => %w(
button
post_install_callback
pre_prov
prov_continue
prov_edit
prov_field_changed
prov_load_tab
request_copy
request_edit
retrieve_email
show_list
sort_configured_system_grid
sort_ds_grid
sort_host_grid
sort_iso_img_grid
sort_pxe_img_grid
sort_template_grid
sort_vc_grid
sort_vm_grid
sort_windows_image_grid
stamp
stamp_field_changed
vm_pre_prov
) +
dialog_runner_post
},
:miq_task => {
:get => %w(
change_tab
index
jobs
),
:post => %w(
button
jobs
)
},
:miq_template => {
:get => %w(
download_summary_pdf
edit
show
ownership
),
:post => %w(
edit
show
) +
ownership_post
},
:ems_storage => {
:get => %w(
dialog_load
download_data
download_summary_pdf
edit
index
new
protect
show
show_list
tagging_edit
),
:post => %w(
new
button
dynamic_checkbox_refresh
dynamic_radio_button_refresh
dynamic_text_box_refresh
listnav_search_selected
protect
quick_search
sections_field_changed
show
show_list
quick_search
tagging_edit
tl_chooser
wait_for_task
) +
adv_search_post +
dialog_runner_post +
exp_post +
save_post
},
:host_initiator => {
:get => %w[
download_data
download_summary_pdf
index
show
show_list
new
],
:post => %w[
button
listnav_search_selected
quick_search
show_list
] +
adv_search_post +
exp_post +
save_post
},
:host_initiator_group => {
:get => %w[
download_data
download_summary_pdf
show
show_list
new
],
:post => %w[
button
listnav_search_selected
quick_search
show_list
] +
adv_search_post +
exp_post +
save_post
},
:storage_resource => {
:get => %w[
download_data
download_summary_pdf
show
show_list
],
:post => %w[
listnav_search_selected
quick_search
show_list
] +
adv_search_post +
exp_post +
save_post
},
:volume_mapping => {
:get => %w[
download_data
download_summary_pdf
index
show
show_list
new
],
:post => %w[
listnav_search_selected
quick_search
show_list
] +
adv_search_post +
exp_post +
save_post
},
:ops => {
:get => %w(
dialog_load
explorer
fetch_audit_log
fetch_log
fetch_production_log
log_collection_form_fields
pglogical_subscriptions_form_fields
schedule_form_fields
tenant_quotas_form_fields
),
:post => %w(
accordion_select
apply_imports
ap_ce_delete
ap_ce_select
ap_edit
ap_form_field_changed
ap_set_active_tab
aps_list
automate_schedules_set_vars
button
category_delete
category_edit
category_field_changed
ce_accept
ce_delete
ce_new_cat
ce_select
change_tab
cu_collection_field_changed
cu_collection_update
cu_repair
cu_repair_field_changed
diagnostics_server_list
diagnostics_tree_select
explorer
fetch_target_ids
forest_accept
forest_delete
forest_form_field_changed
forest_select
help_menu_form_field_changed
label_tag_mapping_delete
label_tag_mapping_edit
label_tag_mapping_update
label_tag_mapping_field_changed
log_depot_edit
log_depot_validate
orphaned_records_delete
perf_chart_chooser
pglogical_save_subscriptions
pglogical_validate_subscription
rbac_group_edit
rbac_group_field_changed
rbac_group_load_tab
rbac_group_seq_edit
rbac_group_user_lookup
rbac_groups_list
rbac_role_edit
rbac_role_field_changed
rbac_roles_list
rbac_tags_edit
rbac_tenant_edit
rbac_tenants_list
rbac_tenant_manage_quotas
rbac_user_edit
rbac_user_field_changed
rbac_users_list
region_edit
restart_server
schedule_edit
schedule_form_filter_type_field_changed
schedules_list
settings_form_field_changed
settings_update
settings_update_help_menu
show
smartproxy_affinity_field_changed
tl_chooser
tree_autoload
tree_select
upload_csv
upload_form_field_changed
upload_login_brand
upload_login_logo
upload_logo
upload_favicon
wait_for_task
x_button
zone_edit
) + exp_post + dialog_runner_post
},
:optimization => {
:get => %w[
index
show_list
show
json_list
],
:post => %w[
queue_report
],
},
:orchestration_stack => {
:get => %w(
cloud_networks
dialog_load
download_data
download_summary_pdf
index
outputs
parameters
resources
retire
show
show_list
stacks_ot_info
tagging_edit
protect
) +
compare_get,
:post => %w(
button
cloud_networks
outputs
listnav_search_selected
parameters
quick_search
resources
sections_field_changed
show
show_list
stacks_ot_copy
protect
tagging_edit
) +
adv_search_post +
compare_post +
exp_post +
save_post +
dialog_runner_post
},
:ems_automation => {
:get => %w(
download_data
download_summary_pdf
edit
form_fields
new
show
show_list
tagging_edit
),
:post => %w(
authentication_validate
button
edit
new
quick_search
reload
show
show_list
tagging_edit
wait_for_task
) +
adv_search_post +
exp_post
},
:ems_configuration => {
:get => %w[
button
download_data
download_summary_pdf
edit
form_fields
new
show
show_list
tagging_edit
],
:post => %w[
authentication_validate
button
change_tab
edit
new
quick_search
reload
show
show_list
tagging_edit
wait_for_task
] +
adv_search_post +
exp_post +
save_post
},
:configuration_profile => {
:get => %w[
download_data
download_summary_pdf
show
show_list
tagging_edit
],
:post => %w[
button
quick_search
reload
show
show_list
tagging_edit
launch_configuration_profile_console
]
},
:configured_system => {
:get => %w[
download_data
download_summary_pdf
show
show_list
tagging_edit
],
:post => %w[
button
new
quick_search
reload
show
show_list
tagging_edit
wait_for_task
launch_configured_system_console
] +
adv_search_post +
dialog_runner_post +
exp_post +
save_post
},
:pxe => {
:get => %w(
explorer
tagging_edit
),
:post => %w(
accordion_select
explorer
iso_datastore_list
iso_image_edit
log_depot_validate
pxe_image_edit
pxe_image_type_edit
pxe_image_type_list
pxe_server_async_cred_validation
pxe_server_list
pxe_wimg_edit
pxe_wimg_form_field_changed
reload
tagging_edit
template_list
tree_autoload
tree_select
x_button
x_history
)
},
:report => {
:get => %w(
dashboard_get
db_copy
db_widget_dd_done
download_report
explorer
export_widgets
miq_report_edit
miq_report_new
preview_chart
render_chart
report_only
sample_chart
print_report
send_report_data
tree_autoload
tree_select
),
:post => %w(
accordion_select
change_tab
dashboard_render
db_copy
db_edit
db_form_field_changed
db_seq_edit
db_widget_dd_done
db_widget_remove
discard_changes
explorer
export_field_changed
filter_change
form_field_changed
get_report
import_widgets
menu_editor
menu_field_changed
menu_folder_message_display
menu_update
miq_report_edit
reload
rep_change_tab
saved_report_paging
schedule_edit
schedule_form_field_changed
show_preview
tree_autoload
tree_select
upload
upload_widget_import_file
wait_for_task
widget_edit
widget_form_field_changed
widget_shortcut_dd_done
widget_shortcut_remove
widget_shortcut_reset
x_button
x_history
x_show
) +
exp_post
},
:resource_pool => {
:get => %w(
download_data
download_summary_pdf
index
protect
show
show_list
tagging_edit
) +
compare_get,
:post => %w(
button
listnav_search_selected
protect
sections_field_changed
show
show_list
tagging_edit
tree_autoload
quick_search
) +
adv_search_post +
compare_post +
exp_post +
save_post
},
:restful_redirect => {
:get => %w(
index
)
},
:service => {
:get => %w(
dialog_load
download_data
explorer
reconfigure_form_fields
retire
service_form_fields
show
tagging_edit
),
:post => %w(
button
explorer
listnav_search_selected
ownership_update
quick_search
reload
service_edit
service_tag
show
show_list
tagging_edit
tree_autoload
tree_select
x_button
x_history
x_show
wait_for_task
) +
dialog_runner_post +
adv_search_post +
exp_post +
save_post +
x_post
},
:storage => {
:get => %w(
button
debris_files
dialog_load
disk_files
download_data
download_summary_pdf
explorer
files
perf_chart_chooser
protect
show
show_list
snapshot_files
tagging_edit
tree_select
vm_ram_files
vm_misc_files
x_show
) +
compare_get,
:post => %w(
accordion_select
button
debris_files
explorer
files
listnav_search_selected
disk_files
perf_chart_chooser
protect
quick_search
reload
sections_field_changed
show
show_list
storage_list
storage_pod_list
snapshot_files
tagging
tagging_edit
tree_autoload
tree_select
vm_misc_files
vm_ram_files
wait_for_task
x_search_by_name
x_show
) +
adv_search_post +
compare_post +
dialog_runner_post +
exp_post +
save_post +
x_post
},
:support => {
:get => %w(index)
},
:vm => {
:get => %w(
download_data
download_summary_pdf
edit
ownership
policy_sim
reconfigure
reconfigure_form_fields
resize
resize_form_fields
evacuate
evacuate_form_fields
live_migrate
live_migrate_form_fields
associate_floating_ip
associate_floating_ip_form_fields
disassociate_floating_ip
disassociate_floating_ip_form_fields
add_security_group
remove_security_group
retire
right_size
show
show_list
),
:post => %w(
policy_sim
policy_sim_add
policy_sim_cancel
policy_sim_remove
reconfigure
reconfigure_form_fields
reconfigure_update
resize_vm
evacuate_vm
live_migrate_vm
associate_floating_ip_vm
disassociate_floating_ip_vm
right_size
set_checked_items
show_list
tree_autoload
genealogy_tree_selected
ownership_update
wait_for_task
) +
ownership_post +
pre_prov_post
},
:vm_cloud => {
:get => %w(
download_data
download_summary_pdf
drift_to_csv
drift_to_pdf
drift_to_txt
explorer
filesystem_download
reconfigure_form_fields
launch_html5_console
launch_vmrc_console
perf_chart_chooser
protect
retire
right_size_print
show
tagging_edit
resize
resize_form_fields
live_migrate_form_fields
attach
detach
evacuate
evacuate_form_fields
associate_floating_ip
associate_floating_ip_form_fields
disassociate_floating_ip
disassociate_floating_ip_form_fields
add_security_group
remove_security_group
) +
compare_get,
:post => %w(
advanced_settings
accordion_select
button
resize_vm
event_logs
explorer
launch_html5_console
filesystems
filesystem_drivers
guest_applications
groups
html5_console
kernel_drivers
linux_initprocesses
ownership_update
patches
perf_chart_chooser
policies
processes
protect
prov_edit
prov_field_changed
quick_search
registry_items
reload
reconfigure_update
scan_histories
sections_field_changed
security_groups
sort_template_grid
sort_vm_grid
floating_ips
network_routers
network_ports
cloud_subnets
cloud_networks
cloud_volumes
show
tagging_edit
tl_chooser
tree_autoload
tree_select
users
vm_pre_prov
wait_for_task
win32_services
live_migrate_vm
attach_volume
detach_volume
evacuate_vm
ownership_update
associate_floating_ip_vm
disassociate_floating_ip_vm
) +
adv_search_post +
compare_post +
dialog_runner_post +
drift_post +
exp_post +
policy_post +
pre_prov_post +
snap_post +
x_post
},
:vm_infra => {
:get => %w(
download_data
download_summary_pdf
drift_to_csv
drift_to_pdf
drift_to_txt
explorer
filesystem_download
reconfigure_form_fields
right_size_print
launch_html5_console
launch_vmrc_console
launch_native_console
perf_chart_chooser
policies
protect
retire
show
tagging_edit
) +
compare_get,
:post => %w(
accordion_select
advanced_settings
button
event_logs
explorer
filesystems
filesystem_drivers
guest_applications
groups
kernel_drivers
linux_initprocesses
ownership_update
patches
perf_chart_chooser
policies
protect
processes
prov_edit
prov_field_changed
quick_search
reconfigure_update
registry_items
reload
scan_histories
sections_field_changed
security_groups
show
sort_ds_grid
sort_host_grid
sort_iso_img_grid
sort_vc_grid
sort_template_grid
sort_vm_grid
tagging_edit
tl_chooser
tree_autoload
tree_select
users
vmrc_console
vm_pre_prov
html5_console
native_console
wait_for_task
win32_services
ownership_update
) +
adv_search_post +
compare_post +
dialog_runner_post +
drift_post +
exp_post +
policy_post +
pre_prov_post +
snap_post +
x_post
},
:vm_or_template => {
:get => %w(
download_data
download_summary_pdf
drift_to_csv
drift_to_pdf
drift_to_txt
explorer
launch_html5_console
launch_vmrc_console
launch_native_console
reconfigure_form_fields
policies
protect
retire
show
tagging_edit
vm_show
) +
compare_get,
:post => %w(
accordion_select
advanced_settings
button
drift_all
drift_differences
drift_history
drift_mode
drift_same
event_logs
explorer
filesystem_drivers
filesystems
groups
guest_applications
kernel_drivers
linux_initprocesses
ownership_update
patches
perf_chart_chooser
policies
processes
protect
prov_edit
prov_field_changed
quick_search
reconfigure_update
registry_items
reload
scan_histories
sections_field_changed
security_groups
floating_ips
network_routers
network_ports
cloud_subnets
cloud_networks
cloud_volumes
show
sort_ds_grid
sort_host_grid
sort_iso_img_grid
sort_vc_grid
tagging_edit
tl_chooser
tree_select
users
vm_pre_prov
vmrc_console
html5_console
native_console
wait_for_task
win32_services
x_button
x_history
x_search_by_name
x_show
ownership_update
) +
adv_search_post +
compare_post +
dialog_runner_post +
exp_post +
policy_post +
pre_prov_post +
snap_post
},
:firmware_registry => {
:get => %w[
download_data
download_summary_pdf
show
show_list
],
:post => %w[
show_list
]
},
:firmware_binary => {
:get => %w[
download_data
download_summary_pdf
show
]
},
:firmware_target => {
:get => %w[
download_data
download_summary_pdf
show
]
},
}
routes_without_index = %i[
cloud_tenant_dashboard
container_dashboard
ems_cloud
ems_cloud_dashboard
ems_container
ems_infra
ems_storage_dashboard
ems_infra_dashboard
ems_network
ems_physical_infra
ems_physical_infra_dashboard
ems_storage
miq_ae_customization
pxe
storage_resource
].freeze
root :to => 'dashboard#login'
# Let's serve pictures directly from the DB
get '/pictures/:basename' => 'picture#show', :basename => /[\da-zA-Z]+\.[\da-zA-Z]+/
get '/saml_login(/*path)' => 'dashboard#saml_login'
get '/oidc_login(/*path)' => 'dashboard#oidc_login'
# ping response for load balancing
get '/ping' => 'ping#index'
controller_routes.each do |controller_name, controller_actions|
# Default route with no action to controller's index action
unless routes_without_index.include?(controller_name)
match controller_name.to_s, :controller => controller_name, :action => :index, :via => :get
end
default_routes.each do |action_name|
post "#{controller_name}/#{action_name}(/:id)",
:action => action_name,
:controller => controller_name
end
# One-by-one get/post routes for defined controllers
if controller_actions.kind_of?(Hash)
unless controller_actions[:get].nil?
controller_actions[:get].each do |action_name|
get "#{controller_name}/#{action_name}(/:id)",
:action => action_name,
:controller => controller_name
end
end
unless controller_actions[:post].nil?
controller_actions[:post].each do |action_name|
post "#{controller_name}/#{action_name}(/:id)",
:action => action_name,
:controller => controller_name
end
end
end
end
# API-like JSON trees
get '/tree/automate_entrypoint', :to => 'tree#automate_entrypoint'
get '/tree/automate_inline_methods', :to => 'tree#automate_inline_methods'
# pure-angular templates
get '/static/*id' => 'static#show', :format => false
# prevent No route matches [GET] "/favicon.ico"
get '/favicon.ico' => 'static#favicon', :format => false
%w[ems_cloud ems_infra ems_physical_infra ems_container ems_network ems_storage].each do |resource|
resources(resource.to_sym, :as => resource.pluralize.to_sym, :except => %i[create update destroy])
end
# rubocop:enable Layout/HashAlignment
# rubocop:enable Layout/MultilineOperationIndentation
end
| 20.197667 | 102 | 0.52047 |
33ee17b77d7aae7628812c6eace54f4cdd2475eb
| 373 |
require 'active_model/serializer'
module Spree
  module Wombat
    # Serializes a Wombat responder payload: the request id, a human-readable
    # summary, and -- only when present on the object -- a backtrace and the
    # list of related objects.
    class ResponderSerializer < ActiveModel::Serializer
      attributes :request_id, :summary, :backtrace, :objects

      # ActiveModel::Serializer hook that prunes the attribute key set before
      # serialization: :backtrace and :objects are dropped from the output
      # whenever the underlying object carries no data for them, so the
      # emitted JSON only contains populated fields.
      def filter(keys)
        %i(backtrace objects).each_with_object(keys) do |optional_key, remaining|
          remaining.delete(optional_key) unless object.public_send(optional_key).present?
        end
      end
    end
  end
end
| 21.941176 | 64 | 0.69437 |
7a50218fbcfc1850eaf9c72109c0f8d7e6af979d
| 903 |
require 'facets/cloneable'
# Test coverage for Facets' Cloneable mixin, which redefines #dup and #clone
# so that copies duplicate their instance variables instead of sharing them
# with the source object. Uses the rubytest DSL (test_case/method/test and
# the assert/refute functors).
test_case Cloneable do
  # Fixture class exposing the object id of its internal @bar array, so the
  # tests can verify whether a copy shares @bar or owns a fresh duplicate.
  foo = Class.new do
    include Cloneable
    def initialize
      @bar=[]
    end
    def bar_id
      @bar.object_id
    end
  end

  method :dup do
    # The duplicate must own its own copy of @bar, not the original's.
    test do
      a = foo.new
      b = a.dup
      a.bar_id.refute == b.bar_id
    end
    # #dup is expected to carry the taint flag over to the copy.
    # NOTE(review): taint/tainted? were removed in Ruby 3.2 -- these
    # "tainted" tests can only run on older interpreters; confirm the
    # supported Ruby range for this suite.
    test "tainted" do
      a = foo.new
      a.taint
      b = a.dup
      b.assert.tainted?
    end
    # Unlike #clone, #dup must NOT propagate frozen state to the copy.
    test "frozen" do
      a = foo.new
      a.freeze
      b = a.dup
      b.refute.frozen?
    end
  end

  method :clone do
    # The clone must own its own copy of @bar, not the original's.
    test do
      a = foo.new
      b = a.clone
      assert(a.bar_id != b.bar_id, "should not be equal")
    end
    # #clone also carries the taint flag (see version note above).
    test "tainted" do
      a = foo.new
      a.taint
      b = a.clone
      assert b.tainted?, "b should be tainted"
    end
    # Unlike #dup, #clone DOES propagate frozen state to the copy.
    test "frozen" do
      a = foo.new
      a.freeze
      b = a.clone
      assert b.frozen?, "b should be frozen"
    end
  end
end
| 14.803279 | 57 | 0.526024 |
28ca9c5a1c662493bf81747a36156b3c59389def
| 390 |
module Kernel
  # Drop-in variant of the standard #p method: writes each argument's
  # #inspect representation to stdout, then hands the argument(s) back to
  # the caller so the call can be spliced into an expression for debugging.
  #
  #   x = 1
  #   r = 4 + p(1)
  #   p r
  #
  # produces
  #
  #   1
  #   5
  #
  # Returns nil when called with no arguments, the argument itself for a
  # single argument, and the whole argument array for several.
  #
  # DEPRECATE AS OF 1.9, _if_ #p will then do this too.
  def p(*args)
    args.each do |arg|
      puts arg.inspect
    end
    case args.length
    when 0 then nil
    when 1 then args.first
    else args
    end
  end
end
79a753e3612fc43f66e45616c38dcc4f4a863576
| 3,025 |
# vim: syntax=ruby:expandtab:shiftwidth=2:softtabstop=2:tabstop=2
# Copyright (c) 2016-present, Facebook, Inc.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Chef custom resource interface for loading/unloading kernel modules.
default_action :load
# Name of the kernel module to act on; defaults to the resource's name.
property :module_name, :kind_of => String, :name_property => true
# When true, pass -v to modprobe for verbose output.
property :verbose, :kind_of => [TrueClass, FalseClass], :default => false
# Seconds allowed for the underlying modprobe/rmmod command to complete.
property :timeout, :kind_of => Integer, :default => 300
# On unload, fall back to rmmod if `modprobe -r` fails.
property :fallback, :kind_of => [TrueClass, FalseClass], :default => false
# Extra module parameters appended to the modprobe command line.
property :module_params, :kind_of => [String, Array], :required => false
action_class do
  # Assembles and runs the modprobe (or rmmod) command for this resource.
  #
  # @param new_resource the resource instance being converged
  # @param unload [Boolean] true to remove the module (adds -r), false to load
  #
  # Returns true early for built-in modules with no parameters (nothing to
  # do); otherwise declares an `execute` resource that runs the assembled
  # command and reloads the kernel ohai plugin when it fires.
  def modprobe_module(new_resource, unload)
    module_name = new_resource.module_name
    # Normalize module_params (nil, String, or Array) to a flat array.
    params = [new_resource.module_params].flatten.compact
    timeout = new_resource.timeout
    verbose = new_resource.verbose
    fallback = new_resource.fallback
    flags = []
    flags << '-v' if verbose
    flags << '-r' if unload
    # Correctly handle built-in modules. If no parameters were supplied, we
    # just return true. If the caller supplied parameters, we fail the Chef run
    # and ask them to fix their cookbook, since we can't apply them.
    # (A /sys/module/<name> directory without an "initstate" entry means the
    # module is compiled into the kernel rather than loadable.)
    if ::File.exist?("/sys/module/#{module_name}") &&
       !::File.exist?("/sys/module/#{module_name}/initstate")
      ::Chef::Log.warn(
        "fb_modprobe called on built-in module '#{module_name}'",
      )
      unless params.empty?
        # NOTE: the heredoc body is flush-left on purpose -- with <<- the
        # content's leading whitespace is part of the message.
        fail <<-FAIL
Cannot set parameters for built-in module '#{module_name}'!
Parameters for built-in modules must be passed on the kernel cmdline.
Prefix the parameter with the module name and a dot.
Examples: "ipv6.autoconf=1", "mlx4_en.udp_rss=1"
        FAIL
      end
      return true
    end
    command = ['/sbin/modprobe'] + flags + [module_name] + params
    # Sometimes modprobe fails, like when the module was uninstalled
    if fallback && unload
      command << '||'
      command << 'rmmod'
      command << '-v' if verbose
      command << module_name
    end
    execute command.join(' ') do
      action :run
      # Refresh kernel ohai data right away so later resources in this run
      # observe the new module state.
      notifies :reload, 'ohai[reload kernel]', :immediately
      timeout timeout
    end
  end
end
action :load do
if FB::Modprobe.module_loaded?(new_resource.module_name)
Chef::Log.debug("#{new_resource}: Module already loaded")
else
modprobe_module(new_resource, false)
end
end
action :unload do
if FB::Modprobe.module_loaded?(new_resource.module_name)
modprobe_module(new_resource, true)
else
Chef::Log.debug("#{new_resource}: Module already unloaded")
end
end
| 33.241758 | 79 | 0.69157 |
61c5b2baf8bd74df90ac5bbe21ef7e92acc4aab1
| 1,346 |
# frozen_string_literal: true
require 'rails_helper'
RSpec.describe Locations::Updater do
  # The updater reconciles each location's young_offender_institution flag
  # against a canonical list of YOI NOMIS agency ids (WYI/WNI appear to be
  # on that list, FOO/BAR not — TODO confirm against the updater's source).
  subject(:updater) { described_class.call }

  context 'with young_offender_institution locations' do
    # Already-flagged location whose agency id is still a YOI: no change.
    it 'leaves existing YOI locations unchanged' do
      existing_young_offender_institution = create(:location, young_offender_institution: true, nomis_agency_id: 'WYI')
      updater
      expect(existing_young_offender_institution.reload.young_offender_institution).to eq true
    end

    # Flagged location whose agency id is no longer a YOI: flag cleared.
    it 'clears flag on retired YOI locations' do
      retired_young_offender_institution = create(:location, young_offender_institution: true, nomis_agency_id: 'FOO')
      updater
      expect(retired_young_offender_institution.reload.young_offender_institution).to eq false
    end

    # Unflagged location whose agency id is a YOI: flag set.
    it 'sets flag on new retired YOI locations' do
      new_young_offender_institution = create(:location, young_offender_institution: false, nomis_agency_id: 'WNI')
      updater
      expect(new_young_offender_institution.reload.young_offender_institution).to eq true
    end

    # Unflagged non-YOI location: no change.
    it 'leaves existing non YOI locations unchanged' do
      non_young_offender_institution = create(:location, young_offender_institution: false, nomis_agency_id: 'BAR')
      updater
      expect(non_young_offender_institution.reload.young_offender_institution).to eq false
    end
  end
end
| 39.588235 | 119 | 0.778603 |
f8eba46eccd13ce5227eb0ca9a78ec22dc13e4c8
| 3,933 |
#
# Implements a simple HTTP-server by using John W. Small's ([email protected])
# ruby-generic-server.
#
# Copyright (C) 2001, 2002, 2003 by Michael Neumann ([email protected])
#
# $Id: httpserver.rb 22784 2009-03-06 03:56:38Z nobu $
#
require "gserver"
class HttpServer < GServer
  ##
  # handle_obj specifies the object that receives calls to request_handler
  # and ip_auth_handler.
  def initialize(handle_obj, port = 8080, host = DEFAULT_HOST, maxConnections = 4,
                 stdlog = $stdout, audit = true, debug = true)
    @handler = handle_obj
    super(port, host, maxConnections, stdlog, audit, debug)
  end

  private

  # Constants -----------------------------------------------

  CRLF = "\r\n"
  HTTP_PROTO = "HTTP/1.0"
  SERVER_NAME = "HttpServer (Ruby #{RUBY_VERSION})"

  DEFAULT_HEADER = {
    "Server" => SERVER_NAME
  }

  ##
  # Mapping of status code and error message
  #
  StatusCodeMapping = {
    200 => "OK",
    400 => "Bad Request",
    403 => "Forbidden",
    405 => "Method Not Allowed",
    411 => "Length Required",
    500 => "Internal Server Error"
  }

  # Classes -------------------------------------------------

  ##
  # Parsed incoming request. +data+ is the client IO; a request body (if any)
  # can be read from it after the headers, bounded by #content_length.
  class Request
    attr_reader :data, :header, :method, :path, :proto

    def initialize(data, method = nil, path = nil, proto = nil)
      @header, @data = Table.new, data
      @method, @path, @proto = method, path, proto
    end

    # Integer value of the Content-Length header, or nil when absent.
    def content_length
      len = @header['Content-Length']
      return nil if len.nil?
      return len.to_i
    end
  end

  ##
  # Mutable response being assembled for the handler: status, headers, body.
  class Response
    attr_reader :header
    attr_accessor :body, :status, :status_message

    def initialize(status = 200)
      @status = status
      @status_message = nil
      @header = Table.new
    end
  end

  ##
  # a case-insensitive Hash class for HTTP header
  #
  class Table
    include Enumerable

    def initialize(hash = {})
      # BUGFIX: build a private, normalized hash instead of aliasing the
      # argument. The previous `@hash = hash` kept a reference to the
      # caller's hash, so Table.new(DEFAULT_HEADER) mutated DEFAULT_HEADER in
      # place on every response (Connection/Date entries accumulated there),
      # and the caller's original un-capitalized keys survived alongside the
      # normalized copies.
      @hash = {}
      update(hash)
    end

    # Case-insensitive lookup; keys are normalized via String#capitalize.
    def [](key)
      @hash[key.to_s.capitalize]
    end

    def []=(key, value)
      @hash[key.to_s.capitalize] = value
    end

    # Merges +hash+ into the table, normalizing each key; returns self.
    def update(hash)
      hash.each {|k,v| self[k] = v}
      self
    end

    def each
      @hash.each {|k,v| yield k.capitalize, v }
    end

    # Appends the headers to +port+ as "Key: value" CRLF-terminated lines.
    def writeTo(port)
      each { |k,v| port << "#{k}: #{v}" << CRLF }
    end
  end # class Table

  # Helper Methods ------------------------------------------

  # Builds the response header table: defaults, caller overrides, then the
  # mandatory Connection/Date fields.
  def http_header(header=nil)
    new_header = Table.new(DEFAULT_HEADER)
    new_header.update(header) unless header.nil?

    new_header["Connection"] = "close"
    new_header["Date"] = http_date(Time.now)

    new_header
  end

  # HTTP-style date string, always expressed in GMT.
  def http_date( aTime )
    aTime.gmtime.strftime( "%a, %d %b %Y %H:%M:%S GMT" )
  end

  # Serializes status line + headers + optional body into one String.
  def http_resp(status_code, status_message=nil, header=nil, body=nil)
    status_message ||= StatusCodeMapping[status_code]

    str = ""
    str << "#{HTTP_PROTO} #{status_code} #{status_message}" << CRLF
    http_header(header).writeTo(str)
    str << CRLF
    str << body unless body.nil?
    str
  end

  # Main Serve Loop -----------------------------------------

  # Handles one client connection: IP auth, request-line and header parsing,
  # handler dispatch, then response serialization.
  def serve(io)
    # perform IP authentication
    unless @handler.ip_auth_handler(io)
      io << http_resp(403, "Forbidden")
      return
    end

    # parse first line
    if io.gets =~ /^(\S+)\s+(\S+)\s+(\S+)/
      request = Request.new(io, $1, $2, $3)
    else
      io << http_resp(400, "Bad Request")
      return
    end

    # parse HTTP headers up to the blank line that terminates them.
    # BUGFIX: also stop at EOF — `nil !~ regexp` is truthy, so the previous
    # condition looped forever when a client disconnected mid-headers.
    while (line = io.gets) && line !~ /^(\n|\r)/
      if line =~ /^([\w-]+):\s*(.*)$/
        request.header[$1] = $2.strip
      end
    end
    io.binmode

    response = Response.new

    # execute request handler
    @handler.request_handler(request, response)

    # write response back to the client
    io << http_resp(response.status, response.status_message,
                    response.header, response.body)
  # BUGFIX: rescue StandardError, not Exception — rescuing Exception also
  # swallowed SignalException/SystemExit; the exception object was unused.
  rescue StandardError
    io << http_resp(500, "Internal Server Error")
  end
end # class HttpServer
| 21.972067 | 82 | 0.580727 |
7acd71f35bc42f0ff056229fbd9623477ec1a8db
| 7,614 |
#
# Author:: Thom May (<[email protected]>)
# Author:: Patrick Wright (<[email protected]>)
# Copyright:: Copyright (c) 2015-2018 Chef Software, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require_relative "util"
require_relative "generator/powershell"
require "cgi"
module Mixlib
class Install
class ScriptGenerator
attr_accessor :version
attr_accessor :powershell
attr_accessor :prerelease
attr_accessor :nightlies
attr_accessor :install_flags
attr_accessor :endpoint
attr_accessor :root
attr_accessor :use_sudo
attr_reader :sudo_command
# Setter for the sudo command string. Passing nil disables sudo entirely
# (use_sudo becomes false; the previously configured command is retained).
def sudo_command=(cmd)
  if cmd.nil?
    @use_sudo = false
  else
    @sudo_command = cmd
  end
end
attr_accessor :http_proxy
attr_accessor :https_proxy
attr_accessor :omnibus_url
attr_accessor :install_msi_url
VALID_INSTALL_OPTS = %w{omnibus_url
endpoint
http_proxy
https_proxy
install_flags
install_msi_url
nightlies
prerelease
project
root
use_sudo
sudo_command}
# @param version [String, nil] product version; nil is treated as "latest"
#   (normalized to a lowercase string)
# @param powershell [Boolean] generate PowerShell (Windows) instead of
#   Bourne shell
# @param opts [Hash] overrides; keys must be in VALID_INSTALL_OPTS
def initialize(version, powershell = false, opts = {})
  @version = (version || "latest").to_s.downcase
  @powershell = powershell
  @http_proxy = nil
  @https_proxy = nil
  @install_flags = nil
  @prerelease = false
  @nightlies = false
  @endpoint = "metadata"
  @omnibus_url = "https://www.chef.io/chef/install.sh"
  @use_sudo = true
  @sudo_command = "sudo -E"
  # Default install root differs by platform family.
  @root = if powershell
            "$env:systemdrive\\opscode\\chef"
          else
            "/opt/chef"
          end
  parse_opts(opts)
end
# Renders the complete install script for the configured shell family.
#
# @return [String] the generated install script
def install_command
  variables =
    powershell ? install_command_vars_for_powershell : install_command_vars_for_bourne
  shell_code_from_file(variables)
end
private
# Generates the install command variables for Bourne shell-based
# platforms.
#
# @return [String] shell variable lines
# @api private
def install_command_vars_for_bourne
  # "latest"/"true"/"nightlies" are magic versions that omit the -v flag.
  flag_parts = []
  flag_parts << "-v #{CGI.escape(version)}" unless %w{latest true nightlies}.include?(version)
  flag_parts << "-n" if nightlies
  flag_parts << "-p" if prerelease
  flag_parts << install_flags if install_flags

  [
    shell_var("chef_omnibus_root", root),
    shell_var("chef_omnibus_url", omnibus_url),
    shell_var("install_flags", flag_parts.join(" ").strip),
    shell_var("pretty_version", Util.pretty_version(version)),
    shell_var("sudo_sh", sudo("sh")),
    shell_var("version", version),
  ].join("\n")
end
# Generates the install command variables for PowerShell-based platforms.
# Takes no arguments; reads install_flags, root, version, install_msi_url
# and friends from the instance.
#
# @return [String] shell variable lines
# @api private
def install_command_vars_for_powershell
  # Honor an explicit -download_directory flag; otherwise use %TEMP%.
  dir_match = install_flags && install_flags.match(/-download_directory (\S+)/)
  download_directory = dir_match ? dir_match[1] : "$env:TEMP"

  vars = [
    shell_var("chef_omnibus_root", root),
    shell_var("msi", "#{download_directory}\\chef-#{version}.msi"),
    shell_var("download_directory", download_directory),
  ]
  if install_msi_url
    vars << shell_var("chef_msi_url", install_msi_url)
  else
    vars << shell_var("chef_metadata_url", windows_metadata_url)
    vars << shell_var("pretty_version", Util.pretty_version(version))
    vars << shell_var("version", version)
  end
  vars.join("\n")
end
# Raises ArgumentError unless +opt+ names one of VALID_INSTALL_OPTS.
def validate_opts!(opt)
  return if VALID_INSTALL_OPTS.include?(opt.to_s)
  message = "#{opt} is not a valid option," \
            "valid options are #{VALID_INSTALL_OPTS.join(" ")}"
  raise ArgumentError, message
end
# Applies each option through its attribute writer; "project" and
# "endpoint" are both routed through metadata_endpoint_from_project.
def parse_opts(opts)
  opts.each do |key, value|
    validate_opts!(key)
    if %w{project endpoint}.include?(key.to_s)
      self.endpoint = metadata_endpoint_from_project(value)
    else
      send("#{key.to_sym}=", value)
    end
  end
end
# Loads the install_command template shipped under support/ and renders it
# with the given variable lines and configured proxy settings.
def shell_code_from_file(vars)
  fn = File.join(
    File.dirname(__FILE__),
    %w{.. .. .. support},
    "install_command"
  )
  code = Util.shell_code_from_file(
    vars, fn, powershell,
    http_proxy: http_proxy, https_proxy: https_proxy
  )
  # Windows scripts need the platform-detection helpers prepended.
  powershell ? powershell_prefix.concat(code) : code
end
# Prefixes the PowerShell install script with helpers and shell vars
# to detect the platform version and architecture. These variables are
# interpolated into the metadata URL (see windows_metadata_url).
#
# @return [String] PowerShell helpers and shell vars for platform info
# @api private
def powershell_prefix
  [
    Mixlib::Install::Generator::PowerShell.get_script("helpers.ps1"),
    "$platform_architecture = Get-PlatformArchitecture",
    "$platform_version = Get-PlatformVersion",
  ].join("\n")
end
# Builds a shell variable assignment string for the required shell type
# (delegates to Util; the `powershell` flag selects the syntax).
#
# @param name [String] variable name
# @param value [String] variable value
# @return [String] shell variable assignment
# @api private
def shell_var(name, value)
  Util.shell_var(name, value, powershell)
end
# @param project [String, nil] product name; nil or "chef" (any case) means
#   the core Chef product
# @return [String] the correct Chef Omnitruck API metadata endpoint
def metadata_endpoint_from_project(project = nil)
  return "metadata" if project.nil? || project.casecmp("chef").zero?
  "metadata-#{project.downcase}"
end
# Builds the Omnitruck metadata URL for Windows from omnibus_url.
# NOTE(review): when omnibus_url does not end in /install.sh, `base` is nil
# and the result is a scheme-less relative URL — confirm callers always
# configure an install.sh-style omnibus_url.
def windows_metadata_url
  base = if omnibus_url =~ %r{/install.sh$}
           "#{File.dirname(omnibus_url)}/"
         end
  url = "#{base}#{endpoint}"
  # $platform_* placeholders are resolved at runtime by powershell_prefix.
  url << "?p=windows&m=$platform_architecture&pv=$platform_version"
  # Magic versions (latest/true/nightlies) omit the explicit version pin.
  url << "&v=#{CGI.escape(version)}" unless %w{latest true nightlies}.include?(version)
  url << "&prerelease=true" if prerelease
  url << "&nightlies=true" if nightlies
  url
end
# Conditionally prefixes a command with a sudo command.
#
# @param script [String] command to be prefixed
# @return [String] the command, conditionally prefixed with sudo
# @api private
def sudo(script)
  use_sudo ? "#{sudo_command} #{script}" : script
end
end
end
end
| 32.126582 | 94 | 0.590229 |
333f157acf4adc08a7dc4e906dacfc85efde8cce
| 642 |
# -*- coding: binary -*-
module Rex
module Logging
module Sinks
###
#
# This class implements the LogSink interface and backs it against stderr
###
class Stderr

  include Rex::Logging::LogSink

  #
  # Writes one log entry to stderr. LOG_RAW entries pass through verbatim;
  # all other severities get a timestamp/severity-code/source prefix.
  #
  def log(sev, src, level, msg, from) # :nodoc:
    if sev == LOG_RAW
      $stderr.write(msg)
    else
      severity_code =
        case sev
        when LOG_DEBUG then 'd'
        when LOG_ERROR then 'e'
        when LOG_WARN  then 'w'
        else                'i' # LOG_INFO and anything unrecognized
        end
      $stderr.write("[#{get_current_timestamp}] [#{severity_code}(#{level})] #{src}: #{msg}\n")
    end
    $stderr.flush
  end

protected

end
end end end
| 14.266667 | 83 | 0.610592 |
e8ab79c6a420fd30cd0b02e254cc81429be2f8d0
| 2,082 |
#!/usr/bin/env ruby
# frozen_string_literal: true
$LOAD_PATH.unshift File.expand_path(File.dirname(__FILE__) + '/../lib/')
require 'messagebird'
ACCESS_KEY = 'YOUR KEY HERE'
# Abort early when the placeholder key was not replaced.
# BUGFIX: the old guard used `defined?(ACCESS_KEY)`, which is always true
# here because the constant is assigned on the line above, so the check
# could never fire. Compare against the placeholder value instead.
if ACCESS_KEY == 'YOUR KEY HERE'
  puts 'You need to set an ACCESS_KEY constant in this file'
  exit 1
end
begin
  # Create a MessageBird client with the specified ACCESS_KEY.
  client = MessageBird::Client.new(ACCESS_KEY)

  # Fetch the Contact list with pagination options (skip the first 5 objects and take 10).
  limit = 10
  offset = 5
  contacts = client.contact_list(limit, offset)

  # Print the object information.
  puts
  puts 'The following information was returned as a Contact list:'
  puts
  puts " count : #{contacts.count}"
  puts " limit : #{contacts.limit}"
  puts " offset : #{contacts.offset}"
  puts " total_count : #{contacts.total_count}"
  puts " links : #{contacts.links}"

  # Show the first contact of the page in detail, when the page is non-empty.
  unless contacts.items.empty?
    contact = contacts[0] # Equivalent to contacts.items[0]
    puts ' Contact :'
    puts " id : #{contact.id}"
    puts " href : #{contact.href}"
    puts " msisdn : #{contact.msisdn}"
    puts " firstName : #{contact.firstName}"
    puts " lastName : #{contact.lastName}"
    puts " groups : #{contact.groups.href}" # contact.groups.total_count is also available.
    puts " messages : #{contact.messages.href}" # contact.messages.total_count is also available.
    puts " custom1 : #{contact.custom_details.custom1}"
    puts " custom2 : #{contact.custom_details.custom2}"
    puts " custom3 : #{contact.custom_details.custom3}"
    puts " custom4 : #{contact.custom_details.custom4}"
  end
rescue MessageBird::ErrorException => e
  # The API reports problems (auth, validation, ...) as a list of error objects.
  puts
  puts 'An error occurred while listing your contacts:'
  puts

  e.errors.each do |error|
    puts " code : #{error.code}"
    puts " description : #{error.description}"
    puts " parameter : #{error.parameter}"
    puts
  end
end
219b8ecb28868064918bbb8545fc9f28a4e407d7
| 2,516 |
require 'rake/clean'
require './rake_helper/spec.rb'
desc "Default Task"
task :default => [:install]
begin
require 'rubygems/package_task'
spec = Gem::Specification.load('cxxproject.gemspec')
Gem::PackageTask.new(spec){|pkg|}
desc "install gem globally"
task :install => [:gem] do
sh "gem install pkg/#{spec.name}-#{spec.version}.gem"
end
desc "deploy gem to rubygems"
task :deploy => :gem do
require 'highline/import'
new_gem = "pkg/#{spec.name}-#{spec.version}.gem"
say "This will deploy #{new_gem} to rubygems server"
if agree("Are you sure you want to continue? [y/n]") then
sh "gem push #{new_gem}"
end
end
begin
require 'rdoc'
require 'rdoc/rdoctask'
RDoc::Task.new do |rd|
rd.rdoc_files.include(spec.files)
end
rescue LoadError => e
task :rdoc do
puts 'please gem install rdoc'
end
end
rescue LoadError => e
puts "please missing gems #{e}"
end
# Zero-pads a single-character numeric string to two characters;
# longer strings are returned unchanged.
def two_digits(x)
  x.length > 1 ? x : "0#{x}"
end
begin
require 'grit'
include Grit
VERSION_REGEXP = Regexp.new("v?_?(?<x>\\d+)\\.(?<y>\\d+)\.(?<z>\\d+)")
# Walks the repository's version tags (vX.Y.Z) in ascending order and
# renders a change list of the commits between each consecutive tag pair,
# newest pair first. Change-Id lines and blank lines are stripped from
# commit messages.
#
# @return [Array<String>] lines of the change history
def git_history
  repo = Repo.new('.')
  relevant_tags = repo.tags.select { |t| VERSION_REGEXP.match(t.name) }
  # Sort by zero-padded "XX-YY-ZZ" keys so e.g. 0.10.0 orders after 0.9.0.
  # BUGFIX(idiom): the old code wrote `sort_by.each do ... end`, which only
  # sorted by accident via the Enumerator returned by a block-less sort_by;
  # pass the block to sort_by directly.
  sorted_tags = relevant_tags.sort_by do |t|
    match = VERSION_REGEXP.match(t.name)
    "#{two_digits(match[:x])}-#{two_digits(match[:y])}-#{two_digits(match[:z])}"
  end
  change_text = []
  zipped = sorted_tags[0..-2].zip(sorted_tags[1..-1])
  zipped.reverse.each do |a, b|
    change_text << ""
    change_text << "#{a.name} => #{b.name}"
    change_text << ""
    # (Removed the unused `cs =` assignment; the each return value was never used.)
    repo.commits_between(a.commit, b.commit).each do |c|
      change_lines = c.message.lines.to_a.delete_if { |line| line.index('Change-Id') || line.strip.size == 0 }
      first = change_lines.first
      change_text << " * " + first + "#{change_lines[1..-1].collect { |l| " #{l}" }.join("")}"
    end
  end
  change_text
end
desc 'generate version history'
task :generate_history do
puts git_history
end
desc 'generate and update version history'
task :update_version_history do
change_line = "## Change History:"
readme = 'README.md'
content = File.read(readme)
File.open(readme, 'w') do |f|
f.puts content.gsub(/^#{change_line}.*/m, ([change_line] << git_history).join("\n"))
end
end
rescue LoadError => e
puts 'to build the version history please gem install grit'
end
require './rake_helper/perftools'
| 24.666667 | 102 | 0.624404 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.