Dataset schema (column, dtype, observed range):

  hexsha             string    length 40
  size               int64     2 to 1.01M
  content            string    length 2 to 1.01M
  avg_line_length    float64   1.5 to 100
  max_line_length    int64     2 to 1k
  alphanum_fraction  float64   0.25 to 1
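The per-file statistics are presumably computed from each record's raw text. Below is a minimal Ruby sketch of plausible definitions; the exact formulas used by the dataset, and whether hexsha is the git blob SHA-1 of the file, are assumptions.

require 'digest'

# Hypothetical reconstruction of the per-file columns; the real pipeline may differ.
def file_stats(text)
  lines = text.split("\n")
  {
    # Assumption: hexsha is the git blob SHA-1 of "blob <size>\0<content>".
    hexsha:            Digest::SHA1.hexdigest("blob #{text.bytesize}\0#{text}"),
    size:              text.bytesize,
    avg_line_length:   lines.sum(&:length).to_f / lines.size,
    max_line_length:   lines.map(&:length).max,
    # Fraction of characters that are ASCII letters or digits.
    alphanum_fraction: text.count("a-zA-Z0-9").to_f / text.length
  }
end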
39c90e92045ae6a795ac3dd565708c12c177096d
1,035
module AsteriskMesh
  class IAX
    include Config
    include NodeName

    def friend_static(node)
      <<~IAX
        [#{node_name(node)}]
        type=friend
        host=#{node['host']}
        context=#{CONTEXT_FROM_MESH}
      IAX
    end

    def register_dynamic(node_from, node_to, password)
      "register => #{node_name_dynamic(node_from, node_to)}:#{password}@#{node_to['host']}\n"
    end

    def friend_dynamic(node_from, node_to, password)
      <<~IAX
        [#{node_name_dynamic(node_from, node_to)}]
        type=friend
        host=dynamic
        context=#{CONTEXT_FROM_MESH}
        secret=#{password}
        username=#{node_name_dynamic(node_from, node_to)}
      IAX
    end

    def friend_static_password(node_from, node_to, password)
      <<~IAX
        [#{node_name_dynamic(node_from, node_to)}]
        type=friend
        host=#{node_to['host']}
        context=#{CONTEXT_FROM_MESH}
        secret=#{password}
        username=#{node_name_dynamic(node_from, node_to)}
      IAX
    end
  end
end
23
93
0.611594
b9001e540dd354817ba88da203ea039c184c44a3
2,013
class TreeBuilderSmartproxyAffinity < TreeBuilder
  has_kids_for Hash, [:x_get_tree_hash_kids]
  has_kids_for MiqServer, [:x_get_server_kids]

  def initialize(name, type, sandbox, build = true, data)
    @data = data
    super(name, type, sandbox, build)
  end

  private

  def node_builder
    TreeNodeBuilderSmartproxyAffinity
  end

  def tree_init_options(_tree_name)
    {:full_ids => false, :add_root => false, :lazy => false}
  end

  def set_locals_for_render
    locals = super
    locals.merge!(:checkboxes => true,
                  :onclick => false,
                  :three_checks => true,
                  :post_check => true,
                  :oncheck => 'miqOnClickSmartProxyAffinityCheck',
                  :check_url => '/ops/smartproxy_affinity_field_changed/')
  end

  def root_options
    []
  end

  def x_get_tree_roots(count_only = false, _options)
    nodes = @data.miq_servers.select(&:is_a_proxy?).sort_by { |s| [s.name, s.id] }
    count_only_or_objects(count_only, nodes)
  end

  def x_get_server_kids(parent, count_only = false)
    nodes = %w(host storage).map do |kid|
      {:id => "#{parent.id}__#{kid}",
       :image => kid,
       :parent => parent,
       :text => Dictionary.gettext(kid.camelcase, :type => :model, :notfound => :titleize, :plural => true),
       :cfmeNoClick => true,
       :children => @data.send(kid.pluralize).sort_by(&:name)}
    end
    count_only_or_objects(count_only, nodes)
  end

  def x_get_tree_hash_kids(parent, count_only = false)
    affinities = parent[:parent].send("vm_scan_#{parent[:image]}_affinity").collect(&:id) if parent[:parent].present?
    nodes = parent[:children].map do |kid|
      {:id => "#{parent[:id]}_#{kid.id}",
       :image => parent[:image],
       :text => kid.name,
       :select => affinities.include?(kid.id),
       :cfmeNoClick => true,
       :children => []}
    end
    count_only_or_objects(count_only, nodes)
  end
end
31.453125
117
0.607054
ab5839dc6655c3d423bc12256799eb931b92d94b
1,121
# encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.

module Azure::Network::Mgmt::V2018_11_01
  module Models
    #
    # The error object.
    #
    class ErrorResponse

      include MsRestAzure

      # @return [ErrorDetails] Error.
      attr_accessor :error

      #
      # Mapper for ErrorResponse class as Ruby Hash.
      # This will be used for serialization/deserialization.
      #
      def self.mapper()
        {
          client_side_validation: true,
          required: false,
          serialized_name: 'ErrorResponse',
          type: {
            name: 'Composite',
            class_name: 'ErrorResponse',
            model_properties: {
              error: {
                client_side_validation: true,
                required: false,
                serialized_name: 'error',
                type: {
                  name: 'Composite',
                  class_name: 'ErrorDetails'
                }
              }
            }
          }
        }
      end
    end
  end
end
23.354167
70
0.529884
4a3712c9c1d67f4ff575f92cd7af9620113f7f00
371
# encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.

module Azure::Resources::Mgmt::V2018_06_01
  module Models
    #
    # Defines values for ResourceIdentityType
    #
    module ResourceIdentityType
      SystemAssigned = "SystemAssigned"
    end
  end
end
23.1875
70
0.730458
18a1396816291baff6fe0c3d2e43c989b049cb24
2,314
# == Schema Information
#
# Table name: payment_settings
#
#  id                                      :integer          not null, primary key
#  active                                  :boolean          not null
#  community_id                            :integer          not null
#  payment_gateway                         :string(64)
#  payment_process                         :string(64)
#  commission_from_seller                  :integer
#  minimum_price_cents                     :integer
#  minimum_price_currency                  :string(3)
#  minimum_transaction_fee_cents           :integer
#  minimum_transaction_fee_currency        :string(3)
#  confirmation_after_days                 :integer          not null
#  created_at                              :datetime         not null
#  updated_at                              :datetime         not null
#  api_client_id                           :string(255)
#  api_private_key                         :string(255)
#  api_publishable_key                     :string(255)
#  api_verified                            :boolean
#  api_visible_private_key                 :string(255)
#  api_country                             :string(255)
#  commission_from_buyer                   :integer
#  minimum_buyer_transaction_fee_cents     :integer
#  minimum_buyer_transaction_fee_currency  :string(3)
#  key_encryption_padding                  :boolean          default(FALSE)
#
# Indexes
#
#  index_payment_settings_on_community_id  (community_id)
#

class PaymentSettings < ApplicationRecord
  belongs_to :community
  validates_presence_of(:community_id)

  scope :preauthorize, -> { where(payment_process: :preauthorize) }
  scope :paypal, -> { preauthorize.where(payment_gateway: :paypal) }
  scope :stripe, -> { preauthorize.where(payment_gateway: :stripe) }
  scope :active, -> { where(active: true) }

  class << self
    def max_minimum_transaction_fee(community)
      stripe.or(PaymentSettings.paypal)
        .active
        .where(community: community)
        .pluck(:minimum_transaction_fee_cents)
        .compact
        .max
    end

    def stripe_sum_transaction_fee(community)
      stripe
        .active
        .where(community: community)
        .sum('IFNULL(minimum_transaction_fee_cents, 0) + IFNULL(minimum_buyer_transaction_fee_cents, 0)')
    end
  end
end
37.322581
103
0.579948
8774de0c5b7d0faa907654df83f0d061521dfe2f
1,654
class Websocketd < Formula
  desc "WebSockets the Unix way"
  homepage "http://websocketd.com"
  url "https://github.com/joewalnes/websocketd/archive/v0.4.1.tar.gz"
  sha256 "6b8fe0fad586d794e002340ee597059b2cfc734ba7579933263aef4743138fe5"
  license "BSD-2-Clause"

  bottle do
    sha256 cellar: :any_skip_relocation, arm64_monterey: "256933f91abb70b0974f791cbbd8158f4399c27ed2ce99438f7ac566a560003e"
    sha256 cellar: :any_skip_relocation, arm64_big_sur:  "8d9e5282df6737a6870a2a750570ab79909fb4463411797b0bf5d20cb269162d"
    sha256 cellar: :any_skip_relocation, monterey:       "5a769dfeb3f3062af01fb6ba0703e1d416dc648736e20103c1e0a31489796ddf"
    sha256 cellar: :any_skip_relocation, big_sur:        "cbdc36c8c64cb2b0f1f149242a4c82e5d3eebff521e45bdfc88aa7dced9d2440"
    sha256 cellar: :any_skip_relocation, catalina:       "944c9e728f5f1a7ba098207a0acf50b1e19209010c9d87c8cdd18758ec9c71b2"
    sha256 cellar: :any_skip_relocation, mojave:         "28f0108f697e146faec81782988e8fd8bd7162b11e7703578f752f7a51f2b6db"
    sha256 cellar: :any_skip_relocation, x86_64_linux:   "74fdd936d2632aaf2e484ef2f796d8d2f4f281f643ab23c414708363116ca9b8"
  end

  depends_on "go" => :build

  def install
    system "go", "build", "-ldflags", "-X main.version=#{version}", *std_go_args
    man1.install "release/websocketd.man" => "websocketd.1"
  end

  test do
    port = free_port
    pid = Process.fork { exec "#{bin}/websocketd", "--port=#{port}", "echo", "ok" }
    sleep 2

    begin
      assert_equal("404 page not found\n", shell_output("curl -s http://localhost:#{port}"))
    ensure
      Process.kill 9, pid
      Process.wait pid
    end
  end
end
43.526316
123
0.759976
1d5abaf5bb496f7b7fb811b52fe7d0afefc7ed98
405
cask :v1 => 'bartender' do
  version :latest
  sha256 :no_check

  url 'http://www.macbartender.com/Demo/Bartender.zip'
  appcast 'http://www.macbartender.com/updates/updates.php'
  homepage 'http://www.macbartender.com/'
  license :commercial

  app 'Bartender.app'

  postflight do
    suppress_move_to_applications
  end

  zap :delete => '~/Library/Preferences/com.surteesstudios.Bartender.plist'
end
22.5
75
0.735802
62a015ee145c1b8efaf33eac30c8f4594e9460e1
2,847
class PremiumNotificationsController < ApplicationController
  load_and_authorize_resource :store
  load_and_authorize_resource through: :store
  before_filter :get_manageable

  # GET /premium_notifications
  def index
    @premium_notifications = @store.premium_notifications
  end

  # GET /premium_notifications/1
  def show
  end

  # GET /premium_notifications/new
  def new
  end

  # GET /premium_notifications/1/edit
  def edit
  end

  # POST /premium_notifications
  def create
    @premium_notification = @store.premium_notifications.new(premium_notification_params)

    respond_to do |format|
      if @premium_notification.save
        format.html { redirect_to [@store, @premium_notification], notice: 'Premium notification was successfully created.' }
        format.json { render :show, status: :created, location: @premium_notification }
      else
        format.html { render :new }
        format.json { render json: @premium_notification.errors, status: :unprocessable_entity }
      end
    end
  end

  # PATCH/PUT /premium_notifications/1
  def update
    respond_to do |format|
      if @premium_notification.update(premium_notification_params)
        format.html { redirect_to [@store, @premium_notification], notice: 'Premium notification was successfully updated.' }
        format.json { render :show, status: :ok, location: @premium_notification }
      else
        format.html { render :edit }
        format.json { render json: @premium_notification.errors, status: :unprocessable_entity }
      end
    end
  end

  # DELETE /premium_notifications/1
  def destroy
    @premium_notification.destroy
    respond_to do |format|
      format.html { redirect_to store_premium_notifications_url(@store), notice: 'Premium notification was successfully destroyed.' }
      format.json { head :no_content }
    end
  end

  def change_status
    @premium_notifications = @store.premium_notifications
    @premium_notification = @premium_notifications.find(params[:premium_notification_id])
    @sucess = @premium_notification.toggle_status if @premium_notification
  end

  private

  # Never trust parameters from the scary internet, only allow the white list through.
  def premium_notification_params
    params[:premium_notification][:days] = params[:premium_notification][:days].present? && params[:premium_notification][:days].is_a?(Array) ? params[:premium_notification][:days].join(',') : params[:premium_notification][:days]
    params.require(:premium_notification).permit(:notification_text, :radius, :latitude, :longitude, :publish, :publish_date, :duration, :notification_time_from, :notification_time_to, :days, :deal_category_id, geo_coordinate_attributes: [:latitude, :longitude, :id])
  end

  def get_manageable
    @manageable = @store.manageable
  end
end
35.5875
231
0.731296
28f3bff5ef7f9e6826c16e4ec858d4ad0975dde9
12,302
# == Schema Information # # Table name: bounties # # id :integer not null, primary key # amount :decimal(10, 2) not null # person_id :integer # issue_id :integer not null # status :string(12) default("active"), not null # expires_at :datetime # created_at :datetime not null # updated_at :datetime not null # paid_at :datetime # anonymous :boolean default(FALSE), not null # owner_type :string(255) # owner_id :integer # bounty_expiration :string(255) # upon_expiration :string(255) # promotion :string(255) # acknowledged_at :datetime # tweet :boolean default(FALSE), not null # featured :boolean default(FALSE), not null # # Indexes # # index_bounties_on_anonymous (anonymous) # index_bounties_on_github_issue_id (issue_id) # index_bounties_on_owner_id (owner_id) # index_bounties_on_owner_type (owner_type) # index_bounties_on_patron_id (person_id) # index_bounties_on_status (status) # require 'account/team' class Bounty < ApplicationRecord belongs_to :person belongs_to :issue # TODO: this object shouldn't have an account... rework Transaction.build to use "account: bounty.issue" has_one :account, :through => :issue has_many :splits, :as => :item has_many :txns, :through => :splits # Helper defined in config/initializers/has_owner.rb # Gives access to polymorphic owner, which respects object anonymity on read has_owner validates :issue, presence: true validates :amount, numericality: { greater_than_or_equal_to: 5 } # define status constants module Status ACTIVE = 'active' REFUNDED = 'refunded' PAID = 'paid' def self.all [ACTIVE, REFUNDED, PAID] end end validates :status, inclusion: { in: Status.all } scope :active, lambda { where(status: Status::ACTIVE) } scope :refunded, lambda { where(status: Status::REFUNDED) } scope :paid, lambda { where(status: Status::PAID) } scope :not_refunded, lambda { where("status < :status AND status > :status", status: Status::REFUNDED) } # A bounty is visible so long as it has not been refunded, and is not anon scope :visible, lambda { where("anonymous = false AND status != :status", status: Status::REFUNDED) } scope :expiring_soon, lambda { |date=2.weeks.from_now, count=nil| where('expires_at < ?', date).order('expires_at desc').limit(count) } # bounties that count toward the displayed bounty total of issues scope :valuable, lambda { active.where('amount > 0') } scope :acknowledged, lambda { where("acknowledged_at IS NOT NULL") } scope :unacknowledged, lambda { where(acknowledged_at: nil) } before_create do self.owner ||= self.person # If no options were provided, automatically acknowledge the bounty, # meaning that it needs no interaction from us (feature, tweet, newsletter, etc.). unless bounty_expiration? || upon_expiration? || promotion? self.acknowledged_at = DateTime.now end end after_commit do team_ids = [] team_ids << owner_id if owner_type == 'Team' team_ids << previous_changes[:owner_id] if previous_changes[:owner_id] && (previous_changes[:owner_type] || owner_type) == 'Team' team_ids << issue.tracker.team_id Team.where(id: team_ids.compact.uniq).each(&:update_activity_total) update_issue_category end validate do if !new_record? && anonymous_changed? && !can_make_anonymous? 
&& false errors.add(:anonymous, "cannot be changed") end end class CannotRefund < StandardError; end def self.created_this_month now = DateTime.now where(created_at: (now.beginning_of_month..now.end_of_month)).order('created_at desc') end def self.admin_search(query) joins(:person) .where("bounties.id = :id OR people.email like :q OR people.first_name LIKE :q OR people.last_name LIKE :q OR people.display_name LIKE :q", q: "%#{query}%", id: query.to_i) end def self.summary(owner) collection = {} owner.bounties.not_refunded.includes(:issue => { :tracker => :team }).each do |bounty| key = (!bounty.issue.tracker.team || bounty.issue.tracker.team == owner) ? bounty.issue.tracker : bounty.issue.tracker.team collection[key] ||= { paid: 0.0, active: 0.0 } collection[key][:paid] += bounty.amount if bounty.paid? collection[key][:active] += bounty.amount if bounty.active? end collection.map { |k,v| { paid: v[:paid], active: v[:active] }.merge(k.is_a?(Team) ? { team: k } : {}).merge(k.is_a?(Tracker) ? { tracker: k } : {}) }.sort_by { |s| s[:active] }.reverse end def repository issue.tracker end #def default_expires_at # self.expires_at ||= 6.months.from_now #end def amount=(amount) clean_amount = ('%.2f' % amount.to_s.strip.gsub(/[$,]*/, '')) rescue nil write_attribute(:amount, clean_amount) end def to_s "$#{Money.new(100 * amount, 'USD')}" end alias_method :display_amount, :to_s # Used during payment to create Transactions with item def item_name issue.title end def self.amount_paid_since(date) Bounty.paid.where('paid_at > :d', d: date).sum(:amount).to_i end def self.amount_paid_to_date Bounty.paid.sum(:amount).to_i end def self.amount_unclaimed Bounty.active.sum(:amount).to_i end # @return [Bounty] def self.issues_with_largest_bounties self.group(:issue).order('sum_amount desc').sum(:amount) end # @return [OrderedHash] { Person: amount, ... } def self.most_total_bounties(count=nil) self.group(:person).limit(count).order('sum_amount desc').sum(:amount) end def send_bounty_increased_emails targets = [] targets += issue.backers targets += issue.developers targets += issue.tracker.active_followers targets += Person.bounty_hunters(team: owner) if owner.is_a?(Team) targets -= [person] targets.uniq.each { |person| person.send_email(:bounty_increased, bounty: self) } end def send_bounty_placed_emails targets = [] targets += issue.backers targets += issue.developers targets += issue.tracker.active_followers targets += Person.bounty_hunters(team: owner) if owner.is_a?(Team) targets -= [person] targets.uniq.each { |person| person.send_email(:bounty_placed, bounty: self) } end def refundable? # check status, can only be active. unless status == Status::ACTIVE errors.add :base, "Bounty is not active, its status is: #{status}" end # the issue cannot have any claims in the dispute period unless issue.bounty_claims.select(&:in_dispute_period?).empty? errors.add(:issue, "has one or more bounty claims in dispute period") end errors.empty? end def frontend_path "/issues/#{issue.id}/bounties/#{id}/receipt" end def frontend_url File.join(Api::Application.config.www_url, frontend_path) end # Refund bounty to the person who created it. The amount refundable is simply # the amount - (amount * bs fee) def refund! if refundable? 
self.class.transaction do transaction = Transaction.build do |tr| tr.description = "Refund Bounty(#{id}) - Bounty Amount: $#{amount} Refunded: $#{amount}" tr.splits.create(amount: -amount, item: issue) if owner_type == "Team" tr.splits.create(amount: +amount, item: owner) else tr.splits.create(amount: +amount, item: person) end end transaction or raise ActiveRecord::Rollback # update bounty to 'refunded' status or rollback update_attributes status: Status::REFUNDED or raise ActiveRecord::Rollback # email the backer person.send_email :bounty_refunded, bounty: self, transaction: transaction end # update displayed bounty total on issue issue.delay.update_bounty_total end end def refund_for_deleted_issue if refundable? self.class.transaction do transaction = Transaction.build do |tr| tr.description = "Refund Bounty for deleted issue (#{id}) - Bounty Amount: $#{amount} Refunded: $#{amount}" tr.splits.create(amount: -amount, item: issue) if owner_type == "Team" tr.splits.create(amount: +amount, item: owner) else tr.splits.create(amount: +amount, item: person) end end transaction or raise ActiveRecord::Rollback # update bounty to 'refunded' status or rollback update_attributes status: Status::REFUNDED or raise ActiveRecord::Rollback # email the backer person.send_email :bounty_refunded_for_deleted_issue, bounty: self end # update displayed bounty total on issue issue.delay.update_bounty_total end end def create_account issue.create_account! end def build_account issue.build_account end def active? status == Status::ACTIVE end def paid? status == Status::PAID end def refunded? status == Status::REFUNDED end # TODO move to has_owner.rb ? def can_make_anonymous? owner && owner.is_a?(Person) end def acknowledged? acknowledged_at? end # Did the user specify any options with their bounty? Custom expiration date, featured, etc. def has_options? bounty_expiration.present? || upon_expiration.present? || promotion.present? end def after_purchase(order) # Update budget balance for the user if order.checkout_method.is_a?(Account::Team) relation = order.checkout_method.owner.member_relations.where(person_id: person.id).first relation.update_balance(order.gross) end issue.update_bounty_total issue.update(category: 0) # track Bounty creation in new relic new_relic_data_point "Custom/Bounty/pay_in", amount.to_f # send a receipt email to the backer person.send_email(:bounty_created, bounty: self) if self.issue.bounties.count == 1 # send emails to backers, developers and followers that a bounty has been created on an issue delay.send_bounty_placed_emails else # send emails to backers, developers and followers that the bounty has been increased delay.send_bounty_increased_emails end # if it's a team bounty, add the issue tracker to team if owner.is_a?(Team) owner.delay.add_tracker(issue.tracker) end issue.developer_goals.find_each do |goal| goal.delay.bounty_created_callback end MixpanelEvent.track( person_id: person_id, event: 'Create Order', checkout_method: order.checkout_method.class.name, issue_id: issue.id, product: "bounty", amount: amount, bounty_expiration: bounty_expiration, upon_expiration: upon_expiration, promotion: promotion ) self end # the internal account used to pay for this bounty def source_account Split.where(item: self).reorder('created_at').first.txn.splits.where('amount < 0').reorder('amount').first.account end def move_to_issue(new_issue) old_issue = self.issue # NOTE: technically we could move refunded, just don't move $$ below raise "can't move refunded bounty" if refunded? 
# move bounty update_attributes(issue_id: new_issue.id) # move monies transaction = Transaction.build do |tr| tr.description = "Move Bounty(#{id}) From ##{old_issue.id} TO ##{new_issue.id}" tr.splits.create(amount: -amount, item: old_issue) tr.splits.create(amount: +amount, item: new_issue) end # update caches new_issue.update_bounty_total old_issue.update_bounty_total end # Info about Bounties. Included data: # count - count of all active bounties that can be claimed (issues with a bounty_total > 0) # amount - sum of all active bounty amounts # paid - sum of all paid out bounties def self.info { count: active.pluck(:issue_id).uniq.count, amount: active.sum(:amount), paid: paid.sum(:amount) } end def update_issue_category if issue.category.nil? issue.fiat! issue.save end end end
30.226044
176
0.671029
6232e4de8de1883639bc94175b0c77a53d3cfcd9
200
module Kea
  class Dhcp4Pool < KeaRecord
    self.table_name = 'dhcp4_pool'

    belongs_to :dhcp4_subnet, foreign_key: 'subnet_id', primary_key: 'subnet_id', inverse_of: :dhcp4_pools
  end
end
22.222222
81
0.725
03371c912a4bcbbb44c6846a8dd163d317d217c7
17,713
# frozen_string_literal: false require 'test/unit' class TestLazyEnumerator < Test::Unit::TestCase class Step include Enumerable attr_reader :current, :args def initialize(enum) @enum = enum @current = nil @args = nil end def each(*args) @args = args @enum.each do |v| @current = v if v.is_a? Enumerable yield *v else yield v end end end end def test_initialize assert_equal([1, 2, 3], [1, 2, 3].lazy.to_a) assert_equal([1, 2, 3], Enumerator::Lazy.new([1, 2, 3]){|y, v| y << v}.to_a) assert_raise(ArgumentError) { Enumerator::Lazy.new([1, 2, 3]) } a = [1, 2, 3].lazy a.freeze assert_raise(FrozenError) { a.__send__ :initialize, [4, 5], &->(y, *v) { y << yield(*v) } } end def test_each_args a = Step.new(1..3) assert_equal(1, a.lazy.each(4).first) assert_equal([4], a.args) end def test_each_line name = lineno = nil File.open(__FILE__) do |f| f.each("").map do |paragraph| paragraph[/\A\s*(.*)/, 1] end.find do |line| if name = line[/^class\s+(\S+)/, 1] lineno = f.lineno true end end end assert_equal(self.class.name, name) assert_operator(lineno, :>, 2) name = lineno = nil File.open(__FILE__) do |f| f.lazy.each("").map do |paragraph| paragraph[/\A\s*(.*)/, 1] end.find do |line| if name = line[/^class\s+(\S+)/, 1] lineno = f.lineno true end end end assert_equal(self.class.name, name) assert_equal(2, lineno) end def test_select a = Step.new(1..6) assert_equal(4, a.select {|x| x > 3}.first) assert_equal(6, a.current) assert_equal(4, a.lazy.select {|x| x > 3}.first) assert_equal(4, a.current) a = Step.new(['word', nil, 1]) assert_raise(TypeError) {a.select {|x| "x"+x}.first} assert_equal(nil, a.current) assert_equal("word", a.lazy.select {|x| "x"+x}.first) assert_equal("word", a.current) end def test_select_multiple_values e = Enumerator.new { |yielder| for i in 1..5 yielder.yield(i, i.to_s) end } assert_equal([[2, "2"], [4, "4"]], e.select {|x| x[0] % 2 == 0}) assert_equal([[2, "2"], [4, "4"]], e.lazy.select {|x| x[0] % 2 == 0}.force) end def test_map a = Step.new(1..3) assert_equal(2, a.map {|x| x * 2}.first) assert_equal(3, a.current) assert_equal(2, a.lazy.map {|x| x * 2}.first) assert_equal(1, a.current) end def test_map_packed_nested bug = '[ruby-core:81638] [Bug#13648]' a = Step.new([[1, 2]]) expected = [[[1, 2]]] assert_equal(expected, a.map {|*args| args}.map {|*args| args}.to_a) assert_equal(expected, a.lazy.map {|*args| args}.map {|*args| args}.to_a, bug) end def test_flat_map a = Step.new(1..3) assert_equal(2, a.flat_map {|x| [x * 2]}.first) assert_equal(3, a.current) assert_equal(2, a.lazy.flat_map {|x| [x * 2]}.first) assert_equal(1, a.current) end def test_flat_map_nested a = Step.new(1..3) assert_equal([1, "a"], a.flat_map {|x| ("a".."c").map {|y| [x, y]}}.first) assert_equal(3, a.current) assert_equal([1, "a"], a.lazy.flat_map {|x| ("a".."c").lazy.map {|y| [x, y]}}.first) assert_equal(1, a.current) end def test_flat_map_to_ary to_ary = Class.new { def initialize(value) @value = value end def to_ary [:to_ary, @value] end } assert_equal([:to_ary, 1, :to_ary, 2, :to_ary, 3], [1, 2, 3].flat_map {|x| to_ary.new(x)}) assert_equal([:to_ary, 1, :to_ary, 2, :to_ary, 3], [1, 2, 3].lazy.flat_map {|x| to_ary.new(x)}.force) end def test_flat_map_non_array assert_equal(["1", "2", "3"], [1, 2, 3].flat_map {|x| x.to_s}) assert_equal(["1", "2", "3"], [1, 2, 3].lazy.flat_map {|x| x.to_s}.force) end def test_flat_map_hash assert_equal([{?a=>97}, {?b=>98}, {?c=>99}], [?a, ?b, ?c].flat_map {|x| {x=>x.ord}}) assert_equal([{?a=>97}, {?b=>98}, {?c=>99}], [?a, ?b, ?c].lazy.flat_map {|x| {x=>x.ord}}.force) end def 
test_reject a = Step.new(1..6) assert_equal(4, a.reject {|x| x < 4}.first) assert_equal(6, a.current) assert_equal(4, a.lazy.reject {|x| x < 4}.first) assert_equal(4, a.current) a = Step.new(['word', nil, 1]) assert_equal(nil, a.reject {|x| x}.first) assert_equal(1, a.current) assert_equal(nil, a.lazy.reject {|x| x}.first) assert_equal(nil, a.current) end def test_reject_multiple_values e = Enumerator.new { |yielder| for i in 1..5 yielder.yield(i, i.to_s) end } assert_equal([[2, "2"], [4, "4"]], e.reject {|x| x[0] % 2 != 0}) assert_equal([[2, "2"], [4, "4"]], e.lazy.reject {|x| x[0] % 2 != 0}.force) end def test_grep a = Step.new('a'..'f') assert_equal('c', a.grep(/c/).first) assert_equal('f', a.current) assert_equal('c', a.lazy.grep(/c/).first) assert_equal('c', a.current) assert_equal(%w[a e], a.grep(proc {|x| /[aeiou]/ =~ x})) assert_equal(%w[a e], a.lazy.grep(proc {|x| /[aeiou]/ =~ x}).to_a) end def test_grep_with_block a = Step.new('a'..'f') assert_equal('C', a.grep(/c/) {|i| i.upcase}.first) assert_equal('C', a.lazy.grep(/c/) {|i| i.upcase}.first) end def test_grep_multiple_values e = Enumerator.new { |yielder| 3.times { |i| yielder.yield(i, i.to_s) } } assert_equal([[2, "2"]], e.grep(proc {|x| x == [2, "2"]})) assert_equal([[2, "2"]], e.lazy.grep(proc {|x| x == [2, "2"]}).force) assert_equal(["22"], e.lazy.grep(proc {|x| x == [2, "2"]}, &:join).force) end def test_grep_v a = Step.new('a'..'f') assert_equal('b', a.grep_v(/a/).first) assert_equal('f', a.current) assert_equal('a', a.lazy.grep_v(/c/).first) assert_equal('a', a.current) assert_equal(%w[b c d f], a.grep_v(proc {|x| /[aeiou]/ =~ x})) assert_equal(%w[b c d f], a.lazy.grep_v(proc {|x| /[aeiou]/ =~ x}).to_a) end def test_grep_v_with_block a = Step.new('a'..'f') assert_equal('B', a.grep_v(/a/) {|i| i.upcase}.first) assert_equal('B', a.lazy.grep_v(/a/) {|i| i.upcase}.first) end def test_grep_v_multiple_values e = Enumerator.new { |yielder| 3.times { |i| yielder.yield(i, i.to_s) } } assert_equal([[0, "0"], [1, "1"]], e.grep_v(proc {|x| x == [2, "2"]})) assert_equal([[0, "0"], [1, "1"]], e.lazy.grep_v(proc {|x| x == [2, "2"]}).force) assert_equal(["00", "11"], e.lazy.grep_v(proc {|x| x == [2, "2"]}, &:join).force) end def test_zip a = Step.new(1..3) assert_equal([1, "a"], a.zip("a".."c").first) assert_equal(3, a.current) assert_equal([1, "a"], a.lazy.zip("a".."c").first) assert_equal(1, a.current) end def test_zip_short_arg a = Step.new(1..5) assert_equal([5, nil], a.zip("a".."c").last) assert_equal([5, nil], a.lazy.zip("a".."c").force.last) end def test_zip_without_arg a = Step.new(1..3) assert_equal([1], a.zip.first) assert_equal(3, a.current) assert_equal([1], a.lazy.zip.first) assert_equal(1, a.current) end def test_zip_bad_arg a = Step.new(1..3) assert_raise(TypeError){ a.lazy.zip(42) } end def test_zip_with_block # zip should be eager when a block is given a = Step.new(1..3) ary = [] assert_equal(nil, a.lazy.zip("a".."c") {|x, y| ary << [x, y]}) assert_equal(a.zip("a".."c"), ary) assert_equal(3, a.current) end def test_take a = Step.new(1..10) assert_equal(1, a.take(5).first) assert_equal(5, a.current) assert_equal(1, a.lazy.take(5).first) assert_equal(1, a.current) assert_equal((1..5).to_a, a.lazy.take(5).force) assert_equal(5, a.current) a = Step.new(1..10) assert_equal([], a.lazy.take(0).force) assert_equal(nil, a.current) end def test_take_bad_arg a = Step.new(1..10) assert_raise(ArgumentError) { a.lazy.take(-1) } end def test_take_recycle bug6428 = '[ruby-dev:45634]' a = Step.new(1..10) take5 = a.lazy.take(5) 
assert_equal((1..5).to_a, take5.force, bug6428) assert_equal((1..5).to_a, take5.force, bug6428) end def test_take_nested bug7696 = '[ruby-core:51470]' a = Step.new(1..10) take5 = a.lazy.take(5) assert_equal([*(1..5)]*5, take5.flat_map{take5}.force, bug7696) end def test_drop_while_nested bug7696 = '[ruby-core:51470]' a = Step.new(1..10) drop5 = a.lazy.drop_while{|x| x < 6} assert_equal([*(6..10)]*5, drop5.flat_map{drop5}.force, bug7696) end def test_drop_nested bug7696 = '[ruby-core:51470]' a = Step.new(1..10) drop5 = a.lazy.drop(5) assert_equal([*(6..10)]*5, drop5.flat_map{drop5}.force, bug7696) end def test_zip_nested bug7696 = '[ruby-core:51470]' enum = ('a'..'z').each enum.next zip = (1..3).lazy.zip(enum, enum) assert_equal([[1, 'a', 'a'], [2, 'b', 'b'], [3, 'c', 'c']]*3, zip.flat_map{zip}.force, bug7696) end def test_zip_lazy_on_args zip = Step.new(1..2).lazy.zip(42..Float::INFINITY) assert_equal [[1, 42], [2, 43]], zip.force end def test_zip_efficient_on_array_args ary = [42, :foo] %i[to_enum enum_for lazy each].each do |forbid| ary.define_singleton_method(forbid){ fail "#{forbid} was called"} end zip = Step.new(1..2).lazy.zip(ary) assert_equal [[1, 42], [2, :foo]], zip.force end def test_zip_nonsingle bug8735 = '[ruby-core:56383] [Bug #8735]' obj = Object.new def obj.each yield yield 1, 2 end assert_equal(obj.to_enum.zip(obj.to_enum), obj.to_enum.lazy.zip(obj.to_enum).force, bug8735) end def test_take_rewound bug7696 = '[ruby-core:51470]' e=(1..42).lazy.take(2) assert_equal 1, e.next, bug7696 assert_equal 2, e.next, bug7696 e.rewind assert_equal 1, e.next, bug7696 assert_equal 2, e.next, bug7696 end def test_take_while a = Step.new(1..10) assert_equal(1, a.take_while {|i| i < 5}.first) assert_equal(5, a.current) assert_equal(1, a.lazy.take_while {|i| i < 5}.first) assert_equal(1, a.current) assert_equal((1..4).to_a, a.lazy.take_while {|i| i < 5}.to_a) end def test_drop a = Step.new(1..10) assert_equal(6, a.drop(5).first) assert_equal(10, a.current) assert_equal(6, a.lazy.drop(5).first) assert_equal(6, a.current) assert_equal((6..10).to_a, a.lazy.drop(5).to_a) end def test_drop_while a = Step.new(1..10) assert_equal(5, a.drop_while {|i| i % 5 > 0}.first) assert_equal(10, a.current) assert_equal(5, a.lazy.drop_while {|i| i % 5 > 0}.first) assert_equal(5, a.current) assert_equal((5..10).to_a, a.lazy.drop_while {|i| i % 5 > 0}.to_a) end def test_drop_and_take assert_equal([4, 5], (1..Float::INFINITY).lazy.drop(3).take(2).to_a) end def test_cycle a = Step.new(1..3) assert_equal("1", a.cycle(2).map(&:to_s).first) assert_equal(3, a.current) assert_equal("1", a.lazy.cycle(2).map(&:to_s).first) assert_equal(1, a.current) end def test_cycle_with_block # cycle should be eager when a block is given a = Step.new(1..3) ary = [] assert_equal(nil, a.lazy.cycle(2) {|i| ary << i}) assert_equal(a.cycle(2).to_a, ary) assert_equal(3, a.current) end def test_cycle_chain a = 1..3 assert_equal([1,2,3,1,2,3,1,2,3,1], a.lazy.cycle.take(10).force) assert_equal([2,2,2,2,2,2,2,2,2,2], a.lazy.cycle.select {|x| x == 2}.take(10).force) assert_equal([2,2,2,2,2,2,2,2,2,2], a.lazy.select {|x| x == 2}.cycle.take(10).force) end def test_force assert_equal([1, 2, 3], (1..Float::INFINITY).lazy.take(3).force) end def test_inspect assert_equal("#<Enumerator::Lazy: 1..10>", (1..10).lazy.inspect) assert_equal('#<Enumerator::Lazy: #<Enumerator: "foo":each_char>>', "foo".each_char.lazy.inspect) assert_equal("#<Enumerator::Lazy: #<Enumerator::Lazy: 1..10>:map>", (1..10).lazy.map {}.inspect) assert_equal("#<Enumerator::Lazy: 
#<Enumerator::Lazy: 1..10>:take(0)>", (1..10).lazy.take(0).inspect) assert_equal("#<Enumerator::Lazy: #<Enumerator::Lazy: 1..10>:take(3)>", (1..10).lazy.take(3).inspect) assert_equal('#<Enumerator::Lazy: #<Enumerator::Lazy: "a".."c">:grep(/b/)>', ("a".."c").lazy.grep(/b/).inspect) assert_equal("#<Enumerator::Lazy: #<Enumerator::Lazy: 1..10>:cycle(3)>", (1..10).lazy.cycle(3).inspect) assert_equal("#<Enumerator::Lazy: #<Enumerator::Lazy: 1..10>:cycle>", (1..10).lazy.cycle.inspect) assert_equal("#<Enumerator::Lazy: #<Enumerator::Lazy: 1..10>:cycle(3)>", (1..10).lazy.cycle(3).inspect) l = (1..10).lazy.map {}.collect {}.flat_map {}.collect_concat {}.select {}.find_all {}.reject {}.grep(1).zip(?a..?c).take(10).take_while {}.drop(3).drop_while {}.cycle(3) assert_equal(<<EOS.chomp, l.inspect) #<Enumerator::Lazy: #<Enumerator::Lazy: #<Enumerator::Lazy: #<Enumerator::Lazy: #<Enumerator::Lazy: #<Enumerator::Lazy: #<Enumerator::Lazy: #<Enumerator::Lazy: #<Enumerator::Lazy: #<Enumerator::Lazy: #<Enumerator::Lazy: #<Enumerator::Lazy: #<Enumerator::Lazy: #<Enumerator::Lazy: #<Enumerator::Lazy: 1..10>:map>:collect>:flat_map>:collect_concat>:select>:find_all>:reject>:grep(1)>:zip("a".."c")>:take(10)>:take_while>:drop(3)>:drop_while>:cycle(3)> EOS end def test_lazy_to_enum lazy = [1, 2, 3].lazy def lazy.foo(*args) yield args yield args end enum = lazy.to_enum(:foo, :hello, :world) assert_equal Enumerator::Lazy, enum.class assert_equal nil, enum.size assert_equal [[:hello, :world], [:hello, :world]], enum.to_a assert_equal [1, 2, 3], lazy.to_enum.to_a end def test_size lazy = [1, 2, 3].lazy assert_equal 3, lazy.size assert_equal 42, Enumerator::Lazy.new([],->{42}){}.size assert_equal 42, Enumerator::Lazy.new([],42){}.size assert_equal 42, Enumerator::Lazy.new([],42){}.lazy.size assert_equal 42, lazy.to_enum{ 42 }.size %i[map collect].each do |m| assert_equal 3, lazy.send(m){}.size end assert_equal 3, lazy.zip([4]).size %i[flat_map collect_concat select find_all reject take_while drop_while].each do |m| assert_equal nil, lazy.send(m){}.size end assert_equal nil, lazy.grep(//).size assert_equal 2, lazy.take(2).size assert_equal 3, lazy.take(4).size assert_equal 4, loop.lazy.take(4).size assert_equal nil, lazy.select{}.take(4).size assert_equal 1, lazy.drop(2).size assert_equal 0, lazy.drop(4).size assert_equal Float::INFINITY, loop.lazy.drop(4).size assert_equal nil, lazy.select{}.drop(4).size assert_equal 0, lazy.cycle(0).size assert_equal 6, lazy.cycle(2).size assert_equal 3 << 80, 4.times.inject(lazy){|enum| enum.cycle(1 << 20)}.size assert_equal Float::INFINITY, lazy.cycle.size assert_equal Float::INFINITY, loop.lazy.cycle(4).size assert_equal Float::INFINITY, loop.lazy.cycle.size assert_equal nil, lazy.select{}.cycle(4).size assert_equal nil, lazy.select{}.cycle.size class << (obj = Object.new) def each; end def size; 0; end include Enumerable end lazy = obj.lazy assert_equal 0, lazy.cycle.size assert_raise(TypeError) {lazy.cycle("").size} end def test_map_zip bug7507 = '[ruby-core:50545]' assert_ruby_status(["-e", "GC.stress = true", "-e", "(1..10).lazy.map{}.zip(){}"], "", bug7507) assert_ruby_status(["-e", "GC.stress = true", "-e", "(1..10).lazy.map{}.zip().to_a"], "", bug7507) end def test_require_block %i[select reject drop_while take_while map flat_map].each do |method| assert_raise(ArgumentError){ [].lazy.send(method) } end end def test_laziness_conservation bug7507 = '[ruby-core:51510]' { slice_before: //, slice_after: //, with_index: nil, cycle: nil, each_with_object: 42, each_slice: 42, 
each_entry: nil, each_cons: 42, }.each do |method, arg| assert_equal Enumerator::Lazy, [].lazy.send(method, *arg).class, bug7507 end assert_equal Enumerator::Lazy, [].lazy.chunk{}.class, bug7507 assert_equal Enumerator::Lazy, [].lazy.slice_when{}.class, bug7507 end def test_each_cons_limit n = 1 << 120 assert_equal([1, 2], (1..n).lazy.each_cons(2).first) assert_equal([[1, 2], [2, 3]], (1..n).lazy.each_cons(2).first(2)) end def test_each_slice_limit n = 1 << 120 assert_equal([1, 2], (1..n).lazy.each_slice(2).first) assert_equal([[1, 2], [3, 4]], (1..n).lazy.each_slice(2).first(2)) end def test_no_warnings le = (1..3).lazy assert_warning("") {le.zip([4,5,6]).force} assert_warning("") {le.zip(4..6).force} assert_warning("") {le.take(1).force} assert_warning("") {le.drop(1).force} assert_warning("") {le.drop_while{false}.force} end def test_symbol_chain assert_equal(["1", "3"], [1, 2, 3].lazy.reject(&:even?).map(&:to_s).force) assert_raise(NoMethodError) do [1, 2, 3].lazy.map(&:undefined).map(&:to_s).force end end def test_uniq u = (1..Float::INFINITY).lazy.uniq do |x| raise "too big" if x > 10000 (x**2) % 10 end assert_equal([1, 2, 3, 4, 5, 10], u.first(6)) assert_equal([1, 2, 3, 4, 5, 10], u.first(6)) end end
30.434708
449
0.589454
0816d3d433d504bb8fd4f4bd68fdca6ec9d5828d
1,961
class KubernetesCli < Formula
  desc "Kubernetes command-line interface"
  homepage "https://kubernetes.io/"
  url "https://github.com/kubernetes/kubernetes.git",
      :tag => "v1.6.0",
      :revision => "fff5156092b56e6bd60fff75aad4dc9de6b6ef37"
  head "https://github.com/kubernetes/kubernetes.git"

  bottle do
    cellar :any_skip_relocation
    sha256 "ec0fe7de3bb8def900e8f238a57f4645fc870b64e79346e0a3108e47bc01d701" => :sierra
    sha256 "558c06887bdcac310c5a18d2e1d0aed5152f35491984f50bba945e44fab0fa37" => :el_capitan
    sha256 "9bcfb52b91a113603851736d9a1924ae8ab11c17462e0b41499e593d79fea7cf" => :yosemite
  end

  devel do
    url "https://github.com/kubernetes/kubernetes.git",
        :tag => "v1.6.1-beta.0",
        :revision => "8d26223577ea3c9ad94c6e858a6ec43ef8927a9c"
    version "1.6.1-beta.0"
  end

  depends_on "go" => :build

  def install
    ENV["GOPATH"] = buildpath
    arch = MacOS.prefer_64_bit? ? "amd64" : "x86"
    dir = buildpath/"src/k8s.io/kubernetes"
    dir.install buildpath.children - [buildpath/".brew_home"]

    cd dir do
      # Race condition still exists in OSX Yosemite
      # Filed issue: https://github.com/kubernetes/kubernetes/issues/34635
      ENV.deparallelize { system "make", "generated_files" }

      # Make binary
      system "make", "kubectl"
      bin.install "_output/local/bin/darwin/#{arch}/kubectl"

      # Install bash completion
      output = Utils.popen_read("#{bin}/kubectl completion bash")
      (bash_completion/"kubectl").write output

      # Install zsh completion
      output = Utils.popen_read("#{bin}/kubectl completion zsh")
      (zsh_completion/"kubectl").write output
    end
  end

  test do
    run_output = shell_output("#{bin}/kubectl 2>&1")
    assert_match "kubectl controls the Kubernetes cluster manager.", run_output

    version_output = shell_output("#{bin}/kubectl version --client 2>&1")
    assert_match "GitTreeState:\"clean\"", version_output
  end
end
33.810345
92
0.703213
08e0484766f8780f16070fe162e9e4fb5b5935dc
904
module Metricstore
  class Incrementer < Updater

    def increment(key, delta, ttl=nil)
      return if delta.zero?
      update(key, delta, ttl)
    end

    protected

    def prepare_data(delta)
      delta
    end

    def consolidate_data(delta1, delta2)
      delta1 + delta2
    end

    def handle_update(key, delta, ttl, errors)
      stored_value, cas = kvstore.fetch(key, :ttl => ttl)
      if stored_value.nil?
        if kvstore.add(key, delta, :ttl => ttl)
          return delta
        else
          # collision
          retry_update(key, delta, ttl, errors)
          return nil
        end
      else
        new_value = stored_value + delta
        if kvstore.set(key, new_value, :ttl => ttl, :cas => cas)
          return new_value
        else
          # collision: retry with the same delta (original referenced an undefined min_max)
          retry_update(key, delta, ttl, errors)
          return nil
        end
      end
    end
  end
end
22.04878
64
0.566372
0840403b9928bb77903c858f3e578648150c41b6
704
# frozen_string_literal: true

require "motion"

module Motion
  class Event
    def self.from_raw(raw)
      new(raw) if raw
    end

    attr_reader :raw

    def initialize(raw)
      @raw = raw.freeze
    end

    def type
      raw["type"]
    end

    alias_method :name, :type

    def details
      raw.fetch("details", {})
    end

    def extra_data
      raw["extraData"]
    end

    def target
      return @target if defined?(@target)

      @target = Motion::Element.from_raw(raw["target"])
    end

    def element
      return @element if defined?(@element)

      @element = Motion::Element.from_raw(raw["element"])
    end

    def form_data
      element&.form_data
    end
  end
end
14.666667
57
0.599432
91b5bc51ea62e44f8ae878030093b7525a446525
533
# encoding: utf-8

require 'spec_helper'

describe Algebra::Rename::Methods, '#rename' do
  subject { object.rename(aliases) }

  let(:described_class) { Relation }
  let(:attribute)       { Attribute::Integer.new(:id) }
  let(:aliases)         { { id: :other_id } }
  let(:object)          { described_class.new([attribute], LazyEnumerable.new([[1]])) }

  it { should be_instance_of(Algebra::Rename) }
end
35.533333
87
0.493433
ed86853fdb1829848680963f3dd243892029ea81
840
require 'test_helper'

class UsersIndexTest < ActionDispatch::IntegrationTest

  def setup
    @admin     = users(:michael)
    @non_admin = users(:archer)
  end

  test "index as admin including pagination and delete links" do
    log_in_as(@admin)
    get users_path
    assert_template 'users/index'
    assert_select 'div.pagination', count: 2
    first_page_of_users = User.paginate(page: 1)
    first_page_of_users.each do |user|
      assert_select 'a[href=?]', user_path(user), text: user.name
      unless user == @admin
        assert_select 'a[href=?]', user_path(user), text: 'delete'
      end
    end
    assert_difference 'User.count', -1 do
      delete user_path(@non_admin)
    end
  end

  test "index as non_admin" do
    log_in_as(@non_admin)
    get users_path
    assert_select 'a', text: 'delete', count: 0
  end
end
24.705882
66
0.675
330ddbb05f571f1e1481b022c7116627400057eb
155
class AddIndexerApiBaseUrlToTezosChains < ActiveRecord::Migration[5.2]
  def change
    add_column :tezos_chains, :indexer_api_base_url, :string
  end
end
25.833333
70
0.8
bfd96270f639ce0de29f33a451cf949a4347d698
1,504
require "language/node" class Autocode < Formula desc "Code automation for every language, library and framework" homepage "https://autocode.readme.io/" url "https://registry.npmjs.org/autocode/-/autocode-1.3.1.tgz" sha256 "952364766e645d4ddae30f9d6cc106fdb74d05afc4028066f75eeeb17c4b0247" bottle do cellar :any_skip_relocation rebuild 1 sha256 "44742d0ccc3af3f27590445dbf2e89dffd8e684ff81521b5dc421449507879cd" => :mojave sha256 "a11f1fbbbf04052b9885a00abc88e7539a6c1992e35a62c6776df7ea32daf890" => :high_sierra sha256 "f369819b2f33327071a68455a14f66855286c7614977f06704f21c38e2df5f89" => :sierra sha256 "c321c73e1662332392c5949467c544e18db30849019555086ad14eeb097656d2" => :el_capitan sha256 "a0b7c969db9e2870e818587c7d832bbe0bb187cbc01346b85bb81a6097a9e015" => :yosemite sha256 "04effb5aecdd48e2a3c38435079424fd83f08dff206096f9807ff7c4ccd68b93" => :mavericks sha256 "4376dc40af997dae743476e80fb878e02b05d62e22d4af3f4e38f8ea23df3c8e" => :x86_64_linux # glibc 2.19 end depends_on "node" def install system "npm", "install", *Language::Node.std_npm_install_args(libexec) bin.install_symlink Dir["#{libexec}/bin/*"] end test do (testpath/".autocode/config.yml").write <<~EOS name: test version: 0.1.0 description: test description author: name: Test User email: [email protected] url: https://example.com copyright: 2015 Test EOS system bin/"autocode", "build" end end
35.809524
107
0.757979
085e803c6c59ce0299f50a83a0f1a70b7553d5cf
5,122
require "abstract_unit" class ViewLoadPathsTest < ActionController::TestCase class TestController < ActionController::Base def self.controller_path() "test" end before_action :add_view_path, only: :hello_world_at_request_time def hello_world() end def hello_world_at_request_time() render(action: "hello_world") end private def add_view_path prepend_view_path "#{FIXTURE_LOAD_PATH}/override" end end module Test class SubController < ActionController::Base layout "test/sub" def hello_world; render(template: "test/hello_world"); end end end def setup @request = ActionController::TestRequest.create @response = ActionDispatch::TestResponse.new @controller = TestController.new @paths = TestController.view_paths end def teardown TestController.view_paths = @paths end def expand(array) array.map {|x| File.expand_path(x.to_s)} end def assert_paths(*paths) controller = paths.first.is_a?(Class) ? paths.shift : @controller assert_equal expand(paths), controller.view_paths.map(&:to_s) end def test_template_load_path_was_set_correctly assert_paths FIXTURE_LOAD_PATH end def test_controller_appends_view_path_correctly @controller.append_view_path "foo" assert_paths(FIXTURE_LOAD_PATH, "foo") @controller.append_view_path(%w(bar baz)) assert_paths(FIXTURE_LOAD_PATH, "foo", "bar", "baz") @controller.append_view_path(FIXTURE_LOAD_PATH) assert_paths(FIXTURE_LOAD_PATH, "foo", "bar", "baz", FIXTURE_LOAD_PATH) end def test_controller_prepends_view_path_correctly @controller.prepend_view_path "baz" assert_paths("baz", FIXTURE_LOAD_PATH) @controller.prepend_view_path(%w(foo bar)) assert_paths "foo", "bar", "baz", FIXTURE_LOAD_PATH @controller.prepend_view_path(FIXTURE_LOAD_PATH) assert_paths FIXTURE_LOAD_PATH, "foo", "bar", "baz", FIXTURE_LOAD_PATH end def test_template_appends_view_path_correctly @controller.instance_variable_set :@template, ActionView::Base.new(TestController.view_paths, {}, @controller) class_view_paths = TestController.view_paths @controller.append_view_path "foo" assert_paths FIXTURE_LOAD_PATH, "foo" @controller.append_view_path(%w(bar baz)) assert_paths FIXTURE_LOAD_PATH, "foo", "bar", "baz" assert_paths TestController, *class_view_paths end def test_template_prepends_view_path_correctly @controller.instance_variable_set :@template, ActionView::Base.new(TestController.view_paths, {}, @controller) class_view_paths = TestController.view_paths @controller.prepend_view_path "baz" assert_paths "baz", FIXTURE_LOAD_PATH @controller.prepend_view_path(%w(foo bar)) assert_paths "foo", "bar", "baz", FIXTURE_LOAD_PATH assert_paths TestController, *class_view_paths end def test_view_paths get :hello_world assert_response :success assert_equal "Hello world!", @response.body end def test_view_paths_override TestController.prepend_view_path "#{FIXTURE_LOAD_PATH}/override" get :hello_world assert_response :success assert_equal "Hello overridden world!", @response.body end def test_view_paths_override_for_layouts_in_controllers_with_a_module @controller = Test::SubController.new Test::SubController.view_paths = [ "#{FIXTURE_LOAD_PATH}/override", FIXTURE_LOAD_PATH, "#{FIXTURE_LOAD_PATH}/override2" ] get :hello_world assert_response :success assert_equal "layout: Hello overridden world!", @response.body end def test_view_paths_override_at_request_time get :hello_world_at_request_time assert_response :success assert_equal "Hello overridden world!", @response.body end def test_decorate_view_paths_with_custom_resolver decorator_class = Class.new(ActionView::PathResolver) do def initialize(path_set) @path_set = 
path_set end def find_all(*args) @path_set.find_all(*args).collect do |template| ::ActionView::Template.new( "Decorated body", template.identifier, template.handler, virtual_path: template.virtual_path, format: template.formats ) end end end decorator = decorator_class.new(TestController.view_paths) TestController.view_paths = ActionView::PathSet.new.push(decorator) get :hello_world assert_response :success assert_equal "Decorated body", @response.body end def test_inheritance original_load_paths = ActionController::Base.view_paths self.class.class_eval %{ class A < ActionController::Base; end class B < A; end class C < ActionController::Base; end } A.view_paths = ["a/path"] assert_paths A, "a/path" assert_paths A, *B.view_paths assert_paths C, *original_load_paths C.view_paths = [] assert_nothing_raised { C.append_view_path "c/path" } assert_paths C, "c/path" end def test_lookup_context_accessor assert_equal ["test"], TestController.new.lookup_context.prefixes end end
29.606936
125
0.729988
bb202049ad8b83f096e8bf92ba3e3834fd74ac62
1,273
require File.expand_path('../../../spec_helper', __FILE__)
require "stringio"
require File.expand_path('../shared/read', __FILE__)

describe "StringIO#sysread when passed length, buffer" do
  it_behaves_like :stringio_read, :sysread
end

describe "StringIO#sysread when passed [length]" do
  it_behaves_like :stringio_read_length, :sysread
end

describe "StringIO#sysread when passed no arguments" do
  it_behaves_like :stringio_read_no_arguments, :sysread

  it "returns an empty String if at EOF" do
    @io.sysread.should == "example"
    @io.sysread.should == ""
  end
end

describe "StringIO#sysread when self is not readable" do
  it_behaves_like :stringio_read_not_readable, :sysread
end

describe "StringIO#sysread when passed nil" do
  it_behaves_like :stringio_read_nil, :sysread

  it "returns an empty String if at EOF" do
    @io.sysread(nil).should == "example"
    @io.sysread(nil).should == ""
  end
end

describe "StringIO#sysread when passed [length]" do
  before(:each) do
    @io = StringIO.new("example")
  end

  it "raises an EOFError when self's position is at the end" do
    @io.pos = 7
    lambda { @io.sysread(10) }.should raise_error(EOFError)
  end

  it "returns an empty String when length is 0" do
    @io.sysread(0).should == ""
  end
end
25.979592
63
0.724273
4abc221067f750601c9ffb1fd08a9db63eb5814a
670
module Puppet::Parser::Functions
  Puppet::Parser::Functions.newfunction(:default_content, :type => :rvalue, :doc => <<-'ENDOFDOC'
    Takes an optional content and an optional template name to calculate
    the actual contents of a file.

    This small function abbreviates the default initialisation boilerplate
    of stdmod modules.
    ENDOFDOC
  ) do |args|
    content = args[0]
    template_name = args[1]

    Puppet::Parser::Functions.autoloader.loadall

    return content if content != ''
    return function_template([template_name]) if template_name != ''
    return :undef
  end
end
29.130435
81
0.637313
2175e0be45b69b957a50a970b56bbd2dc63ba45e
286
module BreadcrumbHelper
  def breadcrumb_name(object, prop='name')
    name =
      if prop == 'name' && object.respond_to?(:full_name)
        object.full_name
      else
        "#{object.class.model_name.human} #{object.public_send(prop)}"
      end
    name.truncate(40)
  end
end
22
70
0.636364
0801f4abbbb3d348136fd79f1c0ff7e78f9aa3b8
85
cask 'missing-name' do
  version '1.2.3'

  url 'http://localhost/something.dmg'
end
14.166667
38
0.682353
18d29c663dd1243cbe6a4cc52a2d21091f34bf31
701
cask 'prince' do
  version '12.5'
  sha256 'd7940c2f60b1e9657db1deb1144b2496cc34c728f650c36b24d6885b964e9aed'

  url "https://www.princexml.com/download/prince-#{version}-macosx.tar.gz"
  appcast 'https://www.princexml.com/download/'
  name 'Prince'
  homepage 'https://www.princexml.com/'

  # shim script (https://github.com/Homebrew/homebrew-cask/issues/18809)
  shimscript = "#{staged_path}/prince-#{version}-macosx/prince.wrapper.sh"

  binary shimscript, target: 'prince'

  preflight do
    IO.write shimscript, <<~EOS
      #!/bin/sh
      exec '#{staged_path}/prince-#{version}-macosx/lib/prince/bin/prince' --prefix '#{staged_path}/prince-#{version}-macosx/lib/prince' "$@"
    EOS
  end
end
33.380952
141
0.713267
39253e51b2f1d18c4bc55ade0433473cdd3fd4d4
513
# Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.

module OCI
  module Opsi::Models
    EXADATA_ENTITY_SOURCE_ENUM = [
      EXADATA_ENTITY_SOURCE_EM_MANAGED_EXTERNAL_EXADATA = 'EM_MANAGED_EXTERNAL_EXADATA'.freeze
    ].freeze
  end
end
46.636364
245
0.769981
4aa9175ceb6506da6445af7febbe097e7a302508
7,996
=begin #Argo Workflows API #Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argoproj.github.io/argo-workflows/ The version of the OpenAPI document: VERSION Generated by: https://openapi-generator.tech OpenAPI Generator version: 5.2.1 =end require 'date' require 'time' module ArgoWorkflows # HTTPGetAction describes an action based on HTTP Get requests. class HTTPGetAction # Host name to connect to, defaults to the pod IP. You probably want to set \"Host\" in httpHeaders instead. attr_accessor :host # Custom headers to set in the request. HTTP allows repeated headers. attr_accessor :http_headers # Path to access on the HTTP server. attr_accessor :path attr_accessor :port # Scheme to use for connecting to the host. Defaults to HTTP. attr_accessor :scheme # Attribute mapping from ruby-style variable name to JSON key. def self.attribute_map { :'host' => :'host', :'http_headers' => :'httpHeaders', :'path' => :'path', :'port' => :'port', :'scheme' => :'scheme' } end # Returns all the JSON keys this model knows about def self.acceptable_attributes attribute_map.values end # Attribute type mapping. def self.openapi_types { :'host' => :'String', :'http_headers' => :'Array<HTTPHeader>', :'path' => :'String', :'port' => :'String', :'scheme' => :'String' } end # List of attributes with nullable: true def self.openapi_nullable Set.new([ ]) end # Initializes the object # @param [Hash] attributes Model attributes in the form of hash def initialize(attributes = {}) if (!attributes.is_a?(Hash)) fail ArgumentError, "The input argument (attributes) must be a hash in `ArgoWorkflows::HTTPGetAction` initialize method" end # check to see if the attribute exists and convert string to symbol for hash key attributes = attributes.each_with_object({}) { |(k, v), h| if (!self.class.attribute_map.key?(k.to_sym)) fail ArgumentError, "`#{k}` is not a valid attribute in `ArgoWorkflows::HTTPGetAction`. Please check the name to make sure it's valid. List of attributes: " + self.class.attribute_map.keys.inspect end h[k.to_sym] = v } if attributes.key?(:'host') self.host = attributes[:'host'] end if attributes.key?(:'http_headers') if (value = attributes[:'http_headers']).is_a?(Array) self.http_headers = value end end if attributes.key?(:'path') self.path = attributes[:'path'] end if attributes.key?(:'port') self.port = attributes[:'port'] end if attributes.key?(:'scheme') self.scheme = attributes[:'scheme'] end end # Show invalid properties with the reasons. Usually used together with valid? # @return Array for valid properties with the reasons def list_invalid_properties invalid_properties = Array.new if @port.nil? invalid_properties.push('invalid value for "port", port cannot be nil.') end invalid_properties end # Check to see if the all the properties in the model are valid # @return true if the model is valid def valid? return false if @port.nil? true end # Checks equality by comparing each attribute. # @param [Object] Object to be compared def ==(o) return true if self.equal?(o) self.class == o.class && host == o.host && http_headers == o.http_headers && path == o.path && port == o.port && scheme == o.scheme end # @see the `==` method # @param [Object] Object to be compared def eql?(o) self == o end # Calculates hash code according to all attributes. 
# @return [Integer] Hash code def hash [host, http_headers, path, port, scheme].hash end # Builds the object from hash # @param [Hash] attributes Model attributes in the form of hash # @return [Object] Returns the model itself def self.build_from_hash(attributes) new.build_from_hash(attributes) end # Builds the object from hash # @param [Hash] attributes Model attributes in the form of hash # @return [Object] Returns the model itself def build_from_hash(attributes) return nil unless attributes.is_a?(Hash) self.class.openapi_types.each_pair do |key, type| if attributes[self.class.attribute_map[key]].nil? && self.class.openapi_nullable.include?(key) self.send("#{key}=", nil) elsif type =~ /\AArray<(.*)>/i # check to ensure the input is an array given that the attribute # is documented as an array but the input is not if attributes[self.class.attribute_map[key]].is_a?(Array) self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) }) end elsif !attributes[self.class.attribute_map[key]].nil? self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]])) end end self end # Deserializes the data based on type # @param string type Data type # @param string value Value to be deserialized # @return [Object] Deserialized data def _deserialize(type, value) case type.to_sym when :Time Time.parse(value) when :Date Date.parse(value) when :String value.to_s when :Integer value.to_i when :Float value.to_f when :Boolean if value.to_s =~ /\A(true|t|yes|y|1)\z/i true else false end when :Object # generic object (usually a Hash), return directly value when /\AArray<(?<inner_type>.+)>\z/ inner_type = Regexp.last_match[:inner_type] value.map { |v| _deserialize(inner_type, v) } when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/ k_type = Regexp.last_match[:k_type] v_type = Regexp.last_match[:v_type] {}.tap do |hash| value.each do |k, v| hash[_deserialize(k_type, k)] = _deserialize(v_type, v) end end else # model # models (e.g. Pet) or oneOf klass = ArgoWorkflows.const_get(type) klass.respond_to?(:openapi_one_of) ? klass.build(value) : klass.build_from_hash(value) end end # Returns the string representation of the object # @return [String] String presentation of the object def to_s to_hash.to_s end # to_body is an alias to to_hash (backward compatibility) # @return [Hash] Returns the object in the form of hash def to_body to_hash end # Returns the object in the form of hash # @return [Hash] Returns the object in the form of hash def to_hash hash = {} self.class.attribute_map.each_pair do |attr, param| value = self.send(attr) if value.nil? is_nullable = self.class.openapi_nullable.include?(attr) next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) end hash[param] = _to_hash(value) end hash end # Outputs non-array value in the form of hash # For object, use to_hash. Otherwise, just return the value # @param [Object] value Any valid value # @return [Hash] Returns the value in the form of hash def _to_hash(value) if value.is_a?(Array) value.compact.map { |v| _to_hash(v) } elsif value.is_a?(Hash) {}.tap do |hash| value.each { |k, v| hash[k] = _to_hash(v) } end elsif value.respond_to? :to_hash value.to_hash else value end end end end
29.947566
206
0.617434
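A minimal usage sketch for the generated ArgoWorkflows::HTTPGetAction model above; the require name and the example values are assumptions rather than part of the original file:

require 'argo_workflows'  # assumed require name for the generated client gem

# Build a probe definition; `port` is the only attribute `valid?` checks for.
probe = ArgoWorkflows::HTTPGetAction.new(
  port:         '8080',
  path:         '/healthz',
  scheme:       'HTTP',
  http_headers: []
)

probe.valid?  # => true
probe.to_hash # => {:httpHeaders=>[], :path=>"/healthz", :port=>"8080", :scheme=>"HTTP"}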
bbc2b48b8567e24813d0305c871df7a22d96b545
617
# frozen_string_literal: true

class NotePolicy < BasePolicy
  delegate { @subject.project }
  delegate { @subject.noteable if DeclarativePolicy.has_policy?(@subject.noteable) }

  condition(:is_author) { @user && @subject.author == @user }

  condition(:is_noteable_author) { @user && @subject.noteable.author_id == @user.id }

  condition(:editable, scope: :subject) { @subject.editable? }

  rule { ~editable }.prevent :admin_note

  rule { is_author }.policy do
    enable :read_note
    enable :admin_note
    enable :resolve_note
  end

  rule { is_noteable_author }.policy do
    enable :resolve_note
  end
end
25.708333
85
0.711507
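A hedged sketch of how a DeclarativePolicy class like the one above is typically exercised; `current_user` and `note` are placeholders, and the `allowed?` call is assumed from the declarative_policy gem's usual API:

policy = NotePolicy.new(current_user, note)

policy.allowed?(:admin_note)   # denied when the note is not editable, regardless of authorship
policy.allowed?(:resolve_note) # enabled for the note's author or the noteable's author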
1aa4129380e93cf41ded24129f7f9ea61b6ba4c6
1,018
module Telapi
  # Wraps TelAPI Available Phone Number functionality
  class AvailablePhoneNumber < Resource
    class << self
      # Returns a resource collection containing Telapi::AvailablePhoneNumber objects
      # See http://www.telapi.com/docs/api/rest/available-phone-numbers/list/
      #
      # Required params:
      # +country_code+:: country code (ISO), see http://en.wikipedia.org/wiki/ISO_3166-1_alpha-2
      # +num_type+:: Local or TollFree
      #
      # Optional params are given as a hash containing:
      # +AreaCode+:: valid area code, e.g. 415
      # +Contains+:: 0-9, lower and upper case letters of the alphabet (Aa-Zz), or *
      # +InRegion+:: valid region (state or province) abbreviation
      # +InPostalCode+:: valid postal code
      def list(country_code, num_type, optional_params = {})
        response = Network.get(['AvailablePhoneNumbers', country_code, num_type], optional_params)
        ResourceCollection.new(response, 'available_phone_numbers', self)
      end
    end
  end
end
40.72
98
0.680747
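A usage sketch for the `list` method documented above; the gem's credential setup is assumed to happen elsewhere, and the optional keys mirror the doc comment:

numbers = Telapi::AvailablePhoneNumber.list('US', 'Local', 'AreaCode' => 415)
# `numbers` is a ResourceCollection of Telapi::AvailablePhoneNumber objects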
f8b26155701c1054bef8e732ccc5b90cd9d2c705
1,995
# Encoding: utf-8
#
# This is auto-generated code, changes will be overwritten.
#
# Copyright:: Copyright 2021, Google Inc. All Rights Reserved.
# License:: Licensed under the Apache License, Version 2.0.
#
# Code generated by AdsCommon library 1.0.3 on 2021-08-11 23:14:51.

require 'ads_common/savon_service'
require 'ad_manager_api/v202108/dai_encoding_profile_service_registry'

module AdManagerApi; module V202108; module DaiEncodingProfileService

  class DaiEncodingProfileService < AdsCommon::SavonService
    def initialize(config, endpoint)
      namespace = 'https://www.google.com/apis/ads/publisher/v202108'
      super(config, endpoint, namespace, :v202108)
    end

    def create_dai_encoding_profiles(*args, &block)
      return execute_action('create_dai_encoding_profiles', args, &block)
    end

    def create_dai_encoding_profiles_to_xml(*args)
      return get_soap_xml('create_dai_encoding_profiles', args)
    end

    def get_dai_encoding_profiles_by_statement(*args, &block)
      return execute_action('get_dai_encoding_profiles_by_statement', args, &block)
    end

    def get_dai_encoding_profiles_by_statement_to_xml(*args)
      return get_soap_xml('get_dai_encoding_profiles_by_statement', args)
    end

    def perform_dai_encoding_profile_action(*args, &block)
      return execute_action('perform_dai_encoding_profile_action', args, &block)
    end

    def perform_dai_encoding_profile_action_to_xml(*args)
      return get_soap_xml('perform_dai_encoding_profile_action', args)
    end

    def update_dai_encoding_profiles(*args, &block)
      return execute_action('update_dai_encoding_profiles', args, &block)
    end

    def update_dai_encoding_profiles_to_xml(*args)
      return get_soap_xml('update_dai_encoding_profiles', args)
    end

    private

    def get_service_registry()
      return DaiEncodingProfileServiceRegistry
    end

    def get_module()
      return AdManagerApi::V202108::DaiEncodingProfileService
    end
  end
end; end; end
31.666667
83
0.762907
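A sketch of driving this SOAP wrapper through the ad_manager_api client; the `AdManagerApi::Api.new`, `service`, and statement-builder calls are assumed from that library's usual usage pattern and are not part of the record above:

require 'ad_manager_api'

ad_manager = AdManagerApi::Api.new
service = ad_manager.service(:DaiEncodingProfileService, :v202108)

statement = ad_manager.new_statement_builder
page = service.get_dai_encoding_profiles_by_statement(statement.to_statement)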
e85ccecc92b8331b9555a6ce7b0764ba163b7c19
1,637
# frozen_string_literal: true

require "liquid"
require "jekyll_plugin_logger"
require_relative "jekyll_context_inspector/version"

module JekyllPlubinContextInspectorName
  PLUGIN_NAME = "context_inspector"
end

class ContextInspector < Liquid::Tag
  def render(context)
    @logger = PluginMetaLogger.instance.new_logger(self, PluginMetaLogger.instance.config)
    site = context.registers[:site]
    inspector_enabled = site.config["context_inspector"]
    return if inspector_enabled.nil? || !inspector_enabled

    mode = site.config["env"]["JEKYLL_ENV"]
    return unless inspector_enabled == "force" || mode == "development"

    dump_info(context)
  end

  private

  def dump_info(context)
    page = context.registers[:page]
    @logger.info do
      key_value_pairs = context.registers.map do |key, value|
        "  <code>#{key}</code> has a value with type <code>#{value.class}</code>"
      end
      vars = page.keys.sort.join("</code>, <code>")
      <<~END_MESSAGE
        context for #{page.path} is of type #{context.class}.
        context.registers for #{page.path} contains the following key/value pairs:
        #{key_value_pairs.join("\n")}
        #{JekyllPlubinContextInspectorName::PLUGIN_NAME}: #{page.path} contains the following key/value pairs:
        <p class='info'>Jekyll variables for this page are: <code>#{vars}</code></p>
      END_MESSAGE
    end
  end
end

Liquid::Template.register_tag(JekyllPlubinContextInspectorName::PLUGIN_NAME, ContextInspector)

PluginMetaLogger.instance.info { "Loaded #{JekyllPlubinContextInspectorName::PLUGIN_NAME} v#{JekyllContextInspectorVersion::VERSION} plugin." }
34.829787
143
0.718387
d566ed54d1ce412d16cec82e889b212e6cbf303d
22,241
# frozen_string_literal: true require_relative "bootstrap_options" module ComfyBootstrapForm class FormBuilder < ActionView::Helpers::FormBuilder FIELD_HELPERS = %w[ color_field date_field datetime_field email_field month_field password_field phone_field range_field search_field text_area text_field rich_text_area time_field url_field week_field ].freeze DATE_SELECT_HELPERS = %w[ date_select datetime_select time_select ].freeze delegate :content_tag, :capture, :concat, to: :@template # Bootstrap settings set on the form itself attr_accessor :form_bootstrap def initialize(object_name, object, template, options) @form_bootstrap = ComfyBootstrapForm::BootstrapOptions.new(options.delete(:bootstrap)) super(object_name, object, template, options) end # Wrapper for all field helpers. Example usage: # # bootstrap_form_with model: @user do |form| # form.text_field :name # end # # Output of the `text_field` will be wrapped in Bootstrap markup # FIELD_HELPERS.each do |field_helper| class_eval <<-RUBY_EVAL, __FILE__, __LINE__ + 1 def #{field_helper}(method, options = {}) bootstrap = form_bootstrap.scoped(options.delete(:bootstrap)) return super if bootstrap.disabled draw_form_group(bootstrap, method, options) do super(method, options) end end RUBY_EVAL end # Wrapper for datetime select helpers. Boostrap options are sent via options hash: # # date_select :birthday, bootstrap: {label: {text: "Custom"}} # DATE_SELECT_HELPERS.each do |select_helper| class_eval <<-RUBY_EVAL, __FILE__, __LINE__ + 1 def #{select_helper}(method, options = {}, html_options = {}, &block) bootstrap = form_bootstrap.scoped(options.delete(:bootstrap)) return super if bootstrap.disabled add_css_class!(html_options, "d-inline-block w-auto") add_css_class!(html_options, "custom-select") if bootstrap.custom_control draw_form_group(bootstrap, method, html_options) do content_tag(:div, class: "#{select_helper}") do super(method, options, html_options, &block) end end end RUBY_EVAL end # Wrapper for the number field. It has default changed from `step: "1"` to `step: "any"` # to prevent confusion when dealing with decimal numbers. # # number_field :amount, step: 5 # def number_field(method, options = {}) bootstrap = form_bootstrap.scoped(options.delete(:bootstrap)) options.reverse_merge!(step: "any") return super(method, options) if bootstrap.disabled draw_form_group(bootstrap, method, options) do super(method, options) end end # Wrapper for select helper. Boostrap options are sent via options hash: # # select :choices, ["a", "b"], bootstrap: {label: {text: "Custom"}} # def select(method, choices = nil, options = {}, html_options = {}, &block) bootstrap = form_bootstrap.scoped(options.delete(:bootstrap)) return super if bootstrap.disabled add_css_class!(html_options, "custom-select") if bootstrap.custom_control draw_form_group(bootstrap, method, html_options, true) do super(method, choices, options, html_options, &block) end end # Wrapper for collection_select helper. 
Boostrap options are sent via options hash: # # collection_select :collection, [["a", "aa"], ["b", "bb"]], :first, :last, bootstrap: {label: {text: "Custom"}} # def collection_select(method, collection, value_method, text_method, options = {}, html_options = {}) bootstrap = form_bootstrap.scoped(options.delete(:bootstrap)) return super if bootstrap.disabled add_css_class!(html_options, "custom-select") if bootstrap.custom_control draw_form_group(bootstrap, method, html_options, true) do super(method, collection, value_method, text_method, options, html_options) end end # Wrapper for file_field helper. It can accept `custom_control` option. # # file_field :photo, bootstrap: {custom_control: true} # def file_field(method, options = {}) bootstrap = form_bootstrap.scoped(options.delete(:bootstrap)) return super if bootstrap.disabled draw_form_group(bootstrap, method, options) do if bootstrap.custom_control content_tag(:div, class: "custom-file") do form_group_class += " form-floating" if bootstrap.floating? add_css_class!(options, "custom-file-input") remove_css_class!(options, "form-control") label_text = options.delete(:placeholder) concat super(method, options) label_options = { class: "custom-file-label" } label_options[:for] = options[:id] if options[:id].present? concat label(method, label_text, label_options) end else super(method, options) end end end # Wrapper around radio button. Example usage: # # radio_button :choice, "value", bootstrap: {label: {text: "Do you agree?"}} # def radio_button(method, tag_value, options = {}) bootstrap = form_bootstrap.scoped(options.delete(:bootstrap)) return super if bootstrap.disabled help_text = draw_help(bootstrap) errors = draw_errors(bootstrap, method) add_css_class!(options, "form-check-input") add_css_class!(options, "is-invalid") if errors.present? label_text = nil if (custom_text = bootstrap.label[:text]).present? label_text = custom_text end fieldset_css_class = "form-group" fieldset_css_class += " row" if bootstrap.horizontal? fieldset_css_class += " #{bootstrap.inline_margin_class}" if bootstrap.inline? content_tag(:fieldset, class: fieldset_css_class) do draw_control_column(bootstrap, offset: true) do if bootstrap.custom_control content_tag(:div, class: "custom-control custom-radio") do add_css_class!(options, "custom-control-input") remove_css_class!(options, "form-check-input") concat super(method, tag_value, options) concat label(method, label_text, value: tag_value, class: "custom-control-label") concat errors if errors.present? concat help_text if help_text.present? end else content_tag(:div, class: "form-check") do concat super(method, tag_value, options) concat label(method, label_text, value: tag_value, class: "form-check-label") concat errors if errors.present? concat help_text if help_text.present? end end end end end # Wrapper around checkbox. Example usage: # # checkbox :agree, bootstrap: {label: {text: "Do you agree?"}} # def check_box(method, options = {}, checked_value = "1", unchecked_value = "0") bootstrap = form_bootstrap.scoped(options.delete(:bootstrap)) return super if bootstrap.disabled help_text = draw_help(bootstrap) errors = draw_errors(bootstrap, method) add_css_class!(options, "form-check-input") add_css_class!(options, "is-invalid") if errors.present? label_text = nil if (custom_text = bootstrap.label[:text]).present? label_text = custom_text end fieldset_css_class = "form-group" fieldset_css_class += " row" if bootstrap.horizontal? fieldset_css_class += " #{bootstrap.inline_margin_class}" if bootstrap.inline? 
content_tag(:fieldset, class: fieldset_css_class) do draw_control_column(bootstrap, offset: true) do if bootstrap.custom_control content_tag(:div, class: "custom-control custom-checkbox") do add_css_class!(options, "custom-control-input") remove_css_class!(options, "form-check-input") concat super(method, options, checked_value, unchecked_value) concat label(method, label_text, class: "custom-control-label") concat errors if errors.present? concat help_text if help_text.present? end else content_tag(:div, class: "form-check") do concat super(method, options, checked_value, unchecked_value) concat label(method, label_text, class: "form-check-label") concat errors if errors.present? concat help_text if help_text.present? end end end end end # Helper to generate multiple radio buttons. Example usage: # # collection_radio_buttons :choices, ["a", "b"], :to_s, :to_s %> # collection_radio_buttons :choices, [["a", "Label A"], ["b", "Label B"]], :first, :second # collection_radio_buttons :choices, Choice.all, :id, :label # # Takes bootstrap options: # inline: true - to render inputs inline # label: {text: "Custom"} - to specify a label # label: {hide: true} - to not render label at all # def collection_radio_buttons(method, collection, value_method, text_method, options = {}, html_options = {}) bootstrap = form_bootstrap.scoped(options.delete(:bootstrap)) return super if bootstrap.disabled args = [bootstrap, :radio_button, method, collection, value_method, text_method, options, html_options] draw_choices(*args) do |m, v, opts| radio_button(m, v, opts.merge(bootstrap: { disabled: true })) end end # Helper to generate multiple checkboxes. Same options as for radio buttons. # Example usage: # # collection_check_boxes :choices, Choice.all, :id, :label # def collection_check_boxes(method, collection, value_method, text_method, options = {}, html_options = {}) bootstrap = form_bootstrap.scoped(options.delete(:bootstrap)) return super if bootstrap.disabled content = "".html_safe unless options[:include_hidden] == false content << hidden_field(method, multiple: true, value: "") end args = [bootstrap, :check_box, method, collection, value_method, text_method, options, html_options] content << draw_choices(*args) do |m, v, opts| opts[:multiple] = true opts[:include_hidden] = false check_box(m, opts.merge(bootstrap: { disabled: true }), v) end end # Bootstrap wrapper for readonly text field that is shown as plain text. # # plaintext(:value) # def plaintext(method, options = {}) bootstrap = form_bootstrap.scoped(options.delete(:bootstrap)) draw_form_group(bootstrap, method, options) do remove_css_class!(options, "form-control") add_css_class!(options, "form-control-plaintext") options[:readonly] = true ActionView::Helpers::FormBuilder.instance_method(:text_field).bind(self).call(method, options) end end # Add bootstrap formatted submit button. If you need to change its type or # add another css class, you need to override all css classes like so: # # submit(class: "btn btn-info custom-class") # # You may add additional content that directly follows the button. Here's # an example of a cancel link: # # submit do # link_to("Cancel", "/", class: "btn btn-link") # end # def submit(value = nil, options = {}, &block) if value.is_a?(Hash) options = value value = nil end bootstrap = form_bootstrap.scoped(options.delete(:bootstrap)) return super if bootstrap.disabled add_css_class!(options, "btn") form_group_class = "form-group" form_group_class += " row" if bootstrap.horizontal? 
content_tag(:div, class: form_group_class) do draw_control_column(bootstrap, offset: true) do out = super(value, options) out << capture(&block) if block_given? out end end end # Same as submit button, only with btn-primary class added def primary(value = nil, options = {}, &block) add_css_class!(options, "btn-primary") submit(value, options, &block) end # Helper method to put arbitrary content in markup that renders correctly # for the Bootstrap form. Example: # # form_group bootstrap: {label: {text: "Label"}} do # "Some content" # end # def form_group(options = {}) bootstrap = form_bootstrap.scoped(options.delete(:bootstrap)) label_options = bootstrap.label.clone label_text = label_options.delete(:text) label = if label_text.present? if bootstrap.horizontal? add_css_class!(label_options, "col-form-label") add_css_class!(label_options, bootstrap.label_col_class) add_css_class!(label_options, bootstrap.label_align_class) elsif bootstrap.inline? add_css_class!(label_options, bootstrap.inline_margin_class) end content_tag(:label, label_text, label_options) end form_group_class = "form-group" form_group_class += " row" if bootstrap.horizontal? form_group_class += " mr-sm-2" if bootstrap.inline? content_tag(:div, class: form_group_class) do content = "".html_safe content << label if label.present? content << draw_control_column(bootstrap, offset: label.blank?) do yield end end end private # form group wrapper for input fields def draw_form_group(bootstrap, method, options, select = false) label = draw_label(bootstrap, method, for_attr: options[:id]) errors = draw_errors(bootstrap, method) control = draw_control(bootstrap, errors, method, options, select) do yield end form_group_class = "form-group" form_group_class += " row" if bootstrap.horizontal? form_group_class += " mr-sm-2" if bootstrap.inline? form_group_class += " form-floating" if bootstrap.floating content_tag(:div, class: form_group_class) do if bootstrap.floating concat control concat label else concat label concat control end end end def draw_errors(bootstrap, method) errors = [] if bootstrap.error.present? errors = [bootstrap.error] else return if object.nil? errors = object.errors[method] # If error is on association like `belongs_to :foo`, we need to render it # on an input field with `:foo_id` name. if errors.blank? errors = object.errors[method.to_s.sub(%r{_id$}, "")] end end return if errors.blank? content_tag(:div, class: "invalid-feedback") do errors.join(", ") end end # Renders label for a given field. Takes following bootstrap options: # # :text - replace default label text # :class - css class on the label # :hide - if `true` will render for screen readers only # # This is how those options can be passed in: # # text_field(:value, bootstrap: {label: {text: "Custom", class: "custom"}}) # # You may also just set the label text by passing a string instead of label hash: # # text_field(:value, bootstrap: {label: "Custom Label"}) # def draw_label(bootstrap, method, for_attr: nil) options = bootstrap.label.dup text = options.delete(:text) options[:for] = for_attr if for_attr.present? add_css_class!(options, "sr-only") if options.delete(:hide) add_css_class!(options, bootstrap.inline_margin_class) if bootstrap.inline? if bootstrap.horizontal? 
add_css_class!(options, "col-form-label") add_css_class!(options, bootstrap.label_col_class) add_css_class!(options, bootstrap.label_align_class) end label(method, text, options) end # Renders control for a given field def draw_control(bootstrap, errors, _method, options, select) add_css_class!(options, "form-control") unless select add_css_class!(options, "form-select") if select add_css_class!(options, "is-invalid") if errors.present? draw_control_column(bootstrap, offset: bootstrap.label[:hide]) do draw_input_group(bootstrap, errors) do yield end end end # Wrapping in control in column wrapper # def draw_control_column(bootstrap, offset:) return yield unless bootstrap.horizontal? css_class = bootstrap.control_col_class.to_s css_class += " #{bootstrap.offset_col_class}" if offset content_tag(:div, class: css_class) do yield end end # Wraps input field in input group container that allows prepending and # appending text or html. Example: # # text_field(:value, bootstrap: {prepend: "$.$$"}}) # text_field(:value, bootstrap: {append: {html: "<button>Go</button>"}}}) # def draw_input_group(bootstrap, errors, &block) prepend_html = draw_input_group_content(bootstrap, :prepend) append_html = draw_input_group_content(bootstrap, :append) help_text = draw_help(bootstrap) # Not prepending or appending anything. Bail. if prepend_html.blank? && append_html.blank? content = capture(&block) content << errors if errors.present? content << help_text if help_text.present? return content end content = "".html_safe content << content_tag(:div, class: "input-group") do concat prepend_html if prepend_html.present? concat capture(&block) concat append_html if append_html.present? concat errors if errors.present? end content << help_text if help_text.present? content end def draw_input_group_content(bootstrap, type) value = bootstrap.send(type) return unless value.present? content_tag(:div, class: "input-group-#{type}") do if value.is_a?(Hash) && value[:html].present? value[:html] else content_tag(:span, value, class: "input-group-text") end end end # Drawing boostrap form field help text. Example usage: # # text_field(:value, bootstrap: {help: "help text"}) # def draw_help(bootstrap) text = bootstrap.help return if text.blank? content_tag(:small, text, class: "form-text text-muted") end # Rendering of choices for checkboxes and radio buttons def draw_choices(bootstrap, type, method, collection, value_method, text_method, _options, html_options) draw_form_group_fieldset(bootstrap, method) do if bootstrap.custom_control label_css_class = "custom-control-label" form_check_css_class = "custom-control" form_check_css_class += case type when :radio_button then " custom-radio" when :check_box then " custom-checkbox" end form_check_css_class += " custom-control-inline" if bootstrap.check_inline add_css_class!(html_options, "custom-control-input") else label_css_class = "form-check-label" form_check_css_class = "form-check" form_check_css_class += " form-check-inline" if bootstrap.check_inline add_css_class!(html_options, "form-check-input") end errors = draw_errors(bootstrap, method) help_text = draw_help(bootstrap) add_css_class!(html_options, "is-invalid") if errors.present? 
content = "".html_safe collection.each_with_index do |item, index| item_value = item.send(value_method) item_text = item.send(text_method) content << content_tag(:div, class: form_check_css_class) do concat yield method, item_value, html_options concat label(method, item_text, value: item_value, class: label_css_class) if ((collection.count - 1) == index) && !bootstrap.check_inline concat errors if errors.present? concat help_text if help_text.present? end end end if bootstrap.check_inline content << errors if errors.present? content << help_text if help_text.present? end content end end # Wrapper for collections of radio buttons and checkboxes def draw_form_group_fieldset(bootstrap, method) options = {} unless bootstrap.label[:hide] label_text = bootstrap.label[:text] label_text ||= ActionView::Helpers::Tags::Label::LabelBuilder .new(@template, @object_name.to_s, method, @object, nil).translation add_css_class!(options, "col-form-label pt-0") add_css_class!(options, bootstrap.label[:class]) if bootstrap.horizontal? add_css_class!(options, bootstrap.label_col_class) add_css_class!(options, bootstrap.label_align_class) end label = content_tag(:legend, options) do label_text end end content_tag(:fieldset, class: "form-group") do content = "".html_safe content << label if label.present? content << draw_control_column(bootstrap, offset: bootstrap.label[:hide]) do yield end if bootstrap.horizontal? content_tag(:div, content, class: "row") else content end end end def add_css_class!(options, string) css_class = [options[:class], string].compact.join(" ") options[:class] = css_class if css_class.present? end def remove_css_class!(options, string) css_class = options[:class].to_s.split(" ") options[:class] = (css_class - [string]).compact.join(" ") options.delete(:class) if options[:class].blank? end end end
34.805947
118
0.639989
79a35dd8a67e2f5524cb3d6f02a4dde7bc7d6fa1
2,579
# This file is copied to spec/ when you run 'rails generate rspec:install'
ENV['RAILS_ENV'] ||= 'test'
require File.expand_path('../dummy/config/environment', __FILE__)
# Prevent database truncation if the environment is production
abort('The Rails environment is running in production mode!') if Rails.env.production?
require 'spec_helper'
require 'rspec/rails'
# Add additional requires below this line. Rails is not loaded until this point!

# Requires supporting ruby files with custom matchers and macros, etc, in
# spec/support/ and its subdirectories. Files matching `spec/**/*_spec.rb` are
# run as spec files by default. This means that files in spec/support that end
# in _spec.rb will both be required and run as specs, causing the specs to be
# run twice. It is recommended that you do not name files matching this glob to
# end with _spec.rb. You can configure this pattern with the --pattern
# option on the command line or in ~/.rspec, .rspec or `.rspec-local`.
#
# The following line is provided for convenience purposes. It has the downside
# of increasing the boot-up time by auto-requiring all files in the support
# directory. Alternatively, in the individual `*_spec.rb` files, manually
# require only the support files necessary.
#
# Dir[Ecm::Links::Engine.root.join('spec/support/**/*.rb')].each { |f| require f }

# Checks for pending migrations before tests are run.
# If you are not using ActiveRecord, you can remove this line.
# ActiveRecord::Migration.maintain_test_schema!

RSpec.configure do |config|
  # Remove this line if you're not using ActiveRecord or ActiveRecord fixtures
  # config.fixture_path = "#{::Rails.root}/spec/fixtures"

  # If you're not using ActiveRecord, or you'd prefer not to run each of your
  # examples within a transaction, remove the following line or assign false
  # instead of true.
  config.use_transactional_fixtures = true

  # RSpec Rails can automatically mix in different behaviours to your tests
  # based on their file location, for example enabling you to call `get` and
  # `post` in specs under `spec/controllers`.
  #
  # You can disable this behaviour by removing the line below, and instead
  # explicitly tag your specs with their type, e.g.:
  #
  #     RSpec.describe UsersController, :type => :controller do
  #       # ...
  #     end
  #
  # The different available types are documented in the features, such as in
  # https://relishapp.com/rspec/rspec-rails/docs
  config.infer_spec_type_from_file_location!
end

require_relative 'dummy/app/admin/dashboard'
require_relative 'dummy/config/routes'
46.053571
86
0.751454
f8a2302f99bb1e388be76afd5112bc523137e966
41
module Rendering
  VERSION = '0.1.0'
end
10.25
19
0.682927
088143d61bddf2889003f10cacf5923c8dd41ebe
107
class Class
  def descendants
    ObjectSpace.each_object(Class).select { |klass| klass < self }
  end
end
17.833333
66
0.719626
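A quick, runnable demonstration of the monkey patch above; the example classes are made up for illustration:

class Animal; end
class Dog < Animal; end
class Puppy < Dog; end

Animal.descendants.sort_by(&:name) # => [Dog, Puppy]
Dog.descendants                    # => [Puppy]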
f703e99f399b020c56164da23ccbebc17737cdc9
777
module DockerRails
  class Application
    def version
      '1.12.0'
    end

    def build_time
      @build_time ||= ENV.fetch('COMMIT_TIME', Time.current).to_datetime
    end

    def alpine_release
      `cat /etc/alpine-release 2>/dev/null`.chomp
    end

    def ruby_version
      "#{RUBY_VERSION} patchlevel #{RUBY_PATCHLEVEL}"
    end

    def rubygems_version
      Gem::VERSION
    end

    def bundler_version
      Bundler::VERSION
    end

    def rails_version
      Rails::VERSION::STRING
    end

    def redis_version
      Sidekiq.redis_info['redis_version']
    end

    def postgresql_version
      ActiveRecord::Base.connection.select_value('SHOW server_version;')
    end

    def elasticsearch_version
      Searchkick.server_version
    end
  end
end
17.659091
72
0.661519
33e5d692ef7499bfc1af25c676183b9faab9d748
562
require 'test_helper'
require 'reports/size'

module Reports
  class SizeTest < ActiveSupport::TestCase
    def setup()
      @site = Site.create!({:name => 'Test', :url => 'http://example.com', :email_address => '[email protected]', :verified => true})
      @email = @site.emails.create!({:message => Mail::Message.new().encoded()})
    end

    test "create" do
      Reports::Size.new().create(@email)
      @email.reload()

      report = @email.reports.take!()
      assert_equal 'Reports::Size', report.key
      assert report.value > 0
    end
  end
end
25.545455
132
0.622776
ac0bdc58608c10acafc6c615b22e30b2df887142
708
require 'task_helper'

namespace :app do
  def config
    Rails.configuration.app
  end

  desc "Import downloaded NPM package names to our database as `pending`"
  task :npm_file_import => :environment do |t|
    data = JSON.parse File.read(config.npm.filename)

    Task.new(data.size) do |progress|
      data.each do |name|
        begin
          Package.find_or_create_by(name: name)
          progress.increment! 1
        rescue SystemExit, Interrupt, Octokit::RateLimit
          JsCoach.warn "Task interrupted!"
          exit
        rescue => e
          JsCoach.error e.to_s
          ExceptionNotifier.notify_exception(e) # Send backtrace
          exit
        end
      end
    end
  end
end
24.413793
73
0.632768
bbc3a01f45fde3b7c599a8ca41e8464fb49374ce
3,361
class VCAP::CloudController::Permissions
  ROLES_FOR_ORG_READING ||= [
    VCAP::CloudController::Membership::ORG_MANAGER,
    VCAP::CloudController::Membership::ORG_AUDITOR,
    VCAP::CloudController::Membership::ORG_MEMBER,
    VCAP::CloudController::Membership::ORG_BILLING_MANAGER,
  ].freeze

  ROLES_FOR_ORG_WRITING = [
    VCAP::CloudController::Membership::ORG_MANAGER,
  ].freeze

  ROLES_FOR_READING ||= [
    VCAP::CloudController::Membership::SPACE_DEVELOPER,
    VCAP::CloudController::Membership::SPACE_MANAGER,
    VCAP::CloudController::Membership::SPACE_AUDITOR,
    VCAP::CloudController::Membership::ORG_MANAGER,
  ].freeze

  ROLES_FOR_SECRETS ||= [
    VCAP::CloudController::Membership::SPACE_DEVELOPER,
  ].freeze

  ROLES_FOR_WRITING ||= [
    VCAP::CloudController::Membership::SPACE_DEVELOPER,
  ].freeze

  def initialize(user)
    @user = user
  end

  def can_read_globally?
    roles.admin? || roles.admin_read_only? || roles.global_auditor?
  end

  def can_read_secrets_globally?
    roles.admin? || roles.admin_read_only?
  end

  def can_write_globally?
    roles.admin?
  end

  def readable_org_guids
    if can_read_globally?
      VCAP::CloudController::Organization.select(:guid).all.map(&:guid)
    else
      membership.org_guids_for_roles(ROLES_FOR_ORG_READING)
    end
  end

  def can_read_from_org?(org_guid)
    can_read_globally? || membership.has_any_roles?(ROLES_FOR_ORG_READING, nil, org_guid)
  end

  def can_write_to_org?(org_guid)
    can_write_globally? || membership.has_any_roles?(ROLES_FOR_ORG_WRITING, nil, org_guid)
  end

  def readable_space_guids
    if can_read_globally?
      VCAP::CloudController::Space.select(:guid).all.map(&:guid)
    else
      membership.space_guids_for_roles(ROLES_FOR_READING)
    end
  end

  def can_read_from_space?(space_guid, org_guid)
    can_read_globally? || membership.has_any_roles?(ROLES_FOR_READING, space_guid, org_guid)
  end

  def can_read_secrets_in_space?(space_guid, org_guid)
    can_read_secrets_globally? ||
      membership.has_any_roles?(ROLES_FOR_SECRETS, space_guid, org_guid)
  end

  def can_write_to_space?(space_guid)
    can_write_globally? || membership.has_any_roles?(ROLES_FOR_WRITING, space_guid)
  end

  def can_read_from_isolation_segment?(isolation_segment)
    can_read_globally? ||
      isolation_segment.spaces.any? { |space| can_read_from_space?(space.guid, space.organization.guid) } ||
      isolation_segment.organizations.any? { |org| can_read_from_org?(org.guid) }
  end

  def readable_route_guids
    VCAP::CloudController::Route.user_visible(@user, can_read_globally?).map(&:guid)
  end

  def can_read_route?(space_guid, org_guid)
    return true if can_read_globally?

    space = VCAP::CloudController::Space.where(guid: space_guid).first
    org = space.organization

    space.has_member?(@user) || @user.managed_organizations.include?(org) ||
      @user.audited_organizations.include?(org)
  end

  def readable_app_guids
    VCAP::CloudController::AppModel.user_visible(@user, can_read_globally?).map(&:guid)
  end

  def readable_route_mapping_guids
    VCAP::CloudController::RouteMappingModel.user_visible(@user, can_read_globally?).map(&:guid)
  end

  private

  def membership
    VCAP::CloudController::Membership.new(@user)
  end

  def roles
    VCAP::CloudController::SecurityContext.roles
  end
end
28.243697
108
0.747397
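A short sketch of how request-handling code typically consults the permissions object defined above; `current_user`, `org`, and `space` are placeholders, not values from the record:

permissions = VCAP::CloudController::Permissions.new(current_user)

permissions.can_read_globally?                               # admin, admin_read_only, or global_auditor
permissions.can_read_from_org?(org.guid)                     # any role listed in ROLES_FOR_ORG_READING
permissions.can_read_secrets_in_space?(space.guid, org.guid) # space developers, or global secret readers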
bf2c87caee4eed5b6ac266eeb40191fec49c1a92
1,930
qc = QueryCategory.find_by(name: 'Life Cycle (Performance Restriction)')

# create period view
view_sql = <<-SQL
  CREATE OR REPLACE VIEW track_most_recent_performance_restrictions_period_view AS
    SELECT mrae.asset_event_id, mrae.base_transam_asset_id,
      IF(ae.period_length IS NULL, "Until Removed", "Set Length") AS period,
      ae.period_length, ae.period_length_unit
    FROM query_tool_most_recent_asset_events_for_type_view AS mrae
    LEFT JOIN asset_events AS ae ON ae.id = mrae.asset_event_id
    LEFT JOIN transam_assets AS tma ON mrae.base_transam_asset_id = tma.id
    LEFT JOIN transit_assets AS tta ON tta.id = tma.transam_assetible_id AND tma.transam_assetible_type = 'TransitAsset'
    LEFT JOIN fta_asset_classes AS fac ON fac.id = tta.fta_asset_class_id
    WHERE ae.id = mrae.asset_event_id
      AND fac.name = 'Track'
      AND mrae.asset_event_name = 'Performance restrictions';
SQL
ActiveRecord::Base.connection.execute view_sql

data_table = QueryAssetClass.find_or_create_by(
  table_name: 'track_most_recent_performance_restrictions_period_view',
  transam_assets_join: "LEFT JOIN track_most_recent_performance_restrictions_period_view on track_most_recent_performance_restrictions_period_view.base_transam_asset_id = transam_assets.id"
)

# create query fields
fields = [
  { name: 'period', label: 'Period', filter_type: 'multi_select' },
  { name: 'period_length', label: 'Period of Time', filter_type: 'numeric', pairs_with: 'period_length_unit' },
  { name: 'period_length_unit', label: 'Unit', filter_type: 'text', hidden: true }
]

fields.each do |f|
  qf = QueryField.find_or_create_by(
    name: f[:name],
    label: f[:label],
    query_category: qc,
    filter_type: f[:filter_type],
    hidden: f[:hidden],
    pairs_with: f[:pairs_with]
  )
  qf.query_asset_classes = [data_table]
end
35.740741
191
0.725389
ac418db11b3b096bbd73c86e47f607c5b6c0c9bb
145
# Be sure to restart your server when you modify this file.

Rails.application.config.session_store :cookie_store, key: '_retrospective_session'
36.25
83
0.813793
6203164c1c81cfb6a9443bd86ffd314acc05f58d
76,266
# frozen_string_literal: true # WARNING ABOUT GENERATED CODE # # This file is generated. See the contributing guide for more information: # https://github.com/aws/aws-sdk-ruby/blob/master/CONTRIBUTING.md # # WARNING ABOUT GENERATED CODE require 'seahorse/client/plugins/content_length.rb' require 'aws-sdk-core/plugins/credentials_configuration.rb' require 'aws-sdk-core/plugins/logging.rb' require 'aws-sdk-core/plugins/param_converter.rb' require 'aws-sdk-core/plugins/param_validator.rb' require 'aws-sdk-core/plugins/user_agent.rb' require 'aws-sdk-core/plugins/helpful_socket_errors.rb' require 'aws-sdk-core/plugins/retry_errors.rb' require 'aws-sdk-core/plugins/global_configuration.rb' require 'aws-sdk-core/plugins/regional_endpoint.rb' require 'aws-sdk-core/plugins/endpoint_discovery.rb' require 'aws-sdk-core/plugins/endpoint_pattern.rb' require 'aws-sdk-core/plugins/response_paging.rb' require 'aws-sdk-core/plugins/stub_responses.rb' require 'aws-sdk-core/plugins/idempotency_token.rb' require 'aws-sdk-core/plugins/jsonvalue_converter.rb' require 'aws-sdk-core/plugins/client_metrics_plugin.rb' require 'aws-sdk-core/plugins/client_metrics_send_plugin.rb' require 'aws-sdk-core/plugins/transfer_encoding.rb' require 'aws-sdk-core/plugins/http_checksum.rb' require 'aws-sdk-core/plugins/signature_v4.rb' require 'aws-sdk-core/plugins/protocols/json_rpc.rb' Aws::Plugins::GlobalConfiguration.add_identifier(:athena) module Aws::Athena # An API client for Athena. To construct a client, you need to configure a `:region` and `:credentials`. # # client = Aws::Athena::Client.new( # region: region_name, # credentials: credentials, # # ... # ) # # For details on configuring region and credentials see # the [developer guide](/sdk-for-ruby/v3/developer-guide/setup-config.html). # # See {#initialize} for a full list of supported configuration options. class Client < Seahorse::Client::Base include Aws::ClientStubs @identifier = :athena set_api(ClientApi::API) add_plugin(Seahorse::Client::Plugins::ContentLength) add_plugin(Aws::Plugins::CredentialsConfiguration) add_plugin(Aws::Plugins::Logging) add_plugin(Aws::Plugins::ParamConverter) add_plugin(Aws::Plugins::ParamValidator) add_plugin(Aws::Plugins::UserAgent) add_plugin(Aws::Plugins::HelpfulSocketErrors) add_plugin(Aws::Plugins::RetryErrors) add_plugin(Aws::Plugins::GlobalConfiguration) add_plugin(Aws::Plugins::RegionalEndpoint) add_plugin(Aws::Plugins::EndpointDiscovery) add_plugin(Aws::Plugins::EndpointPattern) add_plugin(Aws::Plugins::ResponsePaging) add_plugin(Aws::Plugins::StubResponses) add_plugin(Aws::Plugins::IdempotencyToken) add_plugin(Aws::Plugins::JsonvalueConverter) add_plugin(Aws::Plugins::ClientMetricsPlugin) add_plugin(Aws::Plugins::ClientMetricsSendPlugin) add_plugin(Aws::Plugins::TransferEncoding) add_plugin(Aws::Plugins::HttpChecksum) add_plugin(Aws::Plugins::SignatureV4) add_plugin(Aws::Plugins::Protocols::JsonRpc) # @overload initialize(options) # @param [Hash] options # @option options [required, Aws::CredentialProvider] :credentials # Your AWS credentials. This can be an instance of any one of the # following classes: # # * `Aws::Credentials` - Used for configuring static, non-refreshing # credentials. # # * `Aws::SharedCredentials` - Used for loading static credentials from a # shared file, such as `~/.aws/config`. # # * `Aws::AssumeRoleCredentials` - Used when you need to assume a role. # # * `Aws::AssumeRoleWebIdentityCredentials` - Used when you need to # assume a role after providing credentials via the web. 
# # * `Aws::SSOCredentials` - Used for loading credentials from AWS SSO using an # access token generated from `aws login`. # # * `Aws::ProcessCredentials` - Used for loading credentials from a # process that outputs to stdout. # # * `Aws::InstanceProfileCredentials` - Used for loading credentials # from an EC2 IMDS on an EC2 instance. # # * `Aws::ECSCredentials` - Used for loading credentials from # instances running in ECS. # # * `Aws::CognitoIdentityCredentials` - Used for loading credentials # from the Cognito Identity service. # # When `:credentials` are not configured directly, the following # locations will be searched for credentials: # # * `Aws.config[:credentials]` # * The `:access_key_id`, `:secret_access_key`, and `:session_token` options. # * ENV['AWS_ACCESS_KEY_ID'], ENV['AWS_SECRET_ACCESS_KEY'] # * `~/.aws/credentials` # * `~/.aws/config` # * EC2/ECS IMDS instance profile - When used by default, the timeouts # are very aggressive. Construct and pass an instance of # `Aws::InstanceProfileCredentails` or `Aws::ECSCredentials` to # enable retries and extended timeouts. # # @option options [required, String] :region # The AWS region to connect to. The configured `:region` is # used to determine the service `:endpoint`. When not passed, # a default `:region` is searched for in the following locations: # # * `Aws.config[:region]` # * `ENV['AWS_REGION']` # * `ENV['AMAZON_REGION']` # * `ENV['AWS_DEFAULT_REGION']` # * `~/.aws/credentials` # * `~/.aws/config` # # @option options [String] :access_key_id # # @option options [Boolean] :active_endpoint_cache (false) # When set to `true`, a thread polling for endpoints will be running in # the background every 60 secs (default). Defaults to `false`. # # @option options [Boolean] :adaptive_retry_wait_to_fill (true) # Used only in `adaptive` retry mode. When true, the request will sleep # until there is sufficent client side capacity to retry the request. # When false, the request will raise a `RetryCapacityNotAvailableError` and will # not retry instead of sleeping. # # @option options [Boolean] :client_side_monitoring (false) # When `true`, client-side metrics will be collected for all API requests from # this client. # # @option options [String] :client_side_monitoring_client_id ("") # Allows you to provide an identifier for this client which will be attached to # all generated client side metrics. Defaults to an empty string. # # @option options [String] :client_side_monitoring_host ("127.0.0.1") # Allows you to specify the DNS hostname or IPv4 or IPv6 address that the client # side monitoring agent is running on, where client metrics will be published via UDP. # # @option options [Integer] :client_side_monitoring_port (31000) # Required for publishing client metrics. The port that the client side monitoring # agent is running on, where client metrics will be published via UDP. # # @option options [Aws::ClientSideMonitoring::Publisher] :client_side_monitoring_publisher (Aws::ClientSideMonitoring::Publisher) # Allows you to provide a custom client-side monitoring publisher class. By default, # will use the Client Side Monitoring Agent Publisher. # # @option options [Boolean] :convert_params (true) # When `true`, an attempt is made to coerce request parameters into # the required types. # # @option options [Boolean] :correct_clock_skew (true) # Used only in `standard` and adaptive retry modes. Specifies whether to apply # a clock skew correction and retry requests with skewed client clocks. 
# # @option options [Boolean] :disable_host_prefix_injection (false) # Set to true to disable SDK automatically adding host prefix # to default service endpoint when available. # # @option options [String] :endpoint # The client endpoint is normally constructed from the `:region` # option. You should only configure an `:endpoint` when connecting # to test or custom endpoints. This should be a valid HTTP(S) URI. # # @option options [Integer] :endpoint_cache_max_entries (1000) # Used for the maximum size limit of the LRU cache storing endpoints data # for endpoint discovery enabled operations. Defaults to 1000. # # @option options [Integer] :endpoint_cache_max_threads (10) # Used for the maximum threads in use for polling endpoints to be cached, defaults to 10. # # @option options [Integer] :endpoint_cache_poll_interval (60) # When :endpoint_discovery and :active_endpoint_cache is enabled, # Use this option to config the time interval in seconds for making # requests fetching endpoints information. Defaults to 60 sec. # # @option options [Boolean] :endpoint_discovery (false) # When set to `true`, endpoint discovery will be enabled for operations when available. # # @option options [Aws::Log::Formatter] :log_formatter (Aws::Log::Formatter.default) # The log formatter. # # @option options [Symbol] :log_level (:info) # The log level to send messages to the `:logger` at. # # @option options [Logger] :logger # The Logger instance to send log messages to. If this option # is not set, logging will be disabled. # # @option options [Integer] :max_attempts (3) # An integer representing the maximum number attempts that will be made for # a single request, including the initial attempt. For example, # setting this value to 5 will result in a request being retried up to # 4 times. Used in `standard` and `adaptive` retry modes. # # @option options [String] :profile ("default") # Used when loading credentials from the shared credentials file # at HOME/.aws/credentials. When not specified, 'default' is used. # # @option options [Proc] :retry_backoff # A proc or lambda used for backoff. Defaults to 2**retries * retry_base_delay. # This option is only used in the `legacy` retry mode. # # @option options [Float] :retry_base_delay (0.3) # The base delay in seconds used by the default backoff function. This option # is only used in the `legacy` retry mode. # # @option options [Symbol] :retry_jitter (:none) # A delay randomiser function used by the default backoff function. # Some predefined functions can be referenced by name - :none, :equal, :full, # otherwise a Proc that takes and returns a number. This option is only used # in the `legacy` retry mode. # # @see https://www.awsarchitectureblog.com/2015/03/backoff.html # # @option options [Integer] :retry_limit (3) # The maximum number of times to retry failed requests. Only # ~ 500 level server errors and certain ~ 400 level client errors # are retried. Generally, these are throttling errors, data # checksum errors, networking errors, timeout errors, auth errors, # endpoint discovery, and errors from expired credentials. # This option is only used in the `legacy` retry mode. # # @option options [Integer] :retry_max_delay (0) # The maximum number of seconds to delay between retries (0 for no limit) # used by the default backoff function. This option is only used in the # `legacy` retry mode. # # @option options [String] :retry_mode ("legacy") # Specifies which retry algorithm to use. Values are: # # * `legacy` - The pre-existing retry behavior. 
This is default value if # no retry mode is provided. # # * `standard` - A standardized set of retry rules across the AWS SDKs. # This includes support for retry quotas, which limit the number of # unsuccessful retries a client can make. # # * `adaptive` - An experimental retry mode that includes all the # functionality of `standard` mode along with automatic client side # throttling. This is a provisional mode that may change behavior # in the future. # # # @option options [String] :secret_access_key # # @option options [String] :session_token # # @option options [Boolean] :simple_json (false) # Disables request parameter conversion, validation, and formatting. # Also disable response data type conversions. This option is useful # when you want to ensure the highest level of performance by # avoiding overhead of walking request parameters and response data # structures. # # When `:simple_json` is enabled, the request parameters hash must # be formatted exactly as the DynamoDB API expects. # # @option options [Boolean] :stub_responses (false) # Causes the client to return stubbed responses. By default # fake responses are generated and returned. You can specify # the response data to return or errors to raise by calling # {ClientStubs#stub_responses}. See {ClientStubs} for more information. # # ** Please note ** When response stubbing is enabled, no HTTP # requests are made, and retries are disabled. # # @option options [Boolean] :validate_params (true) # When `true`, request parameters are validated before # sending the request. # # @option options [URI::HTTP,String] :http_proxy A proxy to send # requests through. Formatted like 'http://proxy.com:123'. # # @option options [Float] :http_open_timeout (15) The number of # seconds to wait when opening a HTTP session before raising a # `Timeout::Error`. # # @option options [Integer] :http_read_timeout (60) The default # number of seconds to wait for response data. This value can # safely be set per-request on the session. # # @option options [Float] :http_idle_timeout (5) The number of # seconds a connection is allowed to sit idle before it is # considered stale. Stale connections are closed and removed # from the pool before making a request. # # @option options [Float] :http_continue_timeout (1) The number of # seconds to wait for a 100-continue response before sending the # request body. This option has no effect unless the request has # "Expect" header set to "100-continue". Defaults to `nil` which # disables this behaviour. This value can safely be set per # request on the session. # # @option options [Boolean] :http_wire_trace (false) When `true`, # HTTP debug output will be sent to the `:logger`. # # @option options [Boolean] :ssl_verify_peer (true) When `true`, # SSL peer certificates are verified when establishing a # connection. # # @option options [String] :ssl_ca_bundle Full path to the SSL # certificate authority bundle file that should be used when # verifying peer certificates. If you do not pass # `:ssl_ca_bundle` or `:ssl_ca_directory` the the system default # will be used if available. # # @option options [String] :ssl_ca_directory Full path of the # directory that contains the unbundled SSL certificate # authority files for verifying peer certificates. If you do # not pass `:ssl_ca_bundle` or `:ssl_ca_directory` the the # system default will be used if available. 
# def initialize(*args) super end # @!group API Operations # Returns the details of a single named query or a list of up to 50 # queries, which you provide as an array of query ID strings. Requires # you to have access to the workgroup in which the queries were saved. # Use ListNamedQueriesInput to get the list of named query IDs in the # specified workgroup. If information could not be retrieved for a # submitted query ID, information about the query ID submitted is listed # under UnprocessedNamedQueryId. Named queries differ from executed # queries. Use BatchGetQueryExecutionInput to get details about each # unique query execution, and ListQueryExecutionsInput to get a list of # query execution IDs. # # @option params [required, Array<String>] :named_query_ids # An array of query IDs. # # @return [Types::BatchGetNamedQueryOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods: # # * {Types::BatchGetNamedQueryOutput#named_queries #named_queries} => Array&lt;Types::NamedQuery&gt; # * {Types::BatchGetNamedQueryOutput#unprocessed_named_query_ids #unprocessed_named_query_ids} => Array&lt;Types::UnprocessedNamedQueryId&gt; # # @example Request syntax with placeholder values # # resp = client.batch_get_named_query({ # named_query_ids: ["NamedQueryId"], # required # }) # # @example Response structure # # resp.named_queries #=> Array # resp.named_queries[0].name #=> String # resp.named_queries[0].description #=> String # resp.named_queries[0].database #=> String # resp.named_queries[0].query_string #=> String # resp.named_queries[0].named_query_id #=> String # resp.named_queries[0].work_group #=> String # resp.unprocessed_named_query_ids #=> Array # resp.unprocessed_named_query_ids[0].named_query_id #=> String # resp.unprocessed_named_query_ids[0].error_code #=> String # resp.unprocessed_named_query_ids[0].error_message #=> String # # @see http://docs.aws.amazon.com/goto/WebAPI/athena-2017-05-18/BatchGetNamedQuery AWS API Documentation # # @overload batch_get_named_query(params = {}) # @param [Hash] params ({}) def batch_get_named_query(params = {}, options = {}) req = build_request(:batch_get_named_query, params) req.send_request(options) end # Returns the details of a single query execution or a list of up to 50 # query executions, which you provide as an array of query execution ID # strings. Requires you to have access to the workgroup in which the # queries ran. To get a list of query execution IDs, use # ListQueryExecutionsInput$WorkGroup. Query executions differ from named # (saved) queries. Use BatchGetNamedQueryInput to get details about # named queries. # # @option params [required, Array<String>] :query_execution_ids # An array of query execution IDs. 
# # @return [Types::BatchGetQueryExecutionOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods: # # * {Types::BatchGetQueryExecutionOutput#query_executions #query_executions} => Array&lt;Types::QueryExecution&gt; # * {Types::BatchGetQueryExecutionOutput#unprocessed_query_execution_ids #unprocessed_query_execution_ids} => Array&lt;Types::UnprocessedQueryExecutionId&gt; # # @example Request syntax with placeholder values # # resp = client.batch_get_query_execution({ # query_execution_ids: ["QueryExecutionId"], # required # }) # # @example Response structure # # resp.query_executions #=> Array # resp.query_executions[0].query_execution_id #=> String # resp.query_executions[0].query #=> String # resp.query_executions[0].statement_type #=> String, one of "DDL", "DML", "UTILITY" # resp.query_executions[0].result_configuration.output_location #=> String # resp.query_executions[0].result_configuration.encryption_configuration.encryption_option #=> String, one of "SSE_S3", "SSE_KMS", "CSE_KMS" # resp.query_executions[0].result_configuration.encryption_configuration.kms_key #=> String # resp.query_executions[0].query_execution_context.database #=> String # resp.query_executions[0].query_execution_context.catalog #=> String # resp.query_executions[0].status.state #=> String, one of "QUEUED", "RUNNING", "SUCCEEDED", "FAILED", "CANCELLED" # resp.query_executions[0].status.state_change_reason #=> String # resp.query_executions[0].status.submission_date_time #=> Time # resp.query_executions[0].status.completion_date_time #=> Time # resp.query_executions[0].statistics.engine_execution_time_in_millis #=> Integer # resp.query_executions[0].statistics.data_scanned_in_bytes #=> Integer # resp.query_executions[0].statistics.data_manifest_location #=> String # resp.query_executions[0].statistics.total_execution_time_in_millis #=> Integer # resp.query_executions[0].statistics.query_queue_time_in_millis #=> Integer # resp.query_executions[0].statistics.query_planning_time_in_millis #=> Integer # resp.query_executions[0].statistics.service_processing_time_in_millis #=> Integer # resp.query_executions[0].work_group #=> String # resp.unprocessed_query_execution_ids #=> Array # resp.unprocessed_query_execution_ids[0].query_execution_id #=> String # resp.unprocessed_query_execution_ids[0].error_code #=> String # resp.unprocessed_query_execution_ids[0].error_message #=> String # # @see http://docs.aws.amazon.com/goto/WebAPI/athena-2017-05-18/BatchGetQueryExecution AWS API Documentation # # @overload batch_get_query_execution(params = {}) # @param [Hash] params ({}) def batch_get_query_execution(params = {}, options = {}) req = build_request(:batch_get_query_execution, params) req.send_request(options) end # Creates (registers) a data catalog with the specified name and # properties. Catalogs created are visible to all users of the same AWS # account. # # @option params [required, String] :name # The name of the data catalog to create. The catalog name must be # unique for the AWS account and can use a maximum of 128 alphanumeric, # underscore, at sign, or hyphen characters. # # @option params [required, String] :type # The type of data catalog to create: `LAMBDA` for a federated catalog, # `GLUE` for AWS Glue Catalog, or `HIVE` for an external hive metastore. # # @option params [String] :description # A description of the data catalog to be created. 
# # @option params [Hash<String,String>] :parameters # Specifies the Lambda function or functions to use for creating the # data catalog. This is a mapping whose values depend on the catalog # type. # # * For the `HIVE` data catalog type, use the following syntax. The # `metadata-function` parameter is required. `The sdk-version` # parameter is optional and defaults to the currently supported # version. # # `metadata-function=lambda_arn, sdk-version=version_number ` # # * For the `LAMBDA` data catalog type, use one of the following sets of # required parameters, but not both. # # * If you have one Lambda function that processes metadata and # another for reading the actual data, use the following syntax. # Both parameters are required. # # `metadata-function=lambda_arn, record-function=lambda_arn ` # # * If you have a composite Lambda function that processes both # metadata and data, use the following syntax to specify your Lambda # function. # # `function=lambda_arn ` # # * The `GLUE` type has no parameters. # # @option params [Array<Types::Tag>] :tags # A list of comma separated tags to add to the data catalog that is # created. # # @return [Struct] Returns an empty {Seahorse::Client::Response response}. # # @example Request syntax with placeholder values # # resp = client.create_data_catalog({ # name: "CatalogNameString", # required # type: "LAMBDA", # required, accepts LAMBDA, GLUE, HIVE # description: "DescriptionString", # parameters: { # "KeyString" => "ParametersMapValue", # }, # tags: [ # { # key: "TagKey", # value: "TagValue", # }, # ], # }) # # @see http://docs.aws.amazon.com/goto/WebAPI/athena-2017-05-18/CreateDataCatalog AWS API Documentation # # @overload create_data_catalog(params = {}) # @param [Hash] params ({}) def create_data_catalog(params = {}, options = {}) req = build_request(:create_data_catalog, params) req.send_request(options) end # Creates a named query in the specified workgroup. Requires that you # have access to the workgroup. # # For code samples using the AWS SDK for Java, see [Examples and Code # Samples][1] in the *Amazon Athena User Guide*. # # # # [1]: http://docs.aws.amazon.com/athena/latest/ug/code-samples.html # # @option params [required, String] :name # The query name. # # @option params [String] :description # The query description. # # @option params [required, String] :database # The database to which the query belongs. # # @option params [required, String] :query_string # The contents of the query with all query statements. # # @option params [String] :client_request_token # A unique case-sensitive string used to ensure the request to create # the query is idempotent (executes only once). If another # `CreateNamedQuery` request is received, the same response is returned # and another query is not created. If a parameter has changed, for # example, the `QueryString`, an error is returned. # # This token is listed as not required because AWS SDKs (for example the # AWS SDK for Java) auto-generate the token for users. If you are not # using the AWS SDK or the AWS CLI, you must provide this token or the # action will fail. # # **A suitable default value is auto-generated.** You should normally # not need to pass this option.** # # @option params [String] :work_group # The name of the workgroup in which the named query is being created. 
# # @return [Types::CreateNamedQueryOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods: # # * {Types::CreateNamedQueryOutput#named_query_id #named_query_id} => String # # @example Request syntax with placeholder values # # resp = client.create_named_query({ # name: "NameString", # required # description: "DescriptionString", # database: "DatabaseString", # required # query_string: "QueryString", # required # client_request_token: "IdempotencyToken", # work_group: "WorkGroupName", # }) # # @example Response structure # # resp.named_query_id #=> String # # @see http://docs.aws.amazon.com/goto/WebAPI/athena-2017-05-18/CreateNamedQuery AWS API Documentation # # @overload create_named_query(params = {}) # @param [Hash] params ({}) def create_named_query(params = {}, options = {}) req = build_request(:create_named_query, params) req.send_request(options) end # Creates a workgroup with the specified name. # # @option params [required, String] :name # The workgroup name. # # @option params [Types::WorkGroupConfiguration] :configuration # The configuration for the workgroup, which includes the location in # Amazon S3 where query results are stored, the encryption # configuration, if any, used for encrypting query results, whether the # Amazon CloudWatch Metrics are enabled for the workgroup, the limit for # the amount of bytes scanned (cutoff) per query, if it is specified, # and whether workgroup's settings (specified with # EnforceWorkGroupConfiguration) in the WorkGroupConfiguration override # client-side settings. See # WorkGroupConfiguration$EnforceWorkGroupConfiguration. # # @option params [String] :description # The workgroup description. # # @option params [Array<Types::Tag>] :tags # A list of comma separated tags to add to the workgroup that is # created. # # @return [Struct] Returns an empty {Seahorse::Client::Response response}. # # @example Request syntax with placeholder values # # resp = client.create_work_group({ # name: "WorkGroupName", # required # configuration: { # result_configuration: { # output_location: "String", # encryption_configuration: { # encryption_option: "SSE_S3", # required, accepts SSE_S3, SSE_KMS, CSE_KMS # kms_key: "String", # }, # }, # enforce_work_group_configuration: false, # publish_cloud_watch_metrics_enabled: false, # bytes_scanned_cutoff_per_query: 1, # requester_pays_enabled: false, # }, # description: "WorkGroupDescriptionString", # tags: [ # { # key: "TagKey", # value: "TagValue", # }, # ], # }) # # @see http://docs.aws.amazon.com/goto/WebAPI/athena-2017-05-18/CreateWorkGroup AWS API Documentation # # @overload create_work_group(params = {}) # @param [Hash] params ({}) def create_work_group(params = {}, options = {}) req = build_request(:create_work_group, params) req.send_request(options) end # Deletes a data catalog. # # @option params [required, String] :name # The name of the data catalog to delete. # # @return [Struct] Returns an empty {Seahorse::Client::Response response}. 
# # @example Request syntax with placeholder values # # resp = client.delete_data_catalog({ # name: "CatalogNameString", # required # }) # # @see http://docs.aws.amazon.com/goto/WebAPI/athena-2017-05-18/DeleteDataCatalog AWS API Documentation # # @overload delete_data_catalog(params = {}) # @param [Hash] params ({}) def delete_data_catalog(params = {}, options = {}) req = build_request(:delete_data_catalog, params) req.send_request(options) end # Deletes the named query if you have access to the workgroup in which # the query was saved. # # For code samples using the AWS SDK for Java, see [Examples and Code # Samples][1] in the *Amazon Athena User Guide*. # # # # [1]: http://docs.aws.amazon.com/athena/latest/ug/code-samples.html # # @option params [required, String] :named_query_id # The unique ID of the query to delete. # # **A suitable default value is auto-generated.** You should normally # not need to pass this option.** # # @return [Struct] Returns an empty {Seahorse::Client::Response response}. # # @example Request syntax with placeholder values # # resp = client.delete_named_query({ # named_query_id: "NamedQueryId", # required # }) # # @see http://docs.aws.amazon.com/goto/WebAPI/athena-2017-05-18/DeleteNamedQuery AWS API Documentation # # @overload delete_named_query(params = {}) # @param [Hash] params ({}) def delete_named_query(params = {}, options = {}) req = build_request(:delete_named_query, params) req.send_request(options) end # Deletes the workgroup with the specified name. The primary workgroup # cannot be deleted. # # @option params [required, String] :work_group # The unique name of the workgroup to delete. # # @option params [Boolean] :recursive_delete_option # The option to delete the workgroup and its contents even if the # workgroup contains any named queries. # # @return [Struct] Returns an empty {Seahorse::Client::Response response}. # # @example Request syntax with placeholder values # # resp = client.delete_work_group({ # work_group: "WorkGroupName", # required # recursive_delete_option: false, # }) # # @see http://docs.aws.amazon.com/goto/WebAPI/athena-2017-05-18/DeleteWorkGroup AWS API Documentation # # @overload delete_work_group(params = {}) # @param [Hash] params ({}) def delete_work_group(params = {}, options = {}) req = build_request(:delete_work_group, params) req.send_request(options) end # Returns the specified data catalog. # # @option params [required, String] :name # The name of the data catalog to return. # # @return [Types::GetDataCatalogOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods: # # * {Types::GetDataCatalogOutput#data_catalog #data_catalog} => Types::DataCatalog # # @example Request syntax with placeholder values # # resp = client.get_data_catalog({ # name: "CatalogNameString", # required # }) # # @example Response structure # # resp.data_catalog.name #=> String # resp.data_catalog.description #=> String # resp.data_catalog.type #=> String, one of "LAMBDA", "GLUE", "HIVE" # resp.data_catalog.parameters #=> Hash # resp.data_catalog.parameters["KeyString"] #=> String # # @see http://docs.aws.amazon.com/goto/WebAPI/athena-2017-05-18/GetDataCatalog AWS API Documentation # # @overload get_data_catalog(params = {}) # @param [Hash] params ({}) def get_data_catalog(params = {}, options = {}) req = build_request(:get_data_catalog, params) req.send_request(options) end # Returns a database object for the specfied database and data catalog. 
# # @option params [required, String] :catalog_name # The name of the data catalog that contains the database to return. # # @option params [required, String] :database_name # The name of the database to return. # # @return [Types::GetDatabaseOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods: # # * {Types::GetDatabaseOutput#database #database} => Types::Database # # @example Request syntax with placeholder values # # resp = client.get_database({ # catalog_name: "CatalogNameString", # required # database_name: "NameString", # required # }) # # @example Response structure # # resp.database.name #=> String # resp.database.description #=> String # resp.database.parameters #=> Hash # resp.database.parameters["KeyString"] #=> String # # @see http://docs.aws.amazon.com/goto/WebAPI/athena-2017-05-18/GetDatabase AWS API Documentation # # @overload get_database(params = {}) # @param [Hash] params ({}) def get_database(params = {}, options = {}) req = build_request(:get_database, params) req.send_request(options) end # Returns information about a single query. Requires that you have # access to the workgroup in which the query was saved. # # @option params [required, String] :named_query_id # The unique ID of the query. Use ListNamedQueries to get query IDs. # # @return [Types::GetNamedQueryOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods: # # * {Types::GetNamedQueryOutput#named_query #named_query} => Types::NamedQuery # # @example Request syntax with placeholder values # # resp = client.get_named_query({ # named_query_id: "NamedQueryId", # required # }) # # @example Response structure # # resp.named_query.name #=> String # resp.named_query.description #=> String # resp.named_query.database #=> String # resp.named_query.query_string #=> String # resp.named_query.named_query_id #=> String # resp.named_query.work_group #=> String # # @see http://docs.aws.amazon.com/goto/WebAPI/athena-2017-05-18/GetNamedQuery AWS API Documentation # # @overload get_named_query(params = {}) # @param [Hash] params ({}) def get_named_query(params = {}, options = {}) req = build_request(:get_named_query, params) req.send_request(options) end # Returns information about a single execution of a query if you have # access to the workgroup in which the query ran. Each time a query # executes, information about the query execution is saved with a unique # ID. # # @option params [required, String] :query_execution_id # The unique ID of the query execution. 
# # @return [Types::GetQueryExecutionOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods: # # * {Types::GetQueryExecutionOutput#query_execution #query_execution} => Types::QueryExecution # # @example Request syntax with placeholder values # # resp = client.get_query_execution({ # query_execution_id: "QueryExecutionId", # required # }) # # @example Response structure # # resp.query_execution.query_execution_id #=> String # resp.query_execution.query #=> String # resp.query_execution.statement_type #=> String, one of "DDL", "DML", "UTILITY" # resp.query_execution.result_configuration.output_location #=> String # resp.query_execution.result_configuration.encryption_configuration.encryption_option #=> String, one of "SSE_S3", "SSE_KMS", "CSE_KMS" # resp.query_execution.result_configuration.encryption_configuration.kms_key #=> String # resp.query_execution.query_execution_context.database #=> String # resp.query_execution.query_execution_context.catalog #=> String # resp.query_execution.status.state #=> String, one of "QUEUED", "RUNNING", "SUCCEEDED", "FAILED", "CANCELLED" # resp.query_execution.status.state_change_reason #=> String # resp.query_execution.status.submission_date_time #=> Time # resp.query_execution.status.completion_date_time #=> Time # resp.query_execution.statistics.engine_execution_time_in_millis #=> Integer # resp.query_execution.statistics.data_scanned_in_bytes #=> Integer # resp.query_execution.statistics.data_manifest_location #=> String # resp.query_execution.statistics.total_execution_time_in_millis #=> Integer # resp.query_execution.statistics.query_queue_time_in_millis #=> Integer # resp.query_execution.statistics.query_planning_time_in_millis #=> Integer # resp.query_execution.statistics.service_processing_time_in_millis #=> Integer # resp.query_execution.work_group #=> String # # @see http://docs.aws.amazon.com/goto/WebAPI/athena-2017-05-18/GetQueryExecution AWS API Documentation # # @overload get_query_execution(params = {}) # @param [Hash] params ({}) def get_query_execution(params = {}, options = {}) req = build_request(:get_query_execution, params) req.send_request(options) end # Streams the results of a single query execution specified by # `QueryExecutionId` from the Athena query results location in Amazon # S3. For more information, see [Query Results][1] in the *Amazon Athena # User Guide*. This request does not execute the query but returns # results. Use StartQueryExecution to run a query. # # To stream query results successfully, the IAM principal with # permission to call `GetQueryResults` also must have permissions to the # Amazon S3 `GetObject` action for the Athena query results location. # # IAM principals with permission to the Amazon S3 `GetObject` action for # the query results location are able to retrieve query results from # Amazon S3 even if permission to the `GetQueryResults` action is # denied. To restrict user or role access, ensure that Amazon S3 # permissions to the Athena query location are denied. # # # # [1]: https://docs.aws.amazon.com/athena/latest/ug/querying.html # # @option params [required, String] :query_execution_id # The unique ID of the query execution. # # @option params [String] :next_token # A token generated by the Athena service that specifies where to # continue pagination if a previous request was truncated. To obtain the # next set of pages, pass in the `NextToken` from the response object of # the previous page call. 
# # @option params [Integer] :max_results # The maximum number of results (rows) to return in this request. # # @return [Types::GetQueryResultsOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods: # # * {Types::GetQueryResultsOutput#update_count #update_count} => Integer # * {Types::GetQueryResultsOutput#result_set #result_set} => Types::ResultSet # * {Types::GetQueryResultsOutput#next_token #next_token} => String # # The returned {Seahorse::Client::Response response} is a pageable response and is Enumerable. For details on usage see {Aws::PageableResponse PageableResponse}. # # @example Request syntax with placeholder values # # resp = client.get_query_results({ # query_execution_id: "QueryExecutionId", # required # next_token: "Token", # max_results: 1, # }) # # @example Response structure # # resp.update_count #=> Integer # resp.result_set.rows #=> Array # resp.result_set.rows[0].data #=> Array # resp.result_set.rows[0].data[0].var_char_value #=> String # resp.result_set.result_set_metadata.column_info #=> Array # resp.result_set.result_set_metadata.column_info[0].catalog_name #=> String # resp.result_set.result_set_metadata.column_info[0].schema_name #=> String # resp.result_set.result_set_metadata.column_info[0].table_name #=> String # resp.result_set.result_set_metadata.column_info[0].name #=> String # resp.result_set.result_set_metadata.column_info[0].label #=> String # resp.result_set.result_set_metadata.column_info[0].type #=> String # resp.result_set.result_set_metadata.column_info[0].precision #=> Integer # resp.result_set.result_set_metadata.column_info[0].scale #=> Integer # resp.result_set.result_set_metadata.column_info[0].nullable #=> String, one of "NOT_NULL", "NULLABLE", "UNKNOWN" # resp.result_set.result_set_metadata.column_info[0].case_sensitive #=> Boolean # resp.next_token #=> String # # @see http://docs.aws.amazon.com/goto/WebAPI/athena-2017-05-18/GetQueryResults AWS API Documentation # # @overload get_query_results(params = {}) # @param [Hash] params ({}) def get_query_results(params = {}, options = {}) req = build_request(:get_query_results, params) req.send_request(options) end # Returns table metadata for the specified catalog, database, and table. # # @option params [required, String] :catalog_name # The name of the data catalog that contains the database and table # metadata to return. # # @option params [required, String] :database_name # The name of the database that contains the table metadata to return. # # @option params [required, String] :table_name # The name of the table for which metadata is returned. 
# # @return [Types::GetTableMetadataOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods: # # * {Types::GetTableMetadataOutput#table_metadata #table_metadata} => Types::TableMetadata # # @example Request syntax with placeholder values # # resp = client.get_table_metadata({ # catalog_name: "CatalogNameString", # required # database_name: "NameString", # required # table_name: "NameString", # required # }) # # @example Response structure # # resp.table_metadata.name #=> String # resp.table_metadata.create_time #=> Time # resp.table_metadata.last_access_time #=> Time # resp.table_metadata.table_type #=> String # resp.table_metadata.columns #=> Array # resp.table_metadata.columns[0].name #=> String # resp.table_metadata.columns[0].type #=> String # resp.table_metadata.columns[0].comment #=> String # resp.table_metadata.partition_keys #=> Array # resp.table_metadata.partition_keys[0].name #=> String # resp.table_metadata.partition_keys[0].type #=> String # resp.table_metadata.partition_keys[0].comment #=> String # resp.table_metadata.parameters #=> Hash # resp.table_metadata.parameters["KeyString"] #=> String # # @see http://docs.aws.amazon.com/goto/WebAPI/athena-2017-05-18/GetTableMetadata AWS API Documentation # # @overload get_table_metadata(params = {}) # @param [Hash] params ({}) def get_table_metadata(params = {}, options = {}) req = build_request(:get_table_metadata, params) req.send_request(options) end # Returns information about the workgroup with the specified name. # # @option params [required, String] :work_group # The name of the workgroup. # # @return [Types::GetWorkGroupOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods: # # * {Types::GetWorkGroupOutput#work_group #work_group} => Types::WorkGroup # # @example Request syntax with placeholder values # # resp = client.get_work_group({ # work_group: "WorkGroupName", # required # }) # # @example Response structure # # resp.work_group.name #=> String # resp.work_group.state #=> String, one of "ENABLED", "DISABLED" # resp.work_group.configuration.result_configuration.output_location #=> String # resp.work_group.configuration.result_configuration.encryption_configuration.encryption_option #=> String, one of "SSE_S3", "SSE_KMS", "CSE_KMS" # resp.work_group.configuration.result_configuration.encryption_configuration.kms_key #=> String # resp.work_group.configuration.enforce_work_group_configuration #=> Boolean # resp.work_group.configuration.publish_cloud_watch_metrics_enabled #=> Boolean # resp.work_group.configuration.bytes_scanned_cutoff_per_query #=> Integer # resp.work_group.configuration.requester_pays_enabled #=> Boolean # resp.work_group.description #=> String # resp.work_group.creation_time #=> Time # # @see http://docs.aws.amazon.com/goto/WebAPI/athena-2017-05-18/GetWorkGroup AWS API Documentation # # @overload get_work_group(params = {}) # @param [Hash] params ({}) def get_work_group(params = {}, options = {}) req = build_request(:get_work_group, params) req.send_request(options) end # Lists the data catalogs in the current AWS account. # # @option params [String] :next_token # A token generated by the Athena service that specifies where to # continue pagination if a previous request was truncated. To obtain the # next set of pages, pass in the NextToken from the response object of # the previous page call. # # @option params [Integer] :max_results # Specifies the maximum number of data catalogs to return. 
# # @return [Types::ListDataCatalogsOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods: # # * {Types::ListDataCatalogsOutput#data_catalogs_summary #data_catalogs_summary} => Array&lt;Types::DataCatalogSummary&gt; # * {Types::ListDataCatalogsOutput#next_token #next_token} => String # # The returned {Seahorse::Client::Response response} is a pageable response and is Enumerable. For details on usage see {Aws::PageableResponse PageableResponse}. # # @example Request syntax with placeholder values # # resp = client.list_data_catalogs({ # next_token: "Token", # max_results: 1, # }) # # @example Response structure # # resp.data_catalogs_summary #=> Array # resp.data_catalogs_summary[0].catalog_name #=> String # resp.data_catalogs_summary[0].type #=> String, one of "LAMBDA", "GLUE", "HIVE" # resp.next_token #=> String # # @see http://docs.aws.amazon.com/goto/WebAPI/athena-2017-05-18/ListDataCatalogs AWS API Documentation # # @overload list_data_catalogs(params = {}) # @param [Hash] params ({}) def list_data_catalogs(params = {}, options = {}) req = build_request(:list_data_catalogs, params) req.send_request(options) end # Lists the databases in the specified data catalog. # # @option params [required, String] :catalog_name # The name of the data catalog that contains the databases to return. # # @option params [String] :next_token # A token generated by the Athena service that specifies where to # continue pagination if a previous request was truncated. To obtain the # next set of pages, pass in the `NextToken` from the response object of # the previous page call. # # @option params [Integer] :max_results # Specifies the maximum number of results to return. # # @return [Types::ListDatabasesOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods: # # * {Types::ListDatabasesOutput#database_list #database_list} => Array&lt;Types::Database&gt; # * {Types::ListDatabasesOutput#next_token #next_token} => String # # The returned {Seahorse::Client::Response response} is a pageable response and is Enumerable. For details on usage see {Aws::PageableResponse PageableResponse}. # # @example Request syntax with placeholder values # # resp = client.list_databases({ # catalog_name: "CatalogNameString", # required # next_token: "Token", # max_results: 1, # }) # # @example Response structure # # resp.database_list #=> Array # resp.database_list[0].name #=> String # resp.database_list[0].description #=> String # resp.database_list[0].parameters #=> Hash # resp.database_list[0].parameters["KeyString"] #=> String # resp.next_token #=> String # # @see http://docs.aws.amazon.com/goto/WebAPI/athena-2017-05-18/ListDatabases AWS API Documentation # # @overload list_databases(params = {}) # @param [Hash] params ({}) def list_databases(params = {}, options = {}) req = build_request(:list_databases, params) req.send_request(options) end # Provides a list of available query IDs only for queries saved in the # specified workgroup. Requires that you have access to the specified # workgroup. If a workgroup is not specified, lists the saved queries # for the primary workgroup. # # For code samples using the AWS SDK for Java, see [Examples and Code # Samples][1] in the *Amazon Athena User Guide*. # # # # [1]: http://docs.aws.amazon.com/athena/latest/ug/code-samples.html # # @option params [String] :next_token # A token generated by the Athena service that specifies where to # continue pagination if a previous request was truncated. 
To obtain the # next set of pages, pass in the `NextToken` from the response object of # the previous page call. # # @option params [Integer] :max_results # The maximum number of queries to return in this request. # # @option params [String] :work_group # The name of the workgroup from which the named queries are being # returned. If a workgroup is not specified, the saved queries for the # primary workgroup are returned. # # @return [Types::ListNamedQueriesOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods: # # * {Types::ListNamedQueriesOutput#named_query_ids #named_query_ids} => Array&lt;String&gt; # * {Types::ListNamedQueriesOutput#next_token #next_token} => String # # The returned {Seahorse::Client::Response response} is a pageable response and is Enumerable. For details on usage see {Aws::PageableResponse PageableResponse}. # # @example Request syntax with placeholder values # # resp = client.list_named_queries({ # next_token: "Token", # max_results: 1, # work_group: "WorkGroupName", # }) # # @example Response structure # # resp.named_query_ids #=> Array # resp.named_query_ids[0] #=> String # resp.next_token #=> String # # @see http://docs.aws.amazon.com/goto/WebAPI/athena-2017-05-18/ListNamedQueries AWS API Documentation # # @overload list_named_queries(params = {}) # @param [Hash] params ({}) def list_named_queries(params = {}, options = {}) req = build_request(:list_named_queries, params) req.send_request(options) end # Provides a list of available query execution IDs for the queries in # the specified workgroup. If a workgroup is not specified, returns a # list of query execution IDs for the primary workgroup. Requires you to # have access to the workgroup in which the queries ran. # # For code samples using the AWS SDK for Java, see [Examples and Code # Samples][1] in the *Amazon Athena User Guide*. # # # # [1]: http://docs.aws.amazon.com/athena/latest/ug/code-samples.html # # @option params [String] :next_token # A token generated by the Athena service that specifies where to # continue pagination if a previous request was truncated. To obtain the # next set of pages, pass in the `NextToken` from the response object of # the previous page call. # # @option params [Integer] :max_results # The maximum number of query executions to return in this request. # # @option params [String] :work_group # The name of the workgroup from which queries are being returned. If a # workgroup is not specified, a list of available query execution IDs # for the queries in the primary workgroup is returned. # # @return [Types::ListQueryExecutionsOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods: # # * {Types::ListQueryExecutionsOutput#query_execution_ids #query_execution_ids} => Array&lt;String&gt; # * {Types::ListQueryExecutionsOutput#next_token #next_token} => String # # The returned {Seahorse::Client::Response response} is a pageable response and is Enumerable. For details on usage see {Aws::PageableResponse PageableResponse}. 
# # @example Request syntax with placeholder values # # resp = client.list_query_executions({ # next_token: "Token", # max_results: 1, # work_group: "WorkGroupName", # }) # # @example Response structure # # resp.query_execution_ids #=> Array # resp.query_execution_ids[0] #=> String # resp.next_token #=> String # # @see http://docs.aws.amazon.com/goto/WebAPI/athena-2017-05-18/ListQueryExecutions AWS API Documentation # # @overload list_query_executions(params = {}) # @param [Hash] params ({}) def list_query_executions(params = {}, options = {}) req = build_request(:list_query_executions, params) req.send_request(options) end # Lists the metadata for the tables in the specified data catalog # database. # # @option params [required, String] :catalog_name # The name of the data catalog for which table metadata should be # returned. # # @option params [required, String] :database_name # The name of the database for which table metadata should be returned. # # @option params [String] :expression # A regex filter that pattern-matches table names. If no expression is # supplied, metadata for all tables are listed. # # @option params [String] :next_token # A token generated by the Athena service that specifies where to # continue pagination if a previous request was truncated. To obtain the # next set of pages, pass in the NextToken from the response object of # the previous page call. # # @option params [Integer] :max_results # Specifies the maximum number of results to return. # # @return [Types::ListTableMetadataOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods: # # * {Types::ListTableMetadataOutput#table_metadata_list #table_metadata_list} => Array&lt;Types::TableMetadata&gt; # * {Types::ListTableMetadataOutput#next_token #next_token} => String # # The returned {Seahorse::Client::Response response} is a pageable response and is Enumerable. For details on usage see {Aws::PageableResponse PageableResponse}. # # @example Request syntax with placeholder values # # resp = client.list_table_metadata({ # catalog_name: "CatalogNameString", # required # database_name: "NameString", # required # expression: "ExpressionString", # next_token: "Token", # max_results: 1, # }) # # @example Response structure # # resp.table_metadata_list #=> Array # resp.table_metadata_list[0].name #=> String # resp.table_metadata_list[0].create_time #=> Time # resp.table_metadata_list[0].last_access_time #=> Time # resp.table_metadata_list[0].table_type #=> String # resp.table_metadata_list[0].columns #=> Array # resp.table_metadata_list[0].columns[0].name #=> String # resp.table_metadata_list[0].columns[0].type #=> String # resp.table_metadata_list[0].columns[0].comment #=> String # resp.table_metadata_list[0].partition_keys #=> Array # resp.table_metadata_list[0].partition_keys[0].name #=> String # resp.table_metadata_list[0].partition_keys[0].type #=> String # resp.table_metadata_list[0].partition_keys[0].comment #=> String # resp.table_metadata_list[0].parameters #=> Hash # resp.table_metadata_list[0].parameters["KeyString"] #=> String # resp.next_token #=> String # # @see http://docs.aws.amazon.com/goto/WebAPI/athena-2017-05-18/ListTableMetadata AWS API Documentation # # @overload list_table_metadata(params = {}) # @param [Hash] params ({}) def list_table_metadata(params = {}, options = {}) req = build_request(:list_table_metadata, params) req.send_request(options) end # Lists the tags associated with an Athena workgroup or data catalog # resource. 
# # @option params [required, String] :resource_arn # Lists the tags for the resource with the specified ARN. # # @option params [String] :next_token # The token for the next set of results, or null if there are no # additional results for this request, where the request lists the tags # for the resource with the specified ARN. # # @option params [Integer] :max_results # The maximum number of results to be returned per request that lists # the tags for the resource. # # @return [Types::ListTagsForResourceOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods: # # * {Types::ListTagsForResourceOutput#tags #tags} => Array&lt;Types::Tag&gt; # * {Types::ListTagsForResourceOutput#next_token #next_token} => String # # The returned {Seahorse::Client::Response response} is a pageable response and is Enumerable. For details on usage see {Aws::PageableResponse PageableResponse}. # # @example Request syntax with placeholder values # # resp = client.list_tags_for_resource({ # resource_arn: "AmazonResourceName", # required # next_token: "Token", # max_results: 1, # }) # # @example Response structure # # resp.tags #=> Array # resp.tags[0].key #=> String # resp.tags[0].value #=> String # resp.next_token #=> String # # @see http://docs.aws.amazon.com/goto/WebAPI/athena-2017-05-18/ListTagsForResource AWS API Documentation # # @overload list_tags_for_resource(params = {}) # @param [Hash] params ({}) def list_tags_for_resource(params = {}, options = {}) req = build_request(:list_tags_for_resource, params) req.send_request(options) end # Lists available workgroups for the account. # # @option params [String] :next_token # A token generated by the Athena service that specifies where to # continue pagination if a previous request was truncated. To obtain the # next set of pages, pass in the `NextToken` from the response object of # the previous page call. # # @option params [Integer] :max_results # The maximum number of workgroups to return in this request. # # @return [Types::ListWorkGroupsOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods: # # * {Types::ListWorkGroupsOutput#work_groups #work_groups} => Array&lt;Types::WorkGroupSummary&gt; # * {Types::ListWorkGroupsOutput#next_token #next_token} => String # # The returned {Seahorse::Client::Response response} is a pageable response and is Enumerable. For details on usage see {Aws::PageableResponse PageableResponse}. # # @example Request syntax with placeholder values # # resp = client.list_work_groups({ # next_token: "Token", # max_results: 1, # }) # # @example Response structure # # resp.work_groups #=> Array # resp.work_groups[0].name #=> String # resp.work_groups[0].state #=> String, one of "ENABLED", "DISABLED" # resp.work_groups[0].description #=> String # resp.work_groups[0].creation_time #=> Time # resp.next_token #=> String # # @see http://docs.aws.amazon.com/goto/WebAPI/athena-2017-05-18/ListWorkGroups AWS API Documentation # # @overload list_work_groups(params = {}) # @param [Hash] params ({}) def list_work_groups(params = {}, options = {}) req = build_request(:list_work_groups, params) req.send_request(options) end # Runs the SQL query statements contained in the `Query`. Requires you # to have access to the workgroup in which the query ran. Running # queries against an external catalog requires GetDataCatalog permission # to the catalog. 
For code samples using the AWS SDK for Java, see # [Examples and Code Samples][1] in the *Amazon Athena User Guide*. # # # # [1]: http://docs.aws.amazon.com/athena/latest/ug/code-samples.html # # @option params [required, String] :query_string # The SQL query statements to be executed. # # @option params [String] :client_request_token # A unique case-sensitive string used to ensure the request to create # the query is idempotent (executes only once). If another # `StartQueryExecution` request is received, the same response is # returned and another query is not created. If a parameter has changed, # for example, the `QueryString`, an error is returned. # # This token is listed as not required because AWS SDKs (for example the # AWS SDK for Java) auto-generate the token for users. If you are not # using the AWS SDK or the AWS CLI, you must provide this token or the # action will fail. # # **A suitable default value is auto-generated.** You should normally # not need to pass this option.** # # @option params [Types::QueryExecutionContext] :query_execution_context # The database within which the query executes. # # @option params [Types::ResultConfiguration] :result_configuration # Specifies information about where and how to save the results of the # query execution. If the query runs in a workgroup, then workgroup's # settings may override query settings. This affects the query results # location. The workgroup settings override is specified in # EnforceWorkGroupConfiguration (true/false) in the # WorkGroupConfiguration. See # WorkGroupConfiguration$EnforceWorkGroupConfiguration. # # @option params [String] :work_group # The name of the workgroup in which the query is being started. # # @return [Types::StartQueryExecutionOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods: # # * {Types::StartQueryExecutionOutput#query_execution_id #query_execution_id} => String # # @example Request syntax with placeholder values # # resp = client.start_query_execution({ # query_string: "QueryString", # required # client_request_token: "IdempotencyToken", # query_execution_context: { # database: "DatabaseString", # catalog: "CatalogNameString", # }, # result_configuration: { # output_location: "String", # encryption_configuration: { # encryption_option: "SSE_S3", # required, accepts SSE_S3, SSE_KMS, CSE_KMS # kms_key: "String", # }, # }, # work_group: "WorkGroupName", # }) # # @example Response structure # # resp.query_execution_id #=> String # # @see http://docs.aws.amazon.com/goto/WebAPI/athena-2017-05-18/StartQueryExecution AWS API Documentation # # @overload start_query_execution(params = {}) # @param [Hash] params ({}) def start_query_execution(params = {}, options = {}) req = build_request(:start_query_execution, params) req.send_request(options) end # Stops a query execution. Requires you to have access to the workgroup # in which the query ran. # # For code samples using the AWS SDK for Java, see [Examples and Code # Samples][1] in the *Amazon Athena User Guide*. # # # # [1]: http://docs.aws.amazon.com/athena/latest/ug/code-samples.html # # @option params [required, String] :query_execution_id # The unique ID of the query execution to stop. # # **A suitable default value is auto-generated.** You should normally # not need to pass this option.** # # @return [Struct] Returns an empty {Seahorse::Client::Response response}. 
# # @example Request syntax with placeholder values # # resp = client.stop_query_execution({ # query_execution_id: "QueryExecutionId", # required # }) # # @see http://docs.aws.amazon.com/goto/WebAPI/athena-2017-05-18/StopQueryExecution AWS API Documentation # # @overload stop_query_execution(params = {}) # @param [Hash] params ({}) def stop_query_execution(params = {}, options = {}) req = build_request(:stop_query_execution, params) req.send_request(options) end # Adds one or more tags to an Athena resource. A tag is a label that you # assign to a resource. In Athena, a resource can be a workgroup or data # catalog. Each tag consists of a key and an optional value, both of # which you define. For example, you can use tags to categorize Athena # workgroups or data catalogs by purpose, owner, or environment. Use a # consistent set of tag keys to make it easier to search and filter # workgroups or data catalogs in your account. For best practices, see # [Tagging Best Practices][1]. Tag keys can be from 1 to 128 UTF-8 # Unicode characters, and tag values can be from 0 to 256 UTF-8 Unicode # characters. Tags can use letters and numbers representable in UTF-8, # and the following characters: + - = . \_ : / @. Tag keys and values # are case-sensitive. Tag keys must be unique per resource. If you # specify more than one tag, separate them by commas. # # # # [1]: https://aws.amazon.com/answers/account-management/aws-tagging-strategies/ # # @option params [required, String] :resource_arn # Specifies the ARN of the Athena resource (workgroup or data catalog) # to which tags are to be added. # # @option params [required, Array<Types::Tag>] :tags # A collection of one or more tags, separated by commas, to be added to # an Athena workgroup or data catalog resource. # # @return [Struct] Returns an empty {Seahorse::Client::Response response}. # # @example Request syntax with placeholder values # # resp = client.tag_resource({ # resource_arn: "AmazonResourceName", # required # tags: [ # required # { # key: "TagKey", # value: "TagValue", # }, # ], # }) # # @see http://docs.aws.amazon.com/goto/WebAPI/athena-2017-05-18/TagResource AWS API Documentation # # @overload tag_resource(params = {}) # @param [Hash] params ({}) def tag_resource(params = {}, options = {}) req = build_request(:tag_resource, params) req.send_request(options) end # Removes one or more tags from a data catalog or workgroup resource. # # @option params [required, String] :resource_arn # Specifies the ARN of the resource from which tags are to be removed. # # @option params [required, Array<String>] :tag_keys # A comma-separated list of one or more tag keys whose tags are to be # removed from the specified resource. # # @return [Struct] Returns an empty {Seahorse::Client::Response response}. # # @example Request syntax with placeholder values # # resp = client.untag_resource({ # resource_arn: "AmazonResourceName", # required # tag_keys: ["TagKey"], # required # }) # # @see http://docs.aws.amazon.com/goto/WebAPI/athena-2017-05-18/UntagResource AWS API Documentation # # @overload untag_resource(params = {}) # @param [Hash] params ({}) def untag_resource(params = {}, options = {}) req = build_request(:untag_resource, params) req.send_request(options) end # Updates the data catalog that has the specified name. # # @option params [required, String] :name # The name of the data catalog to update. The catalog name must be # unique for the AWS account and can use a maximum of 128 alphanumeric, # underscore, at sign, or hyphen characters. 
# # @option params [required, String] :type # Specifies the type of data catalog to update. Specify `LAMBDA` for a # federated catalog, `GLUE` for AWS Glue Catalog, or `HIVE` for an # external hive metastore. # # @option params [String] :description # New or modified text that describes the data catalog. # # @option params [Hash<String,String>] :parameters # Specifies the Lambda function or functions to use for updating the # data catalog. This is a mapping whose values depend on the catalog # type. # # * For the `HIVE` data catalog type, use the following syntax. The # `metadata-function` parameter is required. `The sdk-version` # parameter is optional and defaults to the currently supported # version. # # `metadata-function=lambda_arn, sdk-version=version_number ` # # * For the `LAMBDA` data catalog type, use one of the following sets of # required parameters, but not both. # # * If you have one Lambda function that processes metadata and # another for reading the actual data, use the following syntax. # Both parameters are required. # # `metadata-function=lambda_arn, record-function=lambda_arn ` # # * If you have a composite Lambda function that processes both # metadata and data, use the following syntax to specify your Lambda # function. # # `function=lambda_arn ` # # * The `GLUE` type has no parameters. # # @return [Struct] Returns an empty {Seahorse::Client::Response response}. # # @example Request syntax with placeholder values # # resp = client.update_data_catalog({ # name: "CatalogNameString", # required # type: "LAMBDA", # required, accepts LAMBDA, GLUE, HIVE # description: "DescriptionString", # parameters: { # "KeyString" => "ParametersMapValue", # }, # }) # # @see http://docs.aws.amazon.com/goto/WebAPI/athena-2017-05-18/UpdateDataCatalog AWS API Documentation # # @overload update_data_catalog(params = {}) # @param [Hash] params ({}) def update_data_catalog(params = {}, options = {}) req = build_request(:update_data_catalog, params) req.send_request(options) end # Updates the workgroup with the specified name. The workgroup's name # cannot be changed. # # @option params [required, String] :work_group # The specified workgroup that will be updated. # # @option params [String] :description # The workgroup description. # # @option params [Types::WorkGroupConfigurationUpdates] :configuration_updates # The workgroup configuration that will be updated for the given # workgroup. # # @option params [String] :state # The workgroup state that will be updated for the given workgroup. # # @return [Struct] Returns an empty {Seahorse::Client::Response response}. 
# # @example Request syntax with placeholder values # # resp = client.update_work_group({ # work_group: "WorkGroupName", # required # description: "WorkGroupDescriptionString", # configuration_updates: { # enforce_work_group_configuration: false, # result_configuration_updates: { # output_location: "String", # remove_output_location: false, # encryption_configuration: { # encryption_option: "SSE_S3", # required, accepts SSE_S3, SSE_KMS, CSE_KMS # kms_key: "String", # }, # remove_encryption_configuration: false, # }, # publish_cloud_watch_metrics_enabled: false, # bytes_scanned_cutoff_per_query: 1, # remove_bytes_scanned_cutoff_per_query: false, # requester_pays_enabled: false, # }, # state: "ENABLED", # accepts ENABLED, DISABLED # }) # # @see http://docs.aws.amazon.com/goto/WebAPI/athena-2017-05-18/UpdateWorkGroup AWS API Documentation # # @overload update_work_group(params = {}) # @param [Hash] params ({}) def update_work_group(params = {}, options = {}) req = build_request(:update_work_group, params) req.send_request(options) end # @!endgroup # @param params ({}) # @api private def build_request(operation_name, params = {}) handlers = @handlers.for(operation_name) context = Seahorse::Client::RequestContext.new( operation_name: operation_name, operation: config.api.operation(operation_name), client: self, params: params, config: config) context[:gem_name] = 'aws-sdk-athena' context[:gem_version] = '1.34.0' Seahorse::Client::Request.new(handlers, context) end # @api private # @deprecated def waiter_names [] end class << self # @api private attr_reader :identifier # @api private def errors_module Errors end end end end
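A minimal end-to-end sketch of how this generated client is typically driven; it is not part of the generated file itself. The region, workgroup, S3 output location and SQL below are illustrative assumptions, while the three calls (`start_query_execution`, `get_query_execution`, `get_query_results`) are the operations documented above.

require 'aws-sdk-athena'

# Illustrative only: region, workgroup, bucket and query are placeholders.
athena = Aws::Athena::Client.new(region: 'us-east-1')

start = athena.start_query_execution(
  query_string: 'SELECT 1 AS answer',
  work_group: 'primary',
  result_configuration: { output_location: 's3://example-bucket/athena-results/' }
)

# Poll until the query leaves the QUEUED/RUNNING states.
state = nil
loop do
  state = athena.get_query_execution(query_execution_id: start.query_execution_id)
                .query_execution.status.state
  break unless %w[QUEUED RUNNING].include?(state)
  sleep 1
end

if state == 'SUCCEEDED'
  results = athena.get_query_results(query_execution_id: start.query_execution_id)
  results.result_set.rows.each { |row| puts row.data.map(&:var_char_value).inspect }
end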
43.630435
165
0.667952
91fa19d9ce81ca660a2f080dbc3d99c69d137a68
250
class Band < ActiveRecord::Base
  has_and_belongs_to_many :venues
  validates_presence_of :name
  before_save :capitalize_name

  private

  # Title-case each word of the band name before the record is saved.
  def capitalize_name
    self.name = name.split.map(&:capitalize).join(' ')
  end
end
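A hypothetical console session showing the effect of the before_save callback above; it assumes the bands table (and the venues join table) already exist.

band = Band.new(name: 'the rolling stones')
band.save
band.name        # => "The Rolling Stones"

Band.new.valid?  # => false, name is required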
20.833333
62
0.736
21141d19693190fd7d680637fa8c7fd5d919aeb7
1,396
class Trends::DupeFinder
  def self.cronjob(date_in_week: 1.day.ago)
    news_this_week = NewsItem.without_dupes.where(created_at: date_in_week.to_time.all_week)
    new(news_this_week).run
  end

  def initialize(news_items)
    @news_items = news_items
    @already_processed = []
  end

  def run
    if @news_items.is_a?(ActiveRecord::Relation)
      @news_items.find_each do |ni|
        check(ni)
      end
    else
      @news_items.each do |ni|
        check(ni)
      end
    end
  end

  def check(news_item)
    return if @already_processed.include?(news_item.id)
    return if news_item.plaintext.blank? || news_item.plaintext.length < 400

    # Candidate dupes: other items sharing more than half of this item's trend words.
    words = news_item.trend_usages.select(:word_id)
    sql = Trends::Usage.where(word_id: words).
      where.not(news_item_id: news_item.id).
      group('news_item_id').
      having('count(*) > ?', (words.length * 0.5).round).
      count
    dupes = sql.keys
    return if dupes.count == 0

    # Keep the best-scoring (then earliest-published) item as the primary copy.
    best_first = NewsItem.find(dupes + [news_item.id]).sort_by { |i| [-(i.absolute_score.to_i), i.published_at] }
    primary = best_first.shift
    best_first.each do |other|
      Rails.logger.info "DUPE: #{primary.title} (#{primary.id}) -> #{other.title} (#{other.id})"
      other.update(dupe_of: primary)
      other.trend_usages.update_all(dupe: true)
      @already_processed << other.id
    end
    @already_processed << primary.id
  end
end
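Two hypothetical ways of invoking the finder, inferred from the class above rather than taken from the original repository:

# Nightly cron entry point: dedupe everything created in the week of 1.day.ago
# (the default window).
Trends::DupeFinder.cronjob

# Or run it against an arbitrary relation or array of items.
Trends::DupeFinder.new(NewsItem.without_dupes.where(created_at: 3.days.ago.all_day)).run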
27.92
113
0.663324
f7e1a0d3fb514033fbfb901bd116ac790ff0e300
1,427
# frozen_string_literal: true class DeviseCreateUsers < ActiveRecord::Migration[5.1] def change create_table :users do |t| ## Database authenticatable t.string :email, null: false, default: "" t.string :encrypted_password, null: false, default: "" ## Recoverable t.string :reset_password_token t.datetime :reset_password_sent_at ## Rememberable t.datetime :remember_created_at ## Trackable # t.integer :sign_in_count, default: 0, null: false # t.datetime :current_sign_in_at # t.datetime :last_sign_in_at # t.string :current_sign_in_ip # t.string :last_sign_in_ip ## Confirmable # t.string :confirmation_token # t.datetime :confirmed_at # t.datetime :confirmation_sent_at # t.string :unconfirmed_email # Only if using reconfirmable ## Lockable # t.integer :failed_attempts, default: 0, null: false # Only if lock strategy is :failed_attempts # t.string :unlock_token # Only if unlock strategy is :email or :both # t.datetime :locked_at t.string :full_name t.timestamps null: false end add_index :users, :email, unique: true add_index :users, :reset_password_token, unique: true # add_index :users, :confirmation_token, unique: true # add_index :users, :unlock_token, unique: true end end
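A sketch of the User model this migration pairs with, assuming only the Devise modules whose columns are left uncommented above (no Trackable, Confirmable or Lockable); the full_name validation is optional and merely illustrates the extra column.

class User < ApplicationRecord
  devise :database_authenticatable, :registerable,
         :recoverable, :rememberable, :validatable

  # Optional: the migration adds a full_name column alongside the Devise fields.
  validates :full_name, presence: true
end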
31.021739
104
0.650315
6218b66d99205d399973b7a89926a5bbf35c4e1f
305
def while_loop(number)
  i = 0
  numbers = []

  while i < number
    puts "At the top i is #{i}"
    numbers.push(i)

    i += 1
    puts "Numbers now:", numbers
    puts "At the bottom i is #{i}"
  end

  puts "The numbers:"
  numbers.each { |num| puts num }
end

# while_loop(3)
# while_loop(6)
while_loop(2)
16.052632
34
0.603279
1a2cdeb5fc7dfbaf67a7ecb59c3fb21919808d53
1,583
require 'spec_helper' describe Puppet::Type.type(:ilb_servergroup).provider(:solaris) do let(:params) do { :name => "sg1" } end let(:resource) { Puppet::Type.type(:ilb_servergroup).new(params) } let(:provider) { described_class.new(resource) } before(:each) do FileTest.stubs(:file?).with('/usr/sbin/ilbadm').returns true FileTest.stubs(:executable?).with('/usr/sbin/ilbadm').returns true end describe "responds to" do [:exists?, :create, :destroy].each { |method| it { is_expected.to respond_to(method) } } end describe "#instances" do described_class.expects(:ilbadm).with( 'show-servergroup', '-o', 'sgname', '-p').returns File.read( my_fixture('show-servergroup_o_sgname_p.txt')) instances = described_class.instances.map { |p| { :ensure => p.get(:ensure), :name => p.get(:name), } } it "has four(4) results" do expect(instances.size).to eq(4) end it "first instance is sg1" do expect(instances[0][:name]).to eq('sg1') end it "last instance is sg4" do expect(instances[-1][:name]).to eq('sg4') end end describe ".create" do it "creates a server group" do described_class.expects(:ilbadm).with('create-servergroup', params[:name]) expect(provider.create).to eq nil end end describe ".destroy" do it "destroys a server group" do described_class.expects(:ilbadm).with('delete-servergroup', params[:name]) expect(provider.destroy).to eq nil end end end
26.383333
80
0.622868
e2c8ffefd09d41479c3901717dab41d5acde5b4f
2,206
module Ci::DbConnectable extend ActiveSupport::Concern included do end # Dynamically sets the database connection. def connect_to_ci_database(options={}) master_db = options[:master_db] || false config = Rails.application.config_for(:configuration).symbolize_keys! db_config = config[:ci_db_server].symbolize_keys! raise 'ci database config missing' if db_config.blank? req_params = { host: db_config[:host], username: db_config[:username], password: db_config[:password], port: db_config[:port] } db_name = options[:db_name].blank? ? current_user.login : options[:db_name] req_params = req_params.merge(database: "#{db_name}_#{db_config[:database]}") unless master_db === true db_params = Ci::Database.get_connection_params(req_params) @connection = Ci::Database.set_connection(db_params).connection end def connect_to_trustie_ci_database(options={}) master_db = options[:master_db] || false config = Rails.application.config_for(:configuration).symbolize_keys! db_config = config[:ci_db_server_trustie].symbolize_keys! raise 'ci database config missing' if db_config.blank? req_params = { host: db_config[:host], username: db_config[:username], password: db_config[:password], port: db_config[:port] } req_params = req_params.merge(database: "#{db_config[:database]}") unless master_db === true db_params = Ci::Database.get_connection_params(req_params) @trustie_db_connection = Ci::Database.set_connection(db_params).connection end def auto_create_database!(connection, database) Rails.logger.info "[CI::DbConnectable] auto_create_database's connection: #{connection}" connection.execute("CREATE DATABASE IF NOT EXISTS #{database}") end def auto_create_table_structure!(connection) Rails.logger.info "[CI::DbConnectable] auto_create_table_structure's connection: #{connection}" sqls = Ci::Schema.statement.split(';').map(&:strip).reject { |e| e.to_s.empty? } sqls.each do |sql| con_result = connection.execute(sql) Rails.logger.info "=============> ci create tabels result: #{con_result}" end end end
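A hypothetical controller showing how the concern above might be mixed in. The controller name and action are assumptions, current_user is expected to respond to #login, and config/configuration.yml is expected to define ci_db_server as the module requires.

class Ci::BuildsController < ApplicationController
  include Ci::DbConnectable

  # Point @connection at "<login>_<ci database>" before every action.
  before_action -> { connect_to_ci_database }

  def index
    render json: @connection.execute('SHOW TABLES').to_a
  end
end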
36.766667
107
0.711242
bb2924c7cb63e25652f45126436d1b3b2cdcb573
431
#require 'spec_helper'
#
#describe Majak::Autopilot::Motor do
#  before do
#    @motor = Majak::Autopilot::Motor.new
#  end
#
#  it "sets a pin to PWM mode"
#  it "turns on the output pin for 3 seconds per degree of adjustment"
#  it "turns in one direction for x time then turns on the motor in the opposite direction to straighten out"
#
#  describe "#turn" do
#    it "when turning left, it should turn on the motor"
#  end
#end
26.9375
108
0.696056
799b08de96d6cd9f71caa324360b3d824bf95ea7
289
class CreateReviews < ActiveRecord::Migration[6.0] def change create_table :reviews do |t| t.text :content t.text :title t.integer :rating t.belongs_to :user, foreign_key: true t.belongs_to :spot, foreign_key: true t.timestamps end end end
20.642857
50
0.650519
61756390a3a31abdbb6845ff0e2dcb2efad5903b
536
cask 'homespun-instant-access' do version :latest sha256 :no_check # vault.platformpurple.com/static/installers/homespun_installer was verified as official when first introduced to the cask url 'http://vault.platformpurple.com/static/installers/homespun_installer.zip' name 'Homespun Instant Access' homepage 'https://www.homespun.com/direct-download/' installer manual: 'Homespun Instant Access Installer.app' uninstall quit: 'com..mm_launcher', delete: '/Applications/Homespun Instant Access.app' end
35.733333
124
0.766791
875c9d62def0a46127267589fbef8aa67ffe858c
380
class SsnValidator < ActiveModel::EachValidator def validate_each(record, attribute, value) unless valid_ssn?(record, attribute, value) record.errors[attribute] << "#{value} is not a valid Social Security Number" end end def self.kind() :custom end def valid_ssn?(record, attribute, value) # irrelevant here how validation is done true end end
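A short sketch of how the validator is attached to a model: Rails maps the ssn: true option to SsnValidator by naming convention. The Person model and its ssn attribute are assumptions.

class Person < ApplicationRecord
  validates :ssn, presence: true, ssn: true
end

person = Person.new(ssn: '078-05-1120')
person.valid? # runs SsnValidator#validate_each for the :ssn attribute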
23.75
82
0.710526
e2bd01fb8541211191265fe55719e569b0afd202
824
# frozen_string_literal: true

module Cocina
  module FromFedora
    class Descriptive
      # Maps titles
      class HydrusDefaultTitleBuilder
        # @param [Nokogiri::XML::Element] resource_element mods or relatedItem element
        # @param [Cocina::FromFedora::DataErrorNotifier] notifier
        # @return [Array<Hash>] an array of hashes that can be mapped to cocina models
        def self.build(resource_element:, notifier:, require_title: nil)
          titles = resource_element.xpath('mods:titleInfo/mods:title[string-length() > 0]', mods: DESC_METADATA_NS)
          if titles.empty?
            return [{ value: 'Hydrus' }] if resource_element.name != 'relatedItem'

            return []
          end

          Titles.build(resource_element: resource_element, notifier: notifier)
        end
      end
    end
  end
end
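A hypothetical illustration of the fallback branch, assuming DESC_METADATA_NS is the MODS v3 namespace (http://www.loc.gov/mods/v3): a record with no non-empty title maps to the placeholder 'Hydrus' title, and the notifier is not consulted on that path.

require 'nokogiri'

mods = Nokogiri::XML(<<~XML).root
  <mods xmlns="http://www.loc.gov/mods/v3">
    <titleInfo><title/></titleInfo>
  </mods>
XML

Cocina::FromFedora::Descriptive::HydrusDefaultTitleBuilder.build(
  resource_element: mods,
  notifier: nil
)
# => [{ value: 'Hydrus' }]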
31.692308
115
0.657767
0346b9fdb500c3f929346878a2b6208208a46ad4
7,386
# frozen_string_literal: true RSpec.describe TableStructure::Writer do describe '#write' do include_context 'questions' include_context 'users' let(:context) { { questions: questions } } let(:array_items) { users } let(:enumerator_items) do ::Enumerator.new { |y| array_items.each { |item| y << item } } end context 'when output to CSV file' do include_context 'table_structured_array_with_stringified' shared_examples 'to convert and write data' do it 'succeeds' do require 'csv' require 'tempfile' schema = ::Mono::TestTableSchema.new(context: context) do column_converter :to_s do |val, *| val.to_s end end writer = described_class.new(schema) tf = ::Tempfile.open do |fp| writer.write(items, to: ::CSV.new(fp), &converter) fp end table = ::CSV.read(tf.path, **csv_options) expect(table[0]).to eq header_row expect(table[1]).to eq body_row_taro expect(table[2]).to eq body_row_hanako expect(table[3]).to eq body_row_jiro end end context 'when CSV encoding is UTF-8' do let(:csv_options) { {} } let(:converter) { ->(values) { values } } context 'when passed array_items' do let(:items) { array_items } it_behaves_like 'to convert and write data' end context 'when passed enumerator_items' do let(:items) { enumerator_items } it_behaves_like 'to convert and write data' end end context 'when CSV encoding is Shift_JIS' do let(:csv_options) { { encoding: 'Shift_JIS:UTF-8' } } let(:converter) do lambda do |values| values.map { |val| val.encode('Shift_JIS', invalid: :replace, undef: :replace) } end end context 'when passed array_items' do let(:items) { array_items } it_behaves_like 'to convert and write data' end context 'when passed enumerator_items' do let(:items) { enumerator_items } it_behaves_like 'to convert and write data' end end end context 'when output to yielder' do include_context 'table_structured_array' shared_examples 'to convert and write data' do it 'succeeds' do schema = ::Mono::TestTableSchema.new(context: context) writer = described_class.new(schema) times = 0 enum = ::Enumerator.new do |y| writer.write(items, to: y) do |values| times += 1 values end end expect(enum.next).to eq header_row expect(times).to eq 1 expect(enum.next).to eq body_row_taro expect(times).to eq 2 expect(enum.next).to eq body_row_hanako expect(times).to eq 3 expect(enum.next).to eq body_row_jiro expect(times).to eq 4 end end context 'when passed array_items' do let(:items) { array_items } it_behaves_like 'to convert and write data' end context 'when passed enumerator_items' do let(:items) { enumerator_items } it_behaves_like 'to convert and write data' end end context 'when output to array' do context 'with row_type: :array' do include_context 'table_structured_array' let(:options) do { row_type: :array } end shared_examples 'to convert and write data' do it 'succeeds' do table = [] writer.write(items, to: table) expect(table[0]).to eq header_row expect(table[1]).to eq body_row_taro expect(table[2]).to eq body_row_hanako expect(table[3]).to eq body_row_jiro end end let(:schema) { ::Mono::TestTableSchema.new(context: context) } let(:writer) { described_class.new(schema, **options) } context 'when passed array_items' do let(:items) { array_items } it_behaves_like 'to convert and write data' end context 'when passed enumerator_items' do let(:items) { enumerator_items } it_behaves_like 'to convert and write data' end end context 'with row_type: :hash' do include_context 'table_structured_hash_with_index_keys' let(:options) do { row_type: :hash } end shared_examples 'to convert and write data' do it 'succeeds' do table = [] 
writer.write(items, to: table) expect(table[0]).to eq header_row expect(table[1]).to eq body_row_taro expect(table[2]).to eq body_row_hanako expect(table[3]).to eq body_row_jiro end end let(:schema) { ::Mono::TestTableSchema.new(context: context) } let(:writer) { described_class.new(schema, **options) } context 'when passed array_items' do let(:items) { array_items } it_behaves_like 'to convert and write data' end context 'when passed enumerator_items' do let(:items) { enumerator_items } it_behaves_like 'to convert and write data' end end end context 'when output to string' do shared_examples 'to convert and write data with header' do it 'succeeds' do expect(@s).to eq "ID,Name,Pet 1,Pet 2,Pet 3,Q1,Q2,Q3\n" \ "1,太郎,cat,dog,,yes,no,yes\n" \ "2,花子,rabbit,turtle,squirrel,yes,yes,no\n" \ "3,次郎,tiger,elephant,doragon,no,yes,\n" end end shared_examples 'to convert and write data without header' do it 'succeeds' do expect(@s).to eq "1,太郎,cat,dog,,yes,no,yes\n" \ "2,花子,rabbit,turtle,squirrel,yes,yes,no\n" \ "3,次郎,tiger,elephant,doragon,no,yes,\n" end end before do schema = ::Mono::TestTableSchema.new(context: context) writer = described_class.new(schema, **options) @s = ::String.new writer.write(items, to: @s) do |row_values| row_values.join(',') + "\n" end end context 'when header is omitted' do let(:options) do { header: false } end context 'when passed array_items' do let(:items) { array_items } it_behaves_like 'to convert and write data without header' end context 'when passed enumerator_items' do let(:items) { enumerator_items } it_behaves_like 'to convert and write data without header' end end context 'when header is not omitted' do let(:options) do [ { header: true }, {} ].sample end context 'when passed array_items' do let(:items) { array_items } it_behaves_like 'to convert and write data with header' end context 'when passed enumerator_items' do let(:items) { enumerator_items } it_behaves_like 'to convert and write data with header' end end end end end
29.309524
92
0.567154
ac24b13940ee6960a061c882f576cb29d9563f93
167
class AddAllowDecimalsToCustomFields < ActiveRecord::Migration[5.2] def change add_column :custom_fields, :allow_decimals, :boolean, :default => false end end
27.833333
75
0.772455
4a18eccc9d5ad1688e5ff298e89ab02907ddf36d
97
module API module V1 class AttachmentsController < ApplicationController end end end
13.857143
55
0.752577
083e2d65bed88494ac76c8d38160b210934e7ee9
248
class CreateRumors < ActiveRecord::Migration[5.2] def change create_table :rumors do |t| t.string :title t.text :description t.integer :confirmed_cases t.integer :confirmed_deaths t.timestamps end end end
19.076923
49
0.665323
4ac8fda8b5a7baa271ebbb22ba39130f1783779a
4,243
# frozen_string_literal: true # rubocop:disable Lint/SuppressedException begin require 'rspec/core/rake_task' namespace :tests do desc "Run all RSpec tests" RSpec::Core::RakeTask.new(:spec) desc "Run RSpec tests that do not require VM fixtures or a particular shell" RSpec::Core::RakeTask.new(:unit) do |t| t.rspec_opts = '--tag ~ssh --tag ~docker --tag ~lxd_transport --tag ~bash --tag ~winrm ' \ '--tag ~windows_agents --tag ~puppetserver --tag ~puppetdb ' \ '--tag ~omi --tag ~kerberos --tag ~lxd_remote' end desc 'Run tests that require a host System Under Test configured with WinRM' RSpec::Core::RakeTask.new(:winrm) do |t| t.rspec_opts = '--tag winrm' end desc 'Run tests that require a host System Under Test configured with SSH' RSpec::Core::RakeTask.new(:ssh) do |t| t.rspec_opts = '--tag ssh' end desc 'Run tests that require a host System Under Test configured with Docker' RSpec::Core::RakeTask.new(:docker) do |t| t.rspec_opts = '--tag docker' end desc 'Run tests that require a host System Under Test configured with LXD' RSpec::Core::RakeTask.new(:lxd) do |t| t.rspec_opts = '--tag lxd_transport' end desc 'Run tests that require a host System Under Test configured with LXD remote' RSpec::Core::RakeTask.new(:lxd_remote) do |t| t.rspec_opts = '--tag lxd_remote' end desc 'Run tests that require Bash on the local host' RSpec::Core::RakeTask.new(:bash) do |t| t.rspec_opts = '--tag bash' end desc 'Run tests that require Windows on the local host' RSpec::Core::RakeTask.new(:windows) do |t| t.rspec_opts = '--tag windows' end desc 'Run tests that require OMI docker container' RSpec::Core::RakeTask.new(:omi) do |t| t.rspec_opts = '--tag omi' end end # The following tasks are run during CI and require additional environment setup # to run. Jobs that run these tests can be viewed in .github/workflows/ namespace :ci do namespace :linux do # Run RSpec tests that do not require WinRM desc '' RSpec::Core::RakeTask.new(:fast) do |t| t.rspec_opts = '--tag ~winrm --tag ~lxd_transport --tag ~windows_agents --tag ~puppetserver ' \ '--tag ~puppetdb --tag ~omi --tag ~windows --tag ~kerberos --tag ~expensive ' \ '--tag ~lxd_remote' end # Run RSpec tests that are slow or require slow to start containers for setup desc '' RSpec::Core::RakeTask.new(:slow) do |t| t.rspec_opts = '--tag puppetserver --tag puppetdb --tag expensive' end end namespace :windows do # Run RSpec tests that do not require Puppet Agents on Windows desc '' RSpec::Core::RakeTask.new(:agentless) do |t| t.rspec_opts = '--tag ~ssh --tag ~docker --tag ~lxd_transport --tag ~bash --tag ~windows_agents ' \ '--tag ~orchestrator --tag ~puppetserver --tag ~puppetdb --tag ~omi ' \ '--tag ~kerberos --tag ~lxd_remote' end # Run RSpec tests that require Puppet Agents configured with Windows desc '' RSpec::Core::RakeTask.new(:agentful) do |t| t.rspec_opts = '--tag windows_agents' end end desc "Run RSpec tests for Bolt's bundled content" task :modules do success = true # Test core modules %w[boltlib ctrl file dir out prompt system].each do |mod| Dir.chdir("#{__dir__}/../bolt-modules/#{mod}") do sh 'rake spec' do |ok, _| success = false unless ok end end end # Test modules %w[canary aggregate puppetdb_fact puppet_connect].each do |mod| Dir.chdir("#{__dir__}/../modules/#{mod}") do sh 'rake spec' do |ok, _| success = false unless ok end end end # Test BoltSpec Dir.chdir("#{__dir__}/../bolt_spec_spec/") do sh 'rake spec' do |ok, _| success = false unless ok end end raise "Module tests failed" unless success end end rescue LoadError end # rubocop:enable 
Lint/SuppressedException
33.944
107
0.612774
036ee5e33274a774902321728e21b3e258d6d9c2
1,598
# frozen_string_literal: true RSpec.describe Lab42::DataClass do describe "a data class with only positional params" do subject { DataClass(:a, :b) } let(:correct_instance) { subject.new(a: 1, b: 2) } it "raises an argument error if a key is missing" do expect{ subject.new(a: 1) }.to raise_error(ArgumentError, "missing initializers for [:b]") end it "raises an argument error for spurious keys too" do expect{ subject.new(a: 1, b: 2, c: 3) }.to raise_error(ArgumentError, "illegal initializers [:c]") end it "but if all goes well" do expect(correct_instance.a).to eq(1) expect(correct_instance.b).to eq(2) end end describe "a data class with default values" do subject { DataClass(:a, b: 0, c: nil) } let(:correct_instance) { subject.new(a: 1) } it "raises an argument error if a key is missing" do expect{ subject.new(b: 1) }.to raise_error(ArgumentError, "missing initializers for [:a]") end it "but if all goes well" do expect(correct_instance.a).to eq(1) expect(correct_instance.b).to be_zero expect(correct_instance.c).to be_nil end it "can extract values into a hash" do expect(correct_instance.to_h).to eq(a: 1, b: 0, c: nil) end end describe "Immutability" do subject { DataClass(a: 1, b: 2).new } let(:modified) { subject.merge(b: 3) } it "creates a new object w/o changing the old one" do expect(modified.to_h).to eq(a: 1, b: 3) expect(subject.to_h).to eq(a: 1, b: 2) end end end # SPDX-License-Identifier: Apache-2.0
31.333333
104
0.652065
f874adcc7cfe71b149a451ac7a6f8f02e9856c8b
272
class CreateDeposits < ActiveRecord::Migration[5.1] def change create_table :deposits do |t| t.decimal :amount, precision: 10, scale: 2 t.date :deposit_date t.belongs_to :wallet, index: true, foreign_key: true t.timestamps end end end
22.666667
58
0.672794
5da225f497a13dccceaa1028bf8b9e0822d75cbe
1,709
# Generated by the protocol buffer compiler. DO NOT EDIT! # Source: google/ads/googleads/v2/services/change_status_service.proto for package 'Google.Ads.GoogleAds.V2.Services' # Original file comments: # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # require 'grpc' require 'google/ads/google_ads/v2/services/change_status_service_pb' module Google module Ads module GoogleAds module V2 module Services module ChangeStatusService # Proto file describing the Change Status service. # # Service to fetch change statuses. class Service include GRPC::GenericService self.marshal_class_method = :encode self.unmarshal_class_method = :decode self.service_name = 'google.ads.googleads.v2.services.ChangeStatusService' # Returns the requested change status in full detail. rpc :GetChangeStatus, Google::Ads::GoogleAds::V2::Services::GetChangeStatusRequest, Google::Ads::GoogleAds::V2::Resources::ChangeStatus end Stub = Service.rpc_stub_class end end end end end end
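# --- Illustrative usage sketch (added for clarity; not part of the generated file above) ---
# A minimal example of how a service generated this way is typically consumed,
# assuming standard grpc-ruby conventions (rpc_stub_class produces a Stub whose
# methods are the snake_cased rpc names). The endpoint, credentials and
# resource_name below are placeholder assumptions, not values from this file,
# and a real Google Ads client would also attach auth metadata.

require 'grpc'
require 'google/ads/google_ads/v2/services/change_status_service_services_pb' # assumed path

stub = Google::Ads::GoogleAds::V2::Services::ChangeStatusService::Stub.new(
  'googleads.googleapis.com:443',        # assumed endpoint
  GRPC::Core::ChannelCredentials.new     # TLS only; auth headers omitted in this sketch
)

request = Google::Ads::GoogleAds::V2::Services::GetChangeStatusRequest.new(
  resource_name: 'customers/123/changeStatus/abc' # hypothetical resource name
)

change_status = stub.get_change_status(request)   # rpc :GetChangeStatus, snake_cased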
34.18
149
0.692803
084645dcbaeafd4b76ab7f666bc4ff658e50297e
2,693
module LiveData

  class User
    IntegerPackCode = "I"
    ReadTime = 25

    attr_accessor :groups
    attr_accessor :name

    # Create a user object
    def initialize( name = nil, channel = nil )
      @name = name || self
      @channel = channel
      @read_time = ReadTime
      @lock = Mutex.new
      @read_pipe, @write_pipe = IO.pipe
      @groups = []
    end

    def set_read_time( time )
      @read_time = time
    end

    # Reset the write pipe and read pipe
    def reset
      begin
        @write_pipe.close
        @read_pipe.close
      rescue => err
      end
      @read_pipe, @write_pipe = IO.pipe
    end

    # Drain any content remaining in the pipe
    def clean
      begin
        @lock.synchronize {
          while( @read_pipe.read_nonblock( 10000 ) )
          end
        }
      rescue => err
      end
    end

    # Read a length-prefixed YAML string from the pipe
    def read_yaml
      if( ioarrays = IO.select( [@read_pipe], [], [], @read_time ) )
        if( ioarrays[0].include? @read_pipe )
          @lock.synchronize {
            tcont = @read_pipe.read_nonblock(4)
            if( tcont and tcont.size == 4 )
              len, etc = tcont.unpack( IntegerPackCode )
              return @read_pipe.read_nonblock( len )
            else
              return nil
            end
          }
        else
          return nil
        end
      else
        return nil
      end
    end

    # Read an object (deserialized from YAML)
    def read
      cont = read_yaml
      if( cont )
        return YAML.load( cont )
      else
        return nil
      end
    end

    # Write a string containing YAML data
    # ==== Parameters
    # * +yaml_data+ - yaml string
    def write_yaml( yaml_data )
      return unless yaml_data and yaml_data.class == String
      len = [ yaml_data.length ].pack( IntegerPackCode )
      @lock.synchronize {
        @write_pipe.write( len )
        @write_pipe.write( yaml_data )
      }
    end

    # Write an object (serialized to YAML)
    # ==== Parameters
    # * +data+ - any Object
    def write( data )
      write_yaml( data.to_yaml )
    end

    # Destroy the user
    def destroy
      @groups.dup.each{|grp|
        grp.remove_user( self )
      }
      if( @channel )
        @channel.users.delete( @name )
        @channel.user_in_groups.delete( @name )
      end
      begin
        @read_pipe.close
        @write_pipe.close
      rescue => err
      end
    end

  end

end
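# --- Illustrative usage sketch (added for clarity; not part of the original file) ---
# Shows the intended write/read round trip through the user's internal pipe:
# objects are YAML-serialized, length-prefixed, and read back with a timeout.
# The channel argument is omitted and the payload is a made-up example; the
# exact value returned by YAML.load depends on the Ruby/Psych version in use.

require 'yaml'

user = LiveData::User.new('alice')
user.write(greeting: 'hello', count: 1)   # serialized to YAML, length-prefixed
payload = user.read                        # => {:greeting=>"hello", :count=>1} on the Ruby versions this targets
user.destroy                               # closes both ends of the pipe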
23.622807
71
0.473078
edd3f907c9356052e127a853e882d68e02635ce9
122
require 'wsdl_mapper/runtime/s8r_base' module WsdlMapper module Runtime class InputS8r < S8rBase end end end
13.555556
38
0.745902
5d74286dc5c42222963d278b2b149f69be6046ae
20,399
# Copyright (c) 2009-2012 VMware, Inc. module Bosh module Cli class Director include VersionCalc DIRECTOR_HTTP_ERROR_CODES = [400, 403, 500] DEFAULT_MAX_POLLS = nil # Not limited DEFAULT_POLL_INTERVAL = 1 API_TIMEOUT = 86400 * 3 CONNECT_TIMEOUT = 30 attr_reader :director_uri # The current task number. An accessor so it can be used in tests. # @return [String] The task number. attr_accessor :current_running_task def initialize(director_uri, user = nil, password = nil) if director_uri.nil? || director_uri =~ /^\s*$/ raise DirectorMissing, "no director URI given" end @director_uri = director_uri @user = user @password = password end def exists? get_status true rescue AuthError true # For compatibility with directors that return 401 for /info rescue DirectorError false end def authenticated? status = get_status # Backward compatibility: older directors return 200 # only for logged in users return true if !status.has_key?("version") !status["user"].nil? rescue DirectorError false end def create_user(username, password) payload = JSON.generate("username" => username, "password" => password) response_code, body = post("/users", "application/json", payload) response_code == 204 end def upload_stemcell(filename) upload_and_track("/stemcells", "application/x-compressed", filename, :log_type => "event") end def get_version get_status["version"] end def get_status get_json("/info") end def list_stemcells get_json("/stemcells") end def list_releases get_json("/releases") end def list_deployments get_json("/deployments") end def list_running_tasks if version_less(get_version, "0.3.5") get_json("/tasks?state=processing") else get_json("/tasks?state=processing,cancelling,queued") end end def list_recent_tasks(count = 30) count = [count.to_i, 100].min get_json("/tasks?limit=#{count}") end def get_release(name) get_json("/releases/#{name}") end def get_deployment(name) status, body = get_json_with_status("/deployments/#{name}") if status == 404 raise DeploymentNotFound, "Deployment `#{name}' not found" end body end def list_vms(name) status, body = get_json_with_status("/deployments/#{name}/vms") if status == 404 raise DeploymentNotFound, "Deployment `#{name}' not found" end body end def upload_release(filename) upload_and_track("/releases", "application/x-compressed", filename, :log_type => "event") end def delete_stemcell(name, version, options = {}) track_options = { :log_type => "event" } track_options[:quiet] = options[:quiet] if options.has_key?(:quiet) request_and_track(:delete, "/stemcells/%s/%s" % [name, version], nil, nil, track_options) end def delete_deployment(name, options = {}) url = "/deployments/#{name}" query_params = [] query_params << "force=true" if options[:force] url += "?#{query_params.join("&")}" if query_params.size > 0 request_and_track(:delete, url, nil, nil, :log_type => "event") end def delete_release(name, options = {}) url = "/releases/#{name}" query_params = [] query_params << "force=true" if options[:force] query_params << "version=#{options[:version]}" if options[:version] url += "?#{query_params.join("&")}" if query_params.size > 0 track_options = { :log_type => "event" } track_options[:quiet] = options[:quiet] if options.has_key?(:quiet) request_and_track(:delete, url, nil, nil, track_options) end def deploy(manifest_yaml, options = {}) url = "/deployments" url += "?recreate=true" if options[:recreate] request_and_track(:post, url, "text/yaml", manifest_yaml, :log_type => "event") end def setup_ssh(deployment_name, job, index, user, public_key, password) url = 
"/deployments/#{deployment_name}/ssh" payload = JSON.generate("command" => "setup", "deployment_name" => deployment_name, "target" => { "job" => job, "indexes" => [index].compact }, "params" => { "user" => user, "public_key" => public_key, "password" => password }) results = "" output_stream = lambda do |entries| results << entries "" end status, task_id = request_and_track(:post, url, "application/json", payload, :log_type => "result", :output_stream => output_stream) return nil if status != :done || task_id.nil? JSON.parse(results) end def cleanup_ssh(deployment_name, job, user_regex, indexes) indexes ||= [] url = "/deployments/#{deployment_name}/ssh" payload = JSON.generate("command" => "cleanup", "deployment_name" => deployment_name, "target" => { "job" => job, "indexes" => indexes.compact }, "params" => { "user_regex" => user_regex }) request_and_track(:post, url, "application/json", payload, :quiet => true) end def change_job_state(deployment_name, manifest_yaml, job_name, index, new_state) url = "/deployments/#{deployment_name}/jobs/#{job_name}" url += "/#{index}" if index url += "?state=#{new_state}" request_and_track(:put, url, "text/yaml", manifest_yaml, :log_type => "event") end def fetch_logs(deployment_name, job_name, index, log_type, filters = nil) url = "/deployments/#{deployment_name}/jobs/#{job_name}" + "/#{index}/logs?type=#{log_type}&filters=#{filters}" status, task_id = request_and_track(:get, url, nil, nil, :log_type => "event") return nil if status != :done || task_id.nil? get_task_result(task_id) end def fetch_vm_state(deployment_name) url = "/deployments/#{deployment_name}/vms?format=full" vms = [] # CLEANUP TODO output stream only being used for side effects output_stream = lambda do |vm_states| vm_states.to_s.split("\n").each do |vm_state| vms << JSON.parse(vm_state) end "" end status, task_id = request_and_track(:get, url, nil, nil, :log_type => "result", :output_stream => output_stream, :quiet => true) if status != :done || task_id.nil? 
raise DirectorError, "Failed to fetch VMs information from director" end vms end def download_resource(id) status, tmp_file, headers = get("/resources/#{id}", nil, nil, {}, :file => true) if status == 200 tmp_file else raise DirectorError, "Cannot download resource `#{id}': " + "HTTP status #{status}" end end def create_property(deployment_name, property_name, value) url = "/deployments/#{deployment_name}/properties" payload = JSON.generate("name" => property_name, "value" => value) post(url, "application/json", payload) end def update_property(deployment_name, property_name, value) url = "/deployments/#{deployment_name}/properties/#{property_name}" payload = JSON.generate("value" => value) put(url, "application/json", payload) end def delete_property(deployment_name, property_name) url = "/deployments/#{deployment_name}/properties/#{property_name}" delete(url, "application/json") end def get_property(deployment_name, property_name) url = "/deployments/#{deployment_name}/properties/#{property_name}" get_json_with_status(url) end def list_properties(deployment_name) url = "/deployments/#{deployment_name}/properties" get_json(url) end def perform_cloud_scan(deployment_name) url = "/deployments/#{deployment_name}/scans" request_and_track(:post, url, nil, nil, :log_type => "event", :log_only => true) end def list_problems(deployment_name) url = "/deployments/#{deployment_name}/problems" get_json(url) end def apply_resolutions(deployment_name, resolutions) url = "/deployments/#{deployment_name}/problems" request_and_track(:put, url, "application/json", JSON.generate("resolutions" => resolutions), :log_type => "event", :log_only => true) end def get_current_time status, body, headers = get("/info") Time.parse(headers[:date]) rescue nil end def get_time_difference # This includes the roundtrip to director ctime = get_current_time ctime ? Time.now - ctime : 0 end def get_task(task_id) response_code, body = get("/tasks/#{task_id}") raise AuthError if response_code == 401 raise MissingTask, "Task #{task_id} not found" if response_code == 404 if response_code != 200 raise TaskTrackError, "Got HTTP #{response_code} " + "while tracking task state" end JSON.parse(body) rescue JSON::ParserError raise TaskTrackError, "Cannot parse task JSON, " + "incompatible director version" end def get_task_state(task_id) get_task(task_id)["state"] end def get_task_result(task_id) get_task(task_id)["result"] end def get_task_output(task_id, offset, log_type = nil) uri = "/tasks/#{task_id}/output" uri += "?type=#{log_type}" if log_type headers = { "Range" => "bytes=#{offset}-" } response_code, body, headers = get(uri, nil, nil, headers) if response_code == 206 && headers[:content_range].to_s =~ /bytes \d+-(\d+)\/\d+/ new_offset = $1.to_i + 1 else new_offset = nil end [body, new_offset] end def cancel_task(task_id) response_code, body = delete("/task/#{task_id}") raise AuthError if response_code == 401 raise MissingTask, "No task##{task_id} found" if response_code == 404 [body, response_code] end ## # Cancels the task currently running. def cancel_current body, response_code = cancel_task(@current_running_task) if (200..299).include?(response_code) say("Cancelling task ##{@current_running_task}.".red) end end ## # Returns whether there is a task currently running. # # @return [Boolean] Whether there is a task currently running. def has_current? 
unless @current_running_task return false end task_state = get_task_state(@current_running_task) task_state == "queued" || task_state == "processing" end [:post, :put, :get, :delete].each do |method_name| define_method method_name do |*args| request(method_name, *args) end end def request_and_track(method, uri, content_type, payload = nil, options = {}) http_status, body, headers = request(method, uri, content_type, payload) location = headers[:location] redirected = http_status == 302 task_id = nil if redirected if location =~ /\/tasks\/(\d+)\/?$/ # Looks like we received task URI task_id = $1 @current_running_task = task_id status = poll_task(task_id, options) else status = :non_trackable end else status = :failed end [status, task_id] end def upload_and_track(uri, content_type, filename, options = {}) file = FileWithProgressBar.open(filename, "r") method = options[:method] || :post request_and_track(method, uri, content_type, file, options) ensure file.stop_progress_bar if file end def poll_task(task_id, options = {}) polls = 0 log_type = options[:log_type] poll_interval = options[:poll_interval] || DEFAULT_POLL_INTERVAL max_polls = options[:max_polls] || DEFAULT_MAX_POLLS start_time = Time.now quiet = options[:quiet] output_stream = options[:output_stream] log_only = options[:log_only] task = DirectorTask.new(self, task_id, log_type) unless quiet || log_only say("Tracking task output for task##{task_id}...") end renderer = Bosh::Cli::TaskLogRenderer.create_for_log_type(log_type) renderer.time_adjustment = get_time_difference no_output_yet = true while true polls += 1 state, output = task.state, task.output if output no_output_yet = false output = output_stream.call(output) unless output_stream.nil? renderer.add_output(output) unless quiet end if no_output_yet && polls % 10 == 0 && !quiet && !log_only say("Task state is '#{state}', waiting for output...") end renderer.refresh if state == "done" result = :done break elsif state == "error" result = :error break elsif state == "cancelled" result = :cancelled break elsif !max_polls.nil? && polls >= max_polls result = :track_timeout break end sleep(poll_interval) end unless quiet renderer.add_output(task.flush_output) renderer.finish(state) end return result if quiet return result if log_only && result == :done if Bosh::Cli::Config.interactive && log_type != "debug" && result == :error confirm = ask("\nThe task has returned an error status, " + "do you want to see debug log? [Yn]: ") if confirm.empty? 
|| confirm =~ /y(es)?/i options.delete(:output_stream) poll_task(task_id, options.merge(:log_type => "debug")) else say("Please use 'bosh task #{task_id}' command " + "to see the debug log".red) result end else nl status = "Task #{task_id}: state is '#{state}'" duration = renderer.duration || (Time.now - start_time) if result == :done status += ", took #{format_time(duration).green} to complete" end say(status) result end end def request(method, uri, content_type = nil, payload = nil, headers = {}, options = { }) headers = headers.dup headers["Content-Type"] = content_type if content_type if options[:file] tmp_file = File.open(File.join(Dir.mktmpdir, "streamed-response"), "w") response_reader = lambda do |part| tmp_file.write(part) end else response_reader = nil end response = perform_http_request(method, @director_uri + uri, payload, headers, &response_reader) if options[:file] tmp_file.close body = tmp_file.path else body = response.body end if DIRECTOR_HTTP_ERROR_CODES.include?(response.code) raise DirectorError, parse_error_message(response.code, body) end headers = response.headers.inject({}) do |hash, (k, v)| # Some HTTP clients symbolize headers, some do not. # To make it easier to switch between them, we try # to symbolize them ourselves. hash[k.to_s.downcase.gsub(/-/, "_").to_sym] = v hash end [response.code, body, headers] rescue URI::Error, SocketError, Errno::ECONNREFUSED => e raise DirectorInaccessible, "cannot access director (#{e.message})" rescue SystemCallError => e raise DirectorError, "System call error while talking to director: #{e}" end def parse_error_message(status, body) parsed_body = JSON.parse(body.to_s) if parsed_body["code"] && parsed_body["description"] "Director error %s: %s" % [parsed_body["code"], parsed_body["description"]] else "Director error (HTTP %s): %s" % [status, body] end rescue JSON::ParserError "Director error (HTTP %s): %s" % [status, body] end private def perform_http_request(method, uri, payload = nil, headers = {}, &block) http_client = HTTPClient.new http_client.send_timeout = API_TIMEOUT http_client.receive_timeout = API_TIMEOUT http_client.connect_timeout = CONNECT_TIMEOUT # HTTPClient#set_auth doesn't seem to work properly, # injecting header manually instead. # TODO: consider using vanilla Net::HTTP if @user && @password headers["Authorization"] = "Basic " + Base64.encode64("#{@user}:#{@password}").strip end http_client.request(method, uri, :body => payload, :header => headers, &block) rescue HTTPClient::BadResponseError => e err("Received bad HTTP response from director: #{e}") rescue URI::Error, SocketError, Errno::ECONNREFUSED, SystemCallError raise # We handle these upstream rescue => e # httpclient (sadly) doesn't have a generic exception err("REST API call exception: #{e}") end def get_json(url) status, body = get_json_with_status(url) raise AuthError if status == 401 raise DirectorError, "Director HTTP #{status}" if status != 200 body end def get_json_with_status(url) status, body, headers = get(url, "application/json") body = JSON.parse(body) if status == 200 [status, body] rescue JSON::ParserError raise DirectorError, "Cannot parse director response: #{body}" end end class FileWithProgressBar < ::File def progress_bar return @progress_bar if @progress_bar out = Bosh::Cli::Config.output || StringIO.new @progress_bar = ProgressBar.new(File.basename(self.path), File.size(self.path), out) @progress_bar.file_transfer_mode @progress_bar end def stop_progress_bar progress_bar.halt unless progress_bar.finished? 
end def size File.size(self.path) end def read(*args) result = super(*args) if result && result.size > 0 progress_bar.inc(result.size) else progress_bar.finish end result end end end end
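# --- Illustrative usage sketch (added for clarity; not part of the original file) ---
# A minimal example of driving the Director client defined above. The target
# URL, credentials and stemcell filename are placeholder assumptions, and
# error handling is omitted.

director = Bosh::Cli::Director.new('https://192.168.50.4:25555', 'admin', 'admin')

if director.authenticated?
  deployments = director.list_deployments                       # GET /deployments, parsed JSON
  status, task_id = director.upload_stemcell('bosh-stemcell.tgz')
  # request_and_track-based calls return [status, task_id], where status is
  # :done, :error, :cancelled, :track_timeout, :non_trackable or :failed.
end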
32.175079
80
0.554782
ff070bce0f0e3630ab1bf3d5a3722be0939f5d46
12,195
require 'spec_helper' describe 'collection', type: :feature do def create_collection(title, description) visit '/dashboard' first('#hydra-collection-add').click expect(page).to have_content 'Create New Collection' # Creator is a multi-value field, so it should have button to add more fields expect(page).to have_selector "div.collection_creator .input-append button.add" # Title is a single-value field, so it should not have the adder button expect(page).to_not have_selector "div.collection_title .input-append button.add" fill_in('Title', with: title) fill_in('Abstract or Summary', with: description) click_button("Create Collection") expect(page).to have_content 'Items in this Collection' expect(page).to have_content title expect(page).to have_content description end let(:title1) { "Test Collection 1" } let(:description1) { "Description for collection 1 we are testing." } let(:title2) { "Test Collection 2" } let(:description2) { "Description for collection 2 we are testing." } let(:collection1) do Collection.create(title: title1, description: description1, members: []) { |c| c.apply_depositor_metadata(user.user_key) } end let(:collection2) do Collection.create(title: title2, description: description2, members: []) { |c| c.apply_depositor_metadata(user.user_key) } end let(:user) { FactoryGirl.create(:user) } let(:gfs) do (0..1).map do |x| GenericFile.create(title: ["title #{x}"]) do |f| f.apply_depositor_metadata(user.user_key) end end end let(:gf1) { gfs[0] } let(:gf2) { gfs[1] } describe 'create collection' do let!(:gf1) { gfs[0] } let!(:gf2) { gfs[1] } before do sign_in user create_collection(title2, description2) end it "creates collection from the dashboard and include files", js: true do visit '/dashboard/files' first('input#check_all').click click_button "Add to Collection" # opens the modal # since there is only one collection, it's not necessary to choose a radio button click_button "Update Collection" expect(page).to have_content "Items in this Collection" # There are two rows in the table per document (one for the general info, one for the details) # Make sure we have at least 2 documents expect(page).to have_selector "table.table-zebra-striped tr#document_#{gf1.id}" expect(page).to have_selector "table.table-zebra-striped tr#document_#{gf2.id}" end end describe 'delete collection' do let!(:collection) do Collection.create(title: 'collection title', description: 'collection description') do |c| c.apply_depositor_metadata(user.user_key) end end before do sign_in user visit '/dashboard/collections' end it "deletes a collection" do expect(page).to have_content(collection.title) within('#document_' + collection.id) do first('button.dropdown-toggle').click first(".itemtrash").click end expect(page).not_to have_content(collection.title) end end describe 'show collection' do let!(:collection) do Collection.create(title: 'collection title', description: 'collection description', members: [gf1, gf2]) do |c| c.apply_depositor_metadata(user.user_key) end end before do sign_in user visit '/dashboard/collections' end it "shows a collection with a listing of Descriptive Metadata and catalog-style search results" do expect(page).to have_content(collection.title) within('#document_' + collection.id) do click_link("Display all details of collection title") end expect(page).to have_content(collection.title) expect(page).to have_content(collection.description) # Should not show title and description a second time expect(page).to_not have_css('.metadata-collections', text: collection.title) 
expect(page).to_not have_css('.metadata-collections', text: collection.description) # Should not have Collection Descriptive metadata table expect(page).to have_content("Descriptions") # Should have search results / contents listing expect(page).to have_content(gf1.title.first) expect(page).to have_content(gf2.title.first) expect(page).to_not have_css(".pager") click_link "Gallery" expect(page).to have_content(gf1.title.first) expect(page).to have_content(gf2.title.first) end it "hides collection descriptive metadata when searching a collection" do # URL: /dashboard/collections expect(page).to have_content(collection.title) within("#document_#{collection.id}") do click_link("Display all details of collection title") end # URL: /collections/collection-id expect(page).to have_content(collection.title) expect(page).to have_content(collection.description) expect(page).to have_content(gf1.title.first) expect(page).to have_content(gf2.title.first) fill_in('collection_search', with: gf1.title.first) click_button('collection_submit') # Should not have Collection metadata table (only title and description) expect(page).to_not have_content("Total Items") expect(page).to have_content(collection.title) expect(page).to have_content(collection.description) # Should have search results / contents listing expect(page).to have_content("Search Results") expect(page).to have_content(gf1.title.first) expect(page).to_not have_content(gf2.title.first) end end describe 'collection sorting' do before do collection1 # create the collections by referencing them sleep(1) # make sure the timestamps aren't equal collection2 sleep(1) collection1.title += 'changed' collection1.save # collection 1 is now earlier when sorting by create date but later # when sorting by modified date sign_in user visit '/dashboard/collections' end it "has creation date for collections" do expect(page).to have_content(collection1.create_date.to_date.to_formatted_s(:standard)) end it "allows changing sort order" do find(:xpath, "//select[@id='sort']/option[contains(., 'date modified')][contains(@value, 'asc')]") \ .select_option click_button('Refresh') expect(page).to have_css("#document_#{collection1.id}") expect(page).to have_css("#document_#{collection2.id}") expect(page.body.index("id=\"document_#{collection1.id}")).to be > page.body.index("id=\"document_#{collection2.id}") find(:xpath, "//select[@id='sort']/option[contains(., 'date modified')][contains(@value, 'desc')]") \ .select_option click_button('Refresh') expect(page).to have_css("#document_#{collection1.id}") expect(page).to have_css("#document_#{collection2.id}") expect(page.body.index("id=\"document_#{collection1.id}")).to be < page.body.index("id=\"document_#{collection2.id}") end end describe 'add files to collection' do let!(:gf1) { gfs[0] } let!(:gf2) { gfs[1] } before do collection1 # create collections by referencing them collection2 sign_in user end it "preselects the collection we are adding files to" do visit "/collections/#{collection1.id}" click_link 'Add files' first('input#check_all').click click_button "Add to Collection" expect(page).to have_css("input#id_#{collection1.id}[checked='checked']") expect(page).not_to have_css("input#id_#{collection2.id}[checked='checked']") visit "/collections/#{collection2.id}" click_link 'Add files' first('input#check_all').click click_button "Add to Collection" expect(page).not_to have_css("input#id_#{collection1.id}[checked='checked']") expect(page).to have_css("input#id_#{collection2.id}[checked='checked']") end end describe 
'edit collection' do let!(:collection) do Collection.create(title: 'collection title', description: 'collection description', members: [gf1, gf2]) { |c| c.apply_depositor_metadata(user.user_key) } end before do sign_in user visit '/dashboard/collections' end it "edits and update collection metadata" do # URL: /dashboard/collections expect(page).to have_content(collection.title) within("#document_#{collection.id}") do find('button.dropdown-toggle').click click_link('Edit Collection') end # URL: /collections/collection-id/edit expect(page).to have_field('collection_title', with: collection.title) expect(page).to have_field('collection_description', with: collection.description) new_title = "Altered Title" new_description = "Completely new Description text." creators = ["Dorje Trollo", "Vajrayogini"] fill_in('Title', with: new_title) fill_in('Abstract or Summary', with: new_description) fill_in('Creator', with: creators.first) within('.primary-actions') do click_button('Update Collection') end # URL: /collections/collection-id header = find('header') expect(header).to_not have_content(collection.title) expect(header).to_not have_content(collection.description) expect(header).to have_content(new_title) expect(header).to have_content(new_description) expect(page).to have_content(creators.first) end it "removes a file from a collection" do expect(page).to have_content(collection.title) within("#document_#{collection.id}") do first('button.dropdown-toggle').click click_link('Edit Collection') end expect(page).to have_field('collection_title', with: collection.title) expect(page).to have_field('collection_description', with: collection.description) expect(page).to have_content(gf1.title.first) expect(page).to have_content(gf2.title.first) within("#document_#{gf1.id}") do first('button.dropdown-toggle').click click_button('Remove from Collection') end expect(page).to have_content(collection.title) expect(page).to have_content(collection.description) expect(page).not_to have_content(gf1.title.first) expect(page).to have_content(gf2.title.first) end it "removes all files from a collection", js: true do expect(page).to have_content(collection.title) within('#document_' + collection.id) do first('button.dropdown-toggle').click click_link('Edit Collection') end expect(page).to have_field('collection_title', with: collection.title) expect(page).to have_field('collection_description', with: collection.description) expect(page).to have_content(gf1.title.first) expect(page).to have_content(gf2.title.first) first('input#check_all').click click_button('Remove From Collection') expect(page).to have_content(collection.title) expect(page).to have_content(collection.description) expect(page).not_to have_content(gf1.title.first) expect(page).not_to have_content(gf2.title.first) end end describe 'show pages of a collection' do let(:gfs) do (0..12).map do |x| GenericFile.create(title: ["title #{x}"]) do |f| f.apply_depositor_metadata(user.user_key) end end end before { sign_in user } let!(:collection) do Collection.create(title: 'collection title', description: 'collection description', members: gfs) { |c| c.apply_depositor_metadata(user.user_key) } end it "shows a collection with a listing of Descriptive Metadata and catalog-style search results" do visit '/dashboard/collections' expect(page).to have_content(collection.title) within('#document_' + collection.id) do click_link("Display all details of collection title") end expect(page).to have_css(".pager") end end end
38.22884
123
0.678475
ab27d3ba3820cc8f58c277b518d89916ba0c0e0e
1,639
# This file is auto-generated from the current state of the database. Instead # of editing this file, please use the migrations feature of Active Record to # incrementally modify your database, and then regenerate this schema definition. # # This file is the source Rails uses to define your schema when running `rails # db:schema:load`. When creating a new database, `rails db:schema:load` tends to # be faster and is potentially less error prone than running all of your # migrations from scratch. Old migrations may fail to apply correctly if those # migrations use external dependencies or application code. # # It's strongly recommended that you check this file into your version control system. ActiveRecord::Schema.define(version: 2021_01_29_224702) do create_table "countries", force: :cascade do |t| t.string "name" t.string "image" t.datetime "created_at", precision: 6, null: false t.datetime "updated_at", precision: 6, null: false end create_table "flavors", force: :cascade do |t| t.string "name" t.datetime "created_at", precision: 6, null: false t.datetime "updated_at", precision: 6, null: false end create_table "profiles", force: :cascade do |t| t.integer "votes" t.integer "country_id", null: false t.integer "flavor_id", null: false t.datetime "created_at", precision: 6, null: false t.datetime "updated_at", precision: 6, null: false t.index ["country_id"], name: "index_profiles_on_country_id" t.index ["flavor_id"], name: "index_profiles_on_flavor_id" end add_foreign_key "profiles", "countries" add_foreign_key "profiles", "flavors" end
39.97561
86
0.736425
f8dedc2a44e0fee691e2e3e8b3ec10bedf73627b
48
class JwtAuthentication VERSION = "0.1.1" end
12
23
0.729167
f7b46ddc20b157a86c598fb68e20ec6d476db8e3
2,349
# frozen_string_literal: true module Primer # Extend this module to test basic linter behavior. You must define the following methods: # # * `default_tag` - returns the default tag to be matched by the linter # * `default_class` - returns the class to be matched by the linter. Return `nil` if no class is necessary. # * `required_attributes` - returns the HTML attributes required for the linter to run. module BasicLinterSharedTests def test_warns_if_there_is_a_html_element @file = <<~HTML <#{default_tag} class="#{default_class}" #{required_attributes}> #{linter_class.name.demodulize} </#{default_tag}> HTML @linter.run(processed_source) refute_empty @linter.offenses end def test_suggests_ignoring_with_correct_number_of_elements @file = <<~HTML <#{default_tag} class="#{default_class}" #{required_attributes} invalid-attr> #{linter_class.name.demodulize} </#{default_tag}> <#{default_tag} class="#{default_class}" #{required_attributes} invalid-attr> #{linter_class.name.demodulize} </#{default_tag}> <a-random-tag> #{linter_class.name.demodulize} </a-random-tag> HTML assert_equal "<%# erblint:counter #{linter_class.name.demodulize} 2 %>\n#{@file}", corrected_content end def test_suggests_updating_the_number_of_ignored_elements @file = <<~HTML <%# erblint:counter #{linter_class.name.demodulize} 1 %> <#{default_tag} class="#{default_class}" #{required_attributes} invalid-attr> #{linter_class.name.demodulize} </#{default_tag}> <#{default_tag} class="#{default_class}" #{required_attributes} invalid-attr> #{linter_class.name.demodulize} </#{default_tag}> <a-random-tag> #{linter_class.name.demodulize} </a-random-tag> HTML @linter.run(processed_source) assert_equal "<%# erblint:counter #{linter_class.name.demodulize} 2 %>", offenses.last.context end def test_does_not_warn_if_wrong_tag @file = <<~HTML <a-random-tag class="#{default_class}" #{required_attributes}>#{linter_class.name.demodulize}</a-random-tag>" HTML @linter.run(processed_source) assert_empty @linter.offenses end end end
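# --- Illustrative usage sketch (added for clarity; not part of the original file) ---
# A hypothetical test class showing how these shared tests are pulled in. The
# FlashMigrationCounterTest name, the LinterTestCase base class, and the
# tag/class values are assumptions for illustration; the shared tests also rely
# on helpers such as @linter, linter_class, processed_source and
# corrected_content being supplied by that harness, which is not shown here.
# The module is included (rather than extended) so its test_* instance methods
# are picked up by the test runner.

class FlashMigrationCounterTest < LinterTestCase
  include Primer::BasicLinterSharedTests

  private

  def default_tag
    "div"
  end

  def default_class
    "flash"
  end

  def required_attributes
    ""
  end
end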
34.544118
117
0.659855
6a832049213d454619f67866cea33b6f662709a4
151
class AddPrimaryToPosition < ActiveRecord::Migration def change add_column :positions, :primary, :boolean, default: false, null: false end end
25.166667
74
0.761589
f7bc27e007d7f6be7676a85d29b6f47192acb82d
4,426
require 'jekyll' require 'coveralls' Coveralls.wear! # Requires supporting ruby files with custom matchers and macros, etc, # in spec/support/ and its subdirectories. Dir[File.expand_path('../support', __FILE__) + '/**/*.rb'] .each { |f| require f } RSpec.configure do |config| config.include FixturesHelpers config.extend FixturesHelpers config.disable_monkey_patching! config.before(:all) do if Gem::Version.new('2') <= Gem::Version.new(Jekyll::VERSION) Jekyll.logger.log_level = ENV['debug'].nil? ? :warn : :debug else Jekyll.logger.log_level = Jekyll::Stevenson::WARN end @template_root = File.expand_path(File.join(File.dirname(__FILE__), '..', 'templates')) FileUtils.touch Dir.glob(File.join(File.dirname(__FILE__), 'fixtures', '_attendease', 'data', '*.*')) #let!(:site) { build_site } #let!(:org_site) { build_site({ attendease: { mode: 'organization' } }) } #let!(:page) { Jekyll::Page.new(@site, File.join(File.dirname(__FILE__), 'fixtures'), '', 'page.html') } end def site @site end def dest @dest ||= fixtures_path.join('_site') end def page Jekyll::Page.new(site, File.join(File.dirname(__FILE__), 'fixtures'), '', 'page.html') end def test_dir(*subdirs) File.join(File.dirname(__FILE__), *subdirs) end def dest_dir(*subdirs) test_dir('dest', *subdirs) end def source_dir(*subdirs) test_dir('source', *subdirs) end def build_configs(overrides, base_hash = Jekyll::Configuration::DEFAULTS) Jekyll::Utils.deep_merge_hashes(base_hash, overrides) end def find_generator(site, generator_class) site.generators.select { |m| m.class == generator_class }.first end def find_page(site, page_class, lambda_matcher = false) site.pages.detect do |m| if m.class == page_class match = true if lambda_matcher match = lambda_matcher.call(m) end m if match end end end def site_configuration(overrides = {}) Jekyll::Utils.deep_merge_hashes(build_configs({ 'source' => fixtures_path.to_s, 'destination' => dest.to_s, 'attendease' => { 'api_host' => 'https://foobar/', 'has_sessions' => true, 'has_presenters' => true, 'has_sponsors' => true, 'has_rooms' => true, 'has_filters' => true, 'has_venues' => true, 'has_mappable' => true, 'environment' => 'test', 'live_mode' => true, 'locale' => 'en', 'source_id' => 'foobar', 'auth_host' => 'https://foobar.auth/', 'organization_url' => 'https://foobar.org/', 'organization_id' => 'batbaz', 'organization_name' => 'Foo Bar Widgets', 'available_portal_locales' => %w{ en fr it es de }, 'features' => { 'sentry' => true }, 'sentry_client_version' => '5.2.0', 'sentry_dsn' => 'https://[email protected]/baz' } }), overrides) end def build_site(config = {}) #dest.rmtree if dest.exist? @site = Jekyll::Site.new(site_configuration(config)) @site.process @site end def build_cms_site build_site({ 'attendease' => { 'jekyll33' => true } }) end def build_org_site build_site({ 'attendease' => { 'mode' => 'organization', 'jekyll33' => true } }) end config.after(:each) do |foo| puts "Removing #{dest}" dest.rmtree if dest.exist? fixtures_path.join('_attendease', 'templates').rmtree if File.exists?(fixtures_path.join('_attendease', 'templates')) fixtures_path.join('attendease_layouts').rmtree if File.exists?(fixtures_path.join('attendease_layouts')) unless @site.nil? 
Dir.glob(File.join(@site.source, '**', 'index.json')).map do |i| if (Pathname.new(i).parent == fixtures_path) FileUtils.rm i else FileUtils.rm_r Pathname.new(i).parent end end Dir.glob(File.join(@site.source, '**', 'index-private.json')).map do |i| if (Pathname.new(i).parent == fixtures_path) FileUtils.rm i else FileUtils.rm_r Pathname.new(i).parent end end end end end
30.951049
121
0.580886
03e4df472993ea7a8d24d479e506e9150abd4a83
125
class AddDeletedAtToUser < ActiveRecord::Migration[4.2] def change add_column :users, :deleted_at, :datetime end end
20.833333
55
0.752
bff15c97f1541195910014e63978e86d46f24936
232
describe command('chef-client -v') do target_version = '13.10.0' its('stdout') { should match "^Chef: #{target_version}" } end describe command('/opt/chef/embedded/bin/gem -v') do its('stdout') { should cmp >= '2.6.11' } end
25.777778
59
0.655172
39afe14122f783e7b9e5110f62aa5ff773230f2e
974
module Heartcheck module Controllers describe Essential do subject(:controller) { described_class.new } describe '#index' do subject(:index) { controller.index } let(:check_01) { { dummy1: { status: :ok }, time: 1100 } } let(:check_02) { { dummy2: { status: :ok }, time: 100 } } before do expect(Time).to receive(:now).and_return( # millisec time calc :p 0.1, 1.2, # (1.2 - 0.1) * 1000.0 = 1100 2.9, 3.0 # (3.0 - 2.9) * 1000.0 = 100 ) Heartcheck.setup do |monitor| monitor.add :dummy1 do |c| c.add_service(name: 'dummy1') end monitor.add :dummy2 do |c| c.add_service(name: 'dummy2') end end end it { is_expected.to eq(MultiJson.dump([check_01, check_02])) } end end end end
28.647059
70
0.472279
abd06e4ce50803e61b75063cb3fdddbfe2d4b888
3,583
# Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. module Azure::OperationsManagement::Mgmt::V2015_11_01_preview module Models # # The container for solution. # class Solution include MsRestAzure # @return [String] Resource ID. attr_accessor :id # @return [String] Resource name. attr_accessor :name # @return [String] Resource type. attr_accessor :type # @return [String] Resource location attr_accessor :location # @return [Hash{String => String}] Resource tags attr_accessor :tags # @return [SolutionPlan] Plan for solution object supported by the # OperationsManagement resource provider. attr_accessor :plan # @return [SolutionProperties] Properties for solution object supported # by the OperationsManagement resource provider. attr_accessor :properties # # Mapper for Solution class as Ruby Hash. # This will be used for serialization/deserialization. # def self.mapper() { client_side_validation: true, required: false, serialized_name: 'Solution', type: { name: 'Composite', class_name: 'Solution', model_properties: { id: { client_side_validation: true, required: false, read_only: true, serialized_name: 'id', type: { name: 'String' } }, name: { client_side_validation: true, required: false, read_only: true, serialized_name: 'name', type: { name: 'String' } }, type: { client_side_validation: true, required: false, read_only: true, serialized_name: 'type', type: { name: 'String' } }, location: { client_side_validation: true, required: false, serialized_name: 'location', type: { name: 'String' } }, tags: { client_side_validation: true, required: false, serialized_name: 'tags', type: { name: 'Dictionary', value: { client_side_validation: true, required: false, serialized_name: 'StringElementType', type: { name: 'String' } } } }, plan: { client_side_validation: true, required: false, serialized_name: 'plan', type: { name: 'Composite', class_name: 'SolutionPlan' } }, properties: { client_side_validation: true, required: false, serialized_name: 'properties', type: { name: 'Composite', class_name: 'SolutionProperties' } } } } } end end end end
28.212598
77
0.456042
871d79412d1dbc8aba158b3147de35327638f060
1,762
require 'helper'

class EphemeralBufferTest < Test::Unit::TestCase
  def setup
    Fluent::Test.setup
  end

  def test_configure_without_log_level
    buf = Fluent::EphemeralBuffer.new
    buf.configure({})
    refute buf.instance_eval{ @log }.is_a?(Fluent::PluginLogger)
  end

  def test_configure_with_log_level
    buf = Fluent::EphemeralBuffer.new
    buf.configure({'log_level' => 'error'})
    assert buf.instance_eval{ @log }.is_a?(Fluent::PluginLogger)
    assert_equal Fluent::Log::LEVEL_ERROR, buf.instance_eval{ @log }.level
  end

  def test_configure_with_buffer_log_level
    buf = Fluent::EphemeralBuffer.new
    buf.configure({'buffer_log_level' => 'error'})
    assert buf.instance_eval{ @log }.is_a?(Fluent::PluginLogger)
    assert_equal Fluent::Log::LEVEL_ERROR, buf.instance_eval{ @log }.level
  end

  def test_configure_with_log_level_and_buffer_log_level
    buf = Fluent::EphemeralBuffer.new
    buf.configure({'log_level' => 'error', 'buffer_log_level' => 'warn'})
    assert buf.instance_eval{ @log }.is_a?(Fluent::PluginLogger)
    assert_equal Fluent::Log::LEVEL_WARN, buf.instance_eval{ @log }.level
  end

  class DummyBufferedOutput < Fluent::BufferedOutput
    Fluent::Plugin.register_output('ephemeral_test', self)
    def write(chunk)
      raise "failed to write"
    end
  end

  CONFIG = %[
    buffer_type ephemeral
    disable_retry_limit true
  ]

  def create_driver(conf=CONFIG, tag='test')
    Fluent::Test::OutputTestDriver.new(DummyBufferedOutput, tag).configure(conf)
  end

  def test_emit
    d = create_driver
    d.instance.start
    d.emit({"a" => 1})
    d.instance.enqueue_buffer(true)
    d.instance.try_flush
    assert d.instance.instance_eval{ @buffer }.instance_eval{ @queue }.empty?
  end
end
25.911765
80
0.714529
38669344196f52eb5aaab38ce02589a5f8fcc9c4
889
class NoticeMailer < ActionMailer::Base default from: ENV['MAIL_SENDER'] # Subject can be set in your I18n file at config/locales/en.yml # with the following lookup: # # en.notice_mailer.sendmail_update.subject # def sendmail_update(user, article) @greeting = "Hi" @user = user @article = article mail( to: User.pluck(:email), subject: '[Lodge] [New] ' + @article.title ) end def sendmail_comment(user, article) @greeting = "Hi" @user = user @article = article mail( to: User.pluck(:email), subject: '[Lodge] [Comment] ' + @article.title ) end def sendmail_edit(user, article, update_history) @greeting = "Hi" @user = user @article = article @update_history = update_history mail( to: User.pluck(:email), subject: '[Lodge] [Edit] ' + @article.title ) end end
21.682927
65
0.616423
2659d0efe56cd01ff71379e5af23af96d11a5f2c
590
# frozen_string_literal: true require 'readymade/response' module Readymade class Action class NonKeywordArgumentsError < StandardError; end def self.call(*args, &block) new(*args, &block).call end attr_reader :args, :data def initialize(args = {}) raise NonKeywordArgumentsError if args.present? && !args.is_a?(Hash) @args = @data = args @args.each do |name, value| instance_variable_set("@#{name}", value) end end def call; end def response(status, *args) Response.new(status, *args) end end end
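# --- Illustrative usage sketch (added for clarity; not part of the original file) ---
# A hypothetical subclass showing the calling convention the class above sets
# up: keyword-style args become instance variables, and #response wraps the
# outcome in a Readymade::Response. The CreateOrder name and its logic are
# made-up examples.

class CreateOrder < Readymade::Action
  def call
    return response(:invalid, errors: ['amount must be positive']) unless @amount.to_i.positive?

    response(:success, order: { customer: @customer, amount: @amount })
  end
end

# CreateOrder.call(customer: 'acme', amount: 100) # => Readymade::Response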
19.032258
74
0.638983
083d5552551242665c6edf6e74213b63fcf535ff
13,089
describe ReviewResponseMap do let(:team) { build(:assignment_team, id: 1, name: 'team no name', assignment: assignment, users: [student], parent_id: 1) } let(:team1) { build(:assignment_team, id: 2, name: 'team has name', assignment: assignment, users: [student]) } let(:review_response_map) { build(:review_response_map, id: 1, assignment: assignment, reviewer: participant, reviewee: team) } let(:review_response_map1) do build :review_response_map, id: 2, assignment: assignment, reviewer: participant1, reviewee: team1, reviewed_object_id: 1, response: [response], calibrate_to: 0 end let(:feedback) { FeedbackResponseMap.new(id: 1, reviewed_object_id: 1, reviewer_id: 1, reviewee_id: 1) } let(:participant) { build(:participant, id: 1, parent_id: 1, user: student) } let(:participant1) { build(:participant, id: 2, parent_id: 2, user: student1) } let(:assignment) { build(:assignment, id: 1, name: 'Test Assgt', rounds_of_reviews: 2) } let(:assignment1) { build(:assignment, id: 2, name: 'Test Assgt', rounds_of_reviews: 1) } let(:response) { build(:response, id: 1, map_id: 1, round: 1, response_map: review_response_map, is_submitted: true) } let(:response1) { build(:response, id: 2, map_id: 1, round: 2, response_map: review_response_map) } let(:response2) { build(:response, id: 3, map_id: 1, round: nil, response_map: review_response_map, is_submitted: true) } let(:metareview_response_map) { build(:meta_review_response_map, reviewed_object_id: 1) } let(:student) { build(:student, id: 1, name: 'name', fullname: 'no one', email: '[email protected]') } let(:student1) { build(:student, id: 2, name: "name1", fullname: 'no one', email: '[email protected]') } let(:questionnaire) { Questionnaire.new(id: 1, type: 'ReviewQuestionnaire') } before(:each) do allow(review_response_map).to receive(:response).and_return(response) end it '#questionnaire' do allow(assignment).to receive(:review_questionnaire_id).and_return(1) allow(Questionnaire).to receive(:find_by).with(id: 1).and_return(questionnaire) expect(review_response_map.questionnaire(1)).to eq(questionnaire) end it '#get_title' do expect(review_response_map.get_title).to eq("Review") end it '#delete' do allow(Response).to receive(:find).and_return(response) allow(FeedbackResponseMap).to receive(:where).with(reviewed_object_id: 1).and_return([feedback]) allow(MetareviewResponseMap).to receive(:where).and_return([metareview_response_map]) expect(review_response_map.delete).to equal(review_response_map) end it '#export_fields' do expect(ReviewResponseMap.export_fields('options')).to eq(["contributor", "reviewed by"]) end it '#export' do csv = [] parent_id = 1 options = nil allow(ReviewResponseMap).to receive(:where).with(reviewed_object_id: 1).and_return([review_response_map, review_response_map1]) expect(ReviewResponseMap.export(csv, parent_id, options)).to eq([review_response_map1, review_response_map]) end it '#import' do row_hash = {reviewee: "name", reviewers: ["name1"]} session = nil assignment_id = 1 # when reviewee user = nil allow(User).to receive(:find_by).and_return(nil) # when reviewee user exists but reviewee user is not a participant in this assignment allow(User).to receive(:find_by).with(name: "name").and_return(student) allow(AssignmentParticipant).to receive(:find_by).with(user_id: 1, parent_id: 1).and_return(nil) # when reviewee user exists and reviewee user is a participant in this assignment allow(AssignmentParticipant).to receive(:find_by).with(user_id: 1, parent_id: 1).and_return(participant) allow(AssignmentTeam).to 
    receive(:team).with(participant).and_return(team)
    ## when reviewer user doesn't exist
    allow(User).to receive(:find_by).with(name: "name1").and_return(nil)
    allow(Team).to receive(:find_by).with(name: "name", parent_id: 1).and_return(team)
    expect { ReviewResponseMap.import(row_hash, session, 1) }.to raise_error(ArgumentError, "Cannot find reviewer user.")
    ## when reviewer user exist
    allow(User).to receive(:find_by).with(name: "name1").and_return(student1)
    ### when reviewer user is not a participant in this assignment.
    allow(AssignmentParticipant).to receive(:find_by).with(user_id: 2, parent_id: 1).and_return(nil)
    expect { ReviewResponseMap.import(row_hash, session, 1) }.to raise_error(ArgumentError, "Reviewer user is not a participant in this assignment.")
    ### when reviewer user is a participant in this assignment.
    allow(AssignmentParticipant).to receive(:find_by).with(user_id: 2, parent_id: 1).and_return(participant1)
    allow(ReviewResponseMap).to receive(:find_or_create_by)
      .with(reviewed_object_id: 1, reviewer_id: 2, reviewee_id: 1, calibrate_to: false)
      .and_return(review_response_map)
    expect(ReviewResponseMap.import(row_hash, session, 1)).to eq(["name1"])
    # when reviewee_team = nil
    allow(AssignmentTeam).to receive(:team).with(participant).and_return(nil)
    allow(AssignmentTeam).to receive(:create).and_return(double('team', id: 1))
    allow(TeamsUser).to receive(:create).with(team_id: 1, user_id: 1).and_return(double('teams_users', id: 1, team_id: 1, user_id: 1))
    allow(TeamNode).to receive(:create).with(parent_id: assignment_id, node_object_id: 1).and_return(double('team_node', id: 1, parent_id: 1, node_object_id: 1))
    allow(TeamUserNode).to receive(:create).with(parent_id: 1, node_object_id: 1).and_return(double('team_user_node', id: 1, parent_id: 1, node_object_id: 1))
    allow(User).to receive(:find_by).with(name: "name1").and_return(student1)
    allow(AssignmentParticipant).to receive(:find_by).with(user_id: 2, parent_id: 1).and_return(participant1)
    allow(ReviewResponseMap).to receive(:find_or_create_by)
      .with(reviewed_object_id: 1, reviewer_id: 1, reviewee_id: 1, calibrate_to: false).and_return(review_response_map)
    expect(ReviewResponseMap.import(row_hash, session, 1)).to eq(["name1"])
  end

  it '#show_feedback' do
    allow(review_response_map).to receive(:response).and_return([response])
    allow(Response).to receive(:find).and_return(response)
    allow(FeedbackResponseMap).to receive(:find_by).with(reviewed_object_id: 1).and_return(feedback)
    allow(feedback).to receive(:response).and_return([response])
    expect(review_response_map.show_feedback(response)).to eq("<table width=\"100%\"><tr><td align=\"left\" width=\"70%\"><b>Review </b>"\
      "&nbsp;&nbsp;&nbsp;<a href=\"#\" name= \"review_1Link\" onClick=\"toggleElement('review_1','review');return false;\">"\
      "hide review</a></td><td align=\"left\"><b>Last Reviewed:</b><span>Not available</span></td></tr></table><table id=\"review_1\""\
      " class=\"table table-bordered\"><tr><td><b>"\
      "Additional Comment: </b></td></tr></table>")
  end

  it '#metareview_response_maps' do
    allow(Response).to receive(:where).with(map_id: 1).and_return([response])
    allow(MetareviewResponseMap).to receive(:where).with(reviewed_object_id: 1).and_return([metareview_response_map])
    expect(review_response_map.metareview_response_maps).to eq([metareview_response_map])
  end

  it '#get_responses_for_team_round' do
    allow(Team).to receive(:find).and_return(team)
    round = 1
    allow(ResponseMap).to receive(:where).with(reviewee_id: team.id, type: "ReviewResponseMap").and_return([review_response_map1])
    expect(ReviewResponseMap.get_responses_for_team_round(team, 1)).to eq([response])
  end

  it '#final_versions_from_reviewer' do
    reviewer_id = 1
    allow(ReviewResponseMap).to receive(:where).with(reviewer_id: 1).and_return([review_response_map])
    allow(Participant).to receive(:find).with(1).and_return(participant)
    allow(participant).to receive(:parent_id).and_return(1)
    allow(Assignment).to receive(:find).with(1).and_return(assignment)
    allow(Response).to receive(:where).with(map_id: 1, round: 1).and_return([response])
    allow(assignment).to receive(:review_questionnaire_id).with(1).and_return(1)
    allow(Response).to receive(:where).with(map_id: 1, round: 2).and_return([response1])
    allow(assignment).to receive(:review_questionnaire_id).with(2).and_return(1)
    expect(ReviewResponseMap.final_versions_from_reviewer(1))
      .to eq("review round1": {questionnaire_id: 1, response_ids: [1]}, "review round2": {questionnaire_id: 1, response_ids: [2]})
  end

  it '#review_response_report' do
    id = 1
    type = "MetareviewResponseMap"
    reviewer_id = 1
    user_ids = []
    review_user = student
    allow(Participant).to receive(:find).with(1).and_return(participant)
    allow(Assignment).to receive(:find).with(1).and_return(assignment)
    allow(User).to receive_message_chain(:select, :where).and_return([student])
    allow(AssignmentParticipant).to receive(:where).and_return([participant])
    expect(ReviewResponseMap.review_response_report(id, Assignment.find(Participant.find(reviewer_id).parent_id), type, review_user)).to eq([participant])
    review_user = nil
    allow(ResponseMap).to receive_message_chain(:select, :where).and_return([review_response_map])
    allow([review_response_map]).to receive(:reviewer_id).and_return(1)
    allow(AssignmentParticipant).to receive(:find).with(1).and_return([participant])
    allow(Participant).to receive(:sort_by_name).and_return([participant])
    expect(ReviewResponseMap.review_response_report(id, Assignment.find(Participant.find(reviewer_id).parent_id), type, review_user)).to eq([participant])
  end

  it '#email' do
    reviewer_id = 1
    allow(Participant).to receive(:find).with(1).and_return(participant)
    allow(Assignment).to receive(:find).with(1).and_return(assignment)
    allow(AssignmentTeam).to receive(:find).with(1).and_return(team)
    allow(AssignmentTeam).to receive(:users).and_return(student)
    allow(User).to receive(:find).with(1).and_return(student)
    review_response_map.reviewee_id = 1
    defn = {body: {type: "Peer Review", obj_name: "Test Assgt", first_name: "no one", partial_name: "new_submission"}, to: "[email protected]"}
    expect { review_response_map.email(defn, participant, Assignment.find(Participant.find(reviewer_id).parent_id)) }
      .to change { ActionMailer::Base.deliveries.count }.by 1
  end

  it '#prepare_final_review_versions' do
    review_final_versions = {}
    reviewer_id = 1
    allow(metareview_response_map).to receive(:id).and_return(1)
    allow(Participant).to receive(:find).with(1).and_return(participant)
    allow(Assignment).to receive(:find).with(1).and_return(assignment)
    allow(MetareviewResponseMap).to receive(:where).with(reviewed_object_id: 1).and_return([metareview_response_map])
    allow(Response).to receive(:where).with(map_id: 1, round: 1).and_return([response])
    allow(assignment).to receive(:review_questionnaire_id).with(1).and_return(1)
    allow(Response).to receive(:where).with(map_id: 1, round: 2).and_return([response1])
    allow(assignment).to receive(:review_questionnaire_id).with(2).and_return(1)
    current_assignment = Assignment.find(Participant.find(reviewer_id).parent_id)
    meta_review_response_maps = MetareviewResponseMap.where(reviewed_object_id: 1)
    expect(ReviewResponseMap.prepare_final_review_versions(current_assignment, meta_review_response_maps))
      .to eq("review round1": {questionnaire_id: 1, response_ids: [1]}, "review round2": {questionnaire_id: 1, response_ids: [2]})
    # when round = nil
    reviewer_id = 2
    allow(Participant).to receive(:find).with(2).and_return(participant1)
    allow(Assignment).to receive(:find).with(2).and_return(assignment1)
    allow(MetareviewResponseMap).to receive(:where).with(reviewed_object_id: 1).and_return([metareview_response_map])
    allow(assignment).to receive(:review_questionnaire_id).with(nil).and_return(1)
    allow(Response).to receive(:where).with(map_id: 1).and_return([response2])
    current_assignment = Assignment.find(Participant.find(reviewer_id).parent_id)
    meta_review_response_maps = MetareviewResponseMap.where(reviewed_object_id: 1)
    expect(ReviewResponseMap.prepare_final_review_versions(current_assignment, meta_review_response_maps))
      .to eq(review: {questionnaire_id: nil, response_ids: [3]})
  end

  it '#prepare_review_response' do
    review_final_versions = {}
    review_response_map.id = 1
    round = 1
    maps = [review_response_map]
    allow(Assignment).to receive(:find).with(1).and_return(assignment)
    allow(Response).to receive(:where).with(map_id: 1, round: 1).and_return([response])
    expect(ReviewResponseMap.prepare_review_response(assignment, maps, review_final_versions, round)).to eq([1])
    round = nil
    allow(Assignment).to receive(:find).with(1).and_return(assignment)
    allow(Response).to receive(:where).with(map_id: 1).and_return([response2])
    expect(ReviewResponseMap.prepare_review_response(assignment, maps, review_final_versions, round)).to eq([3])
  end
end
61.163551
161
0.734357
790f6fa21198cdd81d9cb1baa26ab3892f113412
186
class SetSubscriptionStatusDefault < ActiveRecord::Migration[6.0]
  def change
    change_column :subscriptions, :status, :subscription_status, default: "pending", null: false
  end
end
31
96
0.77957
f7e4a7070dad18435e5e030a923d69038c6ed2d9
106
class RegistrationsController < Devise::RegistrationsController
  clear_respond_to
  respond_to :json
end
21.2
63
0.849057
7a3d19900ce51945c22baa14e09ef156866ea69b
1,376
require 'spec_helper'
require 'json'

describe Stackr::TemplateHelpers do

  describe 'find_in_env_map' do
    it 'returns a Fn::FindInMap fragment' do
      expected = {
        'Fn::FindInMap': [
          'EnvironmentMap',
          { Ref: 'Environment' },
          'foo'
        ]
      }
      expect(find_in_env_map('foo')).to eq expected
    end
  end

  describe 'find_in_env' do
    before(:each) do
      load_environment_map(includes_path)
    end

    it 'handles a map with > 64 attribures' do
      ENV['ENVIRONMENT'] = 'dev'
      expect(find_in_env('dev65')).to eq 'dev65'
    end

    it 'returns the string' do
      ENV['ENVIRONMENT'] = 'dev'
      expect(find_in_env('dev65')).to eq 'dev65'
    end

    it 'raises an exception if ENVIRONMENT not set' do
      ENV.delete 'ENVIRONMENT'
      expect { find_in_env('dev1') }.to raise_error(Stackr::EnvironmentMissingError)
    end
  end

  describe 'include_file' do
    it 'renders file into Fn::Join fragment' do
      filepath = File.join(includes_path, 'hello_world')
      expect(include_file(filepath)).to eq({'Fn::Join': ['', ["hello world\n"]]})
    end

    it 'interpolates variables' do
      filepath = File.join(includes_path, 'hello_x_world')
      expect(include_file(filepath, {x: 'foo'})).to eq({'Fn::Join': ['', ["hello ", "foo", " world\n"]]})
    end
  end
end
25.481481
105
0.606105
6a3f1a06a89cd8bf67def2a8088084b5aad85cf3
3,183
class Course < ActiveRecord::Base
  attr_accessible :name, :code, :subject, :number, :description

  has_many :prerequisite_relationships, :foreign_key => "prereq_for_id",
           :class_name => "Prerequisite", :dependent => :destroy
  has_many :prerequisites, :through => :prerequisite_relationships, :source => :prereq
  has_many :documents

  def add_prerequisite!(course)
    prerequisite_relationships.create!(:prereq_id => course.id)
  end

  def remove_prerequisite!(course)
    prerequisite_relationships.find_by_prereq_id(course).destroy
  end

  def has_prerequisite?(course)
    prerequisite_relationships.find_by_prereq_id(course)
  end

  def last_update_time
    time = updated_at
    documents.each do |doc|
      time = doc.updated_at if doc.updated_at > time
    end
    time
  end

  def Course.split_code(code)
    code.upcase!
    parts = code.match(/^([A-Z]+)(.+)/)
    if parts
      return [parts[1], parts[2]]
    end
  end

  def code
    unless subject.nil? or number.nil?
      "#{subject.upcase}#{number}"
    end
  end

  def code=(c)
    parts = Course.split_code(c)
    if parts
      self.subject = parts[0]
      self.number = parts[1]
    end
  end

  def Course.find_by_code(c)
    parts = split_code(c)
    if parts
      return Course.where(:subject => parts[0], :number => parts[1]).first
    end
  end

  def Course.create_from_ryerson_calendar(calendar)
    courses = parse_ryerson_calendar(calendar)
    courses.each do |c|
      if existing = find_by_code("#{c[:subject]}#{c[:number]}")
        existing.update_attributes(:name => c[:name], :description => c[:description])
      else
        create!(:name => c[:name], :subject => c[:subject], :number => c[:number], :description => c[:description])
      end
    end
    courses.each do |c|
      course = find_by_code("#{c[:subject]}#{c[:number]}")
      c[:prereqs].each do |p|
        if prereq = find_by_code(p)
          course.add_prerequisite!(prereq) unless course.has_prerequisite?(prereq)
        end
      end
    end
  end

  def Course.parse_ryerson_calendar(calendar)
    courses = []
    divs = calendar.xpath('//div/a').select{|a| a.to_s =~ /\d+/}.collect{|a| a.parent}
    divs.each do |div|
      header = div.xpath("table/tr/td//span//span").map{|s| s.text}
      c = { :subject => header[0], :number => header[1], :name => header[2] }
      div.xpath("table/tr/td/span").each do |content|
        id = content.attr 'id'
        text = content.text
        case id
        when /CourseDescription/
          c[:description] = text
        when /CourseAttribute/
          c[:attributes] = text
        when /Components/
          c[:components] = text
        when /CoursePrereq/
          c[:prereqs] = []
          content.xpath("a").each{|a| c[:prereqs] << a.text.gsub(' ','')}
        when /Consent/
          c[:consent] = text
        when /CourseWeight/
          c[:weight] = text.match(/(\d+.?\d*)/)[1]
        when /BillingUnits/
          c[:billing_units] = text.match(/(\d+.?\d*)/)[1]
        end
      end
      courses << c
    end
    courses
  end
end
28.168142
115
0.588439
6a7a94a88f76401326a490f0267a4fc77467cd23
43
module FontAssets
  VERSION = "0.1.11"
end
10.75
20
0.697674
28fe2e6f1908c48f0b76fb41959b7a00a1ee34dd
338
module MongoMetrics
  class MetricsController < ApplicationController
    respond_to :html, :json
    respond_to :csv, only: :index

    def index
      @metrics = Metric.all
      respond_with(@metrics)
    end

    def destroy
      @metric = Metric.find(params[:id])
      @metric.destroy
      respond_with(@metric)
    end
  end
end
18.777778
49
0.653846
ac5b9dac6cc7acc96b9b45d81e2bf114791ac515
395
class PlainAccount < Account
  EMAIL_REGEX = /([\w\.%\+\-]+)@([\w\-]+\.)+([\w]{2,})/i

  include Mongoid::Document
  include Mongoid::Timestamps
  include ActiveModel::SecurePassword

  attr_reader :password_confirmation

  field :password_digest

  has_secure_password

  validates :email, presence: true, uniqueness: true, format: { with: EMAIL_REGEX }, if: Proc.new {|a| a.provider.nil?}
end
24.6875
119
0.696203
ac6ac03631cedf9c2b47f495e168f2e4c239182a
720
module LanguageTool
  module Actions
    class Check < Base
      REQUIRED_PARAMETERS = %i(text language).freeze
      OPTIONAL_PARAMETERS = %i(mother_tongue preferred_variants enabled_rules
                               disabled_rules enabled_categories disabled_categories
                               enabled_only).freeze
      PARAMETERS = (REQUIRED_PARAMETERS + OPTIONAL_PARAMETERS).freeze

      def run
        response = RestClient.get uri('check'), params: query
        $languagetool_last_response = response
        Resources::Matches.new JSON.parse(response.body).merge('original' => options[:text])
      end

      protected

      def query
        normalize_query(PARAMETERS.map { |k| [k, options[k]] })
      end
    end
  end
end
31.304348
92
0.672222
18de52c35cccc0be9e5c457fd73bae914b661a82
185
require 'rails/engine'

module Babel
  module Es6
    module Rails
      class Engine < ::Rails::Engine
        config.app_generators.javascript_engine :es6
      end
    end
  end
end
15.416667
52
0.664865
91dda9bada96284c80ede2050d5de5226319f195
396
#
# Cookbook Name:: cpe_preferencesecurity
# Recipe:: default
#
# vim: syntax=ruby:expandtab:shiftwidth=2:softtabstop=2:tabstop=2
#
# Copyright (c) 2017-present, Pinterest, Inc.
# All rights reserved.
#
# This source code is licensed under the Apache 2.0 license found in the
# LICENSE file in the root directory of this source tree.
#

cpe_preferencesecurity 'Apply Preference Security profile'
26.4
72
0.767677
03dd93aea61bbff7dea4d26c91d7f17e119699df
295
# frozen_string_literal: true

class ChangesetSerializer < ActiveModel::Serializer
  root false
  attributes :length_before, :length_after, :changes

  def length_before
    object.length_before
  end

  def length_after
    object.length_after
  end

  def changes
    object.changes
  end
end
15.526316
52
0.759322
79ac4bab9b69f92ebbff2b888adf5932b708d5ef
9,371
require "rails_helper" RSpec.describe Jobseekers::AlertMailer do include DatesHelper include OrganisationHelper include ERB::Util let(:body) { mail.body.raw_source } let(:email) { "[email protected]" } let(:frequency) { :daily } let(:search_criteria) { { keyword: "English" } } let(:subscription) do subscription = Subscription.create(email: email, frequency: frequency, search_criteria: search_criteria) # The hashing algorithm uses a random initialization vector to encrypt the token, # so is different every time, so we stub the token to be the same every time, so # it's clearer what we're testing when we test the unsubscribe link token = subscription.token allow_any_instance_of(Subscription).to receive(:token) { token } subscription end let(:school) { create(:school) } let(:mail) { described_class.alert(subscription.id, vacancies.pluck(:id)) } # The array of vacancies is set to length 1 because the order varies, making it hard to test url parameters. let(:vacancies) { VacanciesPresenter.new(create_list(:vacancy, 1, :published)).decorated_collection } let(:campaign_params) { { utm_source: "a_unique_identifier", utm_medium: "email", utm_campaign: "#{frequency}_alert" } } let(:relevant_job_alert_feedback_url) do new_subscription_job_alert_feedback_url( subscription.token, params: { job_alert_feedback: { relevant_to_user: true, job_alert_vacancy_ids: vacancies.pluck(:id), search_criteria: subscription.search_criteria } }, ) end let(:irrelevant_job_alert_feedback_url) do new_subscription_job_alert_feedback_url( subscription.token, params: { job_alert_feedback: { relevant_to_user: false, job_alert_vacancy_ids: vacancies.pluck(:id), search_criteria: subscription.search_criteria } }, ) end let(:expected_data) do { notify_template: notify_template, email_identifier: anonymised_form_of(email), user_anonymised_jobseeker_id: user_anonymised_jobseeker_id, user_anonymised_publisher_id: nil, subscription_identifier: anonymised_form_of(subscription.id), subscription_frequency: frequency, uid: "a_unique_identifier", } end before do # Stub the uid so that we can test links more easily allow_any_instance_of(ApplicationMailer).to receive(:uid).and_return("a_unique_identifier") vacancies.each { |vacancy| vacancy.organisation_vacancies.create(organisation: school) } subscription.create_alert_run end context "when frequency is daily" do let(:notify_template) { NOTIFY_SUBSCRIPTION_DAILY_TEMPLATE } let(:frequency) { "daily" } it "sends a job alert email" do expect(mail.subject).to eq(I18n.t("jobseekers.alert_mailer.alert.subject")) expect(mail.to).to eq([subscription.email]) expect(body).to include(I18n.t("jobseekers.alert_mailer.alert.summary.daily", count: 1)) .and include(vacancies.first.job_title) .and include(vacancies.first.job_title) .and include(job_url(vacancies.first, **campaign_params)) .and include(vacancy_full_job_location(vacancies.first)) .and include(I18n.t("jobseekers.alert_mailer.alert.salary", salary: vacancies.first.salary)) .and include(I18n.t("jobseekers.alert_mailer.alert.working_pattern", working_pattern: vacancies.first.working_patterns)) .and include(I18n.t("jobseekers.alert_mailer.alert.closing_date", closing_date: expiry_date_and_time(vacancies.first))) .and include(I18n.t("jobseekers.alert_mailer.alert.title")) .and include(I18n.t("subscriptions.intro")) .and include("Keyword: English") .and include(I18n.t("jobseekers.alert_mailer.alert.alert_frequency", frequency: subscription.frequency)) .and include(I18n.t("jobseekers.alert_mailer.alert.edit_link_text")) .and 
include(edit_subscription_url(subscription.token, **campaign_params)) .and include(I18n.t("jobseekers.alert_mailer.alert.feedback.heading")) .and match(/(\[#{I18n.t('jobseekers.alert_mailer.alert.feedback.relevant_link_text')}\]\(.+true)/) .and include(relevant_job_alert_feedback_url) .and match(/(\[#{I18n.t('jobseekers.alert_mailer.alert.feedback.irrelevant_link_text')}\]\(.+false)/) .and include(irrelevant_job_alert_feedback_url) .and include(I18n.t("jobseekers.alert_mailer.alert.feedback.reason")) .and include(unsubscribe_subscription_url(subscription.token, **campaign_params)) end context "when the subscription email matches a jobseeker account" do let(:jobseeker) { create(:jobseeker, email: email) } let(:user_anonymised_jobseeker_id) { anonymised_form_of(jobseeker.id) } it "triggers a `jobseeker_subscription_alert` email event with the anonymised jobseeker id" do expect { mail.deliver_now }.to have_triggered_event(:jobseeker_subscription_alert).with_data(expected_data) end end context "when the subscription email does not match a jobseeker account" do let(:user_anonymised_jobseeker_id) { nil } it "triggers a `jobseeker_subscription_alert` email event without the anonymised jobseeker id" do expect { mail.deliver_now }.to have_triggered_event(:jobseeker_subscription_alert).with_data(expected_data) end end end context "when frequency is weekly" do let(:notify_template) { NOTIFY_SUBSCRIPTION_WEEKLY_TEMPLATE } let(:frequency) { "weekly" } it "sends a job alert email" do expect(mail.subject).to eq(I18n.t("jobseekers.alert_mailer.alert.subject")) expect(mail.to).to eq([subscription.email]) expect(body).to include(I18n.t("jobseekers.alert_mailer.alert.summary.weekly", count: 1)) .and include(vacancies.first.job_title) .and include(vacancies.first.job_title) .and include(job_url(vacancies.first, **campaign_params)) .and include(vacancy_full_job_location(vacancies.first)) .and include(I18n.t("jobseekers.alert_mailer.alert.salary", salary: vacancies.first.salary)) .and include(I18n.t("jobseekers.alert_mailer.alert.working_pattern", working_pattern: vacancies.first.working_patterns)) .and include(I18n.t("jobseekers.alert_mailer.alert.closing_date", closing_date: expiry_date_and_time(vacancies.first))) .and include(I18n.t("jobseekers.alert_mailer.alert.title")) .and include(I18n.t("subscriptions.intro")) .and include("Keyword: English") .and include(I18n.t("jobseekers.alert_mailer.alert.alert_frequency", frequency: subscription.frequency)) .and include(I18n.t("jobseekers.alert_mailer.alert.edit_link_text")) .and include(edit_subscription_url(subscription.token, **campaign_params)) .and include(I18n.t("jobseekers.alert_mailer.alert.feedback.heading")) .and match(/(\[#{I18n.t('jobseekers.alert_mailer.alert.feedback.relevant_link_text')}\]\(.+true)/) .and include(relevant_job_alert_feedback_url) .and match(/(\[#{I18n.t('jobseekers.alert_mailer.alert.feedback.irrelevant_link_text')}\]\(.+false)/) .and include(irrelevant_job_alert_feedback_url) .and include(I18n.t("jobseekers.alert_mailer.alert.feedback.reason")) .and include(unsubscribe_subscription_url(subscription.token, **campaign_params)) end context "when the subscription email matches a jobseeker account" do let(:jobseeker) { create(:jobseeker, email: email) } let(:user_anonymised_jobseeker_id) { anonymised_form_of(jobseeker.id) } it "triggers a `jobseeker_subscription_alert` email event with the anonymised jobseeker id" do expect { mail.deliver_now }.to have_triggered_event(:jobseeker_subscription_alert).with_data(expected_data) end end context 
"when the subscription email does not match a jobseeker account" do let(:user_anonymised_jobseeker_id) { nil } it "triggers a `jobseeker_subscription_alert` email event without the anonymised jobseeker id" do expect { mail.deliver_now }.to have_triggered_event(:jobseeker_subscription_alert).with_data(expected_data) end end end describe "create account section" do context "when the subscription email matches a jobseeker account" do let!(:jobseeker) { create(:jobseeker, email: email) } it "does not display create account section" do expect(body).not_to include(I18n.t("jobseekers.alert_mailer.alert.create_account.heading")) end end context "when the subscription email does not match a jobseeker account" do it "displays create account section" do expect(body).to include(I18n.t("jobseekers.alert_mailer.alert.create_account.heading")) end end end end
53.244318
138
0.683171
b970fde0d29375629d01a1226ed73a0c20c1711b
957
Pod::Spec.new do |s|
  s.name             = "AFHTTPSessionManager-AFUniqueGET"
  s.version          = "0.5.1"
  s.summary          = "Your GET request is in progress, better reuse it than create a new one, right?"
  s.description      = <<-DESC
This category of AFHTTPSessionManager adds the `uniqueGET` method, which is useful if you want to avoid making multiple calls at the same resource if the operation is still in progress.
We avoid this by checking on-going operations before creating new ones.
                       DESC
  s.homepage         = "https://github.com/3lvis/AFHTTPSessionManager-AFUniqueGET"
  s.license          = 'MIT'
  s.author           = { "Elvis Nuñez" => "[email protected]" }
  s.source           = { :git => "https://github.com/3lvis/AFHTTPSessionManager-AFUniqueGET.git", :tag => s.version.to_s }
  s.social_media_url = 'https://twitter.com/3lvis'

  s.platform     = :ios, '7.0'
  s.requires_arc = true
  s.source_files = 'Source/**/*'
  s.dependency 'AFNetworking'
end
47.85
185
0.684431
21969e0807e0da93723b764146faae5ed7bec683
1,440
module EwayRapid
  module Message
    module Convert
      module Response
        class DirectSettlementToSettlement

          # @param [DirectSettlementSearchResponse] response
          # @return [SettlementSearchResponse]
          def do_convert(response)
            settlement_search_response = SettlementSearchResponse.new

            if response.settlement_summaries && response.settlement_summaries.length > 0
              summary_convert = InternalSummaryToSummary.new
              settlement_search_response.settlement_summaries = []
              response.settlement_summaries.each {|summary|
                obj = summary_convert.do_convert(summary)
                settlement_search_response.settlement_summaries.push(obj)
              }
            end

            if response.settlement_transactions && response.settlement_summaries.length > 0
              settlement_convert = InternalSettlementToSettlement.new
              settlement_search_response.settlement_transactions = []
              response.settlement_transactions.each {|transaction|
                obj = settlement_convert.do_convert(transaction)
                settlement_search_response.settlement_transactions.push(obj)
              }
            end

            settlement_search_response.errors = response.error.split(/\s*,\s*/) if response.error
            settlement_search_response
          end
        end
      end
    end
  end
end
37.894737
97
0.655556
399e481971fc80aeed8feef63c2fd19e8da88c49
2,130
require 'singleton'
require 'monitor'
require 'thread_safe'

module RablRails
  class Library
    include Singleton

    UnknownFormat = Class.new(StandardError)
    RENDERER_MAP = {
      json: Renderers::JSON,
      xml: Renderers::XML,
      ruby: Renderers::Hash,
      plist: Renderers::PLIST
    }.freeze

    def initialize
      @cached_templates = ThreadSafe::Cache.new
      @mutex = Monitor.new
    end

    def reset_cache!
      @cached_templates = ThreadSafe::Cache.new
    end

    def get_rendered_template(source, view, locals = nil)
      compiled_template = compile_template_from_source(source, view)
      format = view.lookup_context.rendered_format || :json
      raise UnknownFormat, "#{format} is not supported in rabl-rails" unless RENDERER_MAP.key?(format)
      RENDERER_MAP[format].render(compiled_template, view, locals)
    end

    def compile_template_from_source(source, view)
      if RablRails.configuration.cache_templates
        path = view.instance_variable_get(:@virtual_path)
        synchronized_compile(path, source, view)
      else
        compile(source, view)
      end
    end

    def compile_template_from_path(path, view)
      if RablRails.configuration.cache_templates
        synchronized_compile(path, nil, view)
      else
        source = fetch_source(path, view)
        compile(source, view)
      end
    end

    private

    def synchronized_compile(path, source, view)
      @cached_templates[path] || @mutex.synchronize do
        # Any thread holding this lock will be compiling the template needed
        # by the threads waiting. So re-check the template presence to avoid
        # re-compilation
        @cached_templates.fetch(path) do
          source ||= fetch_source(path, view)
          @cached_templates[path] = compile(source, view)
        end
      end
    end

    def compile(source, view)
      Compiler.new(view).compile_source(source)
    end

    def fetch_source(path, view)
      template = view.lookup_context.find_template(path, [], false)
      source = template.source || File.binread(template.try(:identifier))
    end
  end
end
28.4
102
0.673239
4ac85c6fc3d7bf369b26d17c9ba3ddf696ca45b2
807
Pod::Spec.new do |s|
  s.name         = "NUI"
  s.version      = "0.4.0"
  s.summary      = "Style iOS apps with a stylesheet, similar to CSS."
  s.description  = "NUI is a drop-in UI kit for iOS that lets you style UI elements using a stylesheet, similar to CSS. It lets you style an entire app in minutes."
  s.homepage     = "https://github.com/tombenner/nui"
  s.license      = { :type => 'MIT', :file => 'LICENSE.txt' }
  s.author       = { "Tom Benner" => "[email protected]" }
  s.source       = { :git => "https://github.com/tombenner/nui.git", :tag => "v0.4.0" }
  s.platform     = :ios, '5.1'
  s.source_files = 'NUI', 'NUI/**/*.{h,m}'
  s.resources    = "NUI/Resources/*.png", "NUI/**/*.nss"
  s.requires_arc = true
  s.frameworks   = [ "UIKit", "CoreGraphics","QuartzCore", "CoreImage" ]
end
47.470588
164
0.592317
391f84718e6f7c02b4d06b90dc82a17c765dd511
343
class FinishTurnController < ApplicationController
  FINISH_TURN_FAILED_ERROR = "Could not finish turn."

  def create
    @game = Game.find(params.require(:game_id))
    finish_turn_result = FinishTurn.new(game: @game).call

    if !finish_turn_result
      flash[:errors] = [FINISH_TURN_FAILED_ERROR]
    end

    redirect_to @game
  end
end
24.5
57
0.731778
39cd46af7efeaa013fe8a6a474e6f6c2f7c527e4
6,556
class Scaler::Listener::BoshScaler
  class Condition
    def self.load_by_definition(processors, deployment_name, job_name, options)
      const_get(options['class'] + 'Condition').load(
        processors, deployment_name, job_name, options
      )
    end

    class Base
      attr_reader :threshold

      def initialize(
          processor, deployment_name, job_name, threshold)
        @processor = processor
        @deployment_name = deployment_name
        @job_name = job_name
        @threshold = threshold
      end

      def self.load(processors, deployment_name, job_name, options)
        new(
          select_processor(processors),
          deployment_name,
          job_name,
          create_threshold(options))
      end

      def self.select_processor(processors)
        processor = processors.find { |proc| proc.is_a?(processor_class) }
        if processor.nil?
          fail 'No compatible processor found'
        end
        processor
      end

      def self.processor_class
        fail 'Not implemented'
      end

      def self.create_threshold(options)
        if options.key?('larger_than')
          {
            :name => 'larger than',
            :proc => proc { |value| value > options['larger_than'] },
            :value => options['larger_than']
          }
        elsif options.key?('smaller_than')
          {
            :name => 'smaller than',
            :proc => proc { |value| value < options['smaller_than'] },
            :value => options['smaller_than']
          }
        else
          fail 'No condition given'
        end
      end

      def match
        fail 'Not implemented'
      end

      def to_s
        name = self.class.to_s.split(/::/).last.gsub(/Condition$/, '')
        "#{name} is #{@threshold[:name]} #{@threshold[:value]}"
      end
    end

    class DurationAverageConditionBase < Base
      def initialize(
          processor, deployment_name, job_name, threshold, duration)
        super(
          processor, deployment_name, job_name, threshold)
        @duration = duration
      end

      def self.load(processors, deployment_name, job_name, options)
        new(
          select_processor(processors),
          deployment_name,
          job_name,
          create_threshold(options),
          options['duration'])
      end

      def self.processor_class
        Scaler::Listener::BoshScaler::HeartbeatProcessor
      end

      def calc
        cutoff_time = Time.now - @duration
        usage_total = 0.0
        usage_num = 0
        @processor.buffers.each do |_, entity_buffer|
          entity_buffer.each do |metric|
            break if metric[:timestamp] <= cutoff_time.to_i
            break if metric[:deployment] != @deployment_name || metric[:job] != @job_name
            usage_total += sample(metric)
            usage_num += 1
          end
        end
        usage_total / usage_num
      end

      def match
        @threshold[:proc].call(calc)
      end

      def sample(metric)
        fail 'Not implemented'
      end

      def to_s
        name = self.class.to_s.split(/::/).last.gsub(/Condition$/, '')
        "#{name} (#{@duration} secs) is #{@threshold[:name]} #{@threshold[:value]} (#{calc})"
      end
    end

    class CpuAverageCondition < DurationAverageConditionBase
      def sample(metric)
        metric[:vitals]['cpu']['user'].to_f +
          metric[:vitals]['cpu']['sys'].to_f +
          metric[:vitals]['cpu']['wait'].to_f
      end
    end

    class MemoryAverageCondition < DurationAverageConditionBase
      def sample(metric)
        metric[:vitals]['mem']['percent'].to_f
      end
    end

    class CfVarzAverageCondition < DurationAverageConditionBase
      def initialize(
          processor, deployment_name, job_name, threshold, duration,
          varz_job_name, varz_key)
        super(
          processor, deployment_name, job_name, threshold, duration)
        @varz_job_name = varz_job_name
        @varz_key = varz_key
      end

      def self.load(processors, deployment_name, job_name, options)
        new(
          select_processor(processors),
          deployment_name,
          job_name,
          create_threshold(options),
          options['duration'],
          options['varz_job'],
          options['varz_key'])
      end

      def self.processor_class
        Scaler::Listener::BoshScaler::CfVarzProcessor
      end

      def calc
        cutoff_time = Time.now - @duration
        usage_total = 0.0
        usage_num = 0
        @processor.buffers.each do |_, entity_buffer|
          entity_buffer.each do |metric|
            break if metric.timestamp <= cutoff_time
            break if metric.deployment != @deployment_name ||
                     metric.job != @varz_job_name ||
                     metric.key != @varz_key
            usage_total += sample(metric)
            usage_num += 1
          end
        end
        usage_total / usage_num
      end

      def match
        @threshold[:proc].call(calc)
      end

      def sample(metric)
        metric.value
      end

      def to_s
        name = self.class.to_s.split(/::/).last.gsub(/Condition$/, '')
        "#{name} (#{@varz_job_name}, #{@varz_key}) is #{@threshold[:name]} #{@threshold[:value]} (#{calc})"
      end
    end

    class LastSampleConditionBase < Base
      def calc
        usage_total = 0.0
        usage_num = 0
        @processor.buffers.each do |_, entity_buffer|
          metric = entity_buffer.first
          next if metric[:deployment] != @deployment_name || metric[:job] != @job_name
          usage_total += sample(metric)
          usage_num += 1
        end
        usage_total / usage_num
      end

      def match
        @threshold[:proc].call(calc)
      end

      def to_s
        name = self.class.to_s.split(/::/).last.gsub(/Condition$/, '')
        "#{name} is #{@threshold[:name]} #{@threshold[:value]} (#{calc})"
      end

      def self.processor_class
        Scaler::Listener::BoshScaler::HeartbeatProcessor
      end

      def sample(metric)
        fail 'Not implemented'
      end
    end

    class LoadAverage1Condition < LastSampleConditionBase
      def sample(metric)
        metric[:vitals]['load'][0].to_f
      end
    end

    class LoadAverage5Condition < LastSampleConditionBase
      def sample(metric)
        metric[:vitals]['load'][1].to_f
      end
    end

    class LoadAverage15Condition < LastSampleConditionBase
      def sample(metric)
        metric[:vitals]['load'][2].to_f
      end
    end
  end
end
26.329317
107
0.575351
bf569cae8993a0b10d5a321f19f8d3572b11af1d
666
require 'date'
require 'time'

class Rotation < Struct.new(:start, :name, :user_id)
  def initialize(start, name, user_id)
    super(Date.parse(start), name, user_id)
  end

  def start_date
    self.start
  end

  def end_date
    self.start + 7
  end

  def valid?(actual_schedule)
    actual_user_id = actual_schedule['user']['id']
    self.user_id == actual_user_id
  end

  def includes?(actual_schedule)
    actual_start = DateTime.parse(actual_schedule['start'])
    actual_end = DateTime.parse(actual_schedule['end'])
    includes_date?(actual_start)
  end

  def includes_date?(actual_date)
    start_date < actual_date && end_date > actual_date
  end
end
20.181818
59
0.705706
f867801e0e721832a6f6924f3910ce9e72dbabd5
1,133
class WaylandProtocols < Formula
  desc "Additional Wayland protocols"
  homepage "https://wayland.freedesktop.org"
  url "https://wayland.freedesktop.org/releases/wayland-protocols-1.23.tar.xz"
  sha256 "6c0af1915f96f615927a6270d025bd973ff1c58e521e4ca1fc9abfc914633f76"
  license "MIT"

  livecheck do
    url "https://wayland.freedesktop.org/releases.html"
    regex(/href=.*?wayland-protocols[._-]v?(\d+(?:\.\d+)+)\.t/i)
  end

  bottle do
    sha256 cellar: :any_skip_relocation, x86_64_linux: "46ff9ea4c52643e3a0c7291e4dcf3badcb3adc64648e6c3e3c949fdca53b4f49"
  end

  depends_on "autoconf" => :build
  depends_on "automake" => :build
  depends_on "pkg-config" => [:build, :test]
  depends_on "wayland" => :build
  depends_on :linux

  def install
    system "./autogen.sh", "--prefix=#{prefix}", "--sysconfdir=#{etc}", "--localstatedir=#{var}", "--disable-silent-rules"
    system "make"
    system "make", "install"
  end

  test do
    system "pkg-config", "--exists", "wayland-protocols"
    assert_equal 0, $CHILD_STATUS.exitstatus
  end
end
30.621622
121
0.658429
ac08ef76925a108751773f606ae83970209bc602
598
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

package 'php7.3-intl'
37.375
74
0.759197
5d77ebb7b52b33da8474b57c4be36fc4f0517871
1,269
#!/usr/bin/env ruby

# -------------------------------------------------------------------------- #
# Copyright 2002-2022, OpenNebula Project, OpenNebula Systems                #
#                                                                            #
# Licensed under the Apache License, Version 2.0 (the "License"); you may    #
# not use this file except in compliance with the License. You may obtain    #
# a copy of the License at                                                   #
#                                                                            #
# http://www.apache.org/licenses/LICENSE-2.0                                 #
#                                                                            #
# Unless required by applicable law or agreed to in writing, software        #
# distributed under the License is distributed on an "AS IS" BASIS,          #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.   #
# See the License for the specific language governing permissions and        #
# limitations under the License.                                             #
#--------------------------------------------------------------------------- #

require_relative '../../../lib/linux'

LinuxHost.usage('lxc')
57.681818
78
0.410559
2105bddb9ceadefaf4f41402a0cd7575d52bb33b
2,830
#
# Cookbook Name:: apt_test
# Recipe:: lwrps
#
# Copyright 2012, Chef Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

require_relative './spec_helper'

describe 'apt_test::lwrps' do
  it 'creates the JuJu sources.list' do
    expect(file('/etc/apt/sources.list.d/juju.list')).to exist
  end

  it 'creates the NodeJS sources.list' do
    expect(file('/etc/apt/sources.list.d/nodejs.list')).to exist
  end

  it 'creates the HAProxy sources.list' do
    expect(file('/etc/apt/sources.list.d/haproxy.list')).to exist
  end

  it 'creates a repo with a url that is already quoted' do
    src = 'deb\s+\"http://ppa.launchpad.net/juju/stable/ubuntu\" trusty main'
    expect(file('/etc/apt/sources.list.d/juju.list').content).to match(/#{src}/)
  end

  it 'adds the JuJu package signing key' do
    expect(command('apt-key list').stdout).to contain('Launchpad Ensemble PPA')
  end

  it 'creates the correct pinning preferences for chef' do
    pinning_prefs = 'Package: chef\nPin: version 12.7.2-1'
    expect(file('/etc/apt/preferences.d/chef.pref').content).to match(/#{pinning_prefs}/)
  end

  it 'correctly handles a ppa: repository' do
    skip('not on ubuntu') unless os[:family] == 'ubuntu'
    rust = 'http://ppa.launchpad.net/hansjorg/rust/ubuntu'
    expect(file('/etc/apt/sources.list.d/rust.list').content).to match(/#{rust}/)
  end

  it 'renames an old preferences file' do
    expect(file('/etc/apt/preferences.d/wget')).to_not exist
    expect(file('/etc/apt/preferences.d/wget.pref')).to exist
  end

  it 'renames an invalid preferences file' do
    expect(file('/etc/apt/preferences.d/*.pref')).to_not exist
    expect(file('/etc/apt/preferences.d/wildcard.pref')).to exist
  end

  it 'removes a preferences file' do
    expect(file('/etc/apt/preferences.d/camel.pref')).to_not exist
  end

  it 'creates a repo with an architecture' do
    cloudera = 'deb\s+\[arch=amd64 \] \"http:\/\/archive.cloudera.com\/cdh4\/ubuntu\/precise\/amd64\/cdh\" precise-cdh4 contrib'
    expect(file('/etc/apt/sources.list.d/cloudera.list').content).to match(/#{cloudera}/)
  end

  it 'creates the correct pinning preferences with a glob' do
    pinning_prefs = 'Package: \\*\nPin: origin packages.dotdeb.org'
    expect(file('/etc/apt/preferences.d/dotdeb.pref').content).to match(/#{pinning_prefs}/)
  end
end
35.822785
128
0.706714