Dataset columns (name, type, min/max of values):

hexsha              stringlengths   40 to 40
size                int64           2 to 1.01M
content             stringlengths   2 to 1.01M
avg_line_length     float64         1.5 to 100
max_line_length     int64           2 to 1k
alphanum_fraction   float64         0.25 to 1

hexsha: 6a10a8b57d13bc5d6958644103669288edccf3ff | size: 2,731
content:

require "redis/connection/command_helper"
require "redis/connection/registry"
require "em-synchrony"
require "hiredis/reader"

class Redis
  module Connection
    class RedisClient < EventMachine::Connection
      include EventMachine::Deferrable

      def post_init
        @req = nil
        @connected = false
        @reader = ::Hiredis::Reader.new
      end

      def connection_completed
        @connected = true
        succeed
      end

      def connected?
        @connected
      end

      def receive_data(data)
        @reader.feed(data)

        begin
          until (reply = @reader.gets) == false
            @req.succeed [:reply, reply]
          end
        rescue RuntimeError => err
          @req.fail [:error, ::Redis::ProtocolError.new(err.message)]
        end
      end

      def read
        @req = EventMachine::DefaultDeferrable.new
        EventMachine::Synchrony.sync @req
      end

      def send(data)
        callback { send_data data }
      end

      def unbind
        @connected = false

        if @req
          @req.fail [:error, Errno::ECONNRESET]
          @req = nil
        else
          fail
        end
      end
    end

    class Synchrony
      include Redis::Connection::CommandHelper

      def initialize
        @timeout = 5_000_000
        @connection = nil
      end

      def connected?
        @connection && @connection.connected?
      end

      def timeout=(usecs)
        @timeout = usecs
      end

      def connect(host, port, timeout)
        conn = EventMachine.connect(host, port, RedisClient) do |c|
          c.pending_connect_timeout = [Float(timeout / 1_000_000), 0.1].max
        end

        setup_connect_callbacks(conn, Fiber.current)
      end

      def connect_unix(path, timeout)
        conn = EventMachine.connect_unix_domain(path, RedisClient)
        setup_connect_callbacks(conn, Fiber.current)
      end

      def disconnect
        @connection.close_connection
        @connection = nil
      end

      def write(command)
        @connection.send(build_command(command))
      end

      def read
        type, payload = @connection.read

        if type == :reply
          payload
        elsif type == :error
          raise payload
        else
          raise "Unknown type #{type.inspect}"
        end
      end

      private

      def setup_connect_callbacks(conn, f)
        conn.callback do
          @connection = conn
          f.resume conn
        end

        conn.errback do
          @connection = conn
          f.resume :refused
        end

        r = Fiber.yield
        raise Errno::ECONNREFUSED if r == :refused
        r
      end
    end
  end
end

Redis::Connection.drivers << Redis::Connection::Synchrony

avg_line_length: 21.007692 | max_line_length: 75 | alphanum_fraction: 0.572318

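A minimal usage sketch for the driver above, assuming the redis and em-synchrony gems are installed and that redis-rb still accepts the :driver option for selecting this connection driver:

# Hedged sketch: run inside an EventMachine/fiber context provided by em-synchrony.
require "redis"
require "em-synchrony"

EM.synchrony do
  redis = Redis.new(driver: :synchrony)  # picks the fiber-aware driver registered above
  redis.set("greeting", "hello")         # each call yields the current fiber until the reply arrives
  puts redis.get("greeting")
  EM.stop
end
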
hexsha: 613d177251626c4e00881d52de48e3e8fd4e7f02 | size: 482
content:

module ClippyBot
  class Mailer
    def self.send(from:, subject:, to:, body:)
      from_email = SendGrid::Email.new(email: from)
      to_email = SendGrid::Email.new(email: to)
      content = SendGrid::Content.new(type: 'text/plain', value: body)
      email = SendGrid::Mail.new(from_email, subject, to_email, content)

      sendgrid = SendGrid::API.new(api_key: ENV['SENDGRID_API_KEY'])
      sendgrid.client.mail._('send').post(request_body: email.to_json)
    end
  end
end

avg_line_length: 34.428571 | max_line_length: 72 | alphanum_fraction: 0.678423

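A hedged invocation sketch for the mailer above; the addresses are placeholders, the sendgrid-ruby gem must be loaded, and SENDGRID_API_KEY must be present in the environment:

# Hypothetical call with placeholder addresses.
ClippyBot::Mailer.send(
  from:    "[email protected]",
  to:      "[email protected]",
  subject: "Paperclip says hi",
  body:    "It looks like you're writing a letter."
)
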
hexsha: 6147b01600ad528f27f877b2edcd60c1837c8a61 | size: 814
content:

module Spree
  class AssembliesPart < ActiveRecord::Base
    belongs_to :assembly, class_name: "Spree::Variant", foreign_key: "assembly_id", touch: true
    belongs_to :part, class_name: "Spree::Variant", foreign_key: "part_id"

    delegate :name, :sku, to: :part

    after_create :set_master_unlimited_stock

    def self.get(assembly_id, part_id)
      find_or_initialize_by(assembly_id: assembly_id, part_id: part_id)
    end

    def options_text
      if variant_selection_deferred?
        Spree.t(:user_selectable)
      else
        part.options_text
      end
    end

    private

    def set_master_unlimited_stock
      if part.product.variants.any?
        part.product.master.update_attribute :track_inventory, false
      end
    end
  end
end

avg_line_length: 23.941176 | max_line_length: 74 | alphanum_fraction: 0.654791

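A hypothetical console sketch for the model above; it assumes two Spree::Variant records already exist and that the assembly/part columns referenced by the model are migrated:

# Hedged example; SKUs are invented.
assembly = Spree::Variant.find_by!(sku: "BUNDLE-1")
part     = Spree::Variant.find_by!(sku: "WIDGET-1")

link = Spree::AssembliesPart.get(assembly.id, part.id) # find_or_initialize_by under the hood
link.save!
link.options_text # delegates to the part's options_text unless selection is deferred
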
hexsha: ab10641956ea6c1d7ea2593cc3f77b791168eab0 | size: 126
content:

newparam(:bridge_name) do
  include EasyType
  include EasyType::Validators::Name

  isnamevar

  desc 'The bridge name'
end

avg_line_length: 12.6 | max_line_length: 36 | alphanum_fraction: 0.746032

hexsha: 21548852bd9fa44440d8d6b1828a956c26b17e8e | size: 658
content:

version = File.read(File.expand_path('../../VERSION', __FILE__)).strip

Gem::Specification.new do |spec|
  spec.name          = 'aws-sdk'
  spec.version       = version
  spec.summary       = 'AWS SDK for Ruby'
  spec.description   = 'The official AWS SDK for Ruby. Provides both resource oriented interfaces and API clients for AWS services.'
  spec.author        = 'Amazon Web Services'
  spec.homepage      = 'http://github.com/aws/aws-sdk-ruby'
  spec.license       = 'Apache 2.0'
  spec.email         = ['[email protected]']
  spec.require_paths = ['lib']
  spec.files        += Dir['lib/**/*.rb']

  spec.add_dependency('aws-sdk-resources', version)
end

avg_line_length: 32.9 | max_line_length: 132 | alphanum_fraction: 0.645897

hexsha: 3970789ce88cef32fd9a6a485b8410c9c5b6365d | size: 2,287
content:

require 'spec_helper'
require 'taketo/constructs'

include Taketo

describe "BaseConstruct" do
  class TestBaseConstruct < Constructs::BaseConstruct; end

  subject(:construct) { TestBaseConstruct.new(:my_node) }

  specify "#node_type returns demodulized snake-cased class name" do
    construct.node_type.should == :test_base_construct
  end

  specify "#qualified_name returns node type and name as string" do
    expect(construct.qualified_name).to eq('test_base_construct my_node')
  end

  describe "#parent=" do
    it "stores parent" do
      parent = stub
      construct.parent = parent
      construct.parent.should == parent
    end
  end

  shared_context "parents" do
    let(:grand_parent) { TestBaseConstruct.new(:grand_parent) }
    let(:parent) { TestBaseConstruct.new(:parent) }

    before(:each) do
      parent.parent = grand_parent
      construct.parent = parent
    end
  end

  describe "#parents" do
    include_context "parents"

    it "returns parent nodes up until NullConstruct" do
      expect(construct.parents).to eq([parent, grand_parent])
    end
  end

  specify "#parents returns an empty array if there are no parents" do
    expect(construct.parents).to eq([])
  end

  describe "#path" do
    include_context "parents"

    it "returns names of parents separated by :" do
      expect(construct.path).to eq("grand_parent:parent:my_node")
    end
  end

  describe "#default_server_config=" do
    let(:default_server_config) { proc { call_from_self } }
    let(:context) { stub(:Context) }

    it "sets default server config" do
      construct.default_server_config = default_server_config
      context.should_receive(:call_from_self)
      context.instance_eval(&construct.default_server_config)
    end

    it "merges given config to parent's default server config" do
      construct.parent = stub(:default_server_config => proc { call_from_parent })
      construct.default_server_config = default_server_config
      context.should_receive(:call_from_parent).ordered
      context.should_receive(:call_from_self).ordered
      context.instance_eval(&construct.default_server_config)
    end
  end

  it "has an empty proc as an initial default server config" do
    expect(construct.default_server_config.call).to eq(nil)
  end
end

avg_line_length: 27.22619 | max_line_length: 82 | alphanum_fraction: 0.721906

hexsha: 080d0e51cd59e9c2f31b1ab0d7cb60274961b2c7 | size: 813
content:

Pod::Spec.new do |s|
  s.name         = "KCUserKit"
  s.version      = "0.1.5"
  s.summary      = "A short description of KCUserKit."
  s.license      = 'MIT'
  s.author       = { "Emil Wojtaszek" => "[email protected]" }
  s.source       = { :git => "[email protected]:newmedia/kingschat-user-ios.git", :tag => s.version.to_s }

  s.platform     = :ios, '8.0'
  s.requires_arc = true

  s.source_files = 'KCUserKit/*.{h,m}'

  s.homepage     = 'https://www.appunite.com'

  s.dependency 'AFNetworking', '~> 2.6'
  s.dependency 'Mantle', '~> 2.0'
  s.dependency 'APAddressBook', '~> 0.2'
  s.dependency 'AFgzipRequestSerializer'
  s.dependency 'libPhoneNumber-iOS', '~> 0.8'
  s.dependency 'Objective-LevelDB-appunite', '~> 2.1'
  s.dependency 'KCEnvironmentKit'
end

avg_line_length: 35.347826 | max_line_length: 113 | alphanum_fraction: 0.581796

hexsha: e8c66014ea448707af6de5a1c59dae5ea9740a79 | size: 2,122
content:

# == Schema Information
#
# Table name: follower_relationships
#
#  id          :integer      not null, primary key
#  person_id   :string(255)  not null
#  follower_id :string(255)  not null
#  created_at  :datetime     not null
#  updated_at  :datetime     not null
#
# Indexes
#
#  index_follower_relationships_on_follower_id                (follower_id)
#  index_follower_relationships_on_person_id                  (person_id)
#  index_follower_relationships_on_person_id_and_follower_id  (person_id,follower_id) UNIQUE
#

require 'spec_helper'

describe FollowerRelationship do
  before(:each) do
    @follower_relationship = FactoryGirl.create(:follower_relationship)
    @person = @follower_relationship.person
    @follower = @follower_relationship.follower
  end

  it "should include the follower in the person's follower list" do
    @person.followers.should include @follower
  end

  it "should not include the person in the follower's follower list" do
    @follower.followers.should_not include @person
  end

  it "should include the person in the follower's followed people list" do
    @follower.followed_people.should include @person
  end

  it "should not include the follower in the person's followed people list" do
    @person.followed_people.should_not include @follower
  end

  it "should not allow a duplicate follower relationship" do
    duplicate_attributes = @follower_relationship.attributes.slice(:person_id, :follower_id)
    FollowerRelationship.new(duplicate_attributes).should_not be_valid
  end

  it "should allow an inverse follower relationship" do
    inverse_attributes = {
      :person_id => @follower_relationship.follower_id,
      :follower_id => @follower_relationship.person_id
    }
    FollowerRelationship.new(inverse_attributes).should be_valid
  end

  it "should not allow a person to follow themselves" do
    self_attributes = {
      :person_id => @follower_relationship.person_id,
      :follower_id => @follower_relationship.person_id
    }
    FollowerRelationship.new(self_attributes).should_not be_valid
  end
end

avg_line_length: 32.646154 | max_line_length: 92 | alphanum_fraction: 0.732328

hexsha: bbca08310f389a618c72e912d5019532b3ef5d85 | size: 40
content:

module Geocoder
  VERSION = "1.7.0"
end

avg_line_length: 10 | max_line_length: 19 | alphanum_fraction: 0.675

hexsha: 5dd7ba32905c535072d4cf6ba89b9fedb3620d62 | size: 26
content:

require 'advanced_search'

avg_line_length: 13 | max_line_length: 25 | alphanum_fraction: 0.846154

hexsha: b934713e4687d7e4ffce780e28a69026fe221065 | size: 29,510
content:
require "bio-ucsc" describe "Bio::Ucsc::MonDom5" do before(:all) do Bio::Ucsc::MonDom5::DBConnection.connect end describe "Bio::Ucsc::MonDom5::All_est" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::All_est.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::All_mrna" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::All_mrna.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::Author" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::Author.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::BlastHg18KG" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::BlastHg18KG.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::Cds" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::Cds.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::Cell" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::Cell.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::ChainAilMel1" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::ChainAilMel1.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::ChainAilMel1Link" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::ChainAilMel1Link.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::ChainCalJac3" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::ChainCalJac3.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::ChainCalJac3Link" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::ChainCalJac3Link.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::ChainCanFam2" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::ChainCanFam2.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::ChainCanFam2Link" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::ChainCanFam2Link.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::ChainDanRer5" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::ChainDanRer5.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::ChainDanRer5Link" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::ChainDanRer5Link.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::ChainEquCab2" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::ChainEquCab2.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::ChainEquCab2Link" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::ChainEquCab2Link.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::ChainFelCat4" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::ChainFelCat4.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::ChainFelCat4Link" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::ChainFelCat4Link.first pp result result.should 
be_true end end end describe "Bio::Ucsc::MonDom5::ChainGalGal3" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::ChainGalGal3.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::ChainGalGal3Link" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::ChainGalGal3Link.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::ChainHg19" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::ChainHg19.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::ChainHg19Link" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::ChainHg19Link.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::ChainMm9" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::ChainMm9.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::ChainMm9Link" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::ChainMm9Link.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::ChainOrnAna1" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::ChainOrnAna1.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::ChainOrnAna1Link" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::ChainOrnAna1Link.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::ChainOryCun2" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::ChainOryCun2.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::ChainOryCun2Link" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::ChainOryCun2Link.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::ChainOviAri1" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::ChainOviAri1.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::ChainOviAri1Link" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::ChainOviAri1Link.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::ChainPanTro3" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::ChainPanTro3.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::ChainPanTro3Link" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::ChainPanTro3Link.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::ChainPonAbe2" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::ChainPonAbe2.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::ChainPonAbe2Link" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::ChainPonAbe2Link.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::ChainRheMac3" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::ChainRheMac3.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::ChainRheMac3Link" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::ChainRheMac3Link.first pp result result.should be_true end end end describe 
"Bio::Ucsc::MonDom5::ChainSusScr2" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::ChainSusScr2.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::ChainSusScr2Link" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::ChainSusScr2Link.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::ChromInfo" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::ChromInfo.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::CpgIslandExt" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::CpgIslandExt.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::Description" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::Description.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::Development" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::Development.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::EnsGene" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::EnsGene.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::EnsGtp" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::EnsGtp.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::EnsPep" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::EnsPep.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::EstOrientInfo" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::EstOrientInfo.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::ExtFile" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::ExtFile.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::Gap" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::Gap.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::GbCdnaInfo" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::GbCdnaInfo.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::GbExtFile" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::GbExtFile.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::GbLoaded" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::GbLoaded.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::GbMiscDiff" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::GbMiscDiff.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::GbSeq" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::GbSeq.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::GbStatus" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::GbStatus.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::GbWarn" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::GbWarn.first pp result result.should be_true end end end describe 
"Bio::Ucsc::MonDom5::Gc5Base" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::Gc5Base.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::GeneName" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::GeneName.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::Genscan" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::Genscan.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::GenscanPep" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::GenscanPep.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::Gold" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::Gold.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::Grp" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::Grp.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::HgFindSpec" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::HgFindSpec.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::History" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::History.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::ImageClone" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::ImageClone.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::IntronEst" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::IntronEst.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::Keyword" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::Keyword.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::Library" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::Library.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::MrnaClone" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::MrnaClone.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::MrnaOrientInfo" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::MrnaOrientInfo.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::Multiz9way" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::Multiz9way.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::Multiz9wayFrames" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::Multiz9wayFrames.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::Multiz9waySummary" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::Multiz9waySummary.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::NestedRepeats" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::NestedRepeats.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::NetAilMel1" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::NetAilMel1.first pp result result.should be_true end end 
end describe "Bio::Ucsc::MonDom5::NetCalJac3" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::NetCalJac3.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::NetCanFam2" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::NetCanFam2.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::NetDanRer5" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::NetDanRer5.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::NetEquCab2" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::NetEquCab2.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::NetFelCat4" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::NetFelCat4.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::NetGalGal3" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::NetGalGal3.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::NetHg19" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::NetHg19.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::NetMm9" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::NetMm9.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::NetOrnAna1" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::NetOrnAna1.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::NetOryCun2" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::NetOryCun2.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::NetOviAri1" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::NetOviAri1.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::NetPanTro3" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::NetPanTro3.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::NetPonAbe2" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::NetPonAbe2.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::NetRheMac3" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::NetRheMac3.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::NetSusScr2" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::NetSusScr2.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::NscanGene" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::NscanGene.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::NscanPep" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::NscanPep.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::Organism" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::Organism.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::PhastCons9way" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::PhastCons9way.first pp result 
result.should be_true end end end describe "Bio::Ucsc::MonDom5::PhastConsElements9way" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::PhastConsElements9way.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::ProductName" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::ProductName.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::Quality" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::Quality.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::RefFlat" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::RefFlat.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::RefGene" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::RefGene.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::RefLink" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::RefLink.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::RefSeqAli" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::RefSeqAli.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::RefSeqStatus" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::RefSeqStatus.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::RefSeqSummary" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::RefSeqSummary.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::Rmsk" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::Rmsk.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::Sex" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::Sex.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::SimpleRepeat" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::SimpleRepeat.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::Source" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::Source.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::TableDescriptions" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::TableDescriptions.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::Tissue" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::Tissue.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::TrackDb" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::TrackDb.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::TransMapAlnMRna" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::TransMapAlnMRna.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::TransMapAlnRefSeq" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::TransMapAlnRefSeq.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::TransMapAlnSplicedEst" do context ".first" do it 'returns the first records' do result 
= Bio::Ucsc::MonDom5::TransMapAlnSplicedEst.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::TransMapAlnUcscGenes" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::TransMapAlnUcscGenes.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::TransMapInfoMRna" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::TransMapInfoMRna.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::TransMapInfoRefSeq" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::TransMapInfoRefSeq.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::TransMapInfoSplicedEst" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::TransMapInfoSplicedEst.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::TransMapInfoUcscGenes" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::TransMapInfoUcscGenes.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::WindowmaskerSdust" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::WindowmaskerSdust.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::XenoMrna" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::XenoMrna.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::XenoRefFlat" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::XenoRefFlat.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::XenoRefGene" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::XenoRefGene.first pp result result.should be_true end end end describe "Bio::Ucsc::MonDom5::XenoRefSeqAli" do context ".first" do it 'returns the first records' do result = Bio::Ucsc::MonDom5::XenoRefSeqAli.first pp result result.should be_true end end end end
avg_line_length: 23.99187 | max_line_length: 65 | alphanum_fraction: 0.61003

hexsha: bb4a2cbe73a3011ed98a12f85472667843b0e990 | size: 456
content:

cask 'eclipse-cpp' do
  version '4.5.1'
  sha256 'd485444edbd6761da697dd55e974f8a45691f6bce515880eeb4216ea5ef6cbb5'

  url 'https://www.eclipse.org/downloads/download.php?file=/technology/epp/downloads/release/mars/1/eclipse-cpp-mars-1-macosx-cocoa-x86_64.tar.gz&r=1'
  name 'Eclipse IDE for C/C++ Developers'
  homepage 'https://eclipse.org/'
  license :eclipse

  depends_on :macos => '>= :leopard'
  depends_on :arch => :x86_64

  app 'Eclipse.app'
end

avg_line_length: 30.4 | max_line_length: 150 | alphanum_fraction: 0.741228

hexsha: e92e0c0b6fec0c0838836d7b0eddc777235a611f | size: 6,846
content:
require 'spec_helper' describe "Steps API", type: :request do before(:each) do admin_user = create_admin_user post '/login', { email: admin_user.email, password: admin_user.password } @experiment = create_experiment_with_one_stage("test") @stage = @experiment.experiment_definition.protocol.stages.first end describe "#create" do it 'first step' do post "/stages/#{@stage.id}/steps", {}, http_headers expect(response).to be_success # test for the 200 status-code json = JSON.parse(response.body) json["step"]["name"].should be_nil json["step"]["order_number"].should == 0 end it 'last step' do params = { prev_id: @stage.steps.first.id } post "/stages/#{@stage.id}/steps", params.to_json, http_headers expect(response).to be_success # test for the 200 status-code json = JSON.parse(response.body) json["step"]["order_number"].should == 1 end end describe "#destroy" do it 'last step in last stage' do delete "/steps/#{@stage.steps.first.id}", { :format => 'json' } response.response_code.should == 422 json = JSON.parse(response.body) json["step"]["errors"].should_not be_nil end it "last step not in last stage" do new_stage = hold_stage(@stage.protocol) delete "/steps/#{@stage.steps.first.id}", { :format => 'json' } expect(response).to be_success json = JSON.parse(response.body) json["step"]["destroyed_stage_id"].should eq(@stage.id) end it "not last step" do new_step = Step.new @stage.steps << new_step @stage.steps.reload @stage.steps[0].id.should == new_step.id delete "/steps/#{new_step.id}", { :format => 'json' } expect(response).to be_success json = JSON.parse(response.body) json["step"]["destroyed_stage_id"].should be_nil @stage.steps.reload @stage.steps.should be_contiguous_order_numbers end end describe "#update" do it "step name" do params = { step: {name: "test"} } put "/steps/#{@stage.steps.first.id}", params.to_json, http_headers expect(response).to be_success json = JSON.parse(response.body) json["step"]["name"].should eq("test") end it "step name null" do params = { step: {name: ""} } put "/steps/#{@stage.steps.first.id}", params.to_json, http_headers expect(response).to be_success json = JSON.parse(response.body) json["step"]["name"].should be_nil end end describe "#move" do it "step from back to front" do @stage = meltcurve_stage(@stage.protocol) #3 steps #add another step at the end movestep = @stage.steps.last @stage.steps << Step.new(:order_number=>movestep.order_number+1) #4 steps now params = { prev_id: @stage.steps.first.id} #move step 3 after step 1 post "/steps/#{movestep.id}/move", params.to_json, http_headers expect(response).to be_success @stage.steps.reload @stage.steps.should be_contiguous_order_numbers @stage.steps[1].id.should == movestep.id end it "step from front to back" do @stage = meltcurve_stage(@stage.protocol) #3 steps #add another step at the end movestep = @stage.steps.first params = { prev_id: @stage.steps[1].id} #move step 1 after step 2 post "/steps/#{movestep.id}/move", params.to_json, http_headers expect(response).to be_success @stage.steps.reload @stage.steps.should be_contiguous_order_numbers @stage.steps[1].id.should == movestep.id end it "step to the same position (not moved)" do @stage = meltcurve_stage(@stage.protocol) #3 steps movestep = @stage.steps[2] params = { prev_id: @stage.steps[1].id} post "/steps/#{movestep.id}/move", params.to_json, http_headers expect(response).to be_success @stage.steps.reload @stage.steps.should be_contiguous_order_numbers @stage.steps[2].id.should == movestep.id end it "step to non-existent prev_id" do @stage = 
meltcurve_stage(@stage.protocol) #3 steps #add another step at the end movestep = @stage.steps[2] params = { prev_id: 123} #move step 3 after step 1 post "/steps/#{movestep.id}/move", params.to_json, http_headers expect(response).to be_success @stage.steps.reload @stage.steps.should be_contiguous_order_numbers @stage.steps[0].id.should == movestep.id end it "step from hold stage to cycle stage" do new_stage = cycle_stage(@stage.protocol) steps_count = new_stage.steps.count movestep = @stage.steps.first params = { stage_id: new_stage.id } post "/steps/#{movestep.id}/move", params.to_json, http_headers expect(response).to be_success @experiment.experiment_definition.protocol.stages.reload @experiment.experiment_definition.protocol.stages.count.should == 1 #hold stage should be deleted because the only step is moved @experiment.experiment_definition.protocol.stages.first.id.should == new_stage.id new_stage.steps.reload new_stage.steps.count.should == steps_count + 1 new_stage.steps.should be_contiguous_order_numbers new_stage.steps[0].id.should == movestep.id end it "step from meltcurve stage to cycle stage" do @stage = meltcurve_stage(@stage.protocol) new_stage = cycle_stage(@stage.protocol) steps_count = new_stage.steps.count movestep = @stage.steps[1] params = { stage_id: new_stage.id } post "/steps/#{movestep.id}/move", params.to_json, http_headers expect(response).to be_success @stage.steps.reload @stage.steps.count.should == 2 @stage.steps.should be_contiguous_order_numbers new_stage.steps.reload new_stage.steps.count.should == 3 new_stage.steps.should be_contiguous_order_numbers end it "step to non-existent stage not allowed" do params = { stage_id: 212 } post "/steps/#{@stage.steps.first.id}/move", params.to_json, http_headers response.response_code.should == 422 end end describe "check editable" do it "- not editable if experiment definition is not editable" do @experiment.experiment_definition = ExperimentDefinition.new(:experiment_type=>ExperimentDefinition::TYPE_DIAGNOSTIC) @experiment.save post "/stages/#{@stage.id}/steps", {}, http_headers response.response_code.should == 422 end it "not editable if experiment is runned" do @experiment.update_attributes(:started_at=>Time.now) @experiment.save post "/stages/#{@stage.id}/steps", {}, http_headers response.response_code.should == 422 end end end
avg_line_length: 37.005405 | max_line_length: 134 | alphanum_fraction: 0.654835

hexsha: bf2581af5ec3aa16f7df615f2e9aecaa36d78f0a | size: 656
content:

# Representation of the game state
module Model
  module Direction
    UP = :up
    RIGHT = :right
    DOWN = :down
    LEFT = :left
  end

  class Coord < Struct.new(:row, :col)
  end

  class Food < Coord
  end

  class Snake < Struct.new(:positions)
  end

  class Grid < Struct.new(:rows, :cols)
  end

  class State < Struct.new(:snake, :food, :grid, :curr_direction, :game_finished)
  end

  def self.initial_state
    Model::State.new(
      Model::Snake.new([
        Model::Coord.new(1, 1),
        Model::Coord.new(0, 1),
      ]),
      Model::Food.new(4, 4),
      Model::Grid.new(8, 12),
      Direction::DOWN,
      false
    )
  end
end

avg_line_length: 16.820513 | max_line_length: 81 | alphanum_fraction: 0.588415

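A small sketch reading the initial state defined above; the return values follow directly from the structs in the file:

state = Model.initial_state
state.snake.positions.first  # => #<struct Model::Coord row=1, col=1>
state.curr_direction         # => :down
state.food                   # Food is a Coord subclass, so it also exposes row/col
state.game_finished          # => false
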
hexsha: 1a5d77bfd4d3eb16288a702e24845e8adc3a7b36 | size: 369
content:

# Load all of the core implementation required to use state_machine. This
# includes:
# * StateMachine::MacroMethods which adds the state_machine DSL to your class
# * A set of initializers for setting state_machine defaults based on the current
#   running environment (such as within Rails)
require 'state_machine/macro_methods'
require 'state_machine/initializers'

avg_line_length: 46.125 | max_line_length: 81 | alphanum_fraction: 0.804878

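A hedged illustration of the state_machine DSL that these requires make available; the Vehicle class and its states are invented for the example and follow the gem's conventional usage:

# Hypothetical class; assumes the state_machine gem is loaded.
class Vehicle
  attr_accessor :state

  state_machine :state, initial: :parked do
    event :ignite do
      transition :parked => :idling
    end

    event :park do
      transition :idling => :parked
    end
  end
end

vehicle = Vehicle.new  # state == "parked"
vehicle.ignite         # state == "idling"
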
hexsha: 380984c283e5cace3f77e72b63c475df4f493bec | size: 1,354
content:

# frozen_string_literal: true

module QA
  module Page
    module Group
      class Menu < Page::Base
        include SubMenus::Common

        view 'app/views/layouts/nav/sidebar/_group.html.haml' do
          element :group_settings_item
          element :group_members_item
          element :general_settings_link
        end

        view 'app/views/layouts/nav/sidebar/_analytics_links.html.haml' do
          element :analytics_link
          element :analytics_sidebar_submenu
        end

        def click_group_members_item
          within_sidebar do
            click_element(:group_members_item)
          end
        end

        def click_settings
          within_sidebar do
            click_element(:group_settings_item)
          end
        end

        def click_contribution_analytics_item
          hover_element(:analytics_link) do
            within_submenu(:analytics_sidebar_submenu) do
              click_element(:contribution_analytics_link)
            end
          end
        end

        def click_group_general_settings_item
          hover_element(:group_settings_item) do
            within_submenu(:group_sidebar_submenu) do
              click_element(:general_settings_link)
            end
          end
        end
      end
    end
  end
end

QA::Page::Group::Menu.prepend_if_ee('QA::EE::Page::Group::Menu')

avg_line_length: 25.54717 | max_line_length: 74 | alphanum_fraction: 0.6226

hexsha: 082857ace4b654eb07fe465246b4a0284f9f9151 | size: 676
content:

module SynapseContent
  class CustomField < ApplicationRecord
    include Tokenable

    # audited associated_with: :publishable

    self.table_name = "synapse_custom_fields"

    belongs_to :publishable, polymorphic: true, touch: true

    has_one_attached :image

    validates :publishable, presence: true
    validates :name, presence: true
    validates :field_type, presence: true

    enum field_type: { default: 0, currency: 1 }

    def content
      if self.string_content?
        self.string_content
      elsif self.text_content?
        self.text_content
      elsif self.numeric_content?
        self.numeric_content
      end
    end
  end
end

avg_line_length: 21.125 | max_line_length: 59 | alphanum_fraction: 0.683432

hexsha: 5d5651018863c69a1eb069e9f496c6d4477a97c0 | size: 1,470
content:

require File.join(__FILE__, '../../base')

module Alchemy
  module Generators
    class ElementsGenerator < Base
      desc "This generator generates your elements view partials."
      source_root File.expand_path('templates', File.dirname(__FILE__))

      def create_directory
        @elements_dir = "#{Rails.root}/app/views/alchemy/elements"
        empty_directory @elements_dir
      end

      def create_partials
        @elements = load_alchemy_yaml('elements.yml')
        @elements.each do |element|
          @element = element
          contents = element["contents"] || []
          if @element['available_contents']
            @available_contents_names = @element['available_contents'].collect { |c| c['name'] }
            @contents = contents.delete_if { |c| @available_contents_names.include?(c['name']) } or []
          else
            @contents = contents
          end
          if element["name"] =~ /\A[a-z0-9_-]+\z/
            @element_name = element["name"].underscore
          else
            raise "Element name '#{element['name']}' has wrong format. Only lowercase and non whitespace characters allowed."
          end
          conditional_template "editor.html.#{template_engine}", "#{@elements_dir}/_#{@element_name}_editor.html.#{template_engine}"
          conditional_template "view.html.#{template_engine}", "#{@elements_dir}/_#{@element_name}_view.html.#{template_engine}"
        end if @elements
      end
    end
  end
end

avg_line_length: 38.684211 | max_line_length: 132 | alphanum_fraction: 0.630612

hexsha: 9125eed55335c83cf96ecaac80abfb55af923345 | size: 5,319
content:
=begin #UltraCart Rest API V2 #UltraCart REST API Version 2 OpenAPI spec version: 2.0.0 Contact: [email protected] Generated by: https://github.com/swagger-api/swagger-codegen.git Swagger Codegen version: 2.4.15-SNAPSHOT =end require 'date' module UltracartClient class ItemKitDefinition # Components attr_accessor :components # Attribute mapping from ruby-style variable name to JSON key. def self.attribute_map { :'components' => :'components' } end # Attribute type mapping. def self.swagger_types { :'components' => :'Array<ItemKitComponent>' } end # Initializes the object # @param [Hash] attributes Model attributes in the form of hash def initialize(attributes = {}) return unless attributes.is_a?(Hash) # convert string to symbol for hash key attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v } if attributes.has_key?(:'components') if (value = attributes[:'components']).is_a?(Array) self.components = value end end end # Show invalid properties with the reasons. Usually used together with valid? # @return Array for valid properties with the reasons def list_invalid_properties invalid_properties = Array.new invalid_properties end # Check to see if the all the properties in the model are valid # @return true if the model is valid def valid? true end # Checks equality by comparing each attribute. # @param [Object] Object to be compared def ==(o) return true if self.equal?(o) self.class == o.class && components == o.components end # @see the `==` method # @param [Object] Object to be compared def eql?(o) self == o end # Calculates hash code according to all attributes. # @return [Fixnum] Hash code def hash [components].hash end # Builds the object from hash # @param [Hash] attributes Model attributes in the form of hash # @return [Object] Returns the model itself def build_from_hash(attributes) return nil unless attributes.is_a?(Hash) self.class.swagger_types.each_pair do |key, type| if type =~ /\AArray<(.*)>/i # check to ensure the input is an array given that the attribute # is documented as an array but the input is not if attributes[self.class.attribute_map[key]].is_a?(Array) self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) }) end elsif !attributes[self.class.attribute_map[key]].nil? 
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]])) end # or else data not found in attributes(hash), not an issue as the data can be optional end self end # Deserializes the data based on type # @param string type Data type # @param string value Value to be deserialized # @return [Object] Deserialized data def _deserialize(type, value) case type.to_sym when :DateTime DateTime.parse(value) when :Date Date.parse(value) when :String value.to_s when :Integer value.to_i when :Float value.to_f when :BOOLEAN if value.to_s =~ /\A(true|t|yes|y|1)\z/i true else false end when :Object # generic object (usually a Hash), return directly value when /\AArray<(?<inner_type>.+)>\z/ inner_type = Regexp.last_match[:inner_type] value.map { |v| _deserialize(inner_type, v) } when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/ k_type = Regexp.last_match[:k_type] v_type = Regexp.last_match[:v_type] {}.tap do |hash| value.each do |k, v| hash[_deserialize(k_type, k)] = _deserialize(v_type, v) end end else # model temp_model = UltracartClient.const_get(type).new temp_model.build_from_hash(value) end end # Returns the string representation of the object # @return [String] String presentation of the object def to_s to_hash.to_s end # to_body is an alias to to_hash (backward compatibility) # @return [Hash] Returns the object in the form of hash def to_body to_hash end # Returns the object in the form of hash # @return [Hash] Returns the object in the form of hash def to_hash hash = {} self.class.attribute_map.each_pair do |attr, param| value = self.send(attr) next if value.nil? hash[param] = _to_hash(value) end hash end # Outputs non-array value in the form of hash # For object, use to_hash. Otherwise, just return the value # @param [Object] value Any valid value # @return [Hash] Returns the value in the form of hash def _to_hash(value) if value.is_a?(Array) value.compact.map { |v| _to_hash(v) } elsif value.is_a?(Hash) {}.tap do |hash| value.each { |k, v| hash[k] = _to_hash(v) } end elsif value.respond_to? :to_hash value.to_hash else value end end end end
avg_line_length: 28.292553 | max_line_length: 107 | alphanum_fraction: 0.619289

hexsha: 79cd068f821613228cd41540da0036a015cc772f | size: 1,886
content:

module Gobstones::BatchParser
  class << self
    def parse(request)
      test = parse_test(request)
      options = parse_options(test).merge(parse_settings(request))
      examples = parse_examples test, options
      Gobstones::Batch.new request.content, examples, request.extra, options
    end

    private

    def parse_settings(request)
      { game_framework: !!request.dig(:settings, 'game_framework') }
    end

    def parse_test(request)
      YAML.load(request.test).deep_symbolize_keys
    end

    def parse_examples(test, options)
      examples = test[:examples]
      examples.each_with_index.map do |example, index|
        parse_example options, {
          id: index,
          name: example[:title],
          preconditions: example.slice(*preconditions),
          postconditions: example.slice(*postconditions)
        }
      end
    end

    def parse_example(options, example)
      return example unless options[:subject]

      return_value = example[:postconditions][:return]
      if return_value
        example[:name] = "#{options[:subject]}() -> #{return_value}" unless example[:name]
        options[:show_final_board] = false
      end
      example
    end

    def parse_options(test)
      [
        struct(key: :show_initial_board, default: true),
        struct(key: :show_final_board, default: true),
        struct(key: :check_head_position, default: false),
        struct(key: :expect_endless_while, default: false),
        struct(key: :interactive, default: false),
        struct(key: :subject, default: nil)
      ].map { |it|
        [
          it.key,
          if test[it.key].nil?
            it.default
          else
            test[it.key]
          end
        ]
      }.to_h
    end

    def preconditions
      [:initial_board, :arguments]
    end

    def postconditions
      [:final_board, :error, :return]
    end
  end
end

avg_line_length: 25.486486 | max_line_length: 90 | alphanum_fraction: 0.615058

hexsha: e8edb26dbbb17a2ab1fd338e6c83b0c18d8ffb26 | size: 960
content:

cask "sensei" do
  version "1.2.10,60"
  sha256 "ebc7b3ddcfd49478128d5d135e8ec7998dbbbd6e8c8c6b6c57d8da30eab967d8"

  # cindori.s3.amazonaws.com/ was verified as official when first introduced to the cask
  url "https://cindori.s3.amazonaws.com/Sensei.dmg"
  appcast "https://api.appcenter.ms/v0.1/public/sparkle/apps/51fc066a-f4b4-49ec-b966-b2f476d2eede"
  name "Sensei"
  homepage "https://sensei.app/"

  auto_updates true
  depends_on macos: ">= :catalina"

  app "Sensei.app"

  uninstall delete:    "/Library/PrivilegedHelperTools/org.cindori.SenseiTool",
            launchctl: "org.cindori.SenseiTool"

  zap trash: [
    "~/Library/Application Support/Sensei",
    "~/Library/Application Support/org.cindori.Sensei",
    "~/Library/Caches/org.cindori.Sensei",
    "~/Library/Cookies/org.cindori.Sensei.binarycookies",
    "~/Library/Preferences/org.cindori.Sensei.plist",
    "~/Library/Saved Application State/org.cindori.Sensei.savedState",
  ]
end

avg_line_length: 34.285714 | max_line_length: 98 | alphanum_fraction: 0.733333

hexsha: 0137e18b9de72199d61d869a219d8855d383d129 | size: 263
content:

module SafeRedirection
  module Resolvers
    class Resolver
      attr_accessor :resolver

      def initialize(resolver = nil)
        @resolver = resolver
      end

      def recognize_path(*args)
        raise NotImplementedError
      end
    end
  end
end

avg_line_length: 16.4375 | max_line_length: 36 | alphanum_fraction: 0.646388

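A hypothetical concrete subclass of the abstract resolver above; the RailsResolver name is invented, and it simply forwards to whatever routing object was injected (for example Rails.application.routes in a Rails app):

module SafeRedirection
  module Resolvers
    class RailsResolver < Resolver
      def recognize_path(*args)
        # Fall back to the application's route set if nothing was injected.
        (resolver || Rails.application.routes).recognize_path(*args)
      end
    end
  end
end

# SafeRedirection::Resolvers::RailsResolver.new.recognize_path("/users/1")
# # => { controller: "users", action: "show", id: "1" } in a typical Rails app
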
hexsha: f843048830f50f09840699477a5f68f0e53ad4d3 | size: 978
content:

default[:drbd][:remote_host] = nil
default[:drbd][:disk] = nil
default[:drbd][:mount] = nil
default[:drbd][:fs_type] = "ext3"
default[:drbd][:dev] = "/dev/drbd0"
default[:drbd][:master] = false
default[:drbd][:port] = 7789
default[:drbd][:configured] = false

default[:drbd][:pacemaker][:agent] = "ocf:linbit:drbd"
default[:drbd][:pacemaker][:params][:drbd_resource] = "r0"
default[:drbd][:pacemaker][:op][:monitor][:interval] = "5s"
default[:drbd][:pacemaker][:op][:monitor][:role] = "Master"
default[:drbd][:pacemaker][:ms][:rsc_name] = "drbd"
default[:drbd][:pacemaker][:ms][:meta][:master_max] = "2"
default[:drbd][:pacemaker][:ms][:meta][:master_node_max] = "1"
default[:drbd][:pacemaker][:ms][:meta][:clone_max] = "2"
default[:drbd][:pacemaker][:ms][:meta][:clone_node_max] = "1"
default[:drbd][:pacemaker][:ms][:meta][:notify] = "true"
default[:drbd][:pacemaker][:ms][:meta][:resource_stickiness] = "100"
default[:drbd][:pacemaker][:ms][:meta][:target_role] = "Started"

avg_line_length: 42.521739 | max_line_length: 68 | alphanum_fraction: 0.662577

hexsha: ac58b67a7ca761b4a91c2c612e5a1c476674a6f7 | size: 1,815
content:

class Since < Formula
  desc "Stateful tail: show changes to files since last check"
  homepage "http://welz.org.za/projects/since"
  url "http://welz.org.za/projects/since/since-1.1.tar.gz"
  sha256 "739b7f161f8a045c1dff184e0fc319417c5e2deb3c7339d323d4065f7a3d0f45"
  license "GPL-3.0"

  livecheck do
    url :homepage
    regex(/href=.*?since[._-]v?(\d+(?:\.\d+)+)\.t/i)
  end

  bottle do
    sha256 cellar: :any_skip_relocation, arm64_big_sur: "95b9b96522d9cdb0ac317550daf1c9ee102d1a4df7736cd2072d896adf05fc04"
    sha256 cellar: :any_skip_relocation, big_sur:       "60c3738e71c6455fa5a7445a21a79695d4644a34de06cbc05743a52c4f5b40f8"
    sha256 cellar: :any_skip_relocation, catalina:      "20b3f4888282ed47021562eb24efe9c37ef3a652ad64164460a5f368260e75d8"
    sha256 cellar: :any_skip_relocation, mojave:        "6c0290f3500966bb4155352bf277ae127eb341796729dfcc2b9ca968df20b9c4"
    sha256 cellar: :any_skip_relocation, high_sierra:   "a5b4f42858c41ad5d60850a3a01b8658fb4e58d2473fe2d36938f4ab66eb05c6"
    sha256 cellar: :any_skip_relocation, sierra:        "ff4ba4b7cad5fa4211bff04d5868521bc21b60995cf40f15bd507abb7c4cbaab"
    sha256 cellar: :any_skip_relocation, el_capitan:    "ec4898462899cb632329f71dc0b4dd9a13a051aafd6da7dfd22e940e9d1ce01a"
    sha256 cellar: :any_skip_relocation, yosemite:      "e92218f17ac1926f4651b3e70d3fe42d43b7024e1f10d0ab6f1c7c9dd6bad606"
    sha256 cellar: :any_skip_relocation, x86_64_linux:  "606f75f34e3690b1f38dfbe519ac9362e91ef226aaeac62840ff00fc11561de1" # linuxbrew-core
  end

  def install
    bin.mkpath
    man1.mkpath
    system "make", "install", "prefix=#{prefix}", "INSTALL=install"
  end

  test do
    (testpath/"test").write <<~EOS
      foo
      bar
    EOS
    system "#{bin}/since", "-z", "test"
    assert_predicate testpath/".since", :exist?
  end
end

avg_line_length: 45.375 | max_line_length: 139 | alphanum_fraction: 0.769146

hexsha: 4a2e70f0dfc3d556957033165217278a25cd5c52 | size: 184
content:

require "aws_cred_vault/version"

require 'aws_cred_vault/account'
require 'aws_cred_vault/bastion'
require 'aws_cred_vault/user'
require 'aws_cred_vault/toml'

module AwsCredVault
end

avg_line_length: 20.444444 | max_line_length: 32 | alphanum_fraction: 0.842391

hexsha: 395ef8c384c7fa8416fafbd28a38c57fd79a67bc | size: 559
content:

module Refinery
  module Events
    class Engine < Rails::Engine
      extend Refinery::Engine
      isolate_namespace Refinery::Events
      engine_name :refinery_events

      before_inclusion do
        Refinery::Plugin.register do |plugin|
          plugin.name = "events"
          plugin.url = proc { Refinery::Core::Engine.routes.url_helpers.events_admin_events_path }
          plugin.pathname = root
        end
      end

      config.after_initialize do
        Refinery.register_extension(Refinery::Events)
      end
    end
  end
end

avg_line_length: 23.291667 | max_line_length: 98 | alphanum_fraction: 0.645796

hexsha: 1cecd0f2ef271b5acd9228c59d2105915a25a1e9 | size: 14,978
content:
# # A class for creating and manipulating RGB color hashes by name or index. # # A default color set is defined in the class, other sets are from included modules. # The first two keys of a color set hash are the palette name and count, and are # followed by the color names and RGB array values, for example: # SET_NAME = { palette_name: 'Display Name', palette_count: 32, # 'color_name' => [r, g, b], # 'color_index' => [r, g, b] } # The palette_name is used to look up the color hash constant. The name will be # converted to screaming snake case by splitting the name at spaces (no regex), so # a display name of 'This is a test' needs to use the constant THIS_IS_A_TEST. # # Note that the indexing methods don't count these keys when using named # color sets (e.g. the first actual color is 0). # # Color.new will look up the color set hash if a name is provided, or a color set # hash can also be used. Color sets must have the palette keys listed above. # # The class RGB by color or index methods will also accept plain color hashes # (without palette keys). Care should be taken with key names for indexed sets # to prevent overwriting and preserve order if merging with other sets. A # percentage can also be used for the index, and indexes will wrap. # # Values are 0-255 integers instead of 0.0 - 1.0 floats to match existing # web documentation, tools, and DragonRuby GTK, although there are a few # conversion methods to work with hexadecimals and floats. # # Bang methods change the color object, otherwise an RGB array is returned. # # Looking up a color name from the RGB value can get expensive, so it is not # normally used. The name ivar is set from the parameters when creating the # color, and is reset to 'unknown' by the bang methods. The color name can # be looked up as desired with the name_from_rgb method. # # Class Methods: # Colors.color_set(set_name = 'Basics') # Colors.color_group(name_array = nil, names_only = false) # Colors.base_color_set(set_name = 'Basics') # Colors.merged_set # Colors.rgb_by_name(color_name = 'gray', set = 'Basics') # Colors.name_group(name_array = [], set = 'Basics') # Colors.rgb_by_index(color_index = 0, set = 'Basics') # Colors.index_group(index_array = [], set = 'Basics') # Colors.name_from_rgb(rgb_array, set = nil) # Colors.rgb_from_hex(hex_string) # Colors.hex_from_rgb(rgb_array) # Colors.float_from_rgba(int_rgba) # Colors.rgba_from_float(float_rgba) # Colors.new(color = 'gray', set = 'Basics', transparency = 255) # # Instance methods: # c.set_color(color = 'gray', set = 'Basics', transparency = nil) # c.alpha=(transparency) # c.to_h # c.opposite # c.opposite! # c.contrast # c.contrast! # c.lshift # c.lshift! # c.rshift # c.rshift! # c.tint(amount = 0.0) # c.tint!(amount = 0.0) # # Instance variables: # @set - the color set name, or the default if not found # @name - the color or index name, or 'unknown' if it doesn't exist # @rgb - an array of [red, green, blue] components (0 - 255) # @alpha - the transparency (0 - 255) # @rgba - an array of [red, green, blue, alpha] components # @hex - the hexadecimal equivalent for the rgb array (0080FF, etc) # # class Colors include HTMLColors # Basic colors and inverses (23) - color names are lower case. # Provides a default set if other color sets have not been included. 
BASICS = { palette_name: 'Basics', palette_count: 23, 'red' => [255, 0, 0], #FF0000 'aqua' => [0, 255, 255], #00FFFF 'lime' => [0, 255, 0], #00FF00 'magenta' => [255, 0, 255], #FF00FF 'blue' => [0, 0, 255], #0000FF 'yellow' => [255, 255, 0], #FFFF00 'lighterred' => [255, 128, 128], #FF8080 'teal' => [0, 128, 128], #008080 'lightergreen' => [128, 255, 128], #80FF80 'purple' => [128, 0, 128], #800080 'lighterblue' => [128, 128, 255], #8080FF 'olive' => [128, 128, 0], #808000 'maroon' => [128, 0, 0], #800000 'lightercyan' => [128, 255, 255], #80FFFF 'green' => [0, 128, 0], #008000 'lightermagenta' => [255, 128, 255], #FF80FF 'navy' => [0, 0, 128], #000080 'lighteryellow' => [255, 255, 128], #FFFF80 'black' => [0, 0, 0], #000000 'white' => [255, 255, 255], #FFFFFF 'gray' => [128, 128, 128], #808080 'silver' => [192, 192, 192], #C0C0C0 'darkergray' => [64, 64, 64] } #404040 ################################################## # ――― Attributes ――― ################################################## attr_reader :set, :name, :rgb, :alpha, :rgba, :hex ################################################## # ――― Class Methods ――― ################################################## class << self def color_set(set_name = 'Basics') # Return the hash for the specified color set name. return BASICS if set_name.to_s == '' || set_name.nil? symbol = set_name.to_s.trim.split.join('_').upcase.to_sym return merged_set if symbol == :MERGED_SET self.const_defined?(symbol) ? self.const_get(symbol) : BASICS end def color_group(name_array = nil, names_only = false) # Uses color_set to get an array of hashes for multiple color set names. # Returns all included color sets if the name_array parameter is nil. # An array of palette names is returned if the names_only flag is trueish. sets = name_array.nil? ? Colors.constants : Array(name_array) sets = [BASICS] if sets.empty? sets.each_with_object([]) do |name, output| output << (names_only ? color_set(name)[:palette_name] : color_set(name)) end end def base_color_set(set_name = 'Basics') # Return the hash for the specified color set name, less any palette keys. color_set(set_name).reject { |key, _| key.to_s.include?('palette_') } end def merged_set # Return a merged hash of all included color sets. # Individual palette keys are merged into a single entry. # Take care with indexed color sets to keep desired wrapping, indexes, etc. set = color_group.reduce(&:merge).reject { |key, _| key.to_s.include?('palette_') } { palette_name: 'Merged Set', palette_count: set.length }.merge(set) end def rgb_by_name(color_name = 'gray', set = 'Basics') # Get RGB values for a color name from a color set name or hash. # A gray color is used if the name is not specified or found. # Returns an [r, g, b] array. set = (set.class == Hash) ? set : color_set(set) color_name = set.keys.sample if color_name.to_s == 'random' rgb = set[color_name.to_s] rgb.nil? ? [128, 128, 128] : rgb end def name_group(name_array = [], set = 'Basics') # Uses rgb_by_name to get an array of RGB values for multiple color names. # Returns an array of [r, g, b] arrays. name_array.each_with_object([]) do |item, group| group << rgb_by_name(item, set) end end def rgb_by_index(color_index = 0, set = 'Basics') # Get RGB values by indexing into a color set name or hash. # Useful for circular sets or sets that use nameless colors. # Indexes wrap to the number of colors and skip the palette keys. # Returns an [r, g, b] array. set = (set.class == Hash) ? 
set : color_set(set) count = set.length - 2 # don't count palette keys color_index = color_index % count # wrap color_index = (color_index < 0) ? color_index + count : color_index rgb = set[set.keys[color_index + 2]] end def index_group(index_array = [], set = 'Basics') # Uses rgb_by_index to get an array of RGB values for multiple color indexes. # Returns an array of [r, g, b] arrays. index_array.each_with_object([]) do |item, group| group << rgb_by_index(item, set) end end def name_from_rgb(rgb_array, set = nil) # Look up a color name for an RGB array from a color set name or hash. # The merged set is used unless a color set is specified. # This can get expensive, so it is not normally used when manipulating RGB values. # Returns 'unknown' if the RGB array is not found. set = merged_set if set.nil? name = (set.class == Hash) ? set.key(rgb_array) : color_set(set).key(rgb_array) name.nil? ? 'unknown' : name end def rgb_from_hex(hex_string) # Get RGB for a hexadecimal string (0080FF, etc). # Returns an [r, g, b] array of integer (0 - 255) values. hex_string += '000000' # pad for incorrect length hex_string[0] = '' if hex_string.start_with?('#') [ hex_string[0..1].hex, hex_string[2..3].hex, hex_string[4..5].hex ] end def hex_from_rgb(rgb_array) # Get hex for an [r, g, b] array of integer (0 - 255) values. # Returns a hexadecimal string (0080FF, etc). rgb = rgb_array.map(&:abs) format('%02X%02X%02X', rgb[0], rgb[1], rgb[2]) end def float_from_rgba(int_rgba) # Convert an integer rgba array (0 - 255) to float values (0.0 - 1.0). # Returns the original item if it doesn't contain integers. rgba = int_rgba.map do |item| return int_rgba unless item.class == Fixnum item = (Float(item) / 255).round(2).clamp(0.0, 1.0) end end def rgba_from_float(float_rgba) # Convert a float rgba array (0.0 - 1.0) to integer values (0 - 255). # Returns the original item if it doesn't contain floats. rgba = float_rgba.map do |item| return float_rgba unless item.class == Float item = Integer((item * 255) + 0.5).clamp(0, 255) end end end ################################################## # ――― Initialization ――― ################################################## def initialize(color_name = 'gray', set = 'Basics', transparency = 255) # Set up a new color object with the specified name, color set, and transparency. # Using nil or the default for the transparency sets it to 255 (opaque). transparency = 255 if transparency.nil? set_color(color_name, set, transparency) # continue setup end ################################################## # ――― Instance Methods ――― ################################################## def set_color(color = 'gray', set = 'Basics', transparency = nil) # Set color attributes for a color name or index from a color set name or hash. # A nil transparency keeps the current setting. # Returns the new color - a gray color is used if the name or index is not found. set = (set.class == Hash) ? set : Colors.color_set(set) if color.class == Fixnum # index count = set.length - 2 # don't count palette keys index = color % count # wrap index = (index < 0) ? index + count : index color = set.keys[index + 2] @rgb = set[color] else @rgb = Colors.rgb_by_name(color, set) end @set = set[:palette_name] @alpha = transparency unless transparency.nil? @name = @rgb.nil? ? 'gray' : color # 'random' color name is not looked up update end def alpha=(transparency) # Set the color's transparency from an integer (0 - 255) or percentage (0.0 - 1.0). # Returns the alpha value (0 - 255). 
    @alpha = transparency
    @alpha *= 255 if transparency.class == Float && transparency <= 1.0
    @alpha = @alpha.round.to_i.clamp(0, 255)
    @rgba = @rgb + [Integer(@alpha)]
    @alpha
  end

  def to_h
    # Return the color's RGBA array as a hash.
    [ [:r, rgba[0]], [:g, rgba[1]], [:b, rgba[2]], [:a, rgba[3]] ].to_h
  end

  def opposite
    # Get complementary RGB values for the current color.
    # Returns an [r, g, b] array.
    [ 255 - @rgb[0], 255 - @rgb[1], 255 - @rgb[2] ]
  end

  def opposite!
    # Same as opposite, but changes and returns the new color.
    @rgb = opposite
    @name = 'unknown'
    update
  end

  def contrast
    # Get contrasting black or white RGB values for the current color.
    # Returns an [r, g, b] array.
    sum = @rgb.inject(0){|sum, x| sum + x }
    (sum / 3) < 128 ? [255, 255, 255] : [0, 0, 0]
  end

  def contrast!
    # Same as contrast, but changes and returns the new color.
    @rgb = contrast
    @name = 'unknown'
    update
  end

  def lshift
    # Shift the color's RGB array values left (b <- r, r <- g, g <- b).
    # Returns an [r, g, b] array.
    [ @rgb[1], @rgb[2], @rgb[0] ]
  end

  def lshift!
    # Same as lshift, but changes and returns the new color.
    @rgb = lshift
    @name = 'unknown'
    update
  end

  def rshift
    # Shift the color's RGB array values right (r -> g, g -> b, b -> r).
    # Returns an [r, g, b] array.
    [ @rgb[2], @rgb[0], @rgb[1] ]
  end

  def rshift!
    # Same as rshift, but changes and returns the new color.
    @rgb = rshift
    @name = 'unknown'
    update
  end

  def tint(amount = 0.0)
    # Add tint (toward white) or subtract shade (toward black) to RGB array values.
    # Amount can be +/- integer (0 - 255) or percentage (0.0 - 1.0).
    # Returns an [r, g, b] array.
    @rgb.each_with_object([]) do |component, result|
      if amount.class == Float && amount.abs <= 1.0
        if amount < 0
          value = Integer(component * (1.0 + amount) + 0.5)
        else
          value = Integer(component + ((255 - component) * amount) + 0.5)
        end
      else
        value = component + Integer(amount)
      end
      result << value.clamp(0, 255)
    end
  end

  def tint!(amount = 0.0)
    # Same as tint, but changes and returns the new color.
    @rgb = tint(amount)
    @name = 'unknown'
    update
  end

  ##################################################
  # ――― Private / Protected Methods ―――
  ##################################################

  private

  def update
    # Update color attributes from the RGB value.
    # Looking up the name is expensive, so it isn't automatically used.
    # Returns the new color.
    @rgba = @rgb + [@alpha]
    @hex = Colors.hex_from_rgb(@rgb)
    self
  end
end
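# Hedged usage sketch (added for illustration; not part of the original file).
# It assumes the HTMLColors module included above is defined elsewhere in the
# project; the names and values below come from the default BASICS set. The
# guard keeps the sketch inert when the file is simply required.
if __FILE__ == $PROGRAM_NAME
  c = Colors.new('red')
  p c.to_h                             # => {:r=>255, :g=>0, :b=>0, :a=>255}
  p c.opposite                         # => [0, 255, 255]   (aqua)
  p c.tint(0.5)                        # => [255, 128, 128] (halfway toward white)
  p Colors.hex_from_rgb([0, 128, 255]) # => "0080FF"
end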
35.918465
92
0.569435
1d284e3e26e32d32fab40f2f4cceadb81b625322
15,512
require 'spec_helper' describe Mongo::Collection::View::Writable do let(:selector) do {} end let(:options) do {} end let(:view) do Mongo::Collection::View.new(authorized_collection, selector, options) end after do authorized_collection.delete_many end describe '#find_one_and_delete' do before do authorized_collection.insert_many([{ field: 'test1' }]) end context 'when a matching document is found' do let(:selector) do { field: 'test1' } end context 'when no options are provided' do let!(:document) do view.find_one_and_delete end it 'deletes the document from the database' do expect(view.to_a).to be_empty end it 'returns the document' do expect(document['field']).to eq('test1') end end context 'when a projection is provided' do let!(:document) do view.projection(_id: 1).find_one_and_delete end it 'deletes the document from the database' do expect(view.to_a).to be_empty end it 'returns the document with limited fields' do expect(document['field']).to be_nil expect(document['_id']).to_not be_nil end end context 'when a sort is provided' do let!(:document) do view.sort(field: 1).find_one_and_delete end it 'deletes the document from the database' do expect(view.to_a).to be_empty end it 'returns the document with limited fields' do expect(document['field']).to eq('test1') end end end context 'when no matching document is found' do let(:selector) do { field: 'test5' } end let!(:document) do view.find_one_and_delete end it 'returns nil' do expect(document).to be_nil end end end describe '#find_one_and_replace' do before do authorized_collection.insert_many([{ field: 'test1', other: 'sth' }]) end context 'when a matching document is found' do let(:selector) do { field: 'test1' } end context 'when no options are provided' do let(:document) do view.find_one_and_replace({ field: 'testing' }) end it 'returns the original document' do expect(document['field']).to eq('test1') end end context 'when return_document options are provided' do let(:document) do view.find_one_and_replace({ field: 'testing' }, :return_document => :after) end it 'returns the new document' do expect(document['field']).to eq('testing') end it 'replaces the document' do expect(document['other']).to be_nil end end context 'when a projection is provided' do let(:document) do view.projection(_id: 1).find_one_and_replace({ field: 'testing' }) end it 'returns the document with limited fields' do expect(document['field']).to be_nil expect(document['_id']).to_not be_nil end end context 'when a sort is provided' do let(:document) do view.sort(field: 1).find_one_and_replace({ field: 'testing' }) end it 'returns the original document' do expect(document['field']).to eq('test1') end end end context 'when no matching document is found' do context 'when no upsert options are provided' do let(:selector) do { field: 'test5' } end let(:document) do view.find_one_and_replace({ field: 'testing' }) end it 'returns nil' do expect(document).to be_nil end end context 'when upsert options are provided' do let(:selector) do { field: 'test5' } end let(:document) do view.find_one_and_replace({ field: 'testing' }, :upsert => true, :return_document => :after) end it 'returns the new document' do expect(document['field']).to eq('testing') end end end end describe '#find_one_and_update' do before do authorized_collection.insert_many([{ field: 'test1' }]) end context 'when a matching document is found' do let(:selector) do { field: 'test1' } end context 'when no options are provided' do let(:document) do view.find_one_and_update({ '$set' => { field: 'testing' }}) 
end it 'returns the original document' do expect(document['field']).to eq('test1') end end context 'when return_document options are provided' do let(:document) do view.find_one_and_update({ '$set' => { field: 'testing' }}, :return_document => :after) end it 'returns the new document' do expect(document['field']).to eq('testing') end end context 'when a projection is provided' do let(:document) do view.projection(_id: 1).find_one_and_update({ '$set' => { field: 'testing' }}) end it 'returns the document with limited fields' do expect(document['field']).to be_nil expect(document['_id']).to_not be_nil end end context 'when a sort is provided' do let(:document) do view.sort(field: 1).find_one_and_update({ '$set' => { field: 'testing' }}) end it 'returns the original document' do expect(document['field']).to eq('test1') end end end context 'when no matching document is found' do let(:selector) do { field: 'test5' } end let(:document) do view.find_one_and_update({ '$set' => { field: 'testing' }}) end it 'returns nil' do expect(document).to be_nil end end end describe '#delete_many' do context 'when a selector was provided' do let(:selector) do { field: 'test1' } end before do authorized_collection.insert_many([{ field: 'test1' }, { field: 'test2' }]) end let(:response) do view.delete_many end it 'deletes the matching documents in the collection' do expect(response.written_count).to eq(1) end end context 'when no selector was provided' do before do authorized_collection.insert_many([{ field: 'test1' }, { field: 'test2' }]) end let(:response) do view.delete_many end it 'deletes all the documents in the collection' do expect(response.written_count).to eq(2) end end end describe '#delete_one' do context 'when a selector was provided' do let(:selector) do { field: 'test1' } end before do authorized_collection.insert_many([ { field: 'test1' }, { field: 'test1' }, { field: 'test1' } ]) end let(:response) do view.delete_one end it 'deletes the first matching document in the collection' do expect(response.written_count).to eq(1) end end context 'when no selector was provided' do before do authorized_collection.insert_many([{ field: 'test1' }, { field: 'test2' }]) end let(:response) do view.delete_one end it 'deletes the first document in the collection' do expect(response.written_count).to eq(1) end end end describe '#replace_one' do context 'when a selector was provided' do let(:selector) do { field: 'test1' } end before do authorized_collection.insert_many([{ field: 'test1' }, { field: 'test1' }]) end let!(:response) do view.replace_one({ field: 'testing' }) end let(:updated) do authorized_collection.find(field: 'testing').first end it 'updates the first matching document in the collection' do expect(response.written_count).to eq(1) end it 'updates the documents in the collection' do expect(updated[:field]).to eq('testing') end end context 'when no selector was provided' do before do authorized_collection.insert_many([{ field: 'test1' }, { field: 'test2' }]) end let!(:response) do view.replace_one({ field: 'testing' }) end let(:updated) do authorized_collection.find(field: 'testing').first end it 'updates the first document in the collection' do expect(response.written_count).to eq(1) end it 'updates the documents in the collection' do expect(updated[:field]).to eq('testing') end end context 'when upsert is false' do let!(:response) do view.replace_one({ field: 'test1' }, upsert: false) end let(:updated) do authorized_collection.find(field: 'test1').to_a end it 'reports that no documents were written' 
do expect(response.written_count).to eq(0) end it 'does not insert the document' do expect(updated).to be_empty end end context 'when upsert is true' do let!(:response) do view.replace_one({ field: 'test1' }, upsert: true) end let(:updated) do authorized_collection.find(field: 'test1').first end it 'reports that a document was written' do expect(response.written_count).to eq(1) end it 'inserts the document' do expect(updated[:field]).to eq('test1') end end context 'when upsert is not specified' do let!(:response) do view.replace_one({ field: 'test1' }) end let(:updated) do authorized_collection.find(field: 'test1').to_a end it 'reports that no documents were written' do expect(response.written_count).to eq(0) end it 'does not insert the document' do expect(updated).to be_empty end end end describe '#update_many' do context 'when a selector was provided' do let(:selector) do { field: 'test' } end before do authorized_collection.insert_many([{ field: 'test' }, { field: 'test' }]) end let!(:response) do view.update_many('$set'=> { field: 'testing' }) end let(:updated) do authorized_collection.find(field: 'testing').first end it 'returns the number updated' do expect(response.written_count).to eq(2) end it 'updates the documents in the collection' do expect(updated[:field]).to eq('testing') end end context 'when no selector was provided' do before do authorized_collection.insert_many([{ field: 'test1' }, { field: 'test2' }]) end let!(:response) do view.update_many('$set'=> { field: 'testing' }) end let(:updated) do authorized_collection.find end it 'returns the number updated' do expect(response.written_count).to eq(2) end it 'updates all the documents in the collection' do updated.each do |doc| expect(doc[:field]).to eq('testing') end end end context 'when upsert is false' do let(:response) do view.update_many({ '$set'=> { field: 'testing' } }, upsert: false) end let(:updated) do authorized_collection.find.to_a end it 'reports that no documents were updated' do expect(response.written_count).to eq(0) end it 'updates no documents in the collection' do expect(updated).to be_empty end end context 'when upsert is true' do let!(:response) do view.update_many({ '$set'=> { field: 'testing' } }, upsert: true) end let(:updated) do authorized_collection.find.first end it 'reports that a document was written' do expect(response.written_count).to eq(1) end it 'inserts a document into the collection' do expect(updated[:field]).to eq('testing') end end context 'when upsert is not specified' do let(:response) do view.update_many({ '$set'=> { field: 'testing' } }) end let(:updated) do authorized_collection.find.to_a end it 'reports that no documents were updated' do expect(response.written_count).to eq(0) end it 'updates no documents in the collection' do expect(updated).to be_empty end end end describe '#update_one' do context 'when a selector was provided' do let(:selector) do { field: 'test1' } end before do authorized_collection.insert_many([{ field: 'test1' }, { field: 'test1' }]) end let!(:response) do view.update_one('$set'=> { field: 'testing' }) end let(:updated) do authorized_collection.find(field: 'testing').first end it 'updates the first matching document in the collection' do expect(response.written_count).to eq(1) end it 'updates the documents in the collection' do expect(updated[:field]).to eq('testing') end end context 'when no selector was provided' do before do authorized_collection.insert_many([{ field: 'test1' }, { field: 'test2' }]) end let!(:response) do view.update_one('$set'=> { 
field: 'testing' }) end let(:updated) do authorized_collection.find(field: 'testing').first end it 'updates the first document in the collection' do expect(response.written_count).to eq(1) end it 'updates the documents in the collection' do expect(updated[:field]).to eq('testing') end end context 'when upsert is false' do let(:response) do view.update_one({ '$set'=> { field: 'testing' } }, upsert: false) end let(:updated) do authorized_collection.find.to_a end it 'reports that no documents were updated' do expect(response.written_count).to eq(0) end it 'updates no documents in the collection' do expect(updated).to be_empty end end context 'when upsert is true' do let!(:response) do view.update_one({ '$set'=> { field: 'testing' } }, upsert: true) end let(:updated) do authorized_collection.find.first end it 'reports that a document was written' do expect(response.written_count).to eq(1) end it 'inserts a document into the collection' do expect(updated[:field]).to eq('testing') end end context 'when upsert is not specified' do let(:response) do view.update_one({ '$set'=> { field: 'testing' } }) end let(:updated) do authorized_collection.find.to_a end it 'reports that no documents were updated' do expect(response.written_count).to eq(0) end it 'updates no documents in the collection' do expect(updated).to be_empty end end end end
22.811765
102
0.579229
3869c036f3cf36d035765ae06b7b009329b94873
1,473
require 'logger'
require 'json'
require 'csv'
require 'fileutils'
require './page_saver.rb'
require './config_loader.rb'

############## Initialize
conf = ConfigLoader.load('tennis.conf')
JSON_OUTDIR = conf::JSON_OUT_DIR || 'file/json'
CSV_OUTDIR = conf::CSV_OUT_DIR || 'file/csv'
logdir = conf::LOG_DIR || 'log'
#$log = Logger.new("#{logdir}/#{File.basename(__FILE__)}.log",'daily')
$log = Logger.new(STDOUT)
#$log.level = conf::LOG_LEVEL
$log.level = Logger::DEBUG
$log.info File.basename(__FILE__) + ' start'

############# convert player result json to csv
$log.info "converting result json to csv"
id = 1035

FileUtils.mkdir_p(CSV_OUTDIR) unless Dir.exist?(CSV_OUTDIR)
outfile_full = "#{CSV_OUTDIR}/player_result_#{id}.csv"

CSV.open(outfile_full, "w") do |csv|
  csv << ["year", "tournament_seq", "round", "result"]
  Dir.glob("#{JSON_OUTDIR}/*_#{id}_*.json").each do |infile|
    result = File.open(infile) { |f| JSON.load(f) }
    # $log.debug {result}
    # $log.debug {"#{result["year"]}"}
    # $log.debug {"#{result["tournaments"]}"}
    result["tournaments"].each_with_index do |t, i|
      # $log.debug {"#{result['year']},#{i+1},#{t['name']}"}
      t["matches"].select { |m| m['result'] !~ /(-|BYE)/ }.each do |m|
        $log.debug { "#{result['year']},#{i + 1},#{m['round']},#{m['result']}" }
        csv << [result['year'], i + 1, m['round'], m['result']]
      end
    end
    $log.info { "converted successfully #{infile} to #{outfile_full}" }
    # break
  end
end

$log.info File.basename(__FILE__) + ' end'
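# Hedged sketch (added for illustration) of the JSON shape the loop above
# expects; only the keys come from the code, the values are made up:
#
#   {
#     "year": "2019",
#     "tournaments": [
#       { "name": "Example Open",
#         "matches": [
#           { "round": "R16", "result": "6-3 6-4" },
#           { "round": "QF",  "result": "-" }   # skipped by the /(-|BYE)/ filter
#         ] }
#     ]
#   }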
32.021739
76
0.623897
013bd4d25ef17a70aae330962ec9f28c8f926dc0
219
class CreateFrequentlyAskedQuestions < ActiveRecord::Migration[5.2]
  def change
    create_table :frequently_asked_questions do |t|
      t.string :question
      t.string :answer

      t.timestamps
    end
  end
end
19.909091
67
0.712329
bf237e04c14ceb3ce0226a51a9251aaed6453baf
16,239
# == Schema Information # # Table name: external_users # # id :integer not null, primary key # created_at :datetime # updated_at :datetime # supplier_number :string # uuid :uuid # vat_registered :boolean default(TRUE) # provider_id :integer # roles :string # deleted_at :datetime # require 'rails_helper' require 'support/shared_examples_for_claim_types' RSpec.describe ExternalUser, type: :model do it_behaves_like 'roles', ExternalUser, ExternalUser::ROLES it { should belong_to(:provider) } it { should have_many(:claims) } it { should have_many(:claims_created) } it { should have_many(:documents) } it { should have_one(:user) } it { should validate_presence_of(:provider) } it { should validate_presence_of(:user) } it { should accept_nested_attributes_for(:user) } it { should delegate_method(:email).to(:user) } it { should delegate_method(:first_name).to(:user) } it { should delegate_method(:last_name).to(:user) } it { should delegate_method(:name).to(:user) } it { should delegate_method(:agfs?).to(:provider) } it { should delegate_method(:lgfs?).to(:provider) } context 'supplier number validation' do context 'when no Provider present' do context 'for advocate' do before { subject.roles = ['advocate'] } it 'is valid' do a = build :external_user, :advocate expect(a).to be_valid end end context 'for admin' do before { subject.roles = ['admin'] } it 'is valid' do a = build :external_user, :admin expect(a).to be_valid end end end context 'when Provider present and Provider is a "firm"' do let!(:provider) { create(:provider, :agfs_lgfs, firm_agfs_supplier_number: 'ZZ123') } before do subject.provider = provider end it { should_not validate_presence_of(:supplier_number) } context 'for advocate' do before { subject.roles = ['advocate'] } it 'is valid without a supplier number' do a = build :external_user, :advocate, provider: provider, supplier_number: nil expect(a).to be_valid end end context 'for admin' do before { subject.roles = ['admin'] } it { should_not validate_presence_of(:supplier_number) } it 'is valid without a supplier number' do a = build :external_user, :admin, provider: provider, supplier_number: nil expect(a).to be_valid end end end context 'when provider present and Provider is a "chamber"' do let(:provider) { create(:provider, provider_type: 'chamber', firm_agfs_supplier_number: '') } before do subject.provider = provider end context 'for advocate' do before { subject.roles = ['advocate'] } let(:format_error) { ['Enter a valid supplier number'] } it { should validate_presence_of(:supplier_number) } it 'is not valid without a supplier number' do a = build :external_user, provider: provider, supplier_number: nil expect(a).not_to be_valid end it 'fails validation if too long' do a = build :external_user, supplier_number: 'ACC123', provider: provider expect(a).not_to be_valid expect(a.errors[:supplier_number]).to eq(format_error) end it 'fails validation if too short' do a = build :external_user, supplier_number: 'AC12', provider: provider expect(a).not_to be_valid expect(a.errors[:supplier_number]).to eq(format_error) end it 'fails validation if not alpha-numeric' do a = build :external_user, supplier_number: 'AC-12', provider: provider expect(a).not_to be_valid expect(a.errors[:supplier_number]).to eq(format_error) end it 'passes validation if 5 alpha-numeric' do a = build :external_user, supplier_number: 'AC123', provider: provider expect(a).to be_valid end end context 'for admin' do before { subject.roles = ['admin'] } it { should_not validate_presence_of(:supplier_number) } it 
'is valid without a supplier number' do a = build :external_user, :admin, provider: provider, supplier_number: nil expect(a).to be_valid end end end end describe '#name' do subject { create(:external_user) } it 'returns the first and last names' do expect(subject.name).to eq("#{subject.first_name} #{subject.last_name}") end end describe 'ROLES' do it 'has "admin" and "advocate" and "litigator"' do expect(ExternalUser::ROLES).to match_array(%w(admin advocate litigator)) end end describe '.admins' do before do create(:external_user, :admin) create(:external_user, :advocate) end it 'only returns external_users with role "admin"' do expect(ExternalUser.admins.count).to eq(1) end it 'returns external_users with role "admin" and "advocate"' do e = ExternalUser.first e.roles = ['admin', 'advocate'] e.supplier_number = 'ZA111' e.save! expect(ExternalUser.admins.count).to eq(1) end end describe '.advocates' do before do create(:external_user, :admin) create(:external_user, :admin) create(:external_user) end it 'only returns external_users with role "advocate"' do expect(ExternalUser.advocates.count).to eq(1) end it 'returns external_users with role "admin" and "advocate"' do e = ExternalUser.last e.roles = ['admin', 'advocate'] e.save! expect(ExternalUser.advocates.count).to eq(1) end end describe 'roles' do let(:admin) { create(:external_user, :admin) } let(:advocate) { create(:external_user, :advocate) } describe '#is?' do context 'given advocate' do context 'if advocate' do it 'returns true' do expect(advocate.is? :advocate).to eq(true) end end context 'for an admin' do it 'returns false' do expect(admin.is? :advocate).to eq(false) end end end context 'given admin' do context 'for an admin' do it 'returns true' do expect(admin.is? :admin).to eq(true) end end context 'for a advocate' do it 'returns false' do expect(advocate.is? :admin).to eq(false) end end end end describe '#advocate?' do context 'for an advocate' do it 'returns true' do expect(advocate.advocate?).to eq(true) end end context 'for an admin' do it 'returns false' do expect(admin.advocate?).to eq(false) end end end describe '#admin?' do context 'for an admin' do it 'returns true' do expect(admin.admin?).to eq(true) end end context 'for a advocate' do it 'returns false' do expect(advocate.admin?).to eq(false) end end end end describe '#available_claim_types' do subject { user.available_claim_types.map(&:to_s) } include_context 'claim-types object helpers' context 'for users with only an advocate role' do let(:user) { build(:external_user, :advocate) } it { is_expected.to match_array(agfs_claim_object_types) } end context 'for users with only a litigator role' do let(:user) { build(:external_user, :litigator) } it { is_expected.to match_array(lgfs_claim_object_types) } end context 'for users with an admin role' do let(:user) { build(:external_user, :admin, provider: build(:provider, :agfs)) } # TODO: i believe this is flawed as an admin should delegate available claim types to the provider) # e.g. 
an admin in an agfs only provider can only create advocate claims it { is_expected.to match_array(all_claim_object_types) } end context 'for users with both an advocate and litigator role in provider with both agfs and lgfs role' do let(:user) { build(:external_user, :advocate_litigator) } it { is_expected.to match_array(all_claim_object_types) } end end describe '#available_roles' do subject { user.available_roles } let(:user) { create(:external_user, :advocate, provider: provider) } # NOTE: there is provider cannot be blank validation - pointless test? context 'when the user does not belong to a provider' do let(:provider) { build(:provider) } before { user.provider = nil } it 'returns admin' do is_expected.to match_array %w[admin] end end context 'when the user belongs to a provider that' do context 'handles both AGFS and LGFS claims' do let(:provider) { build(:provider, :agfs_lgfs) } it { is_expected.to match_array %w[admin advocate litigator] } end context 'handles only AGFS claims' do let(:provider) { build(:provider, :agfs) } it { is_expected.to match_array %w[admin advocate] } end context 'handles only LGFS claims' do let(:provider) { build(:provider, :lgfs) } it { is_expected.to match_array %w[admin litigator] } end end context 'when an invalid role supplied' do let(:provider) { build(:provider) } before { user.provider.roles = %w[invalid_role] } it 'raises an error' do expect { user.available_roles }.to raise_error(RuntimeError) end end end describe '#name_and_number' do it 'returns last name, first name and supplier number' do a = create(:external_user, supplier_number: 'XX878', user: create(:user, last_name: 'Smith', first_name: 'John')) expect(a.name_and_number).to eq 'Smith, John (XX878)' end end context 'soft deletions' do before(:all) do @live_user_1 = create :external_user @live_user_2 = create :external_user @dead_user_1 = create :external_user, :softly_deleted @dead_user_2 = create :external_user, :softly_deleted end after(:all) { clean_database } describe 'active scope' do it 'only returns undeleted records' do expect(ExternalUser.active.order(:id)).to eq([@live_user_1, @live_user_2]) end it 'returns ActiveRecord::RecordNotFound if find by id relates to a deleted record' do expect { ExternalUser.active.find(@dead_user_1.id) }.to raise_error ActiveRecord::RecordNotFound, %Q{Couldn't find ExternalUser with 'id'=#{@dead_user_1.id} [WHERE "external_users"."deleted_at" IS NULL]} end it 'returns an empty array if the selection criteria only reference deleted records' do expect(ExternalUser.active.where(id: [@dead_user_1.id, @dead_user_2.id])).to be_empty end end describe 'deleted scope' do it 'returns only deleted records' do expect(ExternalUser.softly_deleted.order(:id)).to eq([@dead_user_1, @dead_user_2]) end it 'returns ActiveRecord::RecordNotFound if find by id relates to an undeleted record' do expect(ExternalUser.find(@live_user_1.id)).to eq(@live_user_1) expect { ExternalUser.softly_deleted.find(@live_user_1.id) }.to raise_error ActiveRecord::RecordNotFound, /Couldn't find ExternalUser with 'id'=#{@live_user_1.id}/ end it 'returns an empty array if the selection criteria only reference live records' do expect(User.softly_deleted.where(id: [@live_user_1.id, @live_user_2.id])).to be_empty end end describe 'default scope' do it 'returns deleted and undeleted records' do expect(ExternalUser.order(:id)).to eq([@live_user_1, @live_user_2, @dead_user_1, @dead_user_2]) end it 'returns the record if find by id relates to a deleted record' do 
expect(ExternalUser.find(@dead_user_1.id)).to eq @dead_user_1 end it 'returns the deleted records if the selection criteria reference only deleted records' do expect(ExternalUser.where(id: [@dead_user_1.id, @dead_user_2.id]).order(:id)).to eq([@dead_user_1, @dead_user_2]) end end end describe 'soft_delete' do it 'sets deleted at on the caseworker and user records' do eu = create :external_user user = eu.user eu.soft_delete expect(eu.reload.deleted_at).not_to be_nil expect(user.reload.deleted_at).not_to be_nil end end describe '#active?' do it 'returns false for deleted records' do eu = build :external_user, :softly_deleted expect(eu.active?).to be false end it 'returns true for active records' do eu = build :external_user expect(eu.active?).to be true end end describe 'supplier_number' do context 'supplier number present' do let(:external_user) { create :external_user, :advocate, supplier_number: 'ZZ114' } it 'returns the supplier number from the external user record' do expect(external_user.supplier_number).to eq 'ZZ114' end end context 'supplier number not present but provider is a firm' do let(:provider) { create :provider, :agfs_lgfs, firm_agfs_supplier_number: '999XX' } let(:external_user) { create :external_user, :advocate, supplier_number: nil, provider: provider } it 'returns the firm_agfs_supplier_number from the provider' do expect(external_user.supplier_number).to eq '999XX' end end end context 'email notification of messages preferences' do context 'settings on user record are nil' do let(:eu) { build :external_user } it 'has an underlying user setting of nil' do expect(eu.user.settings).to eq Hash.new end it 'returns false' do expect(eu.send_email_notification_of_message?).to be false end it 'sets the setting to true' do eu.email_notification_of_message = 'true' expect(eu.send_email_notification_of_message?).to be true end it 'sets the setting to false' do eu.email_notification_of_message = 'false' expect(eu.send_email_notification_of_message?).to be false end end context 'no setttings for email notifications present' do let(:eu) { build :external_user, :with_settings } it 'returns false' do expect(eu.settings).to eq({ 'setting1' => 'test1', 'setting2' => 'test2' }) expect(eu.send_email_notification_of_message?).to be false end it 'sets the setting to true' do eu.email_notification_of_message = 'true' expect(eu.send_email_notification_of_message?).to be true end it 'sets the setting to false' do eu.email_notification_of_message = 'false' expect(eu.send_email_notification_of_message?).to be false end end context 'settings for email notification are true' do let(:eu) { build :external_user, :with_email_notification_of_messages } it 'returns true' do expect(eu.send_email_notification_of_message?).to be true end it 'sets the setting to false' do eu.email_notification_of_message = 'false' expect(eu.send_email_notification_of_message?).to be false end end context 'settings for email notification are false' do let(:eu) { build :external_user, :without_email_notification_of_messages } it 'returns false' do expect(eu.send_email_notification_of_message?).to be false end it 'sets the setting to true' do eu.email_notification_of_message = 'true' expect(eu.send_email_notification_of_message?).to be true end end end end def create_admin(provider, first_name, last_name) create :external_user, :admin, provider: provider, user: create(:user, first_name: first_name, last_name: last_name) end def create_external_user(provider, first_name, last_name) create :external_user, provider: provider, 
user: create(:user, first_name: first_name, last_name: last_name) end
31.716797
160
0.650533
e81548de5abcdfcf1383f3c432f6401e1f80d42c
211
# frozen_string_literal: true

class Datadog::MonitorsController < ApplicationController
  def index
    @stage = Stage.find(params.fetch(:id))
    render "samson_datadog/_monitor_list", layout: false
  end
end
23.444444
57
0.763033
1acf5d5b7b3040b8f5946bb17badf4898cfdada6
480
class Rack::Tracker::FacebookPixel < Rack::Tracker::Handler
  self.position = :body

  class Event < OpenStruct
    def write
      options.present? ? type_to_json << options_to_json : type_to_json
    end

    private

    def type_to_json
      type.to_json
    end

    def options_to_json
      ", #{options.to_json}"
    end
  end

  class Track < Event
    def name
      'track'
    end
  end

  class TrackCustom < Event
    def name
      'trackCustom'
    end
  end
end
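# Hedged usage sketch (added for illustration; not part of the original file).
# The event type and options are made up, and `options.present?` relies on
# ActiveSupport being loaded by the surrounding rack-tracker setup. The guard
# keeps the sketch inert when this file is simply required.
if __FILE__ == $PROGRAM_NAME
  track = Rack::Tracker::FacebookPixel::Track.new(
    type: 'Purchase', options: { value: 9.99, currency: 'USD' }
  )
  puts track.name   # => track
  puts track.write  # => "Purchase", {"value":9.99,"currency":"USD"}

  custom = Rack::Tracker::FacebookPixel::TrackCustom.new(type: 'SharedDiscount')
  puts custom.write # => "SharedDiscount"
end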
15
71
0.629167
79f0e155a7aabd0ed78dd8213d0a4b958df7c8e5
2,826
require File.expand_path("../base.rb", __FILE__)

def local_build?
  File.exist?(ext_path("appsignal-agent")) &&
    File.exist?(ext_path("libappsignal.a")) &&
    File.exist?(ext_path("appsignal.h"))
end

def install
  library_type = "static"
  report["language"]["implementation"] = "ruby"
  report["build"]["library_type"] = library_type
  return unless check_architecture

  if local_build?
    report["build"]["source"] = "local"
  else
    archive = download_archive(library_type)
    return unless archive
    return unless verify_archive(archive, library_type)
    unarchive(archive)
  end

  is_linux_system = [
    Appsignal::System::LINUX_TARGET,
    Appsignal::System::MUSL_TARGET
  ].include?(AGENT_PLATFORM)

  require "mkmf"
  link_libraries if is_linux_system

  if !have_library("appsignal", "appsignal_start", "appsignal.h")
    abort_installation("Library libappsignal.a or appsignal.h not found")
  elsif !find_executable("appsignal-agent", EXT_PATH)
    abort_installation("File appsignal-agent not found")
  else
    if is_linux_system
      # Statically link libgcc and libgcc_s libraries.
      # Dependencies of the libappsignal extension library.
      # If the gem is installed on a host with build tools installed, but is
      # run on one that isn't the missing libraries will cause the extension
      # to fail on start.
      $LDFLAGS += " -static-libgcc" # rubocop:disable Style/GlobalVars
      report["build"]["flags"]["LDFLAGS"] = $LDFLAGS # rubocop:disable Style/GlobalVars
    end
    create_makefile "appsignal_extension"
    successful_installation
  end
rescue => error
  fail_installation_with_error(error)
ensure
  create_dummy_makefile unless installation_succeeded?
  write_report
end

# Ruby 2.6 requires us to statically link more libraries we use in our
# extension library than previous versions. Needed for normal Linux libc
# and musl builds.
def link_libraries
  if RbConfig::CONFIG["THREAD_MODEL"] == "pthread"
    # Link gem extension against pthread library
    have_library "pthread"
    have_required_function "pthread", "pthread_create"
  end
  # Links gem extension against the `dl` library. This is needed when Ruby is
  # not linked against `dl` itself, so link it on the gem extension.
  have_library "dl"
  # Check if functions are available now from the linked library
  %w[dlopen dlclose dlsym].each do |func|
    have_required_function "dl", func
  end
end

def have_required_function(library, func) # rubocop:disable Naming/PredicateName
  if have_func(func)
    report["build"]["dependencies"][library] = "linked"
    return
  end

  report["build"]["dependencies"][library] = "not linked"
  abort_installation("Missing function '#{func}'")
  # Exit with true/0/success because the AppSignal installation should never
  # break a build
  exit
end

install
31.752809
87
0.731776
d548555d3ba82a4ef608e470292ad195d0b19f9a
1,071
Gem::Specification.new do |s|
  s.name = 'user_trackers'
  s.version = '0.0.14'
  s.date = '2019-11-20'
  s.summary = "Gem for tracking user's activity on a rails app using mixpanel, intercom, slack and database"
  s.description = "Gem for tracking user's activity on a rails app using mixpanel, intercom, slack and database"
  s.authors = ["Camilo Barraza"]
  s.email = '[email protected]'
  s.files = Dir["{lib}/**/*.rb", "bin/*", "LICENSE", "*.md"]
  s.metadata = { "source_code_uri" => "https://github.com/camilo-barraza/user_trackers" }
  s.license = 'MIT'
  s.required_ruby_version = ">= 2.4.0"

  s.add_dependency "sidekiq", ">= 5.0.0"
  s.add_dependency "resque", ">= 2.0.0"
  s.add_dependency 'mixpanel-ruby', ">= 2.0.0"
  s.add_dependency 'intercom', ">= 3.0.0"
  s.add_dependency 'slack-ruby-client', "~> 0.14.0"
  s.add_dependency 'uuid', ">= 2.0.0"

  s.add_development_dependency "rake", ">= 10.0"
  s.add_development_dependency "minitest", ">= 5.0"
  s.add_development_dependency "rspec", ">= 3.9.0"
end
46.565217
112
0.631186
e9693cfa95cf24868b2c55d4ef0db969b41cf13f
460
module StripWhitespace
  def self.included(base)
    base.extend(ClassMethods)
  end

  module ClassMethods
    def strip_whitespace!(*fields)
      before_validation do |record|
        fields.each do |field|
          setter = "#{field}=".to_sym
          value = record.send(field.to_sym)
          if value.respond_to?(:strip) and record.respond_to?(setter)
            record.send(setter, value.strip)
          end
        end
      end
    end
  end
end
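# Hedged usage sketch (added for illustration; not part of the original file).
# Contact and its columns are made up; any class providing before_validation
# (for example an ActiveRecord model) works the same way.
#
#   class Contact < ActiveRecord::Base
#     include StripWhitespace
#     strip_whitespace! :name, :email
#   end
#
#   contact = Contact.new(name: '  Ada Lovelace ', email: ' [email protected] ')
#   contact.valid?
#   contact.name   # => "Ada Lovelace"
#   contact.email  # => "[email protected]"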
24.210526
69
0.617391
f79045a18d59981c430cff9d2a7c9d11d9bbb961
4,806
class GoodsNomenclature < Sequel::Model
  extend ActiveModel::Naming

  set_dataset order(Sequel.asc(:goods_nomenclatures__goods_nomenclature_item_id))

  set_primary_key [:goods_nomenclature_sid]

  plugin :time_machine, period_start_column: Sequel.qualify(:goods_nomenclatures, :validity_start_date),
                        period_end_column: Sequel.qualify(:goods_nomenclatures, :validity_end_date)
  plugin :oplog, primary_key: :goods_nomenclature_sid
  plugin :nullable
  plugin :conformance_validator
  plugin :active_model

  plugin :sti, class_determinator: ->(record) {
    gono_id = record[:goods_nomenclature_item_id].to_s

    if gono_id.ends_with?('00000000')
      'Chapter'
    elsif gono_id.ends_with?('000000') && gono_id.slice(2,2) != '00'
      'Heading'
    elsif !gono_id.ends_with?('000000')
      'Commodity'
    else
      'GoodsNomenclature'
    end
  }

  one_to_many :goods_nomenclature_indents, key: :goods_nomenclature_sid,
                                           primary_key: :goods_nomenclature_sid do |ds|
    ds.with_actual(GoodsNomenclatureIndent, self)
      .order(Sequel.desc(:goods_nomenclature_indents__validity_start_date))
  end

  def goods_nomenclature_indent
    goods_nomenclature_indents(true).first
  end

  many_to_many :goods_nomenclature_descriptions, join_table: :goods_nomenclature_description_periods,
                                                 left_primary_key: :goods_nomenclature_sid,
                                                 left_key: :goods_nomenclature_sid,
                                                 right_key: [:goods_nomenclature_description_period_sid, :goods_nomenclature_sid],
                                                 right_primary_key: [:goods_nomenclature_description_period_sid, :goods_nomenclature_sid] do |ds|
    ds.with_actual(GoodsNomenclatureDescriptionPeriod, self)
      .order(Sequel.desc(:goods_nomenclature_description_periods__validity_start_date))
  end

  def goods_nomenclature_description
    goods_nomenclature_descriptions(true).first || NullGoodsNomenclature.new
  end

  many_to_many :footnotes, join_table: :footnote_association_goods_nomenclatures,
                           left_primary_key: :goods_nomenclature_sid,
                           left_key: :goods_nomenclature_sid,
                           right_key: [:footnote_type, :footnote_id],
                           right_primary_key: [:footnote_type_id, :footnote_id] do |ds|
    ds.with_actual(FootnoteAssociationGoodsNomenclature)
  end

  def footnote
    footnotes(true).first
  end

  one_to_one :national_measurement_unit_set, key: :cmdty_code,
                                             primary_key: :goods_nomenclature_item_id do |ds|
    ds.with_actual(Chief::Comm)
  end

  delegate :national_measurement_unit_set_units, to: :national_measurement_unit_set, allow_nil: true
  delegate :number_indents, to: :goods_nomenclature_indent, allow_nil: true
  delegate :description, :formatted_description, to: :goods_nomenclature_description, allow_nil: true

  one_to_one :goods_nomenclature_origin, key: [:goods_nomenclature_item_id, :productline_suffix],
                                         primary_key: [:goods_nomenclature_item_id, :producline_suffix]

  one_to_many :goods_nomenclature_successors, key: [:absorbed_goods_nomenclature_item_id, :absorbed_productline_suffix],
                                              primary_key: [:goods_nomenclature_item_id, :producline_suffix]

  one_to_many :export_refund_nomenclatures, key: :goods_nomenclature_sid,
                                            primary_key: :goods_nomenclature_sid do |ds|
    ds.with_actual(ExportRefundNomenclature)
  end

  one_to_many :measures, key: :goods_nomenclature_sid, foreign_key: :goods_nomenclature_sid

  dataset_module do
    def declarable
      filter(producline_suffix: "80")
    end

    def non_hidden
      filter(Sequel.~(goods_nomenclature_item_id: HiddenGoodsNomenclature.codes))
    end

    def indexable
      where(Sequel.~(goods_nomenclature_item_id: HiddenGoodsNomenclature.codes))
    end
  end

  def id
    goods_nomenclature_sid
  end

  def to_s
    "#{number_indents}: #{goods_nomenclature_item_id}: #{description}"
  end

  def heading_id
    "#{goods_nomenclature_item_id.first(4)}______"
  end

  def chapter_id
    goods_nomenclature_item_id.first(2) + "0" * 8
  end

  def code
    goods_nomenclature_item_id
  end

  def bti_url
    "http://ec.europa.eu/taxation_customs/dds2/ebti/ebti_consultation.jsp?Lang=en&nomenc=#{code}&Expand=true"
  end
end
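# Hedged illustration (added; not part of the original file) of how the :sti
# class_determinator above maps goods nomenclature item ids to subclasses.
# The lambda mirrors its logic using plain String#end_with? so it can run
# without a database; the item ids are made up. Guarded so it stays inert
# when the model is loaded normally.
if __FILE__ == $PROGRAM_NAME
  classify = lambda do |gono_id|
    if gono_id.end_with?('00000000')
      'Chapter'
    elsif gono_id.end_with?('000000') && gono_id[2, 2] != '00'
      'Heading'
    elsif !gono_id.end_with?('000000')
      'Commodity'
    else
      'GoodsNomenclature'
    end
  end

  p classify.call('0200000000') # => "Chapter"
  p classify.call('0201000000') # => "Heading"
  p classify.call('0201100010') # => "Commodity"
end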
37.255814
145
0.660633
913f2adce38378cdde11ccca75740ead73e2a549
896
class Xsp < Formula
  desc "Mono's ASP.NET hosting server"
  homepage "https://github.com/mono/xsp"
  url "https://github.com/mono/xsp/archive/3.0.11.tar.gz"
  sha256 "290e302a03396c5cff7eb53dae008e9f79dd00aca15ad1e62865907220483baa"

  bottle do
    cellar :any
    sha256 "3be0887c819a5823d226b26142b99fc9ceeab6e58a6c1252c84e152ee0f37d04" => :yosemite
    sha256 "ac6eaec05b4544e8a2eae419cadaa1cf6fbde53b37c3e19a71e8eca6fd2eae5c" => :mavericks
    sha256 "306cae75d2ae3ff1096b586835a19050e2e7f54532ec7c2140ef29dfd5a5cb03" => :mountain_lion
  end

  depends_on "mono"
  depends_on "automake" => :build
  depends_on "autoconf" => :build
  depends_on "pkg-config" => :build

  def install
    system "./autogen.sh"
    system "./configure", "--disable-silent-rules", "--prefix=#{prefix}"
    system "make", "install"
  end

  test do
    system "xsp", "--help"
  end
end
29.866667
95
0.715402
393273d02321da07c1f8587a888ebea91e3fd94b
925
Rails.application.routes.draw do
  get 'password_resets/new'
  get 'password_resets/edit'
  get 'sessions/new'

  resources :users do
    member do
      get :following, :followers
    end
  end
  resources :account_activations, only: [:edit]
  resources :password_resets, only: [:new, :create, :edit, :update]
  resources :microposts, only: [:create, :destroy]
  resources :relationships, only: [:create, :destroy]

  get 'users/new'
  get '/home', to: 'static_pages#home'
  get '/about', to: 'static_pages#about'
  get '/help', to: 'static_pages#help'
  get '/contact', to: 'static_pages#contact'
  get '/signup', to: 'users#new'
  post '/signup', to: 'users#create'
  get '/login', to: 'sessions#new'
  post '/login', to: 'sessions#create'
  delete '/logout', to: 'sessions#destroy'

  # For details on the DSL available within this file, see http://guides.rubyonrails.org/routing.html
  root 'static_pages#home'
end
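# Hedged sketch (added for illustration): a few of the URL helpers the routes
# above generate, with the paths they map to.
#
#   signup_path                        # => "/signup"
#   login_path                         # => "/login"
#   logout_path                        # => "/logout"
#   following_user_path(1)             # => "/users/1/following"
#   edit_password_reset_path('token')  # => "/password_resets/token/edit"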
26.428571
101
0.683243
79d5bf4cf06cc892927234ae43c395978daae4be
11,321
require 'spec_helper' describe "Internal Project Access", feature: true do include AccessMatchers let(:project) { create(:project, :internal) } let(:owner) { project.owner } let(:master) { create(:user) } let(:developer) { create(:user) } let(:reporter) { create(:user) } let(:guest) { create(:user) } before do project.team << [master, :master] project.team << [developer, :developer] project.team << [reporter, :reporter] project.team << [guest, :guest] end describe "Project should be internal" do describe '#internal?' do subject { project.internal? } it { is_expected.to be_truthy } end end describe "GET /:project_path" do subject { namespace_project_path(project.namespace, project) } it { is_expected.to be_allowed_for :admin } it { is_expected.to be_allowed_for owner } it { is_expected.to be_allowed_for master } it { is_expected.to be_allowed_for developer } it { is_expected.to be_allowed_for reporter } it { is_expected.to be_allowed_for guest } it { is_expected.to be_allowed_for :user } it { is_expected.to be_denied_for :external } it { is_expected.to be_denied_for :visitor } end describe "GET /:project_path/tree/master" do subject { namespace_project_tree_path(project.namespace, project, project.repository.root_ref) } it { is_expected.to be_allowed_for :admin } it { is_expected.to be_allowed_for owner } it { is_expected.to be_allowed_for master } it { is_expected.to be_allowed_for developer } it { is_expected.to be_allowed_for reporter } it { is_expected.to be_allowed_for guest } it { is_expected.to be_allowed_for :user } it { is_expected.to be_denied_for :external } it { is_expected.to be_denied_for :visitor } end describe "GET /:project_path/commits/master" do subject { namespace_project_commits_path(project.namespace, project, project.repository.root_ref, limit: 1) } it { is_expected.to be_allowed_for :admin } it { is_expected.to be_allowed_for owner } it { is_expected.to be_allowed_for master } it { is_expected.to be_allowed_for developer } it { is_expected.to be_allowed_for reporter } it { is_expected.to be_allowed_for guest } it { is_expected.to be_allowed_for :user } it { is_expected.to be_denied_for :external } it { is_expected.to be_denied_for :visitor } end describe "GET /:project_path/commit/:sha" do subject { namespace_project_commit_path(project.namespace, project, project.repository.commit) } it { is_expected.to be_allowed_for :admin } it { is_expected.to be_allowed_for owner } it { is_expected.to be_allowed_for master } it { is_expected.to be_allowed_for developer } it { is_expected.to be_allowed_for reporter } it { is_expected.to be_allowed_for guest } it { is_expected.to be_allowed_for :user } it { is_expected.to be_denied_for :external } it { is_expected.to be_denied_for :visitor } end describe "GET /:project_path/compare" do subject { namespace_project_compare_index_path(project.namespace, project) } it { is_expected.to be_allowed_for :admin } it { is_expected.to be_allowed_for owner } it { is_expected.to be_allowed_for master } it { is_expected.to be_allowed_for developer } it { is_expected.to be_allowed_for reporter } it { is_expected.to be_allowed_for guest } it { is_expected.to be_allowed_for :user } it { is_expected.to be_denied_for :external } it { is_expected.to be_denied_for :visitor } end describe "GET /:project_path/project_members" do subject { namespace_project_project_members_path(project.namespace, project) } it { is_expected.to be_allowed_for :admin } it { is_expected.to be_allowed_for owner } it { is_expected.to be_allowed_for master } it { 
is_expected.to be_denied_for developer } it { is_expected.to be_denied_for reporter } it { is_expected.to be_denied_for guest } it { is_expected.to be_denied_for :user } it { is_expected.to be_denied_for :external } it { is_expected.to be_denied_for :visitor } end describe "GET /:project_path/blob" do let(:commit) { project.repository.commit } subject { namespace_project_blob_path(project.namespace, project, File.join(commit.id, '.gitignore')) } it { is_expected.to be_allowed_for :admin } it { is_expected.to be_allowed_for owner } it { is_expected.to be_allowed_for master } it { is_expected.to be_allowed_for developer } it { is_expected.to be_allowed_for reporter } it { is_expected.to be_allowed_for guest } it { is_expected.to be_allowed_for :user } it { is_expected.to be_denied_for :external } it { is_expected.to be_denied_for :visitor } end describe "GET /:project_path/edit" do subject { edit_namespace_project_path(project.namespace, project) } it { is_expected.to be_allowed_for :admin } it { is_expected.to be_allowed_for owner } it { is_expected.to be_allowed_for master } it { is_expected.to be_denied_for developer } it { is_expected.to be_denied_for reporter } it { is_expected.to be_denied_for guest } it { is_expected.to be_denied_for :user } it { is_expected.to be_denied_for :external } it { is_expected.to be_denied_for :visitor } end describe "GET /:project_path/deploy_keys" do subject { namespace_project_deploy_keys_path(project.namespace, project) } it { is_expected.to be_allowed_for :admin } it { is_expected.to be_allowed_for owner } it { is_expected.to be_allowed_for master } it { is_expected.to be_denied_for developer } it { is_expected.to be_denied_for reporter } it { is_expected.to be_denied_for guest } it { is_expected.to be_denied_for :user } it { is_expected.to be_denied_for :external } it { is_expected.to be_denied_for :visitor } end describe "GET /:project_path/issues" do subject { namespace_project_issues_path(project.namespace, project) } it { is_expected.to be_allowed_for :admin } it { is_expected.to be_allowed_for owner } it { is_expected.to be_allowed_for master } it { is_expected.to be_allowed_for developer } it { is_expected.to be_allowed_for reporter } it { is_expected.to be_allowed_for guest } it { is_expected.to be_allowed_for :user } it { is_expected.to be_denied_for :external } it { is_expected.to be_denied_for :visitor } end describe "GET /:project_path/issues/:id/edit" do let(:issue) { create(:issue, project: project) } subject { edit_namespace_project_issue_path(project.namespace, project, issue) } it { is_expected.to be_allowed_for :admin } it { is_expected.to be_allowed_for owner } it { is_expected.to be_allowed_for master } it { is_expected.to be_allowed_for developer } it { is_expected.to be_allowed_for reporter } it { is_expected.to be_denied_for guest } it { is_expected.to be_denied_for :user } it { is_expected.to be_denied_for :external } it { is_expected.to be_denied_for :visitor } end describe "GET /:project_path/snippets" do subject { namespace_project_snippets_path(project.namespace, project) } it { is_expected.to be_allowed_for :admin } it { is_expected.to be_allowed_for owner } it { is_expected.to be_allowed_for master } it { is_expected.to be_allowed_for developer } it { is_expected.to be_allowed_for reporter } it { is_expected.to be_allowed_for guest } it { is_expected.to be_allowed_for :user } it { is_expected.to be_denied_for :external } it { is_expected.to be_denied_for :visitor } end describe "GET /:project_path/snippets/new" do subject { 
new_namespace_project_snippet_path(project.namespace, project) } it { is_expected.to be_allowed_for :admin } it { is_expected.to be_allowed_for owner } it { is_expected.to be_allowed_for master } it { is_expected.to be_allowed_for developer } it { is_expected.to be_allowed_for reporter } it { is_expected.to be_denied_for guest } it { is_expected.to be_denied_for :user } it { is_expected.to be_denied_for :external } it { is_expected.to be_denied_for :visitor } end describe "GET /:project_path/merge_requests" do subject { namespace_project_merge_requests_path(project.namespace, project) } it { is_expected.to be_allowed_for :admin } it { is_expected.to be_allowed_for owner } it { is_expected.to be_allowed_for master } it { is_expected.to be_allowed_for developer } it { is_expected.to be_allowed_for reporter } it { is_expected.to be_allowed_for guest } it { is_expected.to be_allowed_for :user } it { is_expected.to be_denied_for :external } it { is_expected.to be_denied_for :visitor } end describe "GET /:project_path/merge_requests/new" do subject { new_namespace_project_merge_request_path(project.namespace, project) } it { is_expected.to be_allowed_for :admin } it { is_expected.to be_allowed_for owner } it { is_expected.to be_allowed_for master } it { is_expected.to be_allowed_for developer } it { is_expected.to be_denied_for reporter } it { is_expected.to be_denied_for guest } it { is_expected.to be_denied_for :user } it { is_expected.to be_denied_for :external } it { is_expected.to be_denied_for :visitor } end describe "GET /:project_path/branches" do subject { namespace_project_branches_path(project.namespace, project) } before do # Speed increase allow_any_instance_of(Project).to receive(:branches).and_return([]) end it { is_expected.to be_allowed_for :admin } it { is_expected.to be_allowed_for owner } it { is_expected.to be_allowed_for master } it { is_expected.to be_allowed_for developer } it { is_expected.to be_allowed_for reporter } it { is_expected.to be_allowed_for guest } it { is_expected.to be_allowed_for :user } it { is_expected.to be_denied_for :external } it { is_expected.to be_denied_for :visitor } end describe "GET /:project_path/tags" do subject { namespace_project_tags_path(project.namespace, project) } before do # Speed increase allow_any_instance_of(Project).to receive(:tags).and_return([]) end it { is_expected.to be_allowed_for :admin } it { is_expected.to be_allowed_for owner } it { is_expected.to be_allowed_for master } it { is_expected.to be_allowed_for developer } it { is_expected.to be_allowed_for reporter } it { is_expected.to be_allowed_for guest } it { is_expected.to be_allowed_for :user } it { is_expected.to be_denied_for :external } it { is_expected.to be_denied_for :visitor } end describe "GET /:project_path/hooks" do subject { namespace_project_hooks_path(project.namespace, project) } it { is_expected.to be_allowed_for :admin } it { is_expected.to be_allowed_for owner } it { is_expected.to be_allowed_for master } it { is_expected.to be_denied_for developer } it { is_expected.to be_denied_for reporter } it { is_expected.to be_denied_for guest } it { is_expected.to be_denied_for :user } it { is_expected.to be_denied_for :external } it { is_expected.to be_denied_for :visitor } end end
38.770548
113
0.722904
91d7284338b8a4c22ecd9e9d0d8b5697c022a739
594
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0

require_relative '../sns-ruby-example-create-subscription'

describe '#subscription_created?' do
  let(:sns_client) { Aws::SNS::Client.new(stub_responses: true) }
  let(:endpoint) { '[email protected]' }

  it 'confirms the subscription was created' do
    # The SNS API operation that creates a subscription is `subscribe`;
    # stub that operation rather than the non-existent `create_subscriptions`.
    subscription_data = sns_client.stub_data(:subscribe)
    sns_client.stub_responses(:subscribe, subscription_data)
    expect(subscription_created?(sns_client, endpoint)).to be
  end
end
34.941176
72
0.747475
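A minimal sketch of what the `subscription_created?` helper exercised by the spec above might look like; the corpus entry only contains the spec, so this implementation is an assumption (the topic ARN in particular is invented), but it matches the stubbed `subscribe` operation and email endpoint used there.

# Hypothetical implementation of the helper under test in the spec above.
# Assumption: it subscribes an email endpoint to a topic via the SNS
# `subscribe` API and reports success as a boolean.
require 'aws-sdk-sns'

def subscription_created?(sns_client, endpoint,
                          topic_arn = 'arn:aws:sns:us-west-2:123456789012:ExampleTopic') # invented ARN
  sns_client.subscribe(
    topic_arn: topic_arn,
    protocol: 'email',
    endpoint: endpoint
  )
  true
rescue Aws::SNS::Errors::ServiceError => e
  puts "Error creating subscription: #{e.message}"
  false
end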
61e981a5b19c9986ac78f99b370acc0afe45983e
968
# Copyright 2010-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. # # This file is licensed under the Apache License, Version 2.0 (the "License"). # You may not use this file except in compliance with the License. A copy of the # License is located at # # http://aws.amazon.com/apache2.0/ # # This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS # OF ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. require 'aws-sdk-codebuild' # v2: require 'aws-sdk' project_name = '' if ARGV.length != 1 puts 'You must supply the name of the project to build' exit 1 else project_name = ARGV[0] end client = Aws::CodeBuild::Client.new(region: 'us-west-2') begin resp = client.start_build({project_name: project_name, }) puts 'Building project ' + project_name rescue StandardError => ex puts 'Error building project: ' + ex.message end
30.25
80
0.73657
018bc025ffa5eee9b0bff42cf273c7ac083f52b3
20,052
# frozen_string_literal: true require "active_support/core_ext/array/extract_options" require "active_support/core_ext/hash/keys" require_relative "asset_url_helper" require_relative "tag_helper" module ActionView # = Action View Asset Tag Helpers module Helpers #:nodoc: # This module provides methods for generating HTML that links views to assets such # as images, JavaScripts, stylesheets, and feeds. These methods do not verify # the assets exist before linking to them: # # image_tag("quails.png") # # => <img src="/assets/quails.png" /> # stylesheet_link_tag("application") # # => <link href="/assets/application.css?body=1" media="screen" rel="stylesheet" /> module AssetTagHelper extend ActiveSupport::Concern include AssetUrlHelper include TagHelper # Returns an HTML script tag for each of the +sources+ provided. # # Sources may be paths to JavaScript files. Relative paths are assumed to be relative # to <tt>assets/javascripts</tt>, full paths are assumed to be relative to the document # root. Relative paths are idiomatic, use absolute paths only when needed. # # When passing paths, the ".js" extension is optional. If you do not want ".js" # appended to the path <tt>extname: false</tt> can be set on the options. # # You can modify the HTML attributes of the script tag by passing a hash as the # last argument. # # When the Asset Pipeline is enabled, you can pass the name of your manifest as # source, and include other JavaScript or CoffeeScript files inside the manifest. # # ==== Options # # When the last parameter is a hash you can add HTML attributes using that # parameter. The following options are supported: # # * <tt>:extname</tt> - Append an extension to the generated url unless the extension # already exists. This only applies for relative urls. # * <tt>:protocol</tt> - Sets the protocol of the generated url, this option only # applies when a relative url and +host+ options are provided. # * <tt>:host</tt> - When a relative url is provided the host is added to the # that path. # * <tt>:skip_pipeline</tt> - This option is used to bypass the asset pipeline # when it is set to true. # # ==== Examples # # javascript_include_tag "xmlhr" # # => <script src="/assets/xmlhr.debug-1284139606.js"></script> # # javascript_include_tag "xmlhr", host: "localhost", protocol: "https" # # => <script src="https://localhost/assets/xmlhr.debug-1284139606.js"></script> # # javascript_include_tag "template.jst", extname: false # # => <script src="/assets/template.debug-1284139606.jst"></script> # # javascript_include_tag "xmlhr.js" # # => <script src="/assets/xmlhr.debug-1284139606.js"></script> # # javascript_include_tag "common.javascript", "/elsewhere/cools" # # => <script src="/assets/common.javascript.debug-1284139606.js"></script> # # <script src="/elsewhere/cools.debug-1284139606.js"></script> # # javascript_include_tag "http://www.example.com/xmlhr" # # => <script src="http://www.example.com/xmlhr"></script> # # javascript_include_tag "http://www.example.com/xmlhr.js" # # => <script src="http://www.example.com/xmlhr.js"></script> def javascript_include_tag(*sources) options = sources.extract_options!.stringify_keys path_options = options.extract!("protocol", "extname", "host", "skip_pipeline").symbolize_keys sources.uniq.map { |source| tag_options = { "src" => path_to_javascript(source, path_options) }.merge!(options) content_tag("script".freeze, "", tag_options) }.join("\n").html_safe end # Returns a stylesheet link tag for the sources specified as arguments. 
If # you don't specify an extension, <tt>.css</tt> will be appended automatically. # You can modify the link attributes by passing a hash as the last argument. # For historical reasons, the 'media' attribute will always be present and defaults # to "screen", so you must explicitly set it to "all" for the stylesheet(s) to # apply to all media types. # # stylesheet_link_tag "style" # # => <link href="/assets/style.css" media="screen" rel="stylesheet" /> # # stylesheet_link_tag "style.css" # # => <link href="/assets/style.css" media="screen" rel="stylesheet" /> # # stylesheet_link_tag "http://www.example.com/style.css" # # => <link href="http://www.example.com/style.css" media="screen" rel="stylesheet" /> # # stylesheet_link_tag "style", media: "all" # # => <link href="/assets/style.css" media="all" rel="stylesheet" /> # # stylesheet_link_tag "style", media: "print" # # => <link href="/assets/style.css" media="print" rel="stylesheet" /> # # stylesheet_link_tag "random.styles", "/css/stylish" # # => <link href="/assets/random.styles" media="screen" rel="stylesheet" /> # # <link href="/css/stylish.css" media="screen" rel="stylesheet" /> def stylesheet_link_tag(*sources) options = sources.extract_options!.stringify_keys path_options = options.extract!("protocol", "host", "skip_pipeline").symbolize_keys sources.uniq.map { |source| tag_options = { "rel" => "stylesheet", "media" => "screen", "href" => path_to_stylesheet(source, path_options) }.merge!(options) tag(:link, tag_options) }.join("\n").html_safe end # Returns a link tag that browsers and feed readers can use to auto-detect # an RSS, Atom, or JSON feed. The +type+ can be <tt>:rss</tt> (default), # <tt>:atom</tt>, or <tt>:json</tt>. Control the link options in url_for format # using the +url_options+. You can modify the LINK tag itself in +tag_options+. # # ==== Options # # * <tt>:rel</tt> - Specify the relation of this link, defaults to "alternate" # * <tt>:type</tt> - Override the auto-generated mime type # * <tt>:title</tt> - Specify the title of the link, defaults to the +type+ # # ==== Examples # # auto_discovery_link_tag # # => <link rel="alternate" type="application/rss+xml" title="RSS" href="http://www.currenthost.com/controller/action" /> # auto_discovery_link_tag(:atom) # # => <link rel="alternate" type="application/atom+xml" title="ATOM" href="http://www.currenthost.com/controller/action" /> # auto_discovery_link_tag(:json) # # => <link rel="alternate" type="application/json" title="JSON" href="http://www.currenthost.com/controller/action" /> # auto_discovery_link_tag(:rss, {action: "feed"}) # # => <link rel="alternate" type="application/rss+xml" title="RSS" href="http://www.currenthost.com/controller/feed" /> # auto_discovery_link_tag(:rss, {action: "feed"}, {title: "My RSS"}) # # => <link rel="alternate" type="application/rss+xml" title="My RSS" href="http://www.currenthost.com/controller/feed" /> # auto_discovery_link_tag(:rss, {controller: "news", action: "feed"}) # # => <link rel="alternate" type="application/rss+xml" title="RSS" href="http://www.currenthost.com/news/feed" /> # auto_discovery_link_tag(:rss, "http://www.example.com/feed.rss", {title: "Example RSS"}) # # => <link rel="alternate" type="application/rss+xml" title="Example RSS" href="http://www.example.com/feed.rss" /> def auto_discovery_link_tag(type = :rss, url_options = {}, tag_options = {}) if !(type == :rss || type == :atom || type == :json) && tag_options[:type].blank? 
raise ArgumentError.new("You should pass :type tag_option key explicitly, because you have passed #{type} type other than :rss, :atom, or :json.") end tag( "link", "rel" => tag_options[:rel] || "alternate", "type" => tag_options[:type] || Template::Types[type].to_s, "title" => tag_options[:title] || type.to_s.upcase, "href" => url_options.is_a?(Hash) ? url_for(url_options.merge(only_path: false)) : url_options ) end # Returns a link tag for a favicon managed by the asset pipeline. # # If a page has no link like the one generated by this helper, browsers # ask for <tt>/favicon.ico</tt> automatically, and cache the file if the # request succeeds. If the favicon changes it is hard to get it updated. # # To have better control applications may let the asset pipeline manage # their favicon storing the file under <tt>app/assets/images</tt>, and # using this helper to generate its corresponding link tag. # # The helper gets the name of the favicon file as first argument, which # defaults to "favicon.ico", and also supports +:rel+ and +:type+ options # to override their defaults, "shortcut icon" and "image/x-icon" # respectively: # # favicon_link_tag # # => <link href="/assets/favicon.ico" rel="shortcut icon" type="image/x-icon" /> # # favicon_link_tag 'myicon.ico' # # => <link href="/assets/myicon.ico" rel="shortcut icon" type="image/x-icon" /> # # Mobile Safari looks for a different link tag, pointing to an image that # will be used if you add the page to the home screen of an iOS device. # The following call would generate such a tag: # # favicon_link_tag 'mb-icon.png', rel: 'apple-touch-icon', type: 'image/png' # # => <link href="/assets/mb-icon.png" rel="apple-touch-icon" type="image/png" /> def favicon_link_tag(source = "favicon.ico", options = {}) tag("link", { rel: "shortcut icon", type: "image/x-icon", href: path_to_image(source, skip_pipeline: options.delete(:skip_pipeline)) }.merge!(options.symbolize_keys)) end # Returns an HTML image tag for the +source+. The +source+ can be a full # path, a file or an Active Storage attachment. # # ==== Options # # You can add HTML attributes using the +options+. The +options+ supports # additional keys for convenience and conformance: # # * <tt>:size</tt> - Supplied as "{Width}x{Height}" or "{Number}", so "30x45" becomes # width="30" and height="45", and "50" becomes width="50" and height="50". # <tt>:size</tt> will be ignored if the value is not in the correct format. # * <tt>:srcset</tt> - If supplied as a hash or array of <tt>[source, descriptor]</tt> # pairs, each image path will be expanded before the list is formatted as a string. 
# # ==== Examples # # Assets (images that are part of your app): # # image_tag("icon") # # => <img src="/assets/icon" /> # image_tag("icon.png") # # => <img src="/assets/icon.png" /> # image_tag("icon.png", size: "16x10", alt: "Edit Entry") # # => <img src="/assets/icon.png" width="16" height="10" alt="Edit Entry" /> # image_tag("/icons/icon.gif", size: "16") # # => <img src="/icons/icon.gif" width="16" height="16" /> # image_tag("/icons/icon.gif", height: '32', width: '32') # # => <img height="32" src="/icons/icon.gif" width="32" /> # image_tag("/icons/icon.gif", class: "menu_icon") # # => <img class="menu_icon" src="/icons/icon.gif" /> # image_tag("/icons/icon.gif", data: { title: 'Quails Application' }) # # => <img data-title="Quails Application" src="/icons/icon.gif" /> # image_tag("icon.png", srcset: { "icon_2x.png" => "2x", "icon_4x.png" => "4x" }) # # => <img src="/assets/icon.png" srcset="/assets/icon_2x.png 2x, /assets/icon_4x.png 4x"> # image_tag("pic.jpg", srcset: [["pic_1024.jpg", "1024w"], ["pic_1980.jpg", "1980w"]], sizes: "100vw") # # => <img src="/assets/pic.jpg" srcset="/assets/pic_1024.jpg 1024w, /assets/pic_1980.jpg 1980w" sizes="100vw"> # # Active Storage (images that are uploaded by the users of your app): # # image_tag(user.avatar) # # => <img src="/quails/active_storage/blobs/.../tiger.jpg" /> # image_tag(user.avatar.variant(resize: "100x100")) # # => <img src="/quails/active_storage/variants/.../tiger.jpg" /> # image_tag(user.avatar.variant(resize: "100x100"), size: '100') # # => <img width="100" height="100" src="/quails/active_storage/variants/.../tiger.jpg" /> def image_tag(source, options = {}) options = options.symbolize_keys check_for_image_tag_errors(options) skip_pipeline = options.delete(:skip_pipeline) options[:src] = resolve_image_source(source, skip_pipeline) if options[:srcset] && !options[:srcset].is_a?(String) options[:srcset] = options[:srcset].map do |src_path, size| src_path = path_to_image(src_path, skip_pipeline: skip_pipeline) "#{src_path} #{size}" end.join(", ") end options[:width], options[:height] = extract_dimensions(options.delete(:size)) if options[:size] tag("img", options) end # Returns a string suitable for an HTML image tag alt attribute. # The +src+ argument is meant to be an image file path. # The method removes the basename of the file path and the digest, # if any. It also removes hyphens and underscores from file names and # replaces them with spaces, returning a space-separated, titleized # string. # # ==== Examples # # image_alt('quails.png') # # => Quails # # image_alt('hyphenated-file-name.png') # # => Hyphenated file name # # image_alt('underscored_file_name.png') # # => Underscored file name def image_alt(src) ActiveSupport::Deprecation.warn("image_alt is deprecated and will be removed from Quails 6.0. You must explicitly set alt text on images.") File.basename(src, ".*".freeze).sub(/-[[:xdigit:]]{32,64}\z/, "".freeze).tr("-_".freeze, " ".freeze).capitalize end # Returns an HTML video tag for the +sources+. If +sources+ is a string, # a single video tag will be returned. If +sources+ is an array, a video # tag with nested source tags for each source will be returned. The # +sources+ can be full paths or files that exists in your public videos # directory. # # ==== Options # You can add HTML attributes using the +options+. The +options+ supports # two additional keys for convenience and conformance: # # * <tt>:poster</tt> - Set an image (like a screenshot) to be shown # before the video loads. 
The path is calculated like the +src+ of +image_tag+. # * <tt>:size</tt> - Supplied as "{Width}x{Height}" or "{Number}", so "30x45" becomes # width="30" and height="45", and "50" becomes width="50" and height="50". # <tt>:size</tt> will be ignored if the value is not in the correct format. # * <tt>:poster_skip_pipeline</tt> will bypass the asset pipeline when using # the <tt>:poster</tt> option instead using an asset in the public folder. # # ==== Examples # # video_tag("trailer") # # => <video src="/videos/trailer"></video> # video_tag("trailer.ogg") # # => <video src="/videos/trailer.ogg"></video> # video_tag("trailer.ogg", controls: true, preload: 'none') # # => <video preload="none" controls="controls" src="/videos/trailer.ogg" ></video> # video_tag("trailer.m4v", size: "16x10", poster: "screenshot.png") # # => <video src="/videos/trailer.m4v" width="16" height="10" poster="/assets/screenshot.png"></video> # video_tag("trailer.m4v", size: "16x10", poster: "screenshot.png", poster_skip_pipeline: true) # # => <video src="/videos/trailer.m4v" width="16" height="10" poster="screenshot.png"></video> # video_tag("/trailers/hd.avi", size: "16x16") # # => <video src="/trailers/hd.avi" width="16" height="16"></video> # video_tag("/trailers/hd.avi", size: "16") # # => <video height="16" src="/trailers/hd.avi" width="16"></video> # video_tag("/trailers/hd.avi", height: '32', width: '32') # # => <video height="32" src="/trailers/hd.avi" width="32"></video> # video_tag("trailer.ogg", "trailer.flv") # # => <video><source src="/videos/trailer.ogg" /><source src="/videos/trailer.flv" /></video> # video_tag(["trailer.ogg", "trailer.flv"]) # # => <video><source src="/videos/trailer.ogg" /><source src="/videos/trailer.flv" /></video> # video_tag(["trailer.ogg", "trailer.flv"], size: "160x120") # # => <video height="120" width="160"><source src="/videos/trailer.ogg" /><source src="/videos/trailer.flv" /></video> def video_tag(*sources) options = sources.extract_options!.symbolize_keys public_poster_folder = options.delete(:poster_skip_pipeline) sources << options multiple_sources_tag_builder("video", sources) do |tag_options| tag_options[:poster] = path_to_image(tag_options[:poster], skip_pipeline: public_poster_folder) if tag_options[:poster] tag_options[:width], tag_options[:height] = extract_dimensions(tag_options.delete(:size)) if tag_options[:size] end end # Returns an HTML audio tag for the +source+. # The +source+ can be full path or file that exists in # your public audios directory. # # audio_tag("sound") # # => <audio src="/audios/sound"></audio> # audio_tag("sound.wav") # # => <audio src="/audios/sound.wav"></audio> # audio_tag("sound.wav", autoplay: true, controls: true) # # => <audio autoplay="autoplay" controls="controls" src="/audios/sound.wav"></audio> # audio_tag("sound.wav", "sound.mid") # # => <audio><source src="/audios/sound.wav" /><source src="/audios/sound.mid" /></audio> def audio_tag(*sources) multiple_sources_tag_builder("audio", sources) end private def multiple_sources_tag_builder(type, sources) options = sources.extract_options!.symbolize_keys skip_pipeline = options.delete(:skip_pipeline) sources.flatten! yield options if block_given? 
if sources.size > 1 content_tag(type, options) do safe_join sources.map { |source| tag("source", src: send("path_to_#{type}", source, skip_pipeline: skip_pipeline)) } end else options[:src] = send("path_to_#{type}", sources.first, skip_pipeline: skip_pipeline) content_tag(type, nil, options) end end def resolve_image_source(source, skip_pipeline) if source.is_a?(Symbol) || source.is_a?(String) path_to_image(source, skip_pipeline: skip_pipeline) else polymorphic_url(source) end rescue NoMethodError => e raise ArgumentError, "Can't resolve image into URL: #{e}" end def extract_dimensions(size) size = size.to_s if /\A\d+x\d+\z/.match?(size) size.split("x") elsif /\A\d+\z/.match?(size) [size, size] end end def check_for_image_tag_errors(options) if options[:size] && (options[:height] || options[:width]) raise ArgumentError, "Cannot pass a :size option with a :height or :width option" end end end end end
50.004988
156
0.610912
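The documented `:size` behavior in the helper above ("30x45" becomes width/height, "50" is squared, anything else is ignored) is driven by the private `extract_dimensions` method; a short standalone illustration of the three cases, not part of the original file, could look like this.

# Standalone sketch mirroring the size-parsing rules documented above.
def extract_dimensions(size)
  size = size.to_s
  if /\A\d+x\d+\z/.match?(size)
    size.split("x")          # "30x45" => ["30", "45"]
  elsif /\A\d+\z/.match?(size)
    [size, size]             # "50"    => ["50", "50"]
  end                        # anything else => nil, so :size is ignored
end

p extract_dimensions("30x45") # => ["30", "45"]
p extract_dimensions("50")    # => ["50", "50"]
p extract_dimensions("50x")   # => nil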
3953a3a9cf781acdcbd5d248854d3a50f8847288
41
module Dijkstrar VERSION = "0.1.0" end
10.25
19
0.682927
bfdc69455ba4ff551c848d98a819a1affb5473c6
1,527
require 'chronicle/etl' module Chronicle module Shell class ShellHistoryTransformer < Chronicle::ETL::Transformer register_connector do |r| r.provider = 'shell' r.description = 'a shell command' r.identifier = 'shell-history' end def transform @command = @extraction.data build_commanded end def id end def timestamp @command[:timestamp] end private def build_commanded record = ::Chronicle::ETL::Models::Activity.new record.verb = 'commanded' record.end_at = timestamp record.provider = @extraction.meta[:shell_name] record.dedupe_on << [:verb, :end_at, :provider] record.involved = build_command record.actor = build_actor record end def build_command record = ::Chronicle::ETL::Models::Entity.new record.represents = 'command' record.provider = @extraction.meta[:shell_name] record.body = @command[:command] record.dedupe_on << [:body, :provider, :represents] record end def build_actor record = ::Chronicle::ETL::Models::Entity.new record.represents = 'identity' record.provider = 'system' record.slug = build_user_slug record.dedupe_on << [:represents, :provider, :slug] record end def build_user_slug "#{@extraction.meta[:username]}@#{@extraction.meta[:hostname]}" end end end end
25.032787
71
0.600524
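The transformer above reads a fixed set of fields from its extraction; a hypothetical payload illustrating that input contract (field names taken from the transformer code, values invented) might look like the following.

# Hypothetical extraction payload for the shell-history transformer above.
# The keys come from what the code reads; the values are invented.
require 'time'

extraction_data = {
  command: 'git status',                            # read via @command[:command]
  timestamp: Time.parse('2021-06-01 12:34:56 UTC')  # read via @command[:timestamp]
}

extraction_meta = {
  shell_name: 'zsh',    # becomes the provider of the activity and command entities
  username: 'alice',    # combined into the "alice@laptop" actor slug
  hostname: 'laptop'
}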
f81352fa9e418d321f511ec536c07d7cfd147575
10,940
# frozen_string_literal: true require_relative '../support/test_case' require_relative '../support/fixture_config' require 'jekyll/minibundle/asset_file_registry' module Jekyll::Minibundle::Test class AssetFileRegistryTest < TestCase include FixtureConfig def setup AssetFileRegistry.clear_all end def test_register_returns_same_bundle_file_for_same_bundle_config with_fake_site do |site| first = AssetFileRegistry.register_bundle_file(site, bundle_config) second = AssetFileRegistry.register_bundle_file(site, bundle_config) assert_same(first, second) assert_equal(1, asset_file_registry_size) assert_contains_only(site.static_files, [first]) end end def test_register_returns_same_development_file_collection_for_same_bundle_config with_fake_site do |site| first = AssetFileRegistry.register_development_file_collection(site, bundle_config) second = AssetFileRegistry.register_development_file_collection(site, bundle_config) assert_same(first, second) assert_equal(1, asset_file_registry_size) assert_contains_only(site.static_files, first.files) end end def test_register_returns_different_bundle_files_for_bundle_configs_with_different_destination_paths with_fake_site do |site| first = AssetFileRegistry.register_bundle_file(site, bundle_config.merge('destination_path' => 'assets/dest1')) second = AssetFileRegistry.register_bundle_file(site, bundle_config.merge('destination_path' => 'assets/dest2')) refute_same first, second assert_equal(2, asset_file_registry_size) assert_contains_only(site.static_files, [first, second]) end end def test_register_returns_different_development_file_collections_for_bundle_configs_with_different_destination_paths with_fake_site do |site| first = AssetFileRegistry.register_development_file_collection(site, bundle_config.merge('destination_path' => 'assets/dest1')) second = AssetFileRegistry.register_development_file_collection(site, bundle_config.merge('destination_path' => 'assets/dest2')) refute_same first, second assert_equal(2, asset_file_registry_size) assert_contains_only(site.static_files, (first.files + second.files)) end end def test_register_returns_different_bundle_files_for_bundle_configs_with_different_types with_fake_site do |site| FileUtils.touch(File.join(JS_BUNDLE_SOURCE_DIR, 'dependency.css')) FileUtils.touch(File.join(JS_BUNDLE_SOURCE_DIR, 'app.css')) first = AssetFileRegistry.register_bundle_file(site, bundle_config.merge('type' => :css)) second = AssetFileRegistry.register_bundle_file(site, bundle_config.merge('type' => :js)) refute_same first, second assert_equal(2, asset_file_registry_size) assert_contains_only(site.static_files, [first, second]) end end def test_register_returns_different_development_file_collections_for_bundle_configs_with_different_types with_fake_site do |site| FileUtils.touch(File.join(JS_BUNDLE_SOURCE_DIR, 'dependency.css')) FileUtils.touch(File.join(JS_BUNDLE_SOURCE_DIR, 'app.css')) first = AssetFileRegistry.register_development_file_collection(site, bundle_config.merge('type' => :css)) second = AssetFileRegistry.register_development_file_collection(site, bundle_config.merge('type' => :js)) refute_same first, second assert_equal(2, asset_file_registry_size) assert_contains_only(site.static_files, (first.files + second.files)) end end [ {description: 'assets', config_diff: {'assets' => %w[b1 b2]}}, {description: 'source_directory', config_diff: {'source_dir' => '_assets/src2'}} ].each do |spec| define_method 
:"test_raise_exception_if_registering_bundle_file_with_same_destination_path_but_with_different_#{spec.fetch(:description)}" do with_fake_site do |site| first_config = bundle_config first_file = AssetFileRegistry.register_bundle_file(site, first_config) second_config = bundle_config.merge(spec[:config_diff]) err = assert_raises(RuntimeError) do AssetFileRegistry.register_bundle_file(site, second_config) end assert_equal(<<~MESSAGE, err.to_s) Two or more minibundle blocks with the same destination path "assets/site.js", but having different asset configuration: #{second_config.inspect} vs. #{first_config.inspect} MESSAGE assert_equal(1, asset_file_registry_size) assert_contains_only(site.static_files, [first_file]) end end define_method :"test_raise_exception_if_registering_development_file_collection_with_same_destination_path_but_with_different_#{spec.fetch(:description)}" do with_fake_site do |site| first_config = bundle_config first_file = AssetFileRegistry.register_development_file_collection(site, first_config) second_config = bundle_config.merge(spec[:config_diff]) err = assert_raises(RuntimeError) do AssetFileRegistry.register_development_file_collection(site, second_config) end assert_equal(<<~MESSAGE, err.to_s) Two or more minibundle blocks with the same destination path "assets/site.js", but having different asset configuration: #{second_config.inspect} vs. #{first_config.inspect} MESSAGE assert_equal(1, asset_file_registry_size) assert_contains_only(site.static_files, first_file.files) end end end [ {description: 'stamp_file', method: :register_stamp_file}, {description: 'development_file', method: :register_development_file} ].each do |spec| define_method :"test_register_returns_same_#{spec.fetch(:description)}_for_same_source_and_destination_paths" do with_fake_site do |site| first = AssetFileRegistry.send(spec.fetch(:method), site, STAMP_SOURCE_PATH, 'assets/dest1.css') second = AssetFileRegistry.send(spec.fetch(:method), site, STAMP_SOURCE_PATH, 'assets/dest1.css') assert_same(first, second) assert_equal(1, asset_file_registry_size) assert_contains_only(site.static_files, [first]) end end define_method :"test_register_returns_different_#{spec.fetch(:description)}s_for_different_source_and_destination_paths" do with_fake_site do |site| first = AssetFileRegistry.send(spec.fetch(:method), site, STAMP_SOURCE_PATH, 'assets/dest1.css') second = AssetFileRegistry.send(spec.fetch(:method), site, STAMP_SOURCE_PATH, 'assets/dest2.css') refute_same first, second assert_equal(2, asset_file_registry_size) assert_contains_only(site.static_files, [first, second]) end end define_method :"test_raise_exception_if_registering_#{spec.fetch(:description)}s_with_different_source_and_same_destination_paths" do with_fake_site do |site| source_paths = %w[src1.css src2.css].map do |file| File.join(CSS_BUNDLE_SOURCE_DIR, file) end source_paths.each { |path| FileUtils.touch(path) } first = AssetFileRegistry.send(spec.fetch(:method), site, source_paths[0], 'assets/dest1.css') err = assert_raises(RuntimeError) do AssetFileRegistry.send(spec.fetch(:method), site, source_paths[1], 'assets/dest1.css') end assert_equal(<<~MESSAGE, err.to_s) Two or more ministamp tags with the same destination path "assets/dest1.css", but different asset source paths: "#{source_paths[1]}" vs. 
"#{source_paths[0]}" MESSAGE assert_equal(1, asset_file_registry_size) assert_contains_only(site.static_files, [first]) end end end def test_raise_exception_if_registering_stamp_file_with_same_destination_path_as_existing_bundle_file with_fake_site do |site| FileUtils.touch('_assets/src.js') file = AssetFileRegistry.register_bundle_file(site, bundle_config.merge('destination_path' => 'assets/dest')) err = assert_raises(RuntimeError) do AssetFileRegistry.register_stamp_file(site, '_assets/src.js', 'assets/dest.js') end assert_equal('ministamp tag has the same destination path as a minibundle block: assets/dest.js', err.to_s) assert_equal(1, asset_file_registry_size) assert_contains_only(site.static_files, [file]) end end def test_raise_exception_if_registering_bundle_file_with_same_destination_path_as_existing_stamp_file with_fake_site do |site| FileUtils.touch('_assets/src.js') file = AssetFileRegistry.register_stamp_file(site, '_assets/src.js', 'assets/dest.js') err = assert_raises(RuntimeError) do AssetFileRegistry.register_bundle_file(site, bundle_config.merge('destination_path' => 'assets/dest')) end assert_equal('minibundle block has the same destination path as a ministamp tag: assets/dest.js', err.to_s) assert_equal(1, asset_file_registry_size) assert_contains_only(site.static_files, [file]) end end [ {description: 'bundle_file', method: :register_bundle_file}, {description: 'development_file_collection', method: :register_development_file_collection} ].each do |spec| define_method :"test_remove_unused_#{spec.fetch(:description)}" do with_fake_site do |site| AssetFileRegistry.send(spec.fetch(:method), site, bundle_config) AssetFileRegistry.clear_unused assert_equal(1, asset_file_registry_size) AssetFileRegistry.clear_unused assert_equal(0, asset_file_registry_size) end end end [ {description: 'stamp_file', method: :register_stamp_file}, {description: 'development_file', method: :register_development_file} ].each do |spec| define_method :"test_remove_unused_#{spec.fetch(:description)}" do with_fake_site do |site| AssetFileRegistry.send(spec.fetch(:method), site, STAMP_SOURCE_PATH, STAMP_DESTINATION_PATH) AssetFileRegistry.clear_unused assert_equal(1, asset_file_registry_size) AssetFileRegistry.clear_unused assert_equal(0, asset_file_registry_size) end end end private def bundle_config { 'type' => :js, 'source_dir' => JS_BUNDLE_SOURCE_DIR, 'assets' => %w[dependency app], 'destination_path' => JS_BUNDLE_DESTINATION_PATH, 'minifier_cmd' => 'unused_minifier_cmd' } end def asset_file_registry_size AssetFileRegistry.instance_variable_get(:@_files).size end end end
45.966387
185
0.715631
5d59f935224c56eced28aef95949b270fce84d8c
526
ENV['RAILS_ENV'] ||= 'test' require_relative "../config/environment" require "rails/test_help" class ActiveSupport::TestCase # Run tests in parallel with specified workers parallelize(workers: :number_of_processors) parallelize_setup do |i| ActiveStorage::Blob.service.instance_variable_set(:@root, "#{ActiveStorage::Blob.service.root}-#{i}") end # Setup all fixtures in test/fixtures/*.yml for all tests in alphabetical order. fixtures :all # Add more helper methods to be used by all tests here... end
29.222222
105
0.745247
91ceaf8970f00dff993133476752fdd412da4aaf
13,977
require File.dirname(__FILE__) + '/abstract_unit' class TestActsAsTaggable < Test::Unit::TestCase fixtures :tags, :taggings, :posts, :users, :photos, :subscriptions, :magazines, :tags_transitive_hierarchy def test_find_related_tags_with assert_equivalent [tags(:good), tags(:bad), tags(:question)], Post.find_related_tags("nature") assert_equivalent [tags(:nature)], Post.find_related_tags([tags(:good)]) assert_equivalent [tags(:bad), tags(:question)], Post.find_related_tags(["Very Good", "Nature"]) assert_equivalent [tags(:bad), tags(:question)], Post.find_related_tags([tags(:good), tags(:nature)]) end def test_find_related_tags_with_non_existent_tags assert_equal [], Post.find_related_tags("ABCDEFG") assert_equal [], Post.find_related_tags(['HIJKLM']) end def test_find_related_tags_with_nothing assert_equal [], Post.find_related_tags("") assert_equal [], Post.find_related_tags([]) end def test_find_tagged_with assert_equivalent [posts(:jonathan_sky), posts(:sam_flowers)], Post.find_tagged_with('"Very good"') assert_equal Post.find_tagged_with('"Very good"'), Post.find_tagged_with(['Very good']) assert_equal Post.find_tagged_with('"Very good"'), Post.find_tagged_with([tags(:good)]) assert_equivalent [photos(:jonathan_dog), photos(:sam_flower), photos(:sam_sky)], Photo.find_tagged_with('Nature') assert_equal Photo.find_tagged_with('Nature'), Photo.find_tagged_with(['Nature']) assert_equal Photo.find_tagged_with('Nature'), Photo.find_tagged_with([tags(:nature)]) assert_equivalent [photos(:jonathan_bad_cat), photos(:jonathan_dog), photos(:jonathan_questioning_dog)], Photo.find_tagged_with('"Crazy animal" Bad') assert_equal Photo.find_tagged_with('"Crazy animal" Bad'), Photo.find_tagged_with(['Crazy animal', 'Bad']) assert_equal Photo.find_tagged_with('"Crazy animal" Bad'), Photo.find_tagged_with([tags(:animal), tags(:bad)]) end def test_find_tagged_with_nothing assert_equal [], Post.find_tagged_with("") assert_equal [], Post.find_tagged_with([]) end def test_find_tagged_with_nonexistant_tags assert_equal [], Post.find_tagged_with('ABCDEFG') assert_equal [], Photo.find_tagged_with(['HIJKLM']) assert_equal [], Photo.find_tagged_with([Tag.new(:name => 'unsaved tag')]) end def test_find_tagged_with_match_all assert_equivalent [photos(:jonathan_dog)], Photo.find_tagged_with('Crazy animal, "Nature"', :match_all => true) end def test_find_tagged_with_match_all_and_include assert_equivalent [posts(:jonathan_sky), posts(:sam_flowers)], Post.find_tagged_with(['Very good', 'Nature'], :match_all => true, :include => :tags) end def test_find_tagged_with_conditions assert_equal [], Post.find_tagged_with('"Very good", Nature', :conditions => '1=0') end def test_find_tagged_with_duplicates_options_hash options = { :conditions => '1=1' }.freeze assert_nothing_raised { Post.find_tagged_with("Nature", options) } end def test_find_tagged_with_exclusions assert_equivalent [photos(:jonathan_questioning_dog), photos(:jonathan_bad_cat)], Photo.find_tagged_with("Nature", :exclude => true) assert_equivalent [posts(:jonathan_grass), posts(:jonathan_rain), posts(:jonathan_cloudy), posts(:jonathan_still_cloudy)], Post.find_tagged_with("'Very good', Bad", :exclude => true) end def test_find_options_for_find_tagged_with_no_tags_returns_empty_hash assert_equal Hash.new, Post.find_options_for_find_tagged_with("") assert_equal Hash.new, Post.find_options_for_find_tagged_with([nil]) end def test_find_options_for_find_tagged_with_leaves_arguments_unchanged original_tags = photos(:jonathan_questioning_dog).tags.dup 
Photo.find_options_for_find_tagged_with(photos(:jonathan_questioning_dog).tags) assert_equal original_tags, photos(:jonathan_questioning_dog).tags end def test_find_options_for_find_tagged_with_respects_custom_table_name Tagging.table_name = "categorisations" Tag.table_name = "categories" options = Photo.find_options_for_find_tagged_with("Hello") assert_no_match(/ taggings /, options[:joins]) assert_no_match(/ tags /, options[:joins]) assert_match(/ categorisations /, options[:joins]) assert_match(/ categories /, options[:joins]) ensure Tagging.table_name = "taggings" Tag.table_name = "tags" end def test_include_tags_on_find_tagged_with assert_nothing_raised do Photo.find_tagged_with('Nature', :include => :tags) Photo.find_tagged_with("Nature", :include => { :taggings => :tag }) end end def test_basic_tag_counts_on_class assert_tag_counts Post.tag_counts, :good => 2, :nature => 7, :question => 1, :bad => 1 assert_tag_counts Photo.tag_counts, :good => 1, :nature => 3, :question => 1, :bad => 1, :animal => 3 end def test_tag_counts_on_class_with_date_conditions assert_tag_counts Post.tag_counts(:start_at => Date.new(2006, 8, 4)), :good => 1, :nature => 5, :question => 1, :bad => 1 assert_tag_counts Post.tag_counts(:end_at => Date.new(2006, 8, 6)), :good => 1, :nature => 4, :question => 1 assert_tag_counts Post.tag_counts(:start_at => Date.new(2006, 8, 5), :end_at => Date.new(2006, 8, 10)), :good => 1, :nature => 4, :bad => 1 assert_tag_counts Photo.tag_counts(:start_at => Date.new(2006, 8, 12), :end_at => Date.new(2006, 8, 19)), :good => 1, :nature => 2, :bad => 1, :question => 1, :animal => 3 end def test_tag_counts_on_class_with_frequencies assert_tag_counts Photo.tag_counts(:at_least => 2), :nature => 3, :animal => 3 assert_tag_counts Photo.tag_counts(:at_most => 2), :good => 1, :question => 1, :bad => 1 end def test_tag_counts_on_class_with_frequencies_and_conditions assert_tag_counts Photo.tag_counts(:at_least => 2, :conditions => '1=1'), :nature => 3, :animal => 3 end def test_tag_counts_duplicates_options_hash options = { :at_least => 2, :conditions => '1=1' }.freeze assert_nothing_raised { Photo.tag_counts(options) } end def test_tag_counts_with_limit assert_equal 2, Photo.tag_counts(:limit => 2).size assert_equal 1, Post.tag_counts(:at_least => 4, :limit => 2).size end def test_tag_counts_with_limit_and_order assert_equal [tags(:nature), tags(:good)], Post.tag_counts(:order => 'count desc', :limit => 2) end def test_tag_counts_on_association assert_tag_counts users(:jonathan).posts.tag_counts, :good => 1, :nature => 5, :question => 1 assert_tag_counts users(:sam).posts.tag_counts, :good => 1, :nature => 2, :bad => 1 assert_tag_counts users(:jonathan).photos.tag_counts, :animal => 3, :nature => 1, :question => 1, :bad => 1 assert_tag_counts users(:sam).photos.tag_counts, :nature => 2, :good => 1 end def test_tag_counts_on_association_with_options assert_equal [], users(:jonathan).posts.tag_counts(:conditions => '1=0') assert_tag_counts users(:jonathan).posts.tag_counts(:at_most => 2), :good => 1, :question => 1 end def test_tag_counts_on_has_many_through assert_tag_counts users(:jonathan).magazines.tag_counts, :good => 1 end def test_tag_counts_respects_custom_table_names Tagging.table_name = "categorisations" Tag.table_name = "categories" options = Photo.find_options_for_tag_counts(:start_at => 2.weeks.ago, :end_at => Date.today) sql = options.values.join(' ') assert_no_match /taggings/, sql assert_no_match /tags/, sql assert_match /categorisations/, sql assert_match /categories/, 
sql ensure Tagging.table_name = "taggings" Tag.table_name = "tags" end def test_tag_list_reader assert_equivalent ["Very good", "Nature"], posts(:jonathan_sky).tag_list assert_equivalent ["Bad", "Crazy animal"], photos(:jonathan_bad_cat).tag_list end def test_reassign_tag_list assert_equivalent ["Nature", "Question"], posts(:jonathan_rain).tag_list posts(:jonathan_rain).taggings.reload # Only an update of the posts table should be executed assert_queries 1 do posts(:jonathan_rain).update_attributes!(:tag_list => posts(:jonathan_rain).tag_list.to_s) end assert_equivalent ["Nature", "Question"], posts(:jonathan_rain).tag_list end def test_new_tags assert_equivalent ["Very good", "Nature"], posts(:jonathan_sky).tag_list posts(:jonathan_sky).update_attributes!(:tag_list => "#{posts(:jonathan_sky).tag_list}, One, Two") assert_equivalent ["Very good", "Nature", "One", "Two"], posts(:jonathan_sky).tag_list end def test_remove_tag assert_equivalent ["Very good", "Nature"], posts(:jonathan_sky).tag_list posts(:jonathan_sky).update_attributes!(:tag_list => "Nature") assert_equivalent ["Nature"], posts(:jonathan_sky).tag_list end def test_change_case_of_tags original_tag_names = photos(:jonathan_questioning_dog).tag_list photos(:jonathan_questioning_dog).update_attributes!(:tag_list => photos(:jonathan_questioning_dog).tag_list.to_s.upcase) # The new tag list is not uppercase becuase the AR finders are not case-sensitive # and find the old tags when re-tagging with the uppercase tags. assert_equivalent original_tag_names, photos(:jonathan_questioning_dog).reload.tag_list end def test_remove_and_add_tag assert_equivalent ["Very good", "Nature"], posts(:jonathan_sky).tag_list posts(:jonathan_sky).update_attributes!(:tag_list => "Nature, Beautiful") assert_equivalent ["Nature", "Beautiful"], posts(:jonathan_sky).tag_list end def test_tags_not_saved_if_validation_fails assert_equivalent ["Very good", "Nature"], posts(:jonathan_sky).tag_list assert !posts(:jonathan_sky).update_attributes(:tag_list => "One, Two", :text => "") assert_equivalent ["Very good", "Nature"], Post.find(posts(:jonathan_sky).id).tag_list end def test_tag_list_accessors_on_new_record p = Post.new(:text => 'Test') assert p.tag_list.blank? p.tag_list = "One, Two" assert_equal "One, Two", p.tag_list.to_s end def test_clear_tag_list_with_nil p = photos(:jonathan_questioning_dog) assert !p.tag_list.blank? assert p.update_attributes(:tag_list => nil) assert p.tag_list.blank? assert p.reload.tag_list.blank? end def test_clear_tag_list_with_string p = photos(:jonathan_questioning_dog) assert !p.tag_list.blank? assert p.update_attributes(:tag_list => ' ') assert p.tag_list.blank? assert p.reload.tag_list.blank? end def test_tag_list_reset_on_reload p = photos(:jonathan_questioning_dog) assert !p.tag_list.blank? p.tag_list = nil assert p.tag_list.blank? assert !p.reload.tag_list.blank? end def test_instance_tag_counts assert_tag_counts posts(:jonathan_sky).tag_counts, :good => 2, :nature => 7 end def test_tag_list_populated_when_cache_nil assert_nil posts(:jonathan_sky).cached_tag_list posts(:jonathan_sky).save! assert_equal posts(:jonathan_sky).tag_list.to_s, posts(:jonathan_sky).cached_tag_list end def test_cached_tag_list_used posts(:jonathan_sky).save! 
posts(:jonathan_sky).reload assert_no_queries do assert_equivalent ["Very good", "Nature"], posts(:jonathan_sky).tag_list end end def test_cached_tag_list_not_used # Load fixture and column information posts(:jonathan_sky).taggings(:reload) assert_queries 1 do # Tags association will be loaded posts(:jonathan_sky).tag_list end end def test_cached_tag_list_updated assert_nil posts(:jonathan_sky).cached_tag_list posts(:jonathan_sky).save! assert_equivalent ["Very good", "Nature"], TagList.from(posts(:jonathan_sky).cached_tag_list) posts(:jonathan_sky).update_attributes!(:tag_list => "None") assert_equal 'None', posts(:jonathan_sky).cached_tag_list assert_equal 'None', posts(:jonathan_sky).reload.cached_tag_list end def test_clearing_cached_tag_list # Generate the cached tag list posts(:jonathan_sky).save! posts(:jonathan_sky).update_attributes!(:tag_list => "") assert_equal "", posts(:jonathan_sky).cached_tag_list end def test_find_tagged_with_using_sti special_post = SpecialPost.create!(:text => "Test", :tag_list => "Random") assert_equal [special_post], SpecialPost.find_tagged_with("Random") assert Post.find_tagged_with("Random").include?(special_post) end def test_tag_counts_using_sti SpecialPost.create!(:text => "Test", :tag_list => "Nature") assert_tag_counts SpecialPost.tag_counts, :nature => 1 end def test_case_insensitivity assert_difference "Tag.count", 1 do Post.create!(:text => "Test", :tag_list => "one") Post.create!(:text => "Test", :tag_list => "One") end assert_equal Post.find_tagged_with("Nature"), Post.find_tagged_with("nature") end def test_tag_not_destroyed_when_unused posts(:jonathan_sky).tag_list.add("Random") posts(:jonathan_sky).save! assert_no_difference 'Tag.count' do posts(:jonathan_sky).tag_list.remove("Random") posts(:jonathan_sky).save! end end def test_tag_destroyed_when_unused Tag.destroy_unused = true posts(:jonathan_sky).tag_list.add("Random") posts(:jonathan_sky).save! assert_difference 'Tag.count', -1 do posts(:jonathan_sky).tag_list.remove("Random") posts(:jonathan_sky).save! end ensure Tag.destroy_unused = false end end class ActsAsTaggableOnSteroidsFormTest < Test::Unit::TestCase fixtures :tags, :taggings, :posts, :users, :photos include ActionView::Helpers::FormHelper def test_tag_list_contents fields_for :post, posts(:jonathan_sky) do |f| assert_match /Very good, Nature/, f.text_field(:tag_list) end end end
38.293151
186
0.716892
38401199fdf18ba51807ba00bfd3ff6adb476542
314
module CurationConcerns module NestedWorks extend ActiveSupport::Concern included do class_attribute :valid_child_concerns self.valid_child_concerns = CurationConcerns::ClassifyConcern.new.all_curation_concern_classes end def in_works_ids in_works.map(&:id) end end end
20.933333
100
0.751592
1ce522e87dbe079950a1b776f2eb24e7aa0315f0
2,186
# frozen_string_literal: true require 'tempfile' module Geo class ContainerRepositorySync include Gitlab::Utils::StrongMemoize attr_reader :name, :container_repository def initialize(container_repository) @container_repository = container_repository @name = container_repository.path end def execute tags_to_sync.each do |tag| sync_tag(tag[:name]) end tags_to_remove.each do |tag| container_repository.delete_tag_by_digest(tag[:digest]) end true end private def sync_tag(tag) file = nil manifest = client.repository_raw_manifest(name, tag) manifest_parsed = JSON.parse(manifest) list_blobs(manifest_parsed).each do |digest| next if container_repository.blob_exists?(digest) file = client.pull_blob(name, digest) container_repository.push_blob(digest, file.path) file.unlink end container_repository.push_manifest(tag, manifest, manifest_parsed['mediaType']) ensure file.try(:unlink) end def list_blobs(manifest) layers = manifest['layers'].map do |layer| layer['digest'] end layers.push(manifest.dig('config', 'digest')).compact end def primary_tags @primary_tags ||= begin manifest = client.repository_tags(name) return [] unless manifest && manifest['tags'] manifest['tags'].map do |tag| { name: tag, digest: client.repository_tag_digest(name, tag) } end end end def secondary_tags @secondary_tags ||= begin container_repository.tags.map do |tag| { name: tag.name, digest: tag.digest } end end end def tags_to_sync primary_tags - secondary_tags end def tags_to_remove secondary_tags - primary_tags end # The client for primary registry def client strong_memoize(:client) do ContainerRegistry::Client.new( Gitlab.config.geo.registry_replication.primary_api_url, token: ::Auth::ContainerRegistryAuthenticationService.pull_access_token(name) ) end end end end
23.010526
87
0.651876
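The tag diff in the sync class above relies on Ruby's Array difference over `{ name:, digest: }` hashes, so a tag is re-synced whenever either its name or its digest differs between primary and secondary; a small standalone illustration (digests invented) shows the effect.

# Standalone illustration of the tag-diff logic used above (digests invented).
primary_tags   = [{ name: 'latest', digest: 'sha256:aaa' },
                  { name: 'v1',     digest: 'sha256:bbb' }]
secondary_tags = [{ name: 'latest', digest: 'sha256:old' },
                  { name: 'v2',     digest: 'sha256:ccc' }]

# Hashes compare by value, so 'latest' counts as out of date because its
# digest changed, even though a tag with that name already exists.
p primary_tags - secondary_tags   # tags to sync:   latest (new digest), v1
p secondary_tags - primary_tags   # tags to remove: latest (old digest), v2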
338835f6d93e5b140f9a0fb8e4663820365efc11
2,032
require File.expand_path('../../../spec_helper', __FILE__) require File.expand_path('../fixtures/classes', __FILE__) describe "Hash.[]" do it "creates a Hash; values can be provided as the argument list" do hash_class[:a, 1, :b, 2].should == new_hash(:a => 1, :b => 2) hash_class[].should == new_hash hash_class[:a, 1, :b, new_hash(:c => 2)].should == new_hash(:a => 1, :b => new_hash(:c => 2)) end it "creates a Hash; values can be provided as one single hash" do hash_class[:a => 1, :b => 2].should == new_hash(:a => 1, :b => 2) hash_class[new_hash(1 => 2, 3 => 4)].should == new_hash(1 => 2, 3 => 4) hash_class[new_hash].should == new_hash end ruby_version_is '1.8.7' do # Not officially documented yet, see http://redmine.ruby-lang.org/issues/show/1385 ruby_bug "[ruby-core:21249]", "1.8.7.167" do it "creates a Hash; values can be provided as a list of value-pairs in an array" do hash_class[[[:a, 1], [:b, 2]]].should == new_hash(:a => 1, :b => 2) hash_class[[[:a, 1], [:b], 42, [:d, 2], [:e, 2, 3], []]].should == new_hash(:a => 1, :b => nil, :d => 2) obj = mock('x') def obj.to_ary() [:b, 2] end hash_class[[[:a, 1], obj]].should == new_hash(:a => 1, :b => 2) end end end it "raises an ArgumentError when passed an odd number of arguments" do lambda { hash_class[1, 2, 3] }.should raise_error(ArgumentError) lambda { hash_class[1, 2, new_hash(3 => 4)] }.should raise_error(ArgumentError) end ruby_version_is '1.8.7' do it "calls to_hash" do obj = mock('x') def obj.to_hash() new_hash(1 => 2, 3 => 4) end hash_class[obj].should == new_hash(1 => 2, 3 => 4) end it "returns an instance of a subclass when passed an Array" do MyHash[[1,2,3,4]].should be_kind_of(MyHash) end end it "returns an instance of the class it's called on" do hash_class[MyHash[1, 2]].class.should == hash_class MyHash[hash_class[1, 2]].should be_kind_of(MyHash) end end
38.339623
112
0.60187
ed35202b25015b4af759c94494861cbced625f9d
2,530
require 'test_helper' class CampaignFlowTest < ActionDispatch::IntegrationTest test "can see the list of campaigns" do get admin_organization_url(organizations(:one)) assert_select "h3", "Campaigns" end test "can create a campaign" do get new_admin_organization_campaign_url(organizations(:one)) assert_response :success post admin_organization_campaigns_url(organizations(:one)), params: { campaign: { name: "Test Campaign", deadline: 1.month.from_now, can_edit_wishlists: true, address: "123 Book Dr.", book_limit: 6, campaign_catalog_ids: [1] } } assert_response :redirect follow_redirect! assert_response :success assert_select "h3", "Campaign Detail" end test "can edit a campaign" do get admin_organization_campaign_url(organizations(:one), campaigns(:one)) assert_response :success put admin_organization_campaign_url(organizations(:one), campaigns(:one)), params: { campaign: { name: "Test Campaign Edit", deadline: 1.month.from_now, can_edit_wishlists: true, address: "123 Book Dr.", book_limit: 6, campaign_catalog_ids: [1] } } assert_response :redirect follow_redirect! assert_response :success assert_select "h3", "Organization Detail" end test "can delete a campaign" do delete admin_organization_campaign_url(organizations(:one), campaigns(:two)) assert_response :redirect follow_redirect! assert_response :success assert_select "h3", "Organization Detail" end test "can add wishlists when can edit wishlists is true" do get admin_organization_campaign_url(organizations(:one), campaigns(:one)) assert_select "a", {count: 1, text: 'New Wishlist'} assert_select "a", {count: 1, text: 'Bulk Edit'} assert_select "a", {count: 1, text: 'Bulk Upload'} #These require selenium probably since there is some js rendering going on #assert_select "[colid=reader_name]", {count: 1, text: 'Reader One'} #assert_select "a", {count: 1, text: 'Edit'} end test "cannot add wishlists when can edit wishlists is false" do get admin_organization_campaign_url(organizations(:one), campaigns(:three)) assert_select "a", {count: 0, text: 'New Wishlist'} assert_select "a", {count: 0, text: 'Bulk Edit'} assert_select "a", {count: 0, text: 'Bulk Upload'} #These require selenium probably since there is some js rendering going on #assert_select "[colid=reader_name]", {count: 1, text: 'Reader Three'} #assert_select "a", {count: 0, text: 'Edit'} end end
40.806452
179
0.719368
ab37808e5f84217d2d38f1466140b1246e505e7d
98
class Setting < Settingslogic source "#{Rails.root}/config/config.yml" namespace Rails.env end
24.5
42
0.765306
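Settingslogic, used by the Setting class above, resolves keys from the YAML file named in `source`, scoped to the `namespace`; a hypothetical config/config.yml and lookup (keys invented for illustration) would behave as sketched below.

# Hypothetical config/config.yml consumed by the Setting class above:
#
#   defaults: &defaults
#     app_name: "Example"
#   development:
#     <<: *defaults
#     host: "localhost:3000"
#   test:
#     <<: *defaults
#     host: "test.host"
#
# With `namespace Rails.env`, lookups are scoped to the current environment:
Setting.host      # => "localhost:3000" in development
Setting.app_name  # => "Example"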
1a4851a1071c803edaff470fe085f4d5ae64e8bb
144
class Bootstrap::DropdownDividersController < Bootstrap::BaseBootstrapController def index redirect_to bootstrap_dropdowns_path end end
24
80
0.840278
08b8a009e7cf4ab585ae7ceb0644983d26da962f
822
require_relative "../canvas_base_mutation" module LMSGraphQL module Mutations module Canvas class SelectProvisionalGrade < BaseMutation argument :course_id, ID, required: true argument :assignment_id, ID, required: true argument :provisional_grade_id, ID, required: true field :return_value, Boolean, null: false def resolve(course_id:, assignment_id:, provisional_grade_id:) context[:canvas_api].call("SELECT_PROVISIONAL_GRADE").proxy( "SELECT_PROVISIONAL_GRADE", { "course_id": course_id, "assignment_id": assignment_id, "provisional_grade_id": provisional_grade_id }, {}, ).parsed_response end end end end end
29.357143
70
0.607056
b9a1f4e5f413c95529f14138fbe4468260c82e5a
2,304
# frozen_string_literal: true require_relative '../buy_sell_par_shares' require_relative 'sell_company' require_relative 'choose_power' module Engine module Step module G18ZOO class BuySellParShares < BuySellParShares include SellCompany include ChoosePower def actions(entity) return [] unless entity == current_entity return ['sell_shares'] if must_sell?(entity) actions = [] actions << 'buy_shares' if can_buy_any?(entity) actions << 'par' if can_ipo_any?(entity) actions << 'buy_company' unless purchasable_unsold_companies.empty? actions << 'sell_shares' if can_sell_any?(entity) actions << 'sell_company' if can_sell_any_companies?(entity) actions << 'choose' if choice_available?(entity) actions << 'pass' unless actions.empty? actions end def can_buy_company?(player, _company) player.companies.count { |c| !c.name.start_with?('ZOOTicket') } < 3 end def process_buy_company(action) super @game.available_companies.delete(action.company) @game.apply_custom_ability(action.company) end def get_par_prices(_entity, _corp) super.reject do |p| (p.price == 9 && [email protected]?(:green)) || (p.price == 12 && [email protected]?(:brown)) end end def can_buy?(entity, bundle) super && more_than_80_only_from_market(entity, bundle) end def more_than_80_only_from_market(entity, bundle) corporation = bundle.corporation ipo_share = corporation.shares[0] is_ipo_share = ipo_share == bundle percent = entity.percent_of(corporation) !is_ipo_share || percent < 80 end def log_pass(entity) return @log << "#{entity.name} passes" if @current_actions.empty? return if bought? && sold? action = bought? ? 'to sell' : 'to buy' @log << "#{entity.name} declines #{action} shares" end private def purchasable_unsold_companies return [] if bought? @game.available_companies end end end end end
29.538462
77
0.605469
38b54bdc44068c82c778175581e68136dac9d7d4
16,122
# frozen_string_literal: true require 'rubygems/test_case' require 'rubygems' class TestGemRequire < Gem::TestCase class Latch def initialize(count = 1) @count = count @lock = Monitor.new @cv = @lock.new_cond end def release @lock.synchronize do @count -= 1 if @count > 0 @cv.broadcast if @count.zero? end end def await @lock.synchronize do @cv.wait_while { @count > 0 } end end end def setup super @old_loaded_features = $LOADED_FEATURES.dup assert_raises LoadError do require 'test_gem_require_a' end $LOADED_FEATURES.replace @old_loaded_features end def assert_require(path) assert require(path), "'#{path}' was already required" end def refute_require(path) refute require(path), "'#{path}' was not yet required" end # Providing -I on the commandline should always beat gems def test_dash_i_beats_gems a1 = util_spec "a", "1", {"b" => "= 1"}, "lib/test_gem_require_a.rb" b1 = util_spec "b", "1", {"c" => "> 0"}, "lib/b/c.rb" c1 = util_spec "c", "1", nil, "lib/c/c.rb" c2 = util_spec "c", "2", nil, "lib/c/c.rb" install_specs c1, c2, b1, a1 dir = Dir.mktmpdir("test_require", @tempdir) dash_i_arg = File.join dir, 'lib' c_rb = File.join dash_i_arg, 'b', 'c.rb' FileUtils.mkdir_p File.dirname c_rb File.open(c_rb, 'w') { |f| f.write "class Object; HELLO = 'world' end" } lp = $LOAD_PATH.dup # Pretend to provide a commandline argument that overrides a file in gem b $LOAD_PATH.unshift dash_i_arg assert_require 'test_gem_require_a' assert_require 'b/c' # this should be required from -I assert_equal "world", ::Object::HELLO assert_equal %w(a-1 b-1), loaded_spec_names ensure $LOAD_PATH.replace lp Object.send :remove_const, :HELLO if Object.const_defined? :HELLO end def create_sync_thread Thread.new do begin yield ensure FILE_ENTERED_LATCH.release FILE_EXIT_LATCH.await end end end # Providing -I on the commandline should always beat gems def test_dash_i_beats_default_gems a1 = new_default_spec "a", "1", {"b" => "= 1"}, "test_gem_require_a.rb" b1 = new_default_spec "b", "1", {"c" => "> 0"}, "b/c.rb" c1 = new_default_spec "c", "1", nil, "c/c.rb" c2 = new_default_spec "c", "2", nil, "c/c.rb" install_default_specs c1, c2, b1, a1 dir = Dir.mktmpdir("test_require", @tempdir) dash_i_arg = File.join dir, 'lib' c_rb = File.join dash_i_arg, 'c', 'c.rb' FileUtils.mkdir_p File.dirname c_rb File.open(c_rb, 'w') { |f| f.write "class Object; HELLO = 'world' end" } assert_require 'test_gem_require_a' lp = $LOAD_PATH.dup # Pretend to provide a commandline argument that overrides a file in gem b $LOAD_PATH.unshift dash_i_arg assert_require 'b/c' assert_require 'c/c' # this should be required from -I assert_equal "world", ::Object::HELLO assert_equal %w(a-1 b-1), loaded_spec_names ensure $LOAD_PATH.replace lp Object.send :remove_const, :HELLO if Object.const_defined? :HELLO end def test_concurrent_require Object.const_set :FILE_ENTERED_LATCH, Latch.new(2) Object.const_set :FILE_EXIT_LATCH, Latch.new(1) a1 = util_spec "a", "1", nil, "lib/a.rb" b1 = util_spec "b", "1", nil, "lib/b.rb" install_specs a1, b1 t1 = create_sync_thread{ assert_require 'a' } t2 = create_sync_thread{ assert_require 'b' } # wait until both files are waiting on the exit latch FILE_ENTERED_LATCH.await # now let them finish FILE_EXIT_LATCH.release assert t1.join, "thread 1 should exit" assert t2.join, "thread 2 should exit" ensure Object.send :remove_const, :FILE_ENTERED_LATCH if Object.const_defined? :FILE_ENTERED_LATCH Object.send :remove_const, :FILE_EXIT_LATCH if Object.const_defined? 
:FILE_EXIT_LATCH end def test_require_is_not_lazy_with_exact_req a1 = util_spec "a", "1", {"b" => "= 1"}, "lib/test_gem_require_a.rb" b1 = util_spec "b", "1", nil, "lib/b/c.rb" b2 = util_spec "b", "2", nil, "lib/b/c.rb" install_specs b1, b2, a1 assert_require 'test_gem_require_a' assert_equal %w(a-1 b-1), loaded_spec_names assert_equal unresolved_names, [] assert_require "b/c" assert_equal %w(a-1 b-1), loaded_spec_names end def test_require_is_lazy_with_inexact_req a1 = util_spec "a", "1", {"b" => ">= 1"}, "lib/test_gem_require_a.rb" b1 = util_spec "b", "1", nil, "lib/b/c.rb" b2 = util_spec "b", "2", nil, "lib/b/c.rb" install_specs b1, b2, a1 assert_require 'test_gem_require_a' assert_equal %w(a-1), loaded_spec_names assert_equal unresolved_names, ["b (>= 1)"] assert_require "b/c" assert_equal %w(a-1 b-2), loaded_spec_names end def test_require_is_not_lazy_with_one_possible a1 = util_spec "a", "1", {"b" => ">= 1"}, "lib/test_gem_require_a.rb" b1 = util_spec "b", "1", nil, "lib/b/c.rb" install_specs b1, a1 assert_require 'test_gem_require_a' assert_equal %w(a-1 b-1), loaded_spec_names assert_equal unresolved_names, [] assert_require "b/c" assert_equal %w(a-1 b-1), loaded_spec_names end def test_require_can_use_a_pathname_object a1 = util_spec "a", "1", nil, "lib/test_gem_require_a.rb" install_specs a1 assert_require Pathname.new 'test_gem_require_a' assert_equal %w(a-1), loaded_spec_names assert_equal unresolved_names, [] end def test_activate_via_require_respects_loaded_files skip "Not sure what's going on. If another spec creates a 'a' gem before this test, somehow require will load the benchmark in b, and ignore that the stdlib one is already in $LOADED_FEATURES?. Reproducible by running the spaceship_specific_file test before this one" if java_platform? lp = $LOAD_PATH.dup lib_dir = File.expand_path(File.join(File.dirname(__FILE__), "../../lib")) if File.exist?(lib_dir) $LOAD_PATH.delete lib_dir $LOAD_PATH.push lib_dir end a1 = util_spec "a", "1", {"b" => ">= 1"}, "lib/test_gem_require_a.rb" b1 = util_spec "b", "1", nil, "lib/benchmark.rb" b2 = util_spec "b", "2", nil, "lib/benchmark.rb" install_specs b1, b2, a1 assert_require 'test_gem_require_a' assert_equal unresolved_names, ["b (>= 1)"] refute require('benchmark'), "benchmark should have already been loaded" # We detected that we should activate b-2, so we did so, but # then original_require decided "I've already got benchmark.rb" loaded. # This case is fine because our lazy loading is provided exactly # the same behavior as eager loading would have. assert_equal %w(a-1 b-2), loaded_spec_names ensure $LOAD_PATH.replace lp unless java_platform? 
end def test_already_activated_direct_conflict a1 = util_spec "a", "1", { "b" => "> 0" } b1 = util_spec "b", "1", { "c" => ">= 1" }, "lib/ib.rb" b2 = util_spec "b", "2", { "c" => ">= 2" }, "lib/ib.rb" c1 = util_spec "c", "1", nil, "lib/d.rb" c2 = util_spec("c", "2", nil, "lib/d.rb") install_specs c1, c2, b1, b2, a1 a1.activate c1.activate assert_equal %w(a-1 c-1), loaded_spec_names assert_equal ["b (> 0)"], unresolved_names assert require("ib") assert_equal %w(a-1 b-1 c-1), loaded_spec_names assert_equal [], unresolved_names end def test_multiple_gems_with_the_same_path a1 = util_spec "a", "1", { "b" => "> 0", "x" => "> 0" } b1 = util_spec "b", "1", { "c" => ">= 1" }, "lib/ib.rb" b2 = util_spec "b", "2", { "c" => ">= 2" }, "lib/ib.rb" x1 = util_spec "x", "1", nil, "lib/ib.rb" x2 = util_spec "x", "2", nil, "lib/ib.rb" c1 = util_spec "c", "1", nil, "lib/d.rb" c2 = util_spec("c", "2", nil, "lib/d.rb") install_specs c1, c2, x1, x2, b1, b2, a1 a1.activate c1.activate assert_equal %w(a-1 c-1), loaded_spec_names assert_equal ["b (> 0)", "x (> 0)"], unresolved_names e = assert_raises(Gem::LoadError) do require("ib") end assert_equal "ib found in multiple gems: b, x", e.message end def test_unable_to_find_good_unresolved_version a1 = util_spec "a", "1", { "b" => "> 0" } b1 = util_spec "b", "1", { "c" => ">= 2" }, "lib/ib.rb" b2 = util_spec "b", "2", { "c" => ">= 3" }, "lib/ib.rb" c1 = util_spec "c", "1", nil, "lib/d.rb" c2 = util_spec "c", "2", nil, "lib/d.rb" c3 = util_spec "c", "3", nil, "lib/d.rb" install_specs c1, c2, c3, b1, b2, a1 a1.activate c1.activate assert_equal %w(a-1 c-1), loaded_spec_names assert_equal ["b (> 0)"], unresolved_names e = assert_raises(Gem::LoadError) do require("ib") end assert_equal "unable to find a version of 'b' to activate", e.message end def test_require_works_after_cleanup a1 = new_default_spec "a", "1.0", nil, "a/b.rb" b1 = new_default_spec "b", "1.0", nil, "b/c.rb" b2 = new_default_spec "b", "2.0", nil, "b/d.rb" install_default_gems a1 install_default_gems b1 install_default_gems b2 # Load default ruby gems fresh as if we've just started a ruby script. Gem::Specification.reset require 'rubygems' Gem::Specification.stubs # Remove an old default gem version directly from disk as if someone ran # gem cleanup. FileUtils.rm_rf(File.join @default_dir, "#{b1.full_name}") FileUtils.rm_rf(File.join @default_spec_dir, "#{b1.full_name}.gemspec") # Require gems that have not been removed. 
assert_require 'a/b' assert_equal %w(a-1.0), loaded_spec_names assert_require 'b/d' assert_equal %w(a-1.0 b-2.0), loaded_spec_names end def test_require_doesnt_traverse_development_dependencies a = util_spec("a", "1", nil, "lib/a.rb") z = util_spec("z", "1", "w" => "> 0") w1 = util_spec("w", "1") { |s| s.add_development_dependency "non-existent" } w2 = util_spec("w", "2") { |s| s.add_development_dependency "non-existent" } install_specs a, w1, w2, z assert gem("z") assert_equal %w(z-1), loaded_spec_names assert_equal ["w (> 0)"], unresolved_names assert require("a") end def test_default_gem_only default_gem_spec = new_default_spec("default", "2.0.0.0", nil, "default/gem.rb") install_default_specs(default_gem_spec) assert_require "default/gem" assert_equal %w(default-2.0.0.0), loaded_spec_names end def test_default_gem_require_activates_just_once default_gem_spec = new_default_spec("default", "2.0.0.0", nil, "default/gem.rb") install_default_specs(default_gem_spec) assert_require "default/gem" times_called = 0 Kernel.stub(:gem, ->(name, requirement) { times_called += 1 }) do refute_require "default/gem" end assert_equal 0, times_called end def test_realworld_default_gem begin gem 'json' rescue Gem::MissingSpecError skip "default gems are only available after ruby installation" end cmd = <<-RUBY $stderr = $stdout require "json" puts Gem.loaded_specs["json"].default_gem? RUBY output = Gem::Util.popen(Gem.ruby, "-e", cmd).strip assert_equal "true", output end def test_default_gem_and_normal_gem default_gem_spec = new_default_spec("default", "2.0.0.0", nil, "default/gem.rb") install_default_specs(default_gem_spec) normal_gem_spec = util_spec("default", "3.0", nil, "lib/default/gem.rb") install_specs(normal_gem_spec) assert_require "default/gem" assert_equal %w(default-3.0), loaded_spec_names end def test_default_gem_prerelease default_gem_spec = new_default_spec("default", "2.0.0", nil, "default/gem.rb") install_default_specs(default_gem_spec) normal_gem_higher_prerelease_spec = util_spec("default", "3.0.0.rc2", nil, "lib/default/gem.rb") install_default_specs(normal_gem_higher_prerelease_spec) assert_require "default/gem" assert_equal %w(default-3.0.0.rc2), loaded_spec_names end def loaded_spec_names Gem.loaded_specs.values.map(&:full_name).sort end def unresolved_names Gem::Specification.unresolved_deps.values.map(&:to_s).sort end def test_try_activate_error_unlocks_require_monitor silence_warnings do class << ::Gem alias old_try_activate try_activate def try_activate(*); raise 'raised from try_activate'; end end end require 'does_not_exist_for_try_activate_test' rescue RuntimeError => e assert_match(/raised from try_activate/, e.message) assert Kernel::RUBYGEMS_ACTIVATION_MONITOR.try_enter, "require monitor was not unlocked when try_activate raised" ensure silence_warnings do class << ::Gem alias try_activate old_try_activate end end Kernel::RUBYGEMS_ACTIVATION_MONITOR.exit end def test_require_when_gem_defined default_gem_spec = new_default_spec("default", "2.0.0.0", nil, "default/gem.rb") install_default_specs(default_gem_spec) c = Class.new do def self.gem(*args) raise "received #gem with #{args.inspect}" end end assert c.send(:require, "default/gem") assert_equal %w(default-2.0.0.0), loaded_spec_names end def test_require_default_when_gem_defined a = util_spec("a", "1", nil, "lib/a.rb") install_specs a c = Class.new do def self.gem(*args) raise "received #gem with #{args.inspect}" end end assert c.send(:require, "a") assert_equal %w(a-1), loaded_spec_names end def test_require_bundler b1 
= util_spec('bundler', '1', nil, "lib/bundler/setup.rb") b2a = util_spec('bundler', '2.a', nil, "lib/bundler/setup.rb") install_specs b1, b2a require "rubygems/bundler_version_finder" $:.clear assert_require 'bundler/setup' assert_equal %w[bundler-2.a], loaded_spec_names assert_empty unresolved_names end def test_require_bundler_missing_bundler_version Gem::BundlerVersionFinder.stub(:bundler_version_with_reason, ["55", "reason"]) do b1 = util_spec('bundler', '1.999999999', nil, "lib/bundler/setup.rb") b2a = util_spec('bundler', '2.a', nil, "lib/bundler/setup.rb") install_specs b1, b2a e = assert_raises Gem::MissingSpecVersionError do gem('bundler') end assert_match "Could not find 'bundler' (55) required by reason.", e.message end end def test_require_bundler_with_bundler_version Gem::BundlerVersionFinder.stub(:bundler_version_with_reason, ["1", "reason"]) do b1 = util_spec('bundler', '1.999999999', nil, "lib/bundler/setup.rb") b2 = util_spec('bundler', '2', nil, "lib/bundler/setup.rb") install_specs b1, b2 $:.clear assert_require 'bundler/setup' assert_equal %w[bundler-1.999999999], loaded_spec_names end end # uplevel is 2.5+ only and jruby has some issues with it if RUBY_VERSION >= "2.5" && !java_platform? def test_no_kernel_require_in_warn_with_uplevel lib = File.realpath("../../../lib", __FILE__) Dir.mktmpdir("warn_test") do |dir| File.write(dir + "/sub.rb", "warn 'uplevel', 'test', uplevel: 1\n") File.write(dir + "/main.rb", "require 'sub'\n") _, err = capture_subprocess_io do system(@@ruby, "-w", "-rpp", "--disable=gems", "-I", lib, "-C", dir, "-I.", "main.rb") end assert_equal "main.rb:1: warning: uplevel\ntest\n", err _, err = capture_subprocess_io do system(@@ruby, "-w", "-rpp", "--enable=gems", "-I", lib, "-C", dir, "-I.", "main.rb") end assert_equal "main.rb:1: warning: uplevel\ntest\n", err end end end def silence_warnings old_verbose, $VERBOSE = $VERBOSE, false yield ensure $VERBOSE = old_verbose end end
30.476371
117
0.644213
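# A standalone sketch of how a countdown latch like the Latch helper in
# TestGemRequire above coordinates threads. The class here is an illustrative
# copy; the worker block, counts, and sleep durations are hypothetical.
require 'monitor'

class CountdownLatchSketch
  def initialize(count = 1)
    @count = count
    @lock  = Monitor.new
    @cv    = @lock.new_cond
  end

  # Decrement the counter; wake all waiters once it reaches zero.
  def release
    @lock.synchronize do
      @count -= 1 if @count > 0
      @cv.broadcast if @count.zero?
    end
  end

  # Block the calling thread until the counter reaches zero.
  def await
    @lock.synchronize { @cv.wait_while { @count > 0 } }
  end
end

latch   = CountdownLatchSketch.new(2)
workers = 2.times.map { |i| Thread.new { sleep 0.05 * (i + 1); latch.release } }
latch.await            # resumes only after both workers have released
workers.each(&:join)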
33e6a4b7f04294d53a23949c29272ac0bc04ce56
959
# This file is auto-generated from the current state of the database. Instead # of editing this file, please use the migrations feature of Active Record to # incrementally modify your database, and then regenerate this schema definition. # # Note that this schema.rb definition is the authoritative source for your # database schema. If you need to create the application database on another # system, you should be using db:schema:load, not running all the migrations # from scratch. The latter is a flawed and unsustainable approach (the more migrations # you'll amass, the slower it'll run and the greater likelihood for issues). # # It's strongly recommended that you check this file into your version control system. ActiveRecord::Schema.define(version: 20180326025625) do create_table "posts", force: :cascade do |t| t.string "title" t.text "body" t.datetime "created_at", null: false t.datetime "updated_at", null: false end end
41.695652
86
0.766423
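# A hypothetical Active Record model matching the posts table defined in the
# schema above; the application's real Post class is not part of this record,
# and the validations are illustrative assumptions only.
require 'active_record'

class Post < ActiveRecord::Base
  validates :title, presence: true  # assumed constraint, not enforced by the schema
  validates :body,  presence: true  # assumed constraint, not enforced by the schema
end

# Post.create(title: 'Hello', body: 'First post')  # would require an established database connection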
d57367e931ec798df8156b96f5e101a026f742a7
766
require 'spec_helper' describe Explore::ProjectsController do describe 'GET #trending' do context 'sorting by update date' do let(:project1) { create(:project, :public, updated_at: 3.days.ago) } let(:project2) { create(:project, :public, updated_at: 1.day.ago) } before do create(:trending_project, project: project1) create(:trending_project, project: project2) end it 'sorts by last updated' do get :trending, params: { sort: 'updated_desc' } expect(assigns(:projects)).to eq [project2, project1] end it 'sorts by oldest updated' do get :trending, params: { sort: 'updated_asc' } expect(assigns(:projects)).to eq [project1, project2] end end end end
27.357143
74
0.639687
bbd99676c35955582339d80fd03013da365912e0
2,048
# frozen_string_literal: true require "active_model/validations/clusivity" module ActiveModel module Validations class InclusionValidator < EachValidator # :nodoc: include Clusivity def validate_each(record, attribute, value) unless include?(record, value) record.errors.add(attribute, :inclusion, **options.except(:in, :within).merge!(value: value)) end end end module HelperMethods # Validates whether the value of the specified attribute is available in a # particular enumerable object. # # class Person < ActiveRecord::Base # validates_inclusion_of :role, in: %w( admin contributor ) # validates_inclusion_of :age, in: 0..99 # validates_inclusion_of :format, in: %w( jpg gif png ), message: "extension %{value} is not included in the list" # validates_inclusion_of :states, in: ->(person) { STATES[person.country] } # validates_inclusion_of :karma, in: :available_karmas # end # # Configuration options: # * <tt>:in</tt> - An enumerable object of available items. This can be # supplied as a proc, lambda or symbol which returns an enumerable. If the # enumerable is a numerical, time or datetime range the test is performed # with <tt>Range#cover?</tt>, otherwise with <tt>include?</tt>. When using # a proc or lambda the instance under validation is passed as an argument. # * <tt>:within</tt> - A synonym(or alias) for <tt>:in</tt> # * <tt>:message</tt> - Specifies a custom error message (default is: "is # not included in the list"). # # There is also a list of default options supported by every validator: # +:if+, +:unless+, +:on+, +:allow_nil+, +:allow_blank+, and +:strict+. # See <tt>ActiveModel::Validations#validates</tt> for more information def validates_inclusion_of(*attr_names) validates_with InclusionValidator, _merge_attributes(attr_names) end end end end
42.666667
124
0.656738
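# A minimal, runnable sketch of the validates_inclusion_of helper documented
# above, used on a plain Active Model class; the Person class and its
# attributes are assumptions made for illustration.
require 'active_model'

class Person
  include ActiveModel::Validations

  attr_accessor :role, :age

  validates_inclusion_of :role, in: %w( admin contributor )
  validates_inclusion_of :age,  in: 0..99
end

person = Person.new
person.role = 'guest'   # not in the allowed list
person.age  = 30
person.valid?           # => false
person.errors[:role]    # => ["is not included in the list"]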
3893e1d0df85aacd41ce57e88890fea50ab86474
249
module EbayTrading # :nodoc: module Responses # :nodoc: # == Attributes class SellerReverseDispute < Abstract include XML::Mapping include Initializer root_element_name 'SellerReverseDisputeResponse' end end end
17.785714
54
0.698795
7aa5b9b97434bf0ea72b19dd295e6683754439ba
41
# typed: true begin; 1; 2; else; 3; end
10.25
25
0.585366
286cf7c6bcfb171715f4a75c814eafa3b1bfba99
103
puts '--Lista de Compras--'

# Open the file with a block so the handle is closed automatically.
File.open('shopping-list.txt') do |file|
  file.each do |line|
    puts line
  end
end
14.714286
37
0.679612
330b9690c750b8bf75440134a115fb6a5cc3fd8d
550
require File.dirname(__FILE__) + '/spec_helper' describe "Benchmarking", :type => :formatter do version = RedCloth::VERSION.is_a?(Module) ? RedCloth::VERSION::STRING : RedCloth::VERSION platform = RedCloth.const_defined?(:EXTENSION_LANGUAGE) ? RedCloth::EXTENSION_LANGUAGE : (version < "4.0.0" ? "ruby-regex" : "C") it "should not be too slow" do # puts "Benchmarking version #{version} compiled in #{platform}..." fixtures.each do |name, doc| if doc['html'] RedCloth.new(doc['in']).to_html end end end end
36.666667
131
0.669091
bf1d077cfcfc92142113bf5b8bc5dd4665188337
3,158
# frozen_string_literal: true module ActiveRecord module Associations class Preloader class Branch #:nodoc: attr_reader :association, :children, :parent attr_reader :scope, :associate_by_default attr_writer :preloaded_records def initialize(association:, children:, parent:, associate_by_default:, scope:) @association = association @parent = parent @scope = scope @associate_by_default = associate_by_default @children = build_children(children) end def root? parent.nil? end def source_records @parent.preloaded_records end def preloaded_records @preloaded_records ||= loaders.flat_map(&:preloaded_records) end def done? loaders.all?(&:run?) end def runnable_loaders loaders.flat_map(&:runnable_loaders).reject(&:run?) end def grouped_records h = {} polymorphic_parent = !root? && parent.polymorphic? source_records.each do |record| reflection = record.class._reflect_on_association(association) next if polymorphic_parent && !reflection || !record.association(association).klass (h[reflection] ||= []) << record end h end def preloaders_for_reflection(reflection, reflection_records) reflection_records.group_by { |record| record.association(reflection.name).klass }.map do |rhs_klass, rs| preloader_for(reflection).new(rhs_klass, rs, reflection, scope, associate_by_default) end end def polymorphic? return false if root? return @polymorphic if defined?(@polymorphic) @polymorphic = source_records.any? do |record| reflection = record.class._reflect_on_association(association) reflection && reflection.options[:polymorphic] end end def loaders @loaders ||= grouped_records.flat_map do |reflection, reflection_records| preloaders_for_reflection(reflection, reflection_records) end end private def build_children(children) Array.wrap(children).flat_map { |association| Array(association).flat_map { |parent, child| Branch.new( parent: self, association: parent, children: child, associate_by_default: associate_by_default, scope: scope ) } } end # Returns a class containing the logic needed to load preload the data # and attach it to a relation. The class returned implements a `run` method # that accepts a preloader. def preloader_for(reflection) reflection.check_preloadable! if reflection.options[:through] ThroughAssociation else Association end end end end end end
30.07619
115
0.582964
62b760264881133fe8a715420df80563ef72b143
904
module Omx class Player include KeyboardShortcuts PIPE = '/tmp/omxpipe' def initialize mkfifo end def open(opts={}) opts = default_options.merge(opts) system unix_command_with(opts) start end private def unix_command_with(opts) ['omxplayer'].tap do |args| # Audio device =~ /hdmi|local/ args << "--adev #{opts[:audio_out]}" # Start position in seconds args << "--pos #{opts[:start_pos]}" args << "\"#{opts[:filename]}\"" args << "< #{PIPE} &" end.join ' ' end def mkfifo system "mkfifo #{PIPE}" unless File.exists?(PIPE) end def send_to_pipe(command) system "echo -n #{command} > #{PIPE} &" end def default_options { audio_out: 'hdmi', start_pos: 0, } end end end
19.652174
57
0.514381
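# A usage sketch for the Omx::Player class above, assuming the KeyboardShortcuts
# module it includes (and the start method called from #open) are provided
# elsewhere in the gem and that the omxplayer binary is installed; the file name
# and option values are placeholders.
player = Omx::Player.new                  # creates the /tmp/omxpipe FIFO if it is missing
player.open(filename:  'movie.mp4',       # placeholder path to a video file
            audio_out: 'local',           # analog audio instead of the default 'hdmi'
            start_pos: 30)                # begin playback thirty seconds in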
6a203c80df57f4c5665f31d3e9f4302dd9957079
1,094
#-- copyright # ReportingEngine # # Copyright (C) 2010 - 2014 the OpenProject Foundation (OPF) # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # version 3. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. #++ class Report::GroupBy module SqlAggregation def responsible_for_sql? true end def compute_result super.tap { |r| r.important_fields = group_fields }.grouped_by(all_group_fields(false), type, group_fields) end def sql_statement super.tap do |sql| define_group sql sql.count unless sql.selects.include? 'count' end end end end
28.789474
113
0.723035
ff489147bfadbe00c9ce7a10e34c39737ca5c48d
287
class Mobile::CandidaciesController < Mobile::ApplicationController
  before_filter :set_election

  def create
    @candidacies = Candidacy.find(params[:candidacies])
    redirect_to tags_path(@election.namespace, @candidacies.collect { |c| c.namespace }.join(','))
  end
end
26.090909
97
0.731707
0357757c8685f5e9f35b7832561a3f527792ba10
727
# frozen_string_literal: true require "rails/generators/erb" require "rails/generators/abstract_generator" module Erb module Generators class ComponentGenerator < Base include ViewComponent::AbstractGenerator source_root File.expand_path("templates", __dir__) class_option :sidecar, type: :boolean, default: false class_option :inline, type: :boolean, default: false class_option :stimulus, type: :boolean, default: false def engine_name "erb" end def copy_view_file super end private def data_attributes if options["stimulus"] " data-controller=\"#{stimulus_controller}\"" end end end end end
21.382353
60
0.662999
876b922c10ca94486039138296a317b17db7efd0
757
require 'rx' # Have staggering intervals source1 = RX::Observable.interval(0.1) .map {|i| 'First: ' + i.to_s } source2 = RX::Observable.interval(0.15) .map {|i| 'Second: ' + i.to_s } # Combine latest of source1 and source2 whenever either gives a value source = source1.combine_latest(source2) {|s1, s2| s1 + ', ' + s2 } .take(4) subscription = source.subscribe( lambda {|x| puts 'Next: ' + x.to_s }, lambda {|err| puts 'Error: ' + err.to_s }, lambda { puts 'Completed' }) # => Next: First: 0, Second: 0 # => Next: First: 1, Second: 0 # => Next: First: 1, Second: 1 # => Next: First: 2, Second: 1 # => Completed while Thread.list.size > 1 (Thread.list - [Thread.current]).each &:join end
22.264706
69
0.589168
21674830fe5c7e951b29dbbc29e280b263e69dd5
749
class PostsController < ApplicationController def index @posts = Post.all.order('created_at DESC') end def new @post = Post.new end def create @post = Post.new(post_params) if @post.save redirect_to @post else render 'new' end end def show @post = Post.find(params[:id]) end def edit @post = Post.find(params[:id]) end def update @post = Post.find(params[:id]) if @post.update(params[:post].permit(:title, :body)) redirect_to @post else render 'edit' end end def destroy @post = Post.find(params[:id]) @post.destroy redirect_to root_path end private def post_params params.require(:post).permit(:title, :body) end end
15.285714
56
0.616822
33298bf7d7405c1cc55b677c32b1a8e4fd494c8a
25
require "gindex/version"
12.5
24
0.8
3802720b04eac20ff84f493e346a322a23130e81
9,742
require 'travis/remote_log' describe Travis::RemoteLog do subject { described_class.new(attrs) } let(:attrs) { { id: 4, content: 'huh', job_id: 5 } } before :each do described_class.instance_variable_set(:@client, nil) described_class.instance_variable_set(:@archive_client, nil) end it 'has a default client' do expect(described_class.send(:client)).to_not be_nil end it 'has a default archive_client' do expect(described_class.send(:archive_client)).to_not be_nil end it 'delegates public methods to client' do client = mock('client') client.expects(:find_by_job_id) client.expects(:find_by_id) client.expects(:write_content_for_job_id) described_class.instance_variable_set(:@client, client) described_class.find_by_id described_class.find_by_job_id described_class.write_content_for_job_id end it 'delegates public methods to archive_client' do archive_client = mock('archive_client') archive_client.expects(:fetch_archived_url) described_class.instance_variable_set(:@archive_client, archive_client) described_class.fetch_archived_url end it 'has all the necessary attributes' do %i( aggregated_at archive_verified archived_at archiving content created_at id job_id purged_at removed_at removed_by_id updated_at ).each do |attr| expect { subject.public_send(attr) }.to_not raise_error end end it 'has a nil removed_by without a removed_by_id' do subject.removed_by_id = nil subject.removed_by.should be_nil end it 'has a non-nil removed_by with a removed_by_id' do user = mock('user') subject.removed_by_id = 4 User.expects(:find).with(4).returns(user) subject.removed_by.should == user end it 'has a job' do job = mock('job') Job.expects(:find).with(attrs[:job_id]).returns(job) subject.job.should == job end it 'has archived content' do described_class.expects(:fetch_archived_url) .with(5, expires: nil) .returns('yep') subject.archived_url.should eq 'yep' end it 'has parts' do found_parts = [ Travis::RemoteLogPart.new(number: 42, content: 'yey', final: false) ] described_class.stubs(:find_parts_by_job_id) .with(5, after: nil, part_numbers: []) .returns(found_parts) subject.parts.should eq found_parts end { nil => false, Time.now => true, 'huh' => true }.each do |aggregated_at, is_aggregated| context "when aggregated_at=#{aggregated_at}" do before { subject.aggregated_at = aggregated_at } it "has aggregated?=#{is_aggregated}" do subject.aggregated?.should == is_aggregated end end end { [nil, nil] => false, [nil, true] => false, [nil, false] => false, [Time.now, false] => false, [Time.now, nil] => false, [Time.now, true] => true, }.each do |(archived_at, archive_verified), is_archived| context "when archived_at=#{archived_at.inspect} and " \ "archive_verified=#{archive_verified.inspect}" do before do subject.archived_at = archived_at subject.archive_verified = archive_verified end it "has archived?=#{is_archived}" do subject.archived?.should == is_archived end end end it 'can serialize via #to_json' do from_json = JSON.parse(subject.to_json).fetch('log') from_json.fetch('id').should == attrs[:id] from_json.fetch('job_id').should == attrs[:job_id] from_json.fetch('type').should == 'Log' from_json.fetch('body').should == attrs[:content] end it 'can serialize chunked via #to_json' do subject.stubs(:parts).returns([ Travis::RemoteLogPart.new( number: 8, content: 'whats that', final: false ), Travis::RemoteLogPart.new( number: 11, content: 'whats thaaaaat', final: false ) ]) from_json = JSON.parse( subject.to_json( chunked: true, after: 2, part_numbers: [8, 11] ) ).fetch('log') from_json.fetch('id').should == 
attrs[:id] from_json.fetch('job_id').should == attrs[:job_id] from_json.fetch('type').should == 'Log' from_json.fetch('parts').should == [ { 'number' => 8, 'content' => 'whats that', 'final' => false }, { 'number' => 11, 'content' => 'whats thaaaaat', 'final' => false }, ] from_json.should_not include('body') end it 'can serialize removed logs via #to_json' do user_id = 8 user = mock('user') user.stubs(:name).returns('Twizzler HotDog') user.stubs(:id).returns(user_id) now = mock('now') now.stubs(:utc).returns(now) now.stubs(:to_s).returns('whenebber') Time.stubs(:now).returns(now) subject.stubs(:removed_by).returns(user) subject.removed_at = now subject.removed_by_id = user_id from_json = JSON.parse(subject.to_json).fetch('log') from_json.fetch('id').should == attrs[:id] from_json.fetch('job_id').should == attrs[:job_id] from_json.fetch('type').should == 'Log' from_json.fetch('body').should == attrs[:content] from_json.fetch('removed_by').should == 'Twizzler HotDog' from_json.fetch('removed_at').should == 'whenebber' end it 'can be cleared' do content = 'Log removed by Floof MaGoof at sometime' user_id = 8 client = mock('client') client.expects(:write_content_for_job_id) .with(attrs.fetch(:job_id), content: content, removed_by: user_id) .returns(described_class.new(content: content, removed_by: user_id)) described_class.instance_variable_set(:@client, client) user = mock('user') user.expects(:name).returns('Floof MaGoof') user.expects(:id).returns(user_id) now = mock('now') now.expects(:utc).returns('sometime') Time.stubs(:now).returns(now) subject.clear!(user).should eq(content) end end describe Travis::RemoteLog::Client do let(:url) { 'http://loggo.example.com' } let(:token) { 'fafafafcoolbeansgeocitiesangelfire' } let :stubs do Faraday::Adapter::Test::Stubs.new do |stub| stub.get('/logs/4') { [200, {}, JSON.dump(content: 'huh wow')] } stub.get('/logs/4/id') { [200, {}, JSON.dump(id: 4000)] } stub.put('/logs/8?removed_by=3') do [ 200, {}, JSON.dump( content: 'why not eh', job_id: 8, removed_by_id: 3 ) ] end stub.get('/log-parts/8?after=4&part_numbers=42,17') do [ 200, {}, JSON.dump( job_id: 8, log_parts: [ { number: 42, content: 'whoa noww', final: false }, { number: 17, content: "is a party\e0m", final: false } ] ) ] end end end subject { described_class.new(url: url, token: token) } before do subject.instance_variable_set( :@conn, Faraday.new { |c| c.adapter :test, stubs } ) end it 'can find logs by id' do subject.find_by_id(4).should_not be_nil end it 'can find logs by job id' do subject.find_by_job_id(4).should_not be_nil end it 'can find log ids by job id' do subject.find_id_by_job_id(4).should_not be_nil end it 'can write content for job id' do subject.write_content_for_job_id(8, content: 'oh hi', removed_by: 3) .should_not be_nil end it 'can find parts by job id' do subject.find_parts_by_job_id(8, after: 4, part_numbers: [42, 17]) .should_not be_nil end context 'when the responses are sad' do let :stubs do Faraday::Adapter::Test::Stubs.new do |stub| stub.get('/logs/4') { [404, {}, ''] } stub.get('/logs/4/id') { [404, {}, ''] } stub.put('/logs/8') { [404, {}, ''] } stub.get('/log-parts/8?after=4&part_numbers=42,17') { [404, {}, ''] } end end it 'cannot find logs by id' do subject.find_by_id(4).should be_nil end it 'cannot find logs by job id' do subject.find_by_job_id(4).should be_nil end it 'cannot find log ids by job id' do subject.find_id_by_job_id(4).should be_nil end it 'cannot write content for job id' do expect { subject.write_content_for_job_id(8, content: 'nah') } .to 
raise_error(Travis::RemoteLog::Client::Error) end it 'cannot find parts by job id' do expect do subject.find_parts_by_job_id(8, after: 4, part_numbers: [42, 17]) end.to raise_error(Travis::RemoteLog::Client::Error) end end end describe Travis::RemoteLog::ArchiveClient do subject do described_class.new( access_key_id: 'AKFLAH', secret_access_key: 'SECRETSECRETWOWNEAT', bucket_name: 'fluffernutter-pretzel-pie' ) end let(:s3) { mock('s3') } before do subject.instance_variable_set(:@s3, s3) s3.stubs(:directories).returns(s3) s3.stubs(:get) .with('fluffernutter-pretzel-pie', prefix: 'jobs/9/log.txt') .returns(s3) s3.stubs(:files).returns([s3]) end it 'fetches public archived URLs' do s3.stubs(:public?).returns(true) s3.stubs(:public_url).returns('https://wowneat.example.com/flah') subject.fetch_archived_url(9).should eq 'https://wowneat.example.com/flah' end it 'fetches private archived URLs' do s3.stubs(:public?).returns(false) s3.stubs(:url).with(8001) .returns('https://whoabud.example.com/flah?sig=ya&exp=nah') subject.fetch_archived_url(9, expires: 8001) .should eq'https://whoabud.example.com/flah?sig=ya&exp=nah' end end
27.519774
78
0.628208
bf9c2c095e916b89238f38c8134cb36824a91f91
1,747
Rails.application.configure do # Settings specified here will take precedence over those in config/application.rb. # In the development environment your application's code is reloaded on # every request. This slows down response time but is perfect for development # since you don't have to restart the web server when you make code changes. config.cache_classes = false # Do not eager load code on boot. config.eager_load = false # Show full error reports and disable caching. config.consider_all_requests_local = true config.action_controller.perform_caching = false # Don't care if the mailer can't send. config.action_mailer.raise_delivery_errors = true config.action_mailer.delivery_method = :test host = 'rails-tutorial-c9-gwarren.c9.io' config.action_mailer.default_url_options = {host: host} # Print deprecation notices to the Rails logger. config.active_support.deprecation = :log # Raise an error on page load if there are pending migrations. config.active_record.migration_error = :page_load # Debug mode disables concatenation and preprocessing of assets. # This option may cause significant delays in view rendering with a large # number of complex assets. config.assets.debug = true # Asset digests allow you to set far-future HTTP expiration dates on all assets, # yet still be able to expire them through the digest params. config.assets.digest = true # Adds additional error checking when serving assets at runtime. # Checks for improperly declared sprockets dependencies. # Raises helpful error messages. config.assets.raise_runtime_errors = true # Raises error for missing translations # config.action_view.raise_on_missing_translations = true end
38.822222
85
0.773898
ed55df8921ee136a8e68d794ec0a2022cd29fada
2,804
# frozen_string_literal: true require 'spec_helper' describe RuboCop::Cop::Lint::RequireParentheses do subject(:cop) { described_class.new } it 'registers an offense for missing parentheses around expression with ' \ '&& operator' do inspect_source(cop, ["if day.is? 'monday' && month == :jan", ' foo', 'end']) expect(cop.highlights).to eq(["day.is? 'monday' && month == :jan"]) expect(cop.messages) .to eq(['Use parentheses in the method call to avoid confusion about ' \ 'precedence.']) end it 'registers an offense for missing parentheses around expression with ' \ '|| operator' do inspect_source(cop, "day_is? 'tuesday' || true") expect(cop.highlights).to eq(["day_is? 'tuesday' || true"]) end it 'registers an offense for missing parentheses around expression in ' \ 'ternary' do inspect_source(cop, "wd.include? 'tuesday' && true == true ? a : b") expect(cop.highlights).to eq(["wd.include? 'tuesday' && true == true"]) end it 'accepts missing parentheses around expression with + operator' do inspect_source(cop, ["if day_is? 'tuesday' + rest", 'end']) expect(cop.offenses).to be_empty end it 'accepts method calls without parentheses followed by keyword and/or' do inspect_source(cop, ["if day.is? 'tuesday' and month == :jan", 'end', "if day.is? 'tuesday' or month == :jan", 'end']) expect(cop.offenses).to be_empty end it 'accepts method calls that are all operations' do inspect_source(cop, ['if current_level == max + 1', 'end']) expect(cop.offenses).to be_empty end it 'accepts condition that is not a call' do inspect_source(cop, ['if @debug', 'end']) expect(cop.offenses).to be_empty end it 'accepts parentheses around expression with boolean operator' do inspect_source(cop, ["if day.is?('tuesday' && true == true)", 'end']) expect(cop.offenses).to be_empty end it 'accepts method call with parentheses in ternary' do inspect_source(cop, "wd.include?('tuesday' && true == true) ? a : b") expect(cop.offenses).to be_empty end it 'accepts missing parentheses when method is not a predicate' do inspect_source(cop, "weekdays.foo 'tuesday' && true == true") expect(cop.offenses).to be_empty end it 'accepts calls to methods that are setters' do inspect_source(cop, 's.version = @version || ">= 1.8.5"') expect(cop.offenses).to be_empty end it 'accepts calls to methods that are operators' do inspect_source(cop, 'a[b || c]') expect(cop.offenses).to be_empty end end
33.783133
78
0.615906
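# A standalone illustration of the precedence confusion the RequireParentheses
# cop tested above guards against; the Day class and values are hypothetical,
# not taken from the spec.
class Day
  def is?(name)
    name == 'monday'
  end
end

day   = Day.new
month = :jan

# Without parentheses the entire boolean expression becomes the single argument:
day.is? 'monday' && month == :jan    # parsed as day.is?('monday' && month == :jan) => false
# With parentheses the intended comparison is unambiguous:
day.is?('monday') && month == :jan   # => true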
1d8587f65486926f127c38d2bef8eb7431e6f86f
2,376
module Seek module Ontologies module Controller module TypeHandler extend ActiveSupport::Concern included do before_action :find_ontology_type_class, only: [:show] before_action :find_and_authorize_assays, only: [:show] end def show prepare_show respond_to do |format| format.html format.xml end end private def prepare_show if !@type_class flash.now[:error] = "Unrecognised #{type_text}" elsif invalid_label? flash.now[:notice] = "Undefined #{type_text} with label <b> #{h(params[:label])} </b>. Did you mean #{link_to_alternative}?".html_safe @type_class = nil end end def find_ontology_type_class uri = params[:uri] || ontology_readers.first.default_parent_class_uri.to_s suggested_scheme = "suggested_#{controller_name.singularize}:" @type_class = if uri.start_with?(suggested_scheme) suggested_type_class.find(uri.gsub(suggested_scheme, '')) else ontology_readers.map do |ontology_reader| @type_class || ontology_reader.class_hierarchy.hash_by_uri[uri] end.compact.first end end def find_and_authorize_assays @assays = [] return unless @type_class @assays = @type_class.assays.authorized_for('view') end def invalid_label? !params[:label].blank? && !params[:label].casecmp(@type_class.label.downcase).zero? end def link_to_alternative path = eval("#{controller_name}_path(:uri=>@type_class.uri, :label=> @type_class.label)") view_context.link_to(@type_class.label, path, style: 'font-style:italic;font-weight:bold;') end ##### dynamic fields and attributes determined by the controller name ##### def type_text controller_name.singularize.humanize.downcase end def assay_uri_field "#{controller_name.singularize}_uri".to_sym end def suggested_type_class "suggested_#{controller_name.singularize}".camelize.constantize end end end end end
32.108108
146
0.5867
6a3c253041cc03f98fee40c514a1adc81725d489
397
cask 'trilium-notes' do version '0.42.3' sha256 'c7ba978f4debd0fb24f0197d312d4def0c9683452272fe9055875478f757a147' url "https://github.com/zadam/trilium/releases/download/v#{version}/trilium-mac-x64-#{version}.zip" appcast 'https://github.com/zadam/trilium/releases.atom' name 'Trilium Notes' homepage 'https://github.com/zadam/trilium' app 'trilium-mac-x64/Trilium Notes.app' end
33.083333
101
0.765743
26258c9b57ecff81d947766bb32f7bc0fee10be2
331
class AddOptionsToLabbook < ActiveRecord::Migration def change add_column :embeddable_labbooks, :action_type, :integer, null: false, default: 0 add_column :embeddable_labbooks, :name, :string add_column :embeddable_labbooks, :prompt, :text add_column :embeddable_labbooks, :custom_action_label, :string end end
36.777778
84
0.773414
0195cfc2edf75ddfcb7733f303f7ea70f4862672
4,153
class AssignmentsController < ApplicationController before_action :set_assignment, only: [:show, :edit, :update, :destroy] before_action :set_assignable, only: [:index, :new, :create, :end, :checkin] before_action :redirect_if_already_assigned, only: [:new, :create] # GET /assignments/:asset_type/:asset_id # GET /assignments/:asset_type/:asset_id.json def index @assignments = Assignment.where(assignable_type: params[:asset_type].capitalize, assignable_id: params[:asset_id]) end # GET /assignments/:id # GET /assignments/:id.json def show end # GET /checkout/:asset_type/:asset_id def new @assignment = Assignment.new render "#{params[:asset_type].downcase.pluralize}/checkout" end # GET /checkin/:asset_type/:asset_id def end @assignment = @assignable.assignment end # GET /assignments/:id/edit def edit @assignable = @assignment.assignable end # POST /assignments/:asset_type/:asset_id # POST /assignments/:asset_type/:asset_id.json def create assign_toable = find_assign_toable(assign_toable_type: assignment_params[:assign_toable_type], assign_toable_id: assignment_params[:assign_toable_id]) respond_to do |format| if @assignable.assign_to(assign_toable, assignment_params) format.html { redirect_to @assignable, notice: 'Assignment was successfully created.' } format.json { render :show, status: :created, location: @assignable } else format.html { render :new, status: :unprocessable_entity } format.json { render json: @assignable.errors, status: :unprocessable_entity } end end end # PATCH/PUT /assignments/:id # PATCH/PUT /assignments/:id.json def update respond_to do |format| if @assignment.update(assignment_params) format.html { redirect_to @assignment, notice: 'Assignment was successfully updated.' } format.json { render :show, status: :ok, location: @assignment } else format.html { render :edit, status: :unprocessable_entity } format.json { render json: @assignment.errors, status: :unprocessable_entity } end end end # PATCH/PUT /assignments/checkin/:asset_type/:asset_id # PATCH/PUT /assignments/checkin/:asset_type/:asset_id.json def checkin asset_class = params[:asset_type].downcase respond_to do |format| if @assignment = @assignable.unassign(returned_at: params[:returned_at], name: params[:assignment][asset_class][:name]) format.html { redirect_to @assignable, notice: "#{params[:asset_type].capitalize} has been checked in" } format.json { render :show, status: :ok, location: @assignable } else format.html { render :end, status: :unprocessable_entity } format.json { render json: @assignment.errors, status: :unprocessable_entity } end end end # DELETE /assignments/:id # DELETE /assignments/:id.json def destroy @assignment.destroy respond_to do |format| format.html { redirect_to assignments_url, notice: 'Assignment was successfully destroyed.' 
} format.json { head :no_content } end end private def set_assignment @assignment = Assignment.find(params[:id]) end def set_assignable asset_class = params[:asset_type].capitalize.constantize raise "\"#{asset_class.name}\" is not an assignable asset type" unless asset_class.include?(Assignable) @assignable = asset_class.find(params[:asset_id]) end def redirect_if_already_assigned if @assignable.respond_to?(:assigned_to) && @assignable&.assigned_to redirect_back fallback_location: @assignable, alert: "An asset can only have one active assignment" end end def assignment_params params.require(:assignment).permit(:assign_toable_id, :assign_toable_type, :assigned_at, :expected_at, :returned_at, :qty, :status, :notes, :active, item: [:name]) end def find_assignable(asset_type:, asset_id:) asset_type.capitalize.constantize.find(asset_id) end def find_assign_toable(assign_toable_type:, assign_toable_id:) assign_toable_type.capitalize.constantize.find(assign_toable_id) end end
34.89916
167
0.717794
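# A hypothetical config/routes.rb sketch inferred from the route comments in the
# AssignmentsController above; the application's actual routes file is not part
# of this record.
Rails.application.routes.draw do
  get   'assignments/:asset_type/:asset_id', to: 'assignments#index'
  get   'checkout/:asset_type/:asset_id',    to: 'assignments#new'
  post  'assignments/:asset_type/:asset_id', to: 'assignments#create'
  get   'checkin/:asset_type/:asset_id',     to: 'assignments#end'
  match 'assignments/checkin/:asset_type/:asset_id', to: 'assignments#checkin', via: [:patch, :put]
  resources :assignments, only: [:show, :edit, :update, :destroy]
end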
03d38ec78fdff1cce18734e1e4c151a80e98162f
358
# frozen_string_literal: true module Gitlab module Kubernetes module Helm HELM_VERSION = '2.11.0'.freeze KUBECTL_VERSION = '1.11.0'.freeze NAMESPACE = 'gitlab-managed-apps'.freeze SERVICE_ACCOUNT = 'tiller'.freeze CLUSTER_ROLE_BINDING = 'tiller-admin'.freeze CLUSTER_ROLE = 'cluster-admin'.freeze end end end
23.866667
50
0.684358
083cb296d5d2ad9e53270aa0ee7af9a57d1ecebe
1,386
# coding: utf-8 lib = File.expand_path("../lib", __FILE__) $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib) require "var_parser/version" Gem::Specification.new do |spec| spec.name = "var_parser" spec.version = VarParser::VERSION spec.authors = ["dinesh"] spec.email = ["[email protected]"] spec.summary = %q{Extract instance variables from ruby code} spec.description = %q{Extract instance variables from ruby code.} spec.homepage = "https://github.com/dtheetla/var_parser" spec.license = "MIT" # Prevent pushing this gem to RubyGems.org. To allow pushes either set the 'allowed_push_host' # to allow pushing to a single host or delete this section to allow pushing to any host. if spec.respond_to?(:metadata) spec.metadata["allowed_push_host"] = 'https://rubygems.org' else raise "RubyGems 2.0 or newer is required to protect against " \ "public gem pushes." end spec.files = `git ls-files -z`.split("\x0").reject do |f| f.match(%r{^(test|spec|features)/}) end spec.bindir = "exe" spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) } spec.require_paths = ["lib"] spec.add_development_dependency "bundler", "~> 1.15" spec.add_development_dependency "rake", "~> 10.0" spec.add_development_dependency "minitest", "~> 5.0" end
37.459459
96
0.668831
872daea1dcd0717c641a5c070925fb4ce90e7a8d
5,991
require File.expand_path '../xref_test_case', __FILE__ class TestRDocMarkupToHtmlCrossref < XrefTestCase def setup super @options.hyperlink_all = true @to = RDoc::Markup::ToHtmlCrossref.new @options, 'index.html', @c1 end def test_convert_CROSSREF result = @to.convert 'C1' assert_equal para("<a href=\"C1.html\">C1</a>"), result end def test_convert_CROSSREF_label result = @to.convert 'C1@foo' assert_equal para("<a href=\"C1.html#label-foo\">foo at C1</a>"), result result = @to.convert 'C1#m@foo' assert_equal para("<a href=\"C1.html#method-i-m-label-foo\">foo at C1#m</a>"), result end def test_convert_CROSSREF_label_period result = @to.convert 'C1@foo.' assert_equal para("<a href=\"C1.html#label-foo\">foo at C1</a>."), result end def test_convert_CROSSREF_label_space result = @to.convert 'C1@foo+bar' assert_equal para("<a href=\"C1.html#label-foo+bar\">foo bar at C1</a>"), result end def test_convert_CROSSREF_section @c1.add_section 'Section' result = @to.convert 'C1@Section' assert_equal para("<a href=\"C1.html#Section\">Section at C1</a>"), result end def test_convert_RDOCLINK_rdoc_ref result = @to.convert 'rdoc-ref:C1' assert_equal para("<a href=\"C1.html\">C1</a>"), result end def test_convert_RDOCLINK_rdoc_ref_method result = @to.convert 'rdoc-ref:C1#m' assert_equal para("<a href=\"C1.html#method-i-m\">#m</a>"), result end def test_convert_RDOCLINK_rdoc_ref_method_label result = @to.convert 'rdoc-ref:C1#m@foo' assert_equal para("<a href=\"C1.html#method-i-m-label-foo\">foo at C1#m</a>"), result, 'rdoc-ref:C1#m@foo' end def test_convert_RDOCLINK_rdoc_ref_method_percent m = @c1.add_method RDoc::AnyMethod.new nil, '%' m.singleton = false result = @to.convert 'rdoc-ref:C1#%' assert_equal para("<a href=\"C1.html#method-i-25\">#%</a>"), result m.singleton = true result = @to.convert 'rdoc-ref:C1::%' assert_equal para("<a href=\"C1.html#method-c-25\">::%</a>"), result end def test_convert_RDOCLINK_rdoc_ref_method_percent_label m = @c1.add_method RDoc::AnyMethod.new nil, '%' m.singleton = false result = @to.convert 'rdoc-ref:C1#%@f' assert_equal para("<a href=\"C1.html#method-i-25-label-f\">f at C1#%</a>"), result m.singleton = true result = @to.convert 'rdoc-ref:C1::%@f' assert_equal para("<a href=\"C1.html#method-c-25-label-f\">f at C1::%</a>"), result end def test_convert_RDOCLINK_rdoc_ref_label result = @to.convert 'rdoc-ref:C1@foo' assert_equal para("<a href=\"C1.html#label-foo\">foo at C1</a>"), result, 'rdoc-ref:C1@foo' end def test_gen_url assert_equal '<a href="C1.html">Some class</a>', @to.gen_url('rdoc-ref:C1', 'Some class') assert_equal '<a href="http://example">HTTP example</a>', @to.gen_url('http://example', 'HTTP example') end def test_handle_special_CROSSREF assert_equal "<a href=\"C2/C3.html\">C2::C3</a>", SPECIAL('C2::C3') end def test_handle_special_CROSSREF_label assert_equal "<a href=\"C1.html#method-i-m-label-foo\">foo at C1#m</a>", SPECIAL('C1#m@foo') end def test_handle_special_CROSSREF_show_hash_false @to.show_hash = false assert_equal "<a href=\"C1.html#method-i-m\">m</a>", SPECIAL('#m') end def test_handle_special_HYPERLINK_rdoc readme = @store.add_file 'README.txt' readme.parser = RDoc::Parser::Simple @to = RDoc::Markup::ToHtmlCrossref.new @options, 'C2.html', @c2 link = @to.handle_special_HYPERLINK hyper 'C2::C3' assert_equal '<a href="C2/C3.html">C2::C3</a>', link link = @to.handle_special_HYPERLINK hyper 'C4' assert_equal '<a href="C4.html">C4</a>', link link = @to.handle_special_HYPERLINK hyper 'README.txt' assert_equal '<a 
href="README_txt.html">README.txt</a>', link end def test_handle_special_TIDYLINK_rdoc readme = @store.add_file 'README.txt' readme.parser = RDoc::Parser::Simple @to = RDoc::Markup::ToHtmlCrossref.new @options, 'C2.html', @c2 link = @to.handle_special_TIDYLINK tidy 'C2::C3' assert_equal '<a href="C2/C3.html">tidy</a>', link link = @to.handle_special_TIDYLINK tidy 'C4' assert_equal '<a href="C4.html">tidy</a>', link link = @to.handle_special_TIDYLINK tidy 'C1#m' assert_equal '<a href="C1.html#method-i-m">tidy</a>', link link = @to.handle_special_TIDYLINK tidy 'README.txt' assert_equal '<a href="README_txt.html">tidy</a>', link end def test_handle_special_TIDYLINK_label link = @to.handle_special_TIDYLINK tidy 'C1#m@foo' assert_equal "<a href=\"C1.html#method-i-m-label-foo\">tidy</a>", link, 'C1#m@foo' end def test_to_html_CROSSREF_email @options.hyperlink_all = false @to = RDoc::Markup::ToHtmlCrossref.new @options, 'index.html', @c1 result = @to.to_html '[email protected]' assert_equal '[email protected]', result end def test_to_html_CROSSREF_email_hyperlink_all result = @to.to_html '[email protected]' assert_equal '[email protected]', result end def test_link assert_equal 'n', @to.link('n', 'n') assert_equal '<a href="C1.html#method-c-m">::m</a>', @to.link('m', 'm') end def test_link_class_method_full assert_equal '<a href="Parent.html#method-c-m">Parent.m</a>', @to.link('Parent::m', 'Parent::m') end def para text "\n<p>#{text}</p>\n" end def SPECIAL text @to.handle_special_CROSSREF special text end def hyper reference RDoc::Markup::Special.new 0, "rdoc-ref:#{reference}" end def special text RDoc::Markup::Special.new 0, text end def tidy reference RDoc::Markup::Special.new 0, "{tidy}[rdoc-ref:#{reference}]" end end
26.50885
82
0.646637
338c6ba0e299069f2a38978c3481b30487916195
4,000
require 'pathname' require 'uri' module Sass::Tree # A static node reprenting a CSS rule. # # @see Sass::Tree class RuleNode < Node # The character used to include the parent selector PARENT = '&' # The CSS selector for this rule, # interspersed with {Sass::Script::Node}s # representing `#{}`-interpolation. # Any adjacent strings will be merged together. # # @return [Array<String, Sass::Script::Node>] attr_accessor :rule # The CSS selector for this rule, # without any unresolved interpolation # but with parent references still intact. # It's only set once {Tree::Node#perform} has been called. # # @return [Selector::CommaSequence] attr_accessor :parsed_rules # The CSS selector for this rule, # without any unresolved interpolation or parent references. # It's only set once {Tree::Visitors::Cssize} has been run. # # @return [Selector::CommaSequence] attr_accessor :resolved_rules # How deep this rule is indented # relative to a base-level rule. # This is only greater than 0 in the case that: # # * This node is in a CSS tree # * The style is :nested # * This is a child rule of another rule # * The parent rule has properties, and thus will be rendered # # @return [Fixnum] attr_accessor :tabs # Whether or not this rule is the last rule in a nested group. # This is only set in a CSS tree. # # @return [Boolean] attr_accessor :group_end # @param rule [Array<String, Sass::Script::Node>] # The CSS rule. See \{#rule} def initialize(rule) merged = Sass::Util.merge_adjacent_strings(rule) @rule = Sass::Util.strip_string_array(merged) @tabs = 0 try_to_parse_non_interpolated_rules super() end # If we've precached the parsed selector, set the line on it, too. def line=(line) @parsed_rules.line = line if @parsed_rules super end # If we've precached the parsed selector, set the filename on it, too. def filename=(filename) @parsed_rules.filename = filename if @parsed_rules super end # Compares the contents of two rules. # # @param other [Object] The object to compare with # @return [Boolean] Whether or not this node and the other object # are the same def ==(other) self.class == other.class && rule == other.rule && super end # Adds another {RuleNode}'s rules to this one's. # # @param node [RuleNode] The other node def add_rules(node) @rule = Sass::Util.strip_string_array( Sass::Util.merge_adjacent_strings(@rule + ["\n"] + node.rule)) try_to_parse_non_interpolated_rules end # @return [Boolean] Whether or not this rule is continued on the next line def continued? last = @rule.last last.is_a?(String) && last[-1] == ?, end # Extends this Rule's selector with the given `extends`. # # @see Node#do_extend def do_extend(extends) node = dup node.resolved_rules = resolved_rules.do_extend(extends) node end # A hash that will be associated with this rule in the CSS document # if the {file:SASS_REFERENCE.md#debug_info-option `:debug_info` option} is enabled. # This data is used by e.g. [the FireSass Firebug extension](https://addons.mozilla.org/en-US/firefox/addon/103988). # # @return [{#to_s => #to_s}] def debug_info {:filename => filename && ("file://" + URI.escape(File.expand_path(filename))), :line => self.line} end private def try_to_parse_non_interpolated_rules if @rule.all? {|t| t.kind_of?(String)} # We don't use real filename/line info because we don't have it yet. # When we get it, we'll set it on the parsed rules if possible. parser = Sass::SCSS::StaticParser.new(@rule.join.strip, 1) @parsed_rules = parser.parse_selector('') rescue nil end end end end
30.769231
120
0.64675
fff34a91ca8f26508c57931bf8da85ccd0730fbd
870
cask "tabby" do arch = Hardware::CPU.intel? ? "x86_64" : "arm64" version "1.0.170" if Hardware::CPU.intel? sha256 "0db486e1a925d7157e528b68e7c150f3b44f629d15c71c100ab827c61eeb4b34" else sha256 "d937a3e7c4e5dc2878980533245a30cd1cf6cc4ed366d179e9aecb992f5956dd" end url "https://github.com/Eugeny/tabby/releases/download/v#{version}/tabby-#{version}-macos-#{arch}.zip", verified: "github.com/Eugeny/tabby/" name "Tabby" name "Terminus" desc "Terminal emulator, SSH and serial client" homepage "https://eugeny.github.io/tabby/" livecheck do url :url strategy :github_latest end app "Tabby.app" zap trash: [ "~/Library/Application Support/tabby", "~/Library/Preferences/org.tabby.helper.plist", "~/Library/Preferences/org.tabby.plist", "~/Library/Saved Application State/org.tabby.savedState", ] end
26.363636
105
0.713793
1cb564e1b969014cea985c4c74a1433dc73f1271
8,027
=begin #ORY Hydra #Welcome to the ORY Hydra HTTP API documentation. You will find documentation for all HTTP APIs here. The version of the OpenAPI document: v1.11.0 Generated by: https://openapi-generator.tech OpenAPI Generator version: 5.4.0 =end require 'date' require 'time' module OryHydraClient class RejectRequest # The error should follow the OAuth2 error format (e.g. `invalid_request`, `login_required`). Defaults to `request_denied`. attr_accessor :error # Debug contains information to help resolve the problem as a developer. Usually not exposed to the public but only in the server logs. attr_accessor :error_debug # Description of the error in a human readable format. attr_accessor :error_description # Hint to help resolve the error. attr_accessor :error_hint # Represents the HTTP status code of the error (e.g. 401 or 403) Defaults to 400 attr_accessor :status_code # Attribute mapping from ruby-style variable name to JSON key. def self.attribute_map { :'error' => :'error', :'error_debug' => :'error_debug', :'error_description' => :'error_description', :'error_hint' => :'error_hint', :'status_code' => :'status_code' } end # Returns all the JSON keys this model knows about def self.acceptable_attributes attribute_map.values end # Attribute type mapping. def self.openapi_types { :'error' => :'String', :'error_debug' => :'String', :'error_description' => :'String', :'error_hint' => :'String', :'status_code' => :'Integer' } end # List of attributes with nullable: true def self.openapi_nullable Set.new([ ]) end # Initializes the object # @param [Hash] attributes Model attributes in the form of hash def initialize(attributes = {}) if (!attributes.is_a?(Hash)) fail ArgumentError, "The input argument (attributes) must be a hash in `OryHydraClient::RejectRequest` initialize method" end # check to see if the attribute exists and convert string to symbol for hash key attributes = attributes.each_with_object({}) { |(k, v), h| if (!self.class.attribute_map.key?(k.to_sym)) fail ArgumentError, "`#{k}` is not a valid attribute in `OryHydraClient::RejectRequest`. Please check the name to make sure it's valid. List of attributes: " + self.class.attribute_map.keys.inspect end h[k.to_sym] = v } if attributes.key?(:'error') self.error = attributes[:'error'] end if attributes.key?(:'error_debug') self.error_debug = attributes[:'error_debug'] end if attributes.key?(:'error_description') self.error_description = attributes[:'error_description'] end if attributes.key?(:'error_hint') self.error_hint = attributes[:'error_hint'] end if attributes.key?(:'status_code') self.status_code = attributes[:'status_code'] end end # Show invalid properties with the reasons. Usually used together with valid? # @return Array for valid properties with the reasons def list_invalid_properties invalid_properties = Array.new invalid_properties end # Check to see if the all the properties in the model are valid # @return true if the model is valid def valid? true end # Checks equality by comparing each attribute. # @param [Object] Object to be compared def ==(o) return true if self.equal?(o) self.class == o.class && error == o.error && error_debug == o.error_debug && error_description == o.error_description && error_hint == o.error_hint && status_code == o.status_code end # @see the `==` method # @param [Object] Object to be compared def eql?(o) self == o end # Calculates hash code according to all attributes. 
# @return [Integer] Hash code def hash [error, error_debug, error_description, error_hint, status_code].hash end # Builds the object from hash # @param [Hash] attributes Model attributes in the form of hash # @return [Object] Returns the model itself def self.build_from_hash(attributes) new.build_from_hash(attributes) end # Builds the object from hash # @param [Hash] attributes Model attributes in the form of hash # @return [Object] Returns the model itself def build_from_hash(attributes) return nil unless attributes.is_a?(Hash) self.class.openapi_types.each_pair do |key, type| if attributes[self.class.attribute_map[key]].nil? && self.class.openapi_nullable.include?(key) self.send("#{key}=", nil) elsif type =~ /\AArray<(.*)>/i # check to ensure the input is an array given that the attribute # is documented as an array but the input is not if attributes[self.class.attribute_map[key]].is_a?(Array) self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) }) end elsif !attributes[self.class.attribute_map[key]].nil? self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]])) end end self end # Deserializes the data based on type # @param string type Data type # @param string value Value to be deserialized # @return [Object] Deserialized data def _deserialize(type, value) case type.to_sym when :Time Time.parse(value) when :Date Date.parse(value) when :String value.to_s when :Integer value.to_i when :Float value.to_f when :Boolean if value.to_s =~ /\A(true|t|yes|y|1)\z/i true else false end when :Object # generic object (usually a Hash), return directly value when /\AArray<(?<inner_type>.+)>\z/ inner_type = Regexp.last_match[:inner_type] value.map { |v| _deserialize(inner_type, v) } when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/ k_type = Regexp.last_match[:k_type] v_type = Regexp.last_match[:v_type] {}.tap do |hash| value.each do |k, v| hash[_deserialize(k_type, k)] = _deserialize(v_type, v) end end else # model # models (e.g. Pet) or oneOf klass = OryHydraClient.const_get(type) klass.respond_to?(:openapi_one_of) ? klass.build(value) : klass.build_from_hash(value) end end # Returns the string representation of the object # @return [String] String presentation of the object def to_s to_hash.to_s end # to_body is an alias to to_hash (backward compatibility) # @return [Hash] Returns the object in the form of hash def to_body to_hash end # Returns the object in the form of hash # @return [Hash] Returns the object in the form of hash def to_hash hash = {} self.class.attribute_map.each_pair do |attr, param| value = self.send(attr) if value.nil? is_nullable = self.class.openapi_nullable.include?(attr) next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) end hash[param] = _to_hash(value) end hash end # Outputs non-array value in the form of hash # For object, use to_hash. Otherwise, just return the value # @param [Object] value Any valid value # @return [Hash] Returns the value in the form of hash def _to_hash(value) if value.is_a?(Array) value.compact.map { |v| _to_hash(v) } elsif value.is_a?(Hash) {}.tap do |hash| value.each { |k, v| hash[k] = _to_hash(v) } end elsif value.respond_to? :to_hash value.to_hash else value end end end end
30.873077
207
0.628753
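# A minimal sketch of the generated RejectRequest model above in use, assuming
# the client library is already loaded; the attribute values are placeholders.
reject = OryHydraClient::RejectRequest.new(
  error:             'request_denied',
  error_description: 'The resource owner denied the request',
  status_code:       403
)

reject.valid?    # => true (this generated model declares no property constraints)
reject.to_hash   # => {:error=>"request_denied", :error_description=>"The resource owner denied the request", :status_code=>403}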
5d0123a795b7a1eb418499db70a38b67ded4d60d
1,367
class UsersController < ApplicationController before_action :logged_in_user, only: [:index, :edit, :update] before_action :correct_user, only: [:edit, :update] before_action :admin_user, only: [:destroy] def logged_in_user unless logged_in? store_location flash[:danger] = "please log in." redirect_to login_url end end def correct_user @user = User.find(params[:id]) redirect_to(root_url) unless @user == current_user end def admin_user redirect_to(root_url) unless current_user.admin? end def index @users = User.paginate(page: params[:page]) end def show @user = User.find(params[:id]) # debugger end def new @user = User.new end def create @user = User.new(user_params) if @user.save flash[:success] = "Welcome!" login @user redirect_to @user else render 'new' end end def edit @user = User.find(params[:id]) end def update @user = User.find(params[:id]) if @user.update_attributes(user_params) flash[:success] = "profile updated" redirect_to @user else render 'edit' end end def destroy User.find(params[:id]).destroy flash[:success] = "User deleted" redirect_to users_url end private def user_params params.require(:user).permit(:name, :email, :password, :password_confirmation) end end
17.0875
82
0.669349
4a1bde24319c0b70d72d6db790ba5c7d93889754
14,584
# Circonus v2 API Client Library for chef-solo # # Extremely loosely based on code by Adam Jacob # https://github.com/adamhjk/ruby-circonus/blob/master/lib/circonus.rb # # Author: Clinton Wolfe:: Clinton Wolfe (<[email protected]>) # Copyright:: Copyright (c) 2012 OmniTI, Inc. # License:: Apache License, Version 2.0 # # Original Author:: Adam Jacob (<[email protected]>) # Copyright:: Copyright (c) 2010 Opscode, Inc. # License:: Apache License, Version 2.0 # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. require 'json' require 'rest_client' require 'uri' require 'fileutils' if RUBY_VERSION =~ /^1\.8/ class Dir class << self def exists? (path) File.directory?(path) end alias_method :exist?, :exists? end end end module RestClient class Resource unless self.method_defined?(:brackets_orig) then alias :brackets_orig :"[]" def [](resource_name) brackets_orig(URI.escape(resource_name)) end end end end class Circonus VERSION = "0.2.0" APP_NAME = 'omniti_chef_cookbook' DEFAULT_CACHE_PATH = '/var/tmp/chef-circonus' DEFAULT_TIMEOUT = 30 attr_writer :api_token attr_reader :rest attr_writer :last_request_params attr_reader :options def initialize(api_token, opts_in={}) @api_token = api_token @options = opts_in options[:cache_path] ||= DEFAULT_CACHE_PATH options[:timeout] ||= DEFAULT_TIMEOUT options[:halt_on_error] = true if options[:halt_on_error].nil? unless Dir.exists?(options[:cache_path]) then Dir.mkdir(options[:cache_path]) end headers = { :x_circonus_auth_token => @api_token, :x_circonus_app_name => APP_NAME, :accept => 'application/json', } @rest = RestClient::Resource.new(options[:api_url], {:headers => headers, :timeout => options[:timeout], :open_timeout => options[:timeout]}) me_myself = self RestClient.add_before_execution_proc { |req, params| me_myself.last_request_params = params } end def all_string_values(old_hash) new_hash = Hash.new old_hash.each { |k,v| new_hash[k.to_s] = old_hash[k].to_s() } new_hash end #==================================================================================# # LOW-LEVEL METHDOS #==================================================================================# rw_resources = [ 'check_bundle', 'rule_set', 'graph', 'worksheet', 'template', 'contact_group', ] ro_resources = [ 'broker', 'account', 'user', ] def raise_or_warn(ex, blurb) message = blurb + make_exception_message(ex) if options[:halt_on_error] then raise message else if Object.const_defined?('Chef') chef_module = Object.const_get('Chef') chef_module.const_get('Log').send(:warn, message) else $stderr.puts "WARN: #{message}" end end end def bomb_shelter() attempts = 0 begin result = yield rescue RestClient::Unauthorized => ex raise_or_warn ex, "Circonus API error - HTTP 401 (API key missing or unauthorized)\nPro tip: you may not have added an API key under the node[:circonus][:api_token] attribute. 
Try visiting the Circonus web UI, clicking on your account, then API Tokens, obtaining a token, and adding it to the attributes for this node.\n If you've already obtained a key, make sure it is authorized within Circonus." rescue RestClient::Forbidden => ex raise_or_warn ex, "Circonus API error - HTTP 403 (not authorized)\nPro tip: You are accessing a resource you (or rather, your api token) aren't allowed to. Naughty!\n" rescue RestClient::ResourceNotFound => ex # Circonus nodes are eventually consistent. When creating a check and a rule, often the check won't exist yet, according to circonus. So we get a 404. Wait and retry. attempts = attempts + 1 if attempts < 3 then sleep 1 retry else raise_or_warn ex, "Circonus API error - HTTP 404 (no such resource)\nPro tip: We tried 3 times already, in case Circonus was syncing. Check the URL.\n" end rescue RestClient::BadRequest => ex # Check for out of metrics explanation = JSON.parse(ex.http_body) if explanation['message'] == 'Usage limit' then raise_or_warn ex, "Circonus API error - HTTP 400 (Usage Limit)\nPro tip: You are out of metrics!\n" else raise_or_warn ex, "Circonus API error - HTTP 400 (we made a bad request)\nPro tip: Circonus didn't like something about the request contents. It usually gives a detailed error message in the response body.\n" end rescue RestClient::InternalServerError => ex raise_or_warn ex, "Circonus API error - HTTP 500 (server's brain exploded)\n" rescue RestClient::RequestTimeout => ex raise_or_warn ex, "Circonus API error - HTTP Timeout. Current timeout is #{options[:timeout]}. You can adjust this setting using the node[:circonus][:timeout] setting.\n" end result end def make_exception_message(ex) message = "" message += " API token: " + (@last_request_params[:headers] ? @last_request_params[:headers][:x_circonus_auth_token].to_s : 'nil') + "\n" message += " URI: " + @last_request_params[:url].to_s + "\n" message += "HTTP Method: " + @last_request_params[:method].to_s.upcase + "\n" reqbod = @last_request_params[:payload].nil? ? '' : JSON.pretty_generate(JSON.parse(@last_request_params[:payload])) message += (reqbod.empty? ? '' : "Request body:\n" + reqbod + "\n\n") message += ((ex.http_body.nil? || ex.http_body.empty?) ? '' : "Response body:\n" + ex.http_body + "\n\n") # D-BUG # message += @last_request_params.inspect() message end #--------------- # List Methods - list_foos() - GET /v2/<resource> #--------------- [rw_resources, ro_resources].flatten.each do |resource_name| method_name = 'list_' + resource_name + 's' send :define_method, method_name do # TODO - one day maybe be able to take filtering args? 
bomb_shelter { JSON.parse(@rest[resource_name].get) } end end #--------------- # Get Methods - get_foo(id) - GET /v2/<resource>/id # Will escalate a 404 if not found #--------------- [rw_resources, ro_resources].flatten.each do |resource_name| method_name = 'get_' + resource_name send :define_method, method_name do |resource_id| bomb_shelter { JSON.parse(@rest[resource_name + '/' + resource_id.to_s].get) } end end #--------------- # Find Methods - find_foo(id) - GET /v2/<resource>/id # Will return nil if not found #--------------- [rw_resources, ro_resources].flatten.each do |resource_name| method_name = 'find_' + resource_name send :define_method, method_name do |resource_id| info = nil begin info = JSON.parse(@rest[resource_name + '/' + resource_id.to_s].get) rescue RestClient::ResourceNotFound => ex # Do nothing rescue Exception => ex # Kinda gross, but just hit it again to get error processing bomb_shelter { info = JSON.parse(@rest[resource_name + '/' + resource_id.to_s].get) } end return info end end #--------------- # Edit Methods - edit_foo(id,content_as_ruby_hash) - PUT /v2/<resource>/id #--------------- [rw_resources].flatten.each do |resource_name| method_name = 'edit_' + resource_name send :define_method, method_name do |resource_id, content| json_content = JSON.generate(content) bomb_shelter { JSON.parse(@rest[resource_name + '/' + resource_id.to_s].put(json_content)) } end end #--------------- # Create Methods - create_foo(content_as_ruby_hash) - POST /v2/<resource> #--------------- [rw_resources].flatten.each do |resource_name| method_name = 'create_' + resource_name send :define_method, method_name do |content| json_content = JSON.generate(content) bomb_shelter { JSON.parse(@rest[resource_name].post(json_content)) } end end #--------------- # Delete Methods - delete_foo(id) - DELETE /v2/<resource>/id #--------------- [rw_resources].flatten.each do |resource_name| method_name = 'delete_' + resource_name send :define_method, method_name do |resource_id| bomb_shelter { rv = @rest[resource_name + '/' + resource_id.to_s].delete if rv == '' then return {} else JSON.parse(rv) end } end end #--------------- # Cache Methods #--------------- def load_cache_file (which) if File.exists?(options[:cache_path] + '/' + which) then return JSON.parse(IO.read(options[:cache_path] + '/' + which)) else return {} end end def write_cache_file (which, data) File.open(options[:cache_path] + '/' + which, 'w') do |file| file.print(JSON.pretty_generate(data)) end end def clear_cache if File.exists?(options[:cache_path]) then FileUtils.rm_rf(options[:cache_path]) end Dir.mkdir(options[:cache_path]) end #==================================================================================# # MID-LEVEL METHODS #==================================================================================# def find_check_bundle_ids(target, type=nil, display_name=nil) unless type.nil? then type = type.to_s() end cache = load_cache_file('check_bundle_ids') hits = [] if cache.key?(target) then if type.nil? then hits = cache[target].values.flatten else hits = cache[target][type] || [] end end # If we have some hits, and a name was provided, check to see if any of them match the requested name if !display_name.nil? then hits = hits.select do |check_bundle_id| cb = get_check_bundle(check_bundle_id) cb['display_name'] == display_name end end if !hits.empty? 
then return hits end # Pessimism: if we ended up with 0 hits, go ahead and fetch the whole list # list_check_bundles is horrifyingly expensive # cache all IDS on that target and type, regardless of name matched_bundles = list_check_bundles.find_all do |bundle| match = bundle['target'] == target if match then cache[target] ||= {} cache[target][bundle['type']] ||= [] cache[target][bundle['type']] << bundle['_cid'].gsub('/check_bundle/', '') cache[target][bundle['type']].uniq! end if match && !type.nil? then match = bundle['type'] == type end match end write_cache_file('check_bundle_ids', cache) if !display_name.nil? then matched_bundles = matched_bundles.select do |cb| cb['display_name'] == display_name end end matched_ids = matched_bundles.map { |bundle| bundle['_cid'].gsub('/check_bundle/', '') } end def find_broker_id(name) cache = load_cache_file('brokers') if cache.key?(name) then return cache[name] end # If no name in cache file, assume a miss matched_brokers = list_brokers.find_all do |broker| cache[broker['_name']] = broker['_cid'].gsub('/broker/', '') match = broker['_name'] == name match end write_cache_file('brokers', cache) if matched_brokers.empty? then return nil else return matched_brokers[0]['_cid'].gsub('/broker/', '') end end def find_check_id(check_bundle_id, broker_name) broker_id = find_broker_id(broker_name) cache = load_cache_file('check_ids') if cache[check_bundle_id] && cache[check_bundle_id][broker_id] then return cache[check_bundle_id][broker_id] end check_bundle = get_check_bundle(check_bundle_id) # TODO - BAD ASSUMPTION # Assume that the check indexes match the broker indexes found_idx = nil check_bundle['brokers'].each_with_index do | broker_path, idx| if broker_path == '/broker/' + broker_id then found_idx = idx end end if found_idx.nil? then raise "Could not find broker #{broker_name} (id #{broker_id}) on check bundle ID #{check_bundle_id}" end check_id = check_bundle['_checks'][found_idx].gsub('/check/', '') cache[check_bundle_id] ||= {} cache[check_bundle_id][broker_id] = check_id write_cache_file('check_ids', cache) check_id end def find_contact_group_id(name) cache = load_cache_file('contact_groups') if cache.key?(name) then return cache[name] end # If no name in cache file, assume a miss matched_contact_groups = list_contact_groups.find_all do |contact_group| cache[contact_group['name']] = contact_group['_cid'].gsub('/contact_group/', '') match = contact_group['name'] == name match end write_cache_file('contact_groups', cache) if matched_contact_groups.empty? then return nil else return matched_contact_groups[0]['_cid'].gsub('/contact_group/', '') end end def find_graph_ids(title) cache = load_cache_file('graphs') if cache.key?(title) then return cache[title] end # If no title in cache file, assume a miss matched_graphs = list_graphs.find_all do |graph| match = graph['title'] == title # Only cache on a match? if match then cache[graph['title']] ||= [] id = graph['_cid'].gsub('/graph/', '') unless cache[graph['title']].member?(id) then cache[graph['title']] << id end end match end write_cache_file('graphs', cache) matched_graph_ids = matched_graphs.map { |bundle| bundle['_cid'].gsub('/graph/', '') } end end
29.763265
406
0.61588
ff02f5588d17fc62ab30974f6955032bf7d59088
27,566
module API module Entities class UserSafe < Grape::Entity expose :name, :username end class UserBasic < UserSafe expose :id, :state, :avatar_url expose :web_url do |user, options| Gitlab::Routing.url_helpers.user_url(user) end end class User < UserBasic expose :created_at expose :admin?, as: :is_admin expose :bio, :location, :skype, :linkedin, :twitter, :website_url, :organization end class UserActivity < Grape::Entity expose :username expose :last_activity_on expose :last_activity_on, as: :last_activity_at # Back-compat end class Identity < Grape::Entity expose :provider, :extern_uid end class UserPublic < User expose :last_sign_in_at expose :confirmed_at expose :last_activity_on expose :email expose :color_scheme_id, :projects_limit, :current_sign_in_at expose :identities, using: Entities::Identity expose :can_create_group?, as: :can_create_group expose :can_create_project?, as: :can_create_project expose :two_factor_enabled?, as: :two_factor_enabled expose :external end class UserWithPrivateToken < UserPublic expose :private_token end class Email < Grape::Entity expose :id, :email end class Hook < Grape::Entity expose :id, :url, :created_at, :push_events, :tag_push_events expose :enable_ssl_verification end class ProjectHook < Hook expose :project_id, :issues_events, :merge_requests_events expose :note_events, :pipeline_events, :wiki_page_events expose :build_events, as: :job_events end class ProjectPushRule < Grape::Entity expose :id, :project_id, :created_at expose :commit_message_regex, :deny_delete_tag expose :member_check, :prevent_secrets, :author_email_regex expose :file_name_regex, :max_file_size end class BasicProjectDetails < Grape::Entity expose :id expose :http_url_to_repo, :web_url expose :name, :name_with_namespace expose :path, :path_with_namespace end class SharedGroup < Grape::Entity expose :group_id expose :group_name do |group_link, options| group_link.group.name end expose :group_access, as: :group_access_level end class Project < Grape::Entity expose :id, :description, :default_branch, :tag_list expose :archived?, as: :archived expose :visibility, :ssh_url_to_repo, :http_url_to_repo, :web_url expose :owner, using: Entities::UserBasic, unless: ->(project, options) { project.group } expose :name, :name_with_namespace expose :path, :path_with_namespace expose :container_registry_enabled # Expose old field names with the new permissions methods to keep API compatible expose(:issues_enabled) { |project, options| project.feature_available?(:issues, options[:current_user]) } expose(:merge_requests_enabled) { |project, options| project.feature_available?(:merge_requests, options[:current_user]) } expose(:wiki_enabled) { |project, options| project.feature_available?(:wiki, options[:current_user]) } expose(:jobs_enabled) { |project, options| project.feature_available?(:builds, options[:current_user]) } expose(:snippets_enabled) { |project, options| project.feature_available?(:snippets, options[:current_user]) } expose :created_at, :last_activity_at expose :shared_runners_enabled expose :lfs_enabled?, as: :lfs_enabled expose :creator_id expose :namespace, using: 'API::Entities::Namespace' expose :forked_from_project, using: Entities::BasicProjectDetails, if: lambda{ |project, options| project.forked? } expose :avatar_url expose :star_count, :forks_count expose :open_issues_count, if: lambda { |project, options| project.feature_available?(:issues, options[:current_user]) && project.default_issues_tracker? 
} expose :runners_token, if: lambda { |_project, options| options[:user_can_admin_project] } expose :public_builds, as: :public_jobs expose :shared_with_groups do |project, options| SharedGroup.represent(project.project_group_links.all, options) end expose :only_allow_merge_if_pipeline_succeeds expose :repository_storage, if: lambda { |_project, options| options[:current_user].try(:admin?) } expose :request_access_enabled expose :only_allow_merge_if_all_discussions_are_resolved expose :approvals_before_merge expose :statistics, using: 'API::Entities::ProjectStatistics', if: :statistics end class ProjectStatistics < Grape::Entity expose :commit_count expose :storage_size expose :repository_size expose :lfs_objects_size expose :build_artifacts_size, as: :job_artifacts_size end class Member < UserBasic expose :access_level do |user, options| member = options[:member] || options[:source].members.find_by(user_id: user.id) member.access_level end expose :expires_at do |user, options| member = options[:member] || options[:source].members.find_by(user_id: user.id) member.expires_at end end class AccessRequester < UserBasic expose :requested_at do |user, options| access_requester = options[:access_requester] || options[:source].requesters.find_by(user_id: user.id) access_requester.requested_at end end class LdapGroupLink < Grape::Entity expose :cn, :group_access, :provider end class Group < Grape::Entity expose :id, :name, :path, :description, :visibility ## EE-only expose :ldap_cn, :ldap_access expose :ldap_group_links, using: Entities::LdapGroupLink, if: lambda { |group, options| group.ldap_group_links.any? } ## EE-only expose :lfs_enabled?, as: :lfs_enabled expose :avatar_url expose :web_url expose :request_access_enabled expose :full_name, :full_path expose :parent_id expose :statistics, if: :statistics do with_options format_with: -> (value) { value.to_i } do expose :storage_size expose :repository_size expose :lfs_objects_size expose :build_artifacts_size, as: :job_artifacts_size end end end class GroupDetail < Group expose :projects, using: Entities::Project expose :shared_projects, using: Entities::Project end class RepoCommit < Grape::Entity expose :id, :short_id, :title, :created_at expose :parent_ids expose :safe_message, as: :message expose :author_name, :author_email, :authored_date expose :committer_name, :committer_email, :committed_date end class RepoCommitStats < Grape::Entity expose :additions, :deletions, :total end class RepoCommitDetail < RepoCommit expose :stats, using: Entities::RepoCommitStats expose :status end class RepoBranch < Grape::Entity expose :name expose :commit, using: Entities::RepoCommit do |repo_branch, options| options[:project].repository.commit(repo_branch.dereferenced_target) end expose :merged do |repo_branch, options| options[:project].repository.merged_to_root_ref?(repo_branch.name) end expose :protected do |repo_branch, options| ProtectedBranch.protected?(options[:project], repo_branch.name) end expose :developers_can_push do |repo_branch, options| options[:project].protected_branches.developers_can?(:push, repo_branch.name) end expose :developers_can_merge do |repo_branch, options| options[:project].protected_branches.developers_can?(:merge, repo_branch.name) end end class RepoTreeObject < Grape::Entity expose :id, :name, :type, :path expose :mode do |obj, options| filemode = obj.mode filemode = "0" + filemode if filemode.length < 6 filemode end end class ProjectSnippet < Grape::Entity expose :id, :title, :file_name expose :author, using: 
Entities::UserBasic expose :updated_at, :created_at expose :web_url do |snippet, options| Gitlab::UrlBuilder.build(snippet) end end class PersonalSnippet < Grape::Entity expose :id, :title, :file_name expose :author, using: Entities::UserBasic expose :updated_at, :created_at expose :web_url do |snippet| Gitlab::UrlBuilder.build(snippet) end expose :raw_url do |snippet| Gitlab::UrlBuilder.build(snippet) + "/raw" end end class ProjectEntity < Grape::Entity expose :id, :iid expose(:project_id) { |entity| entity.project.id } expose :title, :description expose :state, :created_at, :updated_at end class RepoDiff < Grape::Entity expose :old_path, :new_path, :a_mode, :b_mode, :diff expose :new_file, :renamed_file, :deleted_file end class Milestone < ProjectEntity expose :due_date expose :start_date end class IssueBasic < ProjectEntity expose :label_names, as: :labels expose :milestone, using: Entities::Milestone expose :assignee, :author, using: Entities::UserBasic expose :user_notes_count expose :upvotes, :downvotes expose :due_date expose :confidential expose :weight expose :web_url do |issue, options| Gitlab::UrlBuilder.build(issue) end end class Issue < IssueBasic expose :subscribed do |issue, options| issue.subscribed?(options[:current_user], options[:project] || issue.project) end end class IssuableTimeStats < Grape::Entity expose :time_estimate expose :total_time_spent expose :human_time_estimate expose :human_total_time_spent end class ExternalIssue < Grape::Entity expose :title expose :id end class MergeRequestBasic < ProjectEntity expose :target_branch, :source_branch expose :upvotes, :downvotes expose :author, :assignee, using: Entities::UserBasic expose :source_project_id, :target_project_id expose :label_names, as: :labels expose :work_in_progress?, as: :work_in_progress expose :milestone, using: Entities::Milestone expose :merge_when_pipeline_succeeds expose :merge_status expose :diff_head_sha, as: :sha expose :merge_commit_sha expose :user_notes_count expose :approvals_before_merge expose :should_remove_source_branch?, as: :should_remove_source_branch expose :force_remove_source_branch?, as: :force_remove_source_branch expose :squash expose :web_url do |merge_request, options| Gitlab::UrlBuilder.build(merge_request) end end class MergeRequest < MergeRequestBasic expose :subscribed do |merge_request, options| merge_request.subscribed?(options[:current_user], options[:project]) end end class MergeRequestChanges < MergeRequest expose :diffs, as: :changes, using: Entities::RepoDiff do |compare, _| compare.raw_diffs(all_diffs: true).to_a end end class Approvals < Grape::Entity expose :user, using: Entities::UserBasic end class MergeRequestApprovals < ProjectEntity expose :merge_status expose :approvals_required expose :approvals_left expose :approvals, as: :approved_by, using: Entities::Approvals expose :approvers_left, as: :suggested_approvers, using: Entities::UserBasic expose :user_has_approved do |merge_request, options| merge_request.has_approved?(options[:current_user]) end expose :user_can_approve do |merge_request, options| merge_request.can_approve?(options[:current_user]) end end class MergeRequestDiff < Grape::Entity expose :id, :head_commit_sha, :base_commit_sha, :start_commit_sha, :created_at, :merge_request_id, :state, :real_size end class MergeRequestDiffFull < MergeRequestDiff expose :commits, using: Entities::RepoCommit expose :diffs, using: Entities::RepoDiff do |compare, _| compare.raw_diffs(all_diffs: true).to_a end end class SSHKey < Grape::Entity expose :id, :title, 
:key, :created_at, :can_push end class SSHKeyWithUser < SSHKey expose :user, using: Entities::UserPublic end class Note < Grape::Entity expose :id expose :note, as: :body expose :attachment_identifier, as: :attachment expose :author, using: Entities::UserBasic expose :created_at, :updated_at expose :system?, as: :system expose :noteable_id, :noteable_type end class AwardEmoji < Grape::Entity expose :id expose :name expose :user, using: Entities::UserBasic expose :created_at, :updated_at expose :awardable_id, :awardable_type end class MRNote < Grape::Entity expose :note expose :author, using: Entities::UserBasic end class CommitNote < Grape::Entity expose :note expose(:path) { |note| note.diff_file.try(:file_path) if note.diff_note? } expose(:line) { |note| note.diff_line.try(:new_line) if note.diff_note? } expose(:line_type) { |note| note.diff_line.try(:type) if note.diff_note? } expose :author, using: Entities::UserBasic expose :created_at end class CommitStatus < Grape::Entity expose :id, :sha, :ref, :status, :name, :target_url, :description, :created_at, :started_at, :finished_at, :allow_failure, :coverage expose :author, using: Entities::UserBasic end class Event < Grape::Entity expose :title, :project_id, :action_name expose :target_id, :target_type, :author_id expose :data, :target_title expose :created_at expose :note, using: Entities::Note, if: ->(event, options) { event.note? } expose :author, using: Entities::UserBasic, if: ->(event, options) { event.author } expose :author_username do |event, options| event.author&.username end end class LdapGroup < Grape::Entity expose :cn end class ProjectGroupLink < Grape::Entity expose :id, :project_id, :group_id, :group_access, :expires_at end class Todo < Grape::Entity expose :id expose :project, using: Entities::BasicProjectDetails expose :author, using: Entities::UserBasic expose :action_name expose :target_type expose :target do |todo, options| target = todo.target_type == 'Commit' ? 'RepoCommit' : todo.target_type Entities.const_get(target).represent(todo.target, options) end expose :target_url do |todo, options| target_type = todo.target_type.underscore target_url = "namespace_project_#{target_type}_url" target_anchor = "note_#{todo.note_id}" if todo.note_id? Gitlab::Application.routes.url_helpers.public_send(target_url, todo.project.namespace, todo.project, todo.target, anchor: target_anchor) end expose :body expose :state expose :created_at end class Namespace < Grape::Entity expose :id, :name, :path, :kind, :full_path end class MemberAccess < Grape::Entity expose :access_level expose :notification_level do |member, options| if member.notification_setting ::NotificationSetting.levels[member.notification_setting.level] end end end class ProjectAccess < MemberAccess end class GroupAccess < MemberAccess end class NotificationSetting < Grape::Entity expose :level expose :events, if: ->(notification_setting, _) { notification_setting.custom? } do ::NotificationSetting::EMAIL_EVENTS.each do |event| expose event end end end class GlobalNotificationSetting < NotificationSetting expose :notification_email do |notification_setting, options| notification_setting.user.notification_email end end class ProjectService < Grape::Entity expose :id, :title, :created_at, :updated_at, :active expose :push_events, :issues_events, :merge_requests_events expose :tag_push_events, :note_events, :pipeline_events expose :build_events, as: :job_events # Expose serialized properties expose :properties do |service, options| field_names = service.fields. 
select { |field| options[:include_passwords] || field[:type] != 'password' }. map { |field| field[:name] } service.properties.slice(*field_names) end end class ProjectWithAccess < Project expose :permissions do expose :project_access, using: Entities::ProjectAccess do |project, options| project.project_members.find_by(user_id: options[:current_user].id) end expose :group_access, using: Entities::GroupAccess do |project, options| if project.group project.group.group_members.find_by(user_id: options[:current_user].id) end end end end class LabelBasic < Grape::Entity expose :id, :name, :color, :description end class Label < LabelBasic expose :open_issues_count do |label, options| label.open_issues_count(options[:current_user]) end expose :closed_issues_count do |label, options| label.closed_issues_count(options[:current_user]) end expose :open_merge_requests_count do |label, options| label.open_merge_requests_count(options[:current_user]) end expose :priority do |label, options| label.priority(options[:project]) end expose :subscribed do |label, options| label.subscribed?(options[:current_user], options[:project]) end end class List < Grape::Entity expose :id expose :label, using: Entities::LabelBasic expose :position end class Board < Grape::Entity expose :id expose :name expose :project, using: Entities::BasicProjectDetails expose :milestone expose :lists, using: Entities::List do |board| board.lists.destroyable end end class Compare < Grape::Entity expose :commit, using: Entities::RepoCommit do |compare, options| Commit.decorate(compare.commits, nil).last end expose :commits, using: Entities::RepoCommit do |compare, options| Commit.decorate(compare.commits, nil) end expose :diffs, using: Entities::RepoDiff do |compare, options| compare.diffs(all_diffs: true).to_a end expose :compare_timeout do |compare, options| compare.diffs.overflow? 
end expose :same, as: :compare_same_ref end class Contributor < Grape::Entity expose :name, :email, :commits, :additions, :deletions end class BroadcastMessage < Grape::Entity expose :message, :starts_at, :ends_at, :color, :font end class ApplicationSetting < Grape::Entity expose :id expose :default_projects_limit expose :signup_enabled expose :signin_enabled expose :gravatar_enabled expose :sign_in_text expose :after_sign_up_text expose :created_at expose :updated_at expose :home_page_url expose :default_branch_protection expose(:restricted_visibility_levels) do |setting, _options| setting.restricted_visibility_levels.map { |level| Gitlab::VisibilityLevel.string_level(level) } end expose :max_attachment_size expose :session_expire_delay expose(:default_project_visibility) { |setting, _options| Gitlab::VisibilityLevel.string_level(setting.default_project_visibility) } expose(:default_snippet_visibility) { |setting, _options| Gitlab::VisibilityLevel.string_level(setting.default_snippet_visibility) } expose(:default_group_visibility) { |setting, _options| Gitlab::VisibilityLevel.string_level(setting.default_group_visibility) } expose :default_artifacts_expire_in expose :domain_whitelist expose :domain_blacklist_enabled expose :domain_blacklist expose :user_oauth_applications expose :after_sign_out_path expose :container_registry_token_expire_delay expose :repository_storages expose :koding_enabled expose :koding_url expose :plantuml_enabled expose :plantuml_url expose :terminal_max_session_time expose :polling_interval_multiplier end class Release < Grape::Entity expose :tag, as: :tag_name expose :description end class RepoTag < Grape::Entity expose :name, :message expose :commit do |repo_tag, options| options[:project].repository.commit(repo_tag.dereferenced_target) end expose :release, using: Entities::Release do |repo_tag, options| options[:project].releases.find_by(tag: repo_tag.name) end end class License < Grape::Entity expose :starts_at, :expires_at, :licensee, :add_ons expose :user_limit do |license, options| license.restricted?(:active_user_count) ? license.restrictions[:active_user_count] : 0 end expose :active_users do |license, options| ::User.active.count end end class TriggerRequest < Grape::Entity expose :id, :variables end class Runner < Grape::Entity expose :id expose :description expose :active expose :is_shared expose :name end class RunnerDetails < Runner expose :tag_list expose :run_untagged expose :locked expose :version, :revision, :platform, :architecture expose :contacted_at expose :token, if: lambda { |runner, options| options[:current_user].admin? || !runner.is_shared? } expose :projects, with: Entities::BasicProjectDetails do |runner, options| if options[:current_user].admin? runner.projects else options[:current_user].authorized_projects.where(id: runner.projects) end end end class RunnerRegistrationDetails < Grape::Entity expose :id, :token end class JobArtifactFile < Grape::Entity expose :filename, :size end class PipelineBasic < Grape::Entity expose :id, :sha, :ref, :status end class Job < Grape::Entity expose :id, :status, :stage, :name, :ref, :tag, :coverage expose :created_at, :started_at, :finished_at expose :user, with: User expose :artifacts_file, using: JobArtifactFile, if: -> (job, opts) { job.artifacts? 
} expose :commit, with: RepoCommit expose :runner, with: Runner expose :pipeline, with: PipelineBasic end class Trigger < Grape::Entity expose :id expose :token, :description expose :created_at, :updated_at, :deleted_at, :last_used expose :owner, using: Entities::UserBasic end class Variable < Grape::Entity expose :key, :value end class Pipeline < PipelineBasic expose :before_sha, :tag, :yaml_errors expose :user, with: Entities::UserBasic expose :created_at, :updated_at, :started_at, :finished_at, :committed_at expose :duration expose :coverage end class EnvironmentBasic < Grape::Entity expose :id, :name, :slug, :external_url end class Environment < EnvironmentBasic expose :project, using: Entities::BasicProjectDetails end class Deployment < Grape::Entity expose :id, :iid, :ref, :sha, :created_at expose :user, using: Entities::UserBasic expose :environment, using: Entities::EnvironmentBasic expose :deployable, using: Entities::Job end class RepoLicense < Grape::Entity expose :key, :name, :nickname expose :featured, as: :popular expose :url, as: :html_url expose(:source_url) { |license| license.meta['source'] } expose(:description) { |license| license.meta['description'] } expose(:conditions) { |license| license.meta['conditions'] } expose(:permissions) { |license| license.meta['permissions'] } expose(:limitations) { |license| license.meta['limitations'] } expose :content end class TemplatesList < Grape::Entity expose :name end class Template < Grape::Entity expose :name, :content end class BroadcastMessage < Grape::Entity expose :id, :message, :starts_at, :ends_at, :color, :font expose :active?, as: :active end class GeoNodeStatus < Grape::Entity expose :id expose :health expose :healthy?, as: :healthy expose :repositories_count expose :repositories_synced_count expose :repositories_failed_count expose :lfs_objects_count expose :lfs_objects_synced_count expose :attachments_count expose :attachments_synced_count end class PersonalAccessToken < Grape::Entity expose :id, :name, :revoked, :created_at, :scopes expose :active?, as: :active expose :expires_at do |personal_access_token| personal_access_token.expires_at ? personal_access_token.expires_at.strftime("%Y-%m-%d") : nil end end class PersonalAccessTokenWithToken < PersonalAccessToken expose :token end class ImpersonationToken < PersonalAccessTokenWithToken expose :impersonation end module JobRequest class JobInfo < Grape::Entity expose :name, :stage expose :project_id, :project_name end class GitInfo < Grape::Entity expose :repo_url, :ref, :sha, :before_sha expose :ref_type do |model| if model.tag 'tag' else 'branch' end end end class RunnerInfo < Grape::Entity expose :timeout end class Step < Grape::Entity expose :name, :script, :timeout, :when, :allow_failure end class Image < Grape::Entity expose :name end class Artifacts < Grape::Entity expose :name, :untracked, :paths, :when, :expire_in end class Cache < Grape::Entity expose :key, :untracked, :paths end class Credentials < Grape::Entity expose :type, :url, :username, :password end class ArtifactFile < Grape::Entity expose :filename, :size end class Dependency < Grape::Entity expose :id, :name, :token expose :artifacts_file, using: ArtifactFile, if: ->(job, _) { job.artifacts? 
} end class Response < Grape::Entity expose :id expose :token expose :allow_git_fetch expose :job_info, using: JobInfo do |model| model end expose :git_info, using: GitInfo do |model| model end expose :runner_info, using: RunnerInfo do |model| model end expose :variables expose :steps, using: Step expose :image, using: Image expose :services, using: Image expose :artifacts, using: Artifacts expose :cache, using: Cache expose :credentials, using: Credentials expose :dependencies, using: Dependency end end end end
31.07779
161
0.666691
bb893157d54a767decab172224ad4498e6a3939a
2,265
# encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.

module Azure::MediaServices::Mgmt::V2019_05_01_preview
  module Models
    #
    # Class to specify configurations of PlayReady in Streaming Policy
    #
    class StreamingPolicyPlayReadyConfiguration

      include MsRestAzure

      # @return [String] Template for the URL of the custom service delivering
      # licenses to end user players. Not required when using Azure Media
      # Services for issuing licenses. The template supports replaceable
      # tokens that the service will update at runtime with the value specific
      # to the request. The currently supported token values are
      # {AlternativeMediaId}, which is replaced with the value of
      # StreamingLocatorId.AlternativeMediaId, and {ContentKeyId}, which is
      # replaced with the value of identifier of the key being requested.
      attr_accessor :custom_license_acquisition_url_template

      # @return [String] Custom attributes for PlayReady
      attr_accessor :play_ready_custom_attributes

      #
      # Mapper for StreamingPolicyPlayReadyConfiguration class as Ruby Hash.
      # This will be used for serialization/deserialization.
      #
      def self.mapper()
        {
          client_side_validation: true,
          required: false,
          serialized_name: 'StreamingPolicyPlayReadyConfiguration',
          type: {
            name: 'Composite',
            class_name: 'StreamingPolicyPlayReadyConfiguration',
            model_properties: {
              custom_license_acquisition_url_template: {
                client_side_validation: true,
                required: false,
                serialized_name: 'customLicenseAcquisitionUrlTemplate',
                type: {
                  name: 'String'
                }
              },
              play_ready_custom_attributes: {
                client_side_validation: true,
                required: false,
                serialized_name: 'playReadyCustomAttributes',
                type: {
                  name: 'String'
                }
              }
            }
          }
        }
      end
    end
  end
end
34.846154
78
0.631347
bbaff95c66d23c8d0e5c36e881c62750a358ced5
657
# rubocop:disable all
# File: config/initializers/static_router.rb
module ActionDispatch
  module Routing
    class StaticResponder < Endpoint
      attr_accessor :path, :file_handler

      def initialize(path)
        self.path = path
        self.file_handler = ActionDispatch::FileHandler.new(
          Rails.configuration.paths["public"].first
        )
      end

      def call(env)
        env["PATH_INFO"] = @file_handler.match?(path)
        @file_handler.call(env)
      end

      def inspect
        "static('#{path}')"
      end
    end

    class Mapper
      def static(path)
        StaticResponder.new(path)
      end
    end
  end
end
19.323529
60
0.613394
18b3828f5409663cca3934dca19fe69848a31746
877
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: DataEntry.proto

require 'google/protobuf'

Google::Protobuf::DescriptorPool.generated_pool.build do
  add_message "sharepubapi_v1.DataEntry" do
    optional :type, :enum, 1, "sharepubapi_v1.DataEntry.Type"
    optional :value, :bytes, 2
  end
  add_enum "sharepubapi_v1.DataEntry.Type" do
    value :UNDEFINED, 0
    value :INVOICE, 1
    value :PAYMENT_TRANSACTION, 2
    value :LOCATION, 3
    value :TRANSACTION, 4
    value :AGE_VERIFICATION_SECRET, 5
    value :THIRD_PARTY_ATTRIBUTE, 6
  end
end

module Yoti
  module Protobuf
    module Sharepubapi
      DataEntry = Google::Protobuf::DescriptorPool.generated_pool.lookup("sharepubapi_v1.DataEntry").msgclass
      DataEntry::Type = Google::Protobuf::DescriptorPool.generated_pool.lookup("sharepubapi_v1.DataEntry.Type").enummodule
    end
  end
end
29.233333
122
0.748005
6a31a1ec9fc0eb646c2eefa5a2ec538c103ea013
946
require 'addressable/uri'
require 'nokogiri'
require 'net/http'

module Imggrabber
  class HtmlParser
    def initialize(url)
      @uri = parse_url(url)
      @html = get_html(@uri)
    end

    def images
      @images ||= get_images_url_list(@html, @uri)
    end

    def uri
      @uri
    end

    private

    def parse_url(url)
      raise ArgumentError, 'Invalid URL' unless valid_url?(url)
      Addressable::URI.parse(url)
    end

    def valid_url?(url)
      url =~ /^(http|https):\/\/[a-z0-9]+([\-\.]{1}[a-z0-9]+)*\.[a-z]{2,5}(([0-9]{1,5})?\/.*)?$/ix
    end

    def get_html(uri)
      Net::HTTP.get(uri)
    end

    def get_images_url_list(html, uri)
      doc = Nokogiri::HTML(html)
      images = []
      doc.css('img').each do |img|
        images << to_url(img.attributes['src'], uri)
      end
      images
    end

    def to_url(src, uri)
      valid_url?(src) ? src : Addressable::URI.join(uri, src)
    end
  end
end
19.708333
98
0.566596
337a34d7ce5c02e8bf37651b7cd93f4a3db400e7
130
require 'coveralls'
Coveralls.wear!

$LOAD_PATH.unshift File.expand_path('../../lib', __FILE__)
require 'jsender'
require 'byebug'
21.666667
58
0.753846
61f314e9bff3a3540231f2941ac07bd728660faa
728
class SpeedUpResourcesPage < ActiveRecord::Migration[4.2]
  def change
    add_column :harvests, :nodes_count, :integer
    add_column :harvests, :identifiers_count, :integer
    add_column :harvests, :scientific_names_count, :integer
    add_column :resources, :root_nodes_count, :integer

    Harvest.find_each do |harvest|
      harvest.update_attributes(nodes_count: harvest.nodes.count,
                                identifiers_count: harvest.identifiers.count,
                                scientific_names_count: ScientificName.where(harvest_id: harvest.id).count)
    end

    Resource.find_each do |resource|
      resource.update_attribute(:root_nodes_count, resource.nodes.root.published.count)
    end
  end
end
42.823529
107
0.708791
1db154487a5dc4124e0eb8faffd41db459af9a30
1,397
require File.expand_path(File.dirname(__FILE__) + '/spec_helper')

describe 'SmsGlobal' do
  include SmsGlobal

  describe 'Sender' do
    before do
      @sender = Sender.new :user => 'DUMMY', :password => 'DUMMY'
    end

    it "requires :user and :password" do
      lambda { Sender.new }.should raise_error
    end

    it "sends SMS correctly" do
      stub_sms_ok
      resp = @sender.send_text('Lorem Ipsum', '12341324', '1234')
      resp[:status].should == :ok
      resp[:code].should == 0
      resp[:message].should == 'Sent queued message ID: 941596d028699601'
    end

    it "gracefully fails" do
      stub_sms_failed
      resp = @sender.send_text('Lorem Ipsum', '12341324', '1234')
      resp[:status].should == :error
      resp[:message].should == 'Missing parameter: from'
    end

    it "hits the right URL" do
      stub_request(:get, 'http://www.smsglobal.com/http-api.php?action=sendsms&from=5678&password=DUMMY&text=xyz&to=1234&user=DUMMY').to_return(:body => 'ERROR: Missing parameter: from')
      @sender.send_text('xyz', '1234', '5678')
    end

    it "gracefully fails on connection error" do
      stub_request(:get, /www.smsglobal.com.*/).to_return(:status => [500, "Internal Server Error"])
      resp = @sender.send_text('xyz', '1234', '5678')
      resp[:status].should == :failed
      resp[:message].should == "Unable to reach SMSGlobal"
    end
  end
end
32.488372
186
0.643522
bbd2cb3e5d9232cccb3c9eb97aa5d9bfe61acda7
127
class AddUserIdToCapsules < ActiveRecord::Migration[5.0]
  def change
    add_column :capsules, :user_id, :integer
  end
end
21.166667
56
0.740157
085a769a33da692cda2f0e3190e5c354aa5767cc
1,577
require 'rails_helper'

RSpec.describe ImportsHelper, type: :helper do
  let(:import) { build(:import) }

  describe "import_type_list" do
    let(:import_types) { [["Generic", "generic"]] }

    it { expect(helper.import_type_list).to eq import_types }
  end

  describe "status_icon_class_for" do
    classes = {
      'not_ready': 'glyphicon-question-sign',
      'ready': 'glyphicon-thumbs-up',
      'in_progress': 'glyphicon-time',
      'complete': 'glyphicon-ok',
      'reverting': 'glyphicon-backward',
      'final': 'glyphicon-lock'
    }

    classes.each do |status, klass|
      it "returns '#{klass}' for status '#{status}'" do
        import = build(:simple_import, status: status)
        expect(helper.status_icon_class_for(import)).to eq klass
      end
    end

    it "returns 'glyphicon-warning-sign' as a default status" do
      allow(import).to receive(:status).and_return('another')
      expect(helper.status_icon_class_for(import)).to eq 'glyphicon-warning-sign'
    end
  end

  describe "last_run_for" do
    context "an import that has been run" do
      let(:time) { Time.new(2016).utc }
      let(:import) { create(:simple_import) }
      let!(:record) { create(:imported_record, import: import, created_at: time) }

      it "shows the formatted last run time" do
        expect(helper.last_run_for(import)).to eq time.strftime('%-m/%-d/%Y %l:%M%P')
      end
    end

    context "an import that has never been run" do
      it "shows 'Never'" do
        expect(helper.last_run_for(import)).to eq "<em>Never</em>"
      end
    end
  end
end
30.326923
85
0.643627
330cb1347a9cd89ef78402615a498645c28da4d4
1,510
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "transbank/sdk/version"

Gem::Specification.new do |spec|
  spec.name = "transbank-sdk"
  spec.version = Transbank::Sdk::VERSION
  spec.authors = ["Transbank Developers"]
  spec.email = ["[email protected]"]

  spec.summary = %q{Transbank SDK for Ruby}
  spec.homepage = "https://www.transbankdevelopers.cl/"
  spec.license = "BSD-3-Clause"

  # Specify which files should be added to the gem when it is released.
  # The `git ls-files -z` loads the files in the RubyGem that have been added into git.
  spec.files = Dir.chdir(File.expand_path('..', __FILE__)) do
    `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
  end
  spec.bindir = "exe"
  spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ["lib"]

  spec.add_dependency "json", "~> 2.0"

  spec.add_development_dependency "bundler", "~> 2.1"
  spec.add_development_dependency "rake", "~> 12.3"
  spec.add_development_dependency "minitest", "~> 5.0"
  spec.add_development_dependency "rubocop", "~> 0.59"
  spec.add_development_dependency "pry", "~> 0.11"
  spec.add_development_dependency 'minitest-reporters', '~> 1.1'
  spec.add_development_dependency 'byebug', "~> 11.1"
  spec.add_development_dependency 'pry-byebug', "~> 3.9"
  spec.add_development_dependency 'webmock', "~> 3.12"
end
41.944444
87
0.670861
e2bbab7da8fb0eed19ef975ed7c3eb56ba9a2ab6
2,413
class UsersController < ApplicationController

  get "/users" do
    #Only employees can view the users page
    if logged_in? && current_user.kind!="user"
      @users = User.all.sort { |u1, u2| u1.full_name <=> u2.full_name }
      erb :'users/index'
    #If you aren't logged in as an admin, you can't view it
    else
      redirect "/error/lacking-privileges"
    end
  end

  get "/users/:username" do
    #Only employees can view the users page
    if logged_in? && current_user.kind!="user"
      #Finds the user
      @user = User.find_by(username: params[:username])
      if @user
        erb :'users/user'
      else
        redirect "/error/user-not-found"
      end
    #If you aren't logged in as an admin, you can't view it
    else
      redirect "/error/lacking-privileges"
    end
  end

  get "/users/:username/edit" do
    #Only employees can view the users page
    if logged_in? && current_user.kind!="user"
      #Finds the user
      @user = User.find_by(username: params[:username])
      @viewer = current_user
      if @user
        erb :'users/edit'
      else
        redirect "/error/user-not-found"
      end
    #If you aren't logged in as an admin, you can't view it
    else
      redirect "/error/lacking-privileges"
    end
  end

  patch "/users/:username" do
    #Finds the user
    @user = User.find_by(username: params[:username])
    if logged_in? && current_user.kind!="user"
      if @user
        #Then updates them
        @user.update(params)
        #Then returns to their user view page
        redirect "/users/#{@user.username}"
      else
        redirect "/error/user-not-found"
      end
    else
      redirect "/error/lacking-privileges"
    end
  end

  delete "/users/:username" do
    #Finds the user
    @user = User.find_by(username: params[:username])
    if logged_in? && current_user.kind!="user"
      if @user
        #Remove all of the appointments that involve the user
        @user_appointments = Appointment.all.filter { |appointment| appointment.provider_id == @user.id || appointment.user_id == @user.id }
        @user_appointments.each do |appointment|
          #Remove all of the services attached to those appointments
          @services = Service.all.filter { |service| service.appointment_id == appointment.id }
          @services.each do |service|
            service.delete
          end
          appointment.delete
        end
        #Finally, remove the user.
        @user.delete
        redirect "/users"
      else
        redirect "/error/user-not-found"
      end
    else
      redirect "/error/lacking-privileges"
    end
  end

end
24.876289
136
0.675093
01efc8ca437c9799121ed5b3f004dd50581af6ba
658
cask :v1 => 'clipmenu' do
  version '0.4.3'
  sha256 'd0d7ca6c23f51b2dfe78c7bb40bf2f212c21b3304b3eacde86112d8ef3e6bfb9'

  # dropbox is the official download host per the vendor homepage
  url "https://dl.dropbox.com/u/1140644/clipmenu/ClipMenu_#{version}.dmg"
  appcast 'http://feeds.feedburner.com/clipmenu-appcast',
          :sha256 => 'e9f9df0e48aad4e00b8df26fd622f42a0218f5be662b6d2ee496664c5f45b4a3'
  homepage 'http://www.clipmenu.com/'
  license :unknown

  app 'ClipMenu.app'

  zap :delete => [
    '~/Library/Application Support/ClipMenu',
    '~/Library/Preferences/com.naotaka.ClipMenu.plist',
  ]
end
36.555556
87
0.694529