Column              Type           Min    Max
hexsha              stringlengths  40     40
size                int64          2      1.01M
content             stringlengths  2      1.01M
avg_line_length     float64        1.5    100
max_line_length     int64          2      1k
alphanum_fraction   float64        0.25   1
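The dump does not say how the three derived columns are computed, but they can plausibly be reproduced from a file's content. The short Ruby sketch below (Ruby to match the samples) shows one way to do it; the exact definitions used by the dataset, and the file name example.rb, are assumptions.

# Sketch only: assumed definitions for the derived columns above.
def line_stats(content)
  lines   = content.split("\n", -1)
  lengths = lines.map(&:length)
  {
    size: content.bytesize,
    avg_line_length: lengths.sum.to_f / lines.size,
    max_line_length: lengths.max,
    # Fraction of characters that are ASCII letters or digits.
    alphanum_fraction: content.count("A-Za-z0-9").to_f / content.length
  }
end

# Example usage on a local file (hypothetical path).
puts line_stats(File.read("example.rb"))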
hexsha: 33363975df5946feb30fcf5aaa7bd29eee4836a5
size: 1,462
# Copyright 2015 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. require 'google/apis/youtube_partner_v1/service.rb' require 'google/apis/youtube_partner_v1/classes.rb' require 'google/apis/youtube_partner_v1/representations.rb' module Google module Apis # YouTube Content ID API # # The YouTube Content ID API allows the management of YouTube assets along with # their associated content, references, ownership, rights and policies. # # @see https://devsite.googleplex.com/youtube/partner/docs/v1/ module YoutubePartnerV1 VERSION = 'V1' REVISION = '20201005' # View and manage your assets and associated content on YouTube AUTH_YOUTUBEPARTNER = 'https://www.googleapis.com/auth/youtubepartner' # View content owner account details from YouTube. AUTH_YOUTUBEPARTNER_CONTENT_OWNER_READONLY = 'https://www.googleapis.com/auth/youtubepartner-content-owner-readonly' end end end
avg_line_length: 37.487179
max_line_length: 122
alphanum_fraction: 0.751026

hexsha: 38c32378d3473404b3915f38afb40deb0f459c39
size: 118
class AddReportCoordsIndex < ActiveRecord::Migration[5.1] def change add_index :reports, [:lat, :lng] end end
avg_line_length: 19.666667
max_line_length: 57
alphanum_fraction: 0.720339

hexsha: e27d7020c408aa612d07a19adba4a9bf681d96e0
size: 406
cask 'atext' do version '2.21' sha256 '8ef2fc8f136f15ab4c1218a74097a9a83407ede3f79e6e616eade920bfff02ba' url 'https://www.trankynam.com/atext/downloads/aText.dmg' appcast 'https://www.trankynam.com/atext/aText-Appcast.xml', checkpoint: '9d96d77e7584acfe70ee83f0d00342fbe0b9d0b39e23685f9c92001bbd12f983' name 'aText' homepage 'https://www.trankynam.com/atext/' app 'aText.app' end
avg_line_length: 31.230769
max_line_length: 88
alphanum_fraction: 0.770936

hexsha: ab744e67bbd7d4d90e2e2f487b2abcadbada7368
size: 2,703
require_relative 'config.rb' require_relative 'valuefirst.rb' module Valuefirst class Valuefirst include Constants attr_reader :config def initialize(opts = {}, &block) @config = Config.new(opts) yield(@config) if block_given? @config.validate end def self.error_desc error_code ERROR_CODES.fetch(error_code.to_s, "Description not available") end def credit_request payload = XmlPayload::RequestCredit.requestcredit @config call_api payload, "credits" end def status_request guid_seq_hash payload = XmlPayload::StatusRequest.statusrequest @config, guid_seq_hash call_api payload, "status" end def send_message message_content, phone_number, sender_id = nil payload = XmlPayload::TextMessage.textmessage @config, message_content, phone_number, sender_id call_api payload, "send" end def bulksend_message file_path raise ArgumentError, "File does not exist." unless File.exists? file_path.to_s raise ArgumentError, "File is not readable." unless File.readable? file_path.to_s payload = XmlPayload::Batchtext.batchtext @config, file_path call_api payload, "send" end def multicast_message message_content, phone_number_array, sender_id = nil payload = XmlPayload::MulticastMessage.multicastmessage @config, message_content, phone_number_array, sender_id call_api payload, "send" end def send_unicode message_content, phone_number, sender_id = nil payload = XmlPayload::UnicodeMessage.unicodemessage @config, message_content, phone_number, sender_id call_api payload, "send" end def bulksend_unicode file_path raise ArgumentError, "File does not exist." unless File.exists? file_path.to_s raise ArgumentError, "File is not readable." unless File.readable? file_path.to_s payload = XmlPayload::Batchunicode.batchunicode @config, file_path call_api payload, "send" end def multicast_unicode message_content, phone_number_array, sender_id = nil payload = XmlPayload::MulticastUnicode.multicastunicode @config, message_content, phone_number_array, sender_id call_api payload, "send" end private def call_api payload, action raise ArgumentError, "Invalid action" unless VALID_ACTIONS.include? action params = {data: payload, action: action} api_reponse = Net::HTTP.post_form( URI.parse(@config.url), params ) case api_reponse when Net::HTTPSuccess, Net::HTTPRedirection return HappyMapper.parse(api_reponse.body.downcase) else return api_reponse end end end end
avg_line_length: 31.8
max_line_length: 117
alphanum_fraction: 0.718461

hexsha: edd5bb575c482d6c01b051e8b365dacfec2ddcc6
size: 1,770
# This file is copied to spec/ when you run 'rails generate rspec:install' ENV['RAILS_ENV'] ||= 'test' require 'rubygems' require 'bundler' if %w(true 1).include?(ENV['COVERAGE']) require 'simplecov' SimpleCov.start do add_filter '/test_app/' add_filter '/spec/support' end end require File.expand_path("../../test_app/config/environment", __FILE__) require 'rspec/rails' # Requires supporting ruby files with custom matchers and macros, etc, # in spec/support/ and its subdirectories. Dir[File.join(File.expand_path("../../", __FILE__), "spec/support/**/*.rb")].each {|f| require f} Dir[Rails.root.join("spec/support/**/*.rb")].each {|f| require f} RSpec.configure do |config| # ## Mock Framework # # If you prefer to use mocha, flexmock or RR, uncomment the appropriate line: # # config.mock_with :mocha # config.mock_with :flexmock # config.mock_with :rr # Remove this line if you're not using ActiveRecord or ActiveRecord fixtures # config.fixture_path = "#{::Rails.root}/spec/fixtures" # If you're not using ActiveRecord, or you'd prefer not to run each of your # examples within a transaction, remove the following line or assign false # instead of true. # config.use_transactional_fixtures = true # If true, the base class of anonymous controllers will be inferred # automatically. This will be the default behavior in future versions of # rspec-rails. config.infer_base_class_for_anonymous_controllers = true config.infer_spec_type_from_file_location! # Run specs in random order to surface order dependencies. If you find an # order dependency and want to debug it, you can fix the order by providing # the seed, which is printed after each run. # --seed 1234 config.order = "random" end
avg_line_length: 34.038462
max_line_length: 97
alphanum_fraction: 0.726554

hexsha: 01e0cb32d582024457fff5c51681abdc526dd073
size: 1,238
require 'spec_helper' require 'guard' require 'guard/compass_helper' describe Guard::CompassHelper do subject {self} include Guard::CompassHelper before :each do Pathname.stub!(:pwd).and_return(Pathname.new('/test/me')) end describe "pathname method" do it "retrieve pwd when nothing given" do subject.pathname.should == Pathname.new('/test/me') end it "retrieve the absolut path as it" do subject.pathname('/hello/boy').should == Pathname.new('/hello/boy') end it "computes the relative path" do subject.pathname('a', 'b', 'c').should == Pathname.new('/test/me/a/b/c') end it "takes the absolute path in middle of the run" do subject.pathname('a', '/another/test', 'c').should == Pathname.new('/another/test/c') end it "understand double dot notation" do subject.pathname('..').should == Pathname.new('/test') subject.pathname('..').to_s.should == '/test' subject.pathname('..', 'a/d/c').should == Pathname.new('/test/a/d/c') subject.pathname('..', 'custom_config_file/another_config_location/config.rb').to_s.should == '/test/custom_config_file/another_config_location/config.rb' end end end
avg_line_length: 30.195122
max_line_length: 160
alphanum_fraction: 0.647819

hexsha: e89ea6d1d7e9de3d12ecba8d9910b03fd31abf5d
size: 1,247
require File.expand_path(File.join(File.dirname(__FILE__), '..', 'spec_helper')) describe ConfigurationStore do before(:each) do @instance = ConfigurationStore.new end it "should have a default_template accessor" do @instance.should respond_to(:default_template) @instance.should respond_to(:default_template=) end it "should be able to contain elements that weren't specified" do lambda { @instance.hello = :world @instance.hello.should == :world }.should_not raise_error end it "should raise exception if we call read methods with arguments" do lambda { @instance.hello :world }.should raise_error end it "should be able to contain boolean elements that weren't specified" do lambda { @instance.hello = true @instance.should be_hello }.should_not raise_error end it "should have a respond_to method that returns true after a value has been set" do @instance.should_not respond_to(:quack) @instance.should_not respond_to(:quack=) @instance.should_not respond_to(:quack?) @instance.quack = :quack @instance.should respond_to(:quack) @instance.should respond_to(:quack=) @instance.should respond_to(:quack?) end end
avg_line_length: 28.340909
max_line_length: 86
alphanum_fraction: 0.708099

hexsha: ab637f4e34b0a992f3dea0dd3d599818a9b3c2ad
size: 1,022
class PapersController < ApplicationController def index if params.key?(:year) @paper = Paper.all.year_until(params[:year]) else @paper = Paper.all end end def show @paper = Paper.find(params[:id]) end def new @paper = Paper.new end def edit @paper = Paper.find(params[:id]) @authors = Author.all end def create @paper = Paper.new(paper_params) if @paper.save redirect_to @paper else render 'new' end end def update @paper = Paper.find(params[:id]) if @paper.update(paper_params) redirect_to @paper else render 'edit' end end def destroy @paper = Paper.find(params[:id]) @paper.destroy redirect_to papers_path end private def paper_params params.require(:paper).permit(:title, :venue, :year, :author_ids => []) end end
avg_line_length: 18.25
max_line_length: 79
alphanum_fraction: 0.528376

hexsha: bb1021cde37b5f545869a6ac04b7e987369ba6c5
size: 3,178
# Generated by the protocol buffer compiler. DO NOT EDIT! # source: google/cloud/aiplatform/v1/index_endpoint.proto require 'google/cloud/aiplatform/v1/index_pb' require 'google/api/field_behavior_pb' require 'google/api/resource_pb' require 'google/cloud/aiplatform/v1/machine_resources_pb' require 'google/protobuf/timestamp_pb' require 'google/api/annotations_pb' require 'google/protobuf' Google::Protobuf::DescriptorPool.generated_pool.build do add_file("google/cloud/aiplatform/v1/index_endpoint.proto", :syntax => :proto3) do add_message "google.cloud.aiplatform.v1.IndexEndpoint" do optional :name, :string, 1 optional :display_name, :string, 2 optional :description, :string, 3 repeated :deployed_indexes, :message, 4, "google.cloud.aiplatform.v1.DeployedIndex" optional :etag, :string, 5 map :labels, :string, :string, 6 optional :create_time, :message, 7, "google.protobuf.Timestamp" optional :update_time, :message, 8, "google.protobuf.Timestamp" optional :network, :string, 9 end add_message "google.cloud.aiplatform.v1.DeployedIndex" do optional :id, :string, 1 optional :index, :string, 2 optional :display_name, :string, 3 optional :create_time, :message, 4, "google.protobuf.Timestamp" optional :private_endpoints, :message, 5, "google.cloud.aiplatform.v1.IndexPrivateEndpoints" optional :index_sync_time, :message, 6, "google.protobuf.Timestamp" optional :automatic_resources, :message, 7, "google.cloud.aiplatform.v1.AutomaticResources" optional :enable_access_logging, :bool, 8 optional :deployed_index_auth_config, :message, 9, "google.cloud.aiplatform.v1.DeployedIndexAuthConfig" repeated :reserved_ip_ranges, :string, 10 optional :deployment_group, :string, 11 end add_message "google.cloud.aiplatform.v1.DeployedIndexAuthConfig" do optional :auth_provider, :message, 1, "google.cloud.aiplatform.v1.DeployedIndexAuthConfig.AuthProvider" end add_message "google.cloud.aiplatform.v1.DeployedIndexAuthConfig.AuthProvider" do repeated :audiences, :string, 1 repeated :allowed_issuers, :string, 2 end add_message "google.cloud.aiplatform.v1.IndexPrivateEndpoints" do optional :match_grpc_address, :string, 1 end end end module Google module Cloud module AIPlatform module V1 IndexEndpoint = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.aiplatform.v1.IndexEndpoint").msgclass DeployedIndex = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.aiplatform.v1.DeployedIndex").msgclass DeployedIndexAuthConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.aiplatform.v1.DeployedIndexAuthConfig").msgclass DeployedIndexAuthConfig::AuthProvider = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.aiplatform.v1.DeployedIndexAuthConfig.AuthProvider").msgclass IndexPrivateEndpoints = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.aiplatform.v1.IndexPrivateEndpoints").msgclass end end end end
avg_line_length: 49.65625
max_line_length: 180
alphanum_fraction: 0.753304

hexsha: 030343e006f6f1db35820dcb0872b678ee7b39fd
size: 5,029
; IBM852 UCS-2 decoding rule ; source: ftp://dkuug.dk/i18n/charmaps/IBM852 any [ #{00C7} (insert tail result #{80}) | #{00FC} (insert tail result #{81}) | #{00E9} (insert tail result #{82}) | #{00E2} (insert tail result #{83}) | #{00E4} (insert tail result #{84}) | #{016F} (insert tail result #{85}) | #{0107} (insert tail result #{86}) | #{00E7} (insert tail result #{87}) | #{0142} (insert tail result #{88}) | #{00EB} (insert tail result #{89}) | #{0150} (insert tail result #{8A}) | #{0151} (insert tail result #{8B}) | #{00EE} (insert tail result #{8C}) | #{0179} (insert tail result #{8D}) | #{00C4} (insert tail result #{8E}) | #{0106} (insert tail result #{8F}) | #{00C9} (insert tail result #{90}) | #{0139} (insert tail result #{91}) | #{013A} (insert tail result #{92}) | #{00F4} (insert tail result #{93}) | #{00F6} (insert tail result #{94}) | #{013D} (insert tail result #{95}) | #{013E} (insert tail result #{96}) | #{015A} (insert tail result #{97}) | #{015B} (insert tail result #{98}) | #{00D6} (insert tail result #{99}) | #{00DC} (insert tail result #{9A}) | #{0164} (insert tail result #{9B}) | #{0165} (insert tail result #{9C}) | #{0141} (insert tail result #{9D}) | #{00D7} (insert tail result #{9E}) | #{010D} (insert tail result #{9F}) | #{00E1} (insert tail result #{A0}) | #{00ED} (insert tail result #{A1}) | #{00F3} (insert tail result #{A2}) | #{00FA} (insert tail result #{A3}) | #{0104} (insert tail result #{A4}) | #{0105} (insert tail result #{A5}) | #{017D} (insert tail result #{A6}) | #{017E} (insert tail result #{A7}) | #{0118} (insert tail result #{A8}) | #{0119} (insert tail result #{A9}) | #{00AC} (insert tail result #{AA}) | #{017A} (insert tail result #{AB}) | #{010C} (insert tail result #{AC}) | #{015F} (insert tail result #{AD}) | #{00AB} (insert tail result #{AE}) | #{00BB} (insert tail result #{AF}) | #{2591} (insert tail result #{B0}) | #{2592} (insert tail result #{B1}) | #{2593} (insert tail result #{B2}) | #{2502} (insert tail result #{B3}) | #{2524} (insert tail result #{B4}) | #{00C1} (insert tail result #{B5}) | #{00C2} (insert tail result #{B6}) | #{011A} (insert tail result #{B7}) | #{015E} (insert tail result #{B8}) | #{2563} (insert tail result #{B9}) | #{2551} (insert tail result #{BA}) | #{2557} (insert tail result #{BB}) | #{255D} (insert tail result #{BC}) | #{017B} (insert tail result #{BD}) | #{017C} (insert tail result #{BE}) | #{2510} (insert tail result #{BF}) | #{2514} (insert tail result #{C0}) | #{2534} (insert tail result #{C1}) | #{252C} (insert tail result #{C2}) | #{251C} (insert tail result #{C3}) | #{2500} (insert tail result #{C4}) | #{253C} (insert tail result #{C5}) | #{0102} (insert tail result #{C6}) | #{0103} (insert tail result #{C7}) | #{255A} (insert tail result #{C8}) | #{2554} (insert tail result #{C9}) | #{2569} (insert tail result #{CA}) | #{2566} (insert tail result #{CB}) | #{2560} (insert tail result #{CC}) | #{2550} (insert tail result #{CD}) | #{256C} (insert tail result #{CE}) | #{00A4} (insert tail result #{CF}) | #{0111} (insert tail result #{D0}) | #{0110} (insert tail result #{D1}) | #{010E} (insert tail result #{D2}) | #{00CB} (insert tail result #{D3}) | #{010F} (insert tail result #{D4}) | #{0147} (insert tail result #{D5}) | #{00CD} (insert tail result #{D6}) | #{00CE} (insert tail result #{D7}) | #{011B} (insert tail result #{D8}) | #{2518} (insert tail result #{D9}) | #{250C} (insert tail result #{DA}) | #{2588} (insert tail result #{DB}) | #{2584} (insert tail result #{DC}) | #{0162} (insert tail result 
#{DD}) | #{016E} (insert tail result #{DE}) | #{2580} (insert tail result #{DF}) | #{00D3} (insert tail result #{E0}) | #{00DF} (insert tail result #{E1}) | #{00D4} (insert tail result #{E2}) | #{0143} (insert tail result #{E3}) | #{0144} (insert tail result #{E4}) | #{0148} (insert tail result #{E5}) | #{0160} (insert tail result #{E6}) | #{0161} (insert tail result #{E7}) | #{0154} (insert tail result #{E8}) | #{00DA} (insert tail result #{E9}) | #{0155} (insert tail result #{EA}) | #{0170} (insert tail result #{EB}) | #{00FD} (insert tail result #{EC}) | #{00DD} (insert tail result #{ED}) | #{0163} (insert tail result #{EE}) | #{00B4} (insert tail result #{EF}) | #{00AD} (insert tail result #{F0}) | #{02DD} (insert tail result #{F1}) | #{02DB} (insert tail result #{F2}) | #{02C7} (insert tail result #{F3}) | #{02D8} (insert tail result #{F4}) | #{00A7} (insert tail result #{F5}) | #{00F7} (insert tail result #{F6}) | #{00B8} (insert tail result #{F7}) | #{00B0} (insert tail result #{F8}) | #{00A8} (insert tail result #{F9}) | #{02D9} (insert tail result #{FA}) | #{0171} (insert tail result #{FB}) | #{0158} (insert tail result #{FC}) | #{0159} (insert tail result #{FD}) | #{25A0} (insert tail result #{FE}) | #{00A0} (insert tail result #{FF}) | #{00} copy c 1 skip (insert tail result c) | copy c 2 skip (decodeUnknownChar c) ]
avg_line_length: 37.529851
max_line_length: 46
alphanum_fraction: 0.583018

hexsha: f7bb54557f402b73d8db12eb1c051c92e4964edb
size: 986
# encoding: utf-8 # This file is autogenerated. Do not edit it manually. # If you want change the content of this file, edit # # /spec/fixtures/responses/whois.nic.it/it/property_status_pendingdelete_redemptionperiod.expected # # and regenerate the tests with the following rake task # # $ rake spec:generate # require 'spec_helper' require 'whois/parsers/whois.nic.it.rb' describe Whois::Parsers::WhoisNicIt, "property_status_pendingdelete_redemptionperiod.expected" do subject do file = fixture("responses", "whois.nic.it/it/property_status_pendingdelete_redemptionperiod.txt") part = Whois::Record::Part.new(body: File.read(file)) described_class.new(part) end describe "#status" do it do expect(subject.status).to eq(:redemption) end end describe "#available?" do it do expect(subject.available?).to eq(false) end end describe "#registered?" do it do expect(subject.registered?).to eq(true) end end end
avg_line_length: 24.65
max_line_length: 101
alphanum_fraction: 0.72211

hexsha: 5d49b5b1b44e72fd2271e9170c7944b0e5740634
size: 2,283
require 'test_helper' class UserTest < ActiveSupport::TestCase def setup @user = User.new(name: "Example User", email: "[email protected]", password: "foobar", password_confirmation: "foobar") end test "should be valid" do assert @user.valid? end test "name should be present" do @user.name = " " assert_not @user.valid? end test "email should be present" do @user.email = " " assert_not @user.valid? end test "name should not be too long" do @user.name = "a" * 51 assert_not @user.valid? end test "email should not be too long" do @user.email = "a" * 244 + "@example.com" assert_not @user.valid? end test "email validation should accept valid addresses" do valid_addresses = %w[[email protected] [email protected] [email protected] [email protected] [email protected]] valid_addresses.each do |valid_address| @user.email = valid_address assert @user.valid?, "#{valid_address.inspect} should be valid" end end test "email validation should reject invalid addresses" do invalid_addresses = %w[user@example,com user_at_foo.org user.name@example. foo@bar_baz.com foo@bar+baz.com] invalid_addresses.each do |invalid_address| @user.email = invalid_address assert_not @user.valid?, "#{invalid_address.inspect} should be invalid" end end test "email addresses should be unique" do duplicate_user = @user.dup duplicate_user.email = @user.email.upcase @user.save assert_not duplicate_user.valid? end test "email addresses should be saved as lower-case" do mixed_case_email = "[email protected]" @user.email = mixed_case_email @user.save assert_equal mixed_case_email.downcase, @user.reload.email end test "password should be present (nonblank)" do @user.password = @user.password_confirmation = " " * 6 assert_not @user.valid? end test "password should have a minimum length" do @user.password = @user.password_confirmation = "a" * 5 assert_not @user.valid? end test "authenticated? should return false for a user with nil digest" do assert_not @user.authenticated?(:remember, '') end end
avg_line_length: 28.898734
max_line_length: 78
alphanum_fraction: 0.663163

hexsha: 397e25e509be0d3e4a0a4648b3dc3cc35df19a15
size: 9,587
# # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. class Hhvm471 < Formula desc "JIT compiler and runtime for the Hack language" homepage "http://hhvm.com/" head "https://github.com/facebook/hhvm.git" url "https://dl.hhvm.com/source/hhvm-4.71.0.tar.gz" sha256 "a69597e0da5ac1422b32d312e050d122d9c264222fe83c6bb704fc529d36456d" bottle do root_url "https://dl.hhvm.com/homebrew-bottles" sha256 catalina: "797d2cc3e282d7c3547492a5fe7ff33ec13775faf2a46b1f8f94019fdc0c982c" sha256 mojave: "c0a6ebcf2924bd6e25731511d62162031ec72f946469ff26fc7b145dbef1bd3d" end option "with-debug", <<~EOS Make an unoptimized build with assertions enabled. This will run PHP and Hack code dramatically slower than a release build, and is suitable mostly for debugging HHVM itself. EOS # Needs very recent xcode depends_on :macos => :sierra depends_on "autoconf" => :build depends_on "automake" => :build depends_on "cmake" => :build depends_on "double-conversion" depends_on "dwarfutils" depends_on "gawk" => :build depends_on "libelf" => :build depends_on "libtool" => :build depends_on "md5sha1sum" => :build depends_on "pkg-config" => :build depends_on "wget" => :build # We statically link against icu4c as every non-bugfix release is not # backwards compatible; needing to rebuild for every release is too # brittle depends_on "icu4c" => :build depends_on "boost" depends_on "freetype" depends_on "gd" depends_on "gettext" depends_on "glog" depends_on "gmp" depends_on "imagemagick@6" depends_on "jemalloc" depends_on "jpeg" depends_on "libevent" depends_on "libmemcached" depends_on "libsodium" depends_on "libpng" depends_on "libxml2" depends_on "libzip" depends_on "lz4" depends_on "mcrypt" depends_on "oniguruma" depends_on "openssl" depends_on "pcre" # Used for Hack but not HHVM build - see #116 depends_on "postgresql" depends_on "sqlite" depends_on "tbb@2020" def install cmake_args = std_cmake_args + %W[ -DCMAKE_INSTALL_SYSCONFDIR=#{etc} -DDEFAULT_CONFIG_DIR=#{etc}/hhvm ] # Force use of bundled PCRE to workaround #116 cmake_args += %W[ -DSYSTEM_PCRE_HAS_JIT=0 ] # Features which don't work on OS X yet since they haven't been ported yet. cmake_args += %W[ -DENABLE_MCROUTER=OFF -DENABLE_EXTENSION_MCROUTER=OFF -DENABLE_EXTENSION_IMAP=OFF ] # Required to specify a socket path if you are using the bundled async SQL # client (which is very strongly recommended). cmake_args << "-DMYSQL_UNIX_SOCK_ADDR=/tmp/mysql.sock" # LZ4 warning macros are currently incompatible with clang cmake_args << "-DCMAKE_C_FLAGS=-DLZ4_DISABLE_DEPRECATE_WARNINGS=1" cmake_args << "-DCMAKE_CXX_FLAGS=-DLZ4_DISABLE_DEPRECATE_WARNINGS=1 -DU_USING_ICU_NAMESPACE=1" # Debug builds. This switch is all that's needed, it sets all the right # cflags and other config changes. if build.with? "debug" cmake_args << "-DCMAKE_BUILD_TYPE=Debug" else cmake_args << "-DCMAKE_BUILD_TYPE=RelWithDebInfo" end # Statically link libICU cmake_args += %W[ -DICU_INCLUDE_DIR=#{Formula["icu4c"].opt_include} -DICU_I18N_LIBRARY=#{Formula["icu4c"].opt_lib}/libicui18n.a -DICU_LIBRARY=#{Formula["icu4c"].opt_lib}/libicuuc.a -DICU_DATA_LIBRARY=#{Formula["icu4c"].opt_lib}/libicudata.a ] # TBB looks for itself in a different place than brew installs to. ENV["TBB_ARCH_PLATFORM"] = "." 
cmake_args += %W[ -DTBB_INCLUDE_DIR=#{Formula["tbb@2020"].opt_include} -DTBB_INSTALL_DIR=#{Formula["tbb@2020"].opt_prefix} -DTBB_LIBRARY=#{Formula["tbb@2020"].opt_lib}/libtbb.dylib -DTBB_LIBRARY_DEBUG=#{Formula["tbb@2020"].opt_lib}/libtbb.dylib -DTBB_LIBRARY_DIR=#{Formula["tbb@2020"].opt_lib} -DTBB_MALLOC_LIBRARY=#{Formula["tbb@2020"].opt_lib}/libtbbmalloc.dylib -DTBB_MALLOC_LIBRARY_DEBUG=#{Formula["tbb@2020"].opt_lib}/libtbbmalloc.dylib ] system "cmake", *cmake_args, '.' system "make" system "make", "install" tp_notices = (share/"doc/third_party_notices.txt") (share/"doc").install "third-party/third_party_notices.txt" (share/"doc/third_party_notices.txt").append_lines <<EOF ----- The following software may be included in this product: icu4c. This Software contains the following license and notice below: Unicode Data Files include all data files under the directories http://www.unicode.org/Public/, http://www.unicode.org/reports/, http://www.unicode.org/cldr/data/, http://source.icu-project.org/repos/icu/, and http://www.unicode.org/utility/trac/browser/. Unicode Data Files do not include PDF online code charts under the directory http://www.unicode.org/Public/. Software includes any source code published in the Unicode Standard or under the directories http://www.unicode.org/Public/, http://www.unicode.org/reports/, http://www.unicode.org/cldr/data/, http://source.icu-project.org/repos/icu/, and http://www.unicode.org/utility/trac/browser/. NOTICE TO USER: Carefully read the following legal agreement. BY DOWNLOADING, INSTALLING, COPYING OR OTHERWISE USING UNICODE INC.'S DATA FILES ("DATA FILES"), AND/OR SOFTWARE ("SOFTWARE"), YOU UNEQUIVOCALLY ACCEPT, AND AGREE TO BE BOUND BY, ALL OF THE TERMS AND CONDITIONS OF THIS AGREEMENT. IF YOU DO NOT AGREE, DO NOT DOWNLOAD, INSTALL, COPY, DISTRIBUTE OR USE THE DATA FILES OR SOFTWARE. COPYRIGHT AND PERMISSION NOTICE Copyright © 1991-2017 Unicode, Inc. All rights reserved. Distributed under the Terms of Use in http://www.unicode.org/copyright.html. Permission is hereby granted, free of charge, to any person obtaining a copy of the Unicode data files and any associated documentation (the "Data Files") or Unicode software and any associated documentation (the "Software") to deal in the Data Files or Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, and/or sell copies of the Data Files or Software, and to permit persons to whom the Data Files or Software are furnished to do so, provided that either (a) this copyright and permission notice appear with all copies of the Data Files or Software, or (b) this copyright and permission notice appear in associated Documentation. THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF THIRD PARTY RIGHTS. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THE DATA FILES OR SOFTWARE. 
Except as contained in this notice, the name of a copyright holder shall not be used in advertising or otherwise to promote the sale, use or other dealings in these Data Files or Software without prior written authorization of the copyright holder. EOF ini = etc/"hhvm" (ini/"php.ini").write php_ini unless File.exist? (ini/"php.ini") (ini/"server.ini").write server_ini unless File.exist? (ini/"server.ini") end test do (testpath/"test.php").write <<~EOS <?php exit(is_integer(HHVM_VERSION_ID) ? 0 : 1); EOS system "#{bin}/hhvm", testpath/"test.php" end plist_options :manual => "hhvm -m daemon -c #{HOMEBREW_PREFIX}/etc/hhvm/php.ini -c #{HOMEBREW_PREFIX}/etc/hhvm/server.ini" def plist <<~EOS <?xml version="1.0" encoding="UTF-8"?> <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd"> <plist version="1.0"> <dict> <key>Label</key> <string>#{plist_name}</string> <key>RunAtLoad</key> <true/> <key>KeepAlive</key> <true/> <key>ProgramArguments</key> <array> <string>#{opt_bin}/hhvm</string> <string>-m</string> <string>server</string> <string>-c</string> <string>#{etc}/hhvm/php.ini</string> <string>-c</string> <string>#{etc}/hhvm/server.ini</string> </array> <key>WorkingDirectory</key> <string>#{HOMEBREW_PREFIX}</string> </dict> </plist> EOS end # https://github.com/hhvm/packaging/blob/master/hhvm/deb/skeleton/etc/hhvm/php.ini def php_ini <<~EOS ; php options session.save_handler = files session.save_path = #{var}/lib/hhvm/sessions session.gc_maxlifetime = 1440 ; hhvm specific hhvm.log.always_log_unhandled_exceptions = true hhvm.log.runtime_error_reporting_level = 8191 hhvm.mysql.typed_results = false EOS end # https://github.com/hhvm/packaging/blob/master/hhvm/deb/skeleton/etc/hhvm/server.ini def server_ini <<~EOS ; php options pid = #{var}/run/hhvm/pid ; hhvm specific hhvm.server.port = 9000 hhvm.server.default_document = index.php hhvm.log.use_log_file = true hhvm.log.file = #{var}/log/hhvm/error.log hhvm.repo.central.path = #{var}/run/hhvm/hhvm.hhbc EOS end end
avg_line_length: 35.772388
max_line_length: 125
alphanum_fraction: 0.707938

hexsha: 21a05d872672a793a96ab86c63d9d0011d4973dc
size: 434
# frozen_string_literal: true module Logux class Client attr_reader :logux_host def initialize(logux_host: Logux.configuration.logux_host) @logux_host = logux_host end def post(params) client.post(params.to_json, content_type: :json, accept: :json) end def client @client ||= RestClient::Resource.new(logux_host, verify_ssl: false) end end end
avg_line_length: 19.727273
max_line_length: 73
alphanum_fraction: 0.638249

hexsha: 8771b3d6b2305d4c9cf52d7150d8697b9552df49
size: 1,840
class Avrdude < Formula desc "Atmel AVR MCU programmer" homepage "https://savannah.nongnu.org/projects/avrdude/" url "https://download.savannah.gnu.org/releases/avrdude/avrdude-6.3.tar.gz" mirror "https://download-mirror.savannah.gnu.org/releases/avrdude/avrdude-6.3.tar.gz" sha256 "0f9f731b6394ca7795b88359689a7fa1fba818c6e1d962513eb28da670e0a196" revision 1 livecheck do url "https://download.savannah.gnu.org/releases/avrdude/" regex(/href=.*?avrdude[._-]v?(\d+(?:\.\d+)+)\.t/i) end bottle do sha256 big_sur: "80bd53f8b78f172aaea62b9a58f6febfc4ac4b510969511ab0f3e06da9adb1bb" sha256 catalina: "d3f4c82170fa37bacd6e1bc3276ba27e7a8ed2ea781b101b7899e7602393a15b" sha256 mojave: "65fe6de6f540eb1c6ad94d35c847f8a5921cc9059ff044d1bc78f68cc8b8334b" sha256 high_sierra: "b0cb94b5c4f01fcc870f286bca293218c98fda23d76397db8a831272f7087038" sha256 sierra: "e8e26af5565cd897867d4e6e71e66e6e946e1e21eb4e27d3cd49f199f088fc5d" sha256 el_capitan: "c953526dc893a9b162a109d074edf8bb71d7049c63990282edc994c63de90c44" end head do url "https://svn.savannah.nongnu.org/svn/avrdude/trunk/avrdude" depends_on "autoconf" => :build depends_on "automake" => :build depends_on "libtool" => :build end depends_on "libelf" depends_on "libftdi0" depends_on "libhid" depends_on "libusb-compat" uses_from_macos "bison" uses_from_macos "flex" def install if build.head? inreplace "bootstrap", /libtoolize/, "glibtoolize" system "./bootstrap" end system "./configure", "--disable-dependency-tracking", "--prefix=#{prefix}" system "make" system "make", "install" end test do assert_equal "avrdude done. Thank you.", shell_output("#{bin}/avrdude -c jtag2 -p x16a4 2>&1", 1).strip end end
avg_line_length: 33.454545
max_line_length: 90
alphanum_fraction: 0.727174

hexsha: f7ada6436ad10d41cd6e758c2ccd983050b61832
size: 1,327
module GnarusActivity class ExercisesController < ApplicationController before_filter :authenticate_user! if defined?(Devise) def index @exercises = Exercise.all.select{|e| e.author.id == current_user.id} end def show @exercise = Exercise.find(params[:id]) end def new @exercise = Exercise.new end # GET /exercises/1/edit def edit @exercise = Exercise.find(params[:id]) end def create @exercise = Exercise.new(params[:exercise]) @exercise.author = current_user if defined?(Devise) if @exercise.save redirect_to @exercise, notice: 'Exercise was successfully created.' else render action: "new" end end def update @exercise = Exercise.find(params[:id]) if defined?(Devise) && @exercise.author == current_user params[:exercise].delete :author_id @exercise.update_attributes(params[:exercise]) end redirect_to @exercise, notice: 'Exercise was successfully updated.' end # DELETE /exercises/1 # DELETE /exercises/1.json def destroy @exercise = Exercise.find(params[:id]) if defined?(Devise) && @exercise.author == current_user @exercise.destroy end redirect_to exercises_url end end end
avg_line_length: 23.696429
max_line_length: 77
alphanum_fraction: 0.634514

hexsha: 62dfebc8f686b728dbe5dc26f304b7f3457867d1
size: 165
require "ecm/core/backend/configuration" require "ecm/core/backend/engine" module Ecm module Core module Backend extend Configuration end end end
avg_line_length: 15
max_line_length: 40
alphanum_fraction: 0.733333

hexsha: 1ca447463576b917a6d3106c527d448177b0583b
size: 164
class CreateAccounts < ActiveRecord::Migration[5.1] def change create_table :accounts do |t| t.string :iban_string t.timestamps end end end
avg_line_length: 18.222222
max_line_length: 51
alphanum_fraction: 0.682927

hexsha: 4a83688813a8e9e2f6e4953c5b45cc1463e74174
size: 6,105
require File.expand_path(File.join(File.dirname(__FILE__), '..', 'rackspace')) module Fog module Rackspace class Databases < Fog::Service class ServiceError < Fog::Rackspace::Errors::ServiceError; end class InternalServerError < Fog::Rackspace::Errors::InternalServerError; end class BadRequest < Fog::Rackspace::Errors::BadRequest; end DFW_ENDPOINT = 'https://dfw.databases.api.rackspacecloud.com/v1.0' LON_ENDPOINT = 'https://lon.databases.api.rackspacecloud.com/v1.0' ORD_ENDPOINT = 'https://ord.databases.api.rackspacecloud.com/v1.0' requires :rackspace_api_key, :rackspace_username recognizes :rackspace_auth_url recognizes :rackspace_auth_token recognizes :rackspace_endpoint recognizes :rackspace_region recognizes :rackspace_database_url model_path 'fog/rackspace/models/databases' model :flavor collection :flavors model :instance collection :instances model :database collection :databases model :user collection :users request_path 'fog/rackspace/requests/databases' request :list_flavors request :get_flavor request :list_instances request :get_instance request :create_instance request :delete_instance request :check_root_user request :enable_root_user request :restart_instance request :resize_instance request :resize_instance_volume request :list_databases request :create_database request :delete_database request :list_users request :create_user request :delete_user class Mock < Fog::Rackspace::Service def request(params) Fog::Mock.not_implemented end end class Real < Fog::Rackspace::Service def service_name :cloudDatabases end def region @rackspace_region end def initialize(options = {}) @rackspace_api_key = options[:rackspace_api_key] @rackspace_username = options[:rackspace_username] @rackspace_auth_url = options[:rackspace_auth_url] @rackspace_must_reauthenticate = false @connection_options = options[:connection_options] || {} setup_custom_endpoint(options) authenticate deprecation_warnings(options) @persistent = options[:persistent] || false @connection = Fog::Connection.new(endpoint_uri.to_s, @persistent, @connection_options) end def request(params) begin response = @connection.request(params.merge!({ :headers => { 'Content-Type' => 'application/json', 'Accept' => 'application/json', 'X-Auth-Token' => auth_token }.merge!(params[:headers] || {}), :host => endpoint_uri.host, :path => "#{endpoint_uri.path}/#{params[:path]}" })) rescue Excon::Errors::NotFound => error raise NotFound.slurp error rescue Excon::Errors::BadRequest => error raise BadRequest.slurp error rescue Excon::Errors::InternalServerError => error raise InternalServerError.slurp error rescue Excon::Errors::HTTPStatusError => error raise ServiceError.slurp error end unless response.body.empty? response.body = Fog::JSON.decode(response.body) end response end def endpoint_uri(service_endpoint_url=nil) @uri = super(@rackspace_endpoint || service_endpoint_url, :rackspace_database_url) end def authenticate options = { :rackspace_api_key => @rackspace_api_key, :rackspace_username => @rackspace_username, :rackspace_auth_url => @rackspace_auth_url } super(options) end private def setup_custom_endpoint(options) @rackspace_endpoint = Fog::Rackspace.normalize_url(options[:rackspace_database_url] || options[:rackspace_endpoint]) if v2_authentication? 
case @rackspace_endpoint when DFW_ENDPOINT @rackspace_endpoint = nil @rackspace_region = :dfw when ORD_ENDPOINT @rackspace_endpoint = nil @rackspace_region = :ord when LON_ENDPOINT @rackspace_endpoint = nil @rackspace_region = :lon else # we are actually using a custom endpoint @rackspace_region = options[:rackspace_region] || :dfw end else #if we are using auth1 and the endpoint is not set, default to DFW_ENDPOINT for historical reasons @rackspace_endpoint ||= DFW_ENDPOINT end end def deprecation_warnings(options) Fog::Logger.deprecation("The :rackspace_endpoint option is deprecated. Please use :rackspace_database_url for custom endpoints") if options[:rackspace_endpoint] if [DFW_ENDPOINT, ORD_ENDPOINT, LON_ENDPOINT].include?(@rackspace_endpoint) && v2_authentication? regions = @identity_service.service_catalog.display_service_regions(service_name) Fog::Logger.deprecation("Please specify region using :rackspace_region rather than :rackspace_endpoint. Valid region for :rackspace_region are #{regions}.") end end def append_tenant_v1(credentials) account_id = credentials['X-Server-Management-Url'].match(/.*\/([\d]+)$/)[1] endpoint = @rackspace_endpoint || credentials['X-Server-Management-Url'] || DFW_ENDPOINT @uri = URI.parse(endpoint) @uri.path = "#{@uri.path}/#{account_id}" end def authenticate_v1(options) credentials = Fog::Rackspace.authenticate(options, @connection_options) append_tenant_v1 credentials @auth_token = credentials['X-Auth-Token'] end end end end end
avg_line_length: 34.106145
max_line_length: 170
alphanum_fraction: 0.62932

hexsha: bf223621ee57c9b8768b19e77d2b6f272b78bc5d
size: 1,131
# frozen_string_literal: true # Copyright (c) 2008-2013 Michael Dvorkin and contributors. # # Fat Free CRM is freely distributable under the terms of MIT license. # See MIT-LICENSE file or http://www.opensource.org/licenses/mit-license.php #------------------------------------------------------------------------------ require File.expand_path(File.dirname(__FILE__) + '/../../spec_helper') describe "/leads/show" do include LeadsHelper before do login assign(:lead, @lead = build_stubbed(:lead, id: 42)) assign(:users, [current_user]) assign(:comment, Comment.new) assign(:timeline, [build_stubbed(:comment, commentable: @lead)]) # controller#controller_name and controller#action_name are not set in view specs allow(view).to receive(:template_for_current_view).and_return(nil) end it "should render lead landing page" do render expect(view).to render_template(partial: "comments/_new") expect(view).to render_template(partial: "shared/_timeline") expect(view).to render_template(partial: "shared/_tasks") expect(rendered).to have_tag("div[id=edit_lead]") end end
avg_line_length: 34.272727
max_line_length: 85
alphanum_fraction: 0.680813

hexsha: e896673e5fc2233c95a350943c1a1590d1dab1bd
size: 106
require 'rails_helper' module StashApi RSpec.describe UsersController, type: :controller do end end
avg_line_length: 13.25
max_line_length: 54
alphanum_fraction: 0.783019

hexsha: ace78920fa8620aa51f1c0f9777f4b91b9df3f5a
size: 1,582
require "spec_helper" describe Bump::CLI::Commands::Deploy do it 'calls the server and exit successfully if success' do stub_bump_api_validate('versions/post_success.http') expect do new_command.call(id: '1', token:'token', file: 'path/to/file', specification: 'openapi/v2/json') end.to output(/New version has been successfully deployed/).to_stdout expect(WebMock).to have_requested(:post,'https://bump.sh/api/v1/docs/1/versions').with( body: { definition: 'body', specification: 'openapi/v2/json' } ) end it 'displays the definition errors in case of unprocessable entity' do stub_bump_api_validate('versions/post_unprocessable_entity.http') expect do begin new_command.call(id: '1', token: 'token', file: 'path/to/file', specification: 'openapi/v2/yaml') rescue SystemExit; end end.to output(/Invalid request/).to_stderr end it 'displays a generic error message in case of unknown error' do stub_bump_api_validate('versions/post_unknown_error.http') expect do begin new_command.call(id: '1', token: 'token', file: 'path/to/file', specification: 'openapi/v2/yaml') rescue SystemExit; end end.to output(/Unknown error/).to_stderr end private def stub_bump_api_validate(path) stub_request(:post, %r{/versions}).to_return(read_http_fixture(path)) end def new_command command = Bump::CLI::Commands::Deploy.new(command_name: 'validate') allow(command).to receive(:open).and_return(double(read: 'body')) command end end
avg_line_length: 31.019608
max_line_length: 105
alphanum_fraction: 0.695954

hexsha: b97c621042da8596578c865c13e51bf5fa81a5b4
size: 2,170
module TocHelper def get_h3s_toc(node) node.css('h3').map do |c_node| { level: c_node.name[1].to_i, value: c_node.children.map { |child| child.to_s }.join, anchor: c_node['id'] } end end def get_toc(page_content, options = {}) doc = Nokogiri::HTML(page_content) toc = [] options = {full_toc_container: '.full-toc', show_subheadings: false }.merge(options) (doc / 'h2').each do |h_node| full_toc_container = h_node.ancestors(options[:full_toc_container]).first h = { value: h_node.children.map { |child| child.to_s }.join, anchor: h_node['id'], children: options[:show_subheadings] && !full_toc_container.nil? ? get_h3s_toc(full_toc_container) : [] } toc.push(h) end toc end def get_guide_toc(page_content, options = {}) get_toc(page_content, { show_subheadings: true, full_toc_container: '.full-toc' }.merge(options)) end def get_scrollspy_toc(page_content, options = {}) get_toc(page_content, { show_subheadings: false, full_toc_container: 'section:not(.no-scrollspy-full-toc)' }.merge(options)) end def get_titles_for_page(page_content, tag) doc = Nokogiri::HTML(page_content) (doc / tag).map do |h_node| { value: h_node.children.map { |child| child.to_s }.join, anchor: h_node['id'] } end end def build_toc(content) @@toc ||= {} return @@toc[content] if @@toc[content] doc = Nokogiri::HTML(content) h1 = [] toc = [] (doc / 'h1, h2, h3, h4').each do |h_node| h = { level: h_node.name[1].to_i, value: h_node.children.map { |child| child.to_s }.join.gsub(/(\{\#.+?\})/, ''), anchor: h_node['id'], children: [] } if toc.empty? toc << h next end while toc.size > 0 && h[:level] <= toc.last[:level] tmp = toc.pop h1 << tmp if tmp[:level] == 1 end if toc.empty? toc << h else toc.last[:children] << h toc << h end end @@toc[content] = h1 + toc.select { |h| h[:level] == 1 } end end
avg_line_length: 27.820513
max_line_length: 128
alphanum_fraction: 0.567281

hexsha: 91a967051e38086519c47e790e8fa61fcb03f77e
size: 1,441
# coding: utf-8 lib = File.expand_path('../lib', __FILE__) $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib) require 'beezup_api/version' Gem::Specification.new do |spec| spec.name = "beezup_api" spec.version = BeezupApi::VERSION spec.authors = ["Fabien Piette"] spec.email = ["[email protected]"] spec.description = 'A Ruby interface to the Beezup API.' spec.summary = spec.description spec.homepage = "https://github.com/tymate-team/beezup_api" spec.license = "MIT" # Prevent pushing this gem to RubyGems.org by setting 'allowed_push_host', or # delete this section to allow pushing this gem to any host. # if spec.respond_to?(:metadata) # spec.metadata['allowed_push_host'] = "TODO: Set to 'http://mygemserver.com'" # else # raise "RubyGems 2.0 or newer is required to protect against public gem pushes." # end spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) } spec.require_paths = ["lib"] spec.add_dependency 'unirest' spec.add_dependency 'hashie', '~> 3.0' spec.add_development_dependency "bundler", "~> 1.11" spec.add_development_dependency "rake", "~> 10.0" spec.add_development_dependency "rspec", "~> 3.0" spec.add_development_dependency 'simplecov', '~> 0.7' spec.add_development_dependency 'vcr', '~> 2.5' spec.add_development_dependency 'webmock', '~> 1.11' end
avg_line_length: 37.921053
max_line_length: 104
alphanum_fraction: 0.675226

hexsha: 1a46e92f24d458155b3f1096b877d5f2c050b9e2
size: 1,255
# WARNING ABOUT GENERATED CODE # # This file is generated. See the contributing guide for more information: # https://github.com/aws/aws-sdk-ruby/blob/master/CONTRIBUTING.md # # WARNING ABOUT GENERATED CODE require 'aws-sdk-core' require 'aws-sigv4' require_relative 'aws-sdk-dynamodbstreams/types' require_relative 'aws-sdk-dynamodbstreams/client_api' require_relative 'aws-sdk-dynamodbstreams/client' require_relative 'aws-sdk-dynamodbstreams/errors' require_relative 'aws-sdk-dynamodbstreams/resource' require_relative 'aws-sdk-dynamodbstreams/customizations' # This module provides support for Amazon DynamoDB Streams. This module is available in the # `aws-sdk-dynamodbstreams` gem. # # # Client # # The {Client} class provides one method for each API operation. Operation # methods each accept a hash of request parameters and return a response # structure. # # See {Client} for more information. # # # Errors # # Errors returned from Amazon DynamoDB Streams all # extend {Errors::ServiceError}. # # begin # # do stuff # rescue Aws::DynamoDBStreams::Errors::ServiceError # # rescues all service API errors # end # # See {Errors} for more information. # # @service module Aws::DynamoDBStreams GEM_VERSION = '1.2.0' end
avg_line_length: 26.145833
max_line_length: 91
alphanum_fraction: 0.759363

hexsha: 18b0ed1c78673f08c4b9623efe241fab3c9ca833
size: 1,628
require 'omniauth/oauth' require 'multi_json' module OmniAuth module Strategies # # Authenticate to T163 via OAuth and retrieve basic # user information. # # Usage: # # use OmniAuth::Strategies::T163, 'APIKey', 'APIKeySecret' # class T163 < OmniAuth::Strategies::OAuth def initialize(app, consumer_key = nil, consumer_secret = nil, options = {}, &block) @api_key = consumer_key client_options = { :site => 'http://api.t.163.com', :request_token_path => '/oauth/request_token', :access_token_path => '/oauth/access_token', :authorize_path => '/oauth/authenticate', :realm => 'OmniAuth' } super(app, :t163, consumer_key, consumer_secret, client_options, options, &block) end def auth_hash OmniAuth::Utils.deep_merge(super, { 'uid' => user_hash['screen_name'], 'user_info' => user_info, 'extra' => {'user_hash' => user_hash} }) end def user_info user_hash = self.user_hash { 'username' => user_hash['name'], 'name' => user_hash['realName'], 'location' => user_hash['location'], 'image' => user_hash['profile_image_url'], 'description' => user_hash['description'], 'urls' => { 'T163' => 'http://t.163.com' } } end def user_hash @user_hash ||= MultiJson.decode(@access_token.get("http://api.t.163.com/account/verify_credentials.json").body) end end end end
avg_line_length: 28.068966
max_line_length: 119
alphanum_fraction: 0.552826

hexsha: ffe2c0e7e99d3a7889b10da7ff8ec6b9deb6ffae
size: 889
command = Mixlib::ShellOut.new('echo ~root') command.run_command root_home_dir = command.stdout.to_s.strip puts 'Homedir:' + root_home_dir directory '/opt/chef/embedded/etc' do owner 'root' group 'root' mode 00755 recursive true end.run_action(:create) file '/opt/chef/embedded/etc/gemrc' do content <<-EOF --- :backtrace: false :bulk_threshold: 1000 :sources: - #{node['bnhp-gems']['source']} :update_sources: true :verbose: true :ssl_verify_mode: 0 install: "--user --no-document" update: "--user --no-document" EOF end.run_action(:create) file "#{root_home_dir}/.gemrc" do content <<-EOF --- :backtrace: false :bulk_threshold: 1000 :sources: - #{node['bnhp-gems']['source']} :update_sources: true :verbose: true :ssl_verify_mode: 0 install: "--user --no-document" update: "--user --no-document" EOF end.run_action(:create)
avg_line_length: 20.674419
max_line_length: 45
alphanum_fraction: 0.674916

hexsha: 7a4f11b4ab73ffddc4a2201d509c37a5a8a453fd
size: 5,743
require "spec_helper" describe Mongoid::Atomic do describe "#atomic_updates" do context "when the document is persisted" do let(:person) do Person.create end context "when the document is modified" do before do person.title = "Sir" end it "returns the atomic updates" do person.atomic_updates.should eq({ "$set" => { "title" => "Sir" }}) end context "when an embeds many child is added" do let!(:address) do person.addresses.build(:street => "Oxford St") end it "returns a $set and $pushAll for modifications" do person.atomic_updates.should eq( { "$set" => { "title" => "Sir" }, "$pushAll" => { "addresses" => [ { "_id" => "oxford-st", "street" => "Oxford St" } ]} } ) end end context "when an embeds one child is added" do let!(:name) do person.build_name(:first_name => "Lionel") end it "returns a $set for modifications" do person.atomic_updates.should eq( { "$set" => { "title" => "Sir", "name" => { "_id" => "Lionel-", "first_name" => "Lionel" } } } ) end end context "when an existing embeds many gets modified" do let!(:address) do person.addresses.create(:street => "Oxford St") end before do address.street = "Bond St" end it "returns the $set with correct position and modifications" do person.atomic_updates.should eq( { "$set" => { "title" => "Sir", "addresses.0.street" => "Bond St" }} ) end context "when an existing 2nd level embedded child gets modified" do let!(:location) do address.locations.create(:name => "Home") end before do location.name = "Work" end it "returns the $set with correct positions and modifications" do person.atomic_updates.should eq( { "$set" => { "title" => "Sir", "addresses.0.street" => "Bond St", "addresses.0.locations.0.name" => "Work" } } ) end end context "when a 2nd level embedded child gets added" do let!(:location) do address.locations.build(:name => "Home") end it "returns the $set with correct positions and modifications" do person.atomic_updates.should eq( { "$set" => { "title" => "Sir", "addresses.0.street" => "Bond St" }, :conflicts => { "$pushAll" => { "addresses.0.locations" => [{ "_id" => location.id, "name" => "Home" }] } } } ) end end context "when an embedded child gets unset" do before do person.attributes = { :addresses => nil } end let(:updates) do person.atomic_updates end it "returns the $set for the first level and $unset for other." do updates.should eq({ "$unset" => { "addresses" => true }, "$set" => { "title" => "Sir" } }) end end context "when adding a new second level child" do let!(:new_address) do person.addresses.build(:street => "Another") end let!(:location) do new_address.locations.build(:name => "Home") end it "returns the $set for 1st level and other for the 2nd level" do person.atomic_updates.should eq( { "$set" => { "title" => "Sir", "addresses.0.street" => "Bond St" }, :conflicts => { "$pushAll" => { "addresses" => [{ "_id" => new_address.id, "street" => "Another", "locations" => [ "_id" => location.id, "name" => "Home" ] }] } } } ) end end end context "when adding new embedded docs at multiple levels" do let!(:address) do person.addresses.build(:street => "Another") end let!(:location) do address.locations.build(:name => "Home") end it "returns the proper $sets and $pushAlls for all levels" do person.atomic_updates.should eq( { "$set" => { "title" => "Sir", }, "$pushAll" => { "addresses" => [{ "_id" => address.id, "street" => "Another", "locations" => [ "_id" => location.id, "name" => "Home" ] }] } } ) end end end end end end
avg_line_length: 27.878641
max_line_length: 93
alphanum_fraction: 0.401706

hexsha: bb51a692a7026b88cd585d0b7e68068a146bd6e5
size: 1,327
# frozen_string_literal: true require 'rbnacl' require 'base64' # Verifies digitally signed requests class SignedRequest class VerificationError < StandardError; end def initialize(config) @verify_key = Base64.strict_decode64(config.VERIFY_KEY) @config = config # For SIGNING_KEY during tests end def self.generate_keypair signing_key = RbNaCl::SigningKey.generate verify_key = signing_key.verify_key { signing_key: Base64.strict_encode64(signing_key), verify_key: Base64.strict_encode64(verify_key) } end def parse(signed_json) parsed = JSON.parse(signed_json, symbolize_names: true) parsed[:data] if verify(parsed[:data], parsed[:signature]) end # Signing for internal tests (should be same as client method) def sign(message) signing_key = Base64.strict_decode64(@config.SIGNING_KEY) signature = RbNaCl::SigningKey.new(signing_key) .sign(message.to_json) .then { |sig| Base64.strict_encode64(sig) } { data: message, signature: signature } end private def verify(message, signature64) signature = Base64.strict_decode64(signature64) verifier = RbNaCl::VerifyKey.new(@verify_key) verifier.verify(signature, message.to_json) rescue StandardError raise VerificationError end end
avg_line_length: 27.645833
max_line_length: 65
alphanum_fraction: 0.721176

hexsha: 1155026d3a40b009c2b9f55eb4c10e0a7a97e816
size: 590
class ApplicationController < Sinatra::Base configure do set :public_folder, 'public' set :views, 'app/views' enable :sessions set :session_secret, "secret" end get '/' do if logged_in? redirect "/users/#{session[:user_id]}" else erb :'index' end end def valid_signup?(params) if params[:username] != "" && params[:email] != "" && params[:password] != "" true else false end end def logged_in? !!session[:user_id] end def current_user @user ||= User.find(session[:user_id]) end end
avg_line_length: 17.878788
max_line_length: 81
alphanum_fraction: 0.583051

hexsha: 6a7a19b42c9f6371a9e798f9dce1110d14c452b2
size: 872
# frozen_string_literal: true module ControllerHelpers def sign_in @account = Spina::Account.create name: 'My Website', preferences: {theme: 'default'} @user = Spina::User.create name: 'admin', email: '[email protected]', password: 'password', admin: true request.session[:spina_user_id] = @user.id end end module FeatureHelpers def sign_in @account = Spina::Account.create name: 'My Website', preferences: {theme: 'default'} @user = Spina::User.create name: 'admin', email: '[email protected]', password: 'password', admin: true visit '/admin/login' fill_in :email, with: @user.email fill_in :password, with: 'password' click_button 'Login' expect(page).to have_content("Pages") end end RSpec.configure do |config| config.include ControllerHelpers, type: :controller config.include FeatureHelpers, type: :system end
avg_line_length: 32.296296
max_line_length: 107
alphanum_fraction: 0.711009

hexsha: d558b18f439c99c732c806fdc22ff731feda3275
size: 142
class AddFavouriteToImages < ActiveRecord::Migration[5.2] def change add_column :images, :favourite, :boolean, default: false end end
avg_line_length: 23.666667
max_line_length: 60
alphanum_fraction: 0.753521

hexsha: ac1693565dd787f1cf9d74a58723d7da17adc431
size: 4,568
# frozen_string_literal: true Rails.application.configure do # Settings specified here will take precedence over those in config/application.rb. # Code is not reloaded between requests. config.cache_classes = true # Eager load code on boot. This eager loads most of Rails and # your application in memory, allowing both threaded web servers # and those relying on copy on write to perform better. # Rake tasks automatically ignore this option for performance. config.eager_load = true # Full error reports are disabled and caching is turned on. config.consider_all_requests_local = false config.action_controller.perform_caching = true # Ensures that a master key has been made available in either ENV["RAILS_MASTER_KEY"] # or in config/master.key. This key is used to decrypt credentials (and other encrypted files). # config.require_master_key = true # Disable serving static files from the `/public` folder by default since # Apache or NGINX already handles this. config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present? # Compress JavaScripts and CSS. config.assets.js_compressor = :uglifier # config.assets.css_compressor = :sass # Do not fallback to assets pipeline if a precompiled asset is missed. config.assets.compile = false # `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb # Enable serving of images, stylesheets, and JavaScripts from an asset server. # config.action_controller.asset_host = 'http://assets.example.com' # Specifies the header that your server uses for sending files. # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX # Store uploaded files on the local file system (see config/storage.yml for options) config.active_storage.service = :local # Mount Action Cable outside main process or domain # config.action_cable.mount_path = nil # config.action_cable.url = 'wss://example.com/cable' # config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ] # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies. # config.force_ssl = true # Use the lowest log level to ensure availability of diagnostic information # when problems arise. config.log_level = :debug # Prepend all log lines with the following tags. config.log_tags = [:request_id] # Use a different cache store in production. # config.cache_store = :mem_cache_store # Use a real queuing backend for Active Job (and separate queues per environment) # config.active_job.queue_adapter = :resque # config.active_job.queue_name_prefix = "blog_#{Rails.env}" config.action_mailer.perform_caching = false # Ignore bad email addresses and do not raise email delivery errors. # Set this to true and configure the email server for immediate delivery to raise delivery errors. # config.action_mailer.raise_delivery_errors = false # Enable locale fallbacks for I18n (makes lookups for any locale fall back to # the I18n.default_locale when a translation cannot be found). config.i18n.fallbacks = true # Send deprecation notices to registered listeners. config.active_support.deprecation = :notify # Use default logging formatter so that PID and timestamp are not suppressed. config.log_formatter = ::Logger::Formatter.new # Use a different logger for distributed setups. # require 'syslog/logger' # config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name') if ENV['RAILS_LOG_TO_STDOUT'].present? 
logger = ActiveSupport::Logger.new(STDOUT) logger.formatter = config.log_formatter config.logger = ActiveSupport::TaggedLogging.new(logger) end # Do not dump schema after migrations. config.active_record.dump_schema_after_migration = false config.action_mailer.default_url_options = { host: ENV['SEND_GRID_HOST'] } config.action_mailer.perform_deliveries = true # https://sendgrid.com/docs/API_Reference/SMTP_API/errors_and_troubleshooting.html config.action_mailer.delivery_method = :smtp config.action_mailer.smtp_settings = { address: 'smtp.sendgrid.net', port: 587, authentication: :plain, user_name: ENV['SEND_GRID_USER_NAME'], password: ENV['SEND_GRID_PASSWORD'], domain: ENV['SEND_GRID_DOMAIN'], enable_starttls_auto: true } config.action_mailer.raise_delivery_errors = false # true for debug end
40.070175
102
0.757881
61eba6bd0f9a8968e27ff8e070356d37ea72af44
1,095
require 'swagger_helper'

describe 'Plans API', type: :request do
  let(:gateway) { double }
  let(:plans) { build_list(:plan, 3) }
  let(:basic) { create(:member) }

  before do
    create(:permission, member: basic, name: :billing, enabled: true )
    allow_any_instance_of(Service::BraintreeGateway).to receive(:connect_gateway).and_return(gateway)
    allow(::BraintreeService::Plan).to receive(:get_plans).with(gateway).and_return(plans)
  end

  path '/billing/plans' do
    get 'Gets a list of billing plans' do
      tags 'Plans'
      operationId "listBillingPlans"
      parameter name: :pageNum, in: :query, type: :number, required: false
      parameter name: :orderBy, in: :query, type: :string, required: false
      parameter name: :order, in: :query, type: :string, required: false
      parameter name: :types, in: :query, schema: { type: :array, items: { type: :string } }, required: false

      response '200', 'billing plans found' do
        schema type: :array, items: { '$ref' => '#/components/schemas/Plan' }

        run_test!
      end
    end
  end
end
36.5
109
0.652968
e2101dcf63d78df5a866a6e3935046ad4a71f42c
853
class PalaceInvite < ApplicationRecord
  attr_accessor :attendees_consent

  belongs_to :form_answer
  has_many :palace_attendees, dependent: :destroy, autosave: true

  validates :form_answer_id, presence: true, uniqueness: true
  validates :attendees_consent, acceptance: { allow_nil: false, accept: "1" }, on: :update

  before_create :set_token

  def prebuild_if_necessary
    attendees = palace_attendees
    records = attendees.select { |a| !a.new_record? }

    unless records.size == attendees_limit
      to_build = attendees_limit - records.size

      to_build.times do
        palace_attendees.build
      end
    end

    self
  end

  def submit!
    self.submitted = true
    save
  end

  def attendees_limit
    2
  end

  private

  def set_token
    self.token = SecureRandom.urlsafe_base64(24)
  end
end
20.309524
90
0.689332
1cba06f9f9329ce520d3720d4767655325be1afb
1,048
require 'rubygems'
require 'sinatra'
require 'sinatra/contrib/all'

get '/' do
  erb "<p>Hello Epta</p>"
end

get '/about' do
  erb :about
end

get '/contacts' do
  erb :contacts
end

get '/visit' do
  erb :visit
end

post '/visit' do
  @customer = params[:customer]
  @date = params[:date]
  @phone = params[:phone]
  @barber = params[:barber]
  @color = params[:color]

  dbase = File.open 'public/customers.txt', 'a'
  dbase.puts "#{@customer} | #{@date} | #{@phone} | #{@barber} | #{@color}\n"
  dbase.close

  erb :visit
end

post '/contacts' do
  @email = params[:email]
  @report = params[:report]

  messages = File.open 'public/messages.txt', 'a'
  messages.puts "#{@email} | #{@report}\n"
  messages.close

  erb "<p>Спасибо за ваш отзыв</p> <a href='/'>HOME</a>"
end

get '/admen' do
  erb :admen
end

post '/admen' do
  @lohin = params[:lohin]
  @parol = params[:parol]

  if @lohin == 'admin' && @parol == 'admin'
    f = File.open 'public/customers.txt', 'r'
    @listok = f.read
    f.close
  else
    @irror = 'Access denied'
  end
end
17.180328
78
0.614504
0323da8c55a7b05c3911a0da3474b223082f726e
1,275
module Aws
  module EventStream
    module Errors

      # Raised when reading bytes exceed buffer total bytes
      class ReadBytesExceedLengthError < RuntimeError
        def initialize(target_byte, total_len)
          msg = "Attempting reading bytes to offset #{target_byte} exceeds"\
            " buffer length of #{total_len}"
          super(msg)
        end
      end

      # Raise when insufficient bytes of a message is received
      class IncompleteMessageError < RuntimeError
        def initialize(*args)
          super('Not enough bytes for event message')
        end
      end

      class PreludeChecksumError < RuntimeError
        def initialize(*args)
          super('Prelude checksum mismatch')
        end
      end

      class MessageChecksumError < RuntimeError
        def initialize(*args)
          super('Message checksum mismatch')
        end
      end

      class EventPayloadLengthExceedError < RuntimeError
        def initialize(*args)
          super("Payload length of a message should be under 16mb.")
        end
      end

      class EventHeadersLengthExceedError < RuntimeError
        def initialize(*args)
          super("Encoded headers length of a message should be under 128kb.")
        end
      end

    end
  end
end
26.5625
77
0.63451
5d13e033e62f784e7c413bb08171491b78eb7c02
66
load(Rails.root.join( 'db', 'seeds', "#{Rails.env.downcase}.rb"))
33
65
0.636364
01ed0778c0d0102de0303f3a440bd95c6463cb18
2,180
# http://www.mudynamics.com # http://labs.mudynamics.com # http://www.pcapr.net require 'mu/testcase' require 'mu/pcap/io_wrapper' require 'mu/pcap/io_pair' module Mu class Pcap class IOWrapper class Test < Mu::TestCase class MessageReader def initialize msg_size=10 @msg_size = msg_size end def read_message! bytes, state state[:bytes_read] ||= 0 if bytes.length >= @msg_size msg = bytes.slice!(0,@msg_size) msg.upcase! state[:bytes_read] += @msg_size end msg end def record_write bytes, state state[:bytes_sent] ||= 0 state[:bytes_sent] += bytes.size end end def test_basics inner, other = IOPair.stream_pair wrapped = IOWrapper.new inner, MessageReader.new # Reads other.write "01234567890123" assert_equal "", wrapped.unread assert_equal "0123456789", wrapped.read assert_equal "0123", wrapped.unread assert_equal 10, wrapped.state[:bytes_read] assert_nil wrapped.read other.write "456789" assert_equal "0123456789", wrapped.read assert_equal "", wrapped.unread assert_equal 20, wrapped.state[:bytes_read] other.write "abcdefghij" assert_equal "ABCDEFGHIJ", wrapped.read assert_equal 30, wrapped.state[:bytes_read] # Writes wrapped.write "hi mom" assert_equal 6, wrapped.state[:bytes_sent] assert_equal "hi mom", other.read assert_equal "", other.read end def test_too_big_receive # Message at max size. inner, other = IOPair.stream_pair wrapped = IOWrapper.new inner, MessageReader.new(MAX_RECEIVE_SIZE + 2) big = "a" * MAX_RECEIVE_SIZE other.write big wrapped.read # Message over max size. too_big = big + "1" other.write too_big e = assert_raises(RuntimeError) do wrapped.read end assert_match "Maximum message size (#{MAX_RECEIVE_SIZE}) exceeded", e.message end end end end end
26.91358
85
0.603211
91a016345a9d0c0bc7e6d1495c1699b2dabc89d2
207
require 'yaml'
require 'rdoba/os'
require 'schemic/generator'

module Schemic::Generator::YAML
  class << self
    def load_from file_name
      ::YAML.load_file(file_name).to_os
    end
  end
end
15.923077
42
0.676329
0853e30ed61453b5f425bc3329a9a8dcebcb0130
2,305
# encoding: utf-8 require 'lokka' module Lokka class App < Sinatra::Base include Padrino::Helpers::TranslationHelpers configure :development do register Sinatra::Reloader end configure do enable :method_override, :raise_errors, :static, :sessions YAML::ENGINE.yamler = 'syck' if YAML.const_defined?(:ENGINE) register Padrino::Helpers set :app_file, __FILE__ set :root, File.expand_path('../../..', __FILE__) set :public_folder => Proc.new { File.join(root, 'public') } set :views => Proc.new { public_folder } set :theme => Proc.new { File.join(public_folder, 'theme') } set :supported_templates => %w(erb haml slim erubis) set :supported_stylesheet_templates => %w(scss sass) set :scss, Compass.sass_engine_options set :sass, Compass.sass_engine_options set :per_page, 10 set :admin_per_page, 200 set :default_locale, 'en' set :haml, :ugly => false, :attr_wrapper => '"' supported_stylesheet_templates.each do |style| set style, :style => :expanded end ::I18n.load_path += Dir["#{root}/i18n/*.yml"] helpers Lokka::Helpers helpers Lokka::RenderHelper use Rack::Session::Cookie, :expire_after => 60 * 60 * 24 * 12 set :session_secret, 'development' if development? register Sinatra::Flash Lokka.load_plugin(self) Lokka::Database.new.connect end require 'lokka/app/admin.rb' require 'lokka/app/entries.rb' not_found do if custom_permalink? if /\/$/ =~ request.path return redirect(request.path.sub(/\/$/,"")) elsif correct_path = custom_permalink_fix(request.path) return redirect(correct_path) elsif @entry = custom_permalink_entry(request.path) status 200 return setup_and_render_entry end end if output = render_any(:'404', :layout => false) output else haml :'404', :views => 'public/lokka', :layout => false end end error do 'Error: ' + env['sinatra.error'].name end get '/*.css' do |path| content_type 'text/css', :charset => 'utf-8' render_any path.to_sym, :views => settings.views end run! if app_file == $0 end end
29.935065
66
0.62039
ac571fe26a9686e91d6dbb0ea9e2ece555f01edf
669
# == Schema Information
#
# Table name: fundraiser_tracker_relations
#
# id :integer not null, primary key
# fundraiser_id :integer not null
# tracker_id :integer not null
# created_at :datetime not null
# updated_at :datetime not null
#
# Indexes
#
# index_fundraiser_tracker_relations_on_ids (fundraiser_id,tracker_id) UNIQUE
#

class FundraiserTrackerRelation < ActiveRecord::Base
  attr_accessible :tracker, :fundraiser

  belongs_to :fundraiser
  belongs_to :tracker

  validates :fundraiser_id, presence: true
  validates :tracker_id, presence: true, uniqueness: { scope: :fundraiser_id }
end
26.76
79
0.701046
2685a29d3210c775cb1dff1324b1a2ea0064f57c
3,822
require_relative "../../test_helper" require_relative "flow_unit_test_helper" require "smart_answer_flows/marriage-abroad" module SmartAnswer class MarriageAbroadFlowTest < ActiveSupport::TestCase include FlowUnitTestHelper setup do @calculator = Calculators::MarriageAbroadCalculator.new @flow = MarriageAbroadFlow.build world_location = stub("WorldLocation", slug: "afghanistan", name: "Afghanistan", fco_organisation: nil) WorldLocation.stubs(:all).returns([world_location]) WorldLocation.stubs(:find).with("afghanistan").returns(world_location) end should "start with the country_of_ceremony? question" do assert_equal :country_of_ceremony?, @flow.start_state.current_node end context "when answering country_of_ceremony? question" do setup do Calculators::MarriageAbroadCalculator.stubs(:new).returns(@calculator) setup_states_for_question(:country_of_ceremony?, responding_with: "afghanistan") end should "instantiate and store calculator" do assert_same @calculator, @new_state.calculator end should "store parsed response on calculator as ceremony_country" do assert_equal "afghanistan", @calculator.ceremony_country end context "responding with an invalid ceremony country" do setup do @calculator.stubs(:valid_ceremony_country?).returns(false) end should "raise an exception" do assert_raise(SmartAnswer::InvalidResponse) do setup_states_for_question(:country_of_ceremony?, responding_with: "unknown-country", initial_state: { calculator: @calculator }) end end end end context "when answering legal_residency? question" do setup do setup_states_for_question(:legal_residency?, responding_with: "uk", initial_state: { calculator: @calculator, }) end should "store parsed response on calculator as resident_of" do assert_equal "uk", @calculator.instance_variable_get("@resident_of") end end context "when answering what_is_your_partners_nationality? question" do setup do setup_states_for_question(:what_is_your_partners_nationality?, responding_with: "partner_british", initial_state: { calculator: @calculator, }) end should "store parsed response on calculator as partner_nationality" do assert_equal "partner_british", @calculator.instance_variable_get("@partner_nationality") end end context "when answering partner_opposite_or_same_sex? question" do setup do @calculator.ceremony_country = "france" setup_states_for_question(:partner_opposite_or_same_sex?, responding_with: "same_sex", initial_state: { calculator: @calculator, }) end should "store parsed response on calculator as sex_of_your_partner" do assert_equal "same_sex", @calculator.instance_variable_get("@sex_of_your_partner") end end context "when answering marriage_or_pacs? question" do setup do setup_states_for_question(:marriage_or_pacs?, responding_with: "marriage", initial_state: { calculator: @calculator, }) end should "store parsed response on calculator as marriage_or_pacs" do assert_equal "marriage", @calculator.instance_variable_get("@marriage_or_pacs") end end end end
35.388889
97
0.644165
f7b02f117a8a5686e01185801fd50c6ed78e0d30
583
class CommentsController < ApplicationController
  http_basic_authenticate_with name: "dhh", password: "secret", only: :destroy

  def create
    @article = Article.find(params[:article_id])
    @comment = @article.comments.create(comment_params)
    redirect_to article_path(@article)
  end

  def destroy
    @article = Article.find(params[:article_id])
    @comment = @article.comments.find(params[:id])
    @comment.destroy
    redirect_to article_path(@article)
  end

  private

    def comment_params
      params.require(:comment).permit(:commenter, :body)
    end
end
25.347826
78
0.713551
ff4b7a820b1223a4dfcb87b7de0ee5f922200356
138
Sequel.migration do
  change do
    alter_table :geographical_area_memberships_oplog do
      add_column :hjid, Integer
    end
  end
end
17.25
55
0.746377
1a08dd7fd9c376d3b25abd599c9bda2b39652968
496
cask "panoply" do version "4.12.5" sha256 "4f882d7bde7ba39abadba79470d906f941605b2ccfacdd6915376f981d56ea57" url "https://www.giss.nasa.gov/tools/panoply/download/PanoplyMacOS-#{version}.dmg" appcast "https://www.giss.nasa.gov/tools/panoply/download/" name "Panoply netCDF, HDF and GRIB Data Viewer" desc "Plot geo-referenced data from netCDF, HDF, and GRIB" homepage "https://www.giss.nasa.gov/tools/panoply/" app "Panoply.app" caveats do depends_on_java "8+" end end
29.176471
84
0.743952
ab75c55bf8207f39ef228602445eef09b7ce377e
1,613
# frozen_string_literal: true

Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # The test environment is used exclusively to run your application's
  # test suite. You never need to work with it otherwise. Remember that
  # your test database is "scratch space" for the test suite and is wiped
  # and recreated between test runs. Don't rely on the data there!
  config.cache_classes = true

  # Do not eager load code on boot. This avoids loading your whole application
  # just for the purpose of running a single test. If you are using a tool that
  # preloads Rails for running tests, you may have to set it to true.
  config.eager_load = false

  # Show full error reports and disable caching.
  config.consider_all_requests_local       = true
  config.action_controller.perform_caching = false

  # Raise exceptions instead of rendering exception templates.
  config.action_dispatch.show_exceptions = false

  # Disable request forgery protection in test environment.
  config.action_controller.allow_forgery_protection = false

  # Tell Action Mailer not to deliver emails to the real world.
  # The :test delivery method accumulates sent emails in the
  # ActionMailer::Base.deliveries array.
  config.action_mailer.delivery_method = :test

  # Randomize the order test cases are executed.
  config.active_support.test_order = :random

  # Print deprecation notices to the stderr.
  config.active_support.deprecation = :stderr

  # Raises error for missing translations
  # config.action_view.raise_on_missing_translations = true
end
40.325
85
0.776813
f7e15b131a5d9c731f12d39a2dad7d0e8dbd31fe
2,240
module Stacks::Services::CanBeNatted NatConfig = Struct.new(:dnat_enabled, :snat_enabled, :public_network, :private_network, :tcp, :udp) do def create_rule(environment, type, hostname, site, port) public_uri = uri(hostname, environment.domain(site, public_network), port) private_uri = uri(hostname, environment.domain(site, private_network), port) case type when :dnat Stacks::Services::NatRule.new(public_uri, private_uri, tcp, udp) when :snat Stacks::Services::NatRule.new(private_uri, public_uri, tcp, udp) end end def networks [public_network, private_network] end private def uri(hostname, domain, port) URI.parse("http://#{hostname}.#{domain}:#{port}") end end def self.extended(object) object.configure end attr_accessor :nat_config def configure @nat_config = NatConfig.new(false, false, :front, :prod, true, false) end def configure_nat(dnat_enabled, snat_enabled, public_network, private_network, tcp, udp) @nat_config = NatConfig.new(dnat_enabled, snat_enabled, public_network, private_network, tcp, udp) end def configure_dnat(public_network, private_network, tcp, udp) configure_nat(true, nat_config.snat_enabled, public_network, private_network, tcp, udp) end def configure_snat(public_network, private_network, tcp, udp) configure_nat(nat_config.dnat_enabled, true, public_network, private_network, tcp, udp) end def calculate_nat_rules(type, site, requirements) hostnames = requirements.map do |requirement| case requirement when :nat_to_host children.map(&:hostname) when :nat_to_vip ["#{environment.name}-#{name}-vip"] end end.flatten case type when :dnat nat_config.dnat_enabled ? create_rules_for_hosts(hostnames, site, :dnat) : [] when :snat nat_config.snat_enabled ? create_rules_for_hosts(hostnames, site, :snat) : [] end end private def create_rules_for_hosts(hostnames, site, type) ports.keys.map do |port_name| hostnames.map do |hostname| nat_config.create_rule(environment, type, hostname, site, ports[port_name]['port']) end end.flatten end end
29.473684
104
0.701339
388922ba9f7cdc3098d3ea4d1d3e0877c2a3d7c9
284
list_item :bit_field, :type, :reserved do
  register_map do
    reserved
  end

  rtl do
    generate_code_from_template :bit_field

    def default_value
      hex(0, width)
    end
  end

  ral do
    access :ro
    hdl_path { "g_#{bit_field.name}.u_bit_field.i_value" }
  end
end
14.947368
58
0.665493
6abfe3ba452911c535c9e6f1367aaeec90c274bd
1,084
$LOAD_PATH.push File.expand_path("../lib", __FILE__)
require 'r2d2/version'

Gem::Specification.new do |s|
  s.name        = "r2d2"
  s.version     = R2D2::VERSION
  s.platform    = Gem::Platform::RUBY
  s.authors     = ["ActBlue Technical Services"]
  s.email       = ["[email protected]"]
  s.homepage    = "https://github.com/actblue/r2d2"
  s.summary     = "Google Pay payment token decryption library"
  s.description = "Given an (encrypted) Google Pay token, verify and decrypt it"

  s.files         = `git ls-files`.split("\n")
  s.test_files    = `git ls-files -- {test,spec,features}/*`.split("\n")
  s.executables   = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
  s.require_paths = ["lib"]

  s.required_ruby_version = ">= 2.2"

  s.add_runtime_dependency 'hkdf'

  s.add_development_dependency "bundler", "~> 2"
  s.add_development_dependency "rake", "~> 12.0"
  s.add_development_dependency "minitest", "~> 5.0"
  s.add_development_dependency "timecop"
  s.add_development_dependency "pry-byebug"
end
37.37931
86
0.632841
2879ec3a5a0fdf95b93a18ff06a0be7ca80826a7
1,058
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.

ActiveRecord::Schema.define(version: 20190809150628) do

  create_table "users", force: :cascade do |t|
    t.string "name"
    t.string "email"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
    t.string "password_digest"
    t.index ["email"], name: "index_users_on_email", unique: true
  end

end
42.32
86
0.759924
1d5d683e3f7cb321d7f5301263ae4fab1cb20ec4
1,038
module Fog
  module Compute
    class HP
      class Real

        # List all floating IP addresses
        #
        # ==== Returns
        # * response<~Excon::Response>:
        #   * body<~Hash>:
        #     * 'floating_ips'<~Array> -
        #       * 'id'<~Integer> - Id of the address
        #       * 'ip'<~String> - Floating IP of the address
        #       * 'instance_id'<~String> - Id of the associated server instance
        #       * 'fixed_ip'<~String> - Fixed IP of the address
        def list_addresses
          request(
            :expects => 200,
            :method  => 'GET',
            :path    => "os-floating-ips.json"
          )
        end

      end

      class Mock
        def list_addresses
          response = Excon::Response.new
          addresses = []
          addresses = self.data[:addresses].values unless self.data[:addresses].nil?
          response.status = 200
          response.body = { 'floating_ips' => addresses }
          response
        end
      end

    end
  end
end
24.714286
84
0.492293
6a701c9ee1c41f77ebc3ec1cfa99157edd90f6a9
5,931
require 'sinatra' require "sinatra/reloader" if development? require 'byebug' if development? require 'dotenv' require 'haml' require 'sequel' require 'time' require "httparty" require 'json' require 'sinatra/flash' require 'sinatra/json' require 'sinatra/cookies' require_relative "lib/login_helper" require_relative 'lib/view_helper' require_relative './services/claim_service' require_relative './services/geo_service' require_relative './services/AwsLambda/connection' require_relative "models/user" Dotenv.load enable :sessions set :session_secret, ENV["SECRET_KEY_BASE"] def test_db_connection Sequel.connect(ENV['SNAP_DB_PG_URL'] || "postgres://localhost/neighbourly_test") end configure do db = Sequel.connect('postgres://localhost/neighbourly') set :db, db end configure :production do db = Sequel.connect(ENV['DATABASE_URL']) set :db, db end configure :test do set :db, test_db_connection end Sequel.datetime_class = DateTime get '/' do if authorised? redirect '/map' else haml :main, locals: {page: 'main', body: 'main'} end end def login_attempt #Primary login method is e-mail #If a param is passed (form or URL) - use that #If a cookie is set - use that secondarily #If no e-mail is present, send to the frontpage if params.has_key?("email") authorise(params[:email].strip) elsif cookies.has_key?("email") authorise(cookies[:email]) else redirect '/' end user_params = Hash.new fields = ["email", "first_name", "last_name", "phone", "postcode"] #Check that user exists for current e-mail if authorised? redirect "/map" #If e-mail is set in param - go straight to the user_details page elsif params.has_key?("email") redirect "/user_details?email=#{CGI.escape(user_email)}" #If user does not exist and all fields exist in cookie - create_user elsif fields.all? {|s| cookies.key? s} fields.each do |key_get| user_params[key_get] = cookies[key_get] end create_user(user_params) redirect "/map" end end get '/login' do login_attempt end post '/login' do login_attempt end get "/user_details" do haml :user_details, locals: {page: "user_details", email: params[:email] } end def create_user(user_params) puts "Creating user: #{user_params}" user = User.new(settings.db) #Submit user details to database #And, Catch double-submission errors and send details to Zapier begin if user.create!(user_params) #Send user details to the Zapier endpoint if ENV["ZAP_API_ON"] == "True" HTTParty.post(ENV["ZAP_API"],:body => user_params, timeout: 2) end #Once the user is created - authorise them authorise(user_params['email']) redirect "/map" else #TODO - needs validation flash[:error] = "Please enter correct details." haml :user_details end #Skip all errors and retry auth without ZAP_API call #REDUNDANT - Skip details re-entry if e-mail already exists in database #REDUNDANT - Skip if HTTParty fails to make the API call rescue StandardError, Sequel::UniqueConstraintViolation, HTTParty::Error => e puts "Error in User Details Submission: #{e.message}" authorise(user_params['email']) redirect "/map" end end post "/user_details" do create_user(params[:user_details]) end get '/map' do authorised do haml :map, locals: {page: 'map'} end end get '/logout' do session.clear flash[:notice] = 'You have been logged out.' 
redirect '/' end def get_meshblocks_with_status(json) slugs = Array.new json["features"].each do |slug| slugs << slug["properties"]["slug"] end claim_service = ClaimService.new(settings.db) claimed = Array.new centrally_claimed = Array.new claimed_by_you = Array.new claim_service.get_mesh_blocks(slugs).each do |claim| if is_admin?(claim[:mesh_block_claimer]) centrally_claimed << claim[:mesh_block_slug] elsif claim[:mesh_block_claimer] == session[:user_email] claimed_by_you << claim[:mesh_block_slug] else claimed << claim[:mesh_block_slug] end end json["features"].each_with_index { |slug, index| if centrally_claimed.include? slug["properties"]["slug"] json["features"][index]["properties"]["claim_status"] = "quarantine" elsif claimed.include? slug["properties"]["slug"] json["features"][index]["properties"]["claim_status"] = "claimed" elsif claimed_by_you.include? slug["properties"]["slug"] json["features"][index]["properties"]["claim_status"] = "claimed_by_you" else json["features"][index]["properties"]["claim_status"] = "unclaimed" end } json end #For loading new SA1s when scrolling on the map get '/meshblocks_bounds' do authorised do query = {'swlat' => params[:swlat], 'swlng' => params[:swlng], 'nelat' => params[:nelat], 'nelng' => params[:nelng]} #interface with darren's tool goes here lambda_connection = AwsLambda::Connection.new #interface with local claims table goes here data = lambda_connection.execute(query) if data['features'] == nil puts "404 due to map location returning no meshblocks" status 404 else json get_meshblocks_with_status(data) end end end #For finding out the bounds of a postcode get '/pcode_get_bounds' do authorised do geo_service = GeoService.new(settings.db) bounds = geo_service.pcode_bounds(params[:pcode]) json bounds[0] end end post '/claim_meshblock/:id' do authorised do claim_service = ClaimService.new(settings.db) claim_service.claim(params['id'], user_email) status 200 end end post '/unclaim_meshblock/:id' do authorised do claim_service = ClaimService.new(settings.db) #TODO - return error on fail if is_admin?(user_email) claim_service.admin_unclaim(params['id']) else claim_service.unclaim(params['id'], user_email) end status 200 end end
25.675325
82
0.701568
1a0e6bd0d6dad1eae05b39c7e65212f90f69a6f5
1,359
require File.expand_path(File.dirname(__FILE__) + '/spec_helper')

describe GitIssue do
  describe '#main' do
    context 'config issue.type is not configured' do
      it{
        GitIssue::Helper.should_receive(:configured_value).with("type").and_return("")
        GitIssue::Helper.should_receive(:configured_value).with("apikey").and_return("some value")
        GitIssue::Helper.should_receive(:configure_error).with( "type (redmine | github)", "git config issue.type redmine")

        lambda { GitIssue.main([]) }.should raise_error(SystemExit)
      }
    end

    context 'invalid issue.type' do
      it{
        GitIssue::Helper.should_receive(:configured_value).with("type").and_return("unknown-type")
        GitIssue::Helper.should_receive(:configured_value).with("apikey").and_return("some value")

        lambda { GitIssue.main([]) }.should raise_error(SystemExit)
      }
    end
  end

  describe '#its_klass_of' do
    context 'unknown type' do
      specify { lambda { GitIssue::Helper.its_klass_of("unknown_type") }.should raise_error }
    end

    context 'type is redmine' do
      subject { GitIssue::Helper.its_klass_of("redmine") }
      it { should == GitIssue::Redmine }
    end

    context 'type is github' do
      subject { GitIssue::Helper.its_klass_of("github") }
      it { should == GitIssue::Github}
    end
  end
end
33.975
124
0.673289
7acdfff03c98b99ee654cff16f88dbecfd1fe734
978
cask 'bettertouchtool' do
  if MacOS.version <= :mavericks
    version '2.05'
    sha256 '41013cfeffee286a038363651db3dd315ff3a1e0cf07774d9ce852111be50a5a'

    # bettertouchtool.net/releases was verified as official when first introduced to the cask
    url "https://bettertouchtool.net/releases/btt#{version}_final_10_9.zip"
  else
    version '2.846'
    sha256 'd00e1e003dacecb5d17bfd10993191cb2c68ed738d84df162fece263893336b6'

    # bettertouchtool.net/releases was verified as official when first introduced to the cask
    url "https://bettertouchtool.net/releases/btt#{version}.zip"
    appcast 'https://bettertouchtool.net/releases/'
  end

  name 'BetterTouchTool'
  homepage 'https://folivora.ai/'

  auto_updates true

  app 'BetterTouchTool.app'

  uninstall login_item: 'BetterTouchTool'

  zap trash: [
               '~/Library/Preferences/com.hegenberg.BetterTouchTool.plist',
               '~/Library/Application Support/BetterTouchTool',
             ]
end
31.548387
93
0.735174
b95b0fbd8b3815e68383a68d4abffd62115a8cce
239
# frozen_string_literal: true

require 'botan/digest'

md5 = Botan::Digest::MD5.new
md5 << 'some '
md5 << 'data'
puts md5.hexdigest

hash = Botan::Digest.new('Comb4P(SHA-160,RIPEMD-160)')
hash << 'test'
puts Botan.hex_encode(hash.digest)
17.071429
54
0.707113
115d3b8318285e9fb52c04ab4767000a45282f2b
72
module Fastlane
  module AwsDeviceFarm
    VERSION = "0.3.21"
  end
end
12
22
0.694444
f7e7504b0f183e5274360ba9c01a3ff98e3ee6eb
2,283
# frozen_string_literal: true

require 'graphql_devise/mutations/base'
require 'graphql_devise/mutations/login'
require 'graphql_devise/mutations/logout'
require 'graphql_devise/mutations/resend_confirmation'
require 'graphql_devise/mutations/resend_confirmation_with_token'
require 'graphql_devise/mutations/send_password_reset'
require 'graphql_devise/mutations/send_password_reset_with_token'
require 'graphql_devise/mutations/sign_up'
require 'graphql_devise/mutations/register'
require 'graphql_devise/mutations/update_password'
require 'graphql_devise/mutations/update_password_with_token'
require 'graphql_devise/mutations/confirm_registration_with_token'

module GraphqlDevise
  module DefaultOperations
    MUTATIONS = {
      login:                           { klass: GraphqlDevise::Mutations::Login, authenticatable: true },
      logout:                          { klass: GraphqlDevise::Mutations::Logout, authenticatable: true },
      sign_up:                         { klass: GraphqlDevise::Mutations::SignUp, authenticatable: true, deprecation_reason: 'use register instead' },
      register:                        { klass: GraphqlDevise::Mutations::Register, authenticatable: true },
      update_password:                 { klass: GraphqlDevise::Mutations::UpdatePassword, authenticatable: true, deprecation_reason: 'use update_password_with_token instead' },
      update_password_with_token:      { klass: GraphqlDevise::Mutations::UpdatePasswordWithToken, authenticatable: true },
      send_password_reset:             { klass: GraphqlDevise::Mutations::SendPasswordReset, authenticatable: false, deprecation_reason: 'use send_password_reset_with_token instead' },
      send_password_reset_with_token:  { klass: GraphqlDevise::Mutations::SendPasswordResetWithToken, authenticatable: false },
      resend_confirmation:             { klass: GraphqlDevise::Mutations::ResendConfirmation, authenticatable: false, deprecation_reason: 'use resend_confirmation_with_token instead' },
      resend_confirmation_with_token:  { klass: GraphqlDevise::Mutations::ResendConfirmationWithToken, authenticatable: false },
      confirm_registration_with_token: { klass: GraphqlDevise::Mutations::ConfirmRegistrationWithToken, authenticatable: true }
    }.freeze
  end
end
69.181818
185
0.761279
1dddfa7ca8d11ee557d7a6df6185c665e2fc0600
1,031
begin
  require_relative "lib/net/smtp/version"
rescue LoadError
  # Fallback to load version file in ruby core repository
  require_relative "version"
end

Gem::Specification.new do |spec|
  spec.name          = "net-smtp"
  spec.version       = Net::SMTP::VERSION
  spec.authors       = ["Yukihiro Matsumoto"]
  spec.email         = ["[email protected]"]

  spec.summary       = %q{Simple Mail Transfer Protocol client library for Ruby.}
  spec.description   = %q{Simple Mail Transfer Protocol client library for Ruby.}
  spec.homepage      = "https://github.com/ruby/net-smtp"
  spec.license       = "BSD-2-Clause"

  spec.metadata["homepage_uri"] = spec.homepage
  spec.metadata["source_code_uri"] = spec.homepage

  spec.files         = Dir.chdir(File.expand_path('..', __FILE__)) do
    `git ls-files -z 2>/dev/null`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
  end
  spec.bindir        = "exe"
  spec.executables   = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ["lib"]
end
36.821429
97
0.656644
33f6dfe974efa82cd5bbe27873857753ab3fb7ac
1,061
require "rails_helper" RSpec.feature "Searching Friends" do before do user1 = User.create!(email: "[email protected]", username: "example_user1", password: "password1") @user2 = User.create!(email: "[email protected]", username: "example_user2", password: "password2") login_as(user1) visit '/' click_link "Friends" end scenario "a user follows other user searched by user name" do fill_in "Search Friends", with: "example_user2" find(:css, '.search-friends-button').click expect(page).to have_content(@user2.username) click_link "Follow" expect(page).to have_content(@user2.username) expect(page).to have_content("Unfollow") end scenario "a user folloss other user searched by user email" do fill_in "Search Friends", with: "[email protected]" find(:css, '.search-friends-button').click expect(page).to have_content(@user2.username) click_link "Follow" expect(page).to have_content(@user2.username) expect(page).to have_content("Unfollow") end end
30.314286
111
0.711593
f85f9972a2bd7b2908a052d353ef46856a55ab34
9,409
# frozen_string_literal: true require 'spec_helper' RSpec.describe ElasticAPM do describe 'life cycle' do it 'starts and stops the agent', :mock_intake do MockIntake.instance.stub! ElasticAPM.start expect(ElasticAPM::Agent).to be_running ElasticAPM.stop expect(ElasticAPM::Agent).to_not be_running end end describe '.restart', :mock_intake do before { MockIntake.instance.stub! } after { ElasticAPM.stop } context 'when the agent is not running' do it 'starts the agent' do ElasticAPM.restart expect(ElasticAPM::Agent).to be_running end end context 'when the agent is already running' do before { ElasticAPM.start } it 'restarts the agent' do expect(ElasticAPM::Agent).to receive(:stop) .at_least(:once).and_call_original expect(ElasticAPM::Agent).to receive(:start) .once.and_call_original ElasticAPM.restart expect(ElasticAPM::Agent).to be_running end end context 'when a new config is passed' do it 'restarts the agent with the new config' do ElasticAPM.restart(api_buffer_size: 10) expect(ElasticAPM::Agent).to be_running expect(ElasticAPM.agent.config.api_buffer_size).to be(10) end end end context 'when running', :mock_intake do before do MockIntake.instance.stub! ElasticAPM.start end let(:agent) { ElasticAPM.agent } describe '.log_ids' do context 'with no current_transaction' do it 'returns empty string' do expect(ElasticAPM.log_ids).to eq('') end end context 'with a current transaction' do it 'includes transaction and trace ids' do transaction = ElasticAPM.start_transaction 'Test' expect(ElasticAPM.log_ids).to eq( "transaction.id=#{transaction.id} trace.id=#{transaction.trace_id}" ) end end context 'with a current_span' do it 'includes transaction, span and trace ids' do trans = ElasticAPM.start_transaction span = ElasticAPM.start_span 'Test' expect(ElasticAPM.log_ids).to eq( "transaction.id=#{trans.id} span.id=#{span.id} " \ "trace.id=#{trans.trace_id}" ) end end context 'when passed a block' do it 'yields each id' do transaction = ElasticAPM.start_transaction span = ElasticAPM.start_span 'Test' ElasticAPM.log_ids do |transaction_id, span_id, trace_id| expect(transaction_id).to eq(transaction.id) expect(span_id).to eq(span.id) expect(trace_id).to eq(transaction.trace_id) end end end end describe '.start_transaction' do it 'delegates to agent' do expect(ElasticAPM.agent).to receive(:start_transaction) ElasticAPM.start_transaction end end describe '.end_transaction' do it 'delegates to agent' do expect(ElasticAPM.agent).to receive(:end_transaction) ElasticAPM.end_transaction end end describe '.with_transaction' do subject do ElasticAPM.with_transaction do 'original result' end end it 'delegates to agent' do expect(ElasticAPM.agent).to receive(:start_transaction) expect(ElasticAPM.agent).to receive(:end_transaction) subject end it { should eq 'original result' } end describe '.start_span' do it 'starts a span' do expect(ElasticAPM.agent).to receive(:start_span) ElasticAPM.start_span 'Test' end end describe '.end_span' do it 'ends current span' do expect(ElasticAPM.agent).to receive(:end_span) ElasticAPM.end_span end end describe '.with_span' do subject do ElasticAPM.with_span('Block test') do 'original result' end end it 'wraps block in span' do expect(ElasticAPM.agent).to receive(:start_span) expect(ElasticAPM.agent).to receive(:end_span) subject end it { should eq 'original result' } end it { should delegate :current_transaction, to: agent } it do should delegate :report, to: agent, args: ['E', { context: nil, handled: nil }] end it do should delegate :report_message, to: agent, args: ['NOT OK', { 
backtrace: Array, context: nil }] end it { should delegate :set_label, to: agent, args: [nil, nil] } it { should delegate :set_custom_context, to: agent, args: [nil] } it { should delegate :set_user, to: agent, args: [nil] } describe '#add_filter' do it { should delegate :add_filter, to: agent, args: [nil, -> {}] } it 'needs either callback or block' do expect { subject.add_filter(:key) }.to raise_error(ArgumentError) expect do subject.add_filter(:key) { 'ok' } end.to_not raise_error end end after { ElasticAPM.stop } end context 'async spans', :intercept do context 'transaction parent' do it 'allows async spans' do with_agent do transaction = ElasticAPM.start_transaction span1 = Thread.new do ElasticAPM.with_span( 'job 1', parent: transaction, sync: false ) { |span| span } end.value span2 = Thread.new do ElasticAPM.with_span( 'job 2', parent: transaction, sync: false ) { |span| span } end.value transaction.done expect(transaction.started_spans).to eq(2) expect(span1.parent_id).to eq(span2.parent_id) expect(span1.parent_id).to eq( transaction.trace_context.child.parent_id ) expect(span1.context.sync).to be(false) expect(span2.parent_id).to eq( transaction.trace_context.child.parent_id ) expect(span2.context.sync).to be(false) end end context 'span created after transaction is ended' do it 'allows async spans' do with_agent do transaction = ElasticAPM.start_transaction transaction.done span1 = Thread.new do ElasticAPM.with_span( 'job 1', parent: transaction, sync: false ) { |span| span } end.value span2 = Thread.new do ElasticAPM.with_span( 'job 2', parent: transaction, sync: false ) { |span| span } end.value transaction.done expect(transaction.started_spans).to eq(2) expect(span1.parent_id).to eq(span2.parent_id) expect(span1.context.sync).to be(false) expect(span1.parent_id).to eq( transaction.trace_context.child.parent_id ) expect(span2.context.sync).to be(false) expect(span2.parent_id).to eq( transaction.trace_context.child.parent_id ) end end end context '#with_span' do it 'allows async spans' do with_agent do transaction = ElasticAPM.start_transaction span1 = Thread.new do ElasticAPM.with_span( 'job 1', parent: transaction, sync: false ) { |span| span } end.value span2 = Thread.new do ElasticAPM.with_span('job 2', parent: transaction) { |span| span } end.value transaction.done expect(transaction.started_spans).to eq(2) expect(span1.parent_id).to eq(span2.parent_id) expect(span1.parent_id).to eq( transaction.trace_context.child.parent_id ) expect(span2.parent_id).to eq( transaction.trace_context.child.parent_id ) end end end end context 'span parent' do it 'allows async spans' do with_agent do transaction = ElasticAPM.start_transaction span1 = ElasticAPM.with_span 'run all the jobs' do |span| span2 = Thread.new do ElasticAPM.with_span('job 1', parent: span) { |s| s } end.value expect(span2.parent_id).to eq(span.trace_context.child.parent_id) expect(span2.context.sync).to be nil span3 = Thread.new do ElasticAPM.with_span('job 2', parent: span) { |s| s } end.value expect(span3.parent_id).to eq(span.trace_context.child.parent_id) expect(span3.context.sync).to be nil span end transaction.done expect(transaction.started_spans).to eq(3) expect(span1.parent_id).to eq( transaction.trace_context.child.parent_id ) expect(span1.context.sync).to be nil end end end end context 'when not running' do it 'still yields block' do ran = false ElasticAPM.with_transaction { ran = true } expect(ran).to be true end end end
28.950769
80
0.580721
ac037fe3e62567906f1bc3a0f829e75095ded2ee
1,003
require 'spec_helper'

describe 'Imdb::Season' do
  subject { Imdb::Serie.new('1520211') }
  let(:season) { subject.seasons.first }

  it 'has 6 episodes' do
    expect(season.episodes.size).to eq(6)
  end

  it 'can fetch a specific episode' do
    episode = season.episode(1)

    expect(episode.title).to match(/Days Gone By/i)
    expect(episode.episode).to eq(1)
    expect(episode.season).to eq(1)
  end
end

describe 'Imdb::Season starting with episode 0' do
  subject { Imdb::Serie.new('0898266') }
  let(:season) { subject.season(1) }
  let(:episodes) { season.episodes }

  it 'indexes episode correctly' do
    expect(episodes[0].episode).to eq(0)
    expect(episodes[1].episode).to eq(1)
  end

  it 'returns the correct title' do
    expect(episodes[0].title).to eq('Unaired Pilot')
    expect(episodes[1].title).to eq('Pilot')
  end

  it 'fetches the correct episode' do
    expect(season.episode(0).episode).to eq(0)
    expect(season.episode(1).episode).to eq(1)
  end
end
25.717949
52
0.667996
bf7a8e4e7ac423cb0a2eb6ebed5c60d8d16c2a28
1,078
class Dtc < Formula
  desc "Device tree compiler"
  homepage "https://www.devicetree.org/"
  url "https://www.kernel.org/pub/software/utils/dtc/dtc-1.4.7.tar.xz"
  sha256 "6643e8f00ff86350f465bb54b2185058b5b1b7bac01a0842c81a52b86589cde7"

  bottle do
    cellar :any
    sha256 "c230e06edc7710720e75e77319d3982f2d8e1e873018df6fdfa25268ae3ea2e1" => :high_sierra
    sha256 "ea5ae5a503636ed53d3dbc87e835c9fa2f3e004b06d871003a9a4617afc87163" => :sierra
    sha256 "6755bc2af0a42c3bb4834c468f36db8248b983d20cb47783afded95ac1267aac" => :el_capitan
    sha256 "13c2abd98fdb5e9e60807818f736051a6ae168ff591af9b208e80a12493f8ed8" => :x86_64_linux
  end

  unless OS.mac?
    depends_on "bison"
    depends_on "flex"
  end

  def install
    system "make", "NO_PYTHON=1"
    system "make", "NO_PYTHON=1", "DESTDIR=#{prefix}", "PREFIX=", "install"

    suffix = OS.mac? ? "dylib" : "so"
    mv lib/"libfdt.#{suffix}.1", lib/"libfdt.1.#{suffix}"
  end

  test do
    (testpath/"test.dts").write <<~EOS
      /dts-v1/;
      / {
      };
    EOS
    system "#{bin}/dtc", "test.dts"
  end
end
29.944444
94
0.705937
1a4848ba81ee37515b479a23f6af1ff478a28be1
409
class TheaterPolicy < ApplicationPolicy
  class Scope < Scope
    def resolve
      scope.all
    end
  end

  def new?
    communicator_or_admin?
  end

  def edit?
    communicator_or_admin?
  end

  def index?
    @user.present? && @user.registered?
  end

  def show?
    !@user.nil?
  end

  def update?
    edit?
  end

  def create?
    communicator_or_admin?
  end

  def destroy?
    false
  end
end
11.361111
39
0.630807
795817488a8c7b0243fadac5950fde4d59fcdfbe
836
{
    matrix_id: '563',
    name: 'g7jac160sc',
    group: 'Hollinger',
    description: 'Jacobian from CEPII\'s \'G7marmotte\' OLG model, oldstack 160 (scaled)',
    author: 'P. Hollinger',
    editor: 'T. Davis',
    date: '2001',
    kind: 'economic problem',
    problem_2D_or_3D: '0',
    num_rows: '47430',
    num_cols: '47430',
    nonzeros: '564952',
    num_explicit_zeros: '91664',
    num_strongly_connected_components: '1157',
    num_dmperm_blocks: '2277',
    structural_full_rank: 'true',
    structural_rank: '47430',
    pattern_symmetry: '0.033',
    numeric_symmetry: '0.000',
    rb_type: 'real',
    structure: 'unsymmetric',
    cholesky_candidate: 'no',
    positive_definite: 'no',
    image_files: 'g7jac160sc.png,g7jac160sc_dmperm.png,g7jac160sc_scc.png,g7jac160sc_APlusAT_graph.gif,g7jac160sc_graph.gif,',
}
30.962963
126
0.667464
62ee39615f96f4c24f022fcba33479ebcb6ed4d3
52
class UsersController < ApplicationController
end
13
45
0.846154
91a11710418992c2c38d605e7f33087880ed49c0
150
#Kata: Sort by Last Char
#URL: https://www.codewars.com/kata/57eba158e8ca2c8aba0002a0

def last(x)
  return x.split(/\s+/).sort_by{|i| i[-1]}
end
25
61
0.68
1da4a79cbc7b1c835f3617c63e5c2b5d0eb310be
7,758
require 'pathname' Puppet::Type.newtype(:dsc_xwindowsupdateagent) do require Pathname.new(__FILE__).dirname + '../../' + 'puppet/type/base_dsc' require Pathname.new(__FILE__).dirname + '../../puppet_x/puppetlabs/dsc_type_helpers' @doc = %q{ The DSC xWindowsUpdateAgent resource type. Automatically generated from 'xWindowsUpdate/DscResources/MSFT_xWindowsUpdateAgent/MSFT_xWindowsUpdateAgent.schema.mof' To learn more about PowerShell Desired State Configuration, please visit https://technet.microsoft.com/en-us/library/dn249912.aspx. For more information about built-in DSC Resources, please visit https://technet.microsoft.com/en-us/library/dn249921.aspx. For more information about xDsc Resources, please visit https://github.com/PowerShell/DscResources. } validate do fail('dsc_issingleinstance is a required attribute') if self[:dsc_issingleinstance].nil? end def dscmeta_resource_friendly_name; 'xWindowsUpdateAgent' end def dscmeta_resource_name; 'MSFT_xWindowsUpdateAgent' end def dscmeta_module_name; 'xWindowsUpdate' end def dscmeta_module_version; '2.7.0.0' end newparam(:name, :namevar => true ) do end ensurable do newvalue(:exists?) { provider.exists? } newvalue(:present) { provider.create } defaultto { :present } end # Name: PsDscRunAsCredential # Type: MSFT_Credential # IsMandatory: False # Values: None newparam(:dsc_psdscrunascredential) do def mof_type; 'MSFT_Credential' end def mof_is_embedded?; true end desc "PsDscRunAsCredential" validate do |value| unless value.kind_of?(Hash) fail("Invalid value '#{value}'. Should be a hash") end PuppetX::Dsc::TypeHelpers.validate_MSFT_Credential("Credential", value) end end # Name: IsSingleInstance # Type: string # IsMandatory: True # Values: ["Yes"] newparam(:dsc_issingleinstance) do def mof_type; 'string' end def mof_is_embedded?; false end desc "IsSingleInstance - Specifies the resource is a single instance, the value must be 'Yes' Valid values are Yes." isrequired validate do |value| unless value.kind_of?(String) fail("Invalid value '#{value}'. Should be a string") end unless ['Yes', 'yes'].include?(value) fail("Invalid value '#{value}'. Valid values are Yes") end end end # Name: Category # Type: string[] # IsMandatory: False # Values: ["Security", "Important", "Optional"] newparam(:dsc_category, :array_matching => :all) do def mof_type; 'string[]' end def mof_is_embedded?; false end desc "Category - Indicates if the resource should ensure all Windows Updates are installed or if Automatic updates should be disabled. Valid values are Security, Important, Optional." validate do |value| unless value.kind_of?(Array) || value.kind_of?(String) fail("Invalid value '#{value}'. Should be a string or an array of strings") end if value.kind_of?(Array) unless (['Security', 'security', 'Important', 'important', 'Optional', 'optional'] & value).count == value.count fail("Invalid value #{value}. Valid values are Security, Important, Optional") end end if value.kind_of?(String) unless ['Security', 'security', 'Important', 'important', 'Optional', 'optional'].include?(value) fail("Invalid value #{value}. 
Valid values are Security, Important, Optional") end end end munge do |value| Array(value) end end # Name: Notifications # Type: string # IsMandatory: False # Values: ["Disabled", "ScheduledInstallation"] newparam(:dsc_notifications) do def mof_type; 'string' end def mof_is_embedded?; false end desc "Notifications - Indicates if Windows update agent should notify about updates, see:https://msdn.microsoft.com/en-us/library/windows/desktop/aa385806%28v=vs.85%29.aspx?f=255&MSPPError=-2147217396. Valid values are Disabled, ScheduledInstallation." validate do |value| unless value.kind_of?(String) fail("Invalid value '#{value}'. Should be a string") end unless ['Disabled', 'disabled', 'ScheduledInstallation', 'scheduledinstallation'].include?(value) fail("Invalid value '#{value}'. Valid values are Disabled, ScheduledInstallation") end end end # Name: Source # Type: string # IsMandatory: False # Values: ["WindowsUpdate", "MicrosoftUpdate", "WSUS"] newparam(:dsc_source) do def mof_type; 'string' end def mof_is_embedded?; false end desc "Source - Indicates which source service Windows update agent should use. Note: WSUS is not implemented in this resource. Valid values are WindowsUpdate, MicrosoftUpdate, WSUS." validate do |value| unless value.kind_of?(String) fail("Invalid value '#{value}'. Should be a string") end unless ['WindowsUpdate', 'windowsupdate', 'MicrosoftUpdate', 'microsoftupdate', 'WSUS', 'wsus'].include?(value) fail("Invalid value '#{value}'. Valid values are WindowsUpdate, MicrosoftUpdate, WSUS") end end end # Name: UpdateNow # Type: boolean # IsMandatory: False # Values: None newparam(:dsc_updatenow) do def mof_type; 'boolean' end def mof_is_embedded?; false end desc "UpdateNow - Indicates if the resource should trigger an update during consistency." validate do |value| end newvalues(true, false) munge do |value| PuppetX::Dsc::TypeHelpers.munge_boolean(value.to_s) end end # Name: AutomaticUpdatesNotificationSetting # Type: string # IsMandatory: False # Values: None newparam(:dsc_automaticupdatesnotificationsetting) do def mof_type; 'string' end def mof_is_embedded?; false end desc "AutomaticUpdatesNotificationSetting - Automatic Updates Notification Setting" validate do |value| unless value.kind_of?(String) fail("Invalid value '#{value}'. Should be a string") end end end # Name: TotalUpdatesNotInstalled # Type: uint32 # IsMandatory: False # Values: None newparam(:dsc_totalupdatesnotinstalled) do def mof_type; 'uint32' end def mof_is_embedded?; false end desc "TotalUpdatesNotInstalled - Count of updates not installed. Only returned if UpdateNow is specified." validate do |value| unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0) fail("Invalid value #{value}. Should be a unsigned Integer") end end munge do |value| PuppetX::Dsc::TypeHelpers.munge_integer(value) end end # Name: RebootRequired # Type: boolean # IsMandatory: False # Values: None newparam(:dsc_rebootrequired) do def mof_type; 'boolean' end def mof_is_embedded?; false end desc "RebootRequired - Indicates if Wua Requires a reboot. Only returned if UpdateNow is specified." 
validate do |value| end newvalues(true, false) munge do |value| PuppetX::Dsc::TypeHelpers.munge_boolean(value.to_s) end end def builddepends pending_relations = super() PuppetX::Dsc::TypeHelpers.ensure_reboot_relationship(self, pending_relations) end end Puppet::Type.type(:dsc_xwindowsupdateagent).provide :powershell, :parent => Puppet::Type.type(:base_dsc).provider(:powershell) do confine :true => (Gem::Version.new(Facter.value(:powershell_version)) >= Gem::Version.new('5.0.10586.117')) defaultfor :operatingsystem => :windows mk_resource_methods end
35.587156
256
0.677236
39958992468381d7fbed2aeb420fc9ad48b88224
1,951
class TagNameValidator < ActiveModel::EachValidator
  def validate_each(record, attribute, value)
    normalized = Tag.normalize_name(value)

    case normalized
    when /\A_*\z/
      record.errors[attribute] << "'#{value}' cannot be blank"
    when /\*/
      record.errors[attribute] << "'#{value}' cannot contain asterisks ('*')"
    when /,/
      record.errors[attribute] << "'#{value}' cannot contain commas (',')"
    when /#/
      record.errors[attribute] << "'#{value}' cannot contain octothorpes ('#')"
    when /\$/
      record.errors[attribute] << "'#{value}' cannot contain peso signs ('$')"
    when /%/
      record.errors[attribute] << "'#{value}' cannot contain percent signs ('%')"
    when /\\/
      record.errors[attribute] << "'#{value}' cannot contain back slashes ('\\')"
    when /\A~/
      record.errors[attribute] << "'#{value}' cannot begin with a tilde ('~')"
    when /\A-/
      record.errors[attribute] << "'#{value}' cannot begin with a dash ('-')"
    when /\A:/
      record.errors[attribute] << "'#{value}' cannot begin with a colon (':')"
    when /\A_/
      record.errors[attribute] << "'#{value}' cannot begin with an underscore ('_')"
    when /_\z/
      record.errors[attribute] << "'#{value}' cannot end with an underscore ('_')"
    when /[_\-~]{2}/
      record.errors[attribute] << "'#{value}' cannot contain consecutive underscores, hyphens or tildes"
    when /[^[:graph:]]/
      record.errors[attribute] << "'#{value}' cannot contain non-printable characters"
    when /\A(#{Tag::METATAGS.join("|")}):(.+)\z/i
      record.errors[attribute] << "'#{value}' cannot begin with '#{$1}:'"
    when /\A(#{Tag.categories.regexp}):(.+)\z/i
      record.errors[attribute] << "'#{value}' cannot begin with '#{$1}:'"
    end

    if normalized =~ /[^[:ascii:]]/ && !options[:disable_ascii_check] == true
      record.errors[attribute] << "'#{value}' must consist of only ASCII characters"
    end
  end
end
44.340909
104
0.596105
4a62e696cd9a3008d49f2c4bd0ba3acc70873b01
4,646
require 'spec_helper' describe TreeHelper do let(:project) { create(:project, :repository) } let(:repository) { project.repository } let(:sha) { 'c1c67abbaf91f624347bb3ae96eabe3a1b742478' } def create_file(filename) project.repository.create_file( project.creator, filename, 'test this', message: "Automatically created file #{filename}", branch_name: 'master' ) end describe '.render_tree' do before do @id = sha @path = "" @project = project @lfs_blob_ids = [] end it 'displays all entries without a warning' do tree = repository.tree(sha, 'files') html = render_tree(tree) expect(html).not_to have_selector('.tree-truncated-warning') end it 'truncates entries and adds a warning' do stub_const('TreeHelper::FILE_LIMIT', 1) tree = repository.tree(sha, 'files') html = render_tree(tree) expect(html).to have_selector('.tree-truncated-warning', count: 1) expect(html).to have_selector('.tree-item-file-name', count: 1) end end describe '.fast_project_blob_path' do it 'generates the same path as project_blob_path' do blob_path = repository.tree(sha, 'with space').entries.first.path fast_path = fast_project_blob_path(project, blob_path) std_path = project_blob_path(project, blob_path) expect(fast_path).to eq(std_path) end it 'generates the same path with encoded file names' do tree = repository.tree(sha, 'encoding') blob_path = tree.entries.find { |entry| entry.path == 'encoding/テスト.txt' }.path fast_path = fast_project_blob_path(project, blob_path) std_path = project_blob_path(project, blob_path) expect(fast_path).to eq(std_path) end it 'respects a configured relative URL' do allow(Gitlab.config.gitlab).to receive(:relative_url_root).and_return('/gitlab/root') blob_path = repository.tree(sha, '').entries.first.path fast_path = fast_project_blob_path(project, blob_path) expect(fast_path).to start_with('/gitlab/root') end it 'encodes files starting with #' do filename = '#test-file' create_file(filename) fast_path = fast_project_blob_path(project, filename) expect(fast_path).to end_with('%23test-file') end end describe '.fast_project_tree_path' do let(:tree_path) { repository.tree(sha, 'with space').path } let(:fast_path) { fast_project_tree_path(project, tree_path) } let(:std_path) { project_tree_path(project, tree_path) } it 'generates the same path as project_tree_path' do expect(fast_path).to eq(std_path) end it 'respects a configured relative URL' do allow(Gitlab.config.gitlab).to receive(:relative_url_root).and_return('/gitlab/root') expect(fast_path).to start_with('/gitlab/root') end it 'encodes files starting with #' do filename = '#test-file' create_file(filename) fast_path = fast_project_tree_path(project, filename) expect(fast_path).to end_with('%23test-file') end end describe 'flatten_tree' do let(:tree) { repository.tree(sha, 'files') } let(:root_path) { 'files' } let(:tree_item) { tree.entries.find { |entry| entry.path == path } } subject { flatten_tree(root_path, tree_item) } context "on a directory containing more than one file/directory" do let(:path) { 'files/html' } it "returns the directory name" do expect(subject).to match('html') end end context "on a directory containing only one directory" do let(:path) { 'files/flat' } it "returns the flattened path" do expect(subject).to match('flat/path/correct') end context "with a nested root path" do let(:root_path) { 'files/flat' } it "returns the flattened path with the root path suffix removed" do expect(subject).to match('path/correct') end end end context 'when the root path contains a plus character' do let(:root_path) { 'gtk/C++' } 
let(:tree_item) { double(flat_path: 'gtk/C++/glade') } it 'returns the flattened path' do expect(subject).to eq('glade') end end end describe '#commit_in_single_accessible_branch' do it 'escapes HTML from the branch name' do helper.instance_variable_set(:@branch_name, "<script>alert('escape me!');</script>") escaped_branch_name = '&lt;script&gt;alert(&#39;escape me!&#39;);&lt;/script&gt;' expect(helper.commit_in_single_accessible_branch).to include(escaped_branch_name) end end end
29.782051
91
0.665734
e281524f43ea0642d0afd9f315f81ee169722f90
1,727
class Sphinx < Formula
  desc "Full-text search engine"
  homepage "https://sphinxsearch.com/"
  url "https://sphinxsearch.com/files/sphinx-2.2.11-release.tar.gz"
  sha256 "6662039f093314f896950519fa781bc87610f926f64b3d349229002f06ac41a9"
  license "GPL-2.0"
  revision 3
  head "https://github.com/sphinxsearch/sphinx.git", branch: "master"

  bottle do
    rebuild 2
    sha256 arm64_big_sur: "57a2dc9f3c5c40d46785753531e4801a7db0560c11434a10d224efeba3c2c1b2"
    sha256 big_sur:       "bbaebbfc31099a28b528c679a2c7825e218e42d83d04a4b0dc53561e70fcbdca"
    sha256 catalina:      "f3d89ffcd2926373af5a35bb7ae6f16e59074699eeacfb4d358a0dc5742729cc"
    sha256 mojave:        "61f1ae14e253c8c84f0e8a9f3a26833ca4a1da887d97c0df8ecebb6096222546"
    sha256 high_sierra:   "3daf6e565c7c12803c13b6439a872e61335b3b27c06719ca6f8cec93dcd2176e"
    sha256 x86_64_linux:  "231d104e3a780a5228da3109c40b42af91c2f57c3e1504aafc5c28c0804a9a20" # linuxbrew-core
  end

  depends_on "[email protected]"
  depends_on "[email protected]"

  uses_from_macos "zlib"

  conflicts_with "manticoresearch", because: "manticoresearch is a fork of sphinx"

  resource "stemmer" do
    url "https://github.com/snowballstem/snowball.git",
        revision: "9b58e92c965cd7e3208247ace3cc00d173397f3c"
  end

  def install
    resource("stemmer").stage do
      system "make", "dist_libstemmer_c"
      system "tar", "xzf", "dist/libstemmer_c.tgz", "-C", buildpath
    end

    args = %W[
      --prefix=#{prefix}
      --disable-dependency-tracking
      --localstatedir=#{var}
      --with-libstemmer
      --with-mysql
      --without-pgsql
    ]

    system "./configure", *args
    system "make", "install"
  end

  test do
    system bin/"searchd", "--help"
  end
end
31.4
109
0.733642
e8d140cf36d19f15d7900c00289c1994abe6af6d
5,189
module Travis::API::V3
  class Service
    DEFAULT_PARAMS = [ "include".freeze, "@type".freeze ]
    private_constant :DEFAULT_PARAMS

    def self.result_type(rt = nil)
      @result_type = rt if rt
      @result_type ||= parent.result_type if parent and parent.respond_to? :result_type
      raise 'result type not set' unless defined? @result_type
      @result_type
    end

    def self.type(t = nil)
      @type ||= (t || result_type)
    end

    def self.filter_params(params)
      wanted = self.params
      params.select { |key| wanted.include? key }
    end

    def self.params(*list, prefix: nil)
      @params ||= superclass.respond_to?(:params) ? superclass.params.dup : DEFAULT_PARAMS
      list.each do |entry|
        @params << entry.to_s
        @params << "#{prefix || result_type}.#{entry}" if entry.is_a? Symbol
      end
      @params
    end

    def self.accepted_params
      self.params.select { |p| p =~ /#{type}\./.freeze }
    end

    def self.paginate(**options)
      params("limit".freeze, "offset".freeze, "skip_count".freeze)
      params("sort_by".freeze) if query_factory.sortable?
      @paginator = Paginator.new(**options)
    end

    def self.paginator
      @paginator ||= nil
    end

    def self.paginate?
      !!@paginator if defined? @paginator
    end

    def self.query_factory
      Queries[result_type]
    end

    attr_accessor :access_control, :params, :request_body

    def initialize(access_control, params, request_body)
      @access_control = access_control
      @params = params
      @queries = {}
      @github = {}
      @request_body = request_body
    end

    def query(type = result_type)
      @queries[type] ||= Queries[type].new(params, result_type, service: self)
    end

    def github(user = nil)
      @github[user] ||= GitHub.new(user)
    end

    def find(type = result_type, *args)
      not_found(true, type) unless object = query(type).find(*args)
      not_found(false, type) unless access_control.visible? object
      object
    end

    def check_login_and_find(*args)
      raise LoginRequired unless access_control.full_access_or_logged_in?
      find(*args) or raise NotFound
    end

    def not_found(actually_not_found = false, type = nil)
      type, actually_not_found = actually_not_found, false if actually_not_found.is_a? Symbol
      error = actually_not_found ? EntityMissing : NotFound
      raise(error, type || result_type)
    end

    def run!
      not_implemented
    end

    def result_type
      self.class.result_type
    end

    def result(resource, **meta_data)
      return not_found unless resource
      meta_data[:type] ||= meta_data[:result_type] || result_type
      meta_data[:status] ||= 200
      meta_data[:access_control] ||= access_control
      meta_data[:resource] ||= resource
      Result.new(meta_data)
    end

    def head(**meta_data)
      meta_data[:access_control] ||= access_control
      meta_data[:type] ||= result_type
      meta_data[:resource] ||= nil
      Result::Head.new(meta_data)
    end

    def deleted
      head(status: 204)
    end

    def no_content
      head(status: 204)
    end

    def run
      check_force_auth
      not_found unless result = run!
      result = paginate(result) if self.class.paginate?
      check_deprecated_params(result) if params['include']
      apply_warnings(result)
      result
    end

    def check_force_auth
      if access_control.force_auth?
        raise LoginRequired unless access_control.logged_in? || access_control.temp_access?
      end
    end

    def check_deprecated_params(result)
      case
      when params['include'].match(/repository.current_build/)
        result.deprecated_param('current_build', reason: "repository.last_started_build".freeze)
      end
    end

    def warnings
      @warnings ||= []
    end

    def warn(*args)
      warnings << args
    end

    def apply_warnings(result)
      warnings.each { |args| result.warn(*args) }
    end

    def paginate(result)
      self.class.paginator.paginate(result,
        limit: params['limit'.freeze],
        offset: params['offset'.freeze],
        skip_count: params['skip_count'.freeze] == 'true',
        access_control: access_control)
    end

    def params_for?(prefix)
      return true if params['@type'.freeze] == prefix
      return true if params[prefix].is_a? Hash
      params.keys.any? { |key| key.start_with? "#{prefix}." }
    end

    def accepted(**payload)
      payload[:resource_type] ||= result_type
      result(payload, status: 202, result_type: :accepted)
    end

    def rejected(payload)
      result(payload, status: 403, result_type: :error)
    end

    def abuse_detected(message = 'Abuse detected. Restart disabled. If you think you have received this message in error, please contact support: [email protected]')
      rejected(Error.new(message, status: 403))
    end

    def not_implemented
      raise NotImplemented
    end

    def private_repo_feature!(repository)
      raise PrivateRepoFeature unless access_control.enterprise? || repository.private?
    end
  end
end
27.310526
169
0.641357
28221a2d368ba584f129e6e762acc8dec2a3d12f
4,081
#!/usr/bin/env ruby

require 'date'
require 'runt'

module Runt

  # :title:PDate
  # == PDate
  # Date and DateTime with explicit precision.
  #
  # Based the <tt>pattern</tt>[http://martinfowler.com/ap2/timePoint.html] by Martin Fowler.
  #
  #
  # Author:: Matthew Lipper
  class PDate < DateTime

    include DPrecision

    attr_accessor :date_precision

    class << self
      alias_method :old_civil, :civil

      def civil(*args)
        precision=nil
        if(args[0].instance_of?(DPrecision::Precision))
          precision = args.shift
        else
          return PDate::sec(*args)
        end
        _civil = old_civil(*args)
        _civil.date_precision = precision
        _civil
      end
    end

    class << self; alias_method :new, :civil end

    def include?(expr)
      eql?(expr)
    end

    def + (n)
      raise TypeError, 'expected numeric' unless n.kind_of?(Numeric)
      case @date_precision
      when YEAR then
        return DPrecision::to_p(PDate::civil(year+n,month,day),@date_precision)
      when MONTH then
        current_date = self.class.to_date(self)
        return DPrecision::to_p((current_date>>n),@date_precision)
      when WEEK then return new_self_plus(n*7)
      when DAY then return new_self_plus(n)
      when HOUR then return new_self_plus(n){ |n| n = (n*(1.to_r/24) ) }
      when MIN then return new_self_plus(n){ |n| n = (n*(1.to_r/1440) ) }
      when SEC then return new_self_plus(n){ |n| n = (n*(1.to_r/86400) ) }
      when MILLI then return self
      end
    end

    def - (x)
      case x
      when Numeric then return self+(-x) #FIXME!!
      when Date; return @ajd - x.ajd
      end
      raise TypeError, 'expected numeric or date'
    end

    def <=> (other)
      result = nil
      if(other.respond_to?("date_precision") && other.date_precision>@date_precision)
        result = super(DPrecision::to_p(other,@date_precision))
      else
        result = super(other)
      end
      #puts "#{self.to_s}<=>#{other.to_s} => #{result}" if $DEBUG
      result
    end

    def new_self_plus(n)
      if(block_given?)
        n=yield(n)
      end
      return DPrecision::to_p(Time.at(to_time.to_i + (n.to_f * 86400)),date_precision)
    end

    def PDate.to_date(pdate)
      if( pdate.date_precision > DPrecision::DAY) then
        DateTime.new(pdate.year,pdate.month,pdate.day,pdate.hour,pdate.min,pdate.sec)
      end
      return Date.new(pdate.year,pdate.month,pdate.day)
    end

    def PDate.year(yr,*ignored)
      PDate.civil(YEAR, yr, MONTH.min_value, DAY.min_value )
    end

    def PDate.month( yr,mon,*ignored )
      PDate.civil(MONTH, yr, mon, DAY.min_value )
    end

    def PDate.week( yr,mon,day,*ignored )
      #LJK: need to calculate which week this day implies,
      #and then move the day back to the *first* day in that week;
      #note that since rfc2445 defaults to weekstart=monday, I'm
      #going to use commercial day-of-week
      raw = PDate.day(yr, mon, day)
      cooked = PDate.commercial(raw.cwyear, raw.cweek, 1)
      PDate.civil(WEEK, cooked.year, cooked.month, cooked.day)
    end

    def PDate.day( yr,mon,day,*ignored )
      PDate.civil(DAY, yr, mon, day )
    end

    def PDate.hour( yr,mon,day,hr=HOUR.min_value,*ignored )
      PDate.civil(HOUR, yr, mon, day,hr,MIN.min_value, SEC.min_value)
    end

    def PDate.min( yr,mon,day,hr=HOUR.min_value,min=MIN.min_value,*ignored )
      PDate.civil(MIN, yr, mon, day,hr,min, SEC.min_value)
    end

    def PDate.sec( yr,mon,day,hr=HOUR.min_value,min=MIN.min_value,sec=SEC.min_value,*ignored )
      PDate.civil(SEC, yr, mon, day,hr,min, sec)
    end

    def PDate.millisecond( yr,mon,day,hr,min,sec,ms,*ignored )
      PDate.civil(SEC, yr, mon, day,hr,min, sec, ms, *ignored)
      #raise "Not implemented yet."
    end

    def PDate.default(*args)
      PDate.civil(DEFAULT, *args)
    end

    #
    # Custom dump which preserves DatePrecision
    #
    # Author:: Jodi Showers
    #
    def marshal_dump
      [date_precision, ajd, start, offset]
    end

    #
    # Custom load which preserves DatePrecision
    #
    # Author:: Jodi Showers
    #
    def marshal_load(dumped_obj)
      @date_precision, @ajd, @sg, @of=dumped_obj
    end

  end
end
24.584337
92
0.649351
ab506770d2b7246d5f198b5710f920870726ff98
1,969
module GOVUKDesignSystemFormBuilder
  module Elements
    class ErrorSummary < Base
      include Traits::Error

      def initialize(builder, object_name, title)
        @builder = builder
        @object_name = object_name
        @title = title
      end

      def html
        return nil unless object_has_errors?

        content_tag('div', class: error_summary_class, **error_summary_options) do
          safe_join([error_title, error_summary])
        end
      end

      private

      def error_title
        tag.h2(@title, id: error_summary_title_id, class: error_summary_class('title'))
      end

      def error_summary
        content_tag('div', class: error_summary_class('body')) do
          content_tag('ul', class: [%(#{brand}-list), error_summary_class('list')]) do
            safe_join(error_list)
          end
        end
      end

      def error_list
        @builder.object.errors.messages.map do |attribute, messages|
          error_list_item(attribute, messages.first)
        end
      end

      def error_list_item(attribute, message)
        tag.li(link_to(message, same_page_link(field_id(attribute)), data: { turbolinks: false }))
      end

      def same_page_link(target)
        '#'.concat(target)
      end

      def error_summary_class(part = nil)
        if part
          %(#{brand}-error-summary).concat('__', part)
        else
          %(#{brand}-error-summary)
        end
      end

      def field_id(attribute)
        build_id('field-error', attribute_name: attribute)
      end

      def error_summary_title_id
        'error-summary-title'
      end

      def object_has_errors?
        @builder.object.errors.any?
      end

      def error_summary_options
        {
          tabindex: -1,
          role: 'alert',
          data: { module: %(#{brand}-error-summary) },
          aria: { labelledby: error_summary_title_id }
        }
      end
    end
  end
end
23.722892
98
0.583545
282b710b7516170188655dc81ea745daefed1888
95
module ApexParser
  ApexClassCreator.new do |c|
    c.add_class(:Crypto, %i[public])
  end
end
15.833333
36
0.705263
1cf768b0ddb58c75f4afbcbcd43b786d8400777e
21,457
# # Author:: Prajakta Purohit ([email protected]) # Copyright:: Copyright 2012-2016, Chef Software Inc. # License:: Apache License, Version 2.0 # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # require "spec_helper" describe Chef::Win32::Registry do include_context "Win32" let(:value1) { { :name => "one", :type => :string, :data => "1" } } let(:value1_upcase_name) { { :name => "ONE", :type => :string, :data => "1" } } let(:key_path) { 'HKCU\Software\OpscodeNumbers' } let(:key) { 'Software\OpscodeNumbers' } let(:key_parent) { "Software" } let(:key_to_delete) { "OpscodeNumbers" } let(:sub_key) { "OpscodePrimes" } let(:missing_key_path) { 'HKCU\Software' } let(:registry) { Chef::Win32::Registry.new() } let(:hive_mock) { double("::Win32::Registry::KHKEY_CURRENT_USER") } let(:reg_mock) { double("reg") } before(:all) do Win32::Registry = Class.new Win32::Registry::Error = Class.new(RuntimeError) end before(:each) do allow_any_instance_of(Chef::Win32::Registry).to receive(:machine_architecture).and_return(:x86_64) #Making the values for registry constants available on unix Win32::Registry::KEY_SET_VALUE = 0x0002 Win32::Registry::KEY_QUERY_VALUE = 0x0001 Win32::Registry::KEY_WRITE = 0x00020000 | 0x0002 | 0x0004 Win32::Registry::KEY_READ = 0x00020000 | 0x0001 | 0x0008 | 0x0010 end after(:each) do Win32::Registry.send(:remove_const, "KEY_SET_VALUE") if defined?(Win32::Registry::KEY_SET_VALUE) Win32::Registry.send(:remove_const, "KEY_QUERY_VALUE") if defined?(Win32::Registry::KEY_QUERY_VALUE) Win32::Registry.send(:remove_const, "KEY_READ") if defined?(Win32::Registry::KEY_READ) Win32::Registry.send(:remove_const, "KEY_WRITE") if defined?(Win32::Registry::KEY_WRITE) end describe "get_values" do it "gets all values for a key if the key exists" do expect(registry).to receive(:get_hive_and_key).with(key_path).and_return([hive_mock, key]) expect(registry).to receive(:key_exists!).with(key_path).and_return(true) expect(hive_mock).to receive(:open).with(key, ::Win32::Registry::KEY_READ | registry.registry_system_architecture).and_yield(reg_mock) expect(reg_mock).to receive(:map) registry.get_values(key_path) end it "throws an exception if key does not exist" do expect(registry).to receive(:get_hive_and_key).with(key_path).and_return([hive_mock, key]) expect(registry).to receive(:key_exists!).with(key_path).and_raise(Chef::Exceptions::Win32RegKeyMissing) expect { registry.get_values(key_path) }.to raise_error(Chef::Exceptions::Win32RegKeyMissing) end end describe "set_value" do it "does nothing if key and hive and value exist" do expect(registry).to receive(:key_exists!).with(key_path).and_return(true) expect(registry).to receive(:get_hive_and_key).with(key_path).and_return([hive_mock, key]) expect(registry).to receive(:value_exists?).with(key_path, value1).and_return(true) expect(registry).to receive(:data_exists?).with(key_path, value1).and_return(true) registry.set_value(key_path, value1) end it "does nothing if case insensitive key and hive and value exist" do expect(registry).to 
receive(:key_exists!).with(key_path.downcase).and_return(true) expect(registry).to receive(:get_hive_and_key).with(key_path.downcase).and_return([hive_mock, key]) expect(registry).to receive(:value_exists?).with(key_path.downcase, value1).and_return(true) expect(registry).to receive(:data_exists?).with(key_path.downcase, value1).and_return(true) registry.set_value(key_path.downcase, value1) end it "does nothing if key and hive and value with a case insensitive name exist" do expect(registry).to receive(:key_exists!).with(key_path.downcase).and_return(true) expect(registry).to receive(:get_hive_and_key).with(key_path.downcase).and_return([hive_mock, key]) expect(registry).to receive(:value_exists?).with(key_path.downcase, value1_upcase_name).and_return(true) expect(registry).to receive(:data_exists?).with(key_path.downcase, value1_upcase_name).and_return(true) registry.set_value(key_path.downcase, value1_upcase_name) end it "updates value if key and hive and value exist, but data is different" do expect(registry).to receive(:key_exists!).with(key_path).and_return(true) expect(registry).to receive(:get_hive_and_key).with(key_path).and_return([hive_mock, key]) expect(registry).to receive(:value_exists?).with(key_path, value1).and_return(true) expect(registry).to receive(:data_exists?).with(key_path, value1).and_return(false) expect(hive_mock).to receive(:open).with(key, Win32::Registry::KEY_SET_VALUE | ::Win32::Registry::KEY_QUERY_VALUE | registry.registry_system_architecture).and_yield(reg_mock) expect(registry).to receive(:get_type_from_name).with(:string).and_return(1) expect(reg_mock).to receive(:write).with("one", 1, "1") registry.set_value(key_path, value1) end it "creates value if the key exists and the value does not exist" do expect(registry).to receive(:key_exists!).with(key_path).and_return(true) expect(registry).to receive(:get_hive_and_key).with(key_path).and_return([hive_mock, key]) expect(registry).to receive(:value_exists?).with(key_path, value1).and_return(false) expect(hive_mock).to receive(:open).with(key, ::Win32::Registry::KEY_SET_VALUE | ::Win32::Registry::KEY_QUERY_VALUE | registry.registry_system_architecture).and_yield(reg_mock) expect(registry).to receive(:get_type_from_name).with(:string).and_return(1) expect(reg_mock).to receive(:write).with("one", 1, "1") registry.set_value(key_path, value1) end it "should raise an exception if the key does not exist" do expect(registry).to receive(:key_exists!).with(key_path).and_raise(Chef::Exceptions::Win32RegKeyMissing) expect { registry.set_value(key_path, value1) }.to raise_error(Chef::Exceptions::Win32RegKeyMissing) end end describe "delete_value" do it "deletes value if value exists" do expect(registry).to receive(:value_exists?).with(key_path, value1).and_return(true) expect(registry).to receive(:get_hive_and_key).with(key_path).and_return([hive_mock, key]) expect(hive_mock).to receive(:open).with(key, ::Win32::Registry::KEY_SET_VALUE | registry.registry_system_architecture).and_yield(reg_mock) expect(reg_mock).to receive(:delete_value).with("one").and_return(true) registry.delete_value(key_path, value1) end it "raises an exception if the key does not exist" do expect(registry).to receive(:value_exists?).with(key_path, value1).and_return(true) expect(registry).to receive(:get_hive_and_key).with(key_path).and_raise(Chef::Exceptions::Win32RegKeyMissing) registry.delete_value(key_path, value1) end it "does nothing if the value does not exist" do expect(registry).to receive(:value_exists?).with(key_path, 
value1).and_return(false) registry.delete_value(key_path, value1) end end describe "create_key" do it "creates key if intermediate keys are missing and recursive is set to true" do expect(registry).to receive(:keys_missing?).with(key_path).and_return(true) expect(registry).to receive(:create_missing).with(key_path) expect(registry).to receive(:key_exists?).with(key_path).and_return(false) expect(registry).to receive(:get_hive_and_key).with(key_path).and_return([hive_mock, key]) expect(hive_mock).to receive(:create).with(key, ::Win32::Registry::KEY_WRITE | registry.registry_system_architecture) registry.create_key(key_path, true) end it "raises an exception if intermediate keys are missing and recursive is set to false" do expect(registry).to receive(:keys_missing?).with(key_path).and_return(true) expect { registry.create_key(key_path, false) }.to raise_error(Chef::Exceptions::Win32RegNoRecursive) end it "does nothing if the key exists" do expect(registry).to receive(:keys_missing?).with(key_path).and_return(true) expect(registry).to receive(:create_missing).with(key_path) expect(registry).to receive(:key_exists?).with(key_path).and_return(true) registry.create_key(key_path, true) end it "create key if intermediate keys not missing and recursive is set to false" do expect(registry).to receive(:keys_missing?).with(key_path).and_return(false) expect(registry).to receive(:key_exists?).with(key_path).and_return(false) expect(registry).to receive(:get_hive_and_key).with(key_path).and_return([hive_mock, key]) expect(hive_mock).to receive(:create).with(key, ::Win32::Registry::KEY_WRITE | registry.registry_system_architecture) registry.create_key(key_path, false) end it "create key if intermediate keys not missing and recursive is set to true" do expect(registry).to receive(:keys_missing?).with(key_path).and_return(false) expect(registry).to receive(:key_exists?).with(key_path).and_return(false) expect(registry).to receive(:get_hive_and_key).with(key_path).and_return([hive_mock, key]) expect(hive_mock).to receive(:create).with(key, ::Win32::Registry::KEY_WRITE | registry.registry_system_architecture) registry.create_key(key_path, true) end end describe "delete_key", :windows_only do it "deletes key if it has subkeys and recursive is set to true" do expect(registry).to receive(:key_exists?).with(key_path).and_return(true) expect(registry).to receive(:has_subkeys?).with(key_path).and_return(true) expect(registry).to receive(:get_hive_and_key).with(key_path).and_return([hive_mock, key]) expect(hive_mock).to receive(:open).with(key_parent, ::Win32::Registry::KEY_WRITE | registry.registry_system_architecture).and_yield(reg_mock) expect(reg_mock).to receive(:delete_key).with(key_to_delete, true).and_return(true) registry.delete_key(key_path, true) end it "raises an exception if it has subkeys but recursive is set to false" do expect(registry).to receive(:key_exists?).with(key_path).and_return(true) expect(registry).to receive(:has_subkeys?).with(key_path).and_return(true) expect { registry.delete_key(key_path, false) }.to raise_error(Chef::Exceptions::Win32RegNoRecursive) end it "deletes key if the key exists and has no subkeys" do expect(registry).to receive(:key_exists?).with(key_path).and_return(true) expect(registry).to receive(:has_subkeys?).with(key_path).and_return(false) expect(registry).to receive(:get_hive_and_key).with(key_path).and_return([hive_mock, key]) expect(hive_mock).to receive(:open).with(key_parent, ::Win32::Registry::KEY_WRITE | 
registry.registry_system_architecture).and_yield(reg_mock) expect(reg_mock).to receive(:delete_key).with(key_to_delete, true).and_return(true) registry.delete_key(key_path, true) end end describe "key_exists?" do it "returns true if key_exists" do expect(registry).to receive(:get_hive_and_key).with(key_path).and_return([hive_mock, key]) expect(hive_mock).to receive(:open).with(key, ::Win32::Registry::KEY_READ | registry.registry_system_architecture).and_yield(reg_mock) expect(registry.key_exists?(key_path)).to eq(true) end it "returns false if key does not exist" do expect(registry).to receive(:get_hive_and_key).with(key_path).and_return([hive_mock, key]) expect(hive_mock).to receive(:open).with(key, ::Win32::Registry::KEY_READ | registry.registry_system_architecture).and_raise(::Win32::Registry::Error) expect(registry.key_exists?(key_path)).to eq(false) end end describe "key_exists!" do it "throws an exception if the key_parent does not exist" do expect(registry).to receive(:key_exists?).with(key_path).and_return(false) expect { registry.key_exists!(key_path) }.to raise_error(Chef::Exceptions::Win32RegKeyMissing) end end describe "hive_exists?" do it "returns true if the hive exists" do expect(registry).to receive(:get_hive_and_key).with(key_path).and_return([hive_mock, key]) registry.hive_exists?(key_path) == true end it "returns false if the hive does not exist" do expect(registry).to receive(:get_hive_and_key).with(key_path).and_raise(Chef::Exceptions::Win32RegHiveMissing) registry.hive_exists?(key_path) == false end end describe "has_subkeys?" do it "returns true if the key has subkeys" do expect(registry).to receive(:key_exists!).with(key_path).and_return(true) expect(registry).to receive(:get_hive_and_key).with(key_path).and_return([hive_mock, key]) expect(hive_mock).to receive(:open).with(key, ::Win32::Registry::KEY_READ | registry.registry_system_architecture).and_yield(reg_mock) expect(reg_mock).to receive(:each_key).and_yield(key) registry.has_subkeys?(key_path) == true end it "returns false if the key does not have subkeys" do expect(registry).to receive(:key_exists!).with(key_path).and_return(true) expect(registry).to receive(:get_hive_and_key).with(key_path).and_return([hive_mock, key]) expect(hive_mock).to receive(:open).with(key, ::Win32::Registry::KEY_READ | registry.registry_system_architecture).and_yield(reg_mock) expect(reg_mock).to receive(:each_key).and_return(no_args()) expect(registry.has_subkeys?(key_path)).to eq(false) end it "throws an exception if the key does not exist" do expect(registry).to receive(:key_exists!).with(key_path).and_raise(Chef::Exceptions::Win32RegKeyMissing) expect { registry.set_value(key_path, value1) }.to raise_error(Chef::Exceptions::Win32RegKeyMissing) end end describe "get_subkeys" do it "returns the subkeys if they exist" do expect(registry).to receive(:key_exists!).with(key_path).and_return(true) expect(registry).to receive(:get_hive_and_key).with(key_path).and_return([hive_mock, key]) expect(hive_mock).to receive(:open).with(key, ::Win32::Registry::KEY_READ | registry.registry_system_architecture).and_yield(reg_mock) expect(reg_mock).to receive(:each_key).and_yield(sub_key) registry.get_subkeys(key_path) end end describe "value_exists?" 
do it "throws an exception if the key does not exist" do expect(registry).to receive(:key_exists!).with(key_path).and_raise(Chef::Exceptions::Win32RegKeyMissing) expect { registry.value_exists?(key_path, value1) }.to raise_error(Chef::Exceptions::Win32RegKeyMissing) end it "returns true if the value exists" do expect(registry).to receive(:key_exists!).with(key_path).and_return(true) expect(registry).to receive(:get_hive_and_key).with(key_path).and_return([hive_mock, key]) expect(hive_mock).to receive(:open).with(key, ::Win32::Registry::KEY_READ | registry.registry_system_architecture).and_yield(reg_mock) expect(reg_mock).to receive(:any?).and_yield("one") registry.value_exists?(key_path, value1) == true end it "returns false if the value does not exist" do expect(registry).to receive(:key_exists!).with(key_path).and_return(true) expect(registry).to receive(:get_hive_and_key).with(key_path).and_return([hive_mock, key]) expect(hive_mock).to receive(:open).with(key, ::Win32::Registry::KEY_READ | registry.registry_system_architecture).and_yield(reg_mock) expect(reg_mock).to receive(:any?).and_yield(no_args()) registry.value_exists?(key_path, value1) == false end end describe "data_exists?" do it "throws an exception if the key does not exist" do expect(registry).to receive(:key_exists!).with(key_path).and_raise(Chef::Exceptions::Win32RegKeyMissing) expect { registry.data_exists?(key_path, value1) }.to raise_error(Chef::Exceptions::Win32RegKeyMissing) end it "returns true if the data exists" do expect(registry).to receive(:key_exists!).with(key_path).and_return(true) expect(registry).to receive(:get_hive_and_key).with(key_path).and_return([hive_mock, key]) expect(registry).to receive(:get_type_from_name).with(:string).and_return(1) expect(reg_mock).to receive(:each).with(no_args()).and_yield("one", 1, "1") expect(hive_mock).to receive(:open).with(key, ::Win32::Registry::KEY_READ | registry.registry_system_architecture).and_yield(reg_mock) expect(registry.data_exists?(key_path, value1)).to eq(true) end it "returns false if the data does not exist" do expect(registry).to receive(:key_exists!).with(key_path).and_return(true) expect(registry).to receive(:get_hive_and_key).with(key_path).and_return([hive_mock, key]) expect(hive_mock).to receive(:open).with(key, ::Win32::Registry::KEY_READ | registry.registry_system_architecture).and_yield(reg_mock) expect(registry).to receive(:get_type_from_name).with(:string).and_return(1) expect(reg_mock).to receive(:each).with(no_args()).and_yield("one", 1, "2") expect(registry.data_exists?(key_path, value1)).to eq(false) end end describe "value_exists!" do it "does nothing if the value exists" do expect(registry).to receive(:value_exists?).with(key_path, value1).and_return(true) registry.value_exists!(key_path, value1) end it "throws an exception if the value does not exist" do expect(registry).to receive(:value_exists?).with(key_path, value1).and_return(false) expect { registry.value_exists!(key_path, value1) }.to raise_error(Chef::Exceptions::Win32RegValueMissing) end end describe "data_exists!" do it "does nothing if the data exists" do expect(registry).to receive(:data_exists?).with(key_path, value1).and_return(true) registry.data_exists!(key_path, value1) end it "throws an exception if the data does not exist" do expect(registry).to receive(:data_exists?).with(key_path, value1).and_return(false) expect { registry.data_exists!(key_path, value1) }.to raise_error(Chef::Exceptions::Win32RegDataMissing) end end describe "type_matches?" 
do it "returns true if type matches" do expect(registry).to receive(:value_exists!).with(key_path, value1).and_return(true) expect(registry).to receive(:get_hive_and_key).with(key_path).and_return([hive_mock, key]) expect(hive_mock).to receive(:open).with(key, ::Win32::Registry::KEY_READ | registry.registry_system_architecture).and_yield(reg_mock) expect(registry).to receive(:get_type_from_name).with(:string).and_return(1) expect(reg_mock).to receive(:each).and_yield("one", 1) expect(registry.type_matches?(key_path, value1)).to eq(true) end it "returns false if type does not match" do expect(registry).to receive(:value_exists!).with(key_path, value1).and_return(true) expect(registry).to receive(:get_hive_and_key).with(key_path).and_return([hive_mock, key]) expect(hive_mock).to receive(:open).with(key, ::Win32::Registry::KEY_READ | registry.registry_system_architecture).and_yield(reg_mock) expect(reg_mock).to receive(:each).and_yield("two", 2) expect(registry.type_matches?(key_path, value1)).to eq(false) end it "throws an exception if value does not exist" do expect(registry).to receive(:value_exists?).with(key_path, value1).and_return(false) expect { registry.type_matches?(key_path, value1) }.to raise_error(Chef::Exceptions::Win32RegValueMissing) end end describe "type_matches!" do it "does nothing if the type_matches" do expect(registry).to receive(:type_matches?).with(key_path, value1).and_return(true) registry.type_matches!(key_path, value1) end it "throws an exception if the type does not match" do expect(registry).to receive(:type_matches?).with(key_path, value1).and_return(false) expect { registry.type_matches!(key_path, value1) }.to raise_error(Chef::Exceptions::Win32RegTypesMismatch) end end describe "keys_missing?" do it "returns true if the keys are missing" do expect(registry).to receive(:key_exists?).with(missing_key_path).and_return(false) expect(registry.keys_missing?(key_path)).to eq(true) end it "returns false if no keys in the path are missing" do expect(registry).to receive(:key_exists?).with(missing_key_path).and_return(true) expect(registry.keys_missing?(key_path)).to eq(false) end end end
54.321519
183
0.717435
5d7dbfa3c3d3f6c3070a60c7241f33644febb23f
7,071
# A visitor for converting a Sass tree into a source string. class Sass::Tree::Visitors::Convert < Sass::Tree::Visitors::Base # Runs the visitor on a tree. # # @param root [Tree::Node] The root node of the Sass tree. # @param options [{Symbol => Object}] An options hash (see {Sass::CSS#initialize}). # @param format [Symbol] `:sass` or `:scss`. # @return [String] The Sass or SCSS source for the tree. def self.visit(root, options, format) new(options, format).send(:visit, root) end protected def initialize(options, format) @options = options @format = format @tabs = 0 end def visit_children(parent) @tabs += 1 return @format == :sass ? "\n" : " {}\n" if parent.children.empty? (@format == :sass ? "\n" : " {\n") + super.join.rstrip + (@format == :sass ? "\n" : " }\n") ensure @tabs -= 1 end # Ensures proper spacing between top-level nodes. def visit_root(node) Sass::Util.enum_cons(node.children + [nil], 2).map do |child, nxt| visit(child) + if nxt && (child.is_a?(Sass::Tree::CommentNode) && child.line + child.lines + 1 == nxt.line) || (child.is_a?(Sass::Tree::ImportNode) && nxt.is_a?(Sass::Tree::ImportNode) && child.line + 1 == nxt.line) || (child.is_a?(Sass::Tree::VariableNode) && nxt.is_a?(Sass::Tree::VariableNode) && child.line + 1 == nxt.line) "" else "\n" end end.join.rstrip + "\n" end def visit_charset(node) "#{tab_str}@charset \"#{node.name}\"#{semi}\n" end def visit_comment(node) value = node.value.map do |r| next r if r.is_a?(String) "\#{#{r.to_sass(@options)}}" end.join content = if @format == :sass content = value.gsub(/\*\/$/, '').rstrip if content =~ /\A[ \t]/ # Re-indent SCSS comments like this: # /* foo # bar # baz */ content.gsub!(/^/, ' ') content.sub!(/\A([ \t]*)\/\*/, '/*\1') end content = unless content.include?("\n") content else content.gsub!(/\n( \*|\/\/)/, "\n ") spaces = content.scan(/\n( *)/).map {|s| s.first.size}.min sep = node.silent ? "\n//" : "\n *" if spaces >= 2 content.gsub(/\n /, sep) else content.gsub(/\n#{' ' * spaces}/, sep) end end content.gsub!(/\A\/\*/, '//') if node.silent content.gsub!(/^/, tab_str) content.rstrip + "\n" else spaces = (' ' * [@tabs - value[/^ */].size, 0].max) content = if node.silent value.gsub(/^[\/ ]\*/, '//').gsub(/ *\*\/$/, '') else value end.gsub(/^/, spaces) + "\n" content end if node.loud if node.silent content.gsub!(%r{^\s*(//!?)}, '//!') else content.sub!(%r{^\s*(/\*)}, '/*!') end end content end def visit_debug(node) "#{tab_str}@debug #{node.expr.to_sass(@options)}#{semi}\n" end def visit_directive(node) res = "#{tab_str}#{node.value}" return res + "#{semi}\n" unless node.has_children res + yield + "\n" end def visit_each(node) "#{tab_str}@each $#{dasherize(node.var)} in #{node.list.to_sass(@options)}#{yield}" end def visit_extend(node) "#{tab_str}@extend #{selector_to_src(node.selector).lstrip}#{semi}\n" end def visit_for(node) "#{tab_str}@for $#{dasherize(node.var)} from #{node.from.to_sass(@options)} " + "#{node.exclusive ? "to" : "through"} #{node.to.to_sass(@options)}#{yield}" end def visit_function(node) args = node.args.map do |v, d| d ? "#{v.to_sass(@options)}: #{d.to_sass(@options)}" : v.to_sass(@options) end.join(", ") "#{tab_str}@function #{dasherize(node.name)}(#{args})#{yield}" end def visit_if(node) name = if !@is_else; "if" elsif node.expr; "else if" else; "else" end @is_else = false str = "#{tab_str}@#{name}" str << " #{node.expr.to_sass(@options)}" if node.expr str << yield @is_else = true str << visit(node.else) if node.else str ensure @is_else = false end def visit_import(node) quote = @format == :scss ? 
'"' : '' "#{tab_str}@import #{quote}#{node.imported_filename}#{quote}#{semi}\n" end def visit_media(node) "#{tab_str}@media #{node.query.join(', ')}#{yield}" end def visit_mixindef(node) args = if node.args.empty? "" else '(' + node.args.map do |v, d| if d "#{v.to_sass(@options)}: #{d.to_sass(@options)}" else v.to_sass(@options) end end.join(", ") + ')' end "#{tab_str}#{@format == :sass ? '=' : '@mixin '}#{dasherize(node.name)}#{args}#{yield}" end def visit_mixin(node) unless node.args.empty? && node.keywords.empty? args = node.args.map {|a| a.to_sass(@options)}.join(", ") keywords = Sass::Util.hash_to_a(node.keywords). map {|k, v| "$#{dasherize(k)}: #{v.to_sass(@options)}"}.join(', ') arglist = "(#{args}#{', ' unless args.empty? || keywords.empty?}#{keywords})" end "#{tab_str}#{@format == :sass ? '+' : '@include '}#{dasherize(node.name)}#{arglist}#{semi}\n" end def visit_prop(node) res = tab_str + node.declaration(@options, @format) return res + semi + "\n" if node.children.empty? res + yield.rstrip + semi + "\n" end def visit_return(node) "#{tab_str}@return #{node.expr.to_sass(@options)}#{semi}\n" end def visit_rule(node) if @format == :sass name = selector_to_sass(node.rule) name = "\\" + name if name[0] == ?: name.gsub(/^/, tab_str) + yield elsif @format == :scss name = selector_to_scss(node.rule) res = name + yield if node.children.last.is_a?(Sass::Tree::CommentNode) && node.children.last.silent res.slice!(-3..-1) res << "\n" << tab_str << "}\n" end res end end def visit_variable(node) "#{tab_str}$#{dasherize(node.name)}: #{node.expr.to_sass(@options)}#{' !default' if node.guarded}#{semi}\n" end def visit_warn(node) "#{tab_str}@warn #{node.expr.to_sass(@options)}#{semi}\n" end def visit_while(node) "#{tab_str}@while #{node.expr.to_sass(@options)}#{yield}" end private def selector_to_src(sel) @format == :sass ? selector_to_sass(sel) : selector_to_scss(sel) end def selector_to_sass(sel) sel.map do |r| if r.is_a?(String) r.gsub(/(,)?([ \t]*)\n\s*/) {$1 ? "#{$1}#{$2}\n" : " "} else "\#{#{r.to_sass(@options)}}" end end.join end def selector_to_scss(sel) sel.map {|r| r.is_a?(String) ? r : "\#{#{r.to_sass(@options)}}"}. join.gsub(/^[ \t]*/, tab_str).gsub(/[ \t]*$/, '') end def semi @format == :sass ? "" : ";" end def tab_str ' ' * @tabs end def dasherize(s) if @options[:dasherize] s.gsub('_', '-') else s end end end
26.885932
111
0.533871
4ac6dee5de90d74ede117b5c66aef4016c5bf839
18,894
module JRails @@config = { :google => false, :jquery_version => "1.4.2", :jqueryui_version => "1.8.4", :compressed => true } def self.load_config config_file = File.join(Rails.root, "config", "jrails.yml") if File.exist? config_file loaded_config = YAML.load_file(config_file) if loaded_config and loaded_config.key? Rails.env @@config.merge!(loaded_config[Rails.env].symbolize_keys) if google? @@jquery_path = "http://ajax.googleapis.com/ajax/libs/jquery/#{@@config[:jquery_version]}/jquery#{".min" if compressed?}.js" @@jqueryui_path = "http://ajax.googleapis.com/ajax/libs/jqueryui/#{@@config[:jqueryui_version]}/jquery-ui#{".min" if compressed?}.js" @@jqueryui_i18n_path = "http://ajax.googleapis.com/ajax/libs/jqueryui/#{@@config[:jqueryui_version]}/i18n/jquery-ui-i18n#{".min" if compressed?}.js" end else raise Exception.new "Failed finding '#{Rails.env}' environment in config. check your 'config/jrails.yml' or delete that file " end end end def self.config ; @@config ; end def self.google? ; @@config[:google] ; end def self.compressed? ; @@config[:compressed] ; end def self.jquery_path ; @@jquery_path ; end def self.jqueryui_path ; @@jqueryui_path ; end def self.jqueryui_i18n_path ; @@jqueryui_i18n_path ; end end module ActionView module Helpers module JavaScriptHelper # This function can be used to render rjs inline # # <%= javascript_function do |page| # page.replace_html :list, :partial => 'list', :object => @list # end %> # def javascript_function(*args, &block) html_options = args.extract_options! function = args[0] || '' html_options.symbolize_keys! function = update_page(&block) if block_given? javascript_tag(function) end def jquery_id(id) id.to_s.count('#.*,>+~:[/ ') == 0 ? "##{id}" : id end def jquery_ids(ids) Array(ids).map{|id| jquery_id(id)}.join(',') end end module PrototypeHelper USE_PROTECTION = const_defined?(:DISABLE_JQUERY_FORGERY_PROTECTION) ? !DISABLE_JQUERY_FORGERY_PROTECTION : true unless const_defined? :JQUERY_VAR JQUERY_VAR = 'jQuery' end unless const_defined? 
:JQCALLBACKS JQCALLBACKS = Set.new([ :beforeSend, :complete, :error, :success ] + (100..599).to_a) #instance_eval { remove_const :AJAX_OPTIONS } remove_const(:AJAX_OPTIONS) if const_defined?(:AJAX_OPTIONS) AJAX_OPTIONS = Set.new([ :before, :after, :condition, :url, :asynchronous, :method, :insertion, :position, :form, :with, :update, :script ]).merge(JQCALLBACKS) end def periodically_call_remote(options = {}) frequency = options[:frequency] || 10 # every ten seconds by default code = "setInterval(function() {#{remote_function(options)}}, #{frequency} * 1000)" javascript_tag(code) end def remote_function(options) javascript_options = options_for_ajax(options) update = '' if options[:update] && options[:update].is_a?(Hash) update = [] update << "success:'#{options[:update][:success]}'" if options[:update][:success] update << "failure:'#{options[:update][:failure]}'" if options[:update][:failure] update = '{' + update.join(',') + '}' elsif options[:update] update << "'#{options[:update]}'" end function = "#{JQUERY_VAR}.ajax(#{javascript_options})" function = "#{options[:before]}; #{function}" if options[:before] function = "#{function}; #{options[:after]}" if options[:after] function = "if (#{options[:condition]}) { #{function}; }" if options[:condition] function = "if (confirm('#{escape_javascript(options[:confirm])}')) { #{function}; }" if options[:confirm] return function end class JavaScriptGenerator module GeneratorMethods def insert_html(position, id, *options_for_render) insertion = position.to_s.downcase insertion = 'append' if insertion == 'bottom' insertion = 'prepend' if insertion == 'top' call "#{JQUERY_VAR}(\"#{jquery_id(id)}\").#{insertion}", render(*options_for_render) end def replace_html(id, *options_for_render) insert_html(:html, id, *options_for_render) end def replace(id, *options_for_render) call "#{JQUERY_VAR}(\"#{jquery_id(id)}\").replaceWith", render(*options_for_render) end def remove(*ids) call "#{JQUERY_VAR}(\"#{jquery_ids(ids)}\").remove" end def show(*ids) call "#{JQUERY_VAR}(\"#{jquery_ids(ids)}\").show" end def hide(*ids) call "#{JQUERY_VAR}(\"#{jquery_ids(ids)}\").hide" end def toggle(*ids) call "#{JQUERY_VAR}(\"#{jquery_ids(ids)}\").toggle" end def jquery_id(id) id.to_s.count('#.*,>+~:[/ ') == 0 ? "##{id}" : id end def jquery_ids(ids) Array(ids).map{|id| jquery_id(id)}.join(',') end end end protected def options_for_ajax(options) js_options = build_callbacks(options) url_options = options[:url] url_options = url_options.merge(:escape => false) if url_options.is_a?(Hash) js_options['url'] = "'#{url_for(url_options)}'" js_options['async'] = false if options[:type] == :synchronous js_options['type'] = options[:method] ? method_option_to_s(options[:method]) : ( options[:form] ? "'post'" : nil ) js_options['dataType'] = options[:datatype] ? "'#{options[:datatype]}'" : (options[:update] ? 
nil : "'script'") if options[:form] js_options['data'] = "#{JQUERY_VAR}.param(#{JQUERY_VAR}(this).serializeArray())" elsif options[:submit] js_options['data'] = "#{JQUERY_VAR}(\"##{options[:submit]}:input\").serialize()" elsif options[:with] js_options['data'] = options[:with].gsub("Form.serialize(this.form)","#{JQUERY_VAR}.param(#{JQUERY_VAR}(this.form).serializeArray())") end js_options['type'] ||= "'post'" if options[:method] if method_option_to_s(options[:method]) == "'put'" || method_option_to_s(options[:method]) == "'delete'" js_options['type'] = "'post'" if js_options['data'] js_options['data'] << " + '&" else js_options['data'] = "'" end js_options['data'] << "_method=#{options[:method]}'" end end if USE_PROTECTION && respond_to?('protect_against_forgery?') && protect_against_forgery? if js_options['data'] js_options['data'] << " + '&" else js_options['data'] = "'" end js_options['data'] << "#{request_forgery_protection_token}=' + encodeURIComponent('#{escape_javascript form_authenticity_token}')" end js_options['data'] = "''" if js_options['type'] == "'post'" && js_options['data'].nil? options_for_javascript(js_options.reject {|key, value| value.nil?}) end def build_update_for_success(html_id, insertion=nil) insertion = build_insertion(insertion) "#{JQUERY_VAR}('#{jquery_id(html_id)}').#{insertion}(request);" end def build_update_for_error(html_id, insertion=nil) insertion = build_insertion(insertion) "#{JQUERY_VAR}('#{jquery_id(html_id)}').#{insertion}(request.responseText);" end def build_insertion(insertion) insertion = insertion ? insertion.to_s.downcase : 'html' insertion = 'append' if insertion == 'bottom' insertion = 'prepend' if insertion == 'top' insertion end def build_observer(klass, name, options = {}) if options[:with] && (options[:with] !~ /[\{=(.]/) options[:with] = "'#{options[:with]}=' + value" else options[:with] ||= 'value' unless options[:function] end callback = options[:function] || remote_function(options) javascript = "#{JQUERY_VAR}('#{jquery_id(name)}').delayedObserver(" javascript << "#{options[:frequency] || 0}, " javascript << "function(element, value) {" javascript << "#{callback}}" #javascript << ", '#{options[:on]}'" if options[:on] javascript << ")" javascript_tag(javascript) end def build_callbacks(options) callbacks = {} options[:beforeSend] = ''; [:uninitialized,:loading].each do |key| options[:beforeSend] << (options[key].last == ';' ? options.delete(key) : options.delete(key) << ';') if options[key] end options.delete(:beforeSend) if options[:beforeSend].blank? options[:complete] = options.delete(:loaded) if options[:loaded] options[:error] = options.delete(:failure) if options[:failure] if options[:update] if options[:update].is_a?(Hash) options[:update][:error] = options[:update].delete(:failure) if options[:update][:failure] if options[:update][:success] options[:success] = build_update_for_success(options[:update][:success], options[:position]) << (options[:success] ? options[:success] : '') end if options[:update][:error] options[:error] = build_update_for_error(options[:update][:error], options[:position]) << (options[:error] ? options[:error] : '') end else options[:success] = build_update_for_success(options[:update], options[:position]) << (options[:success] ? options[:success] : '') end end options.each do |callback, code| if JQCALLBACKS.include?(callback) callbacks[callback] = "function(request){#{code}}" end end callbacks end end class JavaScriptElementProxy < JavaScriptProxy #:nodoc: unless const_defined? 
:JQUERY_VAR JQUERY_VAR = PrototypeHelper::JQUERY_VAR end def initialize(generator, id) id = id.to_s.count('#.*,>+~:[/ ') == 0 ? "##{id}" : id @id = id super(generator, "#{JQUERY_VAR}(\"#{id}\")") end def replace_html(*options_for_render) call 'html', @generator.send(:render, *options_for_render) end def replace(*options_for_render) call 'replaceWith', @generator.send(:render, *options_for_render) end def reload(options_for_replace={}) replace(options_for_replace.merge({ :partial => @id.to_s.sub(/^#/,'') })) end def value() call 'val()' end def value=(value) call 'val', value end end class JavaScriptElementCollectionProxy < JavaScriptCollectionProxy #:nodoc:\ unless const_defined? :JQUERY_VAR JQUERY_VAR = PrototypeHelper::JQUERY_VAR end def initialize(generator, pattern) super(generator, "#{JQUERY_VAR}(#{pattern.to_json})") end end module ScriptaculousHelper unless const_defined? :JQUERY_VAR JQUERY_VAR = PrototypeHelper::JQUERY_VAR end unless const_defined? :SCRIPTACULOUS_EFFECTS SCRIPTACULOUS_EFFECTS = { :appear => {:method => 'fadeIn'}, :blind_down => {:method => 'blind', :mode => 'show', :options => {:direction => 'vertical'}}, :blind_up => {:method => 'blind', :mode => 'hide', :options => {:direction => 'vertical'}}, :blind_right => {:method => 'blind', :mode => 'show', :options => {:direction => 'horizontal'}}, :blind_left => {:method => 'blind', :mode => 'hide', :options => {:direction => 'horizontal'}}, :bounce_in => {:method => 'bounce', :mode => 'show', :options => {:direction => 'up'}}, :bounce_out => {:method => 'bounce', :mode => 'hide', :options => {:direction => 'up'}}, :drop_in => {:method => 'drop', :mode => 'show', :options => {:direction => 'up'}}, :drop_out => {:method => 'drop', :mode => 'hide', :options => {:direction => 'down'}}, :fade => {:method => 'fadeOut'}, :fold_in => {:method => 'fold', :mode => 'hide'}, :fold_out => {:method => 'fold', :mode => 'show'}, :grow => {:method => 'scale', :mode => 'show'}, :shrink => {:method => 'scale', :mode => 'hide'}, :slide_down => {:method => 'slide', :mode => 'show', :options => {:direction => 'up'}}, :slide_up => {:method => 'slide', :mode => 'hide', :options => {:direction => 'up'}}, :slide_right => {:method => 'slide', :mode => 'show', :options => {:direction => 'left'}}, :slide_left => {:method => 'slide', :mode => 'hide', :options => {:direction => 'left'}}, :squish => {:method => 'scale', :mode => 'hide', :options => {:origin => "['top','left']"}}, :switch_on => {:method => 'clip', :mode => 'show', :options => {:direction => 'vertical'}}, :switch_off => {:method => 'clip', :mode => 'hide', :options => {:direction => 'vertical'}}, :toggle_appear => {:method => 'fadeToggle'}, :toggle_slide => {:method => 'slide', :mode => 'toggle', :options => {:direction => 'up'}}, :toggle_blind => {:method => 'blind', :mode => 'toggle', :options => {:direction => 'vertical'}}, } end def visual_effect(name, element_id = false, js_options = {}) element = element_id ? element_id : "this" if SCRIPTACULOUS_EFFECTS.has_key? name.to_sym effect = SCRIPTACULOUS_EFFECTS[name.to_sym] name = effect[:method] mode = effect[:mode] js_options = js_options.merge(effect[:options]) if effect[:options] end [:color, :direction, :startcolor, :endcolor].each do |option| js_options[option] = "'#{js_options[option]}'" if js_options[option] end if js_options.has_key? :duration speed = js_options.delete :duration speed = (speed * 1000).to_i unless speed.nil? 
else speed = js_options.delete :speed end if ['fadeIn','fadeOut','fadeToggle'].include?(name) # 090905 - Jake - changed ' to \" so it passes assert_select_rjs with an id javascript = "#{JQUERY_VAR}(\"#{jquery_id(element_id)}\").#{name}(" javascript << "#{speed}" unless speed.nil? javascript << ");" else # 090905 - Jake - changed ' to \" so it passes "assert_select_rjs :effect, ID" javascript = "#{JQUERY_VAR}(\"#{jquery_id(element_id)}\").#{mode || 'effect'}('#{name}'" javascript << ",#{options_for_javascript(js_options)}" unless speed.nil? && js_options.empty? javascript << ",#{speed}" unless speed.nil? javascript << ");" end end def sortable_element_js(element_id, options = {}) #:nodoc: #convert similar attributes options[:handle] = ".#{options[:handle]}" if options[:handle] if options[:tag] || options[:only] options[:items] = "> " options[:items] << options.delete(:tag) if options[:tag] options[:items] << ".#{options.delete(:only)}" if options[:only] end options[:connectWith] = options.delete(:containment).map {|x| "##{x}"} if options[:containment] options[:containment] = options.delete(:container) if options[:container] options[:dropOnEmpty] = false unless options[:dropOnEmpty] options[:helper] = "'clone'" if options[:ghosting] == true options[:axis] = case options.delete(:constraint) when "vertical", :vertical "y" when "horizontal", :horizontal "x" when false nil when nil "y" end options.delete(:axis) if options[:axis].nil? options.delete(:overlap) options.delete(:ghosting) if options[:onUpdate] || options[:url] if options[:format] options[:with] ||= "#{JQUERY_VAR}(this).sortable('serialize',{key:'#{element_id}[]', expression:#{options[:format]}})" options.delete(:format) else options[:with] ||= "#{JQUERY_VAR}(this).sortable('serialize',{key:'#{element_id}[]'})" end options[:onUpdate] ||= "function(){" + remote_function(options) + "}" end options.delete_if { |key, value| PrototypeHelper::AJAX_OPTIONS.include?(key) } options[:update] = options.delete(:onUpdate) if options[:onUpdate] [:axis, :cancel, :containment, :cursor, :handle, :tolerance, :items, :placeholder].each do |option| options[option] = "'#{options[option]}'" if options[option] end options[:connectWith] = array_or_string_for_javascript(options[:connectWith]) if options[:connectWith] %(#{JQUERY_VAR}('#{jquery_id(element_id)}').sortable(#{options_for_javascript(options)});) end def draggable_element_js(element_id, options = {}) %(#{JQUERY_VAR}("#{jquery_id(element_id)}").draggable(#{options_for_javascript(options)});) end def drop_receiving_element_js(element_id, options = {}) #convert similar options options[:hoverClass] = options.delete(:hoverclass) if options[:hoverclass] options[:drop] = options.delete(:onDrop) if options[:onDrop] if options[:drop] || options[:url] options[:with] ||= "'id=' + encodeURIComponent(#{JQUERY_VAR}(ui.draggable).attr('id'))" options[:drop] ||= "function(ev, ui){" + remote_function(options) + "}" end options.delete_if { |key, value| PrototypeHelper::AJAX_OPTIONS.include?(key) } options[:accept] = array_or_string_for_javascript(options[:accept]) if options[:accept] [:activeClass, :hoverClass, :tolerance].each do |option| options[option] = "'#{options[option]}'" if options[option] end %(#{JQUERY_VAR}('#{jquery_id(element_id)}').droppable(#{options_for_javascript(options)});) end end end end
41.163399
158
0.562083
6ab2cee2a6f850c2a34ec1c31d9b3707d8cea52e
1,600
##
# Bootstrap tests for Exceptions

assert('BS Exception 1') do
  begin
    1+1
  ensure
    2+2
  end == 2
end

assert('BS Exception 2') do
  begin
    1+1
    begin
      2+2
    ensure
      3+3
    end
  ensure
    4+4
  end == 4
end

assert('BS Exception 3') do
  begin
    1+1
    begin
      2+2
    ensure
      3+3
    end
  ensure
    4+4
    begin
      5+5
    ensure
      6+6
    end
  end == 4
end

assert('BS Exception 4') do
  a = nil
  1.times{|e|
    begin
    rescue => err
    end
    a = err.class
  }
  a == NilClass
end

assert('BS Exception 5') do
  $ans = []
  def m
    $!
  end
  def m2
    1.times{
      begin
        return
      ensure
        $ans << m
      end
    }
  end
  m2
  $ans == [nil]
end

assert('BS Exception 6') do
  $i = 0
  def m
    iter{
      begin
        $i += 1
        begin
          $i += 2
          break
        ensure
        end
      ensure
        $i += 4
      end
      $i = 0
    }
  end
  def iter
    yield
  end
  m
  $i == 7
end

assert('BS Exception 7') do
  $i = 0
  def m
    begin
      $i += 1
      begin
        $i += 2
        return
      ensure
        $i += 3
      end
    ensure
      $i += 4
    end
    p :end
  end
  m
  $i == 10
end

assert('BS Exception 8') do
  begin
    1
  rescue
    2
  else
    3
  end == 3
end

assert('BS Exception 9') do
  begin
    1+1
  rescue
    2+2
  else
    3+3
  ensure
    4+4
  end == 6
end

assert('BS Exception 10') do
  begin
    1+1
    begin
      2+2
    rescue
      3+3
    else
      4+4
    end
  rescue
    5+5
  else
    6+6
  ensure
    7+7
  end == 12
end
10.062893
32
0.446875
180244cbbafc94b9e655270060f74735ff079513
1,094
require 'serialcaster/episode_days'

module Serialcaster
  RSpec.describe EpisodeDays do
    let(:friday) { Date.new(2016, 11, 11) }
    let(:monday) { Date.new(2016, 11, 14) }
    let(:wednesday) { Date.new(2016, 11, 16) }

    subject { EpisodeDays.new(:wednesday, :friday) }

    it "thinks a Friday is available" do
      expect(subject.available?(friday)).to be true
    end

    it "thinks a Monday is not available" do
      expect(subject.available?(monday)).to be false
    end

    it "thinks a wednesday is available" do
      expect(subject.available?(wednesday)).to be true
    end

    context "equality" do
      it "compares equal to an instance with the same days" do
        expect(subject).to eq(EpisodeDays.new(:friday, :wednesday))
      end

      it "compares non-equal to an instance with different days" do
        expect(subject).not_to eq(EpisodeDays.new(:friday, :monday))
      end

      it "doesn't care about the order days were passed in" do
        expect(EpisodeDays.new(:monday, :friday)).to eq(EpisodeDays.new(:friday, :monday))
      end
    end
  end
end
28.789474
90
0.660878
5d487a84d40dedaa2d785b0d102d7a8d948902f3
1,332
# frozen_string_literal: true

# WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/version-3/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE

Gem::Specification.new do |spec|
  spec.name = 'aws-sdk-redshiftdataapiservice'
  spec.version = File.read(File.expand_path('../VERSION', __FILE__)).strip
  spec.summary = 'AWS SDK for Ruby - Redshift Data API Service'
  spec.description = 'Official AWS Ruby gem for Redshift Data API Service. This gem is part of the AWS SDK for Ruby.'
  spec.author = 'Amazon Web Services'
  spec.homepage = 'https://github.com/aws/aws-sdk-ruby'
  spec.license = 'Apache-2.0'
  spec.email = ['[email protected]']
  spec.require_paths = ['lib']
  spec.files = Dir['LICENSE.txt', 'CHANGELOG.md', 'VERSION', 'lib/**/*.rb']

  spec.metadata = {
    'source_code_uri' => 'https://github.com/aws/aws-sdk-ruby/tree/version-3/gems/aws-sdk-redshiftdataapiservice',
    'changelog_uri' => 'https://github.com/aws/aws-sdk-ruby/tree/version-3/gems/aws-sdk-redshiftdataapiservice/CHANGELOG.md'
  }

  spec.add_dependency('aws-sdk-core', '~> 3', '>= 3.121.2')
  spec.add_dependency('aws-sigv4', '~> 1.1')

  spec.required_ruby_version = '>= 2.3'
end
40.363636
126
0.678679
b9ed0a0ac22d4bc6ddad7a8534e4244ffb2ab813
926
class CreateArtists < ActiveRecord::Migration[4.2]
  def self.up
    create_table :artists do |t|
      t.column :name, :string, :null => false
      t.column :creator_id, :integer, :null => false
      t.column :is_active, :boolean, :null => false, :default => true
      t.column :is_banned, :boolean, :null => false, :default => false
      t.column :other_names, :text
      t.column :other_names_index, "tsvector"
      t.column :group_name, :string
      t.timestamps
    end

    add_index :artists, :name, :unique => true
    add_index :artists, :group_name

    execute "CREATE INDEX index_artists_on_other_names_index ON artists USING GIN (other_names_index)"
    execute "CREATE TRIGGER trigger_artists_on_update BEFORE INSERT OR UPDATE ON artists FOR EACH ROW EXECUTE PROCEDURE tsvector_update_trigger('other_names_index', 'public.danbooru', 'other_names')"
  end

  def self.down
    drop_table :artists
  end
end
38.583333
199
0.705184
bf95daebc3ac0d42eb0c24d9b9e2684da924bd7c
59
class Tartarus
  module Rb
    VERSION = "0.5.0"
  end
end
9.833333
21
0.627119
6a06b70a8e71e96ff949cd5de0db4e42dc0302f5
263
class CreateMarriages < ActiveRecord::Migration
  def change
    create_table :marriages do |t|
      t.belongs_to :husband, index: true, foreign_key: true
      t.belongs_to :wife, index: true, foreign_key: true

      t.timestamps null: false
    end
  end
end
23.909091
59
0.69962
33e2bc50c4e17c37a5f35334df8ec1a6df410c49
369
# Copyright (c) Universidade Federal Fluminense (UFF).
# This file is part of SAPOS. Please, consult the license terms in the LICENSE file.

class UpdateOffsetsColumnsOfNotification < ActiveRecord::Migration[5.1]
  def up
    change_column :notifications, :query_offset, :string
    change_column :notifications, :notification_offset, :string
  end

  def down
  end
end
28.384615
84
0.772358
61ede813775c7fd08c180f9eb589128b6407ded4
7,071
##
# Hash
#
# ISO 15.2.13
class Hash
  ##
  # Equality---Two hashes are equal if they each contain the same number
  # of keys and if each key-value pair is equal to (according to
  # <code>Object#==</code>) the corresponding elements in the other
  # hash.
  #
  # ISO 15.2.13.4.1
  def ==(hash)
    return true if self.equal?(hash)
    unless Hash === hash
      return false
    end
    return false if self.size != hash.size
    self.each do |k,v|
      return false unless hash.key?(k)
      return false unless self[k] == hash[k]
    end
    return true
  end

  ##
  # Returns <code>true</code> if <i>hash</i> and <i>other</i> are
  # both hashes with the same content compared by eql?.
  #
  # ISO 15.2.13.4.32 (x)
  def eql?(hash)
    return true if self.equal?(hash)
    unless Hash === hash
      return false
    end
    return false if self.size != hash.size
    self.each do |k,v|
      return false unless hash.key?(k)
      return false unless self[k].eql?(hash[k])
    end
    return true
  end

  ##
  # Delete the element with the key +key+.
  # Return the value of the element if +key+
  # was found. Return nil if nothing was
  # found. If a block is given, call the
  # block with the value of the element.
  #
  # ISO 15.2.13.4.8
  def delete(key, &block)
    if block && !self.has_key?(key)
      return block.call(key)
    end
    self.__delete(key)
  end

  ##
  # Calls the given block for each element of +self+
  # and pass the key and value of each element.
  #
  # call-seq:
  #   hsh.each {| key, value | block } -> hsh
  #   hsh.each_pair {| key, value | block } -> hsh
  #   hsh.each -> an_enumerator
  #   hsh.each_pair -> an_enumerator
  #
  #
  # If no block is given, an enumerator is returned instead.
  #
  #   h = { "a" => 100, "b" => 200 }
  #   h.each {|key, value| puts "#{key} is #{value}" }
  #
  # <em>produces:</em>
  #
  #   a is 100
  #   b is 200
  #
  # ISO 15.2.13.4.9
  def each(&block)
    return to_enum :each unless block

    keys = self.keys
    vals = self.values
    len = self.size
    i = 0
    while i < len
      block.call [keys[i], vals[i]]
      i += 1
    end
    self
  end

  ##
  # Calls the given block for each element of +self+
  # and pass the key of each element.
  #
  # call-seq:
  #   hsh.each_key {| key | block } -> hsh
  #   hsh.each_key -> an_enumerator
  #
  # If no block is given, an enumerator is returned instead.
  #
  #   h = { "a" => 100, "b" => 200 }
  #   h.each_key {|key| puts key }
  #
  # <em>produces:</em>
  #
  #   a
  #   b
  #
  # ISO 15.2.13.4.10
  def each_key(&block)
    return to_enum :each_key unless block

    self.keys.each{|k| block.call(k)}
    self
  end

  ##
  # Calls the given block for each element of +self+
  # and pass the value of each element.
  #
  # call-seq:
  #   hsh.each_value {| value | block } -> hsh
  #   hsh.each_value -> an_enumerator
  #
  # If no block is given, an enumerator is returned instead.
  #
  #   h = { "a" => 100, "b" => 200 }
  #   h.each_value {|value| puts value }
  #
  # <em>produces:</em>
  #
  #   100
  #   200
  #
  # ISO 15.2.13.4.11
  def each_value(&block)
    return to_enum :each_value unless block

    self.values.each{|v| block.call(v)}
    self
  end

  ##
  # Return a hash which contains the content of
  # +self+ and +other+. If a block is given
  # it will be called for each element with
  # a duplicate key. The value of the block
  # will be the final value of this element.
  #
  # ISO 15.2.13.4.22
  def merge(other, &block)
    raise TypeError, "Hash required (#{other.class} given)" unless Hash === other
    h = self.dup
    if block
      other.each_key{|k|
        h[k] = (self.has_key?(k))? block.call(k, self[k], other[k]): other[k]
      }
    else
      other.each_key{|k| h[k] = other[k]}
    end
    h
  end

  # internal method for Hash inspection
  def _inspect(recur_list)
    return "{}" if self.size == 0
    return "{...}" if recur_list[self.object_id]
    recur_list[self.object_id] = true
    ary=[]
    keys=self.keys
    vals=self.values
    size=keys.size
    i=0
    while i<size
      ary<<(keys[i]._inspect(recur_list) + "=>" + vals[i]._inspect(recur_list))
      i+=1
    end
    "{"+ary.join(", ")+"}"
  end

  ##
  # Return the contents of this hash as a string.
  #
  # ISO 15.2.13.4.30 (x)
  def inspect
    self._inspect({})
  end

  # ISO 15.2.13.4.31 (x)
  alias to_s inspect

  ##
  # call-seq:
  #   hsh.reject! {| key, value | block } -> hsh or nil
  #   hsh.reject! -> an_enumerator
  #
  # Equivalent to <code>Hash#delete_if</code>, but returns
  # <code>nil</code> if no changes were made.
  #
  # 1.8/1.9 Hash#reject! returns Hash; ISO says nothing.
  #
  def reject!(&block)
    return to_enum :reject! unless block

    keys = []
    self.each{|k,v|
      if block.call([k, v])
        keys.push(k)
      end
    }
    return nil if keys.size == 0
    keys.each{|k|
      self.delete(k)
    }
    self
  end

  ##
  # call-seq:
  #   hsh.reject {|key, value| block} -> a_hash
  #   hsh.reject -> an_enumerator
  #
  # Returns a new hash consisting of entries for which the block returns false.
  #
  # If no block is given, an enumerator is returned instead.
  #
  #   h = { "a" => 100, "b" => 200, "c" => 300 }
  #   h.reject {|k,v| k < "b"}  #=> {"b" => 200, "c" => 300}
  #   h.reject {|k,v| v > 100}  #=> {"a" => 100}
  #
  # 1.8/1.9 Hash#reject returns Hash; ISO says nothing.
  #
  def reject(&block)
    return to_enum :reject unless block

    h = {}
    self.each{|k,v|
      unless block.call([k, v])
        h[k] = v
      end
    }
    h
  end

  ##
  # call-seq:
  #   hsh.select! {| key, value | block } -> hsh or nil
  #   hsh.select! -> an_enumerator
  #
  # Equivalent to <code>Hash#keep_if</code>, but returns
  # <code>nil</code> if no changes were made.
  #
  # 1.9 Hash#select! returns Hash; ISO says nothing.
  #
  def select!(&block)
    return to_enum :select! unless block

    keys = []
    self.each{|k,v|
      unless block.call([k, v])
        keys.push(k)
      end
    }
    return nil if keys.size == 0
    keys.each{|k|
      self.delete(k)
    }
    self
  end

  ##
  # call-seq:
  #   hsh.select {|key, value| block} -> a_hash
  #   hsh.select -> an_enumerator
  #
  # Returns a new hash consisting of entries for which the block returns true.
  #
  # If no block is given, an enumerator is returned instead.
  #
  #   h = { "a" => 100, "b" => 200, "c" => 300 }
  #   h.select {|k,v| k > "a"}  #=> {"b" => 200, "c" => 300}
  #   h.select {|k,v| v < 200}  #=> {"a" => 100}
  #
  # 1.9 Hash#select returns Hash; ISO says nothing
  #
  def select(&block)
    return to_enum :select unless block

    h = {}
    self.each{|k,v|
      if block.call([k, v])
        h[k] = v
      end
    }
    h
  end
end

##
# Hash is enumerable
#
# ISO 15.2.13.3
class Hash
  include Enumerable
end
22.957792
81
0.549286
9117a673794395e433f98778afe6ce678ec40940
2,548
# frozen_string_literal: true

require 'support/encrypted_event'
require 'support/dummy_repository'

module EventStoreClient
  RSpec.describe Mapper::Encrypted do
    let(:data) do
      {
        'user_id' => 'dab48d26-e4f8-41fc-a9a8-59657e590716',
        'first_name' => 'Anakin',
        'last_name' => 'Skylwalker',
        'profession' => 'Jedi'
      }
    end

    describe '#serialize' do
      let(:encrypted_data) do
        {
          'user_id' => 'dab48d26-e4f8-41fc-a9a8-59657e590716',
          'first_name' => 'es_encrypted',
          'last_name' => 'es_encrypted',
          'profession' => 'Jedi',
          'es_encrypted' => 'darthvader'
        }
      end

      let(:user_registered) { EncryptedEvent.new(data: data) }

      subject { described_class.new(DummyRepository.new).serialize(user_registered) }

      it 'returns serialized event' do
        expect(subject).to be_kind_of(EventStoreClient::Event)
        expect(subject.data).to eq(JSON.generate(encrypted_data))
        expect(subject.metadata).to include('created_at')
        expect(subject.metadata).to include('encryption')
        expect(subject.type).to eq('EncryptedEvent')
      end
    end

    describe '#deserialize' do
      let(:encryption_metadata) do
        {
          iv: 'DarthSidious',
          key: 'dab48d26-e4f8-41fc-a9a8-59657e590716',
          attributes: %i[first_name last_name]
        }
      end

      let(:encrypted_data) do
        {
          'user_id' => 'dab48d26-e4f8-41fc-a9a8-59657e590716',
          'first_name' => 'es_encrypted',
          'last_name' => 'es_encrypted',
          'profession' => 'Jedi',
          'es_encrypted' => 'darthvader'
        }
      end

      let(:decrypted_data) do
        {
          'user_id' => 'dab48d26-e4f8-41fc-a9a8-59657e590716',
          'first_name' => 'Anakin',
          'last_name' => 'Skylwalker',
          'profession' => 'Jedi'
        }
      end

      let(:user_registered) do
        EventStoreClient::Event.new(
          data: JSON.generate(encrypted_data),
          metadata: JSON.generate(encryption: encryption_metadata),
          type: 'EncryptedEvent'
        )
      end

      subject { described_class.new(DummyRepository.new).deserialize(user_registered) }

      it 'returns deserialized event' do
        expect(subject).to be_kind_of(EncryptedEvent)
        expect(subject.data).to eq(decrypted_data)
        expect(subject.metadata).to include('created_at')
        expect(subject.data).not_to include('es_encrypted')
      end
    end
  end
end
28.954545
87
0.600863
87a536ddb037cc9e891b23bde8a67417c12cf010
953
Pod::Spec.new do |spec|
  spec.name = 'CCHMapClusterController'
  spec.version = '1.6.6'
  spec.license = 'MIT'

  spec.summary = 'High-performance map clustering with MapKit for iOS and OS X. Integrate with 4 lines of code.'
  spec.homepage = 'https://github.com/choefele/CCHMapClusterController'
  spec.authors = { 'Claus Höfele' => '[email protected]' }
  spec.social_media_url = 'https://twitter.com/claushoefele'

  spec.source = { :git => 'https://github.com/choefele/CCHMapClusterController.git', :tag => spec.version.to_s }
  spec.frameworks = 'MapKit', 'CoreLocation'
  spec.requires_arc = true

  spec.ios.deployment_target = '6.0'
  spec.osx.deployment_target = '10.9'

  spec.source_files = 'CCHMapClusterController/*.{h,m}'
  spec.private_header_files = 'CCHMapClusterController/{CCHMapTree,CCHMapTreeUtils,CCHMapClusterControllerUtils,CCHMapClusterControllerDebugPolygon,CCHMapClusterOperation,CCHMapViewDelegateProxy}.h'
end
50.157895
198
0.751312
1ad7e78550488342d276a9584a45813139c3eb9a
7,618
class Factory

  class << self
    attr_accessor :factories #:nodoc:

    # An Array of strings specifying locations that should be searched for
    # factory definitions. By default, factory_girl will attempt to require
    # "factories," "test/factories," and "spec/factories." Only the first
    # existing file will be loaded.
    attr_accessor :definition_file_paths
  end

  self.factories = {}
  self.definition_file_paths = %w(factories test/factories spec/factories)

  attr_reader :factory_name
  attr_reader :attributes #:nodoc:

  # Defines a new factory that can be used by the build strategies (create and
  # build) to build new objects.
  #
  # Arguments:
  #   name: (Symbol)
  #     A unique name used to identify this factory.
  #   options: (Hash)
  #     class: the class that will be used when generating instances for this
  #       factory. If not specified, the class will be guessed from the
  #       factory name.
  #
  # Yields:
  #   The newly created factory (Factory)
  def self.define (name, options = {})
    instance = Factory.new(name, options)
    yield(instance)
    self.factories[instance.factory_name] = instance
  end

  def build_class #:nodoc:
    @build_class ||= class_for(@options[:class] || factory_name)
  end

  def initialize (name, options = {}) #:nodoc:
    assert_valid_options(options)
    @factory_name = factory_name_for(name)
    @options = options
    @attributes = []
  end

  # Adds an attribute that should be assigned on generated instances for this
  # factory.
  #
  # This method should be called with either a value or block, but not both. If
  # called with a block, the attribute will be generated "lazily," whenever an
  # instance is generated. Lazy attribute blocks will not be called if that
  # attribute is overriden for a specific instance.
  #
  # When defining lazy attributes, an instance of Factory::Proxy will
  # be yielded, allowing associations to be built using the correct build
  # strategy.
  #
  # Arguments:
  #   name: (Symbol)
  #     The name of this attribute. This will be assigned using :"#{name}=" for
  #     generated instances.
  #   value: (Object)
  #     If no block is given, this value will be used for this attribute.
  def add_attribute (name, value = nil, &block)
    if block_given?
      if value
        raise AttributeDefinitionError, "Both value and block given"
      else
        attribute = Attribute::Dynamic.new(name, block)
      end
    else
      attribute = Attribute::Static.new(name, value)
    end

    if attribute_defined?(attribute.name)
      raise AttributeDefinitionError, "Attribute already defined: #{name}"
    end

    @attributes << attribute
  end

  # Calls add_attribute using the missing method name as the name of the
  # attribute, so that:
  #
  #   Factory.define :user do |f|
  #     f.name 'Billy Idol'
  #   end
  #
  # and:
  #
  #   Factory.define :user do |f|
  #     f.add_attribute :name, 'Billy Idol'
  #   end
  #
  # are equivilent.
  def method_missing (name, *args, &block)
    add_attribute(name, *args, &block)
  end

  # Adds an attribute that builds an association. The associated instance will
  # be built using the same build strategy as the parent instance.
  #
  # Example:
  #   Factory.define :user do |f|
  #     f.name 'Joey'
  #   end
  #
  #   Factory.define :post do |f|
  #     f.association :author, :factory => :user
  #   end
  #
  # Arguments:
  #   name: (Symbol)
  #     The name of this attribute.
  #   options: (Hash)
  #     factory: (Symbol)
  #       The name of the factory to use when building the associated instance.
  #       If no name is given, the name of the attribute is assumed to be the
  #       name of the factory. For example, a "user" association will by
  #       default use the "user" factory.
  def association (name, options = {})
    factory_name = options.delete(:factory) || name
    @attributes << Attribute::Association.new(name, factory_name, options)
  end

  # Generates and returns a Hash of attributes from this factory. Attributes
  # can be individually overridden by passing in a Hash of attribute => value
  # pairs.
  #
  # Arguments:
  #   overrides: (Hash)
  #     Attributes to overwrite for this set.
  #
  # Returns:
  #   A set of attributes that can be used to build an instance of the class
  #   this factory generates. (Hash)
  def self.attributes_for (name, overrides = {})
    factory_by_name(name).run(Proxy::AttributesFor, overrides)
  end

  # Generates and returns an instance from this factory. Attributes can be
  # individually overridden by passing in a Hash of attribute => value pairs.
  #
  # Arguments:
  #   overrides: (Hash)
  #     See attributes_for
  #
  # Returns:
  #   An instance of the class this factory generates, with generated
  #   attributes assigned.
  def self.build (name, overrides = {})
    factory_by_name(name).run(Proxy::Build, overrides)
  end

  # Generates, saves, and returns an instance from this factory. Attributes can
  # be individually overridden by passing in a Hash of attribute => value
  # pairs.
  #
  # If the instance is not valid, an ActiveRecord::Invalid exception will be
  # raised.
  #
  # Arguments:
  #   overrides: (Hash)
  #     See attributes_for
  #
  # Returns:
  #   A saved instance of the class this factory generates, with generated
  #   attributes assigned.
  def self.create (name, overrides = {})
    factory_by_name(name).run(Proxy::Create, overrides)
  end

  def self.find_definitions #:nodoc:
    definition_file_paths.each do |path|
      require("#{path}.rb") if File.exists?("#{path}.rb")

      if File.directory? path
        Dir[File.join(path, '*.rb')].each do |file|
          require file
        end
      end
    end
  end

  def run (proxy_class, overrides) #:nodoc:
    proxy = proxy_class.new(build_class)
    overrides = symbolize_keys(overrides)
    overrides.each {|attr, val| proxy.set(attr, val) }
    passed_keys = overrides.keys.collect {|k| Factory.aliases_for(k) }.flatten
    @attributes.each do |attribute|
      unless passed_keys.include?(attribute.name)
        attribute.add_to(proxy)
      end
    end
    proxy.result
  end

  private

  def self.factory_by_name (name)
    factories[name.to_sym] or raise ArgumentError.new("No such factory: #{name.to_s}")
  end

  def class_for (class_or_to_s)
    if class_or_to_s.respond_to?(:to_sym)
      Object.const_get(variable_name_to_class_name(class_or_to_s))
    else
      class_or_to_s
    end
  end

  def factory_name_for (class_or_to_s)
    if class_or_to_s.respond_to?(:to_sym)
      class_or_to_s.to_sym
    else
      class_name_to_variable_name(class_or_to_s).to_sym
    end
  end

  def attribute_defined? (name)
    !@attributes.detect {|attr| attr.name == name }.nil?
  end

  def assert_valid_options(options)
    invalid_keys = options.keys - [:class]
    unless invalid_keys == []
      raise ArgumentError, "Unknown arguments: #{invalid_keys.inspect}"
    end
  end

  # Based on ActiveSupport's underscore inflector
  def class_name_to_variable_name(name)
    name.to_s.gsub(/::/, '/').
      gsub(/([A-Z]+)([A-Z][a-z])/,'\1_\2').
      gsub(/([a-z\d])([A-Z])/,'\1_\2').
      tr("-", "_").
      downcase
  end

  # Based on ActiveSupport's camelize inflector
  def variable_name_to_class_name(name)
    name.to_s.
      gsub(/\/(.?)/) { "::#{$1.upcase}" }.
      gsub(/(?:^|_)(.)/) { $1.upcase }
  end

  # From ActiveSupport
  def symbolize_keys(hash)
    hash.inject({}) do |options, (key, value)|
      options[(key.to_sym rescue key) || key] = value
      options
    end
  end

end
29.413127
86
0.663035
ffc2430f3db0582befb3b70a464dd75ba0fa9a5f
399
class CreateSocMedLikes < ActiveRecord::Migration[6.0]
  def change
    create_table :soc_med_likes do |t|
      t.references :target, null: false, polymorphic: true
      t.references :owner, null: false, polymorphic: true

      t.timestamps
    end

    add_index :soc_med_likes, %i[target_id target_type owner_id owner_type], unique: true, name: :idx_soc_med_likes_on_target_and_owner
  end
end
30.692308
135
0.739348
4ad13ac26efe4d038b13687ca05d3f88e9819e8f
5,226
# coding: utf-8
require_relative "sha256lib.rb"

# -------
# Default
# -------
if !defined? $input
  # default
  $input = "abc"
  $message = $input.unpack("B*")[0] # 011000010110001001100011
  $padded = padding($message)
  $blocks = split($padded, 512)
  $block_number = 0 # message block number
  $block = $blocks[$block_number]

  # argument passed
  $block = ARGV[0] if ARGV[0] # accept 512 bit binary string as message block
end

# Set variables (these are global variables given to us by the parent sha256.rb script)
if defined? $block
  $block = $block
  $block_number = $block_number
end

# ----------------
# Message Schedule
# ----------------
# Get size of block
size = $block.size

# The message block provides the first 16 words for the message schedule (512 bits / 32 bits = 16 words)
$schedule = $block.scan(/.{32}/).map { |w| w.to_i(2) } # convert from binary string to integer for calculations

# Remember the values used to calculate each word from 16 to 63
memory = Array.new(16) # leave first 16 blank because they were not calculated from previous values

# Calculate remaining 48 words
16.upto(63) do |i|
  $schedule << add(sigma1($schedule[i - 2]), $schedule[i - 7], sigma0($schedule[i - 15]), $schedule[i - 16])
  memory << [sigma1($schedule[i - 2]), $schedule[i - 7], sigma0($schedule[i - 15]), $schedule[i - 16]] # store the values used in the calculation as we go
end

# --------
# Settings
# --------
indent = " " * 2

# ---------
# Animation
# ---------
# Frame
system "clear"
puts $state + "\n" if defined? $state
puts "#{indent}-------"
puts "#{indent}block #{$block_number}:"
puts "#{indent}-------"
puts "#{indent}#{$block}"
delay(:slowest)

# Frame
system "clear"
puts $state + "\n" if defined? $state
puts "#{indent}-------"
puts "#{indent}block #{$block_number}:"
puts "#{indent}-------"
puts "#{indent}#{$block}"
puts
puts "#{indent}----------------"
puts "#{indent}message schedule:"
puts "#{indent}----------------"
delay(:slowest)

# Frame
64.times do |i|
  system "clear"
  puts $state + "\n" if defined? $state
  puts "#{indent}-------"
  puts "#{indent}block #{$block_number}:"
  puts "#{indent}-------"
  if i <= 15
    puts "#{indent}#{$block[((i + 1) * 32)..-1].ljust(size, " ")}"
  else
    puts "#{indent}#{" " * size}" # leave space where it used to be
  end
  puts
  puts "#{indent}----------------"
  puts "#{indent}message schedule:"
  puts "#{indent}----------------"

  (i + 1).times do |j|
    # first 16 value are just words from the message block
    if i <= 15
      puts "#{indent}W#{j.to_s.ljust(2, " ")} #{bits($schedule[j])}"
    end

    # show values used in calculation for last 48 words
    if i >= 16
      if j < i - 16
        # show nothing
      elsif j == i - 16
        puts "#{indent}W#{j.to_s.ljust(2, " ")} #{bits($schedule[j])} -> #{bits(memory[i][3])}"
      elsif j == i - 15
        puts "#{indent}W#{j.to_s.ljust(2, " ")} #{bits($schedule[j])} -> σ0 #{bits(memory[i][2])}"
      elsif j == i - 7
        puts "#{indent}W#{j.to_s.ljust(2, " ")} #{bits($schedule[j])} -> #{bits(memory[i][1])}"
      elsif j == i - 2
        puts "#{indent}W#{j.to_s.ljust(2, " ")} #{bits($schedule[j])} -> σ1 #{bits(memory[i][0])}"
      elsif j == i
        puts "#{indent}W#{j.to_s.ljust(2, " ")} #{bits($schedule[j])} = σ1(t-2) + (t-7) + σ0(t-15) + (t-16)"
      else
        puts "#{indent}W#{j.to_s.ljust(2, " ")} #{bits($schedule[j])}"
      end
    end
  end

  # pause before calculating remaining 48 words after the initial 16
  if i == 15
    delay(:normal)
  else
    delay(:fastest)
  end
end
delay(:normal)

# Frame
system "clear"
puts $state + "\n" if defined? $state
puts "#{indent}-------"
puts "#{indent}block #{$block_number}:"
puts "#{indent}-------"
puts "#{indent}#{" " * size}"
puts
puts "#{indent}----------------"
puts "#{indent}message schedule:"
puts "#{indent}----------------"
47.upto(63) do |i|
  puts "#{indent}W#{i.to_s.ljust(2, " ")} #{bits($schedule[i])}"
end
delay(:end)

# Save Final State
$state = <<-FRAME
#{$state}
#{indent}-------
#{indent}block #{$block_number}:
#{indent}-------
#{indent}#{" " * size}

#{indent}----------------
#{indent}message schedule:
#{indent}----------------
#{indent}W#{47.to_s.ljust(2, " ")} #{bits($schedule[47])}
#{indent}W#{48.to_s.ljust(2, " ")} #{bits($schedule[48])}
#{indent}W#{49.to_s.ljust(2, " ")} #{bits($schedule[49])}
#{indent}W#{50.to_s.ljust(2, " ")} #{bits($schedule[50])}
#{indent}W#{51.to_s.ljust(2, " ")} #{bits($schedule[51])}
#{indent}W#{52.to_s.ljust(2, " ")} #{bits($schedule[52])}
#{indent}W#{53.to_s.ljust(2, " ")} #{bits($schedule[53])}
#{indent}W#{54.to_s.ljust(2, " ")} #{bits($schedule[54])}
#{indent}W#{55.to_s.ljust(2, " ")} #{bits($schedule[55])}
#{indent}W#{56.to_s.ljust(2, " ")} #{bits($schedule[56])}
#{indent}W#{57.to_s.ljust(2, " ")} #{bits($schedule[57])}
#{indent}W#{58.to_s.ljust(2, " ")} #{bits($schedule[58])}
#{indent}W#{59.to_s.ljust(2, " ")} #{bits($schedule[59])}
#{indent}W#{60.to_s.ljust(2, " ")} #{bits($schedule[60])}
#{indent}W#{61.to_s.ljust(2, " ")} #{bits($schedule[61])}
#{indent}W#{62.to_s.ljust(2, " ")} #{bits($schedule[62])}
#{indent}W#{63.to_s.ljust(2, " ")} #{bits($schedule[63])}
FRAME

system "clear"
puts $state
30.034483
154
0.565059
186d8c95b085f3f5ad3c2bb7c51ed8fd5d2313f0
129
require 'test_helper'

class SearchAssignee1Test < ActiveSupport::TestCase
  # test "the truth" do
  #   assert true
  # end
end
16.125
51
0.72093
017731998c9fd16594bcea2e2817e29d9021abea
571
module Rack::App::SingletonMethods::Settings

  def cli(&block)
    @cli ||= Rack::App::CLI.new
    @cli.instance_exec(&block) unless block.nil?
    @cli
  end

  protected

  def headers(new_headers=nil)
    middleware do |b|
      b.use(Rack::App::Middlewares::HeaderSetter,new_headers)
    end if new_headers.is_a?(Hash)

    new_headers
  end

  def error(*exception_classes, &block)
    @error_handler ||= Rack::App::ErrorHandler.new

    unless block.nil?
      @error_handler.register_handler(exception_classes, block)
    end

    return @error_handler
  end

end
19.689655
63
0.686515
33c26adb4e6f591f091342a07cc35a49fabbaadf
2,303
require 'spec_helper'

include WebMock::API

describe VCloud::RestApi do

  describe 'when receiving an HTTP error code' do

    it 'should handle a HTTP 401 error when logging in with a bad password' do
      stub_request(:post, 'https://someuser%40someorg:[email protected]/api/sessions').
        with(:headers => {'Accept'=>'application/*+xml;version=1.5', 'Accept-Encoding'=>'gzip, deflate', 'User-Agent'=>'Ruby'}).
        to_return(:status => 401, :body => '', :headers => {})

      session = VCloud::Client.new('https://some.vcloud.com/api/', '1.5')

      expect { session.login('someuser@someorg', 'badpassword') }.to raise_error(VCloud::VCloudError) { |error|
        error.major_error_code.should == 401
      }
    end

    it 'should handle a HTTP 403 error when logging out with a bad token' do
      stub_request(:post, 'https://someuser%40someorg:[email protected]/api/sessions').
        with(:headers => {'Accept'=>'application/*+xml;version=1.5'}).
        to_return(:status => 200, :body => fixture_file('session.xml'),
          :headers => {:x_vcloud_authorization => 'abc123xyz'})

      session = VCloud::Client.new('https://some.vcloud.com/api/', '1.5')
      session.login('someuser@someorg', 'password')

      stub_request(:delete, 'https://some.vcloud.com/api/session').
        with(:headers => {'Accept'=>'application/*+xml;version=1.5', 'X-Vcloud-Authorization'=>'bad token'}).
        to_return(:status => 403, :body => '', :headers => {})

      session.token[:x_vcloud_authorization] = 'bad token'

      expect { session.logout }.to raise_error(VCloud::VCloudError) { |error|
        error.major_error_code.should == 403
      }
    end

    it 'should handle a HTTP 404 error when logging in with a bad API endpoint' do
      stub_request(:post, 'https://someuser%40someorg:[email protected]/badapiendpoint/sessions').
        with(:headers => {'Accept'=>'application/*+xml;version=1.5'}).
        to_return(:status => 404, :body => fixture_file('error_login_404.html'), :headers => {})

      session = VCloud::Client.new('https://some.vcloud.com/badapiendpoint/', '1.5')

      expect { session.login('someuser@someorg', 'password') }.to raise_error(VCloud::VCloudError) { |error|
        error.major_error_code.should == 404
      }
    end

  end
end
47
128
0.653061
f71ceb46d3501ec922bedd558ddd82d6d89c6746
285
class NotificationSchema
  include JSON::SchemaBuilder

  root :notifications

  def create
    root do |root_object|
      additional_properties false
    end
  end

  def update
    root do
      additional_properties false
      boolean :delivered, required: true
    end
  end
end
15
40
0.701754
21113fa4c408358c5bbb431ab03bdabe8abc1f30
90
class CreateProject
  def initialize(attributes)
    Project.create!(attributes)
  end
end
18
31
0.777778
91b31f3d4a1820bcce620ce6e3ca30da58a7e2e0
1,640
module MCollective
  module Data
    class Resource_data<Base
      activate_when do
        require 'mcollective/util/puppet_agent_mgr'
        true
      end

      def sanitize_val(result_value, default_value)
        if result_value.nil?
          return default_value
        end
        result_value
      end

      query do |resource|
        configfile = Config.instance.pluginconf.fetch("puppet.config", nil)
        puppet_agent = Util::PuppetAgentMgr.manager(configfile)
        summary = puppet_agent.load_summary

        result[:managed] = puppet_agent.managing_resource?(resource) if resource
        result[:out_of_sync_resources] = sanitize_val(summary["resources"].fetch("out_of_sync", 0), 0)
        result[:failed_resources] = sanitize_val(summary["resources"].fetch("failed", 0), 0)
        result[:corrected_resources] = sanitize_val(summary["resources"].fetch("corrective_change", 0), 0)
        result[:changed_resources] = sanitize_val(summary["resources"].fetch("changed", 0), 0)
        result[:total_resources] = sanitize_val(summary["resources"].fetch("total", 0), 0)
        result[:total_time] = sanitize_val(summary["time"].fetch("total", 0), 0)
        result[:config_retrieval_time] = sanitize_val(summary["time"].fetch("config_retrieval", 0), 0)
        result[:lastrun] = Integer(sanitize_val(summary["time"].fetch("last_run", 0), 0))
        result[:since_lastrun] = Integer(Time.now.to_i - result[:lastrun])
        result[:config_version] = sanitize_val(summary["version"].fetch("config", "unknown"), "unknown")
      end
    end
  end
end
42.051282
111
0.64878
08f4be93f73e9a2c036b09ce5290f71ddbf5be3e
358
# frozen_string_literal: true

require 'phoner'

# Validate phone numbers
class PhoneValidator < ActiveModel::EachValidator
  def validate_each(record, attribute, value)
    return if Phoner::Phone.valid? value, country_code: '1'

    record.errors[attribute] << (options[:message] || 'is not a valid phone number')
  end
end
27.538462
63
0.675978
e8593492f8b1c163b5b53353a441f6eaa0178dbf
423
class TokyoMetro::App::Renderer::Concerns::Link::ToRailwayLinePage::ConnectingRailwayLine::FromStationFacilityPage < TokyoMetro::App::Renderer::Concerns::Link::ToRailwayLinePage::ConnectingRailwayLine::MetaClass

  def initialize( request , connecting_railway_line_decorated )
    @display_another_station_info = true
    @display_additional_info = false
    super( request , connecting_railway_line_decorated )
  end

end
42.3
211
0.810875
61003413b6900c83a364dc5dceb57e2fdd44dffd
977
require 'json'

package = JSON.parse(File.read(File.join(__dir__, '..', 'package.json')))

Pod::Spec.new do |s|
  s.name           = 'EXAdsFacebook'
  s.version        = package['version']
  s.summary        = package['description']
  s.description    = package['description']
  s.license        = package['license']
  s.author         = package['author']
  s.homepage       = package['homepage']
  s.platform       = :ios, '11.0'
  s.source         = { git: 'https://github.com/expo/expo.git' }

  s.dependency 'UMCore'
  s.dependency 'UMPermissionsInterface'
  s.dependency 'FBAudienceNetwork', $FBAudienceNetworkVersion || '6.3.0'

  if !$ExpoUseSources&.include?(package['name']) && ENV['EXPO_USE_SOURCE'].to_i == 0 && File.exist?("#{s.name}.xcframework") && Gem::Version.new(Pod::VERSION) >= Gem::Version.new('1.10.0')
    s.source_files = "#{s.name}/**/*.h"
    s.vendored_frameworks = "#{s.name}.xcframework"
  else
    s.source_files = "#{s.name}/**/*.{h,m}"
  end
end
36.185185
188
0.612078