hexsha: stringlengths, 40 to 40
size: int64, 2 to 1.01M
content: stringlengths, 2 to 1.01M
avg_line_length: float64, 1.5 to 100
max_line_length: int64, 2 to 1k
alphanum_fraction: float64, 0.25 to 1
5dc2030e2fa406e93fb5222e55e8792a068eaf26
718
#!/usr/bin/env ruby
#---
# Excerpted from "Everyday Scripting in Ruby"
# We make no guarantees that this code is fit for any purpose.
# Visit http://www.pragmaticprogrammer.com/titles/bmsft for more book information.
#---
module Gem

  class GemRunner

    def run(args)
      do_configuration(args)
      cmd = Gem::CommandManager.instance
      cmd.command_names.each do |c|
        Command.add_specific_extra_args c, Array(@cfg[c])
      end
      cmd.run(@cfg)
    end

    private

    def do_configuration(args)
      @cfg = Gem::ConfigFile.new(args)
      Gem.use_paths(@cfg[:gemhome], @cfg[:gempath])
      Command.extra_args = @cfg[:gem]
      DocManager.configured_args = @cfg[:rdoc]
    end

  end
end
21.757576
82
0.657382
914368ae8fdc2a1b7e996ae914cb7e45b791de2d
3,282
class Suricata < Formula desc "Network IDS, IPS, and security monitoring engine" homepage "https://suricata-ids.org/" url "https://www.openinfosecfoundation.org/download/suricata-4.0.4.tar.gz" sha256 "617e83b6e20b03aa7d5e05a980d3cb6d2810ec18a6f15a36bf66c81c9c0a2abb" bottle do sha256 "d01576e7a951c8909a2193e758ca7d19b5d47d818547172317d4e47a63b08245" => :high_sierra sha256 "45581acbe7020a50fea0dc1fb72a71739053b1ad12f493f10b20b2a910809c9f" => :sierra sha256 "76b6f1235a829f6744181f82ed216a31060b1e266cbdb519d78629416b46b18a" => :el_capitan end depends_on "python@2" if MacOS.version <= :snow_leopard depends_on "pkg-config" => :build depends_on "libmagic" depends_on "libnet" depends_on "libyaml" depends_on "pcre" depends_on "nss" depends_on "nspr" depends_on "geoip" => :optional depends_on "lua" => :optional depends_on "luajit" => :optional depends_on "jansson" => :optional depends_on "hiredis" => :optional resource "argparse" do url "https://files.pythonhosted.org/packages/source/a/argparse/argparse-1.4.0.tar.gz" sha256 "62b089a55be1d8949cd2bc7e0df0bddb9e028faefc8c32038cc84862aefdd6e4" end resource "simplejson" do url "https://files.pythonhosted.org/packages/source/s/simplejson/simplejson-3.13.2.tar.gz" sha256 "4c4ecf20e054716cc1e5a81cadc44d3f4027108d8dd0861d8b1e3bd7a32d4f0a" end def install libnet = Formula["libnet"] libmagic = Formula["libmagic"] ENV.prepend_create_path "PYTHONPATH", libexec/"vendor/lib/python2.7/site-packages" resources.each do |r| r.stage do system "python", *Language::Python.setup_install_args(libexec/"vendor") end end args = %W[ --disable-dependency-tracking --disable-silent-rules --prefix=#{prefix} --sysconfdir=#{etc} --localstatedir=#{var} --with-libnet-includes=#{libnet.opt_include} --with-libnet-libs=#{libnet.opt_lib} --with-libmagic-includes=#{libmagic.opt_include} --with-libmagic-libraries=#{libmagic.opt_lib} ] args << "--enable-lua" if build.with? "lua" args << "--enable-luajit" if build.with? "luajit" if build.with? "geoip" geoip = Formula["geoip"] args << "--enable-geoip" args << "--with-libgeoip-includes=#{geoip.opt_include}" args << "--with-libgeoip-libs=#{geoip.opt_lib}" end if build.with? "jansson" jansson = Formula["jansson"] args << "--with-libjansson-includes=#{jansson.opt_include}" args << "--with-libjansson-libraries=#{jansson.opt_lib}" end if build.with? "hiredis" hiredis = Formula["hiredis"] args << "--enable-hiredis" args << "--with-libhiredis-includes=#{hiredis.opt_include}" args << "--with-libhiredis-libraries=#{hiredis.opt_lib}" end system "./configure", *args system "make", "install-full" bin.env_script_all_files(libexec/"bin", :PYTHONPATH => ENV["PYTHONPATH"]) # Leave the magic-file: prefix in otherwise it overrides a commented out line rather than intended line. inreplace etc/"suricata/suricata.yaml", %r{magic-file: /.+/magic}, "magic-file: #{libmagic.opt_share}/misc/magic" end test do assert_match(/#{version}/, shell_output("#{bin}/suricata --build-info")) end end
34.547368
117
0.695612
013fff990a06e94947a359e8635846711ba97072
1,110
require 'puppetlabs_spec_helper/module_spec_helper' require 'rspec-puppet-facts' include RspecPuppetFacts if Dir.exist?(File.expand_path('../../lib', __FILE__)) && RUBY_VERSION !~ %r{^1.9} require 'coveralls' require 'simplecov' require 'simplecov-console' SimpleCov.formatters = [ SimpleCov::Formatter::HTMLFormatter, SimpleCov::Formatter::Console, Coveralls::SimpleCov::Formatter ] SimpleCov.start do track_files 'lib/**/*.rb' add_filter '/spec' add_filter '/vendor' add_filter '/.vendor' end end RSpec.configure do |c| default_facts = { puppetversion: Puppet.version, facterversion: Facter.version } default_facts.merge!(YAML.load(File.read(File.expand_path('../default_facts.yml', __FILE__)))) if File.exist?(File.expand_path('../default_facts.yml', __FILE__)) default_facts.merge!(YAML.load(File.read(File.expand_path('../default_module_facts.yml', __FILE__)))) if File.exist?(File.expand_path('../default_module_facts.yml', __FILE__)) c.default_facts = default_facts c.mock_with :rspec end require 'spec_helper_methods' # vim: syntax=ruby
31.714286
177
0.730631
f8e4e4ff05a664adef27472eb6ebc0a194c8cf11
4,946
# # Copyright (C) 2011 by Dominic Graefen / http://devboy.org # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # module Buildr module AS3 module Toolkits class ApparatToolkit < Buildr::AS3::Toolkits::ZipToolkiteBase DEFAULT_SCALA_VERSION = "2.8.0" attr_reader :home, :asmifier, :concrete, :coverage, :dump, :jitb, :reducer, :stripper, :tdsi, :asm_swc, :ersatz_swc, :lzma_decoder_swc def initialize(version) @version = version @spec = "com.googlecode:apparat-bin:zip:#{@version}" @zip = Buildr.artifact(@spec) @zip_destination = File.join(File.dirname(@zip.to_s), "apparat-#{@version}", "apparat-#{@version}") generate_paths @zip_destination, @version end def invoke #:nodoc: @url ||= generate_url_from_version @version super self end def scala_dependencies Buildr.repositories.remote << 'http://scala-tools.org/repo-releases' version = DEFAULT_SCALA_VERSION Buildr.artifacts('org.scala-lang:scala-library:jar:' + version). map { |a| a.install a.to_s } end def apparat_dependencies Dir.glob("#{@home}/*.jar") end private def generate_url_from_version(version) "http://apparat.googlecode.com/files/apparat-#{version}-bin.zip" end def generate_paths(home_dir, version) @home = home_dir bat_ext = Buildr::Util.win_os? ? "" : "" @apparat = "#{@home}/apparat#{bat_ext}" @asmifier = "#{@home}/asmifier#{bat_ext}" @concrete = "#{@home}/concrete#{bat_ext}" @coverage = "#{@home}/coverage#{bat_ext}" @dump = "#{@home}/dump#{bat_ext}" @jitb = "#{@home}/jitb#{bat_ext}" @reducer = "#{@home}/reducer#{bat_ext}" @stripper = "#{@home}/stripper#{bat_ext}" @tdsi = "#{@home}/tdsi#{bat_ext}" @asm_swc = "#{@home}/apparat-asm-#{version}.swc" @ersatz_swc = "#{@home}/apparat-ersatz-#{version}.swc" @lzma_decoder_swc = "#{@home}/apparat-lzma-decoder-#{version}.swc" self end end module ApparatTasks include Extension def apparat_tdsi(options = {}) output = get_output_file compile.options[:apparat].invoke cmd_args = [] cmd_args << "apparat.tools.tdsi.TurboDieselSportInjection" cmd_args << "-i #{output}" cmd_args << "-o #{output}" reserved = [] options.to_hash.reject { |key, value| reserved.include?(key) }. each do |key, value| cmd_args << "-#{key}" << "#{value}" end call_system(cmd_args) end def apparat_reducer(options ={}) output = get_output_file compile.options[:apparat].invoke cmd_args = [] cmd_args << "apparat.tools.reducer.Reducer" cmd_args << "-i #{output}" cmd_args << "-o #{output}" reserved = [] options.to_hash.reject { |key, value| reserved.include?(key) }. 
each do |key, value| cmd_args << "-#{key}" << "#{value}" end call_system(cmd_args) end private def call_system(args) unless Buildr.application.options.dryrun cp = compile.options[:apparat].scala_dependencies + compile.options[:apparat].apparat_dependencies sh (['java', '-classpath', cp.join(":")] + args).join(" ") end end def get_output_file File.join(compile.target.to_s, compile.options[:output] || "#{project.name.split(":").last}.#{compile.packaging.to_s}") end end end end class Project include Buildr::AS3::Toolkits::ApparatTasks end end
35.582734
129
0.597048
39447de3eff2f9541aed58cbfbfad0337de35c20
943
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'file/visitor/version'

Gem::Specification.new do |spec|
  spec.name          = "file-visitor"
  spec.version       = File::Visitor::VERSION
  spec.authors       = ["bonar"]
  spec.email         = ["[email protected]"]
  spec.summary       = %q{File path collectiong utility}
  spec.description   = %q{file-visitor is an alternative way to collecting files.}
  spec.homepage      = "https://github.com/bonar/file-visitor"
  spec.license       = "MIT"

  spec.files         = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
  spec.bindir        = "exe"
  spec.executables   = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ["lib"]

  spec.add_development_dependency "bundler"
  spec.add_development_dependency "rake"
  spec.add_development_dependency "rspec"
end
36.269231
104
0.654295
03c8a36d82eb5257c802eefade8cd9e8be63373a
4,969
require 'json' require 'net/http' require 'uri' module Garage module Strategy module AuthServer extend ActiveSupport::Concern included do before_action :verify_auth, if: -> (_) { verify_permission? } end def access_token if defined?(@access_token) @access_token else @access_token = AccessTokenFetcher.fetch(request) end end def verify_permission? true end module Cache def self.with_cache(key) return yield unless Garage.configuration.cache_acceess_token_validation? cached_token = Rails.cache.read(key) return cached_token if cached_token && !cached_token.expired? token = yield Rails.cache.write(key, token, expires_in: default_ttl) if token && token.accessible? token end def self.default_ttl Garage.configuration.ttl_for_access_token_cache end end # Returns an AccessToken from request object or returns nil if failed. class AccessTokenFetcher READ_TIMEOUT = 1 OPEN_TIMEOUT = 1 USER_AGENT = "Garage #{Garage::VERSION}" def self.fetch(*args) new(*args).fetch end def initialize(request) @request = request end def fetch if has_any_valid_credentials? if has_cacheable_credentials? fetch_with_cache else fetch_without_cache end else nil end rescue Timeout::Error raise AuthBackendTimeout.new(OPEN_TIMEOUT, read_timeout) end private def get raw = http_client.get(path_with_query, header) Response.new(raw) end def header { 'Authorization' => @request.authorization, 'Host' => Garage.configuration.auth_server_host, 'Resource-Owner-Id' => @request.headers['Resource-Owner-Id'], 'Scopes' => @request.headers['Scopes'], 'User-Agent' => USER_AGENT, }.reject {|_, v| v.nil? } end def path_with_query result = uri.path result << "?" + query unless query.empty? result end def query @query ||= @request.params.slice(:access_token, :bearer_token).to_query end def uri @uri ||= URI.parse(auth_server_url) end def http_client client = Net::HTTP.new(uri.host, uri.port) client.use_ssl = true if uri.scheme == 'https' client.read_timeout = read_timeout client.open_timeout = OPEN_TIMEOUT client end def auth_server_url Garage.configuration.auth_server_url or raise NoUrlError end def read_timeout Garage.configuration.auth_server_timeout or READ_TIMEOUT end def has_any_valid_credentials? @request.authorization.present? || @request.params[:access_token].present? || @request.params[:bearer_token].present? end # Cacheable requests are: # - Bearer token request with `Authorization` header. # # We don't cache these requests because they are less requested: # - Bearer token request with query parameter which has been deprecated. # - Any other token type. def has_cacheable_credentials? bearer_token.present? end def bearer_token @bearer_token ||= @request.authorization.try {|o| o.slice(/\ABearer\s+(.+)\z/, 1) } end def fetch_with_cache Cache.with_cache("garage_gem/token_cache/#{Garage::VERSION}/#{bearer_token}") do fetch_without_cache end end def fetch_without_cache response = get if response.valid? Garage::Strategy::AccessToken.new(response.to_hash) else if response.status_code == 401 nil else raise AuthBackendError.new(response) end end end end class Response def initialize(raw) @raw = raw end def valid? status_code == 200 && json? && parsed_body.is_a?(Hash) end def to_hash parsed_body.symbolize_keys end def status_code @raw.code.to_i end def body @raw.body end private def json? parsed_body true rescue JSON::ParserError false end def parsed_body @parsed_body ||= JSON.parse(body) end end class NoUrlError < StandardError def message 'You must set Garage.configuration.auth_server_url' end end end end end
24.721393
94
0.561079
e8a0c66b1ef4ab01b05dd01e3b867f6ef558949c
7,424
=begin #Cloudsmith API #The API to the Cloudsmith Service OpenAPI spec version: v1 Contact: [email protected] Generated by: https://github.com/swagger-api/swagger-codegen.git Swagger Codegen version: 2.2.3 =end require 'date' module CloudsmithApi class PackagesUploadVagrant # The name of this package. attr_accessor :name # The primary file for the package. attr_accessor :package_file # The virtual machine provider for the box. attr_accessor :provider # If true, the uploaded package will overwrite any others with the same attributes (e.g. same version); otherwise, it will be flagged as a duplicate. attr_accessor :republish # A comma-separated values list of tags to add to the package. attr_accessor :tags # The raw version for this package. attr_accessor :version # Attribute mapping from ruby-style variable name to JSON key. def self.attribute_map { :'name' => :'name', :'package_file' => :'package_file', :'provider' => :'provider', :'republish' => :'republish', :'tags' => :'tags', :'version' => :'version' } end # Attribute type mapping. def self.swagger_types { :'name' => :'String', :'package_file' => :'String', :'provider' => :'String', :'republish' => :'BOOLEAN', :'tags' => :'String', :'version' => :'String' } end # Initializes the object # @param [Hash] attributes Model attributes in the form of hash def initialize(attributes = {}) return unless attributes.is_a?(Hash) # convert string to symbol for hash key attributes = attributes.each_with_object({}){|(k,v), h| h[k.to_sym] = v} if attributes.has_key?(:'name') self.name = attributes[:'name'] end if attributes.has_key?(:'package_file') self.package_file = attributes[:'package_file'] end if attributes.has_key?(:'provider') self.provider = attributes[:'provider'] end if attributes.has_key?(:'republish') self.republish = attributes[:'republish'] end if attributes.has_key?(:'tags') self.tags = attributes[:'tags'] end if attributes.has_key?(:'version') self.version = attributes[:'version'] end end # Show invalid properties with the reasons. Usually used together with valid? # @return Array for valid properies with the reasons def list_invalid_properties invalid_properties = Array.new if @name.nil? invalid_properties.push("invalid value for 'name', name cannot be nil.") end if @package_file.nil? invalid_properties.push("invalid value for 'package_file', package_file cannot be nil.") end if @provider.nil? invalid_properties.push("invalid value for 'provider', provider cannot be nil.") end if @version.nil? invalid_properties.push("invalid value for 'version', version cannot be nil.") end return invalid_properties end # Check to see if the all the properties in the model are valid # @return true if the model is valid def valid? return false if @name.nil? return false if @package_file.nil? return false if @provider.nil? return false if @version.nil? return true end # Checks equality by comparing each attribute. # @param [Object] Object to be compared def ==(o) return true if self.equal?(o) self.class == o.class && name == o.name && package_file == o.package_file && provider == o.provider && republish == o.republish && tags == o.tags && version == o.version end # @see the `==` method # @param [Object] Object to be compared def eql?(o) self == o end # Calculates hash code according to all attributes. 
# @return [Fixnum] Hash code def hash [name, package_file, provider, republish, tags, version].hash end # Builds the object from hash # @param [Hash] attributes Model attributes in the form of hash # @return [Object] Returns the model itself def build_from_hash(attributes) return nil unless attributes.is_a?(Hash) self.class.swagger_types.each_pair do |key, type| if type =~ /\AArray<(.*)>/i # check to ensure the input is an array given that the the attribute # is documented as an array but the input is not if attributes[self.class.attribute_map[key]].is_a?(Array) self.send("#{key}=", attributes[self.class.attribute_map[key]].map{ |v| _deserialize($1, v) } ) end elsif !attributes[self.class.attribute_map[key]].nil? self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]])) end # or else data not found in attributes(hash), not an issue as the data can be optional end self end # Deserializes the data based on type # @param string type Data type # @param string value Value to be deserialized # @return [Object] Deserialized data def _deserialize(type, value) case type.to_sym when :DateTime DateTime.parse(value) when :Date Date.parse(value) when :String value.to_s when :Integer value.to_i when :Float value.to_f when :BOOLEAN if value.to_s =~ /\A(true|t|yes|y|1)\z/i true else false end when :Object # generic object (usually a Hash), return directly value when /\AArray<(?<inner_type>.+)>\z/ inner_type = Regexp.last_match[:inner_type] value.map { |v| _deserialize(inner_type, v) } when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/ k_type = Regexp.last_match[:k_type] v_type = Regexp.last_match[:v_type] {}.tap do |hash| value.each do |k, v| hash[_deserialize(k_type, k)] = _deserialize(v_type, v) end end else # model temp_model = CloudsmithApi.const_get(type).new temp_model.build_from_hash(value) end end # Returns the string representation of the object # @return [String] String presentation of the object def to_s to_hash.to_s end # to_body is an alias to to_hash (backward compatibility) # @return [Hash] Returns the object in the form of hash def to_body to_hash end # Returns the object in the form of hash # @return [Hash] Returns the object in the form of hash def to_hash hash = {} self.class.attribute_map.each_pair do |attr, param| value = self.send(attr) next if value.nil? hash[param] = _to_hash(value) end hash end # Outputs non-array value in the form of hash # For object, use to_hash. Otherwise, just return the value # @param [Object] value Any valid value # @return [Hash] Returns the value in the form of hash def _to_hash(value) if value.is_a?(Array) value.compact.map{ |v| _to_hash(v) } elsif value.is_a?(Hash) {}.tap do |hash| value.each { |k, v| hash[k] = _to_hash(v) } end elsif value.respond_to? :to_hash value.to_hash else value end end end end
28.553846
153
0.613281
e2f592eb0a3ed4d7163413157598b4c37ed0c82e
871
class ProjectsController < InheritedResources::Base
  before_filter :authenticate_user!
  before_filter :pull_repo, :only => [:show, :edit]
  before_filter :ensure_accessibility_by_current_user, :except => [:index, :new, :new_github, :create]

  respond_to :json, :only => :show

  custom_actions :resource => :pull

  def show
    show! do
      @recent_tasks = resource.tasks.order(:name)
    end
  end

  def pull
    resource.pull!
    redirect_to resource, :notice => "Local repository is being updated..."
  end

  def destory
    destroy! { root_url(:anchor => "projects") }
  end

  private

  def ensure_accessibility_by_current_user
    unless resource.accessible_by? current_user
      redirect_to collection_path, :alert => "You do not have access to this project '#{resource}'."
    end
  end

  def pull_repo
    resource.pull
  end
end
21.243902
102
0.686567
e915262468aab364d76da73a7c58e501223d6be2
1,859
# ---------------------------------------------------------------------------- # # *** AUTO GENERATED CODE *** Type: MMv1 *** # # ---------------------------------------------------------------------------- # # This file is automatically generated by Magic Modules and manual # changes will be clobbered when the file is regenerated. # # Please read more about how to change this file in README.md and # CONTRIBUTING.md located at the root of this package. # # ---------------------------------------------------------------------------- title 'Test GCP google_access_context_manager_access_policy resource.' gcp_organization_id = attribute(:gcp_organization_id, default: gcp_organization_id, description: 'The identifier of the organization that is the parent of the perimeter') gcp_enable_privileged_resources = attribute(:gcp_enable_privileged_resources, default:0, description:'Flag to enable privileged resources requiring elevated privileges in GCP.') service_perimeter = attribute('service_perimeter', default: { "name": "restrict_all", "title": "restrict_all", "restricted_service": "storage.googleapis.com", "policy_title": "policytitle" }, description: 'Service perimeter definition') control 'google_access_context_manager_access_policy-1.0' do impact 1.0 title 'google_access_context_manager_access_policy resource test' only_if { gcp_enable_privileged_resources.to_i == 1 && gcp_organization_id != '' } describe.one do google_access_context_manager_access_policies(org_id: gcp_organization_id).names.each do |policy_name| describe google_access_context_manager_access_policy(name: policy_name) do it { should exist } its('title') { should cmp service_perimeter['policy_title'] } its('parent') { should match gcp_organization_id } end end end end
45.341463
177
0.660032
4ae3bb82fadca460ed3921e33e02291ef61f3757
374
require 'spec_helper'

describe Ethereum::Contract do

  let(:client) { Ethereum::IpcClient.new }
  let(:path) { "#{Dir.pwd}/spec/fixtures/TestContract.sol" }

  it "namespaces the generated contract class" do
    @works = Ethereum::Contract.create(file: path, client: client)
    expect(@works.parent.class_object.to_s).to eq("Ethereum::Contract::TestContract")
  end

end
26.714286
85
0.721925
1d9eb80ceea3d75792f369111c7d8140c56d4848
905
require 'pry'

class ProtestCliApp::CLI

  def call
    # binding.pry
    # puts "hello world"
    list_books
    menu
    goodbye
  end

  def list_books
    puts "here are some bestselling books!"
    @books = ProtestCliApp::Protest.today
    @books.each.with_index(1) do |book, i|
      puts "#{i}. #{book.title}- #{book.author}"
    end
  end

  def menu
    input = nil
    while input != "exit"
      puts "enter the number of the book you're interested in, or type list to see the list again, or type exit to exit the program."
      input = gets.strip.downcase
      if input.to_i > 0
        puts @books[input.to_i - 1].description
      elsif input == "list"
        list_books
      elsif input == "exit"
        goodbye
        exit
      else
        puts "not sure what you want, type list or exit."
      end
    end
  end

  def goodbye
    puts "see you tomorrow for more books!"
  end
end
17.403846
82
0.60884
335cda878cb604d70efb89ee0fb8248f1b858170
768
# vulkan-ruby 0.1.0
#
# => https://github.com/sinisterchipmunk/vulkan-ruby
#
# [NOTICE] This is an automatically generated file.

module Vulkan
  VK_KHR_DISPLAY_SPEC_VERSION = 21 unless defined?(VK_KHR_DISPLAY_SPEC_VERSION)
  VK_KHR_DISPLAY_EXTENSION_NAME = "VK_KHR_display" unless defined?(VK_KHR_DISPLAY_EXTENSION_NAME)
  VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR = 1000002000 unless defined?(VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR)
  VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR = 1000002001 unless defined?(VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR)
  VK_OBJECT_TYPE_DISPLAY_KHR = 1000002000 unless defined?(VK_OBJECT_TYPE_DISPLAY_KHR)
  VK_OBJECT_TYPE_DISPLAY_MODE_KHR = 1000002001 unless defined?(VK_OBJECT_TYPE_DISPLAY_MODE_KHR)
end
51.2
131
0.856771
87f1dc2afaecf5f9979e9c8d4ad18063909f11bd
4,248
# encoding: UTF-8 # This file is auto-generated from the current state of the database. Instead # of editing this file, please use the migrations feature of Active Record to # incrementally modify your database, and then regenerate this schema definition. # # Note that this schema.rb definition is the authoritative source for your # database schema. If you need to create the application database on another # system, you should be using db:schema:load, not running all the migrations # from scratch. The latter is a flawed and unsustainable approach (the more migrations # you'll amass, the slower it'll run and the greater likelihood for issues). # # It's strongly recommended that you check this file into your version control system. ActiveRecord::Schema.define(version: 20150509215213) do # These are extensions that must be enabled in order to support this database enable_extension "plpgsql" create_table "acceptance_criteria", force: :cascade do |t| t.integer "story_id" t.string "given" t.string "when" t.string "then" t.datetime "created_at", null: false t.datetime "updated_at", null: false end add_index "acceptance_criteria", ["story_id"], name: "index_acceptance_criteria_on_story_id", using: :btree create_table "comments", force: :cascade do |t| t.string "comment" t.integer "story_id" t.integer "user_id" t.datetime "created_at", null: false t.datetime "updated_at", null: false end add_index "comments", ["story_id"], name: "index_comments_on_story_id", using: :btree add_index "comments", ["user_id"], name: "index_comments_on_user_id", using: :btree create_table "epics", force: :cascade do |t| t.string "name", null: false t.integer "total_points" t.integer "created_by_id", null: false t.integer "updated_by_id", null: false t.datetime "created_at", null: false t.datetime "updated_at", null: false end create_table "profiles", force: :cascade do |t| t.integer "user_id" t.string "first_name" t.string "last_name" t.datetime "created_at", null: false t.datetime "updated_at", null: false end add_index "profiles", ["user_id"], name: "index_profiles_on_user_id", using: :btree create_table "sprints", force: :cascade do |t| t.string "name", null: false t.datetime "begin_date" t.datetime "end_date" t.integer "total_points" t.datetime "created_at", null: false t.datetime "updated_at", null: false end create_table "stories", force: :cascade do |t| t.string "name", null: false t.string "description" t.string "assumptions" t.string "status" t.integer "estimated_points" t.integer "actual_points" t.boolean "complete", default: false t.datetime "completed_on" t.integer "epic_id" t.integer "sprint_id" t.datetime "created_at", null: false t.datetime "updated_at", null: false end add_index "stories", ["epic_id"], name: "index_stories_on_epic_id", using: :btree add_index "stories", ["sprint_id"], name: "index_stories_on_sprint_id", using: :btree create_table "stories_users", id: false, force: :cascade do |t| t.integer "story_id" t.integer "user_id" end add_index "stories_users", ["story_id", "user_id"], name: "index_stories_users_on_story_id_and_user_id", using: :btree create_table "users", force: :cascade do |t| t.string "email", default: "", null: false t.string "encrypted_password", default: "", null: false t.string "reset_password_token" t.datetime "reset_password_sent_at" t.datetime "remember_created_at" t.integer "sign_in_count", default: 0, null: false t.datetime "current_sign_in_at" t.datetime "last_sign_in_at" t.inet "current_sign_in_ip" t.inet "last_sign_in_ip" t.datetime "created_at" t.datetime 
"updated_at" end add_index "users", ["email"], name: "index_users_on_email", unique: true, using: :btree add_index "users", ["reset_password_token"], name: "index_users_on_reset_password_token", unique: true, using: :btree end
37.59292
120
0.682674
114872c6c8f22226f3293e57b7c0766329a31341
441
module Fog
  module Identity
    class Telefonica
      class V3
        class Real
          def list_policies(options = {})
            request(
              :expects => [200],
              :method  => 'GET',
              :path    => "policies",
              :query   => options
            )
          end
        end

        class Mock
          def list_policies(options = {})
          end
        end
      end
    end
  end
end
18.375
41
0.414966
337b90b67ee07668070e133e280a94f827b07213
214
class CreateFeatureToggle < ActiveRecord::Migration[6.0]
  def change
    create_table :feature_toggles, if_not_exists: true do |t|
      t.string :key
      t.boolean :active

      t.timestamps
    end
  end
end
19.454545
61
0.686916
abbf522c9fa4dd0be2509c19129f89ad8aa303e3
1,112
class BotUser < User

  AUTH_HEADER = 'HTTP_BOT_AUTH'

  # Create bot user with name
  def self.create(name)
    user = find_or_initialize_by name: name, role: 'bot'
    token = ''
    if ! user.persisted?
      token = user.reset_token!
    end
    {user: user, token: token}
  end

  # Immediately overwrite existing token with a new one
  def reset_token!
    token = Devise.friendly_token[0,20]
    self.password = self.password_confirmation = token
    self.email = "#{name}@scribe"
    save! validate: false
    token
  end

  def self.pack_auth_header(user_id, token)
    [user_id, token].join ":"
  end

  def self.unpack_auth_header(str)
    str.split ":"
  end

  # Given hash of headers, return bot user if a header authenticates
  def self.by_auth(headers)
    # No header? Fail.
    return nil if headers[AUTH_HEADER].blank?

    # Fail if header doesn't have two values:
    parts = unpack_auth_header headers[AUTH_HEADER]
    return nil if parts.size != 2

    # Get user by name and auth using token:
    user = find parts[0]
    return nil if ! user.valid_password? parts[1]
    user
  end

end
23.166667
68
0.673561
1891d5e50acfa22107035ad4ae28ee97ce858876
355
# encoding: utf-8

namespace :release do
  desc 'apply all release tasks'
  task :pack do
    on roles(:app) do
      within release_path do
        with rails_env: fetch(:rails_env) do
          # add test for existing db
          # if test
          execute :rake, 'release:pack'
          # else
          # end
        end
      end
    end
  end
end
19.722222
44
0.549296
7a292b9fb162ec23ea2c2dadc9f54546696cf635
222
class CreateDepartmentsTable < ActiveRecord::Migration[5.2]
  def change
    create_table :departments do |t|
      t.integer :organization_id
      t.integer :sector_id
      t.integer :job_id
    end
  end
end
22.2
59
0.666667
4a8102474fca90a581bcf1ad9d12f8e8f3fafe3e
267
module Slugifiable

  module InstanceMethods
    def slug
      self.name.downcase.strip.gsub(' ', '-').gsub(/[^\w-]/, '')
    end
  end

  module ClassMethods
    def find_by_slug(slug)
      self.all.find{ |instance| instance.slug == slug }
    end
  end

end
20.538462
66
0.606742
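The Slugifiable mixin in the row above is small enough to exercise on its own. The following is a minimal usage sketch, not part of the dataset: the Article class, its name attribute, and the in-memory @@all collection are assumptions added purely for illustration.

# Hypothetical model, written only to show how the Slugifiable mixin above behaves.
class Article
  include Slugifiable::InstanceMethods # provides #slug
  extend Slugifiable::ClassMethods     # provides .find_by_slug

  attr_reader :name

  @@all = []

  def initialize(name)
    @name = name
    @@all << self # register the instance so ClassMethods#find_by_slug can scan .all
  end

  def self.all
    @@all
  end
end

Article.new("Hello, World!")
Article.find_by_slug("hello-world").name # => "Hello, World!"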
21ca78eeb1358217d8e68e631c691610876260ad
1,235
# frozen_string_literal: true module Dashboard module Projects class ListService PRESELECT_PROJECTS_LIMIT = 150 def initialize(user, feature:) @user = user @feature = feature end def execute(project_ids, include_unavailable: false) return Project.none unless License.feature_available?(feature) project_ids = available_project_ids(project_ids) unless include_unavailable find_projects(project_ids) end private attr_reader :user, :feature # see https://gitlab.com/gitlab-org/gitlab/-/merge_requests/39847 def available_project_ids(project_ids) projects = Project.with_namespace.id_in(project_ids.first(PRESELECT_PROJECTS_LIMIT)) projects.select { |project| project.feature_available?(feature) }.map(&:id) end def find_projects(project_ids) ProjectsFinder.new( current_user: user, project_ids_relation: project_ids, params: projects_finder_params ).execute end def projects_finder_params return {} if user.can?(:read_all_resources) { min_access_level: ProjectMember::DEVELOPER } end end end end
25.729167
92
0.668826
d57c0880d7743bd9956e69535bbab772fcb54503
4,437
require 'pathname' Puppet::Type.newtype(:dsc_xwindowspackagecab) do require Pathname.new(__FILE__).dirname + '../../' + 'puppet/type/base_dsc' require Pathname.new(__FILE__).dirname + '../../puppet_x/puppetlabs/dsc_type_helpers' @doc = %q{ The DSC xWindowsPackageCab resource type. Automatically generated from 'xPSDesiredStateConfiguration/DSCResources/MSFT_xWindowsPackageCab/MSFT_xWindowsPackageCab.schema.mof' To learn more about PowerShell Desired State Configuration, please visit https://technet.microsoft.com/en-us/library/dn249912.aspx. For more information about built-in DSC Resources, please visit https://technet.microsoft.com/en-us/library/dn249921.aspx. For more information about xDsc Resources, please visit https://github.com/PowerShell/DscResources. } validate do fail('dsc_name is a required attribute') if self[:dsc_name].nil? end def dscmeta_resource_friendly_name; 'xWindowsPackageCab' end def dscmeta_resource_name; 'MSFT_xWindowsPackageCab' end def dscmeta_module_name; 'xPSDesiredStateConfiguration' end def dscmeta_module_version; '8.5.0.0' end newparam(:name, :namevar => true ) do end ensurable do newvalue(:exists?) { provider.exists? } newvalue(:present) { provider.create } newvalue(:absent) { provider.destroy } defaultto { :present } end # Name: PsDscRunAsCredential # Type: MSFT_Credential # IsMandatory: False # Values: None newparam(:dsc_psdscrunascredential) do def mof_type; 'MSFT_Credential' end def mof_is_embedded?; true end desc "PsDscRunAsCredential" validate do |value| unless value.kind_of?(Hash) fail("Invalid value '#{value}'. Should be a hash") end PuppetX::Dsc::TypeHelpers.validate_MSFT_Credential("Credential", value) end munge do |value| PuppetX::Dsc::TypeHelpers.munge_sensitive_hash!(value) end end # Name: Name # Type: string # IsMandatory: True # Values: None newparam(:dsc_name) do def mof_type; 'string' end def mof_is_embedded?; false end desc "Name - The name of the package to install or uninstall." isrequired validate do |value| unless value.kind_of?(String) fail("Invalid value '#{value}'. Should be a string") end end end # Name: Ensure # Type: string # IsMandatory: False # Values: ["Present", "Absent"] newparam(:dsc_ensure) do def mof_type; 'string' end def mof_is_embedded?; false end desc "Ensure - Specifies whether the package should be installed or uninstalled. To install the package, set this property to Present. To uninstall the package, set the property to Absent. Valid values are Present, Absent." validate do |value| resource[:ensure] = value.downcase unless value.kind_of?(String) fail("Invalid value '#{value}'. Should be a string") end unless ['Present', 'present', 'Absent', 'absent'].include?(value) fail("Invalid value '#{value}'. Valid values are Present, Absent") end end end # Name: SourcePath # Type: string # IsMandatory: False # Values: None newparam(:dsc_sourcepath) do def mof_type; 'string' end def mof_is_embedded?; false end desc "SourcePath - The path to the cab file to install or uninstall the package from." validate do |value| unless value.kind_of?(String) fail("Invalid value '#{value}'. Should be a string") end end end # Name: LogPath # Type: string # IsMandatory: False # Values: None newparam(:dsc_logpath) do def mof_type; 'string' end def mof_is_embedded?; false end desc "LogPath - The path to a file to log the operation to." validate do |value| unless value.kind_of?(String) fail("Invalid value '#{value}'. 
Should be a string") end end end def builddepends pending_relations = super() PuppetX::Dsc::TypeHelpers.ensure_reboot_relationship(self, pending_relations) end end Puppet::Type.type(:dsc_xwindowspackagecab).provide :powershell, :parent => Puppet::Type.type(:base_dsc).provider(:powershell) do confine :true => (Gem::Version.new(Facter.value(:powershell_version)) >= Gem::Version.new('5.0.10586.117')) defaultfor :operatingsystem => :windows mk_resource_methods end
31.920863
227
0.679062
ac9436fa611769bad535f11ae92000221a416581
41
module WebShield
  VERSION = "0.1.2"
end
10.25
19
0.682927
334bddd18c726623a24ab07ef680d76699792aff
2,890
# frozen_string_literal: true require 'spec_helper' require 'yaml' module Cucumber module Cli describe ProfileLoader do def given_cucumber_yml_defined_as(hash_or_string) allow(Dir).to receive(:glob).with('{,.config/,config/}cucumber{.yml,.yaml}') { ['cucumber.yml'] } allow(File).to receive(:exist?) { true } cucumber_yml = hash_or_string.is_a?(Hash) ? hash_or_string.to_yaml : hash_or_string allow(IO).to receive(:read).with('cucumber.yml') { cucumber_yml } end def loader ProfileLoader.new end it 'treats backslashes as literals in rerun.txt when on Windows (JRuby or MRI)' do given_cucumber_yml_defined_as('default' => '--format "pretty" features\sync_imap_mailbox.feature:16:22') if Cucumber::WINDOWS expect(loader.args_from('default')).to eq ['--format', 'pretty', 'features\sync_imap_mailbox.feature:16:22'] else expect(loader.args_from('default')).to eq ['--format', 'pretty', 'featuressync_imap_mailbox.feature:16:22'] end end it 'treats forward slashes as literals' do given_cucumber_yml_defined_as('default' => '--format "ugly" features/sync_imap_mailbox.feature:16:22') expect(loader.args_from('default')).to eq ['--format', 'ugly', 'features/sync_imap_mailbox.feature:16:22'] end it 'treats percent sign as ERB code block after YAML directive' do yml = <<-HERE --- % x = '--format "pretty" features/sync_imap_mailbox.feature:16:22' default: <%= x %> HERE given_cucumber_yml_defined_as yml expect(loader.args_from('default')).to eq ['--format', 'pretty', 'features/sync_imap_mailbox.feature:16:22'] end it 'correctly parses a profile that uses tag expressions (with double quotes)' do given_cucumber_yml_defined_as('default' => '--format "pretty" features\sync_imap_mailbox.feature:16:22 --tags "not @jruby"') if Cucumber::WINDOWS expect(loader.args_from('default')).to eq ['--format', 'pretty', 'features\sync_imap_mailbox.feature:16:22', '--tags', 'not @jruby'] else expect(loader.args_from('default')).to eq ['--format', 'pretty', 'featuressync_imap_mailbox.feature:16:22', '--tags', 'not @jruby'] end end it 'correctly parses a profile that uses tag expressions (with single quotes)' do given_cucumber_yml_defined_as('default' => "--format 'pretty' features\\sync_imap_mailbox.feature:16:22 --tags 'not @jruby'") if Cucumber::WINDOWS expect(loader.args_from('default')).to eq ['--format', 'pretty', 'features\sync_imap_mailbox.feature:16:22', '--tags', 'not @jruby'] else expect(loader.args_from('default')).to eq ['--format', 'pretty', 'featuressync_imap_mailbox.feature:16:22', '--tags', 'not @jruby'] end end end end end
43.134328
142
0.658824
87045ca8d64f7a593a6e35b2d5d38e9845aaa2e0
4,529
require "rails_helper" RSpec.describe ActivityHelper, type: :helper do let(:organisation) { create(:delivery_partner_organisation) } describe "#step_is_complete_or_next?" do context "when the activity has passed the identification step" do it "returns true for the purpose fields" do activity = build(:project_activity, :at_purpose_step) expect(helper.step_is_complete_or_next?(activity: activity, step: "objectives")).to be(true) end it "returns false for the next fields following the purpose field" do activity = build(:project_activity, :at_identifier_step) expect(helper.step_is_complete_or_next?(activity: activity, step: "sector")).to be(false) expect(helper.step_is_complete_or_next?(activity: activity, step: "programme_status")).to be(false) expect(helper.step_is_complete_or_next?(activity: activity, step: "dates")).to be(false) expect(helper.step_is_complete_or_next?(activity: activity, step: "aid_type")).to be(false) end end context "when the activity form has been completed" do it "shows all steps" do activity = build(:project_activity, form_state: "complete") all_steps = Activity::FORM_STEPS all_steps.each do |step| expect(helper.step_is_complete_or_next?(activity: activity, step: step)).to be(true) end end end end describe "#link_to_activity_parent" do context "when there is no parent" do it "returns nil" do expect(helper.link_to_activity_parent(parent: nil, user: nil)).to be_nil end end context "when the parent is a fund" do context "and the user is delivery partner" do it "returns the parent title without a link" do parent_activity = create(:fund_activity) _activity = create(:programme_activity, parent: parent_activity) user = create(:delivery_partner_user) expect(helper.link_to_activity_parent(parent: parent_activity, user: user)).to eql parent_activity.title end end end context "when there is a parent" do it "returns a link to the parent" do parent_activity = create(:fund_activity) _activity = create(:programme_activity, parent: parent_activity) user = create(:beis_user) expect(helper.link_to_activity_parent(parent: parent_activity, user: user)).to include organisation_activity_path(parent_activity.organisation, parent_activity) expect(helper.link_to_activity_parent(parent: parent_activity, user: user)).to include parent_activity.title end end end describe "#custom_capitalisation" do context "when a string needs to be presented with the first letter of the first word upcased" do it "takes that string, upcases that letter and leaves the rest of the string as it is" do sample_string = "programme (level B)" expect(custom_capitalisation(sample_string)).to eql("Programme (level B)") end end end describe "#benefitting_countries_with_percentages" do it "returns an array of structs with country name, code and percentage" do codes = ["AG", "LC"] countries = benefitting_countries_with_percentages(codes) expect(countries.count).to eql(2) expect(countries.first.code).to eq("AG") expect(countries.first.name).to eq("Antigua and Barbuda") expect(countries.first.percentage).to eq(50.0) expect(countries.last.code).to eq("LC") expect(countries.last.name).to eq("Saint Lucia") expect(countries.last.percentage).to eq(50.0) end it "handles the case when all countries are selected" do codes = Codelist.new(type: "benefitting_countries", source: "beis").map { |c| c["code"] } countries = benefitting_countries_with_percentages(codes) expect(countries.first.percentage).to eq 100 / countries.count.to_f expect(countries.last.percentage).to eq 100 / countries.count.to_f end it "handles the case when three 
coutries are selected" do codes = ["AG", "LC", "BZ"] countries = benefitting_countries_with_percentages(codes) expect(countries.first.percentage).to eq 100 / countries.count.to_f expect(countries.last.percentage).to eq 100 / countries.count.to_f end it "returns an empty array if the codes are nil or empty" do expect(benefitting_countries_with_percentages(nil)).to eq([]) expect(benefitting_countries_with_percentages([])).to eq([]) end end end
40.4375
168
0.702363
614f75a146ee13adec2d15e100c32356bab56c1e
2,791
class Admin::DeliveryServicePricesController < ApplicationController before_action :authenticate_user! layout "admin" def index @delivery_service = DeliveryService.includes(:active_prices).find(params[:delivery_service_id]) @delivery_service_prices = @delivery_service.active_prices end def new set_delivery_service @delivery_service_price = @delivery_service.prices.build end def edit @form_delivery_service_price = DeliveryServicePrice.find(params[:id]) end def create set_delivery_service @delivery_service_price = @delivery_service.prices.build(params[:delivery_service_price]) if @delivery_service_price.save flash_message :success, 'Delivery service price was successfully created.' redirect_to admin_delivery_service_delivery_service_prices_url else render :new end end # Updating a delivery_service_price # # If the accessory is not associated with orders, update the current record. # Else create a new delivery_service_price with the new attributes. # Then set the old delivery_service_price as inactive. def update set_delivery_service_price unless @delivery_service_price.orders.empty? Store.inactivate!(@delivery_service_price) @old_delivery_service_price = @delivery_service_price @delivery_service_price = @old_delivery_service_price.delivery_service.prices.new end @delivery_service_price.attributes = params[:delivery_service_price] if @delivery_service_price.save flash_message :success, 'Delivery service price was successfully updated.' redirect_to admin_delivery_service_delivery_service_prices_url else @form_delivery_service_price = @old_delivery_service_price ||= DeliveryServicePrice.find(params[:id]) Store.activate!(@form_delivery_service_price) @form_delivery_service_price.attributes = params[:delivery_service_price] render :edit end end # Destroying a delivery_service_price # # If no associated order records, destroy the delivery_service_price. Else set it to inactive. def destroy set_delivery_service_price if @delivery_service_price.orders.empty? @result = Store.last_record(@delivery_service_price, DeliveryServicePrice.active.load.count) else Store.inactivate!(@delivery_service_price) end @result = [:success, 'Delivery service price was successfully deleted.'] if @result.nil? flash_message @result[0], @result[1] redirect_to admin_delivery_service_delivery_service_prices_url end private def set_delivery_service_price @delivery_service_price = DeliveryServicePrice.find(params[:id]) end def set_delivery_service @delivery_service = DeliveryService.find(params[:delivery_service_id]) end end
34.036585
107
0.774633
e968155e4076e401ad9af5cb654adeda473b8b87
1,313
require "mixlib/cli" class CliConfig include Mixlib::CLI option :room, :short => "-r ROOM_NAME", :long => "--room ROOM_NAME", :description => "Use quotes if there are spaces on the room name" option :domain, :short => "-d DOMAIN_NAME", :long => "--domain DOMAIN_NAME", :description => "The subdomain of your campfire room - <domain>.campfirenow.com" option :username, :short => "-u USERNAME", :long => "--user USERNAME" option :api_key, :short => "-k API_KEY", :long => "--key API_KEY", :description => "Use this to log without a username/password prompt. You can skip the -u option if you use that" option :help, :short => "-h", :long => "--help", :on => :tail, :description => "This handy guide you're reading right now", :boolean => true, :show_options => true, :exit => 0 option :version, :short => "-v", :long => "--version", :proc => Proc.new { puts "Campline version #{File.open('VERSION').read}" }, :boolean => true, :exit => 0 def load! self.parse_options unless self.config[:domain] && self.config[:room] && (self.config[:username] || self.config[:api_key]) raise "Missing parameters - please run 'campline --help' for help" end end def data config end end
25.25
116
0.600914
ac7bc6bc640278750f5496d10065c163c9195b48
316
class CreateAdmins < ActiveRecord::Migration[5.2]
  def change
    create_table :admins do |t|
      t.string :name, null: false
      t.string :email, null: false
      t.string :password_digest, null: false
      t.index ["email"], name: "index_admins_on_email", unique: true

      t.timestamps
    end
  end
end
24.307692
68
0.658228
26e039ef5727cba251605e59ee6117a13a9730c0
2,157
require File.expand_path('../../../spec_helper', __FILE__) module Pod describe ExternalSources::PodspecSource do before do podspec_path = fixture('integration/Reachability/Reachability.podspec') dependency = Dependency.new("Reachability", :podspec => podspec_path.to_s) podfile_path = fixture('integration/Podfile') @subject = ExternalSources.from_dependency(dependency, podfile_path) end it "creates a copy of the podspec" do @subject.fetch(config.sandbox) path = config.sandbox.root + 'Local Podspecs/Reachability.podspec' path.should.exist? end it "returns the description" do @subject.description.should.match %r|from `.*Reachability/Reachability.podspec`| end describe "Helpers" do it "handles absolute paths" do @subject.stubs(:params).returns(:podspec => fixture('integration/Reachability')) path = @subject.send(:podspec_uri) path.should == fixture('integration/Reachability/Reachability.podspec').to_s end it "handles paths when there is no podfile path" do @subject.stubs(:podfile_path).returns(nil) @subject.stubs(:params).returns(:podspec => fixture('integration/Reachability')) path = @subject.send(:podspec_uri) path.should == fixture('integration/Reachability/Reachability.podspec').to_s end it "handles relative paths" do @subject.stubs(:params).returns(:podspec => 'Reachability') path = @subject.send(:podspec_uri) path.should == fixture('integration/Reachability/Reachability.podspec').to_s end it "expands the tilde" do File.stubs(:exist?).returns(true) @subject.stubs(:params).returns(:podspec => '~/Reachability') path = @subject.send(:podspec_uri) path.should == ENV['HOME'] + '/Reachability/Reachability.podspec' end it "handles URLs" do @subject.stubs(:params).returns(:podspec => "http://www.example.com/Reachability.podspec") path = @subject.send(:podspec_uri) path.should == "http://www.example.com/Reachability.podspec" end end end end
35.360656
98
0.670376
0191abf2f226c1f52057d7da0c9966e0c657f84f
166
require File.expand_path('../../../../spec_helper', __FILE__)

describe "Zlib::GzipReader#external_encoding" do
  it "needs to be reviewed for spec completeness"
end
27.666667
61
0.740964
2610813b0a4f07a298a988f47fd1d481fb36df14
751
#
# Cookbook Name:: lvm
# Attributes:: default
#
# Copyright 2009-2016, Chef Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

default['lvm']['di-ruby-lvm']['version'] = '0.2.1'
default['lvm']['di-ruby-lvm-attrib']['version'] = '0.0.26'
34.136364
74
0.725699
7a20d119c3394ddd513b7e4c14cad6dab1460c45
1,630
# frozen_string_literal: true #------------------------------------------------------------------------- # # Copyright (c) Microsoft and contributors. All rights reserved. # # The MIT License(MIT) # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files(the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and / or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions : # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. #-------------------------------------------------------------------------- require "azure/storage/common/service/geo_replication" module Azure::Storage::Common module Service class StorageServiceStats def initialize @geo_replication = GeoReplication.new yield self if block_given? end attr_accessor :geo_replication end end end
40.75
79
0.695092
034b121361e5f24053c9ef251aabf81a52f4074f
282
# Variable declaration
a = 'value'

# Print with NewLine
puts a

# Print without NewLine
print a

# constants are all caps
WHATEVER = 'constant forever!'

# Global
$global_here = 0

# class_variable
@@class_variable

# instance_var
@instance_var

# Get input
variable_name = gets
10.071429
29
0.737589
4aff9bccade826cedab71c2a3e98422825ecd0e1
1,238
module Memorandom module Plugins class DER < PluginTemplate require 'openssl' @description = "This plugin looks for DER-encoded encryption keys (RSA/DSA/EC)" @confidence = 0.90 # Scan takes a buffer and an offset of where this buffer starts in the source def scan(buffer, source_offset) buffer.scan( # Look for a DER record start (0x30), a length value, and a version marker. # This identifies RSA, DSA, and EC keys /\x30.{1,5}\x02\x01(?:\x00\x02|\x01\x04)/m ).each do |m| # This may hit an earlier identical match, but thats ok last_offset = buffer.index(m) next unless last_offset # Attempt to parse the key at the specified offset key_candidate = buffer[last_offset, 20000] key_type = nil key = nil [:RSA, :DSA, :EC ].each do |ktype| next unless OpenSSL::PKey.const_defined?(ktype) key_type = ktype key = OpenSSL::PKey.const_get(ktype).new(key_candidate) rescue nil break if key end # Ignore this if OpenSSL could not parse out a valid key next unless key report_hit(:type => "#{key_type}", :data => key.to_pem, :offset => source_offset + last_offset) end end end end end
28.136364
101
0.649435
e9e8cae02e56a5fbea63a8aad848b074b26f9c1e
1,076
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'reject_by_key/version'

Gem::Specification.new do |spec|
  spec.name          = "reject_by_key"
  spec.version       = RejectByKey::VERSION
  spec.authors       = ["Yusuke Nakamura"]
  spec.email         = ["[email protected]"]

  spec.summary       = %q(Implementation of the Hash#reject_by_key)
  spec.description   = %q(Hash#reject_by_key is return hash that rejected passed key element.)
  spec.homepage      = %q(https://github.com/unasuke/reject_by_key)
  spec.license       = "MIT"

  spec.required_ruby_version = '~> 2.2'

  spec.files = `git ls-files -z`.split("\x0").reject do |f|
    f.match(%r{^(test|spec|features)/})
  end
  spec.bindir        = "exe"
  spec.executables   = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ["lib"]

  spec.add_development_dependency "bundler", "~> 1.14"
  spec.add_development_dependency "rake", "~> 10.0"
  spec.add_development_dependency "minitest", "~> 5.0"
end
37.103448
94
0.659851
e8502a2cedd66bff5f07f978b0b366a77336ca0a
8,353
require_relative 'spec_helper' describe Hooks do include Rack::Test::Methods def app Hooks end let(:stripe_event_id) { 'xxx' } let(:metadata) {{ country_code: 'NL', vat_registered: 'false', other: 'random' }} let(:customer) do Stripe::Customer.create \ card: { number: '4242424242424242', exp_month: '12', exp_year: '30', cvc: '222' }, metadata: metadata end let(:customer_invoices) do Stripe::Invoice.list(customer: customer.id, limit: 1) end let(:stripe_invoice) do Stripe::InvoiceItem.create \ customer: customer.id, amount: 100, currency: 'usd' Stripe::Invoice.create(customer: customer.id) end let(:plan) do begin Stripe::Plan.retrieve('test') rescue Stripe::Plan.create \ id: 'test', name: 'Test Plan', amount: 1499, currency: 'usd', interval: 'month' end end describe 'any hook' do it 'spreads a rumor' do Rumor.expects(:spread).with do |rumor| rumor.event == :charge_succeeded && rumor.subject == 1 end post '/', json( id: stripe_event_id, type: 'charge.succeeded', data: {object: 1} ) end end describe 'stubbed rumor' do before do Rumor.stubs(:spread) end describe 'post customer updated' do it 'does nothing if the metadata country code is unaffected' do VCR.use_cassette('hook_customer_updated') do StripeService.expects(:new).never post '/', json( id: stripe_event_id, type: 'customer.updated', data: {object: customer, previous_attributes: Stripe::StripeObject.construct_from({email: '[email protected]'})} ) _(last_response.ok?).must_equal true _(last_response.body).must_be_empty end end it 'calls for vat rate update if the metadata country code is changed' do VCR.use_cassette('hook_customer_updated') do StripeService.expects(:new).with(customer_id: customer.id).returns( mock.tap do |m| m.expects(:update_vat_rate) end ) post '/', json( id: stripe_event_id, type: 'customer.updated', data: {object: customer, previous_attributes: Stripe::StripeObject.construct_from({metadata: {country_code: 'DE'}})} ) _(last_response.ok?).must_equal true _(last_response.body).must_be_empty end end end describe 'post invoice payment succeeded' do it 'finalizes the invoice' do VCR.use_cassette('hook_invoice_payment_succeeded') do stripe_invoice.pay post '/', json( id: stripe_event_id, type: 'invoice.payment_succeeded', data: { object: stripe_invoice} ) _(last_response.ok?).must_equal true _(last_response.body).must_be_empty _(Invoice.count).must_equal 1 invoice = Invoice.first _(invoice.sequence_number).must_equal 1 _(invoice.finalized_at).wont_be_nil _(invoice.credit_note).must_equal false end end end describe 'post credit note created' do it 'creates a credit note' do VCR.use_cassette('hook_credit_note_created') do stripe_credit_note = create_stripe_credit_note before_count = Invoice.count post '/', json( id: stripe_event_id, type: 'credit_note.created', data: {object: stripe_credit_note} ) _(last_response.ok?).must_equal true _(last_response.body).must_be_empty _(Invoice.count).must_equal before_count+1 invoice = Invoice.order(:sequence_number).first _(invoice.sequence_number).must_equal 1 _(invoice.finalized_at).wont_be_nil _(invoice.credit_note).must_equal false credit_note = Invoice.order(:sequence_number).last _(credit_note.sequence_number).must_equal 2 _(credit_note.finalized_at).wont_be_nil _(credit_note.credit_note).must_equal true _(credit_note.reference_number).must_equal invoice.number _(credit_note.total).must_equal invoice.total end end end describe 'post credit note created partial' do it 'creates a credit note' do VCR.use_cassette('hook_credit_note_created_partial') do stripe_credit_note = 
create_stripe_credit_note( lines: [ {unit_amount: 10} ] ) before_count = Invoice.count post '/', json( id: stripe_event_id, type: 'credit_note.created', data: {object: stripe_credit_note} ) _(last_response.ok?).must_equal true _(last_response.body).must_be_empty _(Invoice.count).must_equal before_count+1 invoice = Invoice.order(:sequence_number).first _(invoice.sequence_number).must_equal 1 _(invoice.finalized_at).wont_be_nil _(invoice.credit_note).must_equal false credit_note = Invoice.order(:sequence_number).last _(credit_note.sequence_number).must_equal 2 _(credit_note.finalized_at).wont_be_nil _(credit_note.credit_note).must_equal true _(credit_note.reference_number).must_equal invoice.number _(credit_note.total).must_equal 10 end end end describe 'a stripe error occurs' do it 'responds with the error' do invoice_service = stub app.any_instance.stubs(:invoice_service) .with(customer_id: '10').returns(invoice_service) invoice_service.expects(:process_payment) .raises(Stripe::CardError.new('not good', :test, code: 1)) post '/', json( id: stripe_event_id, type: 'invoice.payment_succeeded', data: {object: {id: '1', customer: '10'}} ) _(last_response.ok?).must_equal false _(last_response.status).must_equal 402 _(last_response.body).must_equal '{"error":{"message":"not good","type":"card_error","code":1,"param":"test"}}' end end describe 'the customer does not have any metadata' do let(:metadata) { { other: 'random' } } it 'does nothing' do VCR.use_cassette('hook_invoice_created_no_meta') do stripe_invoice.pay post '/', json( id: stripe_event_id, type: 'invoice.payment_succeeded', data: {object: Stripe::Invoice.retrieve(stripe_invoice.id)} ) _(last_response.ok?).must_equal true invoice = Invoice.first _(invoice.number).wont_be_nil end end end end def json(object) MultiJson.dump(object) end def create_stripe_credit_note(type: 'refund', lines: nil) # other options are 'credit' and 'out_of_band' customer.subscriptions.create(plan: plan.id) stripe_invoice = customer_invoices.first post '/', json( id: stripe_event_id, type: 'invoice.payment_succeeded', data: {object: stripe_invoice} ) credit_lines = if lines lines.map do |line| { type: 'custom_line_item', unit_amount: line[:unit_amount], quantity: line[:quantity] || 1, description: line[:description] || "Refund" } end else stripe_invoice.lines.map do |line| { type: 'invoice_line_item', invoice_line_item: line.id, quantity: 1 } end end Stripe::CreditNote.create( invoice: stripe_invoice.id, reason: 'order_change', :"#{type}_amount" => credit_lines.sum{|line| lines ? lines.sum{|line| line[:unit_amount] * (line[:quantity] || 1)} : stripe_invoice.lines.sum(&:amount)}.to_i, lines: credit_lines ) end end
27.936455
164
0.580271
2651d3bbcdea5374132411f8e5f2131876306b96
1,555
# frozen_string_literal: true

require 'yabeda/http_requests/version'
require 'yabeda/http_requests/sniffer'
require 'yabeda'
require 'sniffer'

module Yabeda
  # Common module
  module HttpRequests
    SNIFFER_STORAGE_SIZE = 0

    # rubocop: disable Metrics/BlockLength
    Yabeda.configure do
      group :http

      LONG_RUNNING_REQUEST_BUCKETS = [
        0.5, 1, 2.5, 5, 10, 25, 50, 100, 250, 500, 1000, # standard
        30_000, 60_000, 120_000, 300_000, 600_000 # slow queries
      ].freeze

      counter :request_total,
              comment: 'A counter of the total number of external HTTP \
              requests.',
              tags: %i[host port method]

      counter :response_total,
              comment: 'A counter of the total number of external HTTP \
              responses.',
              tags: %i[host port method status]

      histogram :response_duration,
                tags: %i[host port method status],
                unit: :milliseconds,
                buckets: LONG_RUNNING_REQUEST_BUCKETS,
                comment: "A histogram of the response \
                duration (milliseconds)."

      ::Sniffer.config do |c|
        c.enabled = true
        c.store = { capacity: SNIFFER_STORAGE_SIZE }

        c.middleware do |chain|
          chain.remove(::Sniffer::Middleware::Logger)
          chain.add(Yabeda::HttpRequests::Sniffer)
        end
      end
    end
    # rubocop: enable Metrics/BlockLength
  end
end
31.734694
75
0.570418
333221594cbfc21e3de27f0f81779c2447050d6a
450
require 'fog/core/collection'
require 'fog/digitalocean/models/compute/flavor'

module Fog
  module Compute
    class DigitalOcean
      class Flavors < Fog::Collection
        model Fog::Compute::DigitalOcean::Flavor

        def all
          load service.list_flavors.body['sizes']
        end

        def get(id)
          all.find { |f| f.id == id }
        rescue Fog::Errors::NotFound
          nil
        end
      end
    end
  end
end
17.307692
49
0.588889
bb35d499504dd05090e7867a03356713d07d8950
16,792
=begin #NSX-T Manager API #VMware NSX-T Manager REST API OpenAPI spec version: 2.5.1.0.0 Generated by: https://github.com/swagger-api/swagger-codegen.git Swagger Codegen version: 2.4.7 =end require 'date' module NSXT class UpgradeUnitGroupAggregateInfo # Link to this resource attr_accessor :_self # The server will populate this field when returing the resource. Ignored on PUT and POST. attr_accessor :_links # Schema for this resource attr_accessor :_schema # The _revision property describes the current revision of the resource. To prevent clients from overwriting each other's changes, PUT operations must include the current _revision of the resource, which clients should obtain by issuing a GET operation. If the _revision provided in a PUT request is missing or stale, the operation will be rejected. attr_accessor :_revision # Indicates system owned resource attr_accessor :_system_owned # Defaults to ID if not set attr_accessor :display_name # Description of this resource attr_accessor :description # Opaque identifiers meaningful to the API user attr_accessor :tags # ID of the user who created this resource attr_accessor :_create_user # Protection status is one of the following: PROTECTED - the client who retrieved the entity is not allowed to modify it. NOT_PROTECTED - the client who retrieved the entity is allowed to modify it REQUIRE_OVERRIDE - the client who retrieved the entity is a super user and can modify it, but only when providing the request header X-Allow-Overwrite=true. UNKNOWN - the _protection field could not be determined for this entity. attr_accessor :_protection # Timestamp of resource creation attr_accessor :_create_time # Timestamp of last modification attr_accessor :_last_modified_time # ID of the user who last modified this resource attr_accessor :_last_modified_user # Unique identifier of this resource attr_accessor :id # The type of this resource. attr_accessor :resource_type # Upgrade status of upgrade unit group attr_accessor :status # Number of upgrade units in the group attr_accessor :upgrade_unit_count # Number of nodes in the upgrade unit group that failed upgrade attr_accessor :failed_count # Component type attr_accessor :type # Indicator of upgrade progress in percentage attr_accessor :percent_complete # Post-upgrade status of group attr_accessor :post_upgrade_status # Flag to indicate whether upgrade of this group is enabled or not attr_accessor :enabled # List of upgrade units in the group attr_accessor :upgrade_units # Extended configuration for the group attr_accessor :extended_configuration # Upgrade method to specify whether the upgrade is to be performed in parallel or serially attr_accessor :parallel class EnumAttributeValidator attr_reader :datatype attr_reader :allowable_values def initialize(datatype, allowable_values) @allowable_values = allowable_values.map do |value| case datatype.to_s when /Integer/i value.to_i when /Float/i value.to_f else value end end end def valid?(value) !value || allowable_values.include?(value) end end # Attribute mapping from ruby-style variable name to JSON key. 
def self.attribute_map { :'_self' => :'_self', :'_links' => :'_links', :'_schema' => :'_schema', :'_revision' => :'_revision', :'_system_owned' => :'_system_owned', :'display_name' => :'display_name', :'description' => :'description', :'tags' => :'tags', :'_create_user' => :'_create_user', :'_protection' => :'_protection', :'_create_time' => :'_create_time', :'_last_modified_time' => :'_last_modified_time', :'_last_modified_user' => :'_last_modified_user', :'id' => :'id', :'resource_type' => :'resource_type', :'status' => :'status', :'upgrade_unit_count' => :'upgrade_unit_count', :'failed_count' => :'failed_count', :'type' => :'type', :'percent_complete' => :'percent_complete', :'post_upgrade_status' => :'post_upgrade_status', :'enabled' => :'enabled', :'upgrade_units' => :'upgrade_units', :'extended_configuration' => :'extended_configuration', :'parallel' => :'parallel' } end # Attribute type mapping. def self.swagger_types { :'_self' => :'SelfResourceLink', :'_links' => :'Array<ResourceLink>', :'_schema' => :'String', :'_revision' => :'Integer', :'_system_owned' => :'BOOLEAN', :'display_name' => :'String', :'description' => :'String', :'tags' => :'Array<Tag>', :'_create_user' => :'String', :'_protection' => :'String', :'_create_time' => :'Integer', :'_last_modified_time' => :'Integer', :'_last_modified_user' => :'String', :'id' => :'String', :'resource_type' => :'String', :'status' => :'String', :'upgrade_unit_count' => :'Integer', :'failed_count' => :'Integer', :'type' => :'String', :'percent_complete' => :'Float', :'post_upgrade_status' => :'UpgradeChecksExecutionStatus', :'enabled' => :'BOOLEAN', :'upgrade_units' => :'Array<UpgradeUnit>', :'extended_configuration' => :'Array<KeyValuePair>', :'parallel' => :'BOOLEAN' } end # Initializes the object # @param [Hash] attributes Model attributes in the form of hash def initialize(attributes = {}) return unless attributes.is_a?(Hash) # convert string to symbol for hash key attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v } if attributes.has_key?(:'_self') self._self = attributes[:'_self'] end if attributes.has_key?(:'_links') if (value = attributes[:'_links']).is_a?(Array) self._links = value end end if attributes.has_key?(:'_schema') self._schema = attributes[:'_schema'] end if attributes.has_key?(:'_revision') self._revision = attributes[:'_revision'] end if attributes.has_key?(:'_system_owned') self._system_owned = attributes[:'_system_owned'] end if attributes.has_key?(:'display_name') self.display_name = attributes[:'display_name'] end if attributes.has_key?(:'description') self.description = attributes[:'description'] end if attributes.has_key?(:'tags') if (value = attributes[:'tags']).is_a?(Array) self.tags = value end end if attributes.has_key?(:'_create_user') self._create_user = attributes[:'_create_user'] end if attributes.has_key?(:'_protection') self._protection = attributes[:'_protection'] end if attributes.has_key?(:'_create_time') self._create_time = attributes[:'_create_time'] end if attributes.has_key?(:'_last_modified_time') self._last_modified_time = attributes[:'_last_modified_time'] end if attributes.has_key?(:'_last_modified_user') self._last_modified_user = attributes[:'_last_modified_user'] end if attributes.has_key?(:'id') self.id = attributes[:'id'] end if attributes.has_key?(:'resource_type') self.resource_type = attributes[:'resource_type'] end if attributes.has_key?(:'status') self.status = attributes[:'status'] end if attributes.has_key?(:'upgrade_unit_count') self.upgrade_unit_count = 
attributes[:'upgrade_unit_count'] end if attributes.has_key?(:'failed_count') self.failed_count = attributes[:'failed_count'] end if attributes.has_key?(:'type') self.type = attributes[:'type'] end if attributes.has_key?(:'percent_complete') self.percent_complete = attributes[:'percent_complete'] end if attributes.has_key?(:'post_upgrade_status') self.post_upgrade_status = attributes[:'post_upgrade_status'] end if attributes.has_key?(:'enabled') self.enabled = attributes[:'enabled'] else self.enabled = true end if attributes.has_key?(:'upgrade_units') if (value = attributes[:'upgrade_units']).is_a?(Array) self.upgrade_units = value end end if attributes.has_key?(:'extended_configuration') if (value = attributes[:'extended_configuration']).is_a?(Array) self.extended_configuration = value end end if attributes.has_key?(:'parallel') self.parallel = attributes[:'parallel'] else self.parallel = true end end # Show invalid properties with the reasons. Usually used together with valid? # @return Array for valid properties with the reasons def list_invalid_properties invalid_properties = Array.new if !@display_name.nil? && @display_name.to_s.length > 255 invalid_properties.push('invalid value for "display_name", the character length must be smaller than or equal to 255.') end if [email protected]? && @description.to_s.length > 1024 invalid_properties.push('invalid value for "description", the character length must be smaller than or equal to 1024.') end if @type.nil? invalid_properties.push('invalid value for "type", type cannot be nil.') end invalid_properties end # Check to see if the all the properties in the model are valid # @return true if the model is valid def valid? return false if !@display_name.nil? && @display_name.to_s.length > 255 return false if [email protected]? && @description.to_s.length > 1024 status_validator = EnumAttributeValidator.new('String', ['SUCCESS', 'FAILED', 'IN_PROGRESS', 'NOT_STARTED', 'PAUSING', 'PAUSED']) return false unless status_validator.valid?(@status) return false if @type.nil? true end # Custom attribute writer method with validation # @param [Object] display_name Value to be assigned def display_name=(display_name) if !display_name.nil? && display_name.to_s.length > 255 fail ArgumentError, 'invalid value for "display_name", the character length must be smaller than or equal to 255.' end @display_name = display_name end # Custom attribute writer method with validation # @param [Object] description Value to be assigned def description=(description) if !description.nil? && description.to_s.length > 1024 fail ArgumentError, 'invalid value for "description", the character length must be smaller than or equal to 1024.' end @description = description end # Custom attribute writer method checking allowed values (enum). # @param [Object] status Object to be assigned def status=(status) validator = EnumAttributeValidator.new('String', ['SUCCESS', 'FAILED', 'IN_PROGRESS', 'NOT_STARTED', 'PAUSING', 'PAUSED']) unless validator.valid?(status) fail ArgumentError, 'invalid value for "status", must be one of #{validator.allowable_values}.' end @status = status end # Checks equality by comparing each attribute. 
# @param [Object] Object to be compared def ==(o) return true if self.equal?(o) self.class == o.class && _self == o._self && _links == o._links && _schema == o._schema && _revision == o._revision && _system_owned == o._system_owned && display_name == o.display_name && description == o.description && tags == o.tags && _create_user == o._create_user && _protection == o._protection && _create_time == o._create_time && _last_modified_time == o._last_modified_time && _last_modified_user == o._last_modified_user && id == o.id && resource_type == o.resource_type && status == o.status && upgrade_unit_count == o.upgrade_unit_count && failed_count == o.failed_count && type == o.type && percent_complete == o.percent_complete && post_upgrade_status == o.post_upgrade_status && enabled == o.enabled && upgrade_units == o.upgrade_units && extended_configuration == o.extended_configuration && parallel == o.parallel end # @see the `==` method # @param [Object] Object to be compared def eql?(o) self == o end # Calculates hash code according to all attributes. # @return [Fixnum] Hash code def hash [_self, _links, _schema, _revision, _system_owned, display_name, description, tags, _create_user, _protection, _create_time, _last_modified_time, _last_modified_user, id, resource_type, status, upgrade_unit_count, failed_count, type, percent_complete, post_upgrade_status, enabled, upgrade_units, extended_configuration, parallel].hash end # Builds the object from hash # @param [Hash] attributes Model attributes in the form of hash # @return [Object] Returns the model itself def build_from_hash(attributes) return nil unless attributes.is_a?(Hash) self.class.swagger_types.each_pair do |key, type| if type =~ /\AArray<(.*)>/i # check to ensure the input is an array given that the the attribute # is documented as an array but the input is not if attributes[self.class.attribute_map[key]].is_a?(Array) self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) }) end elsif !attributes[self.class.attribute_map[key]].nil? 
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]])) end # or else data not found in attributes(hash), not an issue as the data can be optional end self end # Deserializes the data based on type # @param string type Data type # @param string value Value to be deserialized # @return [Object] Deserialized data def _deserialize(type, value) case type.to_sym when :DateTime DateTime.parse(value) when :Date Date.parse(value) when :String value.to_s when :Integer value.to_i when :Float value.to_f when :BOOLEAN if value.to_s =~ /\A(true|t|yes|y|1)\z/i true else false end when :Object # generic object (usually a Hash), return directly value when /\AArray<(?<inner_type>.+)>\z/ inner_type = Regexp.last_match[:inner_type] value.map { |v| _deserialize(inner_type, v) } when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/ k_type = Regexp.last_match[:k_type] v_type = Regexp.last_match[:v_type] {}.tap do |hash| value.each do |k, v| hash[_deserialize(k_type, k)] = _deserialize(v_type, v) end end else # model temp_model = NSXT.const_get(type).new temp_model.build_from_hash(value) end end # Returns the string representation of the object # @return [String] String presentation of the object def to_s to_hash.to_s end # to_body is an alias to to_hash (backward compatibility) # @return [Hash] Returns the object in the form of hash def to_body to_hash end # Returns the object in the form of hash # @return [Hash] Returns the object in the form of hash def to_hash hash = {} self.class.attribute_map.each_pair do |attr, param| value = self.send(attr) next if value.nil? hash[param] = _to_hash(value) end hash end # Outputs non-array value in the form of hash # For object, use to_hash. Otherwise, just return the value # @param [Object] value Any valid value # @return [Hash] Returns the value in the form of hash def _to_hash(value) if value.is_a?(Array) value.compact.map { |v| _to_hash(v) } elsif value.is_a?(Hash) {}.tap do |hash| value.each { |k, v| hash[k] = _to_hash(v) } end elsif value.respond_to? :to_hash value.to_hash else value end end end end
33.185771
508
0.630777
ac3f9de5146d41c447ddb637c398ed6fee5fd1bf
405
require File.dirname(__FILE__) + '/braintree/braintree_common'

module ActiveMerchant #:nodoc:
  module Billing #:nodoc:
    class BraintreeGateway < Gateway
      include BraintreeCommon

      def self.new(options={})
        if options.has_key?(:login)
          BraintreeOrangeGateway.new(options)
        else
          BraintreeBlueGateway.new(options)
        end
      end
    end
  end
end
22.5
62
0.649383
28d8f6e6be874381ba6410cb64ac4657260f5412
1,404
module CartoDb
  MAX_SQL_GET_LENGTH = 1024
  TEMPLATES_PATH = Rails.root.join('lib', 'modules', 'carto_db', 'templates')
  SQL_URL = '/api/v2/sql'
  USERNAME = Rails.application.secrets.cartodb['username']
  API_KEY = Rails.application.secrets.cartodb['api_key']

  include HTTParty

  def self.query query, with_transaction=true
    query = with_transaction(query) if with_transaction
    needs_a_post = query.length >= MAX_SQL_GET_LENGTH

    response = if needs_a_post
      self.post(build_url(SQL_URL), body: {q: query, format: 'json'})
    else
      self.get(url_for_query(query))
    end

    JSON.parse(response.body)
  end

  def self.table_name habitat
    prefix = Rails.application.secrets.cartodb['table_prefix']
    "#{prefix}_#{habitat}_staging"
  end

  def self.build_url path, opts={}
    opts = {with_api_key: true}.merge opts
    uri = URI::HTTPS.build(
      host: "#{USERNAME}.cartodb.com",
      path: path,
      query: build_querystring(opts)
    )
    opts[:as_uri] ? uri : uri.to_s
  end

  private

  def self.build_querystring opts
    (opts[:query] || {}).tap { |query|
      query[:api_key] = API_KEY if opts[:with_api_key]
    }.to_query
  end

  def self.with_transaction query
    query << ';' if query.last != ';'
    "BEGIN; #{query} COMMIT;"
  end

  def self.url_for_query query, format="json"
    build_url(SQL_URL, query: {q: query, format: format})
  end
end
24.631579
77
0.670228
0150e5a02c3c6c9499fb5733b0180f1ba82eaf5c
146
class LatLng
  include Mongoid::Document

  field :lat, type: Float
  field :lng, type: Float

  embedded_in :geometry, class_name: "Geometry"
end
18.25
47
0.732877
03443e569c8cf2782c3728c27f62df784d70db3c
330
class District < ApplicationRecord
  include CommonUtils

  belongs_to :city
  has_many :locations

  before_validation do
    self.code = region_code(name) if name.present?
  end

  validates :name, presence: true, uniqueness: { scope: :city_id }
  validates :code, presence: true, uniqueness: { scope: [:name, :city_id] }
end
23.571429
76
0.721212
5d075a8f9a2ba274f223f05e40e9ecb7ac280aba
11,500
# frozen_string_literal: true require_relative "../test_helper" SingleCov.covered! describe EnvironmentVariableGroupsController do def self.it_updates it "updates" do variable = env_group.environment_variables.first refute_difference "EnvironmentVariable.count" do put :update, params: { id: env_group.id, environment_variable_group: { environment_variables_attributes: { "0" => {name: "N1", value: "V2", scope_type_and_id: "DeployGroup-#{deploy_group.id}", id: variable.id} } } } end assert_redirected_to "/environment_variable_groups" variable.reload.value.must_equal "V2" variable.reload.scope.must_equal deploy_group end end def self.it_destroys it "destroy" do env_group assert_difference "EnvironmentVariableGroup.count", -1 do delete :destroy, params: {id: env_group.id} end assert_redirected_to "/environment_variable_groups" end end let(:stage) { stages(:test_staging) } let(:project) { stage.project } let(:deploy_group) { stage.deploy_groups.first } let!(:env_group) do EnvironmentVariableGroup.create!( name: "G1", environment_variables_attributes: { 0 => {name: "X", value: "Y"}, 1 => {name: "Y", value: "Z"} } ) end let!(:other_env_group) do EnvironmentVariableGroup.create!( name: "OtherG1", environment_variables_attributes: { 0 => {name: "X", value: "Y"}, 1 => {name: "Y", value: "Z", scope_type_and_id: "DeployGroup-#{deploy_group.id}"} } ) end let(:other_project) do p = project.dup p.name = 'xxxxx' p.permalink = 'xxxxx' p.save!(validate: false) p end as_a :viewer do unauthorized :get, :new unauthorized :post, :create describe "#update" do it "is unauthorized" do patch :update, params: {id: env_group.id} assert_response :unauthorized end end describe "#destroy" do it "is unauthorized" do delete :destroy, params: {id: env_group.id} assert_response :unauthorized end end describe "#index" do it "renders" do get :index assert_response :success end it "renders json" do get :index, format: :json assert_response :success json_response = JSON.parse response.body first_group = json_response['environment_variable_groups'].first first_group.keys.must_include "name" first_group.keys.must_include "variable_names" first_group['name'].must_equal "G1" first_group['variable_names'].must_equal ["X", "Y"] end it "renders with envionment_variables if present" do get :index, params: {includes: "environment_variables", format: :json} assert_response :success project = JSON.parse(response.body) project.keys.must_include "environment_variables" end it "filters by project" do ProjectEnvironmentVariableGroup.create!(environment_variable_group: other_env_group, project: other_project) get :index, params: {project_id: other_project.id, format: :json} assert_response :success json_response = JSON.parse response.body first_group = json_response['environment_variable_groups'].first json_response['environment_variable_groups'].count.must_equal 1 first_group.keys.must_include "name" first_group.keys.must_include "variable_names" first_group['name'].must_equal other_env_group.name first_group['variable_names'].must_equal ["X", "Y"] end end describe "#show" do def unauthorized_env_group ProjectEnvironmentVariableGroup.create!(environment_variable_group: env_group, project: other_project) end it "renders" do get :show, params: {id: env_group.id} assert_response :success end it 'disables fields if user cannot edit env group' do unauthorized_env_group get :show, params: {id: env_group.id} assert_response :success assert_select 'fieldset[disabled]', count: 2 end end describe "#preview" do it "renders for groups" do get :preview, 
params: {group_id: env_group.id} assert_response :success end it "renders for projects" do get :preview, params: {project_id: project.id} assert_response :success end it "shows secret previews" do EnvironmentVariable.expects(:env). with(anything, anything, project_specific: nil, resolve_secrets: :preview).times(3) get :preview, params: {group_id: env_group.id} assert_response :success end it "can show secret paths" do EnvironmentVariable.expects(:env). with(anything, anything, project_specific: nil, resolve_secrets: false).times(3) get :preview, params: {group_id: env_group.id, preview: "false"} assert_response :success end end describe "a json GET to #preview" do it "succeeds" do get :preview, params: {group_id: env_group.id, project_id: project.id, preview: false}, format: :json assert_response :success json_response = JSON.parse response.body json_response['groups'].sort.must_equal [ [".pod1", {"X" => "Y", "Y" => "Z"}], [".pod100", {"X" => "Y", "Y" => "Z"}], [".pod2", {"X" => "Y", "Y" => "Z"}] ] end it "only shows single deploy_group with filtering on" do get :preview, params: {group_id: env_group.id, project_id: project.id, deploy_group: "pod2"}, format: :json assert_response :success json_response = JSON.parse response.body json_response['groups'].sort.must_equal [ [".pod2", {"X" => "Y", "Y" => "Z"}] ] end it "fails when deploy group is unknown" do assert_raises ActiveRecord::RecordNotFound do get :preview, params: {group_id: env_group.id, project_id: project.id, deploy_group: "pod23"}, format: :json end end describe "project_specific" do before do EnvironmentVariable.create!(parent: project, name: 'B', value: 'b') ProjectEnvironmentVariableGroup.create!(environment_variable_group: other_env_group, project: project) end it "renders only project env" do get :preview, params: {project_id: project.id, project_specific: true}, format: :json assert_response :success json_response = JSON.parse response.body json_response['groups'].sort.must_equal [ [".pod1", {"B" => "b"}], [".pod100", {"B" => "b"}], [".pod2", {"B" => "b"}] ] end it "renders only groups env" do get :preview, params: {project_id: project.id, project_specific: false}, format: :json assert_response :success json_response = JSON.parse response.body json_response['groups'].sort.must_equal [ [".pod1", {"X" => "Y"}], [".pod100", {"Y" => "Z", "X" => "Y"}], [".pod2", {"X" => "Y"}] ] end it "renders without project_specific" do get :preview, params: {project_id: project.id, project_specific: nil}, format: :json assert_response :success json_response = JSON.parse response.body json_response['groups'].sort.must_equal [ [".pod1", {"B" => "b", "X" => "Y"}], [".pod100", {"B" => "b", "Y" => "Z", "X" => "Y"}], [".pod2", {"B" => "b", "X" => "Y"}] ] end end end end as_a :project_admin do describe "#new" do it "renders" do get :new assert_response :success end end describe "#create" do it "creates" do assert_difference "EnvironmentVariable.count", +1 do assert_difference "EnvironmentVariableGroup.count", +1 do post :create, params: { environment_variable_group: { environment_variables_attributes: {"0" => {name: "N1", value: "V1"}}, name: "G2" } } end end assert_redirected_to "/environment_variable_groups" end it "shows errors" do refute_difference "EnvironmentVariable.count" do post :create, params: {environment_variable_group: {name: ""}} end assert_template "form" end end describe "#update" do let(:params) do { id: env_group.id, environment_variable_group: { name: "G2", comment: "COOMMMENT", environment_variables_attributes: { "0" => {name: 
"N1", value: "V1"} } } } end before { env_group } it "adds" do assert_difference "EnvironmentVariable.count", +1 do put :update, params: params end assert_redirected_to "/environment_variable_groups" env_group.reload env_group.name.must_equal "G2" env_group.comment.must_equal "COOMMMENT" end it_updates it "shows errors" do refute_difference "EnvironmentVariable.count" do put :update, params: {id: env_group.id, environment_variable_group: {name: ""}} end assert_template "form" end it "destroys variables" do variable = env_group.environment_variables.first assert_difference "EnvironmentVariable.count", -1 do put :update, params: { id: env_group.id, environment_variable_group: { environment_variables_attributes: { "0" => {name: "N1", value: "V2", id: variable.id, _destroy: true} } } } end assert_redirected_to "/environment_variable_groups" end it 'updates when the group is used by a project where the user is an admin' do ProjectEnvironmentVariableGroup.create!(environment_variable_group: env_group, project: project) assert_difference "EnvironmentVariable.count", +1 do put :update, params: params end end it "cannot update when not an admin for any used projects" do ProjectEnvironmentVariableGroup.create!(environment_variable_group: env_group, project: other_project) put :update, params: params assert_response :unauthorized end it "cannot update when not an admin for some used projects" do ProjectEnvironmentVariableGroup.create!(environment_variable_group: env_group, project: project) ProjectEnvironmentVariableGroup.create!(environment_variable_group: env_group, project: other_project) put :update, params: params assert_response :unauthorized end end describe "#destroy" do it_destroys it "cannot destroy when not an admin for all used projects" do ProjectEnvironmentVariableGroup.create!(environment_variable_group: env_group, project: other_project) delete :destroy, params: {id: env_group.id} assert_response :unauthorized end end end as_a :admin do describe "#update" do before { env_group } it_updates end describe "#destroy" do it_destroys end end end
31.593407
118
0.609739
111b45404bd9dcff50f7182c659097d4f651063d
1,847
class Dromeaudio < Formula
  desc "Small C++ audio manipulation and playback library"
  homepage "https://github.com/joshb/dromeaudio/"
  url "https://github.com/joshb/DromeAudio/archive/v0.3.0.tar.gz"
  sha256 "d226fa3f16d8a41aeea2d0a32178ca15519aebfa109bc6eee36669fa7f7c6b83"
  license "BSD-2-Clause"
  head "https://github.com/joshb/dromeaudio.git"

  bottle do
    rebuild 2
    sha256 cellar: :any_skip_relocation, arm64_big_sur: "56127ff9fdb552e5a521d52a9a848ddf1f4a79029740d65f053ba9cc8ab2c7f7"
    sha256 cellar: :any_skip_relocation, big_sur: "ef9ce724d04545c565e1e46f06560128f54c8fd164fdc3d3abca18a4d17ad9b6"
    sha256 cellar: :any_skip_relocation, catalina: "5199ecfbb8454f1560685c537b1fbaf1b301b39ad8ea825a9f846cc9f3530f30"
    sha256 cellar: :any_skip_relocation, mojave: "062b0fa8e43363d60e5816343d1fcb7f58ce02c236512d96f4bf4ba10c96fd2c"
    sha256 cellar: :any_skip_relocation, high_sierra: "1334685c021a520567e2d16bfe68ebddea8f9382a50645e241d09349cfb6b450"
  end

  depends_on "cmake" => :build

  def install
    # install FindDromeAudio.cmake under share/cmake/Modules/
    inreplace "share/CMakeLists.txt", "${CMAKE_ROOT}", "#{share}/cmake"

    system "cmake", ".", *std_cmake_args
    system "make", "install"
  end

  test do
    assert_predicate include/"DromeAudio", :exist?
    assert_predicate lib/"libDromeAudio.a", :exist?

    # We don't test DromeAudioPlayer with an audio file because it only works
    # with certain audio devices and will fail on CI with this error:
    #   DromeAudio Exception: AudioDriverOSX::AudioDriverOSX():
    #   AudioUnitSetProperty (for StreamFormat) failed
    #
    # Related PR: https://github.com/Homebrew/homebrew-core/pull/55292
    assert_match /Usage: .*?DromeAudioPlayer <filename>/i,
      shell_output(bin/"DromeAudioPlayer 2>&1", 1)
  end
end
45.04878
122
0.756903
d5ef0f2843d560c4ebbb119dc06e677bccb2769d
313
# encoding: utf-8
require File.expand_path(File.join(File.dirname(__FILE__), '..', 'spec_helper'))
require 'data_objects/spec/shared/typecast/boolean_spec'

describe 'DataObjects::Postgres with Boolean' do
  it_should_behave_like 'supporting Boolean'
  it_should_behave_like 'supporting Boolean autocasting'
end
31.3
80
0.801917
112b6b713a7f7c58de6f383e1b46d5afc9b1889f
5,657
class MingwW64 < Formula desc "Minimalist GNU for Windows and GCC cross-compilers" homepage "http://mingw-w64.org/" url "https://downloads.sourceforge.net/project/mingw-w64/mingw-w64/mingw-w64-release/mingw-w64-v7.0.0.tar.bz2" sha256 "aa20dfff3596f08a7f427aab74315a6cb80c2b086b4a107ed35af02f9496b628" revision 2 bottle do sha256 "092d1d30ae9f2de677a35f14ec2907d285b85f9b4ed465a506f72a970deea715" => :catalina sha256 "cdefb18e91d0102ba193caa2c6994d83c30742fa03e03e12b3fc5864ca6b003c" => :mojave sha256 "2658e687bbfee45cfa9d0d74c9c129f9ffc2ac1de017143d25b4017aa899404f" => :high_sierra end # Apple's makeinfo is old and has bugs depends_on "texinfo" => :build depends_on "gmp" depends_on "isl" depends_on "libmpc" depends_on "mpfr" resource "binutils" do url "https://ftp.gnu.org/gnu/binutils/binutils-2.34.tar.xz" mirror "https://ftpmirror.gnu.org/binutils/binutils-2.34.tar.xz" sha256 "f00b0e8803dc9bab1e2165bd568528135be734df3fabf8d0161828cd56028952" end resource "gcc" do url "https://ftp.gnu.org/gnu/gcc/gcc-9.3.0/gcc-9.3.0.tar.xz" mirror "https://ftpmirror.gnu.org/gcc/gcc-9.3.0/gcc-9.3.0.tar.xz" sha256 "71e197867611f6054aa1119b13a0c0abac12834765fe2d81f35ac57f84f742d1" end def target_archs ["i686", "x86_64"].freeze end def install target_archs.each do |arch| arch_dir = "#{prefix}/toolchain-#{arch}" target = "#{arch}-w64-mingw32" resource("binutils").stage do args = %W[ --target=#{target} --with-sysroot=#{arch_dir} --prefix=#{arch_dir} --enable-targets=#{target} --disable-multilib ] mkdir "build-#{arch}" do system "../configure", *args system "make" system "make", "install" end end # Put the newly built binutils into our PATH ENV.prepend_path "PATH", "#{arch_dir}/bin" mkdir "mingw-w64-headers/build-#{arch}" do system "../configure", "--host=#{target}", "--prefix=#{arch_dir}/#{target}" system "make" system "make", "install" end # Create a mingw symlink, expected by GCC ln_s "#{arch_dir}/#{target}", "#{arch_dir}/mingw" # Build the GCC compiler resource("gcc").stage buildpath/"gcc" args = %W[ --target=#{target} --with-sysroot=#{arch_dir} --prefix=#{arch_dir} --with-bugurl=https://github.com/Homebrew/homebrew-core/issues --enable-languages=c,c++,fortran --with-ld=#{arch_dir}/bin/#{target}-ld --with-as=#{arch_dir}/bin/#{target}-as --with-gmp=#{Formula["gmp"].opt_prefix} --with-mpfr=#{Formula["mpfr"].opt_prefix} --with-mpc=#{Formula["libmpc"].opt_prefix} --with-isl=#{Formula["isl"].opt_prefix} --disable-multilib --enable-threads=posix ] mkdir "#{buildpath}/gcc/build-#{arch}" do system "../configure", *args system "make", "all-gcc" system "make", "install-gcc" end # Build the mingw-w64 runtime args = %W[ CC=#{target}-gcc CXX=#{target}-g++ CPP=#{target}-cpp --host=#{target} --with-sysroot=#{arch_dir}/#{target} --prefix=#{arch_dir}/#{target} ] if arch == "i686" args << "--enable-lib32" << "--disable-lib64" elsif arch == "x86_64" args << "--disable-lib32" << "--enable-lib64" end mkdir "mingw-w64-crt/build-#{arch}" do system "../configure", *args system "make" system "make", "install" end # Build the winpthreads library # we need to build this prior to the # GCC runtime libraries, to have `-lpthread` # available, for `--enable-threads=posix` args = %W[ CC=#{target}-gcc CXX=#{target}-g++ CPP=#{target}-cpp --host=#{target} --with-sysroot=#{arch_dir}/#{target} --prefix=#{arch_dir}/#{target} ] mkdir "mingw-w64-libraries/winpthreads/build-#{arch}" do system "../configure", *args system "make" system "make", "install" end # Finish building GCC (runtime libraries) chdir "#{buildpath}/gcc/build-#{arch}" do system "make" 
system "make", "install" end # Symlinks all binaries into place mkdir_p bin Dir["#{arch_dir}/bin/*"].each { |f| ln_s f, bin } end end test do (testpath/"hello.c").write <<~EOS #include <stdio.h> #include <windows.h> int main() { puts("Hello world!"); MessageBox(NULL, TEXT("Hello GUI!"), TEXT("HelloMsg"), 0); return 0; } EOS (testpath/"hello.cc").write <<~EOS #include <iostream> int main() { std::cout << "Hello, world!" << std::endl; return 0; } EOS (testpath/"hello.f90").write <<~EOS program hello ; print *, "Hello, world!" ; end program hello EOS ENV["LC_ALL"] = "C" ENV.remove_macosxsdk target_archs.each do |arch| target = "#{arch}-w64-mingw32" outarch = (arch == "i686") ? "i386" : "x86-64" system "#{bin}/#{target}-gcc", "-o", "test.exe", "hello.c" assert_match "file format pei-#{outarch}", shell_output("#{bin}/#{target}-objdump -a test.exe") system "#{bin}/#{target}-g++", "-o", "test.exe", "hello.cc" assert_match "file format pei-#{outarch}", shell_output("#{bin}/#{target}-objdump -a test.exe") system "#{bin}/#{target}-gfortran", "-o", "test.exe", "hello.f90" assert_match "file format pei-#{outarch}", shell_output("#{bin}/#{target}-objdump -a test.exe") end end end
31.780899
112
0.594485
79a1aa2dfcff99ef7630c76f3ee77855fe8308cd
621
Rails.application.routes.draw do
  namespace :api do
    namespace :v1 do
      resources :events, only: [:show, :create, :update, :destroy]
      resources :pets, only: [:create, :update, :index, :destroy]
      resources :users, only: [:show, :create, :update, :index]
      resources :user_relationships, only: [ :create, :update, :destroy]
      # get '/profile/:id', to: 'users#profile'
      get '/users/friends/:id', to: 'users#friends'
      # delete '/relationships/', to: 'user_relationships#destroy'
    end
  end
  post '/login', to: 'application#login'
  # get 'users/friends/:id', to: 'users#friends'
end
34.5
72
0.639291
79e434a6b5b8ea08d3dd204cb87d94fc38537edb
197
module Api
  class ProgressController < ApplicationController
    def index
      render json: {number_of_cities: City.count, number_of_cities_with_data_portal: CityPortal.count}
    end
  end
end
24.625
102
0.77665
d56e23a74e519d444c058bdafbafb3d18f6be1bd
66
module Fog
  module Aliyun
    VERSION = '0.2.0'.freeze
  end
end
11
28
0.651515
e2e7698189cefa199a87bc45d3213141042c3651
1,185
class AvalonDerivativeService < Hyrax::ActiveEncode::ActiveEncodeDerivativeService
  # HACK: Prepend this derivative service
  Hyrax::DerivativeService.services = [AvalonDerivativeService] + Hyrax::DerivativeService.services

  def create_derivatives(filename)
    super
    # Create thumbnails and posters outside of ActiveEncode for now.
    # Hydra-derivatives's video runner/processor don't have a way to
    # set the size of image derivatives so thumbnail and poster are both the same size for now.
    Hydra::Derivatives::VideoDerivatives.create(filename,
      outputs: [{ label: :thumbnail, format: 'jpg', url: derivative_url('thumbnail') },
                { label: :poster, format: 'jpg', url: derivative_url('poster') }])
  end

  # The destination_name parameter has to match up with the file parameter
  # passed to the DownloadsController
  def derivative_url(destination_name)
    if destination_name.to_s == 'thumbnail' || destination_name.to_s == 'poster'
      path = Hyrax::DerivativePath.derivative_path_for_reference(file_set, destination_name)
      URI("file://#{path}").to_s
    else
      super
    end
  end
end
47.4
139
0.705485
ff6c4a06517241ff4446d49dc8a630a7cf9e2c66
5,521
# encoding: utf-8 # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. module Azure::CognitiveServices::LuisAuthoring::V3_0_preview module Models # # Object model of an application version. # class VersionInfo include MsRestAzure # @return [String] The version ID. E.g.: "0.1" attr_accessor :version # @return [DateTime] The version's creation timestamp. attr_accessor :created_date_time # @return [DateTime] Timestamp of the last update. attr_accessor :last_modified_date_time # @return [DateTime] Timestamp of the last time the model was trained. attr_accessor :last_trained_date_time # @return [DateTime] Timestamp when was last published. attr_accessor :last_published_date_time # @return [String] The Runtime endpoint URL for this model version. attr_accessor :endpoint_url # @return [Hash{String => String}] The endpoint key. attr_accessor :assigned_endpoint_key # @return External keys. attr_accessor :external_api_keys # @return [Integer] Number of intents in this model. attr_accessor :intents_count # @return [Integer] Number of entities in this model. attr_accessor :entities_count # @return [Integer] Number of calls made to this endpoint. attr_accessor :endpoint_hits_count # @return [TrainingStatus] The current training status. Possible values # include: 'NeedsTraining', 'InProgress', 'Trained' attr_accessor :training_status # # Mapper for VersionInfo class as Ruby Hash. # This will be used for serialization/deserialization. # def self.mapper() { client_side_validation: true, required: false, serialized_name: 'VersionInfo', type: { name: 'Composite', class_name: 'VersionInfo', model_properties: { version: { client_side_validation: true, required: true, serialized_name: 'version', type: { name: 'String' } }, created_date_time: { client_side_validation: true, required: false, serialized_name: 'createdDateTime', type: { name: 'DateTime' } }, last_modified_date_time: { client_side_validation: true, required: false, serialized_name: 'lastModifiedDateTime', type: { name: 'DateTime' } }, last_trained_date_time: { client_side_validation: true, required: false, serialized_name: 'lastTrainedDateTime', type: { name: 'DateTime' } }, last_published_date_time: { client_side_validation: true, required: false, serialized_name: 'lastPublishedDateTime', type: { name: 'DateTime' } }, endpoint_url: { client_side_validation: true, required: false, serialized_name: 'endpointUrl', type: { name: 'String' } }, assigned_endpoint_key: { client_side_validation: true, required: false, serialized_name: 'assignedEndpointKey', type: { name: 'Dictionary', value: { client_side_validation: true, required: false, serialized_name: 'StringElementType', type: { name: 'String' } } } }, external_api_keys: { client_side_validation: true, required: false, serialized_name: 'externalApiKeys', type: { name: 'Object' } }, intents_count: { client_side_validation: true, required: false, serialized_name: 'intentsCount', type: { name: 'Number' } }, entities_count: { client_side_validation: true, required: false, serialized_name: 'entitiesCount', type: { name: 'Number' } }, endpoint_hits_count: { client_side_validation: true, required: false, serialized_name: 'endpointHitsCount', type: { name: 'Number' } }, training_status: { client_side_validation: true, required: true, serialized_name: 'trainingStatus', type: { name: 'Enum', module: 'TrainingStatus' } } } } } end end end end
31.016854
77
0.487412
918bd5c835fc76a1e38dc7eff01e12832690c4a7
2,827
require 'json'
require 'bunny'
require 'slop'
#require_relative '../terminal.rb'

# parsing commandline arguments
OPTIONS = Slop.parse { |o|
  o.string '-q', '--queue-name', 'RabbitMQ queue name (default: testing)', default: 'testing'
  o.string '-H', '--rabbit-host', 'RabbitMQ host address (default: 127.0.0.1)', default: '127.0.0.1'
  o.string '-U', '--rabbit-user', 'RabbitMQ user name (default: '')', default: 'guest'
  o.string '-P', '--rabbit-password', 'RabbitMQ user password (default: '')', default: 'guest'
  o.string '-r', '--require-file', 'File that defines workitem_process(workitem) function'
  o.on '-h', '--help' do puts o; exit 0; end
}

# exception which prevents re-processing of the workitem (message will be ack-ed in rabbit). other exceptions may lead to processing retry.
class PermanentWorkItemError < Exception; end

# loading the main workitem processing function
def crash_on_missing_workitem_process_function
  $stderr.puts "You need to (re-)define the workitem_process(workitem) function in some file, and require that file using the -r option."
  exit 1
end

crash_on_missing_workitem_process_function if not OPTIONS["require-file"]
require_relative OPTIONS["require-file"]
crash_on_missing_workitem_process_function if not defined? workitem_process

#Create a new bunny connection and initialize it.
puts "Connecting to the rabbitmq"
connection = Bunny.new(host: OPTIONS['rabbit-host'], vhost: "/", user: OPTIONS['rabbit-user'], password: OPTIONS['rabbit-password'])
connection.start

#Create a new channel and queue
channel = connection.create_channel
queue = channel.queue(OPTIONS['queue-name'], durable: true)
channel.prefetch(1)

#Create an exchange for the bindings
exchange = Bunny::Exchange.new(channel, :direct, OPTIONS['queue-name'], durable: true)

#Create the bindings
queue.bind(OPTIONS['queue-name'])
queue.bind(OPTIONS['queue-name'], routing_key: OPTIONS['queue-name'])

#Validate if queue exists.
raise "Why? Queue does not exist!" if not connection.queue_exists?(OPTIONS['queue-name'])

#Subscribe and wait for messages from the queue
puts "Waiting for workitems"
queue.subscribe(block: true, manual_ack: true) do |delivery_info, metadata, payload|
  begin
    puts "Received a workitem wi-#{delivery_info.delivery_tag}"
    workitem = JSON.parse(payload)["args"][0]
    puts workitem
    should_we_ack = workitem_process(workitem)
    puts "Workitem processing finished, ack: " + should_we_ack.inspect
    channel.ack(delivery_info.delivery_tag, false) if should_we_ack
  rescue PermanentWorkItemError => error
    channel.ack(delivery_info.delivery_tag, false)
    $stderr.puts error.to_s
    error.backtrace.each { |line| $stderr.puts line.inspect }
    exit 2
  rescue Exception => error
    $stderr.puts error.to_s
    error.backtrace.each { |line| $stderr.puts line.inspect }
    exit 3
  end
end
34.47561
139
0.749912
edfc54b9edf3a1081cddf656442b4fc6508bbaa4
2,526
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.

module Azure::RecoveryServicesSiteRecovery::Mgmt::V2018_01_10
  module Models
    #
    # DiskExclusionInput when doing enable protection of virtual machine in
    # InMage provider.
    #
    class InMageDiskExclusionInput

      include MsRestAzure

      # @return [Array<InMageVolumeExclusionOptions>] The volume label based
      # option for disk exclusion.
      attr_accessor :volume_options

      # @return [Array<InMageDiskSignatureExclusionOptions>] The guest disk
      # signature based option for disk exclusion.
      attr_accessor :disk_signature_options


      #
      # Mapper for InMageDiskExclusionInput class as Ruby Hash.
      # This will be used for serialization/deserialization.
      #
      def self.mapper()
        {
          client_side_validation: true,
          required: false,
          serialized_name: 'InMageDiskExclusionInput',
          type: {
            name: 'Composite',
            class_name: 'InMageDiskExclusionInput',
            model_properties: {
              volume_options: {
                client_side_validation: true,
                required: false,
                serialized_name: 'volumeOptions',
                type: {
                  name: 'Sequence',
                  element: {
                      client_side_validation: true,
                      required: false,
                      serialized_name: 'InMageVolumeExclusionOptionsElementType',
                      type: {
                        name: 'Composite',
                        class_name: 'InMageVolumeExclusionOptions'
                      }
                  }
                }
              },
              disk_signature_options: {
                client_side_validation: true,
                required: false,
                serialized_name: 'diskSignatureOptions',
                type: {
                  name: 'Sequence',
                  element: {
                      client_side_validation: true,
                      required: false,
                      serialized_name: 'InMageDiskSignatureExclusionOptionsElementType',
                      type: {
                        name: 'Composite',
                        class_name: 'InMageDiskSignatureExclusionOptions'
                      }
                  }
                }
              }
            }
          }
        }
      end
    end
  end
end
32.384615
88
0.527712
e96932dfac404389a3e23a25678f2915228c11c0
805
include_recipe "git" overlay_dir = node['joe']['overlay_dir'] directory overlay_dir do owner "root" group "root" recursive true end overlay_repo_branch = "joe-3.7" overlay_repo_branch = "joe-4.4" if node['platform'] == "debian" and node['platform_version'].to_i >= 9 git overlay_dir do repository node['joe']['overlay_repo'] reference overlay_repo_branch user "root" group "root" action :sync end template "#{overlay_dir}/setup-symlinks.sh" do source "setup-symlinks.sh.erb" owner "root" group "root" mode 00755 variables(:source => overlay_dir, :dest => node['joe']['target_dir']) end execute "setup-symlinks" do command "#{overlay_dir}/setup-symlinks.sh" end link "#{node['joe']['etc_dir']}/ftyperc" do to "#{overlay_dir}/ftyperc" owner "root" group "root" end
20.125
102
0.700621
79696b0e8596004a559bf9c405fd2b301dd2f6da
563
# frozen_string_literal: true

class User < ApplicationRecord
  class << User
    def admin
      User.new(ActionController::Parameters.new(:name => 'admin', :roles => [Role.admin, Role.standard]))
    end

    def standard
      User.new(ActionController::Parameters.new(:name => 'standard', :roles => [Role.standard]))
    end

    def guest
      User.new(ActionController::Parameters.new(:name => 'guest', :roles => [Role.guest]))
    end
  end

  has_many :user_roles
  has_many :roles, through: :user_roles

  def role_names
    roles.map(&:name)
  end
end
22.52
105
0.662522
6a753ec71b8d21158760e74d8f6879e32beb793f
501
# Be sure to restart your server when you modify this file.

# Version of your assets, change this if you want to expire all your assets.
Rails.application.config.assets.version = '1.0'

# Precompile additional assets.
# application.js, application.css, and all non-JS/CSS in app/assets folder are already added.
# Rails.application.config.assets.precompile += %w( search.js )
Rails.application.config.assets.precompile += ["joblint.min.js", 'administrate/job-show.css', 'administrate/joblinter.js']
45.545455
122
0.764471
1cab7327d14592362a2592dfc92785cec0e87d34
4,034
# This file should contain all the record creation needed to seed the database with its default values. # The data can then be loaded with the rake db:seed (or created alongside the db with db:setup). # # Examples: # # cities = City.create([{ name: 'Chicago' }, { name: 'Copenhagen' }]) # Mayor.create(name: 'Emanuel', city: cities.first) AdminUser.destroy_all User.destroy_all Category.destroy_all Item.destroy_all Shop.destroy_all AdminUser.create(email: '[email protected]', password: 'password', is_admin: true) jerry = User.new(openid: 'jerryatsephplus') jerry.save(validate: false) feixiong = User.new(openid: 'feixiongatsephplus') feixiong.save(validate: false) ['燃面系列', '汤面系列', '抄手系列', '炒饭系列', '特色点心', '爽口凉菜', '饮料系列'].each do |cat| Category.create(name: cat) end [ ['叙府燃面', 'XuFu Burning Noodles', 17, '特制芽菜、花生碎米、肉糜', 2], ['辣子鸡燃面', 'Burning Noodles with Spicy Chicken', 17, '香辣鸡丁、笋尖', 1], ['蜀味凉面', 'Cold Noodles', 16, '龙须牛肉、海带丝、花生、豆芽', 2], ['豇豆燃面', 'Burning Noodles with Cowpea', 18, '肉糜、豇豆', 2], ['双椒牛肉燃面', 'Burning Noodles with Shredded Beef and Bell Peppe', 18, '牛肉糜、川红椒、豆芽', 3], ['绝品牛肉面', 'Noodle Soup with Braised Beef', 19, '上等牛肉、笋尖', 3], ['蜀味排骨面', 'Noodle Soup with Spare Ribs', 18, '排骨、木耳丝', 2], ['红油臊子面', 'Noodle Soup with Spicy Meat Paste', 16, '肉糜、红油', 2], ['香辣蹄花面', 'Noodle Soup with Pork Trotters', 22, '蹄尖、豌豆', 2], ['原味炖鸡面', 'Noodle Soup with Chicken', 19, '鸡丁、青菜、笋片'] , ['老鸭竹荪面', 'Noodle Soup with Roasted Duck and Dictyophora', 23, '卤鸭、竹荪'], ['上汤三鲜面', 'Noodle Soup with Seafood', 21, '虾、香菇、鱿鱼'], ['菌王面', 'Noodle Soup with Mushrooms', 21, '金针菇、平菇、香菇'], ['竹笋酸菜面', 'Noodle Soup with Bamboo Shoots and Sauerkraut', 19, '竹笋、酸菜、肉片'], ['素养生面', 'Noodle Soup with Varied Vegetables', 17, '木耳丝、萝卜、青菜、玉米'], ['红油抄手', 'Spicy Wonton Soup', 18, '红汤、抄手', 2], ['燃抄手', 'Burning Wonton', 17, '特制芽菜、花生米', 1], ['鲜味抄手', 'Wonton Soup', 19, '三鲜汤、抄手'], ['豇豆肉糜炒饭', 'Meat with Cowpea Fried Rice', 18, '豇豆、肉糜'], ['虾仁炒饭', 'Shrimps Fried Rice', 21, '虾仁、鸡蛋'], ['麻辣牛肉炒饭', 'Super Spicy Beef Fried Rices', 19, '麻辣牛肉、土豆', 3], ['芽菜肉糜炒饭', 'Meat with Bean Sprouts Fried Rice', 17, '特制芽菜、肉糜、胡萝卜'], ['野山椒肉丝炒饭', 'Shredded Meat with Wild Pepper Fried Rice', 18, '野山椒、肉丝', 4], ['鱼香肉丝炒饭', 'Fish-flavored Fried Rice', 18, '鱼香、肉丝、木耳丝'], ['蜀香蛋炒饭', 'Egg Fried Rice', 15, '鸡蛋、玉米、青豆'], ['蜀香奶黄包', 'Steamed Bread with Milk and Egg', 9, '蛋黄、牛奶、糖'], ['肉糜蒸蛋', 'Steamed Egg', 5, '肉糜、鸡蛋'], ['竹香小黄粑', 'Steamed Sichuan Pastry', 11, '糯米、酥油'], ['蜀味蒸饺', 'Steamed Dumpling', 9, '肉糜、面粉、葱'], ['珍珠丸子', 'Pearl-shaped Cake', 13, '西米、芝麻、花生'], ['叶儿粑', 'Sichuan Sticky Rice Cake', 12, '肉糜、糯米'], ['玉米窝窝头', 'Sichuan Cornmeal Bread', 9, '玉米粉、白糖'], ['金针菇肥牛', 'Needle Mushroom with Sliced Beef', 13, '金针菇、上等肥牛'], ['凉拌海带丝', 'Kelp in Chili Sauce', 7, '海带丝、川椒'], ['凉拌豆腐干', 'Bean Curd in Chili Sauce', 8, '豆腐干、川椒'], ['凉拌鸡蛋干', 'Jidangan in Chili Sauce', 9, '鸡蛋干、川椒'], ['蜀味拌黄瓜', 'Cucumber Salad Dressed with Sauce', 7, '黄瓜、川椒'], ['油酥花生', 'Crispy Peanuts', 8, '花椒、花生'], ['糖拌西红柿', 'Tomatoes in Sugar', 9, '西红柿、白糖'], ['山椒木耳丝', 'Agaric Mixed with Wild Pepper', 9, '野山椒、木耳丝'], ['泡椒凤爪', 'Chicken Feet with Wild Pepper', 10, '凤爪、野山椒'], ['蜀味海藻', 'Seaweed Salad', 11, '海藻、香油'], ['银耳汤', 'Tremella Soup', 5], ['水晶冰粉', 'Crystal Bingfen', 5] , ['黑芝麻核桃羹', 'Sesame and Walnut Juice', 9], ['红枣羹', 'Red Date Juice', 9], ['营养花生浆', 'Nutrient Peanut Juice', 12], ['蜂蜜黄瓜汁', 'Fresh Cucumber Juice with Honey', 13], ['鲜榨香蕉汁', 'Fresh Banana Juice', 12], ['鲜榨菠萝汁', 'Fresh Pineapple Juice', 12], ['鲜榨西瓜汁', 'Fresh Watermelon Juice', 12], ['酸梅汁', 'Plum Juice', 6], ['阳光芒果汁', 'Sunshine Mango Juice', 8], ['七喜柠檬冰', 'Seven-up Lemon Juice', 7], 
['柠檬红茶', 'Lemon Black Tea', 6], ['柠檬可乐冰', 'Lemon Coke', 7] ].each do |i| materials = i[3].nil? ? '' : i[3].split('、') level = i[4].nil? ? 0 : i[4] category_ids = Category.all.map(&:id) Item.create(name: i[0], code: i[1], price: i[2], material_list: materials.blank? ? '' : materials.join(','), level: level, category_id: category_ids.sample) end
45.840909
158
0.620476
91940830f24bf366fa3564b596f3af579f41e96b
4,147
# encoding: utf-8 # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. module Azure::Automation::Mgmt::V2015_10_31 module Models # # Definition of the connection type. # class ConnectionType include MsRestAzure # @return [String] Gets the id of the resource. attr_accessor :id # @return [String] Gets the name of the connection type. attr_accessor :name # @return [String] Resource type attr_accessor :type # @return [Boolean] Gets or sets a Boolean value to indicate if the # connection type is global. attr_accessor :is_global # @return [Hash{String => FieldDefinition}] Gets the field definitions of # the connection type. attr_accessor :field_definitions # @return [DateTime] Gets the creation time. attr_accessor :creation_time # @return [DateTime] Gets or sets the last modified time. attr_accessor :last_modified_time # @return [String] Gets or sets the description. attr_accessor :description # # Mapper for ConnectionType class as Ruby Hash. # This will be used for serialization/deserialization. # def self.mapper() { client_side_validation: true, required: false, serialized_name: 'ConnectionType', type: { name: 'Composite', class_name: 'ConnectionType', model_properties: { id: { client_side_validation: true, required: false, read_only: true, serialized_name: 'id', type: { name: 'String' } }, name: { client_side_validation: true, required: false, read_only: true, serialized_name: 'name', type: { name: 'String' } }, type: { client_side_validation: true, required: false, read_only: true, serialized_name: 'type', type: { name: 'String' } }, is_global: { client_side_validation: true, required: false, serialized_name: 'properties.isGlobal', type: { name: 'Boolean' } }, field_definitions: { client_side_validation: true, required: false, read_only: true, serialized_name: 'properties.fieldDefinitions', type: { name: 'Dictionary', value: { client_side_validation: true, required: false, serialized_name: 'FieldDefinitionElementType', type: { name: 'Composite', class_name: 'FieldDefinition' } } } }, creation_time: { client_side_validation: true, required: false, read_only: true, serialized_name: 'properties.creationTime', type: { name: 'DateTime' } }, last_modified_time: { client_side_validation: true, required: false, serialized_name: 'properties.lastModifiedTime', type: { name: 'DateTime' } }, description: { client_side_validation: true, required: false, serialized_name: 'properties.description', type: { name: 'String' } } } } } end end end end
29.621429
79
0.472149
21444f68ea57c6dd77bc35752aa690b342ee3c1b
6,420
module Gollum
  # Controls all access to the Git objects from Gollum. Extend this class to
  # add custom caching for special cases.
  class GitAccess
    # Initializes the GitAccess instance.
    #
    # path          - The String path to the Git repository that holds the
    #                 Gollum site.
    # page_file_dir - String the directory in which all page files reside
    #
    # Returns this instance.
    def initialize(path, page_file_dir = nil)
      @page_file_dir = page_file_dir
      @path = path
      @repo = Grit::Repo.new(path)
      clear
    end

    # Public: Determines whether the Git repository exists on disk.
    #
    # Returns true if it exists, or false.
    def exist?
      @repo.git.exist?
    end

    # Public: Converts a given Git reference to a SHA, using the cache if
    # available.
    #
    # ref - a String Git reference (ex: "master")
    #
    # Returns a String.
    def ref_to_sha(ref)
      if sha?(ref)
        ref
      else
        get_cache(:ref, ref) { ref_to_sha!(ref) }
      end
    end

    # Public: Gets a recursive list of Git blobs for the whole tree at the
    # given commit.
    #
    # ref - A String Git reference or Git SHA to a commit.
    #
    # Returns an Array of BlobEntry instances.
    def tree(ref)
      if sha = ref_to_sha(ref)
        get_cache(:tree, sha) { tree!(sha) }
      else
        []
      end
    end

    # Public: Fetches the contents of the Git blob at the given SHA.
    #
    # sha - A String Git SHA.
    #
    # Returns the String content of the blob.
    def blob(sha)
      cat_file!(sha)
    end

    # Public: Looks up the Git commit using the given Git SHA or ref.
    #
    # ref - A String Git SHA or ref.
    #
    # Returns a Grit::Commit.
    def commit(ref)
      if sha?(ref)
        get_cache(:commit, ref) { commit!(ref) }
      else
        if sha = get_cache(:ref, ref)
          commit(sha)
        else
          if cm = commit!(ref)
            set_cache(:ref, ref, cm.id)
            set_cache(:commit, cm.id, cm)
          end
        end
      end
    end

    # Public: Clears all of the cached data that this GitAccess is tracking.
    #
    # Returns nothing.
    def clear
      @ref_map    = {}
      @tree_map   = {}
      @commit_map = {}
    end

    # Public: Refreshes just the cached Git reference data. This should
    # be called after every Gollum update.
    #
    # Returns nothing.
    def refresh
      @ref_map.clear
    end

    #########################################################################
    #
    # Internal Methods
    #
    #########################################################################

    # Gets the String path to the Git repository.
    attr_reader :path

    # Gets the Grit::Repo instance for the Git repository.
    attr_reader :repo

    # Gets a Hash cache of refs to commit SHAs.
    #
    #   {"master" => "abc123", ...}
    #
    attr_reader :ref_map

    # Gets a Hash cache of commit SHAs to a recursive tree of blobs.
    #
    #   {"abc123" => [<BlobEntry>, <BlobEntry>]}
    #
    attr_reader :tree_map

    # Gets a Hash cache of commit SHAs to the Grit::Commit instance.
    #
    #   {"abcd123" => <Grit::Commit>}
    #
    attr_reader :commit_map

    # Checks to see if the given String is a 40 character hex SHA.
    #
    # str - Possible String SHA.
    #
    # Returns true if the String is a SHA, or false.
    def sha?(str)
      !!(str =~ /^[0-9a-f]{40}$/)
    end

    # Looks up the Git SHA for the given Git ref.
    #
    # ref - String Git ref.
    #
    # Returns a String SHA.
    def ref_to_sha!(ref)
      @repo.git.rev_list({:max_count=>1}, ref)
    rescue Grit::GitRuby::Repository::NoSuchShaFound
    end

    # Looks up the Git blobs for a given commit.
    #
    # sha - String commit SHA.
    #
    # Returns an Array of BlobEntry instances.
    def tree!(sha)
      tree = @repo.git.native(:ls_tree, {:r => true, :l => true, :z => true}, sha)
      items = tree.split("\0").inject([]) do |memo, line|
        memo << parse_tree_line(line)
      end

      if dir = @page_file_dir
        regex = /^#{dir}\//
        items.select { |i| i.path =~ regex }
      else
        items
      end
    end

    # Reads the content from the Git db at the given SHA.
    #
    # sha - The String SHA.
    #
    # Returns the String content of the Git object.
    def cat_file!(sha)
      @repo.git.cat_file({:p => true}, sha)
    end

    # Reads a Git commit.
    #
    # sha - The string SHA of the Git commit.
    #
    # Returns a Grit::Commit.
    def commit!(sha)
      @repo.commit(sha)
    end

    # Attempts to get the given data from a cache. If it doesn't exist, it'll
    # pass the results of the yielded block to the cache for future accesses.
    #
    # name - The cache prefix used in building the full cache key.
    # key  - The unique cache key suffix, usually a String Git SHA.
    #
    # Yields a block to pass to the cache.
    # Returns the cached result.
    def get_cache(name, key)
      cache = instance_variable_get("@#{name}_map")
      value = cache[key]
      if value.nil? && block_given?
        set_cache(name, key, value = yield)
      end
      value == :_nil ? nil : value
    end

    # Writes some data to the internal cache.
    #
    # name  - The cache prefix used in building the full cache key.
    # key   - The unique cache key suffix, usually a String Git SHA.
    # value - The value to write to the cache.
    #
    # Returns nothing.
    def set_cache(name, key, value)
      cache = instance_variable_get("@#{name}_map")
      cache[key] = value || :_nil
    end

    # Parses a line of output from the `ls-tree` command.
    #
    # line - A String line of output:
    #          "100644 blob 839c2291b30495b9a882c17d08254d3c90d8fb53    Home.md"
    #
    # Returns an Array of BlobEntry instances.
    def parse_tree_line(line)
      mode, type, sha, size, *name = line.split(/\s+/)
      BlobEntry.new(sha, name.join(' '), size.to_i)
    end

    # Decode octal sequences (\NNN) in tree path names.
    #
    # path - String path name.
    #
    # Returns a decoded String.
    def decode_git_path(path)
      if path[0] == ?" && path[-1] == ?"
        path = path[1...-1]
        path.gsub!(/\\\d{3}/) { |m| m[1..-1].to_i(8).chr }
      end
      path.gsub!(/\\[rn"\\]/) { |m| eval(%("#{m.to_s}")) }
      path
    end
  end
end
26.639004
78
0.561526
bf25f712d364e778cf6ccde43ba96697a20acfe8
218
# frozen_string_literal: true

class AddUserStatsTable < ActiveRecord::Migration[5.2]
  def change
    create_table :user_stats do |t|
      t.jsonb :data
      t.integer :user_id

      t.timestamps
    end
  end
end
16.769231
54
0.683486
2675d5f0f11e9f319fac55b6fd11ebd58517eee6
2,773
class UseraccountsController < ApplicationController
  before_action :set_useraccount, only: [:show, :edit, :update, :destroy]

  # GET /useraccounts
  # GET /useraccounts.json
  def index
    @useraccounts = Useraccount.all
    insint = current_user.insints.first
    if insint.present?
      if insint.inskey.present?
        uri = "http://"+"#{insint.inskey}"+":"+"#{insint.password}"+"@"+"#{insint.subdomen}"+"/admin/account.json"
      else
        uri = "http://k-comment:"+"#{insint.password}"+"@"+"#{insint.subdomen}"+"/admin/account.json"
      end
      # puts uri
      response = RestClient.get(uri)
      data = JSON.parse(response)
      @ins_title = data['title']
      @ins_phone = data['phone']
    end
    invoice = Invoice.where(:status => "Оплачен").last
    @pay_period = invoice.updated_at.to_date + invoice.payplan.period.split(' ')[0].to_i.months || '' if invoice.present?
  end

  # GET /useraccounts/1
  # GET /useraccounts/1.json
  def show
  end

  # GET /useraccounts/new
  def new
    @useraccount = Useraccount.new
  end

  # GET /useraccounts/1/edit
  def edit
  end

  # POST /useraccounts
  # POST /useraccounts.json
  def create
    @useraccount = Useraccount.new(useraccount_params)

    respond_to do |format|
      if @useraccount.save
        format.html { redirect_to @useraccount, notice: 'Useraccount was successfully created.' }
        format.json { render :show, status: :created, location: @useraccount }
      else
        format.html { render :new }
        format.json { render json: @useraccount.errors, status: :unprocessable_entity }
      end
    end
  end

  # PATCH/PUT /useraccounts/1
  # PATCH/PUT /useraccounts/1.json
  def update
    respond_to do |format|
      if @useraccount.update(useraccount_params)
        format.html { redirect_to @useraccount, notice: 'Useraccount was successfully updated.' }
        format.json { render :show, status: :ok, location: @useraccount }
      else
        format.html { render :edit }
        format.json { render json: @useraccount.errors, status: :unprocessable_entity }
      end
    end
  end

  # DELETE /useraccounts/1
  # DELETE /useraccounts/1.json
  def destroy
    @useraccount.destroy
    respond_to do |format|
      format.html { redirect_to useraccounts_url, notice: 'Useraccount was successfully destroyed.' }
      format.json { head :no_content }
    end
  end

  private
    # Use callbacks to share common setup or constraints between actions.
    def set_useraccount
      @useraccount = Useraccount.find(params[:id])
    end

    # Never trust parameters from the scary internet, only allow the white list through.
    def useraccount_params
      params.require(:useraccount).permit(:name, :email, :shop, :insuserid, :user_id)
    end
end
30.141304
121
0.664263
ffcf312ff21557e7c4395484b18290a198ff10aa
1,391
class MottimagetxtsController < ApplicationController

  def create
    @motd = Motd.find(params[:motd_id])
    @mott = @motd.motts.find(params[:mott_id])
    @mottimage = @mott.mottimages.find(params[:mottimage_id])
    @mottimage.mottimagetxts.create(mottimagetxt_params)
    redirect_to motd_mott_path(@motd,@mott)
  end

  def destroy
    @motd = Motd.find(params[:motd_id])
    @mott = @motd.motts.find(params[:mott_id])
    @mottimage = @mott.mottimages.find(params[:mottimage_id])
    @mottimagetxt = @mottimage.mottimagetxts.find(params[:id])
    @mottimagetxt.destroy
    redirect_to motd_mott_path(@motd,@mott)
  end

  def edit
    @motd = Motd.find(params[:motd_id])
    @mott = @motd.motts.find(params[:mott_id])
    @mottimage = @mott.mottimages.find(params[:mottimage_id])
    @mottimagetxt = @mottimage.mottimagetxts.find(params[:id])
  end

  def update
    @motd = Motd.find(params[:motd_id])
    @mott = @motd.motts.find(params[:mott_id])
    @mottimage = @mott.mottimages.find(params[:mottimage_id])
    @mottimagetxt = @mottimage.mottimagetxts.find(params[:id])
    if @mottimagetxt.update(mottimagetxt_params)
      redirect_to motd_mott_path(@motd,@mott)
    else
      render 'edit'
    end
  end

  private

  def mottimagetxt_params
    params.require(:mottimagetxt).permit(:picga, :picba, :picda, :viddga, :vidba, :vidda, :adiddga, :adiba, :adida)
  end
end
30.911111
117
0.696621
619d51a099d72baf8ad224ae4929d6f7a127830d
380
class MigrateProviderOfficeCiudadToEnum < ActiveRecord::Migration
  def up
    say_with_time "WARNING: wiping all ciudades in provider offices" do
      remove_column :provider_offices, :ciudad
      add_column :provider_offices, :ciudad, :integer
    end
  end

  def down
    remove_column :provider_offices, :ciudad
    add_column :provider_offices, :ciudad, :string
  end
end
27.142857
71
0.752632
1ce8f811aff0c3ac8e0a1b4df1439a0049c8276b
165
FactoryBot.define do
  factory :user do
    name { Faker::Name.name }
    email { Faker::Internet.unique.email }
    password { Faker::Internet.password }
  end
end
20.625
42
0.678788
1a4338d9d474e698047c19f5d9cdea89ecd32fe1
1,419
require 'routemaster/jobs/client'
require 'routemaster/jobs/cache_and_sweep'

RSpec.describe Routemaster::Jobs::Client do
  let(:client) { Routemaster::Jobs::Client.new(adapter) }
  let(:perform) do
    client.enqueue('routemaster', Routemaster::Jobs::CacheAndSweep, 'https://example.com/1')
  end

  describe '#enqueue' do
    before do
      allow(Routemaster::Config).to receive(:queue_adapter).and_return(backend)
      allow(client).to receive(:enqueue).and_call_original
    end

    context 'when the backend is Resque' do
      let(:backend) { :resque }
      let(:adapter) { double('Resque', enqueue_to: nil) }

      it 'queues a Resque fetch job' do
        expect(adapter).to receive(:enqueue_to).with(
          'routemaster',
          Routemaster::Jobs::Backends::Resque::JobWrapper,
          { 'class' => 'Routemaster::Jobs::CacheAndSweep', 'args' => ['https://example.com/1'] })
        perform
      end
    end

    context 'when the backend is Sidekiq' do
      let(:backend) { :sidekiq }
      let(:adapter) { double('Sidekiq', push: nil) }

      it 'queues a Sidekiq fetch job' do
        expect(adapter).to receive(:push).with(
          'queue' => 'routemaster',
          'class' => Routemaster::Jobs::Backends::Sidekiq::JobWrapper,
          'args' => [{ 'class' => 'Routemaster::Jobs::CacheAndSweep', 'args' => ['https://example.com/1'] }])
        perform
      end
    end
  end
end
30.191489
109
0.625793
1ac38c4468d67d8cdce77b53953c208112548abf
406
# encoding: utf-8

require 'spec_helper'
require 'ice_nine/freezer'
require 'ice_nine/freezer/no_freeze'
require 'ice_nine/freezer/symbol'

describe IceNine::Freezer::Symbol, '.deep_freeze' do
  subject { object.deep_freeze(value) }

  let(:object) { described_class }

  context 'with a Symbol object' do
    let(:value) { :symbol }

    it_behaves_like 'IceNine::Freezer::NoFreeze.deep_freeze'
  end
end
21.368421
60
0.736453
e2b1fe948292aadf2dc0239ce16aedac2f795ac3
387
FactoryBot.define do
  factory :course do
    organization
    category { build(:category, organization: organization) }
    course { build(:course, organization: organization, category: category) }
    sequence(:name) { |n| "Course#{n}" }
    sequence(:description) { |n| "Description#{n}" }
    start_booking_hours { 24 }
    end_booking_minutes { 60 }
    state { 'active' }
  end
end
32.25
77
0.666667
01051ea0aa4c6816bfb6c0b1f90fd44a4176b753
207
class Model::LogicalAndExpr
  attr_accessor :left, :right

  def initialize ()
    @template = Template.make("templates/logicalAndExpr.c.erb")
  end

  def render ()
    @template.render(binding)
  end
end
15.923077
63
0.700483
f70a76f978687d07f3e6bfd485cf91e3462e3825
1,607
class Chipmunk < Formula
  desc "2D rigid body physics library written in C"
  homepage "https://chipmunk-physics.net/"
  url "https://chipmunk-physics.net/release/Chipmunk-7.x/Chipmunk-7.0.3.tgz"
  mirror "https://www.mirrorservice.org/sites/distfiles.macports.org/chipmunk/Chipmunk-7.0.3.tgz"
  sha256 "048b0c9eff91c27bab8a54c65ad348cebd5a982ac56978e8f63667afbb63491a"
  license "MIT"
  head "https://github.com/slembcke/Chipmunk2D.git"

  bottle do
    cellar :any
    sha256 "b71191c2c1e4859cb9d5e77b8684612dec1c191780a0b1d56afc04ada66da036" => :catalina
    sha256 "16292e5518bae60c6990a6f1565e1416f91ffe1c878ab43b58465bb2a24d3d11" => :mojave
    sha256 "5370b9d8db489d6b8944c23fd4906768c84d87e22f054ca3381c7ee527233f4d" => :high_sierra
    sha256 "c92a9c1134a272244ca3936b2c94431df7ed7002a9eec99f6914fe1128adae12" => :sierra
    sha256 "01338d806746c74b8500036c3014a0fdba695cedc95c58a7938046698191aecb" => :x86_64_linux
  end

  depends_on "cmake" => :build

  def install
    system "cmake", ".", "-DBUILD_DEMOS=OFF", *std_cmake_args
    system "make", "install"
    doc.install Dir["doc/*"]
  end

  test do
    (testpath/"test.c").write <<~EOS
      #include <stdio.h>
      #include <chipmunk.h>
      int main(void){
        cpVect gravity = cpv(0, -100);
        cpSpace *space = cpSpaceNew();
        cpSpaceSetGravity(space, gravity);
        cpSpaceFree(space);
        return 0;
      }
    EOS
    system ENV.cc, *("-pthread" unless OS.mac?), "-I#{include}/chipmunk",
           testpath/"test.c", "-L#{lib}", "-lchipmunk", "-o", testpath/"test"
    system "./test"
  end
end
33.479167
97
0.700685
bfad603291395bff437d7dbd03b414e4ec320585
269
class CreateContributions < ActiveRecord::Migration[5.2]
  def change
    create_table :contributions do |t|
      t.belongs_to :invitation, index: true
      t.belongs_to :user, index: true
      t.string :title
      t.text :note

      t.timestamps
    end
  end
end
22.416667
56
0.665428
bfe5e361f015d2c4683986b9784a11430a00e241
114
# frozen_string_literal: true

require 'rails_helper'

RSpec.describe AccountsController, type: :controller do
end
19
55
0.824561
386b99b8708a57387726f69152c57642c8c96e3f
354
class AddFinalAttributeToResults < ActiveRecord::Migration
  def self.up
    change_table :results do |t|
      t.boolean :final, :null => false, :default => false
      t.boolean :freezed, :null => false, :default => false
    end
  end

  def self.down
    change_table :results do |t|
      t.remove :final
      t.remove :freezed
    end
  end
end
22.125
59
0.641243
f8eb3e77734bc97355a4f10585d5038df5cc8af4
212
class CreateReviews < ActiveRecord::Migration[5.1]
  def change
    create_table :reviews do |t|
      t.belongs_to :medication
      t.integer :rating
      t.text :comment
      t.timestamps
    end
  end
end
17.666667
50
0.65566
bfe39f864f4fe0c28b8bb451afbc01085077ed89
1,082
require 'rubygems'
require 'ramaze'
require 'sequel'
require 'scaffolding_extensions'

# More information on Scaffolding Extensions here: http://scaffolding-ext.rubyforge.org/

DB = Sequel.sqlite

# Sequel::Model doesn't support schema creation by default
# So we have to load it as a plugin
Sequel::Model.plugin :schema

class User < Sequel::Model(:user)
  set_schema do
    primary_key :id
    varchar :name
    text :description
  end

  create_table unless table_exists?

  # Add a couple of users to our database
  create(:name => 'manveru', :description => 'The first user!')
  create(:name => 'injekt', :description => 'Just another user')
end

ScaffoldingExtensions.all_models = [User]

class UserController < Ramaze::Controller
  map '/user'
  scaffold_all_models :only => [User]
end

class MainController < Ramaze::Controller
  def index
    %{Scaffolding extension enabled for
      <a href="http://sequel.rubyforge.org/classes/Sequel/Model.html">
      Sequel::Model </a> User.
      You can access the scaffolded Model at #{a('/user')}}
  end
end

Ramaze.start
23.521739
88
0.714418
21b3552668f0b6ffbd5d3478ff38e079513d129d
918
module Geokit
  module Geocoders
    # Provides geocoding based upon an IP address. The underlying web service is GeoSelect
    class GeobytesGeocoder < BaseIpGeocoder
      def self.do_geocode(ip, _=nil)
        process :json, ip
      end

      def self.submit_url(ip)
        "http://getcitydetails.geobytes.com/GetCityDetails?fqcn=#{ip}"
      end

      def self.parse_json(json)
        loc = new_loc
        loc.city = json['geobytescity']
        loc.country_code = json['geobytesinternet']
        loc.full_address = json['geobytesfqcn']
        loc.lat = json['geobyteslatitude']
        loc.lng = json['geobyteslongitude']
        loc.state = json['geobytescode']
        loc.precision = json['geobytescertainty']
        loc.state_name = json['geobytesregion']
        loc.success = !json['geobytescity'].empty?
        loc
      end
    end
  end
end
31.655172
91
0.599129
edf2d64f41e55a7f9b36655decaf1896fdb2d8ed
866
covers 'facets/date/cmp'

test_case Time do

  method :<=> do

    test "equivalent" do
      time = Time.gm(2000)  # 2000-01-01 00:00:00 UTC
      date = time.to_date
      datetime = time.to_datetime

      (time <=> date).assert == 0
      (time <=> datetime).assert == 0
      (date <=> datetime).assert == 0
      (date <=> time).assert == 0
      (datetime <=> time).assert == 0
      (datetime <=> date).assert == 0
    end

    test "different" do
      today = Date.today
      yesterday = today - 1

      (today <=> yesterday).assert == 1
      (yesterday <=> today).assert == -1
      (today <=> yesterday.to_datetime).assert == 1
      (today <=> yesterday.to_time).assert == 1
      (today.to_time <=> yesterday.to_datetime).assert == 1
      (today.to_time <=> yesterday.to_time).assert == 1
    end

  end

end
22.205128
59
0.536952
877502c0798794d9bf1726ead9dcfbbd9df93b45
42
module Feedbacker
  VERSION = '0.1.0'
end
10.5
19
0.690476
ed5b4d8dddf4c9effd9ab4e4cfe884fa535d2354
1,384
# frozen_string_literal: true

# this enables activerecord, and allows us to test the auto detection logic
require 'active_record'

describe SqlPatches do # and patched= to some extent
  describe ".all_patch_files" do
    it "uses env variable" do
      with_patch_env("custom1") do
        expect(SqlPatches.all_patch_files).to eq(["custom1"])
      end
    end

    it "uses supports multiple env variable" do
      with_patch_env("custom1,custom2") do
        expect(SqlPatches.all_patch_files).to eq(["custom1", "custom2"])
      end
    end

    it "strips whitespace from env variable" do
      with_patch_env("custom1, custom2") do
        expect(SqlPatches.all_patch_files).to eq(["custom1", "custom2"])
      end
    end

    it "allows env var to turn off" do
      with_patch_env("false") do
        expect(SqlPatches.all_patch_files).to eq([])
      end
    end

    it "uses detection of env variable is not defined" do
      with_patch_env(nil) do
        expect(SqlPatches.all_patch_files).to eq([])
        expect(Rack::MiniProfiler).to receive(:patch_rails?).and_return(true)
        expect(SqlPatches.all_patch_files).to eq(["activerecord"])
      end
    end
  end

  def with_patch_env(value)
    old_value = ENV["RACK_MINI_PROFILER_PATCH"]
    ENV["RACK_MINI_PROFILER_PATCH"] = value
    yield
  ensure
    ENV["RACK_MINI_PROFILER_PATCH"] = old_value
  end
end
27.68
77
0.679913
21b6fd573469fc3e46d52dacb1680d47114476ba
1,269
require File.join(File.dirname(__FILE__), '..', 'spec_helper')
require "shared_factory_specs"

require 'rubyonacid/factories/sine'

include RubyOnAcid

describe "SineFactory" do

  before :each do
    @page = Harmony::Page.new
    @page.load('lib/jsonacid.js')
    @page.execute_js "var it = new SineFactory();"
  end

  it_should_behave_like "a factory"

  it "loops between 0 and 1" do
    js("it.interval = 1.0;")
    js("it.getUnit('x')").should be_close(0.920, MARGIN)
    js("it.getUnit('x')").should be_close(0.954, MARGIN)
    js("it.getUnit('x')").should be_close(0.570, MARGIN)
    js("it.getUnit('x')").should be_close(0.122, MARGIN)
    js("it.getUnit('x')").should be_close(0.020, MARGIN)
    js("it.getUnit('x')").should be_close(0.360, MARGIN)
  end

  it "can take a different interval" do
    js("it.interval = 0.5;")
    js("it.getUnit('x')").should be_close(0.740, MARGIN)
    js("it.getUnit('x')").should be_close(0.920, MARGIN)
  end

  it "handles multiple keys" do
    js("it.interval = 1.0;")
    js("it.getUnit('x')").should be_close(0.920, MARGIN)
    js("it.getUnit('y')").should be_close(0.920, MARGIN)
    js("it.getUnit('x')").should be_close(0.954, MARGIN)
    js("it.getUnit('y')").should be_close(0.954, MARGIN)
  end

end
30.214286
62
0.642238
f7847c55b38a90f50faed1558487143d3c5a787b
78
class KeepItUpPlayer < ActiveRecord::Base
  attr_accessible :name, :score
end
19.5
41
0.794872
7940479d6cac04ebdff6bcdbe231c19ac16055af
901
cask 'kubernetic' do
  version '2.5.0'
  sha256 'f426312f7191ba86c0990a31a2d831f983777c412d5a93c92356b21fc945728b'

  # kubernetic.s3.amazonaws.com was verified as official when first introduced to the cask
  url "https://kubernetic.s3.amazonaws.com/Kubernetic-#{version}.dmg"
  appcast 'https://kubernetic.s3.amazonaws.com/latest-mac.yml'
  name 'Kubernetic'
  homepage 'https://kubernetic.com/'

  app 'Kubernetic.app'

  uninstall signal: [
                      ['TERM', 'com.kubernetic.desktop.helper'],
                      ['TERM', 'com.kubernetic.desktop'],
                    ]

  zap trash: [
               '~/.kubernetic',
               '~/Library/Application Support/Kubernetic',
               '~/Library/Logs/Kubernetic',
               '~/Library/Preferences/com.kubernetic.desktop.*',
               '~/Library/Saved Application State/com.kubernetic.desktop.*',
             ]
end
34.653846
90
0.618202
5dbf0286c009094d33c9e88c7966bd0c4edea67c
230
# typed: true
module Opus; end

def main
  T.assert_type!(Opus, Module)
  # T.assert_type!(T::Array, Module) # temporary disabled while we polish generic syntax
  T.junk # error: Method `junk` does not exist on `T.class_of(T)`
end
28.75
88
0.717391
1ad1855f80cbf4d4f0c584986667d39750014a05
6,408
# frozen_string_literal: true require "set" require "kafka/consumer_group/assignor" require "kafka/round_robin_assignment_strategy" module Kafka class ConsumerGroup attr_reader :assigned_partitions, :generation_id, :group_id def initialize(cluster:, logger:, group_id:, session_timeout:, rebalance_timeout:, retention_time:, instrumenter:, assignment_strategy:) @cluster = cluster @logger = TaggedLogger.new(logger) @group_id = group_id @session_timeout = session_timeout @rebalance_timeout = rebalance_timeout @instrumenter = instrumenter @member_id = "" @generation_id = nil @members = {} @topics = Set.new @assigned_partitions = {} @assignor = Assignor.new( cluster: cluster, strategy: assignment_strategy || RoundRobinAssignmentStrategy.new ) @retention_time = retention_time end def subscribe(topic) @topics.add(topic) @cluster.add_target_topics([topic]) end def subscribed_partitions @assigned_partitions.select { |topic, _| @topics.include?(topic) } end def assigned_to?(topic, partition) subscribed_partitions.fetch(topic, []).include?(partition) end def member? !@generation_id.nil? end def join if @topics.empty? raise Kafka::Error, "Cannot join group without at least one topic subscription" end join_group synchronize rescue NotCoordinatorForGroup @logger.error "Failed to find coordinator for group `#{@group_id}`; retrying..." sleep 1 @coordinator = nil retry rescue ConnectionError @logger.error "Connection error while trying to join group `#{@group_id}`; retrying..." sleep 1 @cluster.mark_as_stale! @coordinator = nil retry end def leave @logger.info "Leaving group `#{@group_id}`" # Having a generation id indicates that we're a member of the group. @generation_id = nil @instrumenter.instrument("leave_group.consumer", group_id: @group_id) do coordinator.leave_group(group_id: @group_id, member_id: @member_id) end rescue ConnectionError end def fetch_offsets coordinator.fetch_offsets( group_id: @group_id, topics: @assigned_partitions, ) end def commit_offsets(offsets) response = coordinator.commit_offsets( group_id: @group_id, member_id: @member_id, generation_id: @generation_id, offsets: offsets, retention_time: @retention_time ) response.topics.each do |topic, partitions| partitions.each do |partition, error_code| Protocol.handle_error(error_code) end end rescue Kafka::Error => e @logger.error "Error committing offsets: #{e}" raise OffsetCommitError, e end def heartbeat @logger.debug "Sending heartbeat..." @instrumenter.instrument('heartbeat.consumer', group_id: @group_id, topic_partitions: @assigned_partitions) do response = coordinator.heartbeat( group_id: @group_id, generation_id: @generation_id, member_id: @member_id, ) Protocol.handle_error(response.error_code) end rescue ConnectionError, UnknownMemberId, IllegalGeneration => e @logger.error "Error sending heartbeat: #{e}" raise HeartbeatError, e rescue RebalanceInProgress => e @logger.warn "Error sending heartbeat: #{e}" raise HeartbeatError, e rescue NotCoordinatorForGroup @logger.error "Failed to find coordinator for group `#{@group_id}`; retrying..." sleep 1 @coordinator = nil retry end def to_s "[#{@group_id}] {" + assigned_partitions.map { |topic, partitions| partition_str = partitions.size > 5 ? "#{partitions[0..4].join(', ')}..." 
: partitions.join(', ') "#{topic}: #{partition_str}" }.join('; ') + '}:' end private def join_group @logger.info "Joining group `#{@group_id}`" @instrumenter.instrument("join_group.consumer", group_id: @group_id) do response = coordinator.join_group( group_id: @group_id, session_timeout: @session_timeout, rebalance_timeout: @rebalance_timeout, member_id: @member_id, topics: @topics, protocol_name: @assignor.protocol_name, user_data: @assignor.user_data, ) Protocol.handle_error(response.error_code) @generation_id = response.generation_id @member_id = response.member_id @leader_id = response.leader_id @members = response.members end @logger.info "Joined group `#{@group_id}` with member id `#{@member_id}`" rescue UnknownMemberId @logger.error "Failed to join group; resetting member id and retrying in 1s..." @member_id = "" sleep 1 retry rescue CoordinatorLoadInProgress @logger.error "Coordinator broker still loading, retrying in 1s..." sleep 1 retry end def group_leader? @member_id == @leader_id end def synchronize group_assignment = {} if group_leader? @logger.info "Chosen as leader of group `#{@group_id}`" group_assignment = @assignor.assign( members: @members, topics: @topics, ) end @instrumenter.instrument("sync_group.consumer", group_id: @group_id) do response = coordinator.sync_group( group_id: @group_id, generation_id: @generation_id, member_id: @member_id, group_assignment: group_assignment, ) Protocol.handle_error(response.error_code) response.member_assignment.topics.each do |topic, assigned_partitions| @logger.info "Partitions assigned for `#{topic}`: #{assigned_partitions.join(', ')}" end @assigned_partitions.replace(response.member_assignment.topics) end end def coordinator @coordinator ||= @cluster.get_group_coordinator(group_id: @group_id) rescue CoordinatorNotAvailable @logger.error "Group coordinator not available for group `#{@group_id}`" sleep 1 retry end end end
28.229075
140
0.633115
21d6986ef09ff620a735a108334b2f4d03b6deff
1,259
########################################################################################################
########################################################################################################
class NicController < ApplicationController

  ######################################################################################################
  #### mixins

  include Aethyr::Mixins::MonomodelControllerHelper
  include Aethyr::Aln::SupporterControllerHelper

  ######################################################################################################
  #### default layout

  layout 'agent'

  ######################################################################################################
  #### supported models

  has_supported :models => :network_interface_terminations, :sort_column => 'name'

  ######################################################################################################
  #### filters

  before_filter :find_nic, :only => [:show]
  before_filter :find_network_interface_terminations, :only=>[:show]
  before_filter :set_root_page_of_click_path, :only => [:show]

  ########################################################################################################

  protected

end
44.964286
104
0.305798
082818c72b1e0005343026bb260832924632ada8
225
class UpdateBackpackIndices < ActiveRecord::Migration[5.2]
  def change
    remove_index :backpacks, :user_id
    add_index :backpacks, :user_id, unique: true
    add_index :backpacks, :storage_app_id, unique: true
  end
end
28.125
58
0.751111
1a63650884458053427a66e56d0c2e06ac0a48d5
1,393
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "request_signature"

Gem::Specification.new do |spec|
  spec.name          = "request_signature"
  spec.version       = RequestSignature::VERSION
  spec.authors       = ["Georges Gabereau"]
  spec.email         = ["[email protected]"]

  spec.summary       = %q{A gem that signs API requests and verifies received requests.}
  spec.description   = %q{A gem that uniquely signs API requests so that a receiver can be sure they came from you. It can also verify signed requests, assuming they were signed in the same way.}
  spec.homepage      = "https://github.com/multiplegeorges/request_signature"
  spec.license       = "MIT"

  # Specify which files should be added to the gem when it is released.
  # The `git ls-files -z` loads the files in the RubyGem that have been added into git.
  spec.files         = Dir.chdir(File.expand_path('..', __FILE__)) do
    `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
  end
  spec.bindir        = "exe"
  spec.executables   = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ["lib"]

  spec.add_development_dependency "bundler", "~> 1.16"
  spec.add_development_dependency "rake", "~> 10.0"
  spec.add_development_dependency "minitest", "~> 5.0"
end
44.935484
115
0.668342
395599312dbb873ac8afcb18e2a71a480c949067
681
# frozen_string_literal: true require "rails_helper" describe Eve::UpdateAncestriesJob do it { should be_an(ApplicationJob) } it { expect(described_class.queue_name).to eq("default") } describe "#perform" do before do # # LanguageMapper::LANGUAGES.each_key do |locale| # Eve::AncestriesImporter.new(locale).import # end # LanguageMapper::LANGUAGES.each_key do |locale| expect(Eve::AncestriesImporter).to receive(:new).with(locale) do double.tap do |a| expect(a).to receive(:import) end end end end specify { expect { subject.perform }.not_to raise_error } end end
23.482759
72
0.640235
1d1149bfa52b69c31769eb010602598e662f7497
2,188
class Openrct2 < Formula
  desc "Open source re-implementation of RollerCoaster Tycoon 2"
  homepage "https://openrct2.io/"
  url "https://github.com/OpenRCT2/OpenRCT2.git",
      :tag => "v0.2.1",
      :revision => "8ac731e2124ecfb43f592c7f1cc5dd6902d5d83f"
  revision 1
  head "https://github.com/OpenRCT2/OpenRCT2.git", :branch => "develop"

  bottle do
    cellar :any
    sha256 "d6c869c40a8e2b130b974fcd904d023480a801ec189f2ded6f213199d2f6f2d8" => :mojave
    sha256 "23a8a95a75981e14cbbcdc8d0f2fde2a0483353a0ebe793e1e0ac5e4675a7019" => :high_sierra
  end

  depends_on "cmake" => :build
  depends_on "pkg-config" => :build
  depends_on "freetype" # for sdl2_ttf
  depends_on "icu4c"
  depends_on "jansson"
  depends_on "libpng"
  depends_on "libzip"
  depends_on :macos => :high_sierra # "missing: Threads_FOUND" on Sierra
  depends_on "openssl"
  depends_on "sdl2"
  depends_on "sdl2_ttf"
  depends_on "speexdsp"

  resource "title-sequences" do
    url "https://github.com/OpenRCT2/title-sequences/releases/download/v0.1.2/title-sequence-v0.1.2.zip",
        :using => :nounzip
    sha256 "dcb1648739b351e857e2d19fed1626bec561d5e9f4b49201568f42c475ee7e61"
  end

  resource "objects" do
    url "https://github.com/OpenRCT2/objects/releases/download/v1.0.6/objects.zip",
        :using => :nounzip
    sha256 "714257dcf6dc4af8761ecda1b313bfa63b3ef93ab7e46572a3e499fe4bf26e02"
  end

  def install
    # Avoid letting CMake download things during the build process.
    (buildpath/"data/title").install resource("title-sequences")
    (buildpath/"data/object").install resource("objects")
    tversion = resource("title-sequences").version
    mv buildpath/"data/title/title-sequence-v#{tversion}.zip", "title-sequences.zip"

    mkdir "build" do
      system "cmake", "..", *std_cmake_args
      system "make", "install"
    end

    # By default macOS build only looks up data in app bundle Resources
    libexec.install bin/"openrct2"
    (bin/"openrct2").write <<~EOS
      #!/bin/bash
      exec "#{libexec}/openrct2" "$@" "--openrct-data-path=#{pkgshare}"
    EOS
  end

  test do
    assert_match "OpenRCT2, v#{version}", shell_output("#{bin}/openrct2 -v")
  end
end
33.661538
105
0.710695
26c51703a67623d89ac9417c7a92d5b45d442dcc
2,548
module Spina::Shop
  module Api
    class RecountController < ApiController
      before_action :set_product

      skip_before_action :verify_authenticity_token, only: [:create]

      def create
        if recount_params[:stock_levels].any?{|s| s[:stock_level].present?}
          original_stock_level = @product.stock_level.to_i
          recount_stock_level = recount_params[:stock_levels].inject(0){|t,i| t = t + i[:stock_level].to_i} + @reserved
          recount_difference = recount_stock_level - original_stock_level

          @product.transaction do
            # Reset stock to 0
            ChangeStockLevel.new(@product, {
              adjustment: @product.stock_level * -1,
              description: "Recount - Reset",
              actor: params[:user]
            }).save

            # Add new stock for each in recount_params[:stock_levels]
            stock_levels = recount_params[:stock_levels].sort_by do |stock_level|
              Date.parse("01-#{stock_level[:expiration_month]}-#{stock_level[:expiration_year]}")
            end.each do |stock_level|
              next if stock_level[:stock_level].to_i.zero? # Skip any zero stock levels
              ChangeStockLevel.new(@product, {
                adjustment: stock_level[:stock_level],
                description: "Recount",
                expiration_year: stock_level[:expiration_year],
                expiration_month: stock_level[:expiration_month],
                actor: params[:user]
              }).save
            end

            # Re-add reserved stock
            if @reserved != 0
              ChangeStockLevel.new(@product, {
                adjustment: @reserved,
                description: "Recount - Reserved",
                actor: params[:user]
              }).save
            end

            # Save as recount
            @product.recounts.create({
              difference: recount_difference,
              actor: params[:user]
            })
          end
        else
          @product.recounts.create({difference: 0, actor: params[:user]})
        end

        @product.cache_stock_level

        head :ok
      end

      private

        def set_product
          @product = Product.find(params[:product_id])
          @reserved = @product.stock_level_adjustments.reserved.sum(:adjustment)
        end

        def recount_params
          params.require(:product).permit(stock_levels: [:stock_level, :expiration_month, :expiration_year])
        end
    end
  end
end
34.432432
119
0.567896
913cec9b965d432a36129c93b98184d6316c060e
1,195
require "test_helper" class OrderCustomizationSerializerTest < ActiveSupport::TestCase test "serializes correctly with deep flag off" do Random.stub :uuid, "asd" do OrderCustomizationSerializer.serialize(OrderCustomization.all).each do |serialized| order_customized_area = OrderCustomization.find(serialized[:id]) assert_equal serialized[:id], order_customized_area.id assert_equal serialized[:customization], CustomizationSerializer.serialize(order_customized_area.customization) assert_nil serialized[:customizations] end end end test "serializes correctly with deep flag on" do Random.stub :uuid, "asd" do OrderCustomizationSerializer.serialize(OrderCustomization.all, deep: true).each do |serialized| order_customized_area = OrderCustomization.find(serialized[:id]) assert_equal serialized[:id], order_customized_area.id assert_equal serialized[:customization], CustomizationSerializer.serialize(order_customized_area.customization) assert_equal serialized[:customizations], OrderCustomizedAreaSerializer.serialize(order_customized_area.children, deep: true) end end end end
42.678571
133
0.770711
edb9cb68ecb0524f81b5d29e066f946c0fdd92d2
861
require 'spec_helper'

describe 'postgresql::server::database', :type => :define do
  let :facts do
    {
      :osfamily => 'Debian',
      :operatingsystem => 'Debian',
      :operatingsystemrelease => '6.0',
      :kernel => 'Linux',
      :concat_basedir => tmpfilename('contrib'),
      :id => 'root',
      :path => '/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin',
    }
  end

  let :title do
    'test'
  end

  let :pre_condition do
    "class {'postgresql::server':}"
  end

  it { is_expected.to contain_postgresql__server__database('test') }
  it { is_expected.to contain_postgresql_psql("Check for existence of db 'test'") }

  context "with comment set to 'test comment'" do
    let (:params) {{ :comment => 'test comment' }}

    it { is_expected.to contain_postgresql_psql("COMMENT ON DATABASE test IS 'test comment'") }
  end
end
26.90625
95
0.636469
26d6de10941bf11130aa6d8168a8f218c3930513
608
require 'fileutils'

desc "Create nondigest versions of all ckeditor digest assets"
task "assets:precompile" do
  fingerprint = /\-[0-9a-f]{32}\./

  for file in Dir["public/assets/ckeditor/**/*"]
    next unless file =~ fingerprint
    nondigest = file.sub fingerprint, '.'
    FileUtils.cp file, nondigest, verbose: true
  end
end

namespace :rich do
  desc "Re-generate image styles"
  task :refresh_assets => :environment do

    # re-generate images
    ENV['CLASS'] = "Rich::RichFile"
    Rake::Task["paperclip:refresh"].invoke

    # re-generate uri cache
    Rich::RichFile.find_each(&:save)

  end
end
25.333333
62
0.6875
f7b619add0e65ab5d43cef22fd8ac8ab213cf534
3,643
# frozen_string_literal: true

require 'ipaddr'

def logger
  Rails.logger
end

# Harvest a Web Logg, and save to mysql
#
class HarvestWeblogs
  # Util Class that parses a single line
  class LineRecord
    attr_accessor :agent, :event, :event_time, :ip, :method, :path, :pid, :status

    # parse the line record
    def initialize(line)
      fields = line.split
      @ip = fields[0]
      @event = nil
      @status = fields[8]
      @method = fields[5].sub('"', '')
      @path = fields[6].sub(/\?.*/, '')
      @event_time = parse_time(fields[3] + fields[4])
      @pid = nil
      @agent = line.split('"')[5]
    end

    # save the record using the active record framework
    def save
      this_event = FedoraAccessEvent.new
      this_event.pid = pid
      this_event.agent = agent.truncate(254)
      this_event.event = event
      this_event.location = ip_format(ip)
      this_event.event_time = event_time
      this_event.save
    end

    def ip_format(ip)
      IPAddr.new(ip).mask(24).to_s.split('/')[0]
    end

    private

    def parse_time(s)
      s = s.sub('[', '').sub(']', '')
      DateTime.strptime(s, '%d/%b/%Y:%H:%M:%S%z')
    end
  end

  # method returns false if line is not to be logged, true otherwise
  #
  def self.handle_one_record(record)
    return unless record.status == '200'
    return unless record.method == 'GET'
    return if record.agent =~ /(bot|spider|yahoo)/i

    # since all paths are rooted, the first index is always ""
    id = nil
    id = set_pid_event(record, id)
    return if id.nil?
    record.pid = id

    # we made it! save the record
    record.save
  end

  def self.set_pid_event(record, id)
    p = record.path.split('/')
    case p[1]
    when 'downloads'
      id = check_is_download(record, p)
    when 'files', 'citations', 'show', 'collections'
      id = check_is_view(record, p)
    when 'concern'
      id = check_is_concern(record, p)
    end
    id
  end

  def self.check_is_view(record, p)
    record.event = 'view'
    p[2]
  end

  def self.check_is_download(record, p)
    return nil if record.path.index('thumbnail') # skip thumbnail downloads
    record.event = 'download'
    p[2]
  end

  def self.check_is_concern(record, p)
    return nil if record.path.index('new') # don't record /concern/:class/new
    record.event = 'view'
    p[3]
  end

  # Opens a gzipped file, and reads all of the lines
  def self.parse_file_gz(fname)
    Zlib::GzipReader.open(fname).each_line do |line|
      record = LineRecord.new(line)
      handle_one_record(record)
    end
  end

  # Ingest all *.gz files in the given directory
  # the WEBLOG_STATEFILE, if present, list the files
  # arleady harvested- we need not do these again
  #
  def self.harvest_directory(config)
    # keep two lists so files which are deleted are removed
    # from the state_fname file
    past_files = []
    ingested_files = []
    past_files = JSON.parse(File.read(config['WEBLOG_STATEFILE'])) if config['WEBLOG_STATEFILE'] && File.exist?(config['WEBLOG_STATEFILE'])
    parse_files(config, past_files, ingested_files)
    generate_ingested_filelist(config, ingested_files)
  end

  def self.parse_files(config, past_files, ingested_files)
    Dir.glob(File.join(config['LOGDIR'], config['LOGFILE_MASK'])) do |fname|
      ingested_files << fname
      next if past_files.include?(fname)
      parse_file_gz(fname)
    end
  end

  def self.generate_ingested_filelist(config, ingested_files)
    return unless config['WEBLOG_STATEFILE']
    File.open(config['WEBLOG_STATEFILE'], 'w') do |f|
      f.write(JSON.generate(ingested_files))
    end
  end
end
24.782313
139
0.648641
e241aa77805abbb9ec861f3d94bb3d019d80210d
2,669
# frozen_string_literal: true

require 'fast_spec_helper'

RSpec.describe Gitlab::APIAuthentication::Builder do
  describe '#build' do
    shared_examples 'builds the correct result' do |token_type:, sent_through:, builds:|
      context "with #{token_type.size} token type(s) and #{sent_through.size} sent through(s)" do
        it 'works when passed together' do
          strategies = described_class.new.build { |allow| allow.token_types(*token_type).sent_through(*sent_through) }
          expect(strategies).to eq(builds)
        end

        it 'works when token types are passed separately' do
          strategies = described_class.new.build { |allow| token_type.each { |t| allow.token_types(t).sent_through(*sent_through) } }
          expect(strategies).to eq(builds)
        end

        it 'works when sent throughs are passed separately' do
          strategies = described_class.new.build { |allow| sent_through.each { |s| allow.token_types(*token_type).sent_through(s) } }
          expect(strategies).to eq(builds)
        end

        it 'works when token types and sent throughs are passed separately' do
          strategies = described_class.new.build { |allow| token_type.each { |t| sent_through.each { |s| allow.token_types(t).sent_through(s) } } }
          expect(strategies).to eq(builds)
        end
      end
    end

    it_behaves_like 'builds the correct result', token_type: [:pat], sent_through: [:basic], builds: { basic: [:pat] }
    it_behaves_like 'builds the correct result', token_type: [:pat], sent_through: [:basic, :oauth], builds: { basic: [:pat], oauth: [:pat] }
    it_behaves_like 'builds the correct result', token_type: [:pat, :job], sent_through: [:basic], builds: { basic: [:pat, :job] }
    it_behaves_like 'builds the correct result', token_type: [:pat, :job], sent_through: [:basic, :oauth], builds: { basic: [:pat, :job], oauth: [:pat, :job] }

    context 'with a complex auth strategy' do
      it 'builds the correct result' do
        strategies = described_class.new.build do |allow|
          allow.token_types(:pat, :job, :deploy).sent_through(:http_basic, :oauth)
          allow.token_types(:pat).sent_through(:http_private, :query_private)
          allow.token_types(:oauth2).sent_through(:http_bearer, :query_access)
        end

        expect(strategies).to eq({
          http_basic: [:pat, :job, :deploy],
          oauth: [:pat, :job, :deploy],
          http_private: [:pat],
          query_private: [:pat],
          http_bearer: [:oauth2],
          query_access: [:oauth2]
        })
      end
    end
  end
end
34.662338
147
0.630573
61bbad4bfcbdf502a61face433620dfb8c57ccd9
1,050
##########################################################################
# Copyright 2016 ThoughtWorks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##########################################################################

module ApiV2
  module Config
    class ApprovalRepresenter < ApiV2::BaseRepresenter
      alias_method :approval, :represented

      error_representer

      property :type
      property :auth_config, as: :authorization, decorator: ApiV2::Config::StageAuthorizationRepresenter, class: AuthConfig
    end
  end
end
35
123
0.645714
87628a707c83f002473755a684d411aaf184014f
7,150
describe MiqAeEngine::MiqAeMethod do
  describe ".invoke_inline_ruby (private)" do
    let(:workspace) do
      Class.new do
        attr_accessor :invoker

        # rubocop:disable Style/SingleLineMethods, Style/EmptyLineBetweenDefs
        def persist_state_hash; end
        def disable_rbac; end
        def current_method; "/my/automate/method"; end
        # rubocop:enable Style/SingleLineMethods, Style/EmptyLineBetweenDefs
      end.new
    end

    let(:aem) { double("AEM", :data => script, :fqname => "/my/automate/method") }
    let(:obj) { double("OBJ", :workspace => workspace) }
    let(:inputs) { [] }

    subject { described_class.send(:invoke_inline_ruby, aem, obj, inputs) }

    context "with a script that ends normally" do
      let(:script) do
        <<-RUBY
          puts 'Hi from puts'
        RUBY
      end

      it "logs and returns the correct exit status" do
        allow($miq_ae_logger).to receive(:info).and_call_original
        expect($miq_ae_logger).to receive(:info).with("Method exited with rc=MIQ_OK").at_least(:once)
        expect($miq_ae_logger).to_not receive(:error)

        expect(subject).to eq(0)
      end
    end

    context "with a script that raises" do
      let(:script) do
        <<-RUBY
          puts 'Hi from puts'
          raise
        RUBY
      end

      it "logs the error with file and line numbers changed in the stacktrace, and raises an exception" do
        allow($miq_ae_logger).to receive(:error).and_call_original
        expect($miq_ae_logger).to receive(:error).with("Method STDERR: /my/automate/method:2:in `<main>': unhandled exception").at_least(:once)

        expect { subject }.to raise_error(MiqAeException::UnknownMethodRc)
      end
    end

    context "with a script that raises in a nested method" do
      let(:script) do
        <<-RUBY
          def my_method
            raise
          end

          puts 'Hi from puts'
          my_method
        RUBY
      end

      it "logs the error with file and line numbers changed in the stacktrace, and raises an exception" do
        allow($miq_ae_logger).to receive(:error).and_call_original
        expect($miq_ae_logger).to receive(:error).with("Method STDERR: /my/automate/method:2:in `my_method': unhandled exception").at_least(:once)
        expect($miq_ae_logger).to receive(:error).with("Method STDERR: \tfrom /my/automate/method:6:in `<main>'").at_least(:once)

        expect { subject }.to raise_error(MiqAeException::UnknownMethodRc)
      end
    end

    context "with a script that exits" do
      let(:script) do
        <<-RUBY
          puts 'Hi from puts'
          exit
        RUBY
      end

      it "logs and returns the correct exit status" do
        allow($miq_ae_logger).to receive(:info).and_call_original
        expect($miq_ae_logger).to receive(:info).with("Method exited with rc=MIQ_OK").at_least(:once)
        expect($miq_ae_logger).to_not receive(:error)

        expect(subject).to eq(0)
      end
    end

    context "with a script that exits with an unknown return code" do
      let(:script) do
        <<-RUBY
          puts 'Hi from puts'
          exit 1234
        RUBY
      end

      it "does not log but raises an exception" do
        expect($miq_ae_logger).to_not receive(:error)

        expect { subject }.to raise_error(MiqAeException::UnknownMethodRc)
      end
    end

    context "with a script that exits MIQ_OK" do
      let(:script) do
        <<-RUBY
          puts 'Hi from puts'
          exit MIQ_OK
        RUBY
      end

      it "logs and returns the correct exit status" do
        allow($miq_ae_logger).to receive(:info).and_call_original
        expect($miq_ae_logger).to receive(:info).with("Method exited with rc=MIQ_OK").at_least(:once)
        expect($miq_ae_logger).to_not receive(:error)

        expect(subject).to eq(0)
      end
    end

    context "with a script that exits MIQ_WARN" do
      let(:script) do
        <<-RUBY
          puts 'Hi from puts'
          exit MIQ_WARN
        RUBY
      end

      it "logs and returns the correct exit status" do
        allow($miq_ae_logger).to receive(:warn).and_call_original
        expect($miq_ae_logger).to receive(:warn).with("Method exited with rc=MIQ_WARN").at_least(:once)
        expect($miq_ae_logger).to_not receive(:error)

        expect(subject).to eq(4)
      end
    end

    context "with a script that exits MIQ_STOP" do
      let(:script) do
        <<-RUBY
          puts 'Hi from puts'
          exit MIQ_STOP
        RUBY
      end

      it "does not log but raises an exception" do
        expect($miq_ae_logger).to_not receive(:error)

        expect { subject }.to raise_error(MiqAeException::StopInstantiation)
      end
    end

    context "with a script that exits MIQ_ABORT" do
      let(:script) do
        <<-RUBY
          puts 'Hi from puts'
          exit MIQ_ABORT
        RUBY
      end

      it "does not log but raises an exception" do
        expect($miq_ae_logger).to_not receive(:error)

        expect { subject }.to raise_error(MiqAeException::AbortInstantiation)
      end
    end

    context "with a script that does I/O" do
      let(:script) do
        <<-RUBY
          puts 'Hi from puts'
          STDOUT.puts 'Hi from STDOUT.puts'
          $stdout.puts 'Hi from $stdout.puts'
          STDERR.puts 'Hi from STDERR.puts'
          $stderr.puts 'Hi from $stderr.puts'
          $evm.logger.sleep
        RUBY
      end

      it "writes to the logger synchronously" do
        logger_stub = Class.new do
          attr_reader :expected_messages

          def initialize
            @expected_messages = [
              "Method STDOUT: Hi from puts",
              "Method STDOUT: Hi from STDOUT.puts",
              "Method STDOUT: Hi from $stdout.puts",
              "Method STDERR: Hi from STDERR.puts",
              "Method STDERR: Hi from $stderr.puts",
            ]
          end

          def sleep
            # Raise if all messages have not already been written before a method like sleep runs.
            raise unless expected_messages == []
          end

          def verify_next_message(message)
            expected = expected_messages.shift
            return if message == expected

            puts "Expected: #{expected.inspect}, Got: #{message.inspect}"
            raise
          end

          alias_method :error, :verify_next_message
          alias_method :info,  :verify_next_message
        end.new

        svc = MiqAeMethodService::MiqAeService.new(workspace, [], logger_stub)
        expect(MiqAeMethodService::MiqAeService).to receive(:new).with(workspace, []).and_return(svc)

        expect($miq_ae_logger).to receive(:info).with("<AEMethod [/my/automate/method]> Starting ").ordered
        expect(logger_stub).to receive(:sleep).and_call_original.ordered
        expect($miq_ae_logger).to receive(:info).with("<AEMethod [/my/automate/method]> Ending").ordered
        expect($miq_ae_logger).to receive(:info).with("Method exited with rc=MIQ_OK").ordered

        expect(subject).to eq(0)

        expect(logger_stub.expected_messages).to eq([])
      end
    end
  end
end
31.919643
146
0.615105
e924a0bfa4c1574038a73416a2828cdcff879d39
63
FactoryBot.define do
  factory :user_setting do
  end
end
10.5
26
0.714286
1d7b73f6fd021acc4e62dbbcc5896c26300450cb
255
class CreateTuesdayReaders < ActiveRecord::Migration
  def change
    create_table :tuesday_readers do |t|
      t.references :person
      t.string :email

      t.timestamps
    end
    add_index :tuesday_readers, [:person_id], :unique => true
  end
end
23.181818
61
0.694118
d5e9669a916fa8ebd9695197f0a1cd72fdb4af04
1,995
# mdJson 2.0 writer tests - vector representation

# History:
#  Stan Smith 2018-06-08 refactor to use mdJson construction helpers
#  Stan Smith 2017-03-15 original script

require 'adiwg-mdtranslator'
require_relative '../../helpers/mdJson_hash_objects'
require_relative '../../helpers/mdJson_hash_functions'
require_relative 'mdjson_test_parent'

class TestWriterMdJsonVector < TestWriterMdJsonParent

   # instance classes needed in script
   TDClass = MdJsonHashWriter.new

   # build mdJson test file in hash
   mdHash = TDClass.base

   hSpaceRef = TDClass.build_vectorRepresentation('level one')
   TDClass.add_vectorObject(hSpaceRef,'type one', 1)
   TDClass.add_vectorObject(hSpaceRef,'type two', 2)

   mdHash[:metadata][:resourceInfo][:spatialRepresentation] = []
   mdHash[:metadata][:resourceInfo][:spatialRepresentation] << { vectorRepresentation: hSpaceRef }

   @@mdHash = mdHash

   def test_schema_vector

      hTest = @@mdHash[:metadata][:resourceInfo][:spatialRepresentation][0][:vectorRepresentation]
      errors = TestWriterMdJsonParent.testSchema(hTest, 'vectorRepresentation.json')

      assert_empty errors

   end

   def test_complete_vector

      metadata = ADIWG::Mdtranslator.translate(
         file: @@mdHash.to_json, reader: 'mdJson', validate: 'normal',
         writer: 'mdJson', showAllTags: false)

      expect = JSON.parse(@@mdHash.to_json)
      expect = expect['metadata']['resourceInfo']['spatialRepresentation']
      got = JSON.parse(metadata[:writerOutput])
      got = got['metadata']['resourceInfo']['spatialRepresentation']

      assert metadata[:writerPass]
      assert metadata[:readerStructurePass]
      assert metadata[:readerValidationPass]
      assert metadata[:readerExecutionPass]
      assert_empty metadata[:writerMessages]
      assert_empty metadata[:readerStructureMessages]
      assert_empty metadata[:readerValidationMessages]
      assert_empty metadata[:readerExecutionMessages]

      assert_equal expect, got

   end

end
33.25
98
0.737845
bf6f963c0dc9fbe6218a43c9942eef889309efa1
1,749
class SNail < Formula
  desc "Fork of Heirloom mailx"
  homepage "https://www.sdaoden.eu/code.html"
  url "https://www.sdaoden.eu/downloads/s-nail-14.9.3.tar.gz"
  sha256 "9048abe94c8b732ddefcd70b9f6052da16977de76b07598790e4c763e97f911d"

  bottle do
    sha256 "bab3491cd174beab7597ac7009237def73badd8e0b403712d1970f1c7c6a2978" => :high_sierra
    sha256 "a60e73ba770df3fc445e4d08930c44f6754e144a605cd6e359566dc3f37b2533" => :sierra
    sha256 "ae1b9a1f45d5719dd2aaf3eeddee2391b86dc0691f4d4aadbd5f79f829ef8b91" => :el_capitan
    sha256 "d8241de6c64e399204671dc5b11018c3fdb3e08d55a11b73f401567d84f92155" => :yosemite
  end

  depends_on "libidn"
  depends_on "openssl"

  def install
    mv "INSTALL", "INSTALL.txt" # remove for > 14.9.3

    system "make", "OPT_AUTOCC=no", "CC=#{ENV.cc}", "cc_maxtopt=1",
                   "OPT_NOMEMBDBG=1",
                   "C_INCLUDE_PATH=#{Formula["openssl"].opt_include}",
                   "LDFLAGS=-L#{Formula["openssl"].opt_lib}",
                   "VAL_PREFIX=#{prefix}", "OPT_DOTLOCK=no", "config"

    system "make", "build"
    system "make", "install"
  end

  test do
    ENV["SOURCE_DATE_EPOCH"] = "844221007"
    date1 = Utils.popen_read("date", "-r", "844221007", "+%a %b %e %T %Y")
    date2 = Utils.popen_read("date", "-r", "844221007", "+%a, %d %b %Y %T %z")
    expected = <<-EOS.undent
      From reproducible_build #{date1.chomp}
      Date: #{date2.chomp}
      To:
      User-Agent: s-nail reproducible_build

      Hello oh you Hammer2!
    EOS
    input = "Hello oh you Hammer2!\n"
    output = pipe_output("#{bin}/s-nail -#:/ -Sexpandaddr -", input, 0)
    assert_equal expected, output.chomp
  end
end
33.634615
93
0.635792