Dataset columns (type, minimum and maximum observed value; for the string columns the range refers to string length):

  hexsha             stringlengths   40     40
  size               int64           2      1.01M
  content            stringlengths   2      1.01M
  avg_line_length    float64         1.5    100
  max_line_length    int64           2      1k
  alphanum_fraction  float64         0.25   1
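The columns above are the per-file statistics commonly used to filter code corpora (file size, average and maximum line length, fraction of alphanumeric characters). As a rough illustration only, the minimal Ruby sketch below applies such filters, assuming the rows are exported as JSON Lines with exactly these column names; the file name ruby_files.jsonl and the thresholds are placeholders taken from the ranges shown in the schema, not part of the original data.

require "json"

# Hypothetical export of the rows below, one JSON object per line (an assumption,
# not part of the original dump).
DUMP = "ruby_files.jsonl"

kept = File.foreach(DUMP).filter_map do |line|
  row = JSON.parse(line)
  # Keep files whose statistics fall inside the ranges listed in the schema header.
  next unless row["avg_line_length"].between?(1.5, 100)
  next unless row["max_line_length"] <= 1_000
  next unless row["alphanum_fraction"] >= 0.25
  row
end

puts "kept #{kept.size} of the exported files"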
hexsha: acfe92dad69a0acb1eb89b4bca1623e73dbb264c
size: 611

class FormattedAnecdoteContent
  include ActionView::Helpers::TagHelper

  def initialize text
    @anecdote = JSON.parse(text)
  end

  def html
    (content_tag(:p, content_tag(:strong, 'Challenge')) +
      content_tag(:p, @anecdote['introduction']) +
      content_tag(:p, content_tag(:strong, 'Exploration of options')) +
      content_tag(:p, @anecdote['insight']) +
      content_tag(:p, content_tag(:strong, 'Actions')) +
      content_tag(:p, @anecdote['actions']) +
      content_tag(:p, content_tag(:strong, 'Impact and evaluation')) +
      content_tag(:p, @anecdote['effect'])).html_safe
  end
end

avg_line_length: 32.157895
max_line_length: 71
alphanum_fraction: 0.667758
hexsha: 1a72e8885d0c5332da9f7521a59f6125ca5e8ef9
size: 5,218
require "rails_helper" describe "GET /reporting" do let(:expected) do { providers: { total: { all: 0, non_training_providers: 0, training_providers: 0, }, training_providers: { findable_total: { open: 0, closed: 0, }, accredited_body: { open: { accredited_body: 0, not_an_accredited_body: 0, }, closed: { accredited_body: 0, not_an_accredited_body: 0, }, }, provider_type: { open: { scitt: 0, lead_school: 0, university: 0, unknown: 0, invalid_value: 0, }, closed: { scitt: 0, lead_school: 0, university: 0, unknown: 0, invalid_value: 0, }, }, region_code: { open: { no_region: 0, london: 0, south_east: 0, south_west: 0, wales: 0, west_midlands: 0, east_midlands: 0, eastern: 0, north_west: 0, yorkshire_and_the_humber: 0, north_east: 0, scotland: 0, }, closed: { no_region: 0, london: 0, south_east: 0, south_west: 0, wales: 0, west_midlands: 0, east_midlands: 0, eastern: 0, north_west: 0, yorkshire_and_the_humber: 0, north_east: 0, scotland: 0, }, }, }, }, courses: { total: { all: 0, non_findable: 0, all_findable: 0, }, findable_total: { open: 0, closed: 0, }, provider_type: { open: { scitt: 0, lead_school: 0, university: 0, unknown: 0, invalid_value: 0 }, closed: { scitt: 0, lead_school: 0, university: 0, unknown: 0, invalid_value: 0 }, }, program_type: { open: { higher_education_programme: 0, school_direct_training_programme: 0, school_direct_salaried_training_programme: 0, scitt_programme: 0, pg_teaching_apprenticeship: 0 }, closed: { higher_education_programme: 0, school_direct_training_programme: 0, school_direct_salaried_training_programme: 0, scitt_programme: 0, pg_teaching_apprenticeship: 0 }, }, study_mode: { open: { full_time: 0, part_time: 0, full_time_or_part_time: 0 }, closed: { full_time: 0, part_time: 0, full_time_or_part_time: 0 }, }, qualification: { open: { qts: 0, pgce_with_qts: 0, pgde_with_qts: 0, pgce: 0, pgde: 0 }, closed: { qts: 0, pgce_with_qts: 0, pgde_with_qts: 0, pgce: 0, pgde: 0 }, }, is_send: { open: { yes: 0, no: 0 }, closed: { yes: 0, no: 0 }, }, subject: { open: Subject.active.each_with_index.map { |sub, _i| x = {}; x[sub.subject_name] = 0; x }.reduce({}, :merge), closed: Subject.active.each_with_index.map { |sub, _i| x = {}; x[sub.subject_name] = 0; x }.reduce({}, :merge), }, }, publish: { users: { total: { all: 0, active_users: 0, non_active_users: 0, }, recent_active_users: 0, }, providers: { total: { all: 0, providers_with_non_active_users: 0, providers_with_recent_active_users: 0, }, with_1_recent_active_users: 0, with_2_recent_active_users: 0, with_3_recent_active_users: 0, with_4_recent_active_users: 0, with_more_than_5_recent_active_users: 0, }, courses: { total_updated_recently: 0, updated_non_findable_recently: 0, updated_findable_recently: 0, updated_open_courses_recently: 0, updated_closed_courses_recently: 0, created_recently: 0, }, }, allocations: { previous: { total: { allocations: 0, distinct_accredited_bodies: 0, distinct_providers: 0, number_of_places: 0, }, }, current: { total: { allocations: 0, distinct_accredited_bodies: 0, distinct_providers: 0, number_of_places: 0, }, }, }, }.with_indifferent_access end let(:previous_recruitment_cycle) { find_or_create(:recruitment_cycle, :previous) } it "returns status success" do previous_recruitment_cycle get "/reporting" expect(response.status).to eq(200) expect(JSON.parse(response.body)).to eq(expected) end end
avg_line_length: 27.755319
max_line_length: 121
alphanum_fraction: 0.471253
hexsha: 5d0a5379a2ac4098ae7459f25fcdd8f59fcbc259
size: 1,136

##
# This file is part of WhatWeb and may be subject to
# redistribution and commercial restrictions. Please see the WhatWeb
# web site for more information on licensing and terms of use.
# http://www.morningstarsecurity.com/research/whatweb
##
# Version 0.3
# 2016-04-17
# Andrew Horton
# Added website parameter and description
##
# Version 0.2
# 2011-02-24
#
# Updated version detection
##
Plugin.define "easyLink-Web-Solutions" do
  author "Brendan Coles <[email protected]>" # 2010-09-03
  version "0.3"
  description "German Content Management System"
  website "http://www.mountaingrafix.eu/tag/easylink/"

  # Google rsults as at 2010-09-03 #
  # 308 for "powered by easyLink"

  # Dorks #
  dorks [
    '"powered by easyLink"'
  ]

  # Matches #
  matches [
    # HTML Comment
    # This may be removed only with permission of the developer
    { :text=>'Dieser Abschnitt darf nur mit Genehmigung des Entwicklers entfernt werden und bedarf einer' },

    # Version Detection # Meta generator
    { :version=>/<meta name="generator" content="easyLink v([\d\.]+)" \/>/ },

    # Version Detection # Powered by text
    { :version=>/[P|p]?owered by easyLink v([\d\.]+)/ },
  ]
end

avg_line_length: 25.244444
max_line_length: 104
alphanum_fraction: 0.71919
hexsha: 03043727c6992808681f923f0d8fef4bc4929e6b
size: 1,646
class M2c < Formula desc "Modula-2 to C compiler" homepage "https://savannah.nongnu.org/projects/m2c/" url "https://download.savannah.gnu.org/releases/m2c/0.7/m2c-0.7.tar.gz" sha256 "b725ed617f376e1a321e059bf1985098e950965d5edab161c6b24526f10a59bc" head "http://git.savannah.gnu.org/cgit/m2c.git" bottle do sha256 "4aa6ac4f5fd855f4f84d5577ff6f79495fda9edc2ce5335c64bd96a881975eb0" => :el_capitan sha256 "67659bd6a5fe922c1b34d5068a5cecbfee1f804e5ff432e32c8682a04029ccac" => :yosemite sha256 "7bf62153eeb0976851785db04e1319f745709294aa9d0bc99e47ffee3eba1315" => :mavericks sha256 "6db02ad1e1a355edfd1770d29952812baac666d6d2a7d3a599357f796eb7d891" => :mountain_lion end # Hacks purely for this 0.7 release. Git head already fixes installation glitches. # Will remove hacks on release of next version. def install # The config for "gcc" works for clang also. cp "config/generic-gcc.h", "config/generic-clang.h" system "./configure", "+cc=#{ENV.cc}" # Makefile is buggy! inreplace "Makefile", "install: all uninstall", "install: all" inreplace "Makefile", "mkdir", "mkdir -p" include.mkpath system "make", "install", "prefix=#{prefix}", "man1dir=#{man1}" end test do hello_mod = "Hello.mod" hello_exe = testpath/"Hello" (testpath/hello_mod).write <<-EOF.undent MODULE Hello; FROM InOut IMPORT WriteLn, WriteString; BEGIN WriteString ("Hello world!"); WriteLn; END Hello. EOF system "#{bin}/m2c", "-make", hello_mod, "-o", hello_exe assert_equal "Hello world!\n", shell_output(hello_exe) end end
avg_line_length: 32.27451
max_line_length: 95
alphanum_fraction: 0.708384
hexsha: 217da97d086be3fec9a52883a57c7cf3409839d5
size: 67

module UserHelper
  def user_name
    current_user&.name
  end
end

avg_line_length: 11.166667
max_line_length: 22
alphanum_fraction: 0.746269
hexsha: 2841ab3cf984b413f6f2591c97a27f5f64d3e72d
size: 1,584
# Copyright 2015 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. require 'date' require 'google/apis/core/base_service' require 'google/apis/core/json_representation' require 'google/apis/core/hashable' require 'google/apis/errors' module Google module Apis module GroupsmigrationV1 # JSON response template for groups migration API. class Groups include Google::Apis::Core::Hashable # The kind of insert resource this is. # Corresponds to the JSON property `kind` # @return [String] attr_accessor :kind # The status of the insert request. # Corresponds to the JSON property `responseCode` # @return [String] attr_accessor :response_code def initialize(**args) update!(**args) end # Update properties of this object def update!(**args) @kind = args[:kind] if args.key?(:kind) @response_code = args[:response_code] if args.key?(:response_code) end end end end end
avg_line_length: 30.461538
max_line_length: 76
alphanum_fraction: 0.667929
hexsha: d57f84140e9fd51dfb58fe73de1308aabf60a807
size: 4,105
require 'helper' class ItemTest < Test::Unit::TestCase class << self def startup @@item = FC::Item.new(:name => '/test item', :policy_id => 1, :size => 150) @@item.save @@storages = [] @@storages << FC::Storage.new(:name => 'rec1-sda', :host => 'rec1', :url => 'http://rec1/sda/') @@storages << FC::Storage.new(:name => 'rec2-sda', :host => 'rec2', :url => 'http://rec2/sda/') @@item_storages = @@storages.map do |storage| storage.save item_storage = FC::ItemStorage.new(:item_id => @@item.id, :storage_name => storage.name, :status => 'ready') item_storage.save item_storage end @@storages << FC::Storage.new(:name => 'rec3-sda', :host => 'rec3', :url => 'http://rec3/sda/') @@storages[2].save end def shutdown FC::DB.query("DELETE FROM items_storages") FC::DB.query("DELETE FROM items") FC::DB.query("DELETE FROM storages") end end setup do @@storages.each do |s| s.check_time = 0 s.http_check_time = 0 s.save end end should "create_from_local" do policy = FC::Policy.new assert_raise(ArgumentError) { FC::Item.create_from_local } assert_raise(ArgumentError) { FC::Item.create_from_local '/bla/bla' } assert_raise(ArgumentError) { FC::Item.create_from_local '/bla/bla', 'test' } assert_raise(RuntimeError) { FC::Item.create_from_local '/bla/bla', 'test', {}} assert_raise() { FC::Item.create_from_local '/bla/bla/bla', 'test', policy} end should "immediate_delete" do @@item.immediate_delete @@item.reload assert_equal 'delete', @@item.status @@item_storages.each do |item_storage| item_storage.reload assert_equal 'delete', item_storage.status item_storage.status = 'ready' item_storage.save end @@item.status = 'ready' @@item.save end should 'mark_deleted' do @@item.mark_deleted @@item.reload assert_equal 'deferred_delete', @@item.status @@item_storages.each do |item_storage| item_storage.reload assert_equal 'ready', item_storage.status end end should "make_item_storage" do storage_size = @@storages[2].size.to_i assert_kind_of FC::ItemStorage, @@item.make_item_storage(@@storages[2]) assert_equal storage_size+@@item.size, @@storages[2].size end should "get_item_storages" do assert_same_elements @@item_storages.map(&:id), @@item.get_item_storages.map(&:id) end should 'item get_available_storages' do @@storages[0].update_check_time assert_equal 1, @@item.get_available_storages.count assert_equal @@storages[0].name, @@item.get_available_storages.first.name end should 'item get_available_storages (http_up? influence)' do # all storages have http_up? == false storages = @@item_storages.map do |is| @@storages.detect { |s| s.name == is.storage_name } end.compact storages.each do |s| assert !s.http_up? end assert storages.size > 1 storages.each(&:update_check_time) assert_equal storages.size, @@item.get_available_storages.count # set one storage http_up? == true storages[0].update_http_check_time assert storages[0].http_up? 
assert_equal 1, @@item.get_available_storages.count assert_equal storages[0].name, @@item.get_available_storages.first.name assert_equal 'http://rec1/sda/test item', @@item.url end should "item urls" do assert_equal 0, @@item.urls.count @@storages.each(&:update_check_time) assert_same_elements ["http://rec1/sda/test item", "http://rec2/sda/test item"], @@item.urls end should "item url by url_weight" do @@storages.each(&:update_check_time) @@storages.each{|s| s.url_weight = -1; s.save} assert_raise(RuntimeError) { @@item.url } @@storages[0].url_weight = 1 @@storages[0].save assert_equal "http://rec1/sda/test item", @@item.url @@storages[1].url_weight = 2 @@storages[1].save Kernel.stubs(:rand).returns(1) assert_equal "http://rec2/sda/test item", @@item.url end end
avg_line_length: 32.579365
max_line_length: 116
alphanum_fraction: 0.653593
hexsha: 8701e7217eabd626462ca07ce86ca3ee4cf78763
size: 129

require 'test_helper'

class TaskParticipantTest < ActiveSupport::TestCase
  # test "the truth" do
  #   assert true
  # end
end

avg_line_length: 16.125
max_line_length: 51
alphanum_fraction: 0.72093
hexsha: 030d93a740aa6b03ce4ef22fb740dee3757cd7d7
size: 1,650
$:.unshift(File.join(File.dirname(__FILE__), '..', 'lib')) require "bundler/setup" require 'rubygems' require 'rspec' require 'yaml' require 'rdf/isomorphic' require 'rdf/spec' require 'rdf/spec/matchers' require 'rdf/turtle' require 'rdf/vocab' require_relative 'matchers' begin require 'nokogiri' rescue LoadError end begin require 'simplecov' require 'simplecov-lcov' SimpleCov::Formatter::LcovFormatter.config do |config| #Coveralls is coverage by default/lcov. Send info results config.report_with_single_file = true config.single_report_path = 'coverage/lcov.info' end SimpleCov.formatter = SimpleCov::Formatter::MultiFormatter.new([ SimpleCov::Formatter::HTMLFormatter, SimpleCov::Formatter::LcovFormatter ]) SimpleCov.start do add_filter "/spec/" add_filter "/lib/rdf/rdfa/reader/rexml.rb" add_filter "/lib/rdf/rdfa/context.rb" end rescue LoadError end require 'rdf/rdfa' ::RSpec.configure do |c| c.filter_run focus: true c.run_all_when_everything_filtered = true c.exclusion_filter = { ruby: lambda { |version| !(RUBY_VERSION.to_s =~ /^#{version}/) }, } c.include(RDF::Spec::Matchers) end TMP_DIR = File.join(File.expand_path(File.dirname(__FILE__)), "tmp") # Heuristically detect the input stream def detect_format(stream) # Got to look into the file to see if stream.is_a?(IO) || stream.is_a?(StringIO) stream.rewind string = stream.read(1000) stream.rewind else string = stream.to_s end case string when /<html/i then RDF::RDFa::Reader when /@prefix/i then RDF::Turtle::Reader else RDF::NTriples::Reader end end
avg_line_length: 24.626866
max_line_length: 73
alphanum_fraction: 0.712121
hexsha: 3898f66d647fbbc8fa3483611f6f7ac07ddf394f
size: 3,014
# # Be sure to run `pod spec lint Iterable-iOS-SDK.podspec' to ensure this is a # valid spec and to remove all comments including this before submitting the spec. # # To learn more about Podspec attributes see http://docs.cocoapods.org/specification.html # To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/ # Pod::Spec.new do |s| # ――― Spec Metadata ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― # # # These will help people to find your library, and whilst it # can feel like a chore to fill in it's definitely to your advantage. The # summary should be tweet-length, and the description more in depth. # s.name = "IterableSDK" s.module_name = "IterableSDK" s.version = "6.1.0" s.summary = "Iterable's official SDK for iOS" s.description = <<-DESC iOS SDK containing a wrapper around Iterable's API, in addition to some utility functions DESC s.homepage = "https://github.com/Iterable/swift-sdk.git" # ――― Spec License ――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― # # # Licensing your code is important. See http://choosealicense.com for more info. # CocoaPods will detect a license file if there is a named LICENSE* # Popular ones are 'MIT', 'BSD' and 'Apache License, Version 2.0'. # s.license = "MIT" # ――― Author Metadata ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― # # # Specify the authors of the library, with email addresses. Email addresses # of the authors are extracted from the SCM log. E.g. $ git log. CocoaPods also # accepts just a name if you'd rather not provide an email address. # # Specify a social_media_url where others can refer to, for example a twitter # profile URL. # s.author = { "Ilya Brin" => "[email protected]" } # ――― Platform Specifics ――――――――――――――――――――――――――――――――――――――――――――――――――――――― # # # If this Pod runs only on iOS or OS X, then specify the platform and # the deployment target. You can optionally include the target after the platform. # s.platform = :ios, "9.0" # ――― Source Location ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― # # # Specify the location from where the source should be retrieved. # Supports git, hg, bzr, svn and HTTP. # s.source = { :git => "https://github.com/Iterable/swift-sdk.git", :tag => s.version } # ――― Source Code ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― # # # CocoaPods is smart about how it includes source code. For source files # giving a folder will include any swift, h, m, mm, c & cpp files. # For header files it will include any header in the folder. # Not including the public_header_files will make all headers public. # s.source_files = "swift-sdk/**/*.{h,m,swift}" s.pod_target_xcconfig = { 'SWIFT_VERSION' => '4.2' } s.swift_version = '4.2' end
avg_line_length: 36.313253
max_line_length: 93
alphanum_fraction: 0.576311
hexsha: 91262bf3330fe0f2e98f1a04b141343bd2e81671
size: 545

Pod::Spec.new do |s|
  s.name         = 'Mixpanel'
  s.version      = '1.1.1'
  s.license      = 'Apache License'
  s.summary      = 'iPhone tracking library for Mixpanel Analytics.'
  s.homepage     = 'http://mixpanel.com'
  s.author       = { 'Mixpanel' => '[email protected]' }
  s.source       = { :git => 'https://github.com/mixpanel/mixpanel-iphone.git', :tag => 'v1.1.1' }
  s.frameworks   = 'CoreTelephony', 'SystemConfiguration'
  s.platform     = :ios
  s.source_files = 'Mixpanel/**/*.{h,m}'
  s.requires_arc = false
end

avg_line_length: 38.928571
max_line_length: 99
alphanum_fraction: 0.585321
hexsha: 26dce67770de7ed7b3f861130b6dabf900f75727
size: 1,172

# WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/master/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE

Gem::Specification.new do |spec|
  spec.name          = 'aws-sdk-snowball'
  spec.version       = File.read(File.expand_path('../VERSION', __FILE__)).strip
  spec.summary       = 'AWS SDK for Ruby - Amazon Snowball'
  spec.description   = 'Official AWS Ruby gem for Amazon Import/Export Snowball (Amazon Snowball). This gem is part of the AWS SDK for Ruby.'
  spec.author        = 'Amazon Web Services'
  spec.homepage      = 'https://github.com/aws/aws-sdk-ruby'
  spec.license       = 'Apache-2.0'
  spec.email         = ['[email protected]']
  spec.require_paths = ['lib']
  spec.files         = Dir['lib/**/*.rb']

  spec.metadata = {
    'source_code_uri' => 'https://github.com/aws/aws-sdk-ruby/tree/master/gems/aws-sdk-snowball',
    'changelog_uri'   => 'https://github.com/aws/aws-sdk-ruby/tree/master/gems/aws-sdk-snowball/CHANGELOG.md'
  }

  spec.add_dependency('aws-sdk-core', '~> 3', '>= 3.52.1')
  spec.add_dependency('aws-sigv4', '~> 1.1')
end

avg_line_length: 39.066667
max_line_length: 141
alphanum_fraction: 0.665529
hexsha: 1df61a6b465999f970f6e026ed13ea24f248dc67
size: 964

# Use this file to easily define all of your cron jobs.
#
# It's helpful, but not entirely necessary to understand cron before proceeding.
# http://en.wikipedia.org/wiki/Cron

# Example:
#
# set :output, "/path/to/my/cron_log.log"
#
# every 2.hours do
#   command "/usr/bin/some_great_command"
#   runner "MyModel.some_method"
#   rake "some:great:rake:task"
# end
#
# every 4.days do
#   runner "AnotherModel.prune_old_records"
# end

# Learn more: http://github.com/javan/whenever

set :output, 'log/whenever.log'
env :PATH, ENV['PATH']

job_type :rake, 'export PATH="$HOME/.rbenv/bin:$PATH"; eval "$(rbenv init -)"; cd :path && :environment_variable=:environment bundle exec rake :task --silent :output'

every 12.hours do
  rake "maintenance:clear_old_ahoy_visits"
end

every 1.day do
  rake "report:send_analytics_report"
end

every 5.minutes do
  rake "api_action:rerun_failed_actions"
end

every 1.day do
  rake "maintenance:clear_discarded_api_actions"
end

avg_line_length: 23.512195
max_line_length: 166
alphanum_fraction: 0.732365
hexsha: 216ec13d4fbcda1b013cc534ee1f4c61131579ab
size: 46

class OrderItemPolicy < ApplicationPolicy
end

avg_line_length: 15.333333
max_line_length: 41
alphanum_fraction: 0.869565
hexsha: e8a80b59df396ba1feaf349f5e96b69d63efe8e8
size: 464

require File.expand_path(File.join(File.dirname(__FILE__), "helper"))

class SelectNoneTest < Test::Unit::TestCase
  def setup
    @agent = WWW::Mechanize.new
    @page = @agent.get("http://localhost/form_select_none.html")
    @form = @page.forms.first
  end

  def test_select_default
    assert_equal("1", @form.list)
    page = @agent.submit(@form)
    assert_equal(1, page.links.length)
    assert_equal(1, page.links_with(:text => 'list:1').length)
  end
end

avg_line_length: 29
max_line_length: 69
alphanum_fraction: 0.693966
hexsha: 1ce51ef6ea807e94b451328198c8034ad5d59478
size: 977

# frozen_string_literal: true

class Report::SubsidiaryHumanResourcesController < Report::BaseController
  before_action :authenticate_user!
  before_action :set_page_layout_data, if: -> { request.format.html? }
  before_action :set_breadcrumbs, only: %i[index], if: -> { request.format.html? }
  before_action :prepare_encrypt_uid

  def show
    authorize :"Bi::SubsidiaryHumanResource"
    prepare_meta_tags title: t('.title')
    @redirect_url = 'view/form?viewlet=HR/SY_2.frm&ref_t=design&ref_c=7c332fd4-ca53-4731-8200-9147e584be33'
    @hide_app_footer = true
    @hide_main_header_wrapper = true
    render 'shared/report_show'
  end

  protected

  def set_page_layout_data
    @_sidebar_name = 'human_resource'
  end

  def set_breadcrumbs
    @_breadcrumbs = [
      { text: t('layouts.sidebar.application.header'), link: root_path },
      { text: t('layouts.sidebar.human_resource.header'), link: report_human_resource_path }]
  end
end

avg_line_length: 30.53125
max_line_length: 107
alphanum_fraction: 0.71955
hexsha: 61d3c388e46f80a8a7c75452e1aaa9cf2c600e9e
size: 158

class RemoveWorkOrder < Neo4j::Migrations::Base
  def up
    drop_constraint :WorkOrder, :ref
  end

  def down
    add_constraint :WorkOrder, :ref
  end
end

avg_line_length: 15.8
max_line_length: 47
alphanum_fraction: 0.71519
hexsha: 6adf11b2edc9d5dabd953c3b10553be0c9fffec2
size: 81

require 'test_helper'

class ShiftTemplatesHelperTest < ActionView::TestCase
end

avg_line_length: 16.2
max_line_length: 53
alphanum_fraction: 0.839506
hexsha: 039aa72608540a9b0faa76fe7cc5d86d70d4fd1e
size: 264

# coding: utf-8

module ThinReports
  module Core
    # @private
    module ArrayExtensions
      def simple_deep_copy
        map {|v| v.dup rescue v }
      end
    end
  end
end

# @private
class Array
  include ThinReports::Core::ArrayExtensions
end

avg_line_length: 13.2
max_line_length: 44
alphanum_fraction: 0.636364
hexsha: d596148191d6d21059c5269de7d4db1e36d039f6
size: 1,901
# frozen_string_literal: true require 'test_helper' module Api module V1 class CuratedModelsControllerTest < ActionDispatch::IntegrationTest test 'user should be able to get conditions for their profile' do user = users(:harry) token = generate_token(user.id) profile = user.profiles.first get "/api/v1/profiles/#{profile.id}/conditions", params: { access_token: token.token } assert_response :success end test 'user should be able to get conditions for their profile via FHIR API' do fhir_manager = FhirUtilities.new fhir = fhir_manager.fhir user = users(:harry) token = generate_token(user.id) get '/api/v1/Condition', params: { access_token: token.token } assert_response :success bundle = fhir::Json.from_json(@response.body) assert_not_nil(bundle) assert_equal(2, bundle.entry.size) codes = bundle.entry.map { |e| e.resource.code.coding[0].code }.sort assert_equal('Q84.1', codes[0]) assert_equal('W61.62', codes[1]) end test 'user should not be able to get conditions for someone else' do user = users(:harry) token = generate_token(user.id) someone_else = users(:sema) profile = someone_else.profiles.first get "/api/v1/profiles/#{profile.id}/conditions", params: { access_token: token.token } assert_response :missing end test 'user should not be able to get conditions for someone else via FHIR API' do user = users(:harry) token = generate_token(user.id) someone_else = users(:sema) profile = someone_else.profiles.first condition = profile.conditions.first get "/api/v1/Condition/#{condition.id}", params: { access_token: token.token } assert_response :missing end end end end
avg_line_length: 31.683333
max_line_length: 94
alphanum_fraction: 0.647028
hexsha: 33f5a24a30dc638b2e3fa6e587fa5fb2c2f43aa4
size: 4,577
module App::Usr::Html extend Waxx::Html extend self def home(x, usr:{}, person:{}) %( <div class="row"> <div class="col-md-3 nav2">#{App::Person::Html.nav2(x)}</div> <div class="col-md-8"> <h1>#{person['first_name'].h} #{person['last_name'].h}</h1> <div>#{usr['usr_name'].h}</div> <div>#{person['phone'].h}</div> </div> </div> ) end def login(x, return_to:"/") %( <div class="container"> <h1>Client Portal</h1> <div class="row"> <div class="col-md-4">#{login_form(x, return_to:return_to)}</div> <div class="col-md-8">#{App::WebsitePage.by_uri(x, uri:"/portal")['content']}</div> </div> </div> ) end def login_form(x, return_to: "/") %( <form action="/usr/login" method="post"> <!--#{Waxx::Csrf.ht(x)}--> <input type="hidden" name="return_to" value="#{h return_to}"> <div class="form-group"> <label for="usr_name">Email</label> <input type="email" class="form-control" id="usr_name" name="usr_name" placeholder="" value="#{x.ua['un'] if x.ua['rm']}"> </div> <div class="form-group"> <label for="password">Password</label> <input name="password" type="password" class="form-control" id="password" placeholder=""> </div> <div class="checkbox"> <label><input name="remember_me" type="checkbox" #{"checked" if x.ua['rm']} value="1"> Remember me </label> </div> <button type="submit" class="btn btn-primary">Login</button> </form> <p style="margin-top: 2em;"><a href="#password_reset_form" onclick="$('#password_reset_form').toggle('blind')">Forgot password?</a></p> <div id="password_reset_form" style="display:none;"> <form action="/usr/password_reset" method="post"> <!--#{Waxx::Csrf.ht(x)}--> <div class="form-group"> <label for="email">Enter your email address</label> <input type="email" class="form-control" id="email" name="email" placeholder="[email protected]" value="#{x.ua['un'] if x.ua['rm'] == 1}"> </div> <button type="submit" class="btn btn-warning">Send Password Reset Link</button> </form> </div> ) end def change_password(x, u=nil, key=nil) content = App::Content.by_slug(x, slug: "password-rules") %( <div class="row"> <div class="col-md-3 nav2">#{App::Person::Html.nav2(x) if x.usr?}</div> <div class="col-md-5"> <h1>Change Password</h1> <form action="" method="post"> #{Waxx::Csrf.ht(x)} <div class="form-group"> <!-- <label for="usr_name">User Name</label> --> #{h(u ? u['usr_name'] : x.ua['un'])} </div> #{new_password_field(x)} <button type="submit" class="btn btn-primary" id="btn-submit" disabled="disabled">Change Password</button> </form> </div> </div> ) end def new_password_field(x) %( <div class="form-group"> <label for="password">Password</label> <input name="password1" type="password" class="form-control" id="pw1" onkeyup="app.passwordNew('#pw1', '#pw2', '#btn-submit');"> <div class="text-muted">Score 60+. 
Use upper &amp; lower case, numbers, and symbols.</div> <div class="text-muted">Score: <span id="pw1-status" style="color:#000; font-weight: normal;">0 Continue</span></div> <div style="border: 1px solid #ccc; background-color:#eee;"> <div id="pw1-meter" style="height: 4px; width:0; background-color:red;color:white;overflow:visible;font-size:9px;"></div> </div> </div> <div class="form-group"> <label for="password">Confirm Password <span id="pw2-icon" class="glyphicon glyphicon-unchecked"></span></label> <input name="password2" type="password" class="form-control" id="pw2" onkeyup="app.passwordNew('#pw1', '#pw2', '#btn-submit');"> </div> ) end def list(x, usrs) re = [%(<table class="table"> <tr><th>ID</th><th>User Name</th><th>Last Login</th><th>Failed Logins</th></tr> )] re << usrs.map{|u| %(<tr><td>#{u/:id}</td> <td>#{u/:usr_name}</td> <td>#{u['last_login_date'].f("%d-%b-%Y @%H:%M")} from #{u/:last_login_host}</td> <td>#{u/:failed_login_count}</td> </tr>) } re << %(</table><a href="/usr/record/0" class="btn btn-success"><span class="glyphicon glyphicon-plus"></span> Add User</a>) re.join end end
avg_line_length: 39.456897
max_line_length: 148
alphanum_fraction: 0.555386
hexsha: e995de622ec61f5488759016d1325356a7521195
size: 1,465
module Sipity module Controllers # A junk drawer of actions for visitors that have not yet authenticated. class VisitorsController < ApplicationController class_attribute :response_handler_container self.runner_container = Sipity::Runners::VisitorsRunner self.response_handler_container = Sipity::ResponseHandlers::WorkAreaHandler def work_area run_and_respond_with_processing_action(work_area_slug: work_area_slug) end def initialize(*args, &block) super(*args, &block) self.processing_action_composer = ProcessingActionComposer.build_for_controller( controller: self, processing_action_name: action_name ) end def status headers['Content-Type'] = 'application/json' work = Sipity::Models::Work.includes(:processing_entity).find(work_id) json = { id: work.id, status: work.processing_state } render json: json end delegate( :prepend_processing_action_view_path_with, :run_and_respond_with_processing_action, to: :processing_action_composer ) attr_accessor :view_object helper_method :view_object alias model view_object helper_method :model private attr_accessor :processing_action_composer def work_area_slug params.require(:work_area_slug) end def work_id params.require(:work_id) end end end end
avg_line_length: 28.173077
max_line_length: 88
alphanum_fraction: 0.693515
hexsha: ab315639bffd76bbfa858a4f9555af68cb98de5a
size: 5,626
require "rails_helper" feature "Account" do scenario "user without Stripe Customer ID" do user = create(:user, stripe_customer_id: nil) sign_in_as(user) visit account_path expect(page).not_to have_text("Update Credit Card") end scenario "user with Stripe Customer ID", js: true do user = create(:user, stripe_customer_id: "123") stub_customer_find_request(user.stripe_customer_id) sign_in_as(user) visit account_path expect(page).to have_text("Update Credit Card") end scenario "returns a list of all plans", :js do user = create(:user) sign_in_as(user, "letmein") visit account_path plans = page.all(".plan") expect(plans.count).to eq 7 within(plans[0]) do expect(page).to have_text("CURRENT PLAN") expect(find(".plan-title")).to have_text "Open Source" expect(find(".plan-allowance")).to have_text "Unlimited" expect(find(".plan-price")).to have_text "$0 month" end within(plans[1]) do expect(find(".plan-title")).to have_text "Chihuahua" expect(find(".plan-allowance")).to have_text "Up to 50 Reviews" expect(find(".plan-price")).to have_text "$29 month" end within(plans[2]) do expect(find(".plan-title")).to have_text "Terrier" expect(find(".plan-allowance")).to have_text "Up to 300 Reviews" expect(find(".plan-price")).to have_text "$49 month" end within(plans[3]) do expect(find(".plan-title")).to have_text "Labrador" expect(find(".plan-allowance")).to have_text "Up to 1,000 Reviews" expect(find(".plan-price")).to have_text "$99 month" end within(plans[4]) do expect(find(".plan-title")).to have_text "Husky" expect(find(".plan-allowance")).to have_text "Up to 3,000 Reviews" expect(find(".plan-price")).to have_text "$199 month" end within(plans[5]) do expect(find(".plan-title")).to have_text "Great Dane" expect(find(".plan-allowance")).to have_text "Up to 10,000 Reviews" expect(find(".plan-price")).to have_text "$299 month" end end scenario "user with a subscription views account page" do user = create(:user, stripe_customer_id: stripe_customer_id) create(:subscription, user: user) responses = [individual_subscription_response] stub_customer_find_request_with_subscriptions( stripe_customer_id, generate_subscriptions_response(responses), ) sign_in_as(user) visit account_path within(".itemized-receipt") do expect(page).to have_text("Great Dane") expect(page).to have_text("$49") end end scenario "user with discounted-amount subscription views account page" do user = create(:user, stripe_customer_id: stripe_customer_id) create(:subscription, user: user) responses = [discounted_amount_subscription_response] stub_customer_find_request_with_subscriptions( stripe_customer_id, generate_subscriptions_response(responses), ) sign_in_as(user) visit account_path within(".itemized-receipt") do expect(page).to have_text("Great Dane") expect(page).to have_text("$250") end end scenario "user with discounted-percentage subscription views account page" do user = create(:user, stripe_customer_id: stripe_customer_id) create(:subscription, user: user) responses = [discounted_percent_subscription_response] stub_customer_find_request_with_subscriptions( stripe_customer_id, generate_subscriptions_response(responses), ) sign_in_as(user) visit account_path within(".itemized-receipt") do expect(page).to have_text("Great Dane") expect(page).to have_text("$24.50") end end scenario "user updates their email address", :js do email_address = "[email protected]" stub_customer_find_request stub_customer_update_request(email: email_address) sign_in_as(create(:user, :stripe)) visit account_path user = user_on_page user.update(email_address) 
expect(user).to be_updated end private def stub_customer_find_request_with_subscriptions(customer_id, subscriptions) stub_request(:get, "#{stripe_base_url}/customers/#{customer_id}"). with(headers: { "Authorization" => "Bearer #{ENV['STRIPE_API_KEY']}" }). to_return(status: 200, body: merge_customer_subscriptions(subscriptions)) end def user_on_page UserOnPage.new end def generate_subscriptions_response(subscriptions) { "object" => "list", "total_count" => subscriptions.length, "has_more" => false, "url" => "/v1/customers/cus_2e3fqARc1uHtCv/subscriptions", "data" => subscriptions, } end def discounted_amount_subscription_response read_subscription_fixture("discounted_amount") end def discounted_percent_subscription_response read_subscription_fixture("discounted_percent") end def individual_subscription_response read_subscription_fixture("individual") end def private_subscription_response read_subscription_fixture("private") end def org_subscription_response read_subscription_fixture("org") end def read_subscription_fixture(fixture) file_path = "spec/support/fixtures/stripe_#{fixture}_subscription.json" JSON.parse(File.read(file_path)) end def merge_customer_subscriptions(subscriptions) file_path = "spec/support/fixtures/stripe_customer_find.json" customer_response = File.read(file_path) customer = JSON.parse(customer_response) customer["subscriptions"] = subscriptions customer.to_json end end
avg_line_length: 28.704082
max_line_length: 79
alphanum_fraction: 0.706363
hexsha: f899889c3bf4864d787c8ec7bf57c2d60d2e1296
size: 434

module TailCallOptimization
  RubyVM::InstructionSequence.compile_option = {
    tailcall_optimization: true,
    trace_instruction: false
  }

  def recompile_with_tail(meth)
    meth = %{
      class #{self.to_s}
        #{meth}
      end
    }
    RubyVM::InstructionSequence.new(meth).eval
  end

  def xtail(meth)
    m = self.instance_method(meth)
    self.send :undef_method, meth
    recompile_with_tail(m.source)
  end
end

avg_line_length: 18.083333
max_line_length: 48
alphanum_fraction: 0.675115
hexsha: b9e9ab20eff6f744554675c40619334d62692b80
size: 3,591
require "json" class StackGraph HTML_HEADER = <<~HTML <!DOCTYPE html> <html> <head> <meta charset="utf-8" /> <title>Racc stack graph</title> <link rel="stylesheet" href="stack_graph.css" type="text/css" /> <script src="stack_graph.js"></script> </head> <body> HTML HTML_FOOTER = <<~HTML </body> </html> HTML class Block < Struct.new(:from, :to, :token, :layer) end def initialize( log, color_map: {}, step_width: 6 # rem ) @log = log @color_map = color_map @step_width = step_width end def print_tag(tag, attrs) print "<#{tag} #{attrs}>" yield print "</#{tag}>" end def rand_color format( "rgb(%d, %d, %d)", 128 + rand(127), 128 + rand(127), 128 + rand(127) ) end def make_blocks(vstack_list) blocks = [] current = [] num_layers = vstack_list.map(&:size).max vstack_list.each_with_index do |vstack, step| (0...num_layers).each do |layer| v = vstack[layer] if current[layer].nil? if v.nil? ; else # 出現 block = Block.new(step, step, v, layer) blocks << block current[layer] = block end else if v.nil? # 消滅 current[layer] = nil else if v == current[layer].token # 継続 current[layer].to = step else # 変更 block = Block.new(step, step, v, layer) blocks << block current[layer] = block end end end end end blocks end def bgcolor(bl) @color_map.fetch(bl.token[0]) { rand_color() } end def print_graph(vstack_list, blocks) num_layers = vstack_list.map(&:size).max (0...num_layers).to_a.reverse.each do |layer| # 上から下へ layer_blocks = blocks .select { |bl| bl.layer == layer } .sort { |a, b| a.from <=> b.from } step = 0 print_tag "div", %(class="layer_container") do layer_blocks.each do |bl| # 左から右へ blank_steps = bl.from - step blank_steps.times do print %(<div class="block_space" style="width: #{@step_width}rem;"> </div>) end step = bl.to + 1 w = @step_width * (bl.to - bl.from + 1) print_tag "div", %(class="block" style="width: #{w}rem; background: #{bgcolor(bl)};") do sym, val = bl.token print sym print %(<hr />) print val end end end end print_tag "div", %(class="layer_container") do (0...(vstack_list.size)).to_a .map(&:succ) .each { |step| print %(<div class="block" style="width: #{@step_width}rem;">#{step}</div>) } end end def print_html vstack_list = @log.lines .map { |line| JSON.parse(line) } blocks = make_blocks(vstack_list) puts HTML_HEADER print_tag "div", %(style="width: #{ @step_width * vstack_list.size + 4 }rem; padding: 2rem;") do print_graph(vstack_list, blocks) end puts HTML_FOOTER end end if $0 == __FILE__ color_map = { "IDENT" => "rgb(225, 233, 151)", "STRING" => "rgb(225, 233, 151)", "INT" => "rgb(225, 233, 151)", "primary" => "rgb(162, 234, 231)", "expr" => "#fb6", "empty" => "#eee", } sg = StackGraph.new( ARGF.read, color_map: color_map, step_width: 8 ) sg.print_html end
avg_line_length: 22.304348
max_line_length: 100
alphanum_fraction: 0.508215
hexsha: d54771e27135d866db4d7256d8681f3ee028de67
size: 500

require 'spec_helper'

describe Datatrans::Common do
  context "sign" do
    before do
      #class Request
      #  include Datatrans::Common
      #end
      @request = Datatrans::XML::Transaction::Request.new(@datatrans, {})
    end

    it "generates the correct sign" do
      amount = 1000
      currency = 'CHF'
      reference_number = 'ABCEDF'
      expect(@request.sign(@datatrans.merchant_id, amount, currency, reference_number)).to eq '4e7d4d5bbde548c586f3b7f109635ffc'
    end
  end
end

avg_line_length: 23.809524
max_line_length: 128
alphanum_fraction: 0.67
hexsha: 2682f4e5ba09aaa86b52df0688bc82533ee33092
size: 541

class CreateGoodJobs < ActiveRecord::Migration[5.2]
  def change
    enable_extension 'pgcrypto'

    create_table :good_jobs, id: :uuid do |t|
      t.text :queue_name
      t.integer :priority
      t.jsonb :serialized_params
      t.timestamp :scheduled_at
      t.timestamp :performed_at
      t.timestamp :finished_at
      t.text :error

      t.timestamps
    end

    add_index :good_jobs, :scheduled_at, where: "(finished_at IS NULL)"
    add_index :good_jobs, [:queue_name, :scheduled_at], where: "(finished_at IS NULL)"
  end
end

avg_line_length: 25.761905
max_line_length: 86
alphanum_fraction: 0.680222
hexsha: 91e78c1ff4847bb7d3ecc2bbfa81867790188fad
size: 2,151
require 'rails_helper' module Genova module Ecs describe Client do before do service_client_mock = double(Ecs::Deployer::Service::Client) allow(service_client_mock).to receive(:wait_timeout=) allow(service_client_mock).to receive(:update) allow(service_client_mock).to receive(:exist?).and_return(true) allow(Ecs::Deployer::Service::Client).to receive(:new).and_return(service_client_mock) task_definition_mock = double(Aws::ECS::Types::TaskDefinition) allow(task_definition_mock).to receive(:task_definition_arn).and_return('task_definition_arn') task_client_mock = double(Ecs::Task::Client) allow(task_client_mock).to receive(:register).and_return(task_definition_mock) ecr_client_mock = double(Ecr::Client) allow(ecr_client_mock).to receive(:push_image) allow(ecr_client_mock).to receive(:destroy_images) allow(Ecr::Client).to receive(:new).and_return(ecr_client_mock) allow(Ecs::Task::Client).to receive(:new).and_return(task_client_mock) docker_client_mock = double(Genova::Docker::Client) allow(docker_client_mock).to receive(:build_image).and_return(['repository_name']) allow(Genova::Docker::Client).to receive(:new).and_return(docker_client_mock) end describe 'deploy_service' do let(:code_manager_mock) { double(CodeManager::Git) } let(:client) { Ecs::Client.new('cluster', code_manager_mock) } let(:deploy_config_mock) { double(Genova::Config::TaskDefinitionConfig) } it 'should be return DeployResponse' do allow(deploy_config_mock).to receive(:find_service).and_return(containers: ['container'], path: 'path') allow(deploy_config_mock).to receive(:find_cluster).and_return([]) allow(code_manager_mock).to receive(:load_deploy_config).and_return(deploy_config_mock) allow(code_manager_mock).to receive(:task_definition_config_path).and_return('task_definition_path') expect(client.deploy_service('service', 'tag_revision')).to be_a(DeployResponse) end end end end end
avg_line_length: 44.8125
max_line_length: 113
alphanum_fraction: 0.711297
hexsha: edd776c3f842f6e0d1cfb8b8aad701f80f60fc50
size: 1,627
require 'spec_helper' describe 'et_upload::default' do let(:chef_run) { ChefSpec::Runner.new.converge(described_recipe) } users = data_bag('users') before do setup_environment end users['upload'].each do |uname, u| next if uname == 'id' u['home'] = "/home/#{uname}" u['gid'] = 'uploadonly' evertrue_gid = 'evertrue' it "creates user #{uname}" do expect(chef_run).to create_user(uname).with( uid: u['uid'], gid: u['gid'], comment: u['comment'], password: u['password'], home: u['home'] ) end it "sets #{uname} home folder mode and ownership" do expect(chef_run).to create_directory(u['home']).with( user: 'root', group: u['gid'], mode: '0755' ) end mode = (uname == 'trial-user') ? '0300' : '0700' it "creates #{dir}" do expect(chef_run).to create_directory(dir).with( user: uname, group: u['gid'], mode: mode ) end ["#{u['home']}/uploads", "#{u['home']}/exports"].each do |dir| it "creates #{dir}" do expect(chef_run).to create_directory(dir).with( user: uname, group: evertrue_gid, mode: 770 ) end end it "creates #{uname}'s authorized_keys" do auth_keys_path = "#{u['home']}/.ssh/authorized_keys" expect(chef_run).to create_template(auth_keys_path).with( user: uname, group: u['gid'], mode: '0600' ) expect(chef_run).to render_file(auth_keys_path).with_content(u['keys']) end end end
avg_line_length: 23.926471
max_line_length: 77
alphanum_fraction: 0.548248
hexsha: f77e5c4684a44a8c7a363646c2bdfb5df328e68e
size: 1,747

# frozen_string_literal: true

$LOAD_PATH.push File.expand_path('lib', __dir__)
require 'faker/version'

Gem::Specification.new do |spec|
  spec.name        = 'faker'
  spec.version     = Faker::VERSION
  spec.platform    = Gem::Platform::RUBY
  spec.authors     = ['Benjamin Curtis', 'Vitor Oliveira']
  spec.email       = ['[email protected]', '[email protected]']
  spec.summary     = 'Easily generate fake data'
  spec.description = 'Faker, a port of Data::Faker from Perl, is used to easily generate fake data: names, addresses, phone numbers, etc.'
  spec.homepage    = 'https://github.com/faker-ruby/faker'
  spec.license     = 'MIT'

  spec.files         = Dir['lib/**/*'] + %w[History.md License.txt CHANGELOG.md README.md]
  spec.bindir        = 'bin'
  spec.executables   = ['faker']
  spec.require_paths = ['lib']

  spec.required_ruby_version = '>= 2.3'

  spec.metadata['changelog_uri']     = 'https://github.com/faker-ruby/faker/blob/master/CHANGELOG.md'
  spec.metadata['source_code_uri']   = 'https://github.com/faker-ruby/faker'
  spec.metadata['bug_tracker_uri']   = 'https://github.com/faker-ruby/faker/issues'
  spec.metadata['documentation_uri'] = 'https://rubydoc.info/github/faker-ruby/faker/master'
  spec.metadata['yard.run']          = 'yri'

  spec.add_dependency('i18n', '>= 1.6', '< 1.8')

  spec.add_development_dependency('minitest', '5.13.0')
  spec.add_development_dependency('pry', '0.12.2')
  spec.add_development_dependency('rake', '13.0.1')
  spec.add_development_dependency('rubocop', '0.78.0')
  spec.add_development_dependency('simplecov', '0.17.1')
  spec.add_development_dependency('test-unit', '3.3.4')
  spec.add_development_dependency('timecop', '0.9.1')
  spec.add_development_dependency('yard', '0.9.20')
end

avg_line_length: 42.609756
max_line_length: 138
alphanum_fraction: 0.692044
hexsha: f7ff7101b037d249ab17f151c501845ef7ee385b
size: 5,121
module Webspicy class Tester extend Forwardable class FailFast < Exception; end def initialize(config) @config = Configuration.dress(config) @scope = nil @hooks = nil @client = nil @spec_file = nil @specification = nil @service = nil @test_case = nil @invocation = nil @invocation_error = nil @reporter = @config.reporter end attr_reader :config, :scope, :hooks, :client attr_reader :specification, :spec_file attr_reader :service, :test_case attr_reader :invocation, :invocation_error, :result attr_reader :reporter def_delegators :@config, *[ :world ] def failfast? config.failfast end def call reporter.init(self) begin run_config rescue FailFast end reporter.report reporter.find(Reporter::SuccessOrNot).report end def call! res = call abort("KO") unless reporter.find(Reporter::SuccessOrNot).success? end def find_and_call(method, url, mutation) unless tc = scope.find_test_case(method, url) raise Error, "No such service `#{method} #{url}`" end mutated = tc.mutate(mutation) fork_tester(test_case: mutated) do |t| instrumented = t.instrument_test_case t.client.call(instrumented) end end def bind_condition(c) c = Specification::Oldies::Bridge.new(c) unless c.respond_to?(:bind) c.bind(self) end protected def run_config config.each_scope do |scope| @scope = scope @hooks = Support::Hooks.for(scope.config) @client = scope.get_client run_scope end end def run_scope reporter.before_all hooks.fire_before_all(self) reporter.before_all_done reporter.before_scope scope.each_specification_file do |spec_file| @specification = load_specification(spec_file) if @specification reporter.before_specification run_specification reporter.specification_done reporter.spec_file_done elsif failfast? raise FailFast end end reporter.scope_done reporter.after_all hooks.fire_after_all(self) reporter.after_all_done end def load_specification(spec_file) @spec_file = spec_file reporter.before_spec_file config.factory.specification(spec_file.load, spec_file, scope) rescue *PASSTHROUGH_EXCEPTIONS raise rescue Exception => e reporter.spec_file_error(e) nil end def run_specification scope.each_service(specification) do |service| @service = service reporter.before_service run_service reporter.service_done end end def run_service scope.each_testcase(service) do |test_case| @test_case = test_case reporter.before_test_case run_test_case reporter.test_case_done end end def run_test_case hooks.fire_around(self) do reporter.before_each hooks.fire_before_each(self) reporter.before_each_done reporter.before_instrument instrument_test_case reporter.instrument_done call_test_case_target reporter.before_assertions check_invocation reporter.assertions_done reporter.after_each hooks.fire_after_each(self) reporter.after_each_done raise FailFast if !result.success? and failfast? end end def call_test_case_target @invocation = nil @invocation_error = nil reporter.before_invocation @invocation = client.call(test_case) reporter.invocation_done rescue *PASSTHROUGH_EXCEPTIONS raise rescue => ex @invocation_error = ex reporter.invocation_done end def instrument_test_case service = test_case.service service.preconditions.each do |pre| instrument_one(pre) end service.postconditions.each do |post| instrument_one(post) end if test_case.example? service.errconditions.each do |err| instrument_one(err) end if test_case.counterexample? 
config.listeners(:instrument).each do |i| i.call(self) end test_case end def instrument_one(condition) bind_condition(condition).instrument rescue ArgumentError raise "#{condition.class} implements old PRE/POST contract" end def check_invocation @result = Result.from(self) end def fork_tester(binding = {}) yield dup.tap{|t| binding.each_pair do |k,v| t.send(:"#{k}=", v) end } end private attr_writer :test_case end # class Tester end # module Webspicy require_relative 'tester/reporter' require_relative 'tester/client' require_relative 'tester/invocation' require_relative 'tester/result' require_relative 'tester/failure' require_relative 'tester/assertions' require_relative 'tester/asserter' require_relative 'tester/file_checker'
avg_line_length: 24.042254
max_line_length: 74
alphanum_fraction: 0.652997
hexsha: f893a07cf22f5627e81a68dcad4e58d7e5bc4ef0
size: 1,767
describe Fastlane do describe Fastlane::FastFile do describe "Backup file Integration" do let (:test_path) { "/tmp/fastlane/tests/fastlane" } let (:file_path) { "file.txt" } let (:backup_path) { "#{file_path}.back" } let (:file_content) { Time.now.to_s } before do FileUtils.mkdir_p(test_path) File.write(File.join(test_path, file_path), file_content) end describe "when use action after `backup_file` action" do before do Fastlane::FastFile.new.parse("lane :test do backup_file path: '#{File.join(test_path, file_path)}' end").runner.execute(:test) File.write(File.join(test_path, file_path), file_content.reverse) end it "should be restore file" do Fastlane::FastFile.new.parse("lane :test do restore_file path: '#{File.join(test_path, file_path)}' end").runner.execute(:test) restored_file = File.open(File.join(test_path, file_path)).read expect(restored_file).to include file_content expect(File).not_to exist(File.join(test_path, backup_path)) end end describe "when use action without `backup_file` action" do it "should raise error" do expect { Fastlane::FastFile.new.parse("lane :test do restore_file path: '#{File.join(test_path, file_path)}' end").runner.execute(:test) }.to raise_error("Could not find file '#{File.join(test_path, backup_path)}'") end end after do File.delete(File.join(test_path, backup_path)) if File.exists? File.join(test_path, backup_path) File.delete(File.join(test_path, file_path)) end end end end
avg_line_length: 35.34
max_line_length: 104
alphanum_fraction: 0.62309
hexsha: 1cc203924ba7191e1a890e167fca0806a68456e8
size: 18,964
# frozen_string_literal: true require_relative './data_absent_reason_checker' require_relative './profile_definitions/us_core_allergyintolerance_definitions' module Inferno module Sequence class USCore310AllergyintoleranceSequence < SequenceBase include Inferno::DataAbsentReasonChecker include Inferno::USCore310ProfileDefinitions title 'AllergyIntolerance Tests' description 'Verify support for the server capabilities required by the US Core AllergyIntolerance Profile.' details %( # Background The US Core #{title} sequence verifies that the system under test is able to provide correct responses for AllergyIntolerance queries. These queries must contain resources conforming to US Core AllergyIntolerance Profile as specified in the US Core v3.1.0 Implementation Guide. # Testing Methodology ## Searching This test sequence will first perform each required search associated with this resource. This sequence will perform searches with the following parameters: * patient ### Search Parameters The first search uses the selected patient(s) from the prior launch sequence. Any subsequent searches will look for its parameter values from the results of the first search. For example, the `identifier` search in the patient sequence is performed by looking for an existing `Patient.identifier` from any of the resources returned in the `_id` search. If a value cannot be found this way, the search is skipped. ### Search Validation Inferno will retrieve up to the first 20 bundle pages of the reply for AllergyIntolerance resources and save them for subsequent tests. Each of these resources is then checked to see if it matches the searched parameters in accordance with [FHIR search guidelines](https://www.hl7.org/fhir/search.html). The test will fail, for example, if a patient search for gender=male returns a female patient. ## Must Support Each profile has a list of elements marked as "must support". This test sequence expects to see each of these elements at least once. If at least one cannot be found, the test will fail. The test will look through the AllergyIntolerance resources found for these elements. ## Profile Validation Each resource returned from the first search is expected to conform to the [US Core AllergyIntolerance Profile](http://hl7.org/fhir/us/core/StructureDefinition/us-core-allergyintolerance). Each element is checked against teminology binding and cardinality requirements. Elements with a required binding is validated against its bound valueset. If the code/system in the element is not part of the valueset, then the test will fail. ## Reference Validation Each reference within the resources found from the first search must resolve. The test will attempt to read each reference found and will fail if any attempted read fails. ) test_id_prefix 'USCAI' requires :token, :patient_ids conformance_supports :AllergyIntolerance def validate_resource_item(resource, property, value) case property when 'clinical-status' values_found = resolve_path(resource, 'clinicalStatus') coding_system = value.split('|').first.empty? ? nil : value.split('|').first coding_value = value.split('|').last match_found = values_found.any? do |codeable_concept| if value.include? '|' codeable_concept.coding.any? { |coding| coding.system == coding_system && coding.code == coding_value } else codeable_concept.coding.any? 
{ |coding| coding.code == value } end end assert match_found, "clinical-status in AllergyIntolerance/#{resource.id} (#{values_found}) does not match clinical-status requested (#{value})" when 'patient' values_found = resolve_path(resource, 'patient.reference') value = value.split('Patient/').last match_found = values_found.any? { |reference| [value, 'Patient/' + value, "#{@instance.url}/Patient/#{value}"].include? reference } assert match_found, "patient in AllergyIntolerance/#{resource.id} (#{values_found}) does not match patient requested (#{value})" end end def perform_search_with_status(reply, search_param) begin parsed_reply = JSON.parse(reply.body) assert parsed_reply['resourceType'] == 'OperationOutcome', 'Server returned a status of 400 without an OperationOutcome.' rescue JSON::ParserError assert false, 'Server returned a status of 400 without an OperationOutcome.' end warning do assert @instance.server_capabilities&.search_documented?('AllergyIntolerance'), %(Server returned a status of 400 with an OperationOutcome, but the search interaction for this resource is not documented in the CapabilityStatement. If this response was due to the server requiring a status parameter, the server must document this requirement in its CapabilityStatement.) end ['active', 'inactive', 'resolved'].each do |status_value| params_with_status = search_param.merge('clinical-status': status_value) reply = get_resource_by_params(versioned_resource_class('AllergyIntolerance'), params_with_status) assert_response_ok(reply) assert_bundle_response(reply) entries = reply.resource.entry.select { |entry| entry.resource.resourceType == 'AllergyIntolerance' } next if entries.blank? search_param.merge!('clinical-status': status_value) break end reply end def patient_ids @instance.patient_ids.split(',').map(&:strip) end @resources_found = false test :search_by_patient do metadata do id '01' name 'Server returns valid results for AllergyIntolerance search by patient.' link 'https://www.hl7.org/fhir/us/core/CapabilityStatement-us-core-server.html' description %( A server SHALL support searching by patient on the AllergyIntolerance resource. This test will pass if resources are returned and match the search criteria. If none are returned, the test is skipped. Because this is the first search of the sequence, resources in the response will be used for subsequent tests. ) versions :r4 end skip_if_known_search_not_supported('AllergyIntolerance', ['patient']) @allergy_intolerance_ary = {} patient_ids.each do |patient| search_params = { 'patient': patient } reply = get_resource_by_params(versioned_resource_class('AllergyIntolerance'), search_params) reply = perform_search_with_status(reply, search_params) if reply.code == 400 assert_response_ok(reply) assert_bundle_response(reply) any_resources = reply&.resource&.entry&.any? { |entry| entry&.resource&.resourceType == 'AllergyIntolerance' } next unless any_resources @allergy_intolerance_ary[patient] = fetch_all_bundled_resources(reply, check_for_data_absent_reasons) @allergy_intolerance = @allergy_intolerance_ary[patient] .find { |resource| resource.resourceType == 'AllergyIntolerance' } @resources_found = @allergy_intolerance.present? 
save_resource_references(versioned_resource_class('AllergyIntolerance'), @allergy_intolerance_ary[patient]) save_delayed_sequence_references(@allergy_intolerance_ary[patient], USCore310AllergyintoleranceSequenceDefinitions::DELAYED_REFERENCES) validate_reply_entries(@allergy_intolerance_ary[patient], search_params) search_params = search_params.merge('patient': "Patient/#{patient}") reply = get_resource_by_params(versioned_resource_class('AllergyIntolerance'), search_params) assert_response_ok(reply) assert_bundle_response(reply) search_with_type = fetch_all_bundled_resources(reply, check_for_data_absent_reasons) assert search_with_type.length == @allergy_intolerance_ary[patient].length, 'Expected search by Patient/ID to have the same results as search by ID' end skip_if_not_found(resource_type: 'AllergyIntolerance', delayed: false) end test :search_by_patient_clinical_status do metadata do id '02' name 'Server returns valid results for AllergyIntolerance search by patient+clinical-status.' link 'https://www.hl7.org/fhir/us/core/CapabilityStatement-us-core-server.html' optional description %( A server SHOULD support searching by patient+clinical-status on the AllergyIntolerance resource. This test will pass if resources are returned and match the search criteria. If none are returned, the test is skipped. ) versions :r4 end skip_if_known_search_not_supported('AllergyIntolerance', ['patient', 'clinical-status']) skip_if_not_found(resource_type: 'AllergyIntolerance', delayed: false) resolved_one = false patient_ids.each do |patient| search_params = { 'patient': patient, 'clinical-status': get_value_for_search_param(resolve_element_from_path(@allergy_intolerance_ary[patient], 'clinicalStatus') { |el| get_value_for_search_param(el).present? }) } next if search_params.any? { |_param, value| value.nil? } resolved_one = true reply = get_resource_by_params(versioned_resource_class('AllergyIntolerance'), search_params) validate_search_reply(versioned_resource_class('AllergyIntolerance'), reply, search_params) value_with_system = get_value_for_search_param(resolve_element_from_path(@allergy_intolerance_ary[patient], 'clinicalStatus'), true) token_with_system_search_params = search_params.merge('clinical-status': value_with_system) reply = get_resource_by_params(versioned_resource_class('AllergyIntolerance'), token_with_system_search_params) validate_search_reply(versioned_resource_class('AllergyIntolerance'), reply, token_with_system_search_params) end skip 'Could not resolve all parameters (patient, clinical-status) in any resource.' unless resolved_one end test :read_interaction do metadata do id '03' name 'Server returns correct AllergyIntolerance resource from AllergyIntolerance read interaction' link 'https://www.hl7.org/fhir/us/core/CapabilityStatement-us-core-server.html' description %( A server SHALL support the AllergyIntolerance read interaction. ) versions :r4 end skip_if_known_not_supported(:AllergyIntolerance, [:read]) skip_if_not_found(resource_type: 'AllergyIntolerance', delayed: false) validate_read_reply(@allergy_intolerance, versioned_resource_class('AllergyIntolerance'), check_for_data_absent_reasons) end test :vread_interaction do metadata do id '04' name 'Server returns correct AllergyIntolerance resource from AllergyIntolerance vread interaction' link 'https://www.hl7.org/fhir/us/core/CapabilityStatement-us-core-server.html' optional description %( A server SHOULD support the AllergyIntolerance vread interaction. 
) versions :r4 end skip_if_known_not_supported(:AllergyIntolerance, [:vread]) skip_if_not_found(resource_type: 'AllergyIntolerance', delayed: false) validate_vread_reply(@allergy_intolerance, versioned_resource_class('AllergyIntolerance')) end test :history_interaction do metadata do id '05' name 'Server returns correct AllergyIntolerance resource from AllergyIntolerance history interaction' link 'https://www.hl7.org/fhir/us/core/CapabilityStatement-us-core-server.html' optional description %( A server SHOULD support the AllergyIntolerance history interaction. ) versions :r4 end skip_if_known_not_supported(:AllergyIntolerance, [:history]) skip_if_not_found(resource_type: 'AllergyIntolerance', delayed: false) validate_history_reply(@allergy_intolerance, versioned_resource_class('AllergyIntolerance')) end test 'Server returns Provenance resources from AllergyIntolerance search by patient + _revIncludes: Provenance:target' do metadata do id '06' link 'https://www.hl7.org/fhir/search.html#revinclude' description %( A Server SHALL be capable of supporting the following _revincludes: Provenance:target. This test will perform a search for patient + _revIncludes: Provenance:target and will pass if a Provenance resource is found in the reponse. ) versions :r4 end skip_if_known_revinclude_not_supported('AllergyIntolerance', 'Provenance:target') skip_if_not_found(resource_type: 'AllergyIntolerance', delayed: false) provenance_results = [] patient_ids.each do |patient| search_params = { 'patient': patient } search_params['_revinclude'] = 'Provenance:target' reply = get_resource_by_params(versioned_resource_class('AllergyIntolerance'), search_params) reply = perform_search_with_status(reply, search_params) if reply.code == 400 assert_response_ok(reply) assert_bundle_response(reply) provenance_results += fetch_all_bundled_resources(reply, check_for_data_absent_reasons) .select { |resource| resource.resourceType == 'Provenance' } end save_resource_references(versioned_resource_class('Provenance'), provenance_results) save_delayed_sequence_references(provenance_results, USCore310AllergyintoleranceSequenceDefinitions::DELAYED_REFERENCES) skip 'No Provenance resources were returned from this search' unless provenance_results.present? end test :validate_resources do metadata do id '07' name 'AllergyIntolerance resources returned from previous search conform to the US Core AllergyIntolerance Profile.' link 'http://hl7.org/fhir/us/core/StructureDefinition/us-core-allergyintolerance' description %( This test verifies resources returned from the first search conform to the [US Core AllergyIntolerance Profile](http://hl7.org/fhir/us/core/StructureDefinition/us-core-allergyintolerance). It verifies the presence of mandatory elements and that elements with required bindings contain appropriate values. CodeableConcept element bindings will fail if none of its codings have a code/system that is part of the bound ValueSet. Quantity, Coding, and code element bindings will fail if its code/system is not found in the valueset. This test also checks that the following CodeableConcepts with required ValueSet bindings include a code rather than just text: 'clinicalStatus' and 'verificationStatus' ) versions :r4 end skip_if_not_found(resource_type: 'AllergyIntolerance', delayed: false) test_resources_against_profile('AllergyIntolerance') do |resource| ['clinicalStatus', 'verificationStatus'].flat_map do |path| concepts = resolve_path(resource, path) next if concepts.blank? code_present = concepts.any? 
{ |concept| concept.coding.any? { |coding| coding.code.present? } } unless code_present # rubocop:disable Style/IfUnlessModifier "The CodeableConcept at '#{path}' is bound to a required ValueSet but does not contain any codes." end end.compact end end test 'All must support elements are provided in the AllergyIntolerance resources returned.' do metadata do id '08' link 'http://www.hl7.org/fhir/us/core/general-guidance.html#must-support' description %( US Core Responders SHALL be capable of populating all data elements as part of the query results as specified by the US Core Server Capability Statement. This will look through the AllergyIntolerance resources found previously for the following must support elements: * clinicalStatus * verificationStatus * code * patient ) versions :r4 end skip_if_not_found(resource_type: 'AllergyIntolerance', delayed: false) must_supports = USCore310AllergyintoleranceSequenceDefinitions::MUST_SUPPORTS missing_must_support_elements = must_supports[:elements].reject do |element| @allergy_intolerance_ary&.values&.flatten&.any? do |resource| value_found = resolve_element_from_path(resource, element[:path]) { |value| element[:fixed_value].blank? || value == element[:fixed_value] } value_found.present? end end missing_must_support_elements.map! { |must_support| "#{must_support[:path]}#{': ' + must_support[:fixed_value] if must_support[:fixed_value].present?}" } skip_if missing_must_support_elements.present?, "Could not find #{missing_must_support_elements.join(', ')} in the #{@allergy_intolerance_ary&.values&.flatten&.length} provided AllergyIntolerance resource(s)" @instance.save! end test 'Every reference within AllergyIntolerance resources can be read.' do metadata do id '09' link 'http://hl7.org/fhir/references.html' description %( This test will attempt to read the first 50 reference found in the resources from the first search. The test will fail if Inferno fails to read any of those references. ) versions :r4 end skip_if_known_not_supported(:AllergyIntolerance, [:search, :read]) skip_if_not_found(resource_type: 'AllergyIntolerance', delayed: false) validated_resources = Set.new max_resolutions = 50 @allergy_intolerance_ary&.values&.flatten&.each do |resource| validate_reference_resolutions(resource, validated_resources, max_resolutions) if validated_resources.length < max_resolutions end end end end end
45.586538
200
0.690888
61e6896bf2d2c33d096e1cfd79ff69ddc70d1b13
5,356
# frozen_string_literal: true require 'spec_helper' RSpec.describe Gitlab::SeatLinkData do subject do described_class.new( timestamp: timestamp, key: key, max_users: max_users, active_users: active_users ) end let_it_be(:timestamp) { Time.iso8601('2020-03-22T06:09:18Z') } let_it_be(:key) { 'key' } let_it_be(:max_users) { 11 } let_it_be(:active_users) { 5 } describe '#initialize' do let_it_be(:utc_time) { Time.utc(2020, 3, 12, 12, 00) } let_it_be(:license_start_date) { utc_time.to_date - 1.month } let_it_be(:current_license) { create_current_license(starts_at: license_start_date)} let_it_be(:max_before_today) { 15 } let_it_be(:yesterday_active_count) { 12 } let_it_be(:today_active_count) { 20 } before_all do create(:historical_data, recorded_at: license_start_date, active_user_count: 10) create(:historical_data, recorded_at: license_start_date + 1.day, active_user_count: max_before_today) create(:historical_data, recorded_at: utc_time - 1.day, active_user_count: yesterday_active_count) create(:historical_data, recorded_at: utc_time, active_user_count: today_active_count) end around do |example| travel_to(utc_time) { example.run } end context 'when passing no params' do subject { described_class.new } it 'returns object with default attributes set' do expect(subject).to have_attributes( timestamp: eq(utc_time), key: eq(current_license.data), max_users: eq(today_active_count), active_users: eq(today_active_count) ) end end context 'when passing params' do it 'returns object with given attributes set' do expect(subject).to have_attributes( timestamp: eq(timestamp), key: eq(key), max_users: eq(max_users), active_users: eq(active_users) ) end context 'when passing date param only' do subject { described_class.new(timestamp: utc_time - 1.day) } it 'returns object with attributes set using given date' do expect(subject).to have_attributes( timestamp: eq(utc_time - 1.day), key: eq(current_license.data), max_users: eq(max_before_today), active_users: eq(yesterday_active_count) ) end end end end describe '.to_json' do it { is_expected.to delegate_method(:to_json).to(:data) } it 'returns payload data as a JSON string' do expect(subject.to_json).to eq( { timestamp: timestamp.iso8601, date: timestamp.to_date.iso8601, license_key: key, max_historical_user_count: max_users, active_users: active_users }.to_json ) end end describe '#sync' do before do allow(subject).to receive(:should_sync_seats?).and_return(sync_seats) end context 'when ready to sync seats' do let(:sync_seats) { true } it 'performs the sync' do expect(SyncSeatLinkWorker).to receive(:perform_async) subject.sync end end context 'when not ready to sync seats' do let(:sync_seats) { false } it 'does not perform the sync' do expect(SyncSeatLinkWorker).not_to receive(:perform_async) subject.sync end end end describe '#should_sync_seats?' do let_it_be(:historical_data) { create(:historical_data, recorded_at: timestamp) } let(:license) { build(:license, :cloud) } before do allow(License).to receive(:current).and_return(license) end subject { super().should_sync_seats? 
} context 'when all the pre conditions are valid' do it { is_expected.to eq(true) } end context 'when license key is missing' do let(:license) { nil } it { is_expected.to be_falsey } end context 'when expires_at is not set' do let(:license) { build(:license, expires_at: nil) } it { is_expected.to be_falsey } end context 'cloud license' do context 'when license is trial' do let(:license) { build(:license, trial: true) } it { is_expected.to be_falsey } end context 'when timestamp is out of the range' do let(:timestamp) { license.starts_at - 1.day } it { is_expected.to be_falsey } end context 'when historical data not found' do before do historical_data.destroy! end it { is_expected.to eq(true) } end end context 'legacy license' do let(:license) { build(:license) } context 'when seat link is disabled' do before do allow(Settings.gitlab).to receive(:seat_link_enabled).and_return(false) end it { is_expected.to be_falsey } end context 'when license is trial' do let(:license) { build(:license, trial: true) } it { is_expected.to be_falsey } end context 'when timestamp is out of the range' do let(:timestamp) { license.starts_at - 1.day } it { is_expected.to be_falsey } end context 'when historical data not found' do before do historical_data.destroy! end it { is_expected.to eq(false) } end end end end
26.514851
108
0.634055
ff89570645f923055ef0a228be1f9af00ef797df
2,671
require 'spec_helper' describe Entity do describe :search do SEARCH_ORG_SCHEME = { 'attr' => [ {'name' => 'name', 'type' => 'str', 'ind' => true}, ] } SEARCH_PERSON_SCHEME = { 'attr' => [ {'name' => 'last_name', 'type' => 'str', 'ind' => true}, {'name' => 'first_name', 'type' => 'str', 'ind' => true}, {'name' => 'yr', 'type' => 'int', 'ind' => true}, ], 'rel' => [ {'name' => 'employer', 'target' => 'org', 'type' => '0n'}, ], 'header' => ['last_name', 'first_name'], } SEARCH_BOOK_SCHEME = { 'attr' => [ {'name' => 'name', 'type' => 'str', 'ind' => true}, ], 'rel' => [ {'name' => 'author', 'target' => 'person', 'type' => '0n'}, ], } before(:all) do sqlclear @e = Engine.new(CREDENTIALS) @org = Entity.new('org', SEARCH_ORG_SCHEME) @e.entity_create(@org) @person = Entity.new('person', SEARCH_PERSON_SCHEME) @e.entity_create(@person) @book = Entity.new('book', SEARCH_BOOK_SCHEME) @e.entity_create(@book) end it 'can insert sample records' do @org.insert({'name' => 'Princeton'}) @org.insert({'name' => 'Stanford'}) @org.insert({'name' => 'Harvard'}) @org.insert({'name' => 'Bell Labs'}) @person.insert({'last_name' => 'Knuth', 'first_name' => 'Donald', 'yr' => 1938, 'employer' => [2]}) @person.insert({'last_name' => 'Ritchie', 'first_name' => 'Dennis', 'yr' => 1941, 'employer' => [3, 4]}) @person.insert({'last_name' => 'Kernighan', 'first_name' => 'Brian', 'yr' => 1942, 'employer' => [1, 4]}) @book.insert({'name' => 'The Art of Computer Programming', 'author' => [1]}) @book.insert({'name' => 'The C Programming Language', 'author' => [2, 3]}) end it 'can do simple query without joins' do # What organizations end with "rd"? q = { fields: ['org._id', 'org.name'], where: [ ['org.name', 'ends', 'rd'], ], order: ['org.name'], } expect(@org.search(q)).to eq([ {'org._id' => 3, 'org.name' => 'Harvard'}, {'org._id' => 2, 'org.name' => 'Stanford'}, ]) end it 'can do one-step backward join' do # What organizations employed Dennis Ritchie? q = { fields: ['org._id', 'org.name'], where: [ ['person.last_name', 'eq', 'Ritchie'], ], order: ['org.name'], } expect(@org.search(q)).to eq([ {'org._id' => 4, 'org.name' => 'Bell Labs'}, {'org._id' => 3, 'org.name' => 'Harvard'}, ]) end end end
29.351648
111
0.481093
f8ba5ab16c22bd934b8b5f5bb2436df40c12c540
2,098
# mundi_api
#
# This file was automatically generated by APIMATIC v2.0 (
# https://apimatic.io ).

module MundiApi
  # GetAutomaticAnticipationResponse Model.
  class GetAutomaticAnticipationResponse < BaseModel
    # TODO: Write general description for this method
    # @return [Boolean]
    attr_accessor :enabled

    # TODO: Write general description for this method
    # @return [String]
    attr_accessor :type

    # TODO: Write general description for this method
    # @return [Integer]
    attr_accessor :volume_percentage

    # TODO: Write general description for this method
    # @return [Integer]
    attr_accessor :delay

    # TODO: Write general description for this method
    # @return [List of Integer]
    attr_accessor :days

    # A mapping from model property names to API property names.
    def self.names
      @_hash = {} if @_hash.nil?
      @_hash['enabled'] = 'enabled'
      @_hash['type'] = 'type'
      @_hash['volume_percentage'] = 'volume_percentage'
      @_hash['delay'] = 'delay'
      @_hash['days'] = 'days'
      @_hash
    end

    def initialize(enabled = nil,
                   type = nil,
                   volume_percentage = nil,
                   delay = nil,
                   days = nil)
      @enabled = enabled
      @type = type
      @volume_percentage = volume_percentage
      @delay = delay
      @days = days
    end

    # Creates an instance of the object from a hash.
    def self.from_hash(hash)
      return nil unless hash

      # Extract variables from the hash.
      enabled = hash['enabled']
      type = hash['type']
      volume_percentage = hash['volume_percentage']
      delay = hash['delay']
      days = hash['days']

      # Create object from extracted values.
      GetAutomaticAnticipationResponse.new(enabled,
                                           type,
                                           volume_percentage,
                                           delay,
                                           days)
    end
  end
end
29.138889
65
0.558627
61070fa6f144063756632179a321f665bb09b7c0
342
require 'spec_helper'

describe 'plugin uninstall' do
  context 'with the aws plugin installed' do
    before(:each) do
      run! 'kontena plugin install aws'
    end

    it 'removes installed plugin' do
      run! 'kontena plugin uninstall aws'
      k = run! 'kontena plugin ls'
      expect(k.out).to_not match(/aws/)
    end
  end
end
20.117647
44
0.654971
01b9abe8bfbece0fefcd16bc31c7c31ba7c683d1
67
class Events::NoteOnWorkspaceDatasetAccess < Events::NoteAccess
end
33.5
63
0.865672
abba83190ab51f6d2f69e4d81227f6435a0b2e45
628
class Exodriver < Formula
  desc "Thin interface to LabJack devices"
  homepage "https://labjack.com/support/linux-and-mac-os-x-drivers"
  url "https://github.com/labjack/exodriver/archive/v2.5.3.tar.gz"
  sha256 "24cae64bbbb29dc0ef13f482f065a14d075d2e975b7765abed91f1f8504ac2a5"
  head "https://github.com/labjack/exodriver.git"

  option :universal

  depends_on "libusb"

  def install
    ENV.universal_binary if build.universal?
    cd "liblabjackusb"
    system "make", "-f", "Makefile", "DESTINATION=#{lib}",
           "HEADER_DESTINATION=#{include}", "install"
  end
end
27.304348
75
0.673567
79870eba7a8218e566f9292b9c0702b3d5f508ff
223
module SystemsAnalysisReport
  module Models
    SystemTemperature = Struct.new(:supply, :return, :mixed_air, :fan_heat_temperature_difference) do
      include Models::Model

      def validate
      end
    end
  end
end
20.272727
101
0.717489
f7f1e5205afe17cf5471d33f6223d558d01b29cf
192
class ToolSerializer < ActiveModel::Serializer
  attributes :id, :name, :status

  def status
    active = ToolSlot.where(tool_id: object.id).any?
    active ? "active" : "inactive"
  end
end
21.333333
52
0.697917
014769c8fbd1da389e2d1dd18564f3fc5d377868
2,418
class Diamond < Formula desc "Accelerated BLAST compatible local sequence aligner" homepage "https://www.wsi.uni-tuebingen.de/lehrstuehle/algorithms-in-bioinformatics/software/diamond/" url "https://github.com/bbuchfink/diamond/archive/v2.0.14.tar.gz" sha256 "3eaef2b957e4ba845eac27a2ca3249aae4259ff1fe0ff5a21b094481328fdc53" license "GPL-3.0-or-later" bottle do sha256 cellar: :any_skip_relocation, arm64_monterey: "b25c5a7ba1e13f49d35da8b4900cb32cd0c2f7b4fe50c817c13d8b3e34f0ca60" sha256 cellar: :any_skip_relocation, arm64_big_sur: "ab37a61dbad817a0e4dca0310e0473420fde013ffc15f7af5725cde4be961cb0" sha256 cellar: :any_skip_relocation, monterey: "ce33ea102c71dd819f4f9d9e3ba5ebffaa259de08e768c1ceeebec4a42684628" sha256 cellar: :any_skip_relocation, big_sur: "72fb7f8ebf9a4ab1cc82ff79b935d37131c00e8cbbae8d9fe159d781d93b30f7" sha256 cellar: :any_skip_relocation, catalina: "431477a165d719fa67ece1e9bedea04c0377d6a7d4513f631d8bf1ad2e9d859f" sha256 cellar: :any_skip_relocation, x86_64_linux: "2d2e5729a550d2d2394448aaebd968875ecacef041789a0bab314e398701bfe8" end depends_on "cmake" => :build uses_from_macos "zlib" def install system "cmake", ".", *std_cmake_args system "make", "install" end test do (testpath/"nr.faa").write <<~EOS >gnl|alu|HSU14568_Alu_Sb_consensus_rf1 grarwltpvipalweaeaggsrgqeietilantvkprlyXkyknXpgvvagacspsysgg XgrrmaXtreaelavsrdratalqpgrqsetpsqkk >gnl|alu|HSU14568_Alu_Sb_consensus_rf2 agrggsrlXsqhfgrprradhevrrsrpswltrXnpvstkntkisrawwrapvvpatrea eagewrepgrrslqXaeiaplhsslgdrarlrlkk >gnl|alu|HSU14568_Alu_Sb_consensus_rf3 pgavahacnpstlggrggritrsgdrdhpgXhgetpsllkiqklagrgggrlXsqllgrl rqengvnpgggacseprsrhctpawaterdsvskk >gnl|alu|HSU14568_Alu_Sb_consensus_rf-1 fflrrslalsprlecsgaisahcklrlpgsrhspasasrvagttgarhharlifvflvet gfhrvsqdgldlltsXsarlglpkcwdyrrepprpa >gnl|alu|HSU14568_Alu_Sb_consensus_rf-2 ffXdgvslcrpgwsavarsrltassasrvhaillpqppeXlglqapattpgXflyfXXrr gftvlarmvsisXprdppasasqsagitgvshrar >gnl|alu|HSU14568_Alu_Sb_consensus_rf-3 ffetesrsvaqagvqwrdlgslqapppgftpfsclslpsswdyrrppprpanfcifsrdg vspcXpgwsrspdlvirpprppkvlglqaXatapg EOS output = shell_output("#{bin}/diamond makedb --in nr.faa -d nr 2>&1") assert_match "Database sequences 6\n Database letters 572", output end end
47.411765
123
0.806038
79edea6220f52bb2aab7f2200be3e697d82072ee
233
json.extract! player, :id, :census_plus_datum_id, :server_id, :user, :faction, :race, :klass, :name, :level, :guild_id, :guild_name, :guild_rank, :guild_rank_index, :created_at, :updated_at
json.url player_url(player, format: :json)
77.666667
189
0.746781
ac7d6e68bedb1e5c889978f5b0112fcbfe6fe27a
124
RSpec.configure do |config|
  config.before(:each) do
    FileUtils.rm_rf(Figgy.config["disk_preservation_path"])
  end
end
20.666667
59
0.75
62167ceb4abb19c2b8fb473f56b8dfb7b1ab7d11
528
Rails.application.routes.draw do
  resources :order_details
  resources :food_items do
  end

  root 'home#index'
  post 'orders' => 'orders#create'
  get 'orders/:food_item_id' => 'orders#new'
  get 'orders' => "orders#new"
  get 'order_details/add/:id' => "order_details#add"
  get 'search' => 'home#menu'
  get 'detail' => 'order_details#show'
  get 'menu' => 'home#menu'
  get 'contact_us' => 'home#contact_us'

  # For details on the DSL available within this file, see http://guides.rubyonrails.org/routing.html
end
22
101
0.683712
21fd502838ccb0f89c284ede3535895151b4d5ae
1,118
module SolidusWalletBackport
  module AppConfigurationDecorator
    # Allows providing your own class for adding default payments to a user's
    # order from their "wallet".
    #
    # @!attribute [rw] default_payment_builder_class
    #   @return [Class] a class with the same public interfaces as
    #     Spree::Wallet::DefaultPaymentBuilder.
    attr_writer :default_payment_builder_class
    def default_payment_builder_class
      @default_payment_builder_class ||= Spree::Wallet::DefaultPaymentBuilder
    end

    # Allows providing your own class for adding payment sources to a user's
    # "wallet" after an order moves to the complete state.
    #
    # @!attribute [rw] add_payment_sources_to_wallet_class
    #   @return [Class] a class with the same public interfaces
    #     as Spree::Wallet::AddPaymentSourcesToWallet.
    attr_writer :add_payment_sources_to_wallet_class
    def add_payment_sources_to_wallet_class
      @add_payment_sources_to_wallet_class ||= Spree::Wallet::AddPaymentSourcesToWallet
    end
  end
end

Spree::AppConfiguration.prepend SolidusWalletBackport::AppConfigurationDecorator
39.928571
87
0.765653
918dd8b4e84caf7e92ed5fc5a4b942af3cc1d392
1,106
control 'os-family-version' do
  describe os.family do
    it { should eq 'debian' }
  end

  describe os.release do
    it { should eq '16.04' }
  end

  describe os.name do
    it { should eq 'ubuntu' }
  end
end

control 'nomad' do
  describe file('/usr/bin/nomad') do
    it { should exist }
    it { should be_file }
    it { should_not be_directory }
    it { should be_owned_by 'root' }
    its('mode') { should cmp '00755' }
  end
end

control 'nomad-client-drivers' do
  describe package('docker.io') do
    it { should be_installed }
  end

  describe package('default-jre') do
    it { should be_installed }
  end
end

control 'cfssl' do
  describe file('/usr/local/bin/cfssl') do
    it { should exist }
    its('owner') { should eq 'root' }
    its('mode') { should cmp '493' }
  end

  describe file('/usr/local/bin/cfssl-certinfo') do
    it { should exist }
    its('owner') { should eq 'root' }
    its('mode') { should cmp '493' }
  end

  describe file('/usr/local/bin/cfssljson') do
    it { should exist }
    its('owner') { should eq 'root' }
    its('mode') { should cmp '493' }
  end
end
23.041667
51
0.622061
33f0fb26c32abcf0ade30326451b437cff4359c3
236
class CreateTaskComments < ActiveRecord::Migration
  def change
    create_table :task_comments do |t|
      t.string :text

      t.timestamps
    end
    add_reference :task_comments, :task, references: :tasks, index: true
  end
end
19.666667
72
0.70339
013a712b91e91521d894e3c0de9cc192a5e06cfa
193
require 'pusher'

Pusher.app_id = '906931'
Pusher.key = '840146084389f2f4d94a'
Pusher.secret = 'f2d01c1ebfe0d3dfb852'
Pusher.cluster = 'eu'
Pusher.logger = Rails.logger
Pusher.encrypted = true
21.444444
38
0.772021
87da8e29631809f2c50bbe7f5feb2853180d29bc
364
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.

module Azure::StorSimple8000Series::Mgmt::V2017_06_01
  module Models
    #
    # Defines values for OwnerShipStatus
    #
    module OwnerShipStatus
      Owned = "Owned"
      NotOwned = "NotOwned"
    end
  end
end
22.75
70
0.708791
1d9100bbbaed117c105935d6ca0e7feaef26050a
58
module Gdal
  module Ruby
    VERSION = "0.0.7"
  end
end
9.666667
21
0.62069
21896b0f2cc81e79da908b104587a6c96299bfe1
2,798
=begin #Selling Partner API for Catalog Items #The Selling Partner API for Catalog Items provides programmatic access to information about items in the Amazon catalog. For more information, see the [Catalog Items API Use Case Guide](https://github.com/amzn/selling-partner-api-docs/blob/main/guides/en-US/use-case-guides/catalog-items-api-use-case-guide/catalog-items-api-use-case-guide_2020-12-01.md). OpenAPI spec version: 2020-12-01 Generated by: https://github.com/swagger-api/swagger-codegen.git Swagger Codegen version: 3.0.24 =end require 'spec_helper' require 'json' require 'date' # Unit tests for AmzSpApi::CatalogItemsApiModel::Item # Automatically generated by swagger-codegen (github.com/swagger-api/swagger-codegen) # Please update as you see appropriate describe 'Item' do before do # run before each test @instance = AmzSpApi::CatalogItemsApiModel::Item.new end after do # run after each test end describe 'test an instance of Item' do it 'should create an instance of Item' do expect(@instance).to be_instance_of(AmzSpApi::CatalogItemsApiModel::Item) end end describe 'test attribute "asin"' do it 'should work' do # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers end end describe 'test attribute "attributes"' do it 'should work' do # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers end end describe 'test attribute "identifiers"' do it 'should work' do # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers end end describe 'test attribute "images"' do it 'should work' do # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers end end describe 'test attribute "product_types"' do it 'should work' do # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers end end describe 'test attribute "sales_ranks"' do it 'should work' do # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers end end describe 'test attribute "summaries"' do it 'should work' do # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers end end describe 'test attribute "variations"' do it 'should work' do # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers end end describe 'test attribute "vendor_details"' do it 'should work' do # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers end end end
31.438202
357
0.725161
bfcdf11c8a1e3a457206ab27875fff35f9564ff6
798
# frozen_string_literal: true

require 'dotenv'
Dotenv.load

environment = ENV.fetch('RACK_ENV', 'development')

require 'bundler/setup'
Bundler.require :default, ENV.fetch('RACK_ENV', 'development')

require 'rollbar'

Rollbar.configure do |config|
  config.access_token = ENV['ROLLBAR_ACCESS_TOKEN']
  config.environment = ENV.fetch 'RACK_ENV', 'development'
  config.host = ENV.fetch 'HOST', 'localhost'
end

require 'pathname'

$root = Pathname.new File.expand_path('..', __dir__)
$LOAD_PATH.unshift $root.join('lib').to_s

log_file = $root.join('log', "#{environment}.log")
FileUtils.mkdir_p $root.join('log')
FileUtils.touch log_file

Arctic.logger = Logger.new log_file
Arctic.logger.level = Logger::INFO

module CDON
  autoload :Country, 'cdon/country'
end

require 'cdon/v2'
require 'cdon/v1'
24.181818
62
0.746867
ac7055c7419e3a56c651cce7528dbd5bd357f222
4,091
class Feed require 'osub' require 'opub' require 'nokogiri' include MongoMapper::Document # Feed url (and an indicator that it is local) key :remote_url, String # OStatus subscriber information key :verify_token, String key :secret, String # For both pubs and subs, it needs to know # what hubs are communicating with it key :hubs, Array belongs_to :author many :updates one :user after_create :default_hubs def populate # TODO: More entropy would be nice self.verify_token = Digest::MD5.hexdigest(rand.to_s) self.secret = Digest::MD5.hexdigest(rand.to_s) f = OStatus::Feed.from_url(url) avatar_url = f.icon if avatar_url == nil avatar_url = f.logo end a = f.author self.author = Author.create(:name => a.portable_contacts.display_name, :username => a.name, :email => a.email, :url => a.uri, :image_url => avatar_url) self.hubs = f.hubs populate_entries(f.entries) save end def populate_entries(os_entries) os_entries.each do |entry| u = Update.first(:url => entry.url) if u.nil? u = Update.create(:author => self.author, :created_at => entry.published, :url => entry.url, :updated_at => entry.updated) self.updates << u save end # Strip HTML u.text = Nokogiri::HTML::Document.parse(entry.content).text u.save end end # Pings hub # needs absolute url for feed to give to hub for callback def ping_hubs(feed_url) OPub::Publisher.new(feed_url, hubs).ping_hubs end def local url.start_with?("/") end def url if remote_url.nil? "/feeds/#{id}" else remote_url end end def update_entries(atom_xml, callback_url, feed_url, signature) sub = OSub::Subscription.new(callback_url, feed_url, self.secret) if sub.verify_content(atom_xml, signature) os_feed = OStatus::Feed.from_string(atom_xml) # TODO: # Update author if necessary # Update entries populate_entries(os_feed.entries) end end # Set default hubs def default_hubs self.hubs << "http://pubsubhubbub.appspot.com/publish" save end # create atom feed # need base_uri since urls outgoing should be absolute def atom(base_uri) # Create the OStatus::PortableContacts object poco = OStatus::PortableContacts.new(:id => author.id, :display_name => author.name, :preferred_username => author.username) # Create the OStatus::Author object os_auth = OStatus::Author.new(:name => author.username, :email => author.email, :uri => author.website, :portable_contacts => poco) # Gather entries as OStatus::Entry objects entries = updates.sort{|a, b| b.created_at <=> a.created_at}.map do |update| OStatus::Entry.new(:title => update.text, :content => update.text, :updated => update.updated_at, :published => update.created_at, :id => update.id, :link => { :href => ("#{base_uri}updates/#{update.id.to_s}")}) end # Create a Feed representation which we can generate # the Atom feed and send out. feed = OStatus::Feed.from_data("#{base_uri}feeds/#{id}.atom", :title => "#{author.username}'s Updates", :id => "#{base_uri}feeds/#{id}.atom", :author => os_auth, :entries => entries, :links => { :hub => [{:href => hubs.first}] }) feed.atom end end
28.409722
87
0.539232
e893a59dc42f66faf8d468b9365a9a0e19f745f1
2,976
describe NOVAHawk::Providers::Redhat::InfraManager::ProvisionViaIso do context "A new provision request," do before(:each) do @os = OperatingSystem.new(:product_name => 'Microsoft Windows') @admin = FactoryGirl.create(:user_admin) @target_vm_name = 'clone test' @options = { :pass => 1, :vm_name => @target_vm_name, :vm_target_name => @target_vm_name, :number_of_vms => 1, :cpu_limit => -1, :cpu_reserve => 0, :provision_type => "iso" } end context "RHEV-M provisioning" do before(:each) do @ems = FactoryGirl.create(:ems_redhat_with_authentication) @vm_template = FactoryGirl.create(:template_redhat, :name => "template1", :ext_management_system => @ems, :operating_system => @os, :cpu_limit => -1, :cpu_reserve => 0) @vm = FactoryGirl.create(:vm_redhat, :name => "vm1", :location => "abc/def.vmx") @pr = FactoryGirl.create(:miq_provision_request, :requester => @admin, :src_vm_id => @vm_template.id) @options[:src_vm_id] = [@vm_template.id, @vm_template.name] @vm_prov = FactoryGirl.create(:miq_provision_redhat_via_iso, :userid => @admin.userid, :miq_request => @pr, :source => @vm_template, :request_type => 'template', :state => 'pending', :status => 'Ok', :options => @options) end context "#prepare_for_clone_task" do before do @ems_cluster = FactoryGirl.create(:ems_cluster, :ems_ref => "test_ref") allow(@vm_prov).to receive(:dest_cluster).and_return(@ems_cluster) end it "with default options" do clone_options = @vm_prov.prepare_for_clone_task expect(clone_options[:clone_type]).to eq(:skeletal) end it "with linked-clone true" do @vm_prov.options[:linked_clone] = true clone_options = @vm_prov.prepare_for_clone_task expect(clone_options[:clone_type]).to eq(:skeletal) end it "with linked-clone false" do @vm_prov.options[:linked_clone] = false clone_options = @vm_prov.prepare_for_clone_task expect(clone_options[:clone_type]).to eq(:skeletal) end end context "#provision_completed" do before do @vm_prov.destination = @vm end it "when phase is poll_destination_powered_off_in_vmdb" do @vm_prov.phase = "poll_destination_powered_off_in_vmdb" expect(@vm).to receive(:stop) @vm_prov.provision_completed expect(@vm_prov.phase).to eq("poll_destination_powered_off_in_vmdb") end it "when phase is not poll_destination_powered_off_in_vmdb" do @vm_prov.phase = "post_provision" expect(@vm).not_to receive(:stop) @vm_prov.provision_completed expect(@vm_prov.phase).to eq("post_provision") end end end end end
38.153846
229
0.620296
114bf501889d91e34a12397ba9787d81294ebbd4
822
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'tap_as/version'

Gem::Specification.new do |spec|
  spec.name          = 'tap_as'
  spec.version       = TapAs::VERSION
  spec.authors       = ['kbaba1001']
  spec.email         = ['[email protected]']
  spec.summary       = '`Object#as` define as `tap {|obj| break yield(obj) }`'
  spec.description   = '`Object#as` define as `tap {|obj| break yield(obj) }`'
  spec.homepage      = 'https://github.com/kbaba1001/tap_as'
  spec.license       = 'MIT'

  spec.files         = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
  spec.bindir        = 'exe'
  spec.executables   = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ['lib']
end
37.363636
104
0.609489
87c83aed49abe4ca7baeef14690089af6d72c78a
308
# frozen_string_literal: true

class AddOrderDetailToOccupancy < ActiveRecord::Migration[4.2]

  def change
    add_column :secure_rooms_occupancies, :order_detail_id, :integer
    add_index :secure_rooms_occupancies, :order_detail_id
    add_foreign_key :secure_rooms_occupancies, :order_details
  end

end
25.666667
68
0.808442
ffadc7cabff29d89a85fd1da6d23eae4ef14a001
804
# frozen_string_literal: true

# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

require "active_support/inflector"

module FilepathHelper
  def ruby_file_path api
    ruby_require(api) + ".rb"
  end

  def ruby_require api
    api.address.map(&:underscore).join("/")
  end
end
28.714286
74
0.752488
3879a0d539412f494ae18985bf34760b5cfb41dd
2,068
######################################################################
# Copyright (c) 2008-2013, Alliance for Sustainable Energy.
# All rights reserved.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
######################################################################

######################################################################
# == Synopsis
#
#   Marks all measures found in a directory as clean.
#
# == Usage
#
#   ruby MarkAllMeasuresInDirClean.rb ARGV[0]
#
#   ARGV[0] - Path to directory containing measures
#
# == Examples
#
#   ruby MarkAllMeasuresInDirClean.rb 'C:\path\to\measures\'
#
######################################################################

require 'openstudio'

dir = ARGV[0].gsub("\\", "/")

if not dir or not File.directory?(dir)
  puts "Script requires argument which is path to directory containing measures"
  exit(false)
end

puts "Marking measures found in '#{dir}' as clean"

num_measures = 0
Dir.glob("#{dir}/*/") do |measure_dir|
  puts measure_dir
  if File.directory?("#{measure_dir}")
    measure = OpenStudio::BCLMeasure::load(OpenStudio::Path.new("#{measure_dir}"))
    if measure.empty?
      puts "Directory #{measure_dir} is not a measure"
    else
      measure = measure.get
      measure.checkForUpdates
      measure.save
    end
  end
end
33.354839
83
0.59381
91256e4cbae89c230ee38a33ce7f99ca0538086d
2,449
# == Schema Information # # Table name: recipes # # id :integer not null, primary key # name :string # instructions :string # image :string # created_at :datetime not null # updated_at :datetime not null # class RecipesController < ApplicationController before_action :require_login before_action :set_recipe, only: [:show, :edit, :update, :add_to_cart, :remove_from_cart] def index if params[:ingredient].present? # use heroku vars edamam = EdamamService.new body = edamam.recipes(params[:ingredient]) Recipe.find_or_create_from_api(body) @recipes = Recipe.filter_by_ingredient(params[:ingredient]) end recipes = Recipe.all respond_to do |format| format.html { render :index } format.json { render json: recipes } end end def show respond_to do |format| format.html { render :show } format.json { render json: @recipe } end end def new @recipe = Recipe.new 5.times {@recipe.recipe_ingredients.build} end def create @recipe = Recipe.new(recipe_params) if @recipe.save render json: @recipe, status: 201 else render :new end end def edit end def update if @recipe.update(recipe_params) redirect_to recipe_path(@recipe) else render :edit end end def destroy end def add_to_cart CartRecipe.find_or_create_by(cart_id: session[:cart_id], recipe_id: params[:id]) flash[:notice] = "Recipe successfully added" redirect_to recipe_path(@recipe) end def remove_from_cart cart = Cart.find_by(:id => session[:cart_id]) cart_recipe = CartRecipe.find_by(cart_id: cart.id, recipe_id: @recipe.id) cart_recipe.destroy flash[:notice] = "Recipe successfully removed" redirect_to cart_path end private def set_recipe @recipe = Recipe.find_by(:id => params[:id]) end def recipe_params params.require(:recipe).permit(:name, :content, ingredient_ids: [], recipe_ingredients_attributes: [:quantity, :ingredient_id, ingredient_attributes: [:name]]) end end
24.009804
167
0.581053
61c7eeabc41866e073d58ee7edff0d13c16eb4aa
649
# encoding: utf-8

module CslCli
  class Tokenstore
    @path = ""
    @saved_token = nil
    @tokenfile = nil

    attr_reader :path

    def initialize(path)
      @path = path
      @tokenfile = File.join(@path, '.csl_token')
    end

    def load
      file = File.open(@tokenfile, 'rb')
      @saved_token = file.read()
      file.close
      return @saved_token
    end

    def store(token)
      file = File.open(@tokenfile, 'w')
      file.write(token)
      file.close
      File.chmod(0600, @tokenfile)
      return true
    end

    def clear
      File.delete(@tokenfile) if File.exist?(@tokenfile)
      return true
    end
  end
end
17.078947
56
0.583975
ff82e51e1a1d1c59cea9127d3edd57886fd12e12
9,352
module Export module Helpers # Helper methods, for inclusion into rake tasks module RakeHelper # Helpers to extract death records module DeathExtractor # Interactively pick a weekly batch of death data # If optional week parameter is supplied (in YYYY-MM-DD format), then this runs in # batch mode, and returns the latest data for that week, or raises an exception if no # batch for that month is available. def self.pick_mbis_weekly_death_batch(desc, fname_patterns, weeks_ago: 4, week: nil, logger: ActiveSupport::Logger.new($stdout)) if week && !/\A[0-9]{4}-[0-9]{2}-[0-9]{2}\z/.match?(week) raise(ArgumentError, "Invalid week #{week}, expected YYYY-MM-DD") end date0 = if week Date.strptime(week, '%Y-%m-%d') else Date.current - weeks_ago * 7 end weekly_death_re = /MBIS(WEEKLY_Deaths_D|_20)([0-9]{6}).txt/ # TODO: Refactor batch_scope = EBatch.imported.where(e_type: 'PSDEATH') dated_batches = batch_scope.all.collect do |eb| next unless weekly_death_re =~ eb.original_filename date = Date.strptime(Regexp.last_match(2), '%y%m%d') next if date < date0 [date, fname_patterns.collect { |s| date.strftime(s) }, eb] end.compact.sort if week date, fnames, eb = dated_batches.reverse.find do |date, _, _| date.strftime('%Y-%m-%d') == week end raise "No batch found for week #{week}" unless date logger&.warn "Extracting week #{week} with e_batchid #{eb.id}" else puts "e_batchid: original MBIS filename -> #{desc} files" dated_batches.each do |_date, fnames, eb| puts format('%-9d: %s -> %s', eb.id, Pathname.new(eb.original_filename).basename, fnames[0..1].collect { |s| File.basename(s) }.join(', ')) end print 'Choose e_batchid to export, or enter "older" to show older batches: ' answer = STDIN.readline.chomp if answer == 'older' return pick_mbis_weekly_death_batch(desc, fname_patterns, weeks_ago: weeks_ago + 4) end e_batchid = answer.to_i date, fnames, eb = dated_batches.find { |_, _, eb2| eb2.id == e_batchid } end fnames&.each do |fname| raise "Not overwriting existing #{fname}" if File.exist?(SafePath.new('mbis_data'). join(fname)) end [date, fnames, eb] end def self.extract_mbis_weekly_death_file(e_batch, fname, klass, filter = nil) puts "Extracting #{fname}..." system('rake', e_batch.e_type == 'PSDEATH' ? 'export:death' : 'export:birth', "fname=#{fname.sub(%r{\Aextracts/}, '')}", # Remove extracts/ prefix for rake task "original_filename=#{e_batch.original_filename}", "klass=#{klass}", "filter=#{filter}") full_fname = SafePath.new('mbis_data').join(fname) File.exist?(full_fname) end # Which batch filenames should be included in this month's extract? 
# Returns a pair: a pattern for matching all the batches in the month, and a pattern # for the final batch to include in the current month's extract def self.monthly_batch_patterns(date) this_month_yymm = date.strftime('%y%m') last_month_yymm = (date - 1.month).strftime('%y%m') if date < Date.new(2019, 8, 1) # From the 3rd batch of last month, to the second batch of this month # Final batch will be received 8th-14th of the month [Regexp.new('/MBIS(WEEKLY_Deaths_D|_20)' \ "(#{last_month_yymm}(1[5-9]|[23][0-9])" \ "|#{this_month_yymm}(0[0-9]|1[0-4])).txt"), /(0[89]|1[0-4]).txt/] elsif date < Date.new(2019, 9, 1) # From the 3rd batch of last month, to the third batch of this month # Final batch will be received 15th-21st of the month [Regexp.new('/MBIS(WEEKLY_Deaths_D|_20)' \ "(#{last_month_yymm}(1[5-9]|[23][0-9])" \ "|#{this_month_yymm}([01][0-9]|2[0-1])).txt"), /(1[5-9]|2[0-1]).txt/] elsif date < Date.new(2019, 10, 1) # From the 4th batch of last month, to the fourth batch of this month # Final batch will be received 22nd-28th of the month [Regexp.new('/MBIS(WEEKLY_Deaths_D|_20)' \ "(#{last_month_yymm}(2[2-9]|3[0-1])" \ "|#{this_month_yymm}([01][0-9]|2[0-8])).txt"), /(2[2-8]).txt/] else # From the 5th batch of last month, to the fourth batch of this month # Final batch will be received 22nd-28th of the month [Regexp.new('/MBIS(WEEKLY_Deaths_D|_20)' \ "(#{last_month_yymm}(29|3[0-1])" \ "|#{this_month_yymm}([01][0-9]|2[0-8])).txt"), /(2[2-8]).txt/] end end # Interactively pick month's data, after the final MBIS weekly batch of each month # If optional month parameter is supplied (in YYYY-MM format), then this runs in # batch mode, and returns the latest data for that month, or raises an exception if no # batch for that month is available. def self.pick_mbis_monthly_death_batches(desc, fname_patterns, months_ago: 2, month: nil, logger: ActiveSupport::Logger.new($stdout)) month ||= ENV['month'] if month year0, month0 = /^([0-9]{4})-([0-9]{2})$/.match(month)[1..2].collect(&:to_i) months_ago = (Time.current.year - year0) * 12 + (Time.current.month - month0) end weekly_death_re = /MBIS(WEEKLY_Deaths_D|_20)([0-9]{6}).txt/ # TODO: Refactor batch_scope = EBatch.imported.where(e_type: 'PSDEATH') # Second batches would usually be those received 8th-14th of each month # and would be those from the previous month with a day-of-month of 15-31 # for the previous month and 1-14 for the current month. monthly_batches = (0..months_ago).collect do |n| pattern, final_batch_re = monthly_batch_patterns(n.month.ago) batches = batch_scope.all.select { |eb| eb.original_filename =~ pattern } # Ignore unless this contains a record from the final week of the month # (second week (8th to 14th) up to 2019-07-31, or from 2019-08-01 onwards, # a record from the third week (15th to 21st) of the month. next unless batches.any? { |eb| eb.original_filename =~ final_batch_re } batches.sort_by(&:e_batchid) # latest last end.compact dated_batches = monthly_batches.collect do |batches| next if batches.empty? 
eb = batches.last next unless weekly_death_re =~ eb.original_filename date = Date.strptime(Regexp.last_match(2), '%y%m%d') [date, fname_patterns.collect { |s| date.strftime(s) }, eb, batches] end.compact.sort if month date, fnames, _eb, batches = dated_batches.reverse.find do |date, _, _, _| date.strftime('%Y-%m') == month end raise "No batch found for month #{month}" unless date logger&.warn "Extracting month #{month} with e_batchids #{batches.collect(&:id)}" else puts "e_batchid: original MBIS filename -> #{desc} files" dated_batches.each do |_date, fnames, eb, batches| # next unless date >= 1.month.ago puts format('%-9d: %s -> %s', eb.id, Pathname.new(eb.original_filename).basename, fnames[0..1].join(', ')) (batches - [eb]).each do |eb2| puts " (+ #{Pathname.new(eb2.original_filename).basename})" end end print 'Choose e_batchid to export, or enter "older" to show older batches: ' answer = STDIN.readline.chomp if answer == 'older' return pick_mbis_monthly_death_batches(desc, fname_patterns, months_ago: months_ago + 3) end e_batchid = answer.to_i date, fnames, _eb, batches = dated_batches.find { |_, _, eb2, _| eb2.id == e_batchid } end fnames&.each do |fname| raise "Not overwriting existing #{fname}" if File.exist?(SafePath.new('mbis_data'). join(fname)) end [date, fnames, batches] end end end end end
50.27957
99
0.53518
7aa5eabae8f8d702301aeac590d17e32ecc915d7
8,975
require_relative "test_helper" class BoostTest < Minitest::Test # conversions def test_conversions store [ {name: "Tomato A", conversions: {"tomato" => 1}}, {name: "Tomato B", conversions: {"tomato" => 2}}, {name: "Tomato C", conversions: {"tomato" => 3}} ] assert_order "tomato", ["Tomato C", "Tomato B", "Tomato A"] assert_equal_scores "tomato", conversions: false end def test_multiple_conversions store [ {name: "Speaker A", conversions_a: {"speaker" => 1}, conversions_b: {"speaker" => 6}}, {name: "Speaker B", conversions_a: {"speaker" => 2}, conversions_b: {"speaker" => 5}}, {name: "Speaker C", conversions_a: {"speaker" => 3}, conversions_b: {"speaker" => 4}} ], Speaker assert_equal_scores "speaker", {conversions: false}, Speaker assert_equal_scores "speaker", {}, Speaker assert_equal_scores "speaker", {conversions: ["conversions_a", "conversions_b"]}, Speaker assert_equal_scores "speaker", {conversions: ["conversions_b", "conversions_a"]}, Speaker assert_order "speaker", ["Speaker C", "Speaker B", "Speaker A"], {conversions: "conversions_a"}, Speaker assert_order "speaker", ["Speaker A", "Speaker B", "Speaker C"], {conversions: "conversions_b"}, Speaker end def test_multiple_conversions_with_boost_term store [ {name: "Speaker A", conversions_a: {"speaker" => 4, "speaker_1" => 1}}, {name: "Speaker B", conversions_a: {"speaker" => 3, "speaker_1" => 2}}, {name: "Speaker C", conversions_a: {"speaker" => 2, "speaker_1" => 3}}, {name: "Speaker D", conversions_a: {"speaker" => 1, "speaker_1" => 4}} ], Speaker assert_order "speaker", ["Speaker A", "Speaker B", "Speaker C", "Speaker D"], {conversions: "conversions_a"}, Speaker assert_order "speaker", ["Speaker D", "Speaker C", "Speaker B", "Speaker A"], {conversions: "conversions_a", conversions_term: "speaker_1"}, Speaker end def test_conversions_case store [ {name: "Tomato A", conversions: {"tomato" => 1, "TOMATO" => 1, "tOmAtO" => 1}}, {name: "Tomato B", conversions: {"tomato" => 2}} ] assert_order "tomato", ["Tomato A", "Tomato B"] end # global boost def test_boost store [ {name: "Tomato A"}, {name: "Tomato B", orders_count: 10}, {name: "Tomato C", orders_count: 100} ] assert_order "tomato", ["Tomato C", "Tomato B", "Tomato A"], boost: "orders_count" end def test_boost_zero store [ {name: "Zero Boost", orders_count: 0} ] assert_order "zero", ["Zero Boost"], boost: "orders_count" end def test_conversions_weight Product.reindex store [ {name: "Product Boost", orders_count: 20}, {name: "Product Conversions", conversions: {"product" => 10}} ] assert_order "product", ["Product Conversions", "Product Boost"], boost: "orders_count" end def test_boost_fields store [ {name: "Red", color: "White"}, {name: "White", color: "Red Red Red"} ] assert_order "red", ["Red", "White"], fields: ["name^10", "color"] end def test_boost_fields_decimal store [ {name: "Red", color: "White"}, {name: "White", color: "Red Red Red"} ] assert_order "red", ["Red", "White"], fields: ["name^10.5", "color"] end def test_boost_fields_word_start store [ {name: "Red", color: "White"}, {name: "White", color: "Red Red Red"} ] assert_order "red", ["Red", "White"], fields: [{"name^10" => :word_start}, "color"] end # for issue #855 def test_apostrophes store_names ["Valentine's Day Special"] assert_search "Valentines", ["Valentine's Day Special"], fields: ["name^5"] assert_search "Valentine's", ["Valentine's Day Special"], fields: ["name^5"] assert_search "Valentine", ["Valentine's Day Special"], fields: ["name^5"] end def test_boost_by store [ {name: "Tomato A"}, {name: "Tomato B", 
orders_count: 10}, {name: "Tomato C", orders_count: 100} ] assert_order "tomato", ["Tomato C", "Tomato B", "Tomato A"], boost_by: [:orders_count] assert_order "tomato", ["Tomato C", "Tomato B", "Tomato A"], boost_by: {orders_count: {factor: 10}} end def test_boost_by_missing store [ {name: "Tomato A"}, {name: "Tomato B", orders_count: 10}, ] assert_order "tomato", ["Tomato A", "Tomato B"], boost_by: {orders_count: {missing: 100}} end def test_boost_by_boost_mode_multiply store [ {name: "Tomato A", found_rate: 0.9}, {name: "Tomato B"}, {name: "Tomato C", found_rate: 0.5} ] assert_order "tomato", ["Tomato B", "Tomato A", "Tomato C"], boost_by: {found_rate: {boost_mode: "multiply"}} end def test_boost_where store [ {name: "Tomato A"}, {name: "Tomato B", user_ids: [1, 2]}, {name: "Tomato C", user_ids: [3]} ] assert_first "tomato", "Tomato B", boost_where: {user_ids: 2} assert_first "tomato", "Tomato B", boost_where: {user_ids: 1..2} assert_first "tomato", "Tomato B", boost_where: {user_ids: [1, 4]} assert_first "tomato", "Tomato B", boost_where: {user_ids: {value: 2, factor: 10}} assert_first "tomato", "Tomato B", boost_where: {user_ids: {value: [1, 4], factor: 10}} assert_order "tomato", ["Tomato C", "Tomato B", "Tomato A"], boost_where: {user_ids: [{value: 1, factor: 10}, {value: 3, factor: 20}]} end def test_boost_where_negative_boost store [ {name: "Tomato A"}, {name: "Tomato B", user_ids: [2]}, {name: "Tomato C", user_ids: [2]} ] assert_first "tomato", "Tomato A", boost_where: {user_ids: {value: 2, factor: 0.5}} end def test_boost_by_recency store [ {name: "Article 1", created_at: 2.days.ago}, {name: "Article 2", created_at: 1.day.ago}, {name: "Article 3", created_at: Time.now} ] assert_order "article", ["Article 3", "Article 2", "Article 1"], boost_by_recency: {created_at: {scale: "7d", decay: 0.5}} end def test_boost_by_recency_origin store [ {name: "Article 1", created_at: 2.days.ago}, {name: "Article 2", created_at: 1.day.ago}, {name: "Article 3", created_at: Time.now} ] assert_order "article", ["Article 1", "Article 2", "Article 3"], boost_by_recency: {created_at: {origin: 2.days.ago, scale: "7d", decay: 0.5}} end def test_boost_by_distance store [ {name: "San Francisco", latitude: 37.7833, longitude: -122.4167}, {name: "San Antonio", latitude: 29.4167, longitude: -98.5000}, {name: "San Marino", latitude: 43.9333, longitude: 12.4667} ] assert_order "san", ["San Francisco", "San Antonio", "San Marino"], boost_by_distance: {field: :location, origin: [37, -122], scale: "1000mi"} end def test_boost_by_distance_hash store [ {name: "San Francisco", latitude: 37.7833, longitude: -122.4167}, {name: "San Antonio", latitude: 29.4167, longitude: -98.5000}, {name: "San Marino", latitude: 43.9333, longitude: 12.4667} ] assert_order "san", ["San Francisco", "San Antonio", "San Marino"], boost_by_distance: {field: :location, origin: {lat: 37, lon: -122}, scale: "1000mi"} end def test_boost_by_distance_v2 store [ {name: "San Francisco", latitude: 37.7833, longitude: -122.4167}, {name: "San Antonio", latitude: 29.4167, longitude: -98.5000}, {name: "San Marino", latitude: 43.9333, longitude: 12.4667} ] assert_order "san", ["San Francisco", "San Antonio", "San Marino"], boost_by_distance: {location: {origin: [37, -122], scale: "1000mi"}} end def test_boost_by_distance_v2_hash store [ {name: "San Francisco", latitude: 37.7833, longitude: -122.4167}, {name: "San Antonio", latitude: 29.4167, longitude: -98.5000}, {name: "San Marino", latitude: 43.9333, longitude: 12.4667} ] assert_order "san", ["San 
Francisco", "San Antonio", "San Marino"], boost_by_distance: {location: {origin: {lat: 37, lon: -122}, scale: "1000mi"}} end def test_boost_by_distance_v2_factor store [ {name: "San Francisco", latitude: 37.7833, longitude: -122.4167, found_rate: 0.1}, {name: "San Antonio", latitude: 29.4167, longitude: -98.5000, found_rate: 0.99}, {name: "San Marino", latitude: 43.9333, longitude: 12.4667, found_rate: 0.2} ] assert_order "san", ["San Antonio","San Francisco", "San Marino"], boost_by: {found_rate: {factor: 100}}, boost_by_distance: {location: {origin: [37, -122], scale: "1000mi"}} assert_order "san", ["San Francisco", "San Antonio", "San Marino"], boost_by: {found_rate: {factor: 100}}, boost_by_distance: {location: {origin: [37, -122], scale: "1000mi", factor: 100}} end def test_boost_by_indices skip if cequel? store_names ["Rex"], Animal store_names ["Rexx"], Product assert_order "Rex", ["Rexx", "Rex"], {models: [Animal, Product], indices_boost: {Animal => 1, Product => 200}, fields: [:name]}, Searchkick end end
38.029661
192
0.625515
bb9974c7a4986db4334b916b5eed60e6e735c4e9
1,464
require "spec_helper" describe Savon::HTTP::Error do let(:http_error) { Savon::HTTP::Error.new new_response(:code => 404, :body => "Not Found") } let(:no_error) { Savon::HTTP::Error.new new_response } it "should be a Savon::Error" do Savon::HTTP::Error.should < Savon::Error end describe "#http" do it "should return the HTTPI::Response" do http_error.http.should be_an(HTTPI::Response) end end describe "#present?" do it "should return true if there was an HTTP error" do http_error.should be_present end it "should return false unless there was an HTTP error" do no_error.should_not be_present end end [:message, :to_s].each do |method| describe "##{method}" do it "should return an empty String unless an HTTP error is present" do no_error.send(method).should == "" end it "should return the HTTP error message" do http_error.send(method).should == "HTTP error (404): Not Found" end end end describe "#to_hash" do it "should return the HTTP response details as a Hash" do http_error.to_hash.should == { :code => 404, :headers => {}, :body => "Not Found" } end end def new_response(options = {}) defaults = { :code => 200, :headers => {}, :body => Fixture.response(:authentication) } response = defaults.merge options HTTPI::Response.new response[:code], response[:headers], response[:body] end end
27.622642
94
0.648907
18c151fbcf6ee401e5e8a2dedf06057521171bc1
1,356
# frozen_string_literal: true

# WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/version-3/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE

require 'aws-sdk-core'
require 'aws-sigv4'

require_relative 'aws-sdk-datasync/types'
require_relative 'aws-sdk-datasync/client_api'
require_relative 'aws-sdk-datasync/client'
require_relative 'aws-sdk-datasync/errors'
require_relative 'aws-sdk-datasync/resource'
require_relative 'aws-sdk-datasync/customizations'

# This module provides support for AWS DataSync. This module is available in the
# `aws-sdk-datasync` gem.
#
# # Client
#
# The {Client} class provides one method for each API operation. Operation
# methods each accept a hash of request parameters and return a response
# structure.
#
#     data_sync = Aws::DataSync::Client.new
#     resp = data_sync.cancel_task_execution(params)
#
# See {Client} for more information.
#
# # Errors
#
# Errors returned from AWS DataSync are defined in the
# {Errors} module and all extend {Errors::ServiceError}.
#
#     begin
#       # do stuff
#     rescue Aws::DataSync::Errors::ServiceError
#       # rescues all AWS DataSync API errors
#     end
#
# See {Errors} for more information.
#
# @!group service
module Aws::DataSync

  GEM_VERSION = '1.44.0'

end
25.111111
80
0.741888
1cc29455da5bbe79061651978b790e12cd6c8475
1,525
# Copyright 2006-2011 Stanislav Senotrusov <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

class Exceptional::DetailsExceptionData < Exceptional::ExceptionData
  def initialize exception
    @exception = exception
    @details = @exception.details.dup
    @name = @details.delete(:name)
    @request = @details[:request].kind_of?(Hash) && @details.delete(:request)
  end

  alias_method :context_stuff_orig, :context_stuff

  def context_stuff
    data = context_stuff_orig

    if @details.any?
      if data['context']
        data['context'].merge! @details
      else
        data['context'] = @details
      end
    end

    if @request
      data['request'] = {
        'url'        => @request[:url],
        'controller' => @request[:controller],
        'action'     => @request[:action],
        'parameters' => @request[:params],
        'headers'    => @request[:headers],
        'session'    => @request[:session]
      }
    end

    data
  end
end
28.773585
79
0.645902
0807cdb05fa91fe4c7861a2ab3b0818d5b2720d5
566
Shindo.tests('Fog::Compute[:hp] | user agent', ['hp', 'user_agent']) do

  tests('default for HP providers').returns("hpfog/#{Fog::HP::VERSION}") do
    pending if Fog.mocking?
    conn = Fog::Compute[:hp]
    conn.instance_variable_get(:@connection_options)[:headers]['User-Agent']
  end

  tests('overriden by clients').returns("hpfog/#{Fog::HP::VERSION} (TesterClient/1.0.0)") do
    pending if Fog.mocking?
    conn = Fog::Compute::HP.new(:user_agent => "TesterClient/1.0.0")
    conn.instance_variable_get(:@connection_options)[:headers]['User-Agent']
  end

end
40.428571
92
0.683746
798878d8891aa08574fe38f8a67753079f269015
84
class Trade::PresenceValidationRuleSerializer < Trade::ValidationRuleSerializer
end
28
79
0.880952
380c5b6510cf97e8dd72b88bee06d96141e5a68a
3,699
require 'rails_helper' describe TaxHousehold do subject { TaxHousehold.new } let(:tax_household_members) { [member1, member2, member3]} let(:member1) { double(financial_statements: [tax_filer]) } let(:member2) { double(financial_statements: [non_filer1]) } let(:member3) { double(financial_statements: [dependent]) } let(:tax_filer) { double(tax_filing_status: 'tax_filer', is_tax_filing_together: false) } let(:non_filer1) { double(tax_filing_status: 'non_filer', is_tax_filing_together: false) } let(:non_filer2) { double(tax_filing_status: 'non_filer', is_tax_filing_together: false) } let(:dependent) { double(tax_filing_status: 'dependent', is_tax_filing_together: false) } let(:joint_filer1) { double(tax_filing_status: 'tax_filer', is_tax_filing_together: true) } let(:joint_filer2) { double(tax_filing_status: 'tax_filer', is_tax_filing_together: true) } before(:each) do allow(subject).to receive(:tax_household_members).and_return(tax_household_members) allow(subject).to receive(:has_spouse_relation?).with(member2).and_return(true) end context '#primary' do context 'when single filer present' do it 'should return tax filer' do expect(subject.primary).to eq(member1) end end context 'when multiple filers filing together' do let(:member1) { double(financial_statements: [joint_filer1]) } let(:member2) { double(financial_statements: [joint_filer2]) } it 'should return primary_applicant' do allow(member1).to receive(:is_primary_applicant?).and_return(false) allow(member2).to receive(:is_primary_applicant?).and_return(true) expect(subject.primary).to eq(member2) end end end context '#spouse' do context 'when single filer present' do let(:member2) { double(financial_statements: [non_filer1]) } let(:member3) { double(financial_statements: [non_filer2]) } it 'should return non_filer with spouse relation on policy' do expect(subject.spouse).to eq(member2) end end context 'when multiple filers filing together' do let(:member1) { double(financial_statements: [joint_filer1]) } let(:member2) { double(financial_statements: [non_filer1]) } let(:member3) { double(financial_statements: [joint_filer2]) } it 'should return non primary_applicant' do allow(member1).to receive(:is_primary_applicant?).and_return(true) allow(member3).to receive(:is_primary_applicant?).and_return(false) expect(subject.spouse).to eq(member3) end end end context '#dependents' do context 'when member with filing status dependent present' do it 'should return' do expect(subject.dependents).to eq([member3]) end end context 'when non_filer without spouse relation present' do let(:member2) { double(financial_statements: [non_filer1]) } let(:member3) { double(financial_statements: [non_filer2]) } it 'should return as dependent' do expect(subject.dependents).to eq([member3]) end end context 'when both non filers and dependents present' do let(:member2) { double(financial_statements: [non_filer1]) } let(:member3) { double(financial_statements: [non_filer2]) } let(:member4) { double(financial_statements: [dependent]) } let(:tax_household_members) { [member1, member2, member3, member4]} it 'should return non_filers without spouse relation and dependents' do expect(subject.primary).to eq(member1) expect(subject.spouse).to eq(member2) expect(subject.dependents).to eq([member4, member3]) end end end end
37.744898
93
0.701811
7af7705bd29de7a758ed3b4ea4a2d08db7306fb9
594
cask "skype-preview" do version "8.75.76.133" sha256 "15a2c6c0759de11b441442b62a3e8f3b83d9f7492eeccd65cbf3646b72916262" url "https://endpoint920510.azureedge.net/s4l/s4l/download/mac/Skype-#{version}.dmg", verified: "endpoint920510.azureedge.net/s4l/s4l/download/mac/" name "Skype Preview" desc "Video chat, voice call and instant messaging application" homepage "https://www.skype.com/en/insider/" livecheck do url "https://go.skype.com/mac.preview.download" strategy :header_match end auto_updates true conflicts_with cask: "skype" app "Skype.app" end
28.285714
87
0.747475
28ee489fef305b830adff36fbc8cb1ee4fd50ef2
693
# frozen_string_literal: true

require_relative "../support/command_testing"

using CommandTesting

describe "patch has source location meta" do
  it "works" do
    skip if RUBY_VERSION >= "2.7"

    source_path = Pathname.new(File.join(__dir__, "../../lib/ruby-next/core/enumerator/produce.rb")).realpath
    source_line = File.open(source_path).each_line.with_index { |line, i| break i + 1 if /wrong number of arguments/.match?(line) }

    run(
      "ruby -rbundler/setup -I#{File.join(__dir__, "../../lib")} "\
      "#{File.join(__dir__, "fixtures", "backtrace.rb")}"
    ) do |_status, output, _err|
      output.should include("TRACE: #{source_path}:#{source_line}")
    end
  end
end
31.5
131
0.673882
28ef2d4229f97378af599cf399055929934a0b40
950
# frozen_string_literal: true

require_relative 'core/lib/spree/core/version.rb'

Gem::Specification.new do |s|
  s.platform = Gem::Platform::RUBY
  s.name = 'solidus'
  s.version = Spree.solidus_version
  s.summary = 'Full-stack e-commerce framework for Ruby on Rails.'
  s.description = 'Solidus is an open source e-commerce framework for Ruby on Rails.'

  s.files = Dir['README.md', 'lib/**/*']
  s.require_path = 'lib'
  s.requirements << 'none'

  s.required_ruby_version = '>= 2.2.2'
  s.required_rubygems_version = '>= 1.8.23'

  s.author = 'Solidus Team'
  s.email = '[email protected]'
  s.homepage = 'http://solidus.io'
  s.license = 'BSD-3-Clause'

  s.add_dependency 'solidus_core', s.version
  s.add_dependency 'solidus_api', s.version
  s.add_dependency 'solidus_backend', s.version
  s.add_dependency 'solidus_frontend', s.version
  s.add_dependency 'solidus_sample', s.version
end
31.666667
85
0.673684
08317768a6d6e25459abaa9f07aeed338235cff0
1,632
# -*- coding: binary -*-
module PacketFu

  # This Mixin simplifies access to the IPv6Headers. Mix this in with your
  # packet interface, and it will add methods that essentially delegate to
  # the 'ipv6_header' method (assuming that it is a IPv6Header object)
  module IPv6HeaderMixin
    def ipv6_v=(v); self.ipv6_header.ipv6_v= v; end
    def ipv6_v; self.ipv6_header.ipv6_v; end
    def ipv6_class=(v); self.ipv6_header.ipv6_class= v; end
    def ipv6_class; self.ipv6_header.ipv6_class; end
    def ipv6_label=(v); self.ipv6_header.ipv6_label= v; end
    def ipv6_label; self.ipv6_header.ipv6_label; end
    def ipv6_len=(v); self.ipv6_header.ipv6_len= v; end
    def ipv6_len; self.ipv6_header.ipv6_len; end
    def ipv6_next=(v); self.ipv6_header.ipv6_next= v; end
    def ipv6_next; self.ipv6_header.ipv6_next; end
    def ipv6_hop=(v); self.ipv6_header.ipv6_hop= v; end
    def ipv6_hop; self.ipv6_header.ipv6_hop; end
    def ipv6_src=(v); self.ipv6_header.ipv6_src= v; end
    def ipv6_src; self.ipv6_header.ipv6_src; end
    def ipv6_dst=(v); self.ipv6_header.ipv6_dst= v; end
    def ipv6_dst; self.ipv6_header.ipv6_dst; end
    def ipv6_calc_len; self.ipv6_header.ipv6_calc_len; end
    def ipv6_recalc(*v); self.ipv6_header.ipv6_recalc(*v); end
    def ipv6_saddr; self.ipv6_header.ipv6_saddr; end
    def ipv6_saddr=(v); self.ipv6_header.ipv6_saddr= v; end
    def ipv6_daddr; self.ipv6_header.ipv6_daddr; end
    def ipv6_daddr=(v); self.ipv6_header.ipv6_daddr= v; end
    def ipv6_src_readable; self.ipv6_header.ipv6_src_readable; end
    def ipv6_dst_readable; self.ipv6_header.ipv6_dst_readable; end
  end

end
49.454545
75
0.74326
910894108b59d9d31893010de606e3599527853e
582
class Jyanken < ActiveRecord::Base
  GUU = 0
  CHYOKI = 1
  PAA = 2
  TE = {GUU =>"グー", CHYOKI => "チョキ", PAA => "パー"}

  DRAW = 0
  WIN = 1
  LOSS = 2
  JUDGMENT = {DRAW => "引き分け", WIN => "人の勝ち", LOSS => "人の負け"}

  before_save :fight

  def judge(human, computer)
    if human == computer
      DRAW
    elsif human < computer || human == PAA && computer == CHYOKI
      WIN
    else
      LOSS
    end
  end

  def fight
    self.computer = rand(3)
    self.judgment = judge(human, computer)
  end

  def self.status
    Jyanken.group(:judgment).count(:judgment)
  end
end
17.117647
64
0.570447
b9a1896c087ef2e39e2291f9dcb16ca9cddfcf17
2,070
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run the gemspec command
# -*- encoding: utf-8 -*-

Gem::Specification.new do |s|
  s.name = %q{deepbeige}
  s.version = "0.2.3"

  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["David Bochenski"]
  s.date = %q{2010-09-10}
  s.default_executable = %q{deepbeige}
  s.description = %q{An AI learning program that plays noughts and crosses}
  s.email = %q{[email protected]}
  s.executables = ["deepbeige"]
  s.extra_rdoc_files = [
    "LICENSE",
    "README.rdoc"
  ]
  s.files = [
    ".document",
    ".gitignore",
    "LICENSE",
    "README.rdoc",
    "Rakefile",
    "VERSION",
    "bin/deepbeige",
    "deepbeige.gemspec",
    "lib/arena.rb",
    "lib/deepbeige.rb",
    "lib/deepbeige/main.rb",
    "lib/game.rb",
    "lib/human.rb",
    "lib/match.rb",
    "lib/neuralnet.rb",
    "lib/node.rb",
    "lib/noughts_and_crosses.rb",
    "lib/pick_a_number.rb",
    "lib/player.rb",
    "lib/table.rb",
    "lib/tier.rb",
    "lib/tournament.rb",
    "test/helper.rb",
    "test/test_deepbeige.rb",
    "ui/Rakefile",
    "ui/config/build.yml",
    "ui/lib/application.rb",
    "ui/lib/menu.rb",
    "ui/resources/DeepBeige.icns"
  ]
  s.homepage = %q{http://github.com/bochenski/deepbeige}
  s.rdoc_options = ["--charset=UTF-8"]
  s.require_paths = ["lib"]
  s.rubygems_version = %q{1.3.7}
  s.summary = %q{An AI learning algorithm for games}
  s.test_files = [
    "test/helper.rb",
    "test/test_deepbeige.rb"
  ]

  if s.respond_to? :specification_version then
    current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
    s.specification_version = 3

    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_development_dependency(%q<thoughtbot-shoulda>, [">= 0"])
    else
      s.add_dependency(%q<thoughtbot-shoulda>, [">= 0"])
    end
  else
    s.add_dependency(%q<thoughtbot-shoulda>, [">= 0"])
  end
end
27.236842
105
0.628019
7907432a02e68121891917190234c6906fe57d8f
338
Gem::Specification.new do |s|
  s.name = 'mlb-scores'
  s.version = '0.0.1'
  s.date = '2016-03-29'
  s.summary = "Summary"
  s.description = "Description"
  s.authors = ["Eric Baker"]
  s.email = "[email protected]"
  s.homepage = ''
  s.files = ["lib/mlb_scores.rb"]
  s.license = 'MIT'
end
26
39
0.532544
bfd04eba090208509f34a6353be9b501e7a43642
3,259
#! /usr/bin/env ruby -S rspec require 'spec_helper_acceptance' describe 'icingaweb2::module::director class:' do it 'runs successfully' do pp = " case $::osfamily { 'redhat': { package { 'centos-release-scl': before => Class['icingaweb2'] } } } package { 'git': } include ::mysql::server mysql::db { 'icingaweb2': user => 'icingaweb2', password => 'icingaweb2', host => 'localhost', grant => ['SELECT', 'INSERT', 'UPDATE', 'DELETE', 'DROP', 'CREATE VIEW', 'CREATE', 'INDEX', 'EXECUTE', 'ALTER', 'REFERENCES'], } mysql::db { 'director': user => 'director', password => 'director', host => 'localhost', charset => 'utf8', grant => ['SELECT', 'INSERT', 'UPDATE', 'DELETE', 'DROP', 'CREATE VIEW', 'CREATE', 'INDEX', 'EXECUTE', 'ALTER', 'REFERENCES'], } class {'icingaweb2': manage_repo => true, import_schema => true, db_type => 'mysql', db_host => 'localhost', db_port => 3306, db_username => 'icingaweb2', db_password => 'icingaweb2', require => Mysql::Db['icingaweb2'], } class {'icingaweb2::module::monitoring': ido_host => 'localhost', ido_db_name => 'icinga2', ido_db_username => 'icinga2', ido_db_password => 'supersecret', commandtransports => { icinga2 => { transport => 'api', username => 'root', password => 'icinga', } } } class {'icingaweb2::module::director': git_revision => 'v1.3.2', db_host => 'localhost', db_name => 'director', db_username => 'director', db_password => 'director', import_schema => true, kickstart => false, endpoint => 'puppet-icingaweb2.localdomain', api_username => 'root', api_password => 'icinga', require => Mysql::Db['director'] } " apply_manifest(pp, catch_failures: true) end describe file('/etc/icingaweb2/enabledModules/director') do it { is_expected.to be_symlink } end describe file('/etc/icingaweb2/modules/director/config.ini') do it { is_expected.to be_file } it { is_expected.to contain '[db]' } it { is_expected.to contain 'resource = "icingaweb2-module-director"' } end describe file('/etc/icingaweb2/resources.ini') do it { is_expected.to be_file } it { is_expected.to contain '[icingaweb2-module-director]' } it { is_expected.to contain 'type = "db"' } it { is_expected.to contain 'db = "mysql"' } it { is_expected.to contain 'host = "localhost"' } it { is_expected.to contain 'port = "3306"' } it { is_expected.to contain 'dbname = "director"' } it { is_expected.to contain 'username = "director"' } it { is_expected.to contain 'password = "director"' } it { is_expected.to contain 'charset = "utf8"' } end #describe command('mysql -e "select object_name from director.icinga_apiuser"') do # its(:stdout) { should match(%r{root}) } #end end
31.336538
137
0.548021
ed8856a0d2cdaed03da6d782738c5aab3e4e646e
3,794
require 'spec_helper' describe NetSuite::Records::AccountingPeriod do let(:accounting_period) { NetSuite::Records::AccountingPeriod.new } it 'has all the right fields' do [ :allow_non_gl_changes, :end_date, :is_adjust, :is_quarter, :is_year, :period_name, :start_date ].each do |field| accounting_period.should have_field(field) end end it 'has all the right record refs' do [ :parent ].each do |record_ref| accounting_period.should have_record_ref(record_ref) end end describe '.get' do context 'when the response is successful' do let(:response) { NetSuite::Response.new(:success => true, :body => { :period_name => 'Accounting Period 1' }) } it 'returns a Account instance populated with the data from the response object' do NetSuite::Actions::Get.should_receive(:call).with([NetSuite::Records::AccountingPeriod, {:external_id => 1}], {}).and_return(response) accounting_period = NetSuite::Records::AccountingPeriod.get(:external_id => 1) accounting_period.should be_kind_of(NetSuite::Records::AccountingPeriod) accounting_period.period_name.should eql('Accounting Period 1') end end context 'when the response is unsuccessful' do let(:response) { NetSuite::Response.new(:success => false, :body => {}) } it 'raises a RecordNotFound exception' do NetSuite::Actions::Get.should_receive(:call).with([NetSuite::Records::AccountingPeriod, {:external_id => 1}], {}).and_return(response) lambda { NetSuite::Records::AccountingPeriod.get(:external_id => 1) }.should raise_error(NetSuite::RecordNotFound, /NetSuite::Records::AccountingPeriod with OPTIONS=(.*) could not be found/) end end end describe '#add' do let(:test_data) { { :acct_name => 'Test Accounting Period', :description => 'An example accounting period' } } context 'when the response is successful' do let(:response) { NetSuite::Response.new(:success => true, :body => { :internal_id => '1' }) } it 'returns true' do accounting_period = NetSuite::Records::AccountingPeriod.new(test_data) NetSuite::Actions::Add.should_receive(:call). with([accounting_period], {}). and_return(response) accounting_period.add.should be_true end end context 'when the response is unsuccessful' do let(:response) { NetSuite::Response.new(:success => false, :body => {}) } it 'returns false' do accounting_period = NetSuite::Records::AccountingPeriod.new(test_data) NetSuite::Actions::Add.should_receive(:call). with([accounting_period], {}). and_return(response) accounting_period.add.should be_false end end end describe '#delete' do let(:test_data) { { :internal_id => '1' } } context 'when the response is successful' do let(:response) { NetSuite::Response.new(:success => true, :body => { :internal_id => '1' }) } it 'returns true' do accounting_period = NetSuite::Records::AccountingPeriod.new(test_data) NetSuite::Actions::Delete.should_receive(:call). with([accounting_period], {}). and_return(response) accounting_period.delete.should be_true end end context 'when the response is unsuccessful' do let(:response) { NetSuite::Response.new(:success => false, :body => {}) } it 'returns false' do accounting_period = NetSuite::Records::AccountingPeriod.new(test_data) NetSuite::Actions::Delete.should_receive(:call). with([accounting_period], {}). and_return(response) accounting_period.delete.should be_false end end end end
36.480769
142
0.656299
613ed369baeabbc5b29185434ee6a981e92add11
2,676
class Pyinstaller < Formula include Language::Python::Virtualenv desc "Bundle a Python application and all its dependencies" homepage "https://pyinstaller.org/" url "https://files.pythonhosted.org/packages/ad/09/82d3f066eddb9ab3cbed8284a0bda84a93806ff2c44536474841a98ae0e7/pyinstaller-5.0.1.tar.gz" sha256 "25f5f758b005c66471af0d452c90ec328179116afe9d5bd82f5fb06d9416267b" license "GPL-2.0-or-later" head "https://github.com/pyinstaller/pyinstaller.git", branch: "develop" bottle do sha256 cellar: :any_skip_relocation, arm64_monterey: "9d6e03804e1676310f50aaac061ba4fbbde1cb5e9bce72747e5da4f25ba28188" sha256 cellar: :any_skip_relocation, arm64_big_sur: "2960c2033f41a3649b09e0c88f186fb461b69e077e478be47bcff82f980fe171" sha256 cellar: :any_skip_relocation, monterey: "f5de29bf658db9778d204f7688d09490d8b1406248ca9027295bcf116aebbd5d" sha256 cellar: :any_skip_relocation, big_sur: "6060cc43e7668a561a22c8be59de483cb5590bbb1a34f2bf8ae6cb83076b007a" sha256 cellar: :any_skip_relocation, catalina: "470ddb2a7762e66557c381da5bc1a550077c8e84008661c72ab3f573084b878b" sha256 cellar: :any_skip_relocation, x86_64_linux: "afc9926a550721ac536df12b3880e9e92f2aa23651a34f536b1a0dcff1d6bc6c" end depends_on "[email protected]" resource "altgraph" do url "https://files.pythonhosted.org/packages/a9/f1/62830c4915178dbc6948687916603f1cd37c2c299634e4a8ee0efc9977e7/altgraph-0.17.2.tar.gz" sha256 "ebf2269361b47d97b3b88e696439f6e4cbc607c17c51feb1754f90fb79839158" end resource "macholib" do url "https://files.pythonhosted.org/packages/16/1b/85fd523a1d5507e9a5b63e25365e0a26410d5b6ee89082426e6ffff30792/macholib-1.16.tar.gz" sha256 "001bf281279b986a66d7821790d734e61150d52f40c080899df8fefae056e9f7" end resource "pyinstaller-hooks-contrib" do url "https://files.pythonhosted.org/packages/4c/c9/065f97b7e89dcb438ce3e270f3f28af5cba759f4127afe14ce6038701270/pyinstaller-hooks-contrib-2022.4.tar.gz" sha256 "b7f7da20e5b83c22219a21b8f849525e5f735197975313208f4e07ff9549cdaf" end def install cd "bootloader" do system "python3", "./waf", "all", "--no-universal2", "STRIP=/usr/bin/strip" end virtualenv_install_with_resources end test do (testpath/"easy_install.py").write <<~EOS """Run the EasyInstall command""" if __name__ == '__main__': from setuptools.command.easy_install import main main() EOS system bin/"pyinstaller", "-F", "--distpath=#{testpath}/dist", "--workpath=#{testpath}/build", "#{testpath}/easy_install.py" assert_predicate testpath/"dist/easy_install", :exist? end end
46.947368
156
0.778401
62c5f356d5df1127bb03eb708598bf1ccec7140c
5,153
require 'sequel/plugins/upsert'
require 'sequel'

module Sequel
  module Plugins
    module Upsert
      class PacticipantNoUpsert < Sequel::Model(:pacticipants)
        plugin :timestamps, update_on_create: true
      end

      class Pacticipant < Sequel::Model
        plugin :upsert, identifying_columns: [:name]
        plugin :timestamps, update_on_create: true
      end

      class Version < Sequel::Model
        plugin :upsert, identifying_columns: [:pacticipant_id, :number]
        plugin :timestamps, update_on_create: true
      end

      class LatestPactPublicationIdForConsumerVersion < Sequel::Model(:latest_pact_publication_ids_for_consumer_versions)
        set_primary_key [:provider_id, :consumer_version_id]
        unrestrict_primary_key
        plugin :upsert, identifying_columns: [:provider_id, :consumer_version_id]
      end

      describe PacticipantNoUpsert do
        it "has an _insert_dataset method" do
          expect(PacticipantNoUpsert.private_instance_methods).to include (:_insert_dataset)
        end
      end

      describe "LatestPactPublicationIdForConsumerVersion" do
        before do
          td.create_pact_with_hierarchy("Foo", "1", "Bar")
            .create_consumer_version("2")
        end

        let!(:new_pact_publication) do
          pact_publication_values = PactBroker::Pacts::PactPublication.first.values.dup
          pact_publication_values.delete(:id)
          pact_publication_values.delete(:created_at)
          pact_publication_values.delete(:updated_at)
          pact_publication_values[:revision_number] = 2
          PactBroker::Pacts::PactPublication.new(pact_publication_values).save
        end

        let(:new_latest_pact_publication_id_for_consumer_version) do
          values = LatestPactPublicationIdForConsumerVersion.first.values
          LatestPactPublicationIdForConsumerVersion.new(values.merge(pact_publication_id: new_pact_publication.id))
        end

        describe "save" do
          subject { new_latest_pact_publication_id_for_consumer_version.save }

          it "raises an error" do
            expect { subject }.to raise_error Sequel::UniqueConstraintViolation
          end
        end

        describe "upsert" do
          subject { new_latest_pact_publication_id_for_consumer_version.upsert }

          it "updates the new object with the values from the existing object" do
            expect(subject.pact_publication_id).to eq new_pact_publication.id
          end
        end
      end

      context "when a duplicate is inserted with no upsert" do
        before do
          PacticipantNoUpsert.new(name: "Foo").save
        end

        subject do
          PacticipantNoUpsert.new(name: "Foo").save
        end

        it "raises an error" do
          expect { subject }.to raise_error Sequel::UniqueConstraintViolation
        end
      end

      # This doesn't work on MSQL because the _insert_raw method
      # does not return the row ID of the duplicated row when upsert is used
      # May have to go back to the old method of doing this
      context "when a duplicate Pacticipant is inserted with upsert" do
        before do
          Pacticipant.new(name: "Foo", repository_url: "http://foo").upsert
        end

        subject do
          Pacticipant.new(name: "Foo", repository_url: "http://bar").upsert
        end

        it "does not raise an error" do
          expect { subject }.to_not raise_error
        end

        it "sets the values on the object" do
          expect(subject.repository_url).to eq "http://bar"
        end

        it "does not insert another row" do
          expect { subject }.to_not change { Pacticipant.count }
        end
      end

      context "when a duplicate Version is inserted with upsert" do
        let!(:pacticipant) { Pacticipant.new(name: "Foo").save }

        let!(:original_version) do
          version = Version.new(
            number: "1",
            pacticipant_id: pacticipant.id,
            branch: "original-branch",
            build_url: "original-url"
          ).upsert
          Version.where(id: version.id).update(created_at: yesterday, updated_at: yesterday)
          version
        end

        let(:yesterday) { DateTime.now - 2 }

        subject do
          Version.new(number: "1", pacticipant_id: pacticipant.id, branch: "new-branch").upsert
        end

        it "does not raise an error" do
          expect { subject }.to_not raise_error
        end

        it "sets the values on the object" do
          expect(subject.branch).to eq "new-branch"
        end

        it "nils out values that weren't set on the second model" do
          expect(subject.build_url).to eq nil
        end

        it "does not insert another row" do
          expect { subject }.to_not change { Version.count }
        end

        it "does not change the created_at" do
          expect { subject }.to_not change { Version.where(number: "1").first.created_at }
        end

        it "does change the updated_at" do
          expect { subject }.to change { Version.where(number: "1").first.updated_at }
        end
      end
    end
  end
end
33.461039
121
0.643121
b95f7d75c54dfc0df0d53bb4dab4bfc55f3bd46d
56
load File.expand_path("../tasks/canvas.rake", __FILE__)
28
55
0.75
260bcf793b10f56aaca1ae7809d290b458f55264
60
module Unicode
  module Eaw
    VERSION = "2.2.0"
  end
end
10
21
0.633333
e9680cda09c8de53a6542c13ea274dd7008867d8
2,288
# frozen_string_literal: true

module Spree
  module Admin
    class ReimbursementsController < ResourceController
      helper 'spree/admin/reimbursement_type'
      helper 'spree/admin/customer_returns'

      belongs_to 'spree/order', find_by: :number

      before_action :load_stock_locations, only: :edit
      before_action :load_simulated_refunds, only: :edit

      create.after :recalculate_order

      rescue_from Spree::Core::GatewayError, with: :spree_core_gateway_error

      def perform
        @reimbursement.perform!(created_by: try_spree_current_user)
        redirect_to location_after_save
      end

      private

      def recalculate_order
        @reimbursement.order.recalculate
      end

      def build_resource
        if params[:build_from_customer_return_id].present?
          customer_return = Spree::CustomerReturn.find(params[:build_from_customer_return_id])
          Spree::Reimbursement.build_from_customer_return(customer_return)
        else
          super
        end
      end

      def location_after_save
        if @reimbursement.reimbursed?
          admin_order_reimbursement_path(parent, @reimbursement)
        else
          edit_admin_order_reimbursement_path(parent, @reimbursement)
        end
      end

      # We don't currently have a real Reimbursement "new" page. And the only
      # built-in way to create reimburesments via Solidus admin is from the
      # customer returns admin page via a button that supplies the
      # "build_from_customer_return" parameter. The "edit" page is not
      # suitable for use here for that reason as well.
      # Perhaps we should add a reimbursement new page of some sort.
      def render_after_create_error
        flash.keep
        if request.referer
          redirect_to request.referer
        else
          redirect_to admin_url
        end
      end

      def load_stock_locations
        @stock_locations = Spree::StockLocation.active
      end

      def load_simulated_refunds
        @reimbursement_objects = @reimbursement.simulate(created_by: try_spree_current_user)
      end

      def spree_core_gateway_error(error)
        flash[:error] = error.message
        redirect_to edit_admin_order_reimbursement_path(parent, @reimbursement)
      end
    end
  end
end
30.506667
94
0.694493
4a344479ed4a46d644eabd9737ecdad2e09f89e7
428
class SessionsController < ApplicationController

  def new
  end

  def create
    user = User.find_by(email: params[:session][:email].downcase)
    if user && user.authenticate(params[:session][:password])
      # Log the user in and redirect to the user's show page
      log_in user
      redirect_to user
    else
      flash.now[:danger] = 'Invalid email/password combination'
      render 'new'
    end
  end

  def destroy
  end
end
21.4
64
0.693925
d5b32f2e84e083a48e0fba556ed4ac28187b6b36
121
require 'test_helper'

class DocTypeTest < ActiveSupport::TestCase
  # test "the truth" do
  #   assert true
  # end
end
15.125
43
0.702479
e2e01eca20ec03424490ef5e748ed8f946ab0bfb
1,319
include_recipe "mongodb-10gen::default" directory File.join(node['mongodb']['data_dir'], node['mongodb']['nodename']) do owner "mongodb" group "mongodb" mode 00700 end directory File.join(node['mongodb']['log_dir']) do owner "mongodb" group "mongodb" mode 00755 end template File.join("/etc/init", "#{node['mongodb']['nodename']}.conf") do source "init_mongodb.erb" owner "root" group "root" mode 00644 variables({ :nodename => node['mongodb']['nodename'] }) end template File.join("/etc/logrotate.d", node['mongodb']['nodename']) do source "logrotate_mongodb.erb" owner "root" group "root" mode 00644 variables({ :nodename => node['mongodb']['nodename'] }) end template File.join(node['mongodb']['etc_dir'], "#{node['mongodb']['nodename']}.conf") do source "mongodb.conf.erb" owner "mongodb" group "mongodb" mode 00600 variables({ :nodename => node['mongodb']['nodename'], :port => node['mongodb']['port'], :replSet => node['mongodb']['replSet'] }) notifies :restart, "service[#{node['mongodb']['nodename']}]" end service node['mongodb']['nodename'] do case node['platform'] when "ubuntu" if node['platform_version'].to_f >= 9.10 provider Chef::Provider::Service::Upstart end end action [:enable, :start] end
21.983333
88
0.648218
6a192122a1ffb9273c11491dbde12fa1a9118b54
1,158
# WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/master/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE

require 'aws-sdk-core'
require 'aws-sigv4'

require_relative 'aws-sdk-sms/types'
require_relative 'aws-sdk-sms/client_api'
require_relative 'aws-sdk-sms/client'
require_relative 'aws-sdk-sms/errors'
require_relative 'aws-sdk-sms/resource'
require_relative 'aws-sdk-sms/customizations'

# This module provides support for AWS Server Migration Service. This module is available in the
# `aws-sdk-sms` gem.
#
# # Client
#
# The {Client} class provides one method for each API operation. Operation
# methods each accept a hash of request parameters and return a response
# structure.
#
# See {Client} for more information.
#
# # Errors
#
# Errors returned from AWS Server Migration Service all
# extend {Errors::ServiceError}.
#
#     begin
#       # do stuff
#     rescue Aws::SMS::Errors::ServiceError
#       # rescues all service API errors
#     end
#
# See {Errors} for more information.
#
# @service
module Aws::SMS

  GEM_VERSION = '1.15.0'

end
24.125
96
0.737478
e211dfd89d651ef177c38f598a7065d58ca1fd30
385
class ApplicationController < ActionController::Base
  before_action :configure_permitted_parameters, if: :devise_controller?

  protected

  def configure_permitted_parameters
    devise_parameter_sanitizer.permit(:account_update, keys: [:nickname, :course_chart, :avatar, :avatar_cache, :remove_avatar])
    devise_parameter_sanitizer.permit(:sign_up, keys: [:nickname])
  end
end
35
129
0.8
282e80dd9b2711dc8442a16c9b60f70eaceddf43
1,545
require 'app/aggregates/todo'

module EventSourceryTodoApp
  module Commands
    module Todo
      module Amend
        class Command
          attr_reader :payload, :aggregate_id

          def self.build(**args)
            new(**args).tap(&:validate)
          end

          def initialize(params)
            @payload = params.slice(
              :todo_id,
              :title,
              :description,
              :due_date,
              :stakeholder_email,
              :overview
            )
            @aggregate_id = payload.delete(:todo_id)
          end

          def validate
            raise BadRequest, 'todo_id is blank' if aggregate_id.nil?

            begin
              Date.parse(payload[:due_date]) if payload[:due_date]
            rescue ArgumentError
              raise BadRequest, 'due_date is invalid'
            end
          end
        end

        class CommandHandler
          def initialize(repository: EventSourceryTodoApp.repository)
            @repository = repository
          end

          # Handle loads the aggregate state from the store using the repository,
          # defers to the aggregate to execute the command, and saves off any newly
          # raised events to the store.
          def handle(command)
            aggregate = repository.load(Aggregates::Todo, command.aggregate_id)
            aggregate.amend(command.payload)
            repository.save(aggregate)
          end

          private

          attr_reader :repository
        end
      end
    end
  end
end
26.637931
83
0.554693
ffebef9994b9a6ecf801ac2d7a9b2a8fd999d6bd
1,262
# -*- encoding : utf-8 -*-
module GlobalCollect::Requests
  class Base
    attr_accessor :action
    attr_reader :version
    attr_reader :suggested_response_mixins

    def initialize(action)
      @action = action
      @suggested_response_mixins ||= []
      @version = GlobalCollect::default_api_version
    end

    def version=(v)
      raise ArgumentError.new("Invalid version identifier!") unless %w[1.0 2.0].include?(v)
      @version = v
    end

    # WDL §3.1
    def default_xml_encoding_options
      { :encoding => "UTF-8" }
    end

    # WDL §4
    def to_xml
      xml = Builder::XmlMarkup.new
      xml.instruct!(:xml, default_xml_encoding_options)
      xml.tag!("XML") do |xml_node|
        xml_node.tag!("REQUEST") do |req_node|
          req_node.tag!("ACTION", @action)
          req_node.tag!("META") do |meta_node|
            meta_node.tag!("IPADDRESS" , GlobalCollect::ip_address ) if GlobalCollect::authentication_scheme == :ip_check
            meta_node.tag!("MERCHANTID", GlobalCollect::merchant_id)
            meta_node.tag!("VERSION" , version)
          end
          req_node.tag!("PARAMS") do |params_node|
            yield(params_node)
          end
        end
      end
      xml.target!
    end
  end
end
28.044444
121
0.606181
08a836868e63b17169454ead74315ed082c3e464
1,374
# +Product+ Model
class Product < ApplicationRecord
  # @!attribute id
  #   @return [Integer] +Product+ unique ID
  # @!attribute model
  #   @return [String] Model of product
  # @!attribute price
  #   @return [Decimal] Price in pesos
  # @!attribute inventory
  #   @return [Integer] Amount of products in stock
  # @!attribute product_type
  #   @return [Integer] Specifies type of product
  # @!attribute high_tech
  #   @return [Boolean] Specifies if product is high tech
  # @!attribute rentable
  #   @return [Boolean] Specifies if product can be rented
  # @!attribute created_at
  #   @return [Date] Creation Date
  # @!attribute updated_at
  #   @return [Date] Last update Date

  # relations
  has_many :characteristics, dependent: :destroy
  has_many :product_adquisitions, dependent: :destroy
  has_many :product_sales, dependent: :destroy
  has_many :high_tech_products, dependent: :destroy
  has_many :rent_products, dependent: :destroy

  # validations
  validates :model, :price, :product_type, :inventory, presence: true
  validates :price, :inventory, presence: true, numericality: true
  validates :high_tech, :rentable, inclusion: { in: [true, false] }

  # modules

  # +ProductType+ Module
  module ProductType
    CPU = 0
    PRINTER = 1
    MONITOR = 2
    HDD = 3
    OTHER = 4
  end
end
28.625
58
0.666667
7a087d4eca9e2b0644d2fcef72db9183f53b6ece
8,404
=begin
#Fireblocks API

#No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)

The version of the OpenAPI document: 1.5.0

Generated by: https://openapi-generator.tech
OpenAPI Generator version: 5.3.0

=end

require 'date'
require 'time'

module OpenapiClient
  class TransferPeerPathResponseAllOf
    attr_accessor :name

    attr_accessor :sub_type

    attr_accessor :virtual_type

    attr_accessor :virtual_id

    class EnumAttributeValidator
      attr_reader :datatype
      attr_reader :allowable_values

      def initialize(datatype, allowable_values)
        @allowable_values = allowable_values.map do |value|
          case datatype.to_s
          when /Integer/i
            value.to_i
          when /Float/i
            value.to_f
          else
            value
          end
        end
      end

      def valid?(value)
        !value || allowable_values.include?(value)
      end
    end

    # Attribute mapping from ruby-style variable name to JSON key.
    def self.attribute_map
      {
        :'name' => :'name',
        :'sub_type' => :'subType',
        :'virtual_type' => :'virtualType',
        :'virtual_id' => :'virtualId'
      }
    end

    # Returns all the JSON keys this model knows about
    def self.acceptable_attributes
      attribute_map.values
    end

    # Attribute type mapping.
    def self.openapi_types
      {
        :'name' => :'String',
        :'sub_type' => :'String',
        :'virtual_type' => :'String',
        :'virtual_id' => :'String'
      }
    end

    # List of attributes with nullable: true
    def self.openapi_nullable
      Set.new([
      ])
    end

    # Initializes the object
    # @param [Hash] attributes Model attributes in the form of hash
    def initialize(attributes = {})
      if (!attributes.is_a?(Hash))
        fail ArgumentError, "The input argument (attributes) must be a hash in `OpenapiClient::TransferPeerPathResponseAllOf` initialize method"
      end

      # check to see if the attribute exists and convert string to symbol for hash key
      attributes = attributes.each_with_object({}) { |(k, v), h|
        if (!self.class.attribute_map.key?(k.to_sym))
          fail ArgumentError, "`#{k}` is not a valid attribute in `OpenapiClient::TransferPeerPathResponseAllOf`. Please check the name to make sure it's valid. List of attributes: " + self.class.attribute_map.keys.inspect
        end
        h[k.to_sym] = v
      }

      if attributes.key?(:'name')
        self.name = attributes[:'name']
      end

      if attributes.key?(:'sub_type')
        self.sub_type = attributes[:'sub_type']
      end

      if attributes.key?(:'virtual_type')
        self.virtual_type = attributes[:'virtual_type']
      end

      if attributes.key?(:'virtual_id')
        self.virtual_id = attributes[:'virtual_id']
      end
    end

    # Show invalid properties with the reasons. Usually used together with valid?
    # @return Array for valid properties with the reasons
    def list_invalid_properties
      invalid_properties = Array.new
      invalid_properties
    end

    # Check to see if the all the properties in the model are valid
    # @return true if the model is valid
    def valid?
      virtual_type_validator = EnumAttributeValidator.new('String', ["OFF_EXCHANGE", "DEFAULT", "UNKNOWN"])
      return false unless virtual_type_validator.valid?(@virtual_type)
      true
    end

    # Custom attribute writer method checking allowed values (enum).
    # @param [Object] virtual_type Object to be assigned
    def virtual_type=(virtual_type)
      validator = EnumAttributeValidator.new('String', ["OFF_EXCHANGE", "DEFAULT", "UNKNOWN"])
      unless validator.valid?(virtual_type)
        fail ArgumentError, "invalid value for \"virtual_type\", must be one of #{validator.allowable_values}."
      end
      @virtual_type = virtual_type
    end

    # Checks equality by comparing each attribute.
    # @param [Object] Object to be compared
    def ==(o)
      return true if self.equal?(o)
      self.class == o.class &&
          name == o.name &&
          sub_type == o.sub_type &&
          virtual_type == o.virtual_type &&
          virtual_id == o.virtual_id
    end

    # @see the `==` method
    # @param [Object] Object to be compared
    def eql?(o)
      self == o
    end

    # Calculates hash code according to all attributes.
    # @return [Integer] Hash code
    def hash
      [name, sub_type, virtual_type, virtual_id].hash
    end

    # Builds the object from hash
    # @param [Hash] attributes Model attributes in the form of hash
    # @return [Object] Returns the model itself
    def self.build_from_hash(attributes)
      new.build_from_hash(attributes)
    end

    # Builds the object from hash
    # @param [Hash] attributes Model attributes in the form of hash
    # @return [Object] Returns the model itself
    def build_from_hash(attributes)
      return nil unless attributes.is_a?(Hash)
      self.class.openapi_types.each_pair do |key, type|
        if attributes[self.class.attribute_map[key]].nil? && self.class.openapi_nullable.include?(key)
          self.send("#{key}=", nil)
        elsif type =~ /\AArray<(.*)>/i
          # check to ensure the input is an array given that the attribute
          # is documented as an array but the input is not
          if attributes[self.class.attribute_map[key]].is_a?(Array)
            self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
          end
        elsif !attributes[self.class.attribute_map[key]].nil?
          self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
        end
      end

      self
    end

    # Deserializes the data based on type
    # @param string type Data type
    # @param string value Value to be deserialized
    # @return [Object] Deserialized data
    def _deserialize(type, value)
      case type.to_sym
      when :Time
        Time.parse(value)
      when :Date
        Date.parse(value)
      when :String
        value.to_s
      when :Integer
        value.to_i
      when :Float
        value.to_f
      when :Boolean
        if value.to_s =~ /\A(true|t|yes|y|1)\z/i
          true
        else
          false
        end
      when :Object
        # generic object (usually a Hash), return directly
        value
      when /\AArray<(?<inner_type>.+)>\z/
        inner_type = Regexp.last_match[:inner_type]
        value.map { |v| _deserialize(inner_type, v) }
      when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
        k_type = Regexp.last_match[:k_type]
        v_type = Regexp.last_match[:v_type]
        {}.tap do |hash|
          value.each do |k, v|
            hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
          end
        end
      else # model
        # models (e.g. Pet) or oneOf
        klass = OpenapiClient.const_get(type)
        klass.respond_to?(:openapi_one_of) ? klass.build(value) : klass.build_from_hash(value)
      end
    end

    # Returns the string representation of the object
    # @return [String] String presentation of the object
    def to_s
      to_hash.to_s
    end

    # to_body is an alias to to_hash (backward compatibility)
    # @return [Hash] Returns the object in the form of hash
    def to_body
      to_hash
    end

    # Returns the object in the form of hash
    # @return [Hash] Returns the object in the form of hash
    def to_hash
      hash = {}
      self.class.attribute_map.each_pair do |attr, param|
        value = self.send(attr)
        if value.nil?
          is_nullable = self.class.openapi_nullable.include?(attr)
          next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}"))
        end

        hash[param] = _to_hash(value)
      end
      hash
    end

    # Outputs non-array value in the form of hash
    # For object, use to_hash. Otherwise, just return the value
    # @param [Object] value Any valid value
    # @return [Hash] Returns the value in the form of hash
    def _to_hash(value)
      if value.is_a?(Array)
        value.compact.map { |v| _to_hash(v) }
      elsif value.is_a?(Hash)
        {}.tap do |hash|
          value.each { |k, v| hash[k] = _to_hash(v) }
        end
      elsif value.respond_to? :to_hash
        value.to_hash
      else
        value
      end
    end
  end
end
30.014286
222
0.625297
91a5d72d1fdf7bfb42b2bed9956043e58dd23a78
706
require 'dragonfly'

# Configure
Dragonfly.app.configure do
  plugin :imagemagick,
    convert_command: "/usr/local/bin/convert",
    identify_command: "/usr/local/bin/identify"

  secret "d4daf39f7a689aa7c1f0f5f297b288eeeef009289c2fa3ba3df242c0a354e163"

  url_format "/media/:job/:name"

  datastore :file,
    root_path: Rails.root.join('public/system/dragonfly', Rails.env),
    server_root: Rails.root.join('public')
end

# Logger
Dragonfly.logger = Rails.logger

# Mount as middleware
Rails.application.middleware.use Dragonfly::Middleware

# Add model functionality
if defined?(ActiveRecord::Base)
  ActiveRecord::Base.extend Dragonfly::Model
  ActiveRecord::Base.extend Dragonfly::Model::Validations
end
24.344828
75
0.780453
1dc9c30684813a0508f42574dbf4ea216f6bb031
670
# frozen_string_literal: true

RSpec.describe RuboCop::Cop::Lint::FlipFlop, :config do
  it 'registers an offense for inclusive flip-flops' do
    expect_offense(<<~RUBY)
      DATA.each_line do |line|
      print line if (line =~ /begin/)..(line =~ /end/)
                    ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Avoid the use of flip-flop operators.
      end
    RUBY
  end

  it 'registers an offense for exclusive flip-flops' do
    expect_offense(<<~RUBY)
      DATA.each_line do |line|
      print line if (line =~ /begin/)...(line =~ /end/)
                    ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Avoid the use of flip-flop operators.
      end
    RUBY
  end
end
30.454545
93
0.544776
6225e3260b57d53488601a15a209083cfc0a0b8f
138
class AddSkillsCountToTopics < ActiveRecord::Migration
  def change
    add_column :topics, :skills_count, :integer, default: 0
  end
end
23
59
0.768116
878b41f5a85f4ec9d93945d111a695b6ab9f15f8
1,348
# frozen_string_literal: true

class Actor
  # Ensure your inputs and outputs are not nil by adding `allow_nil: false`.
  #
  # Example:
  #
  #   class CreateUser < Actor
  #     input :name, allow_nil: false
  #     output :user, allow_nil: false
  #   end
  module NilCheckable
    def _call
      check_context_for_nil(self.class.inputs, origin: 'input')

      super

      check_context_for_nil(self.class.outputs, origin: 'output')
    end

    private

    def check_context_for_nil(definitions, origin:)
      definitions.each do |key, options|
        options = deprecated_required_option(options, name: key, origin: origin)

        next unless context[key].nil?
        next unless options.key?(:allow_nil)
        next if options[:allow_nil]

        raise Actor::ArgumentError,
              "The #{origin} \"#{key}\" on #{self.class} does not allow nil " \
              'values.'
      end
    end

    def deprecated_required_option(options, name:, origin:)
      return options unless options.key?(:required)

      warn 'DEPRECATED: The "required" option is deprecated. Replace ' \
           "`#{origin} :#{name}, required: #{options[:required]}` by " \
           "`#{origin} :#{name}, allow_nil: #{!options[:required]}` in " \
           "#{self.class}."

      options.merge(allow_nil: !options[:required])
    end
  end
end
27.510204
80
0.621662
91eccc2a14400676510e8ce53b462aa4e703701e
625
FactoryBot.define do
  factory :response, class: TariffSynchronizer::Response do
    skip_create

    response_code { [200, 404, 403].sample }
    content { Forgery(:basic).text }

    trait :success do
      response_code { 200 }
    end

    trait :not_found do
      response_code { 404 }
      content { nil }
    end

    trait :failed do
      response_code { 403 }
      content { nil }
    end

    trait :blank do
      success
      content { "" }
    end

    trait :retry_exceeded do
      failed
      after(:build, &:retry_count_exceeded!)
    end

    initialize_with { new(response_code, content) }
  end
end
17.857143
59
0.6096
e949ade9eedb96c6c3186f3e2b96e03764bd24cc
768
class TransactionalTestCase < Test::Unit::TestCase
  def run(result, &block)
    # Handle the default "you have no tests" test if it turns up
    return if @method_name.to_s == "default_test"
    ActiveRecord::Base.transaction do
      load_fixtures
      result = super(result, &block)
      raise ActiveRecord::Rollback
    end
    result
  end

  protected

  def load_fixtures
    fixtures = YAML.load_file(
      File.join(File.dirname(__FILE__), '..', 'fixtures', 'dc.yml')
    )
    disable_logging do
      fixtures.keys.sort.each do |key|
        DCField.create(fixtures[key])
      end
    end
  end

  def disable_logging
    logger = ActiveRecord::Base.logger
    ActiveRecord::Base.logger = nil
    yield
    ActiveRecord::Base.logger = logger
  end
end
22.588235
67
0.665365
87422ede99f2601fb6e3e45d0ed2c6e6e42ecd34
1,509
require 'test_helper'

class <%= class_name %>HelperTest < Test::Unit::TestCase
  include OffsitePayments::Integrations

  def setup
    @helper = <%= class_name %>::Helper.new('order-500','[email protected]', :amount => 500, :currency => 'USD')
  end

  def test_basic_helper_fields
    assert_field '', '[email protected]'
    assert_field '', '5.00'
    assert_field '', 'order-500'
  end

  def test_customer_fields
    @helper.customer :first_name => 'Cody', :last_name => 'Fauser', :email => '[email protected]'
    assert_field '', 'Cody'
    assert_field '', 'Fauser'
    assert_field '', '[email protected]'
  end

  def test_address_mapping
    @helper.billing_address :address1 => '1 My Street',
                            :address2 => '',
                            :city => 'Leeds',
                            :state => 'Yorkshire',
                            :zip => 'LS2 7EE',
                            :country => 'CA'

    assert_field '', '1 My Street'
    assert_field '', 'Leeds'
    assert_field '', 'Yorkshire'
    assert_field '', 'LS2 7EE'
  end

  def test_unknown_address_mapping
    @helper.billing_address :farm => 'CA'
    assert_equal 3, @helper.fields.size
  end

  def test_unknown_mapping
    assert_nothing_raised do
      @helper.company_address :address => '500 Dwemthy Fox Road'
    end
  end

  def test_setting_invalid_address_field
    fields = @helper.fields.dup
    @helper.billing_address :street => 'My Street'
    assert_equal fields, @helper.fields
  end
end
27.436364
111
0.607025
26e9d5697bd520d01716a727ba92144ff8d6ccad
973
class FreezeRework < ActiveRecord::Migration
  def self.up
    remove_column :casts, :frozen
    remove_column :cast_versions, :frozen
    remove_column :movie_references, :frozen
    remove_column :reference_versions, :frozen
    remove_column :movies, :frozen
    remove_column :movie_versions, :frozen
    remove_column :name_aliases, :frozen
    remove_column :production_companies, :frozen
  end

  def self.down
    add_column :casts, :frozen, :boolean, :default => false
    add_column :cast_versions, :frozen, :boolean, :default => false
    add_column :movie_references, :frozen, :boolean, :default => false
    add_column :reference_versions, :frozen, :boolean, :default => false
    add_column :movies, :frozen, :boolean, :default => false
    add_column :movie_versions, :frozen, :boolean, :default => false
    add_column :name_aliases, :frozen, :boolean, :default => false
    add_column :production_companies, :frozen, :boolean, :default => false
  end
end
38.92
74
0.726619